From dd84b9d64fb98746a230cd24233ff50a562c39c9 Mon Sep 17 00:00:00 2001
From: 简律纯
Date: Fri, 28 Apr 2023 01:36:44 +0800
Subject:

---
 .cargo/config.toml | 19 +
 .devcontainer/Dockerfile | 33 +
 .devcontainer/devcontainer.json | 56 +
 .editorconfig | 17 +
 .eslintignore | 13 +
 .eslintrc.js | 20 +
 .git-blame-ignore-revs | 2 +
 .gitattributes | 1 +
 .husky/pre-commit | 4 +
 .node-version | 1 +
 .npmrc | 2 +
 .prettierignore | 27 +
 .rustfmt.toml | 11 +
 .taplo.toml | 8 +
 .vscode/settings.json | 24 +-
 CODE_OF_CONDUCT.md | 50 +
 CONTRIBUTING.md | 268 +
 Cargo.lock | 10242 +++++++++++++++
 Cargo.toml | 229 +
 NOTICES.md | 36 +
 SECURITY.md | 1 +
 benchmark/.gitignore | 3 +
 benchmark/README.md | 6 +
 benchmark/package.json | 16 +
 benchmark/src/index.ts | 273 +
 cli/.gitignore | 21 +
 cli/.golangci.yml | 30 +
 cli/LICENSE | 373 +
 cli/Makefile | 332 +
 cli/README.md | 3 +
 cli/cmd/turbo/main.go | 28 +
 cli/cmd/turbo/version.go | 3 +
 cli/combined-release.yml | 78 +
 cli/combined-shim.yml | 78 +
 cli/cross-release.yml | 61 +
 cli/darwin-release.yml | 35 +
 cli/fixtures/01-git-hash-object/.gitignore | 2 +
 cli/fixtures/01-git-hash-object/child/child.json | 0
 .../child/grandchild/grandchild.json | 0
 cli/fixtures/01-git-hash-object/root.json | 0
 cli/go.mod | 91 +
 cli/go.sum | 952 ++
 cli/internal/analytics/analytics.go | 175 +
 cli/internal/analytics/analytics_test.go | 192 +
 cli/internal/cache/async_cache.go | 82 +
 cli/internal/cache/cache.go | 317 +
 cli/internal/cache/cache_fs.go | 174 +
 cli/internal/cache/cache_fs_test.go | 253 +
 cli/internal/cache/cache_http.go | 375 +
 cli/internal/cache/cache_http_test.go | 245 +
 cli/internal/cache/cache_noop.go | 23 +
 .../cache/cache_signature_authentication.go | 88 +
 .../cache/cache_signature_authentication_test.go | 195 +
 cli/internal/cache/cache_test.go | 318 +
 cli/internal/cacheitem/cacheitem.go | 76 +
 cli/internal/cacheitem/create.go | 119 +
 cli/internal/cacheitem/create_test.go | 205 +
 cli/internal/cacheitem/create_unix_test.go | 20 +
 cli/internal/cacheitem/create_windows_test.go | 14 +
 cli/internal/cacheitem/filepath.go | 162 +
 cli/internal/cacheitem/filepath_unix.go | 14 +
 cli/internal/cacheitem/filepath_windows.go | 50 +
 cli/internal/cacheitem/restore.go | 200 +
 cli/internal/cacheitem/restore_directory.go | 144 +
 cli/internal/cacheitem/restore_directory_test.go | 103 +
 cli/internal/cacheitem/restore_regular.go | 46 +
 cli/internal/cacheitem/restore_symlink.go | 180 +
 cli/internal/cacheitem/restore_test.go | 1493 +++
 cli/internal/chrometracing/chrometracing.go | 227 +
 cli/internal/chrometracing/chrometracing_close.go | 26 +
 cli/internal/ci/ci.go | 58 +
 cli/internal/ci/ci_test.go | 105 +
 cli/internal/ci/vendors.go | 253 +
 cli/internal/client/analytics.go | 21 +
 cli/internal/client/cache.go | 167 +
 cli/internal/client/client.go | 309 +
 cli/internal/client/client_test.go | 159 +
 cli/internal/cmd/root.go | 157 +
 cli/internal/cmdutil/cmdutil.go | 245 +
 cli/internal/cmdutil/cmdutil_test.go | 109 +
 cli/internal/colorcache/colorcache.go | 56 +
 cli/internal/config/config_file.go | 192 +
 cli/internal/config/config_file_test.go | 157 +
 cli/internal/context/context.go | 480 +
 cli/internal/context/context_test.go | 162 +
 .../dupe-workspace-names/apps/a/package.json | 6 +
 .../dupe-workspace-names/apps/b/package.json | 6 +
 .../testdata/dupe-workspace-names/package.json | 7 +
 .../dupe-workspace-names/packages/ui/package.json | 3 +
 .../testdata/dupe-workspace-names/pnpm-lock.yaml | 21 +
 .../dupe-workspace-names/pnpm-workspace.yaml | 3 +
 cli/internal/core/engine.go | 591 +
 cli/internal/core/engine_test.go | 88 +
 cli/internal/daemon/connector/connector.go | 391 +
 cli/internal/daemon/connector/connector_test.go | 256 +
 cli/internal/daemon/connector/fork.go | 15 +
 cli/internal/daemon/connector/fork_windows.go | 15 +
 cli/internal/daemon/daemon.go | 307 +
 cli/internal/daemon/daemon_test.go | 262 +
 cli/internal/daemonclient/daemonclient.go | 70 +
 cli/internal/doublestar/doublestar.go | 11 +
 cli/internal/doublestar/doublestar_test.go | 557 +
 cli/internal/doublestar/glob.go | 393 +
 cli/internal/doublestar/globwalk.go | 277 +
 cli/internal/doublestar/match.go | 377 +
 cli/internal/doublestar/utils.go | 71 +
 cli/internal/doublestar/validate.go | 83 +
 cli/internal/encoding/gitoutput/gitoutput.go | 345 +
 cli/internal/encoding/gitoutput/gitoutput_test.go | 377 +
 cli/internal/encoding/gitoutput/validators.go | 148 +
 cli/internal/encoding/gitoutput/validators_test.go | 514 +
 cli/internal/ffi/bindings.h | 21 +
 cli/internal/ffi/ffi.go | 224 +
 cli/internal/ffi/proto/messages.pb.go | 1380 +++
 cli/internal/filewatcher/backend.go | 209 +
 cli/internal/filewatcher/backend_darwin.go | 220 +
 cli/internal/filewatcher/cookie.go | 160 +
 cli/internal/filewatcher/cookie_test.go | 130 +
 cli/internal/filewatcher/filewatcher.go | 167 +
 cli/internal/filewatcher/filewatcher_test.go | 152 +
 cli/internal/fs/copy_file.go | 81 +
 cli/internal/fs/copy_file_test.go | 198 +
 cli/internal/fs/fs.go | 191 +
 cli/internal/fs/fs_test.go | 60 +
 cli/internal/fs/fs_windows_test.go | 18 +
 cli/internal/fs/get_turbo_data_dir_go.go | 16 +
 cli/internal/fs/get_turbo_data_dir_rust.go | 16 +
 cli/internal/fs/hash.go | 61 +
 cli/internal/fs/hash_test.go | 53 +
 cli/internal/fs/lstat.go | 74 +
 cli/internal/fs/package_json.go | 142 +
 cli/internal/fs/package_json_test.go | 174 +
 cli/internal/fs/path.go | 113 +
 cli/internal/fs/testdata/both/package.json | 7 +
 cli/internal/fs/testdata/both/turbo.json | 18 +
 cli/internal/fs/testdata/correct/turbo.json | 49 +
 cli/internal/fs/testdata/invalid-env-1/turbo.json | 8 +
 cli/internal/fs/testdata/invalid-env-2/turbo.json | 8 +
 .../fs/testdata/invalid-global-env/turbo.json | 11 +
 cli/internal/fs/testdata/legacy-env/turbo.json | 34 +
 cli/internal/fs/testdata/legacy-only/package.json | 7 +
 cli/internal/fs/turbo_json.go | 741 ++
 cli/internal/fs/turbo_json_test.go | 277 +
 cli/internal/globby/globby.go | 187 +
 cli/internal/globby/globby_test.go | 832 ++
 cli/internal/globwatcher/globwatcher.go | 210 +
 cli/internal/globwatcher/globwatcher_test.go | 232 +
 cli/internal/graph/graph.go | 274 +
 cli/internal/graph/graph_test.go | 50 +
 cli/internal/graphvisualizer/graphvisualizer.go | 205 +
 cli/internal/hashing/package_deps_hash.go | 461 +
 cli/internal/hashing/package_deps_hash_test.go | 386 +
 cli/internal/inference/inference.go | 167 +
 cli/internal/inference/inference_test.go | 97 +
 cli/internal/lockfile/berry_lockfile.go | 709 ++
 cli/internal/lockfile/berry_lockfile_test.go | 273 +
 cli/internal/lockfile/lockfile.go | 135 +
 cli/internal/lockfile/lockfile_test.go | 25 +
 cli/internal/lockfile/npm_lockfile.go | 107 +
 cli/internal/lockfile/pnpm_lockfile.go | 579 +
 cli/internal/lockfile/pnpm_lockfile_test.go | 405 +
 cli/internal/lockfile/testdata/berry.lock | 3283 +++++
 cli/internal/lockfile/testdata/minimal-berry.lock | 45 +
 .../testdata/npm-lock-workspace-variation.json | 186 +
 cli/internal/lockfile/testdata/npm-lock.json | 6472 ++++++++++
 .../lockfile/testdata/pnpm-absolute-v6.yaml | 18 +
 cli/internal/lockfile/testdata/pnpm-absolute.yaml | 38 +
 cli/internal/lockfile/testdata/pnpm-patch-v6.yaml | 40 +
 cli/internal/lockfile/testdata/pnpm-patch.yaml | 63 +
 cli/internal/lockfile/testdata/pnpm-peer-v6.yaml | 67 +
 .../lockfile/testdata/pnpm-top-level-dupe.yaml | 36 +
 .../lockfile/testdata/pnpm6-workspace.yaml | 1704 +++
 .../lockfile/testdata/pnpm7-workspace.yaml | 3445 ++++++
 cli/internal/lockfile/testdata/pnpm8.yaml | 107 +
 cli/internal/lockfile/testdata/pnpm_override.yaml | 24 +
 cli/internal/lockfile/testdata/yarn.lock | 2304 ++++
 cli/internal/lockfile/yarn_lockfile.go | 124 +
 cli/internal/lockfile/yarn_lockfile_test.go | 51 +
 cli/internal/logstreamer/logstreamer.go | 159 +
 cli/internal/logstreamer/logstreamer_test.go | 114 +
 cli/internal/nodes/packagetask.go | 45 +
 cli/internal/packagemanager/berry.go | 156 +
 cli/internal/packagemanager/fixtures/package.json | 7 +
 .../packagemanager/fixtures/pnpm-patches.json | 11 +
 .../packagemanager/fixtures/pnpm-workspace.yaml | 3 +
 cli/internal/packagemanager/infer_root.go | 146 +
 cli/internal/packagemanager/infer_root_test.go | 347 +
 cli/internal/packagemanager/npm.go | 59 +
 cli/internal/packagemanager/packagemanager.go | 197 +
 cli/internal/packagemanager/packagemanager_test.go | 411 +
 cli/internal/packagemanager/pnpm.go | 168 +
 cli/internal/packagemanager/pnpm6.go | 63 +
 cli/internal/packagemanager/pnpm_test.go | 57 +
 cli/internal/packagemanager/yarn.go | 116 +
 cli/internal/process/child.go | 406 +
 cli/internal/process/child_nix_test.go | 190 +
 cli/internal/process/child_test.go | 193 +
 cli/internal/process/manager.go | 120 +
 cli/internal/process/manager_test.go | 94 +
 cli/internal/process/sys_nix.go | 23 +
 cli/internal/process/sys_windows.go | 17 +
 cli/internal/prune/prune.go | 314 +
 cli/internal/run/dry_run.go | 122 +
 cli/internal/run/global_hash.go | 164 +
 cli/internal/run/graph_run.go | 46 +
 cli/internal/run/log_tag_go.go | 11 +
 cli/internal/run/log_tag_rust.go | 11 +
 cli/internal/run/real_run.go | 420 +
 cli/internal/run/run.go | 487 +
 cli/internal/run/run_spec.go | 90 +
 cli/internal/run/run_spec_test.go | 107 +
 cli/internal/runcache/output_watcher.go | 32 +
 cli/internal/runcache/runcache.go | 354 +
 cli/internal/runsummary/execution_summary.go | 282 +
 .../runsummary/format_execution_summary.go | 70 +
 cli/internal/runsummary/format_json.go | 66 +
 cli/internal/runsummary/format_text.go | 100 +
 cli/internal/runsummary/globalhash_summary.go | 38 +
 cli/internal/runsummary/run_summary.go | 320 +
 cli/internal/runsummary/spaces.go | 96 +
 cli/internal/runsummary/task_summary.go | 117 +
 cli/internal/scm/git_go.go | 111 +
 cli/internal/scm/git_rust.go | 34 +
 cli/internal/scm/scm.go | 53 +
 cli/internal/scm/stub.go | 14 +
 cli/internal/scope/filter/filter.go | 421 +
 cli/internal/scope/filter/filter_test.go | 614 +
 cli/internal/scope/filter/matcher.go | 32 +
 cli/internal/scope/filter/matcher_test.go | 65 +
 cli/internal/scope/filter/parse_target_selector.go | 165 +
 .../scope/filter/parse_target_selector_test.go | 311 +
 cli/internal/scope/scope.go | 380 +
 cli/internal/scope/scope_test.go | 550 +
 cli/internal/server/server.go | 192 +
 cli/internal/server/server_test.go | 73 +
 cli/internal/signals/signals.go | 60 +
 cli/internal/spinner/spinner.go | 89 +
 cli/internal/tarpatch/tar.go | 92 +
 cli/internal/tarpatch/tar_unix.go | 42 +
 cli/internal/tarpatch/tar_windows.go | 27 +
 cli/internal/taskhash/taskhash.go | 497 +
 cli/internal/taskhash/taskhash_test.go | 138 +
 cli/internal/turbodprotocol/turbod.proto | 53 +
 cli/internal/turbopath/absolute_system_path.go | 258 +
 .../turbopath/absolute_system_path_darwin.go | 23 +
 .../turbopath/absolute_system_path_notdarwin.go | 13 +
 .../turbopath/absolute_system_path_test.go | 174 +
 cli/internal/turbopath/anchored_system_path.go | 75 +
 cli/internal/turbopath/anchored_unix_path.go | 31 +
 cli/internal/turbopath/find_up.go | 50 +
 cli/internal/turbopath/relative_system_path.go | 44 +
 cli/internal/turbopath/relative_unix_path.go | 31 +
 cli/internal/turbopath/turbopath.go | 112 +
 cli/internal/turbostate/turbostate.go | 141 +
 cli/internal/ui/charset.go | 3 +
 cli/internal/ui/colors.go | 54 +
 cli/internal/ui/spinner.go | 80 +
 cli/internal/ui/term/cursor.go | 73 +
 cli/internal/ui/term/cursor_test.go | 43 +
 cli/internal/ui/ui.go | 121 +
 cli/internal/util/backends.go | 30 +
 cli/internal/util/browser/open.go | 37 +
 cli/internal/util/closer.go | 15 +
 cli/internal/util/cmd.go | 24 +
 cli/internal/util/filter/filter.go | 133 +
 cli/internal/util/filter/filter_test.go | 116 +
 cli/internal/util/graph.go | 35 +
 cli/internal/util/modulo.go | 13 +
 cli/internal/util/parse_concurrency.go | 39 +
 cli/internal/util/parse_concurrency_test.go | 79 +
 cli/internal/util/printf.go | 63 +
 cli/internal/util/run_opts.go | 53 +
 cli/internal/util/semaphore.go | 43 +
 cli/internal/util/set.go | 147 +
 cli/internal/util/set_test.go | 149 +
 cli/internal/util/status.go | 47 +
 cli/internal/util/task_id.go | 66 +
 cli/internal/util/task_output_mode.go | 100 +
 cli/internal/workspace/workspace.go | 10 +
 cli/internal/xxhash/xxhash.go | 202 +
 cli/internal/yaml/apic.go | 747 ++
 cli/internal/yaml/decode.go | 1000 ++
 cli/internal/yaml/emitterc.go | 2019 +++
 cli/internal/yaml/encode.go | 577 +
 cli/internal/yaml/parserc.go | 1274 ++
 cli/internal/yaml/readerc.go | 434 +
 cli/internal/yaml/resolve.go | 326 +
 cli/internal/yaml/scannerc.go | 3040 +++++
 cli/internal/yaml/sorter.go | 134 +
 cli/internal/yaml/writerc.go | 48 +
 cli/internal/yaml/yaml.go | 693 ++
 cli/internal/yaml/yamlh.go | 809 ++
 cli/internal/yaml/yamlprivateh.go | 198 +
 cli/package.json | 18 +
 cli/scripts/generate.mjs | 297 +
 cli/scripts/nginx/.dockerignore | 1 +
 cli/scripts/nginx/Dockerfile.cacher | 11 +
 cli/scripts/nginx/docker-compose.yml | 9 +
 cli/scripts/nginx/nginx.conf | 39 +
 cli/scripts/npm-native-packages/.gitignore | 1 +
 .../npm-native-packages/npm-native-packages.js | 57 +
 cli/scripts/npm-native-packages/template/README.md | 3 +
 cli/scripts/npm-native-packages/template/bin/turbo | 15 +
 .../template/template.package.json | 12 +
 cli/scripts/templates/jest.config.js | 10 +
 cli/scripts/templates/src/__tests__/index.test.ts | 7 +
 cli/scripts/templates/src/__tests__/tsconfig.json | 4 +
 cli/scripts/templates/src/index.ts | 3 +
 cli/scripts/templates/tsconfig.json | 9 +
 cli/turbo.json | 17 +
 clippy.toml | 14 +
 deny.toml | 40 +
 docs/next.config.js | 306 +-
 docs/package.json | 5 +-
 package.json | 63 +
 packages/create-turbo/.gitignore | 1 +
 packages/create-turbo/LICENSE | 373 +
 packages/create-turbo/README.md | 13 +
 packages/create-turbo/__tests__/examples.test.ts | 134 +
 packages/create-turbo/__tests__/git.test.ts | 239 +
 packages/create-turbo/__tests__/index.test.ts | 90 +
 .../create-turbo/__tests__/isFolderEmpty.test.ts | 41 +
 packages/create-turbo/__tests__/isWritable.test.ts | 35 +
 packages/create-turbo/__tests__/test-utils.ts | 34 +
 packages/create-turbo/jest.config.js | 11 +
 packages/create-turbo/package.json | 65 +
 packages/create-turbo/src/cli.ts | 65 +
 .../src/commands/create/createProject.ts | 192 +
 packages/create-turbo/src/commands/create/index.ts | 243 +
 .../create-turbo/src/commands/create/prompts.ts | 124 +
 packages/create-turbo/src/commands/create/types.ts | 8 +
 packages/create-turbo/src/commands/index.ts | 1 +
 packages/create-turbo/src/logger.ts | 32 +
 packages/create-turbo/src/transforms/errors.ts | 17 +
 packages/create-turbo/src/transforms/git-ignore.ts | 30 +
 packages/create-turbo/src/transforms/index.ts | 13 +
 .../src/transforms/official-starter.ts | 73 +
 .../create-turbo/src/transforms/package-manager.ts | 26 +
 packages/create-turbo/src/transforms/types.ts | 30 +
 packages/create-turbo/src/utils/examples.ts | 139 +
 packages/create-turbo/src/utils/git.ts | 90 +
 .../create-turbo/src/utils/isDefaultExample.ts | 5 +
 packages/create-turbo/src/utils/isFolderEmpty.ts | 37 +
 packages/create-turbo/src/utils/isOnline.ts | 40 +
 packages/create-turbo/src/utils/isWriteable.ts | 10 +
 packages/create-turbo/src/utils/notifyUpdate.ts | 22 +
 packages/create-turbo/tsconfig.json | 7 +
 packages/create-turbo/tsup.config.ts | 9 +
 packages/create-turbo/turbo.json | 12 +
 packages/eslint-config-turbo/LICENSE | 373 +
 packages/eslint-config-turbo/README.md | 27 +
 packages/eslint-config-turbo/index.js | 3 +
 packages/eslint-config-turbo/package.json | 32 +
 packages/eslint-plugin-turbo/LICENSE | 373 +
 packages/eslint-plugin-turbo/README.md | 53 +
 .../__fixtures__/configs/single/turbo.json | 25 +
 .../workspace-configs/apps/docs/index.js | 6 +
 .../workspace-configs/apps/docs/package.json | 4 +
 .../workspace-configs/apps/docs/turbo.json | 9 +
 .../workspace-configs/apps/web/index.js | 6 +
 .../workspace-configs/apps/web/package.json | 4 +
 .../workspace-configs/apps/web/turbo.json | 9 +
 .../__fixtures__/workspace-configs/package.json | 14 +
 .../workspace-configs/packages/ui/index.js | 6 +
 .../workspace-configs/packages/ui/package.json | 4 +
 .../workspace-configs/packages/ui/turbo.json | 9 +
 .../__fixtures__/workspace-configs/turbo.json | 9 +
 .../__fixtures__/workspace/.eslintrc.js | 4 +
 .../__fixtures__/workspace/child/child.js | 2 +
 .../__fixtures__/workspace/package-lock.json | 58 +
 .../__fixtures__/workspace/package.json | 5 +
 .../__fixtures__/workspace/peer.js | 1 +
 .../__fixtures__/workspace/turbo.json | 34 +
 packages/eslint-plugin-turbo/__tests__/cwd.test.ts | 88 +
 .../__tests__/lib/no-undeclared-env-vars.test.ts | 433 +
 .../docs/rules/no-undeclared-env-vars.md | 74 +
 packages/eslint-plugin-turbo/jest.config.js | 12 +
 .../eslint-plugin-turbo/lib/configs/recommended.ts | 26 +
 packages/eslint-plugin-turbo/lib/constants.ts | 5 +
 packages/eslint-plugin-turbo/lib/index.ts | 17 +
 .../lib/rules/no-undeclared-env-vars.ts | 187 +
 .../lib/utils/getEnvVarDependencies.ts | 75 +
 packages/eslint-plugin-turbo/package.json | 51 +
 packages/eslint-plugin-turbo/tsconfig.json | 6 +
 packages/eslint-plugin-turbo/tsup.config.ts | 8 +
 packages/eslint-plugin-turbo/turbo.json | 9 +
 packages/node-module-trace/package.json | 10 +
 packages/tsconfig/README.md | 3 +
 packages/tsconfig/base.json | 20 +
 packages/tsconfig/library.json | 12 +
 packages/tsconfig/package.json | 5 +
 packages/turbo-codemod/LICENSE | 373 +
 packages/turbo-codemod/README.md | 55 +
 .../has-package-manager/package.json | 7 +
 .../no-package-manager/package.json | 6 +
 .../wrong-package-manager/package.json | 7 +
 .../create-turbo-config/both-configs/package.json | 28 +
 .../create-turbo-config/both-configs/turbo.json | 18 +
 .../no-package-json-config/package.json | 7 +
 .../no-package-json-file/a-random-file.txt | 1 +
 .../no-turbo-json-config/package.json | 24 +
 .../turbo-json-config/package.json | 7 +
 .../turbo-json-config/turbo.json | 18 +
 .../get-turbo-upgrade-command/no-deps/package.json | 4 +
 .../get-turbo-upgrade-command/no-package/README.md | 1 +
 .../no-turbo/package.json | 6 +
 .../normal-workspaces-dev-install/package.json | 12 +
 .../normal-workspaces/package.json | 12 +
 .../pnpm-workspaces-dev-install/package.json | 8 +
 .../pnpm-workspace.yaml | 3 +
 .../pnpm-workspaces/package.json | 8 +
 .../pnpm-workspaces/pnpm-workspace.yaml | 3 +
 .../single-package-dev-install/package.json | 8 +
 .../single-package/package.json | 8 +
 .../env-dependencies/turbo.json | 21 +
 .../migrated-env-dependencies/turbo.json | 25 +
 .../no-turbo-json/package.json | 7 +
 .../old-config/package.json | 20 +
 .../old-config/turbo.json | 12 +
 .../workspace-configs/apps/docs/index.js | 6 +
 .../workspace-configs/apps/docs/package.json | 4 +
 .../workspace-configs/apps/docs/turbo.json | 9 +
 .../workspace-configs/apps/web/index.js | 6 +
 .../workspace-configs/apps/web/package.json | 4 +
 .../workspace-configs/apps/web/turbo.json | 12 +
 .../workspace-configs/package.json | 14 +
 .../workspace-configs/packages/ui/index.js | 6 +
 .../workspace-configs/packages/ui/package.json | 4 +
 .../workspace-configs/packages/ui/turbo.json | 9 +
 .../workspace-configs/turbo.json | 21 +
 .../__fixtures__/migrate/no-repo/README.md | 1 +
 .../__fixtures__/migrate/old-turbo/package.json | 26 +
 .../invalid-outputs/package.json | 7 +
 .../set-default-outputs/invalid-outputs/turbo.json | 36 +
 .../set-default-outputs/no-outputs/package.json | 7 +
 .../set-default-outputs/no-outputs/turbo.json | 14 +
 .../set-default-outputs/no-pipeline/package.json | 7 +
 .../set-default-outputs/no-pipeline/turbo.json | 5 +
 .../set-default-outputs/no-turbo-json/package.json | 7 +
 .../set-default-outputs/old-config/package.json | 20 +
 .../set-default-outputs/old-config/turbo.json | 12 +
 .../set-default-outputs/old-outputs/package.json | 7 +
 .../set-default-outputs/old-outputs/turbo.json | 12 +
 .../workspace-configs/apps/docs/index.js | 6 +
 .../workspace-configs/apps/docs/package.json | 4 +
 .../workspace-configs/apps/docs/turbo.json | 7 +
 .../workspace-configs/apps/web/index.js | 6 +
 .../workspace-configs/apps/web/package.json | 4 +
 .../workspace-configs/apps/web/turbo.json | 10 +
 .../workspace-configs/package.json | 14 +
 .../workspace-configs/packages/ui/index.js | 6 +
 .../workspace-configs/packages/ui/package.json | 4 +
 .../workspace-configs/packages/ui/turbo.json | 7 +
 .../workspace-configs/turbo.json | 12 +
 .../__fixtures__/transform/basic/package.json | 8 +
 .../__tests__/add-package-manager.test.ts | 504 +
 .../__tests__/create-turbo-config.test.ts | 416 +
 .../__tests__/get-turbo-upgrade-command.test.ts | 576 +
 .../__tests__/migrate-env-var-dependencies.test.ts | 758 ++
 packages/turbo-codemod/__tests__/migrate.test.ts | 761 ++
 .../__tests__/set-default-outputs.test.ts | 391 +
 packages/turbo-codemod/__tests__/transform.test.ts | 172 +
 packages/turbo-codemod/index.d.ts | 1 +
 packages/turbo-codemod/jest.config.js | 18 +
 packages/turbo-codemod/package.json | 67 +
 packages/turbo-codemod/plopfile.js | 46 +
 packages/turbo-codemod/src/cli.ts | 73 +
 packages/turbo-codemod/src/commands/index.ts | 11 +
 .../turbo-codemod/src/commands/migrate/index.ts | 215 +
 .../commands/migrate/steps/getCurrentVersion.ts | 45 +
 .../src/commands/migrate/steps/getLatestVersion.ts | 31 +
 .../migrate/steps/getTransformsForMigration.ts | 25 +
 .../migrate/steps/getTurboUpgradeCommand.ts | 182 +
 .../turbo-codemod/src/commands/migrate/types.ts | 9 +
 .../turbo-codemod/src/commands/migrate/utils.ts | 16 +
 .../turbo-codemod/src/commands/transform/index.ts | 101 +
 .../turbo-codemod/src/commands/transform/types.ts | 7 +
 packages/turbo-codemod/src/runner/FileTransform.ts | 94 +
 packages/turbo-codemod/src/runner/Runner.ts | 132 +
 packages/turbo-codemod/src/runner/index.ts | 3 +
 packages/turbo-codemod/src/runner/types.ts | 40 +
 packages/turbo-codemod/src/transforms/README.md | 36 +
 .../src/transforms/add-package-manager.ts | 75 +
 .../src/transforms/create-turbo-config.ts | 70 +
 .../src/transforms/migrate-env-var-dependencies.ts | 181 +
 .../src/transforms/set-default-outputs.ts | 97 +
 packages/turbo-codemod/src/types.ts | 24 +
 packages/turbo-codemod/src/utils/checkGitStatus.ts | 40 +
 packages/turbo-codemod/src/utils/directoryInfo.ts | 10 +
 .../turbo-codemod/src/utils/getPackageManager.ts | 42 +
 .../src/utils/getPackageManagerVersion.ts | 16 +
 .../src/utils/getTransformerHelpers.ts | 23 +
 .../turbo-codemod/src/utils/loadTransformers.ts | 27 +
 packages/turbo-codemod/src/utils/logger.ts | 47 +
 packages/turbo-codemod/src/utils/looksLikeRepo.ts | 12 +
 packages/turbo-codemod/src/utils/notifyUpdate.ts | 35 +
 packages/turbo-codemod/templates/transformer.hbs | 45 +
 .../turbo-codemod/templates/transformer.test.hbs | 25 +
 packages/turbo-codemod/tsconfig.json | 6 +
 packages/turbo-codemod/tsup.config.ts | 9 +
 packages/turbo-ignore/README.md | 99 +
 .../turbo-ignore/__fixtures__/app/package.json | 11 +
 .../__fixtures__/invalid-app/package.json | 10 +
 packages/turbo-ignore/__fixtures__/no-app/index.js | 0
 packages/turbo-ignore/__tests__/args.test.ts | 109 +
 .../turbo-ignore/__tests__/checkCommit.test.ts | 229 +
 packages/turbo-ignore/__tests__/errors.test.ts | 46 +
 .../turbo-ignore/__tests__/getComparison.test.ts | 61 +
 packages/turbo-ignore/__tests__/getTask.test.ts | 27 +
 .../turbo-ignore/__tests__/getWorkspace.test.ts | 62 +
 packages/turbo-ignore/__tests__/ignore.test.ts | 578 +
 packages/turbo-ignore/jest.config.js | 18 +
 packages/turbo-ignore/package.json | 40 +
 packages/turbo-ignore/src/args.ts | 89 +
 packages/turbo-ignore/src/checkCommit.ts | 104 +
 packages/turbo-ignore/src/errors.ts | 43 +
 packages/turbo-ignore/src/getComparison.ts | 39 +
 packages/turbo-ignore/src/getTask.ts | 13 +
 packages/turbo-ignore/src/getWorkspace.ts | 37 +
 packages/turbo-ignore/src/ignore.ts | 125 +
 packages/turbo-ignore/src/index.ts | 6 +
 packages/turbo-ignore/src/logger.ts | 16 +
 packages/turbo-ignore/src/types.ts | 23 +
 packages/turbo-ignore/tsconfig.json | 6 +
 packages/turbo-ignore/tsup.config.ts | 9 +
 packages/turbo-test-utils/README.md | 3 +
 packages/turbo-test-utils/package.json | 40 +
 packages/turbo-test-utils/src/index.ts | 9 +
 packages/turbo-test-utils/src/mockEnv.ts | 12 +
 packages/turbo-test-utils/src/spyConsole.ts | 25 +
 packages/turbo-test-utils/src/spyExit.ts | 21 +
 packages/turbo-test-utils/src/useFixtures.ts | 89 +
 packages/turbo-test-utils/src/validateLogs.ts | 27 +
 packages/turbo-test-utils/tsconfig.json | 6 +
 packages/turbo-tracing-next-plugin/README.md | 39 +
 packages/turbo-tracing-next-plugin/package.json | 25 +
 packages/turbo-tracing-next-plugin/src/index.ts | 27 +
 .../test/with-mongodb-mongoose/.env.local.example | 1 +
 .../test/with-mongodb-mongoose/.gitignore | 34 +
 .../test/with-mongodb-mongoose/README.md | 5 +
 .../test/with-mongodb-mongoose/components/Form.js | 202 +
 .../test/with-mongodb-mongoose/css/form.css | 39 +
 .../test/with-mongodb-mongoose/css/style.css | 184 +
 .../test/with-mongodb-mongoose/lib/dbConnect.js | 40 +
 .../test/with-mongodb-mongoose/models/Pet.js | 59 +
 .../test/with-mongodb-mongoose/next.config.js | 12 +
 .../test/with-mongodb-mongoose/package.json | 20 +
 .../test/with-mongodb-mongoose/pages/[id]/edit.js | 33 +
 .../test/with-mongodb-mongoose/pages/[id]/index.js | 75 +
 .../test/with-mongodb-mongoose/pages/_app.js | 36 +
 .../with-mongodb-mongoose/pages/api/pets/[id].js | 56 +
 .../with-mongodb-mongoose/pages/api/pets/index.js | 32 +
 .../test/with-mongodb-mongoose/pages/index.js | 65 +
 .../test/with-mongodb-mongoose/pages/new.js | 19 +
 .../test/with-mongodb-mongoose/public/favicon.ico | Bin 0 -> 15086 bytes
 .../test/with-mongodb-mongoose/public/zeit.svg | 10 +
 packages/turbo-tracing-next-plugin/tsconfig.json | 14 +
 packages/turbo-types/package.json | 19 +
 packages/turbo-types/src/index.ts | 1 +
 packages/turbo-types/src/scripts/codegen.js | 22 +
 packages/turbo-types/src/types/config.ts | 231 +
 packages/turbo-types/tsconfig.json | 6 +
 packages/turbo-types/turbo.json | 8 +
 packages/turbo-utils/LICENSE | 373 +
 packages/turbo-utils/README.md | 3 +
 .../common/single-package/child/child.js | 2 +
 .../common/single-package/package.json | 3 +
 .../__fixtures__/common/single-package/turbo.json | 34 +
 .../common/workspace-configs/apps/docs/index.js | 6 +
 .../workspace-configs/apps/docs/package.json | 4 +
 .../common/workspace-configs/apps/web/index.js | 6 +
 .../common/workspace-configs/apps/web/package.json | 4 +
 .../common/workspace-configs/apps/web/turbo.json | 9 +
 .../common/workspace-configs/package.json | 14 +
 .../common/workspace-configs/packages/ui/index.js | 6 +
 .../workspace-configs/packages/ui/package.json | 4 +
 .../workspace-configs/packages/ui/turbo.json | 9 +
 .../workspace-configs/packages/utils/index.js | 6 +
 .../workspace-configs/packages/utils/package.json | 4 +
 .../workspace-configs/packages/utils/turbo.json | 9 +
 .../common/workspace-configs/turbo.json | 9 +
 .../turbo-utils/__tests__/getTurboConfigs.test.ts | 112 +
 .../turbo-utils/__tests__/getTurboRoot.test.ts | 33 +
 packages/turbo-utils/jest.config.js | 7 +
 packages/turbo-utils/package.json | 45 +
 packages/turbo-utils/src/getTurboConfigs.ts | 106 +
 packages/turbo-utils/src/getTurboRoot.ts | 49 +
 packages/turbo-utils/src/index.ts | 8 +
 packages/turbo-utils/src/managers.ts | 53 +
 packages/turbo-utils/src/searchUp.ts | 44 +
 packages/turbo-utils/tsconfig.json | 6 +
 packages/turbo-utils/tsup.config.ts | 12 +
 packages/turbo-workspaces/LICENSE | 373 +
 packages/turbo-workspaces/README.md | 49 +
 .../turbo-workspaces/__fixtures__/invalid/index.js | 1 +
 .../npm/monorepo/apps/docs/package.json | 11 +
 .../npm/monorepo/apps/web/package.json | 11 +
 .../__fixtures__/npm/monorepo/package-lock.json | 385 +
 .../__fixtures__/npm/monorepo/package.json | 14 +
 .../npm/monorepo/packages/tsconfig/package.json | 5 +
 .../npm/monorepo/packages/ui/package.json | 7 +
 .../npm/non-monorepo/package-lock.json | 12 +
 .../__fixtures__/npm/non-monorepo/package.json | 6 +
 .../pnpm/monorepo/apps/docs/package.json | 11 +
 .../pnpm/monorepo/apps/web/package.json | 11 +
 .../__fixtures__/pnpm/monorepo/package.json | 10 +
 .../pnpm/monorepo/packages/tsconfig/package.json | 5 +
 .../pnpm/monorepo/packages/ui/package.json | 7 +
 .../__fixtures__/pnpm/monorepo/pnpm-lock.yaml | 33 +
 .../__fixtures__/pnpm/monorepo/pnpm-workspace.yaml | 3 +
 .../__fixtures__/pnpm/non-monorepo/package.json | 8 +
 .../__fixtures__/pnpm/non-monorepo/pnpm-lock.yaml | 33 +
 .../yarn/monorepo/apps/docs/package.json | 11 +
 .../yarn/monorepo/apps/web/package.json | 11 +
 .../__fixtures__/yarn/monorepo/package.json | 14 +
 .../yarn/monorepo/packages/tsconfig/package.json | 5 +
 .../yarn/monorepo/packages/ui/package.json | 7 +
 .../__fixtures__/yarn/monorepo/yarn.lock | 4 +
 .../__fixtures__/yarn/non-monorepo/package.json | 8 +
 .../__fixtures__/yarn/non-monorepo/yarn.lock | 4 +
 packages/turbo-workspaces/__tests__/index.test.ts | 85 +
 .../turbo-workspaces/__tests__/managers.test.ts | 285 +
 packages/turbo-workspaces/__tests__/test-utils.ts | 153 +
 packages/turbo-workspaces/jest.config.js | 19 +
 packages/turbo-workspaces/package.json | 66 +
 packages/turbo-workspaces/src/cli.ts | 53 +
 .../turbo-workspaces/src/commands/convert/index.ts | 109 +
 .../turbo-workspaces/src/commands/convert/types.ts | 6 +
 packages/turbo-workspaces/src/commands/index.ts | 8 +
 .../turbo-workspaces/src/commands/summary/index.ts | 98 +
 .../turbo-workspaces/src/commands/summary/types.ts | 1 +
 packages/turbo-workspaces/src/convert.ts | 61 +
 packages/turbo-workspaces/src/errors.ts | 31 +
 .../turbo-workspaces/src/getWorkspaceDetails.ts | 35 +
 packages/turbo-workspaces/src/index.ts | 58 +
 packages/turbo-workspaces/src/install.ts | 125 +
 packages/turbo-workspaces/src/logger.ts | 109 +
 packages/turbo-workspaces/src/managers/index.ts | 11 +
 packages/turbo-workspaces/src/managers/npm.ts | 223 +
 packages/turbo-workspaces/src/managers/pnpm.ts | 238 +
 packages/turbo-workspaces/src/managers/yarn.ts | 222 +
 packages/turbo-workspaces/src/types.ts | 127 +
 .../turbo-workspaces/src/updateDependencies.ts | 135 +
 packages/turbo-workspaces/src/utils.ts | 197 +
 packages/turbo-workspaces/tsconfig.json | 6 +
 packages/turbo-workspaces/tsup.config.ts | 10 +
 packages/turbo-workspaces/turbo.json | 9 +
 packages/turbo/.dev-mode | 3 +
 packages/turbo/README.md | 54 +
 packages/turbo/bin/turbo | 14 +
 packages/turbo/bump-version.js | 21 +
 packages/turbo/install.js | 331 +
 packages/turbo/node-platform.js | 257 +
 packages/turbo/package.json | 29 +
 packages/webpack-nmt/package.json | 26 +
 packages/webpack-nmt/src/index.ts | 173 +
 packages/webpack-nmt/tsconfig.json | 9 +
 pnpm-lock.yaml | 12361 +++++++++++++++++++
 pnpm-workspace.yaml | 14 +
 release.md | 81 +
 rust-toolchain | 1 +
 troubleshooting.md | 42 +
 tsconfig.json | 17 +
 tsconfig.project.json | 8 +
 turbo.json | 29 +
 turbow.js | 19 +
 version.txt | 2 +
 667 files changed, 112618 insertions(+), 156 deletions(-)
 create mode 100644 .cargo/config.toml
 create mode 100644 .devcontainer/Dockerfile
 create mode 100644 .devcontainer/devcontainer.json
 create mode 100644 .eslintignore
 create mode 100644 .eslintrc.js
 create mode 100644 .git-blame-ignore-revs
 create mode 100644 .gitattributes
 create mode 100644 .husky/pre-commit
 create mode 100644 .node-version
 create mode 100644 .npmrc
 create mode 100644 .prettierignore
 create mode 100644 .rustfmt.toml
 create mode 100644 .taplo.toml
 create mode 100644 CODE_OF_CONDUCT.md
 create mode 100644 CONTRIBUTING.md
 create mode 100644 Cargo.lock
 create mode 100644 Cargo.toml
 create mode 100644 NOTICES.md
 create mode 100644 SECURITY.md
 create mode 100644 benchmark/.gitignore
 create mode 100644 benchmark/README.md
 create mode 100644 benchmark/package.json
 create mode 100644 benchmark/src/index.ts
 create mode 100644 cli/.gitignore
 create mode 100644 cli/.golangci.yml
 create mode 100644 cli/LICENSE
 create mode 100644 cli/Makefile
 create mode 100644 cli/README.md
 create mode 100644 cli/cmd/turbo/main.go
 create mode 100644 cli/cmd/turbo/version.go
 create mode 100644 cli/combined-release.yml
 create mode 100644 cli/combined-shim.yml
 create mode 100644 cli/cross-release.yml
 create mode 100644 cli/darwin-release.yml
 create mode 100644 cli/fixtures/01-git-hash-object/.gitignore
 create mode 100644 cli/fixtures/01-git-hash-object/child/child.json
 create mode 100644 cli/fixtures/01-git-hash-object/child/grandchild/grandchild.json
 create mode 100644 cli/fixtures/01-git-hash-object/root.json
 create mode 100644 cli/go.mod
 create mode 100644 cli/go.sum
 create mode 100644 cli/internal/analytics/analytics.go
 create mode 100644 cli/internal/analytics/analytics_test.go
 create mode 100644 cli/internal/cache/async_cache.go
 create mode 100644 cli/internal/cache/cache.go
 create mode 100644 cli/internal/cache/cache_fs.go
 create mode 100644 cli/internal/cache/cache_fs_test.go
 create mode 100644 cli/internal/cache/cache_http.go
 create mode 100644 cli/internal/cache/cache_http_test.go
 create mode 100644 cli/internal/cache/cache_noop.go
 create mode 100644 cli/internal/cache/cache_signature_authentication.go
 create mode 100644 cli/internal/cache/cache_signature_authentication_test.go
 create mode 100644 cli/internal/cache/cache_test.go
 create mode 100644 cli/internal/cacheitem/cacheitem.go
 create mode 100644 cli/internal/cacheitem/create.go
 create mode 100644 cli/internal/cacheitem/create_test.go
 create mode 100644 cli/internal/cacheitem/create_unix_test.go
 create mode 100644 cli/internal/cacheitem/create_windows_test.go
 create mode 100644 cli/internal/cacheitem/filepath.go
 create mode 100644 cli/internal/cacheitem/filepath_unix.go
 create mode 100644 cli/internal/cacheitem/filepath_windows.go
 create mode 100644 cli/internal/cacheitem/restore.go
 create mode 100644 cli/internal/cacheitem/restore_directory.go
 create mode 100644 cli/internal/cacheitem/restore_directory_test.go
 create mode 100644 cli/internal/cacheitem/restore_regular.go
 create mode 100644 cli/internal/cacheitem/restore_symlink.go
 create mode 100644 cli/internal/cacheitem/restore_test.go
 create mode 100644 cli/internal/chrometracing/chrometracing.go
 create mode 100644 cli/internal/chrometracing/chrometracing_close.go
 create mode 100644 cli/internal/ci/ci.go
 create mode 100644 cli/internal/ci/ci_test.go
 create mode 100644 cli/internal/ci/vendors.go
 create mode 100644 cli/internal/client/analytics.go
 create mode 100644 cli/internal/client/cache.go
 create mode 100644 cli/internal/client/client.go
 create mode 100644 cli/internal/client/client_test.go
 create mode 100644 cli/internal/cmd/root.go
 create mode 100644 cli/internal/cmdutil/cmdutil.go
 create mode 100644 cli/internal/cmdutil/cmdutil_test.go
 create mode 100644 cli/internal/colorcache/colorcache.go
 create mode 100644 cli/internal/config/config_file.go
 create mode 100644 cli/internal/config/config_file_test.go
 create mode 100644 cli/internal/context/context.go
 create mode 100644 cli/internal/context/context_test.go
 create mode 100644 cli/internal/context/testdata/dupe-workspace-names/apps/a/package.json
 create mode 100644 cli/internal/context/testdata/dupe-workspace-names/apps/b/package.json
 create mode 100644 cli/internal/context/testdata/dupe-workspace-names/package.json
 create mode 100644 cli/internal/context/testdata/dupe-workspace-names/packages/ui/package.json
 create mode 100644 cli/internal/context/testdata/dupe-workspace-names/pnpm-lock.yaml
 create mode 100644 cli/internal/context/testdata/dupe-workspace-names/pnpm-workspace.yaml
 create mode 100644 cli/internal/core/engine.go
 create mode 100644 cli/internal/core/engine_test.go
 create mode 100644 cli/internal/daemon/connector/connector.go
 create mode 100644 cli/internal/daemon/connector/connector_test.go
 create mode 100644 cli/internal/daemon/connector/fork.go
 create mode 100644 cli/internal/daemon/connector/fork_windows.go
 create mode 100644 cli/internal/daemon/daemon.go
 create mode 100644 cli/internal/daemon/daemon_test.go
 create mode 100644 cli/internal/daemonclient/daemonclient.go
 create mode 100644 cli/internal/doublestar/doublestar.go
 create mode 100644 cli/internal/doublestar/doublestar_test.go
 create mode 100644 cli/internal/doublestar/glob.go
 create mode 100644 cli/internal/doublestar/globwalk.go
 create mode 100644 cli/internal/doublestar/match.go
 create mode 100644 cli/internal/doublestar/utils.go
 create mode 100644 cli/internal/doublestar/validate.go
 create mode 100644 cli/internal/encoding/gitoutput/gitoutput.go
 create mode 100644 cli/internal/encoding/gitoutput/gitoutput_test.go
 create mode 100644 cli/internal/encoding/gitoutput/validators.go
 create mode 100644 cli/internal/encoding/gitoutput/validators_test.go
 create mode 100644 cli/internal/ffi/bindings.h
 create mode 100644 cli/internal/ffi/ffi.go
 create mode 100644 cli/internal/ffi/proto/messages.pb.go
 create mode 100644 cli/internal/filewatcher/backend.go
 create mode 100644 cli/internal/filewatcher/backend_darwin.go
 create mode 100644 cli/internal/filewatcher/cookie.go
 create mode 100644 cli/internal/filewatcher/cookie_test.go
 create mode 100644 cli/internal/filewatcher/filewatcher.go
 create mode 100644 cli/internal/filewatcher/filewatcher_test.go
 create mode 100644 cli/internal/fs/copy_file.go
 create mode 100644 cli/internal/fs/copy_file_test.go
 create mode 100644 cli/internal/fs/fs.go
 create mode 100644 cli/internal/fs/fs_test.go
 create mode 100644 cli/internal/fs/fs_windows_test.go
 create mode 100644 cli/internal/fs/get_turbo_data_dir_go.go
 create mode 100644 cli/internal/fs/get_turbo_data_dir_rust.go
 create mode 100644 cli/internal/fs/hash.go
 create mode 100644 cli/internal/fs/hash_test.go
 create mode 100644 cli/internal/fs/lstat.go
 create mode 100644 cli/internal/fs/package_json.go
 create mode 100644 cli/internal/fs/package_json_test.go
 create mode 100644 cli/internal/fs/path.go
 create mode 100644 cli/internal/fs/testdata/both/package.json
 create mode 100644 cli/internal/fs/testdata/both/turbo.json
 create mode 100644 cli/internal/fs/testdata/correct/turbo.json
 create mode 100644 cli/internal/fs/testdata/invalid-env-1/turbo.json
 create mode 100644 cli/internal/fs/testdata/invalid-env-2/turbo.json
 create mode 100644 cli/internal/fs/testdata/invalid-global-env/turbo.json
 create mode 100644 cli/internal/fs/testdata/legacy-env/turbo.json
 create mode 100644 cli/internal/fs/testdata/legacy-only/package.json
 create mode 100644 cli/internal/fs/turbo_json.go
 create mode 100644 cli/internal/fs/turbo_json_test.go
 create mode 100644 cli/internal/globby/globby.go
 create mode 100644 cli/internal/globby/globby_test.go
 create mode 100644 cli/internal/globwatcher/globwatcher.go
 create mode 100644 cli/internal/globwatcher/globwatcher_test.go
 create mode 100644 cli/internal/graph/graph.go
 create mode 100644 cli/internal/graph/graph_test.go
 create mode 100644 cli/internal/graphvisualizer/graphvisualizer.go
 create mode 100644 cli/internal/hashing/package_deps_hash.go
 create mode 100644 cli/internal/hashing/package_deps_hash_test.go
 create mode 100644 cli/internal/inference/inference.go
 create mode 100644 cli/internal/inference/inference_test.go
 create mode 100644 cli/internal/lockfile/berry_lockfile.go
 create mode 100644 cli/internal/lockfile/berry_lockfile_test.go
 create mode 100644 cli/internal/lockfile/lockfile.go
 create mode 100644 cli/internal/lockfile/lockfile_test.go
 create mode 100644 cli/internal/lockfile/npm_lockfile.go
 create mode 100644 cli/internal/lockfile/pnpm_lockfile.go
 create mode 100644 cli/internal/lockfile/pnpm_lockfile_test.go
 create mode 100644 cli/internal/lockfile/testdata/berry.lock
 create mode 100644 cli/internal/lockfile/testdata/minimal-berry.lock
 create mode 100644 cli/internal/lockfile/testdata/npm-lock-workspace-variation.json
 create mode 100644 cli/internal/lockfile/testdata/npm-lock.json
 create mode 100644 cli/internal/lockfile/testdata/pnpm-absolute-v6.yaml
 create mode 100644 cli/internal/lockfile/testdata/pnpm-absolute.yaml
 create mode 100644 cli/internal/lockfile/testdata/pnpm-patch-v6.yaml
 create mode 100644 cli/internal/lockfile/testdata/pnpm-patch.yaml
 create mode 100644 cli/internal/lockfile/testdata/pnpm-peer-v6.yaml
 create mode 100644 cli/internal/lockfile/testdata/pnpm-top-level-dupe.yaml
 create mode 100644 cli/internal/lockfile/testdata/pnpm6-workspace.yaml
 create mode 100644 cli/internal/lockfile/testdata/pnpm7-workspace.yaml
 create mode 100644 cli/internal/lockfile/testdata/pnpm8.yaml
 create mode 100644 cli/internal/lockfile/testdata/pnpm_override.yaml
 create mode 100644 cli/internal/lockfile/testdata/yarn.lock
 create mode 100644 cli/internal/lockfile/yarn_lockfile.go
 create mode 100644 cli/internal/lockfile/yarn_lockfile_test.go
 create mode 100644 cli/internal/logstreamer/logstreamer.go
 create mode 100644 cli/internal/logstreamer/logstreamer_test.go
 create mode 100644 cli/internal/nodes/packagetask.go
 create mode 100644 cli/internal/packagemanager/berry.go
 create mode 100644 cli/internal/packagemanager/fixtures/package.json
 create mode 100644 cli/internal/packagemanager/fixtures/pnpm-patches.json
 create mode 100644 cli/internal/packagemanager/fixtures/pnpm-workspace.yaml
 create mode 100644 cli/internal/packagemanager/infer_root.go
 create mode 100644 cli/internal/packagemanager/infer_root_test.go
 create mode 100644 cli/internal/packagemanager/npm.go
 create mode 100644 cli/internal/packagemanager/packagemanager.go
 create mode 100644 cli/internal/packagemanager/packagemanager_test.go
 create mode 100644 cli/internal/packagemanager/pnpm.go
 create mode 100644 cli/internal/packagemanager/pnpm6.go
 create mode 100644 cli/internal/packagemanager/pnpm_test.go
 create mode 100644 cli/internal/packagemanager/yarn.go
 create mode 100644 cli/internal/process/child.go
 create mode 100644 cli/internal/process/child_nix_test.go
 create mode 100644 cli/internal/process/child_test.go
 create mode 100644 cli/internal/process/manager.go
 create mode 100644 cli/internal/process/manager_test.go
 create mode 100644 cli/internal/process/sys_nix.go
 create mode 100644 cli/internal/process/sys_windows.go
 create mode 100644 cli/internal/prune/prune.go
 create mode 100644 cli/internal/run/dry_run.go
 create mode 100644 cli/internal/run/global_hash.go
 create mode 100644 cli/internal/run/graph_run.go
 create mode 100644 cli/internal/run/log_tag_go.go
 create mode 100644 cli/internal/run/log_tag_rust.go
 create mode 100644 cli/internal/run/real_run.go
 create mode 100644 cli/internal/run/run.go
 create mode 100644 cli/internal/run/run_spec.go
 create mode 100644 cli/internal/run/run_spec_test.go
 create mode 100644 cli/internal/runcache/output_watcher.go
 create mode 100644 cli/internal/runcache/runcache.go
 create mode 100644 cli/internal/runsummary/execution_summary.go
 create mode 100644 cli/internal/runsummary/format_execution_summary.go
 create mode 100644 cli/internal/runsummary/format_json.go
 create mode 100644 cli/internal/runsummary/format_text.go
 create mode 100644 cli/internal/runsummary/globalhash_summary.go
 create mode 100644 cli/internal/runsummary/run_summary.go
 create mode 100644 cli/internal/runsummary/spaces.go
 create mode 100644 cli/internal/runsummary/task_summary.go
 create mode 100644 cli/internal/scm/git_go.go
 create mode 100644 cli/internal/scm/git_rust.go
 create mode 100644 cli/internal/scm/scm.go
 create mode 100644 cli/internal/scm/stub.go
 create mode 100644 cli/internal/scope/filter/filter.go
 create mode 100644 cli/internal/scope/filter/filter_test.go
 create mode 100644 cli/internal/scope/filter/matcher.go
 create mode 100644 cli/internal/scope/filter/matcher_test.go
 create mode 100644 cli/internal/scope/filter/parse_target_selector.go
 create mode 100644 cli/internal/scope/filter/parse_target_selector_test.go
 create mode 100644 cli/internal/scope/scope.go
 create mode 100644 cli/internal/scope/scope_test.go
 create mode 100644 cli/internal/server/server.go
 create mode 100644 cli/internal/server/server_test.go
 create mode 100644 cli/internal/signals/signals.go
 create mode 100644 cli/internal/spinner/spinner.go
 create mode 100644 cli/internal/tarpatch/tar.go
 create mode 100644 cli/internal/tarpatch/tar_unix.go
 create mode 100644 cli/internal/tarpatch/tar_windows.go
 create mode 100644 cli/internal/taskhash/taskhash.go
 create mode 100644 cli/internal/taskhash/taskhash_test.go
 create mode 100644 cli/internal/turbodprotocol/turbod.proto
 create mode 100644 cli/internal/turbopath/absolute_system_path.go
 create mode 100644 cli/internal/turbopath/absolute_system_path_darwin.go
 create mode 100644 cli/internal/turbopath/absolute_system_path_notdarwin.go
 create mode 100644 cli/internal/turbopath/absolute_system_path_test.go
 create mode 100644 cli/internal/turbopath/anchored_system_path.go
 create mode 100644 cli/internal/turbopath/anchored_unix_path.go
 create mode 100644 cli/internal/turbopath/find_up.go
 create mode 100644 cli/internal/turbopath/relative_system_path.go
 create mode 100644 cli/internal/turbopath/relative_unix_path.go
 create mode 100644 cli/internal/turbopath/turbopath.go
 create mode 100644 cli/internal/turbostate/turbostate.go
 create mode 100644 cli/internal/ui/charset.go
 create mode 100644 cli/internal/ui/colors.go
 create mode 100644 cli/internal/ui/spinner.go
 create mode 100644 cli/internal/ui/term/cursor.go
 create mode 100644 cli/internal/ui/term/cursor_test.go
 create mode 100644 cli/internal/ui/ui.go
 create mode 100644 cli/internal/util/backends.go
 create mode 100644 cli/internal/util/browser/open.go
 create mode 100644 cli/internal/util/closer.go
 create mode 100644 cli/internal/util/cmd.go
 create mode 100644 cli/internal/util/filter/filter.go
 create mode 100644 cli/internal/util/filter/filter_test.go
 create mode 100644 cli/internal/util/graph.go
 create mode 100644 cli/internal/util/modulo.go
 create mode 100644 cli/internal/util/parse_concurrency.go
 create mode 100644 cli/internal/util/parse_concurrency_test.go
 create mode 100644 cli/internal/util/printf.go
 create mode 100644 cli/internal/util/run_opts.go
 create mode 100644 cli/internal/util/semaphore.go
 create mode 100644 cli/internal/util/set.go
 create mode 100644 cli/internal/util/set_test.go
 create mode 100644 cli/internal/util/status.go
 create mode 100644 cli/internal/util/task_id.go
 create mode 100644 cli/internal/util/task_output_mode.go
 create mode 100644 cli/internal/workspace/workspace.go
 create mode 100644 cli/internal/xxhash/xxhash.go
 create mode 100644 cli/internal/yaml/apic.go
 create mode 100644 cli/internal/yaml/decode.go
 create mode 100644 cli/internal/yaml/emitterc.go
 create mode 100644 cli/internal/yaml/encode.go
 create mode 100644 cli/internal/yaml/parserc.go
 create mode 100644 cli/internal/yaml/readerc.go
 create mode 100644 cli/internal/yaml/resolve.go
 create mode 100644 cli/internal/yaml/scannerc.go
 create mode 100644 cli/internal/yaml/sorter.go
 create mode 100644 cli/internal/yaml/writerc.go
 create mode 100644 cli/internal/yaml/yaml.go
 create mode 100644 cli/internal/yaml/yamlh.go
 create mode 100644 cli/internal/yaml/yamlprivateh.go
 create mode 100644 cli/package.json
 create mode 100644 cli/scripts/generate.mjs
 create mode 100644 cli/scripts/nginx/.dockerignore
 create mode 100644 cli/scripts/nginx/Dockerfile.cacher
 create mode 100644 cli/scripts/nginx/docker-compose.yml
 create mode 100644 cli/scripts/nginx/nginx.conf
 create mode 100644 cli/scripts/npm-native-packages/.gitignore
 create mode 100644 cli/scripts/npm-native-packages/npm-native-packages.js
 create mode 100644 cli/scripts/npm-native-packages/template/README.md
 create mode 100644 cli/scripts/npm-native-packages/template/bin/turbo
 create mode 100644 cli/scripts/npm-native-packages/template/template.package.json
 create mode 100644 cli/scripts/templates/jest.config.js
 create mode 100644 cli/scripts/templates/src/__tests__/index.test.ts
 create mode 100644 cli/scripts/templates/src/__tests__/tsconfig.json
 create mode 100644 cli/scripts/templates/src/index.ts
 create mode 100644 cli/scripts/templates/tsconfig.json
 create mode 100644 cli/turbo.json
 create mode 100644 clippy.toml
 create mode 100644 deny.toml
 create mode 100644 package.json
 create mode 100644 packages/create-turbo/.gitignore
 create mode 100644 packages/create-turbo/LICENSE
 create mode 100644 packages/create-turbo/README.md
 create mode 100644 packages/create-turbo/__tests__/examples.test.ts
 create mode 100644 packages/create-turbo/__tests__/git.test.ts
 create mode 100644 packages/create-turbo/__tests__/index.test.ts
 create mode 100644 packages/create-turbo/__tests__/isFolderEmpty.test.ts
 create mode 100644 packages/create-turbo/__tests__/isWritable.test.ts
 create mode 100644 packages/create-turbo/__tests__/test-utils.ts
 create mode 100644 packages/create-turbo/jest.config.js
 create mode 100644 packages/create-turbo/package.json
 create mode 100644 packages/create-turbo/src/cli.ts
 create mode 100644 packages/create-turbo/src/commands/create/createProject.ts
 create mode 100644 packages/create-turbo/src/commands/create/index.ts
 create mode 100644 packages/create-turbo/src/commands/create/prompts.ts
 create mode 100644 packages/create-turbo/src/commands/create/types.ts
 create mode 100644 packages/create-turbo/src/commands/index.ts
 create mode 100644 packages/create-turbo/src/logger.ts
 create mode 100644 packages/create-turbo/src/transforms/errors.ts
 create mode 100644 packages/create-turbo/src/transforms/git-ignore.ts
 create mode 100644 packages/create-turbo/src/transforms/index.ts
 create mode 100644 packages/create-turbo/src/transforms/official-starter.ts
 create mode 100644 packages/create-turbo/src/transforms/package-manager.ts
 create mode 100644 packages/create-turbo/src/transforms/types.ts
 create mode 100644 packages/create-turbo/src/utils/examples.ts
 create mode 100644 packages/create-turbo/src/utils/git.ts
 create mode 100644 packages/create-turbo/src/utils/isDefaultExample.ts
 create mode 100644 packages/create-turbo/src/utils/isFolderEmpty.ts
 create mode 100644 packages/create-turbo/src/utils/isOnline.ts
 create mode 100644 packages/create-turbo/src/utils/isWriteable.ts
 create mode 100644 packages/create-turbo/src/utils/notifyUpdate.ts
 create mode 100644 packages/create-turbo/tsconfig.json
 create mode 100644 packages/create-turbo/tsup.config.ts
 create mode 100644 packages/create-turbo/turbo.json
 create mode 100644 packages/eslint-config-turbo/LICENSE
 create mode 100644 packages/eslint-config-turbo/README.md
 create mode 100644 packages/eslint-config-turbo/index.js
 create mode 100644 packages/eslint-config-turbo/package.json
 create mode 100644 packages/eslint-plugin-turbo/LICENSE
 create mode 100644 packages/eslint-plugin-turbo/README.md
 create mode 100644 packages/eslint-plugin-turbo/__fixtures__/configs/single/turbo.json
 create mode 100644 packages/eslint-plugin-turbo/__fixtures__/workspace-configs/apps/docs/index.js
 create mode 100644 packages/eslint-plugin-turbo/__fixtures__/workspace-configs/apps/docs/package.json
 create mode 100644 packages/eslint-plugin-turbo/__fixtures__/workspace-configs/apps/docs/turbo.json
 create mode 100644 packages/eslint-plugin-turbo/__fixtures__/workspace-configs/apps/web/index.js
 create mode 100644 packages/eslint-plugin-turbo/__fixtures__/workspace-configs/apps/web/package.json
 create mode 100644 packages/eslint-plugin-turbo/__fixtures__/workspace-configs/apps/web/turbo.json
 create mode 100644 packages/eslint-plugin-turbo/__fixtures__/workspace-configs/package.json
 create mode 100644 packages/eslint-plugin-turbo/__fixtures__/workspace-configs/packages/ui/index.js
 create mode 100644 packages/eslint-plugin-turbo/__fixtures__/workspace-configs/packages/ui/package.json
 create mode 100644 packages/eslint-plugin-turbo/__fixtures__/workspace-configs/packages/ui/turbo.json
 create mode 100644 packages/eslint-plugin-turbo/__fixtures__/workspace-configs/turbo.json
 create mode 100644 packages/eslint-plugin-turbo/__fixtures__/workspace/.eslintrc.js
 create mode 100644 packages/eslint-plugin-turbo/__fixtures__/workspace/child/child.js
 create mode 100644 packages/eslint-plugin-turbo/__fixtures__/workspace/package-lock.json
 create mode 100644 packages/eslint-plugin-turbo/__fixtures__/workspace/package.json
 create mode 100644 packages/eslint-plugin-turbo/__fixtures__/workspace/peer.js
 create mode 100644 packages/eslint-plugin-turbo/__fixtures__/workspace/turbo.json
 create mode 100644 packages/eslint-plugin-turbo/__tests__/cwd.test.ts
 create mode 100644 packages/eslint-plugin-turbo/__tests__/lib/no-undeclared-env-vars.test.ts
 create mode 100644 packages/eslint-plugin-turbo/docs/rules/no-undeclared-env-vars.md
 create mode 100644 packages/eslint-plugin-turbo/jest.config.js
 create mode 100644 packages/eslint-plugin-turbo/lib/configs/recommended.ts
 create mode 100644 packages/eslint-plugin-turbo/lib/constants.ts
 create mode 100644 packages/eslint-plugin-turbo/lib/index.ts
 create mode 100644 packages/eslint-plugin-turbo/lib/rules/no-undeclared-env-vars.ts
 create mode 100644 packages/eslint-plugin-turbo/lib/utils/getEnvVarDependencies.ts
 create mode 100644 packages/eslint-plugin-turbo/package.json
 create mode 100644 packages/eslint-plugin-turbo/tsconfig.json
 create mode 100644 packages/eslint-plugin-turbo/tsup.config.ts
 create mode 100644 packages/eslint-plugin-turbo/turbo.json
 create mode 100644 packages/node-module-trace/package.json
 create mode 100644 packages/tsconfig/README.md
 create mode 100644 packages/tsconfig/base.json
 create mode 100644 packages/tsconfig/library.json
 create mode 100644 packages/tsconfig/package.json
 create mode 100644 packages/turbo-codemod/LICENSE
 create mode 100644 packages/turbo-codemod/README.md
 create mode 100644 packages/turbo-codemod/__tests__/__fixtures__/add-package-manager/has-package-manager/package.json
 create mode 100644 packages/turbo-codemod/__tests__/__fixtures__/add-package-manager/no-package-manager/package.json
 create mode 100644 packages/turbo-codemod/__tests__/__fixtures__/add-package-manager/wrong-package-manager/package.json
 create mode 100644 packages/turbo-codemod/__tests__/__fixtures__/create-turbo-config/both-configs/package.json
 create mode 100644 packages/turbo-codemod/__tests__/__fixtures__/create-turbo-config/both-configs/turbo.json
 create mode 100644 packages/turbo-codemod/__tests__/__fixtures__/create-turbo-config/no-package-json-config/package.json
 create mode 100644 packages/turbo-codemod/__tests__/__fixtures__/create-turbo-config/no-package-json-file/a-random-file.txt
 create mode 100644 packages/turbo-codemod/__tests__/__fixtures__/create-turbo-config/no-turbo-json-config/package.json
 create mode 100644 packages/turbo-codemod/__tests__/__fixtures__/create-turbo-config/turbo-json-config/package.json
 create mode 100644 packages/turbo-codemod/__tests__/__fixtures__/create-turbo-config/turbo-json-config/turbo.json
 create mode 100644 packages/turbo-codemod/__tests__/__fixtures__/get-turbo-upgrade-command/no-deps/package.json
 create mode 100644 packages/turbo-codemod/__tests__/__fixtures__/get-turbo-upgrade-command/no-package/README.md
 create mode 100644 packages/turbo-codemod/__tests__/__fixtures__/get-turbo-upgrade-command/no-turbo/package.json
 create mode 100644 packages/turbo-codemod/__tests__/__fixtures__/get-turbo-upgrade-command/normal-workspaces-dev-install/package.json
 create mode 100644 packages/turbo-codemod/__tests__/__fixtures__/get-turbo-upgrade-command/normal-workspaces/package.json
 create mode 100644 packages/turbo-codemod/__tests__/__fixtures__/get-turbo-upgrade-command/pnpm-workspaces-dev-install/package.json
 create mode 100644 packages/turbo-codemod/__tests__/__fixtures__/get-turbo-upgrade-command/pnpm-workspaces-dev-install/pnpm-workspace.yaml
 create mode 100644 packages/turbo-codemod/__tests__/__fixtures__/get-turbo-upgrade-command/pnpm-workspaces/package.json
 create mode 100644 packages/turbo-codemod/__tests__/__fixtures__/get-turbo-upgrade-command/pnpm-workspaces/pnpm-workspace.yaml
 create mode 100644 packages/turbo-codemod/__tests__/__fixtures__/get-turbo-upgrade-command/single-package-dev-install/package.json
 create mode 100644 packages/turbo-codemod/__tests__/__fixtures__/get-turbo-upgrade-command/single-package/package.json
 create mode 100644 packages/turbo-codemod/__tests__/__fixtures__/migrate-env-var-dependencies/env-dependencies/turbo.json
 create mode 100644 packages/turbo-codemod/__tests__/__fixtures__/migrate-env-var-dependencies/migrated-env-dependencies/turbo.json
 create mode 100644 packages/turbo-codemod/__tests__/__fixtures__/migrate-env-var-dependencies/no-turbo-json/package.json
 create mode 100644 packages/turbo-codemod/__tests__/__fixtures__/migrate-env-var-dependencies/old-config/package.json
 create mode 100644 packages/turbo-codemod/__tests__/__fixtures__/migrate-env-var-dependencies/old-config/turbo.json
 create mode 100644 packages/turbo-codemod/__tests__/__fixtures__/migrate-env-var-dependencies/workspace-configs/apps/docs/index.js
 create mode 100644 packages/turbo-codemod/__tests__/__fixtures__/migrate-env-var-dependencies/workspace-configs/apps/docs/package.json
 create mode 100644 packages/turbo-codemod/__tests__/__fixtures__/migrate-env-var-dependencies/workspace-configs/apps/docs/turbo.json
 create mode 100644 packages/turbo-codemod/__tests__/__fixtures__/migrate-env-var-dependencies/workspace-configs/apps/web/index.js
 create mode 100644 packages/turbo-codemod/__tests__/__fixtures__/migrate-env-var-dependencies/workspace-configs/apps/web/package.json
 create mode 100644 packages/turbo-codemod/__tests__/__fixtures__/migrate-env-var-dependencies/workspace-configs/apps/web/turbo.json
 create mode 100644 packages/turbo-codemod/__tests__/__fixtures__/migrate-env-var-dependencies/workspace-configs/package.json
 create mode 100644 packages/turbo-codemod/__tests__/__fixtures__/migrate-env-var-dependencies/workspace-configs/packages/ui/index.js
 create mode 100644 packages/turbo-codemod/__tests__/__fixtures__/migrate-env-var-dependencies/workspace-configs/packages/ui/package.json
 create mode 100644 packages/turbo-codemod/__tests__/__fixtures__/migrate-env-var-dependencies/workspace-configs/packages/ui/turbo.json
 create mode 100644 packages/turbo-codemod/__tests__/__fixtures__/migrate-env-var-dependencies/workspace-configs/turbo.json
 create mode 100644 packages/turbo-codemod/__tests__/__fixtures__/migrate/no-repo/README.md
 create mode 100644 packages/turbo-codemod/__tests__/__fixtures__/migrate/old-turbo/package.json
 create mode 100644 packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/invalid-outputs/package.json
 create mode 100644 packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/invalid-outputs/turbo.json
 create mode 100644 packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/no-outputs/package.json
 create mode 100644 packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/no-outputs/turbo.json
 create mode 100644 packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/no-pipeline/package.json
 create mode 100644 packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/no-pipeline/turbo.json
 create mode 100644 packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/no-turbo-json/package.json
 create mode 100644 packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/old-config/package.json
 create mode 100644 packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/old-config/turbo.json
 create mode 100644 packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/old-outputs/package.json
 create mode 100644 packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/old-outputs/turbo.json
 create mode 100644 packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/workspace-configs/apps/docs/index.js
 create mode 100644 packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/workspace-configs/apps/docs/package.json
 create mode 100644 packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/workspace-configs/apps/docs/turbo.json
 create mode 100644 packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/workspace-configs/apps/web/index.js
 create mode 100644 packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/workspace-configs/apps/web/package.json
 create mode 100644 packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/workspace-configs/apps/web/turbo.json
 create mode 100644 packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/workspace-configs/package.json
 create mode 100644 packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/workspace-configs/packages/ui/index.js
 create mode 100644 packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/workspace-configs/packages/ui/package.json
 create mode 100644 packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/workspace-configs/packages/ui/turbo.json
 create mode 100644 packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/workspace-configs/turbo.json
 create mode 100644 packages/turbo-codemod/__tests__/__fixtures__/transform/basic/package.json
 create mode 100644 packages/turbo-codemod/__tests__/add-package-manager.test.ts
 create mode 100644 packages/turbo-codemod/__tests__/create-turbo-config.test.ts
 create mode 100644 packages/turbo-codemod/__tests__/get-turbo-upgrade-command.test.ts
 create mode 100644 packages/turbo-codemod/__tests__/migrate-env-var-dependencies.test.ts
 create mode 100644 packages/turbo-codemod/__tests__/migrate.test.ts
 create mode 100644 packages/turbo-codemod/__tests__/set-default-outputs.test.ts
 create mode 100644 packages/turbo-codemod/__tests__/transform.test.ts
 create mode 100644 packages/turbo-codemod/index.d.ts
 create mode 100644 packages/turbo-codemod/jest.config.js
 create mode 100644 packages/turbo-codemod/package.json
 create mode 100644 packages/turbo-codemod/plopfile.js
 create mode 100644 packages/turbo-codemod/src/cli.ts
 create mode 100644 packages/turbo-codemod/src/commands/index.ts
 create mode 100644 packages/turbo-codemod/src/commands/migrate/index.ts
 create mode 100644 packages/turbo-codemod/src/commands/migrate/steps/getCurrentVersion.ts
 create mode 100644 packages/turbo-codemod/src/commands/migrate/steps/getLatestVersion.ts
 create mode 100644 packages/turbo-codemod/src/commands/migrate/steps/getTransformsForMigration.ts
 create mode 100644 packages/turbo-codemod/src/commands/migrate/steps/getTurboUpgradeCommand.ts
 create mode 100644 packages/turbo-codemod/src/commands/migrate/types.ts
 create mode 100644 packages/turbo-codemod/src/commands/migrate/utils.ts
 create mode 100644 packages/turbo-codemod/src/commands/transform/index.ts
 create mode 100644 packages/turbo-codemod/src/commands/transform/types.ts
 create mode 100644 packages/turbo-codemod/src/runner/FileTransform.ts
 create mode 100644 packages/turbo-codemod/src/runner/Runner.ts
 create mode 100644 packages/turbo-codemod/src/runner/index.ts
 create mode 100644 packages/turbo-codemod/src/runner/types.ts
 create mode 100644 packages/turbo-codemod/src/transforms/README.md
 create mode 100644 packages/turbo-codemod/src/transforms/add-package-manager.ts
 create mode 100644 packages/turbo-codemod/src/transforms/create-turbo-config.ts
 create mode 100644 packages/turbo-codemod/src/transforms/migrate-env-var-dependencies.ts
 create mode 100644 packages/turbo-codemod/src/transforms/set-default-outputs.ts
 create mode 100644 packages/turbo-codemod/src/types.ts
 create mode 100644 packages/turbo-codemod/src/utils/checkGitStatus.ts
 create mode 100644 packages/turbo-codemod/src/utils/directoryInfo.ts
 create mode 100644 packages/turbo-codemod/src/utils/getPackageManager.ts
 create mode 100644 packages/turbo-codemod/src/utils/getPackageManagerVersion.ts
 create mode 100644 packages/turbo-codemod/src/utils/getTransformerHelpers.ts
 create mode 100644 packages/turbo-codemod/src/utils/loadTransformers.ts
 create mode 100644 packages/turbo-codemod/src/utils/logger.ts
 create mode 100644 packages/turbo-codemod/src/utils/looksLikeRepo.ts
 create mode 100644 packages/turbo-codemod/src/utils/notifyUpdate.ts
 create mode 100644 packages/turbo-codemod/templates/transformer.hbs
 create mode 100644 packages/turbo-codemod/templates/transformer.test.hbs
 create mode 100644 packages/turbo-codemod/tsconfig.json
 create mode 100644 packages/turbo-codemod/tsup.config.ts
 create mode 100644 packages/turbo-ignore/README.md
 create mode 100644 packages/turbo-ignore/__fixtures__/app/package.json
 create mode 100644 packages/turbo-ignore/__fixtures__/invalid-app/package.json
 create mode 100644 packages/turbo-ignore/__fixtures__/no-app/index.js
 create mode 100644 packages/turbo-ignore/__tests__/args.test.ts
 create mode 100644 packages/turbo-ignore/__tests__/checkCommit.test.ts
 create mode 100644 packages/turbo-ignore/__tests__/errors.test.ts
 create mode 100644 packages/turbo-ignore/__tests__/getComparison.test.ts
 create mode 100644 packages/turbo-ignore/__tests__/getTask.test.ts
 create mode 100644 packages/turbo-ignore/__tests__/getWorkspace.test.ts
 create mode 100644 packages/turbo-ignore/__tests__/ignore.test.ts
 create mode 100644 packages/turbo-ignore/jest.config.js
 create mode 100644 packages/turbo-ignore/package.json
 create mode 100644 packages/turbo-ignore/src/args.ts
 create mode 100644 packages/turbo-ignore/src/checkCommit.ts
 create mode 100644 packages/turbo-ignore/src/errors.ts
 create mode 100644 packages/turbo-ignore/src/getComparison.ts
 create mode 100644 packages/turbo-ignore/src/getTask.ts
 create mode 100644 packages/turbo-ignore/src/getWorkspace.ts
 create mode 100644 packages/turbo-ignore/src/ignore.ts
 create mode 100644 packages/turbo-ignore/src/index.ts
 create mode 100644 packages/turbo-ignore/src/logger.ts
 create mode 100644 packages/turbo-ignore/src/types.ts
 create mode 100644 packages/turbo-ignore/tsconfig.json
 create mode 100644 packages/turbo-ignore/tsup.config.ts
 create mode 100644 packages/turbo-test-utils/README.md
 create mode 100644 packages/turbo-test-utils/package.json
 create mode 100644 packages/turbo-test-utils/src/index.ts
 create mode 100644 packages/turbo-test-utils/src/mockEnv.ts
 create mode 100644 packages/turbo-test-utils/src/spyConsole.ts
 create mode 100644 packages/turbo-test-utils/src/spyExit.ts
 create mode 100644 packages/turbo-test-utils/src/useFixtures.ts
 create mode 100644 packages/turbo-test-utils/src/validateLogs.ts
 create mode 100644 packages/turbo-test-utils/tsconfig.json
 create mode 100644 packages/turbo-tracing-next-plugin/README.md
 create mode 100644 packages/turbo-tracing-next-plugin/package.json
 create mode 100644 packages/turbo-tracing-next-plugin/src/index.ts
 create mode 100644 packages/turbo-tracing-next-plugin/test/with-mongodb-mongoose/.env.local.example
 create mode 100644 packages/turbo-tracing-next-plugin/test/with-mongodb-mongoose/.gitignore
 create mode 100644 packages/turbo-tracing-next-plugin/test/with-mongodb-mongoose/README.md
 create mode 100644 packages/turbo-tracing-next-plugin/test/with-mongodb-mongoose/components/Form.js
 create mode 100644 packages/turbo-tracing-next-plugin/test/with-mongodb-mongoose/css/form.css
 create mode 100644 packages/turbo-tracing-next-plugin/test/with-mongodb-mongoose/css/style.css
 create mode 100644 packages/turbo-tracing-next-plugin/test/with-mongodb-mongoose/lib/dbConnect.js
 create mode 100644 packages/turbo-tracing-next-plugin/test/with-mongodb-mongoose/models/Pet.js
 create mode 100644 packages/turbo-tracing-next-plugin/test/with-mongodb-mongoose/next.config.js
 create mode 100644 packages/turbo-tracing-next-plugin/test/with-mongodb-mongoose/package.json
 create mode 100644 packages/turbo-tracing-next-plugin/test/with-mongodb-mongoose/pages/[id]/edit.js
 create mode 100644 packages/turbo-tracing-next-plugin/test/with-mongodb-mongoose/pages/[id]/index.js
 create mode 100644 packages/turbo-tracing-next-plugin/test/with-mongodb-mongoose/pages/_app.js
 create mode 100644 packages/turbo-tracing-next-plugin/test/with-mongodb-mongoose/pages/api/pets/[id].js
 create mode 100644
packages/turbo-tracing-next-plugin/test/with-mongodb-mongoose/pages/api/pets/index.js create mode 100644 packages/turbo-tracing-next-plugin/test/with-mongodb-mongoose/pages/index.js create mode 100644 packages/turbo-tracing-next-plugin/test/with-mongodb-mongoose/pages/new.js create mode 100644 packages/turbo-tracing-next-plugin/test/with-mongodb-mongoose/public/favicon.ico create mode 100644 packages/turbo-tracing-next-plugin/test/with-mongodb-mongoose/public/zeit.svg create mode 100644 packages/turbo-tracing-next-plugin/tsconfig.json create mode 100644 packages/turbo-types/package.json create mode 100644 packages/turbo-types/src/index.ts create mode 100644 packages/turbo-types/src/scripts/codegen.js create mode 100644 packages/turbo-types/src/types/config.ts create mode 100644 packages/turbo-types/tsconfig.json create mode 100644 packages/turbo-types/turbo.json create mode 100644 packages/turbo-utils/LICENSE create mode 100644 packages/turbo-utils/README.md create mode 100644 packages/turbo-utils/__fixtures__/common/single-package/child/child.js create mode 100644 packages/turbo-utils/__fixtures__/common/single-package/package.json create mode 100644 packages/turbo-utils/__fixtures__/common/single-package/turbo.json create mode 100644 packages/turbo-utils/__fixtures__/common/workspace-configs/apps/docs/index.js create mode 100644 packages/turbo-utils/__fixtures__/common/workspace-configs/apps/docs/package.json create mode 100644 packages/turbo-utils/__fixtures__/common/workspace-configs/apps/web/index.js create mode 100644 packages/turbo-utils/__fixtures__/common/workspace-configs/apps/web/package.json create mode 100644 packages/turbo-utils/__fixtures__/common/workspace-configs/apps/web/turbo.json create mode 100644 packages/turbo-utils/__fixtures__/common/workspace-configs/package.json create mode 100644 packages/turbo-utils/__fixtures__/common/workspace-configs/packages/ui/index.js create mode 100644 packages/turbo-utils/__fixtures__/common/workspace-configs/packages/ui/package.json create mode 100644 packages/turbo-utils/__fixtures__/common/workspace-configs/packages/ui/turbo.json create mode 100644 packages/turbo-utils/__fixtures__/common/workspace-configs/packages/utils/index.js create mode 100644 packages/turbo-utils/__fixtures__/common/workspace-configs/packages/utils/package.json create mode 100644 packages/turbo-utils/__fixtures__/common/workspace-configs/packages/utils/turbo.json create mode 100644 packages/turbo-utils/__fixtures__/common/workspace-configs/turbo.json create mode 100644 packages/turbo-utils/__tests__/getTurboConfigs.test.ts create mode 100644 packages/turbo-utils/__tests__/getTurboRoot.test.ts create mode 100644 packages/turbo-utils/jest.config.js create mode 100644 packages/turbo-utils/package.json create mode 100644 packages/turbo-utils/src/getTurboConfigs.ts create mode 100644 packages/turbo-utils/src/getTurboRoot.ts create mode 100644 packages/turbo-utils/src/index.ts create mode 100644 packages/turbo-utils/src/managers.ts create mode 100644 packages/turbo-utils/src/searchUp.ts create mode 100644 packages/turbo-utils/tsconfig.json create mode 100644 packages/turbo-utils/tsup.config.ts create mode 100644 packages/turbo-workspaces/LICENSE create mode 100644 packages/turbo-workspaces/README.md create mode 100644 packages/turbo-workspaces/__fixtures__/invalid/index.js create mode 100644 packages/turbo-workspaces/__fixtures__/npm/monorepo/apps/docs/package.json create mode 100644 packages/turbo-workspaces/__fixtures__/npm/monorepo/apps/web/package.json create 
mode 100644 packages/turbo-workspaces/__fixtures__/npm/monorepo/package-lock.json create mode 100644 packages/turbo-workspaces/__fixtures__/npm/monorepo/package.json create mode 100644 packages/turbo-workspaces/__fixtures__/npm/monorepo/packages/tsconfig/package.json create mode 100644 packages/turbo-workspaces/__fixtures__/npm/monorepo/packages/ui/package.json create mode 100644 packages/turbo-workspaces/__fixtures__/npm/non-monorepo/package-lock.json create mode 100644 packages/turbo-workspaces/__fixtures__/npm/non-monorepo/package.json create mode 100644 packages/turbo-workspaces/__fixtures__/pnpm/monorepo/apps/docs/package.json create mode 100644 packages/turbo-workspaces/__fixtures__/pnpm/monorepo/apps/web/package.json create mode 100644 packages/turbo-workspaces/__fixtures__/pnpm/monorepo/package.json create mode 100644 packages/turbo-workspaces/__fixtures__/pnpm/monorepo/packages/tsconfig/package.json create mode 100644 packages/turbo-workspaces/__fixtures__/pnpm/monorepo/packages/ui/package.json create mode 100644 packages/turbo-workspaces/__fixtures__/pnpm/monorepo/pnpm-lock.yaml create mode 100644 packages/turbo-workspaces/__fixtures__/pnpm/monorepo/pnpm-workspace.yaml create mode 100644 packages/turbo-workspaces/__fixtures__/pnpm/non-monorepo/package.json create mode 100644 packages/turbo-workspaces/__fixtures__/pnpm/non-monorepo/pnpm-lock.yaml create mode 100644 packages/turbo-workspaces/__fixtures__/yarn/monorepo/apps/docs/package.json create mode 100644 packages/turbo-workspaces/__fixtures__/yarn/monorepo/apps/web/package.json create mode 100644 packages/turbo-workspaces/__fixtures__/yarn/monorepo/package.json create mode 100644 packages/turbo-workspaces/__fixtures__/yarn/monorepo/packages/tsconfig/package.json create mode 100644 packages/turbo-workspaces/__fixtures__/yarn/monorepo/packages/ui/package.json create mode 100644 packages/turbo-workspaces/__fixtures__/yarn/monorepo/yarn.lock create mode 100644 packages/turbo-workspaces/__fixtures__/yarn/non-monorepo/package.json create mode 100644 packages/turbo-workspaces/__fixtures__/yarn/non-monorepo/yarn.lock create mode 100644 packages/turbo-workspaces/__tests__/index.test.ts create mode 100644 packages/turbo-workspaces/__tests__/managers.test.ts create mode 100644 packages/turbo-workspaces/__tests__/test-utils.ts create mode 100644 packages/turbo-workspaces/jest.config.js create mode 100644 packages/turbo-workspaces/package.json create mode 100644 packages/turbo-workspaces/src/cli.ts create mode 100644 packages/turbo-workspaces/src/commands/convert/index.ts create mode 100644 packages/turbo-workspaces/src/commands/convert/types.ts create mode 100644 packages/turbo-workspaces/src/commands/index.ts create mode 100644 packages/turbo-workspaces/src/commands/summary/index.ts create mode 100644 packages/turbo-workspaces/src/commands/summary/types.ts create mode 100644 packages/turbo-workspaces/src/convert.ts create mode 100644 packages/turbo-workspaces/src/errors.ts create mode 100644 packages/turbo-workspaces/src/getWorkspaceDetails.ts create mode 100644 packages/turbo-workspaces/src/index.ts create mode 100644 packages/turbo-workspaces/src/install.ts create mode 100644 packages/turbo-workspaces/src/logger.ts create mode 100644 packages/turbo-workspaces/src/managers/index.ts create mode 100644 packages/turbo-workspaces/src/managers/npm.ts create mode 100644 packages/turbo-workspaces/src/managers/pnpm.ts create mode 100644 packages/turbo-workspaces/src/managers/yarn.ts create mode 100644 packages/turbo-workspaces/src/types.ts 
create mode 100644 packages/turbo-workspaces/src/updateDependencies.ts create mode 100644 packages/turbo-workspaces/src/utils.ts create mode 100644 packages/turbo-workspaces/tsconfig.json create mode 100644 packages/turbo-workspaces/tsup.config.ts create mode 100644 packages/turbo-workspaces/turbo.json create mode 100644 packages/turbo/.dev-mode create mode 100644 packages/turbo/README.md create mode 100644 packages/turbo/bin/turbo create mode 100644 packages/turbo/bump-version.js create mode 100644 packages/turbo/install.js create mode 100644 packages/turbo/node-platform.js create mode 100644 packages/turbo/package.json create mode 100644 packages/webpack-nmt/package.json create mode 100644 packages/webpack-nmt/src/index.ts create mode 100644 packages/webpack-nmt/tsconfig.json create mode 100644 pnpm-lock.yaml create mode 100644 pnpm-workspace.yaml create mode 100644 release.md create mode 100644 rust-toolchain create mode 100644 troubleshooting.md create mode 100644 tsconfig.json create mode 100644 tsconfig.project.json create mode 100644 turbo.json create mode 100644 turbow.js create mode 100644 version.txt diff --git a/.cargo/config.toml b/.cargo/config.toml new file mode 100644 index 0000000..3b9eb41 --- /dev/null +++ b/.cargo/config.toml @@ -0,0 +1,19 @@ +[env] +CARGO_WORKSPACE_DIR = { value = "", relative = true } + +[target.x86_64-pc-windows-msvc] +linker = "rust-lld" + +[alias] +xtask = "run --package xtask --" +tr-build = "build -p turbo" +tr-run = "run -p turbo" +tr-test = "test -p turborepo-* -p turbopath -p vercel-api-mock --features rustls-tls" +tr-check = "check -p turbo -p vercel-api-mock" +# Builds all test code to check for compiler errors before running +tp-pre-test = "nextest run --no-run --workspace --release --exclude turbo --exclude turborepo-* --exclude turbopath --exclude vercel-api-mock" +tp-test = "nextest run --workspace --release --no-fail-fast --exclude turbo --exclude turborepo-* --exclude turbopath --exclude vercel-api-mock" +tp-bench-test = "test --benches --workspace --release --no-fail-fast --exclude turbopack-bench --exclude turbo --exclude turborepo-* --exclude turbopath --exclude vercel-api-mock" + +[target.'cfg(all())'] +rustflags = ["--cfg", "tokio_unstable", "-Csymbol-mangling-version=v0", "-Aclippy::too_many_arguments"] diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile new file mode 100644 index 0000000..3c0db66 --- /dev/null +++ b/.devcontainer/Dockerfile @@ -0,0 +1,33 @@ +# See here for image contents: https://github.com/microsoft/vscode-dev-containers/tree/v0.233.0/containers/go/.devcontainer/base.Dockerfile + +# [Choice] Go version (use -bullseye variants on local arm64/Apple Silicon): 1, 1.16, 1.17, 1-bullseye, 1.16-bullseye, 1.17-bullseye, 1-buster, 1.16-buster, 1.17-buster +ARG VARIANT="1.18-bullseye" +FROM mcr.microsoft.com/vscode/devcontainers/go:0-${VARIANT} + +RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \ + && apt-get -y install --no-install-recommends \ + # Chromium for running Turbopack benchmarks + chromium \ + # Used for plotters graph visualizations in turbopack benchmarks + libfontconfig1-dev + +# Add hyperfine, a useful benchmarking tool +RUN dpkgArch="$(dpkg --print-architecture)"; \ + wget "https://github.com/sharkdp/hyperfine/releases/download/v1.12.0/hyperfine_1.12.0_${dpkgArch}.deb" && dpkg -i "hyperfine_1.12.0_${dpkgArch}.deb" + +# +# Everything below is run as the vscode user. If superuser permissions are necessary, +# run it before this. Otherwise, prefer running as the vscode user. 
+# +USER vscode + +# [Choice] Node.js version: none, lts/*, 16, 14, 12, 10 +ARG NODE_VERSION="none" +RUN if [ "${NODE_VERSION}" != "none" ]; then umask 0002 && sh -c ". /usr/local/share/nvm/nvm.sh && nvm install ${NODE_VERSION}" 2>&1; fi + +RUN sh -c ". /usr/local/share/nvm/nvm.sh && npm install -g vercel yarn yalc pnpm nodemon" 2>&1 + +# The installer from https://rustup.rs/ homepage, with the following changes: +# * `-y` to accept without interactivity +# * `--default-toolchain none` to avoid installing stable rust. Our specific toolchain is installed post-create. +RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --default-toolchain none diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json new file mode 100644 index 0000000..d9c7d4c --- /dev/null +++ b/.devcontainer/devcontainer.json @@ -0,0 +1,56 @@ +// For format details, see https://aka.ms/devcontainer.json. For config options, see the README at: +// https://github.com/microsoft/vscode-dev-containers/tree/v0.233.0/containers/go +{ + "name": "turbo (go, node, rust)", + "build": { + "dockerfile": "Dockerfile", + "args": { + // Update the VARIANT arg to pick a version of Go: 1, 1.18, 1.17 + // Append -bullseye or -buster to pin to an OS version. + // Use -bullseye variants on local arm64/Apple Silicon. + "VARIANT": "1.18-bullseye", + // Options + "NODE_VERSION": "lts/*" + } + }, + "runArgs": ["--cap-add=SYS_PTRACE", "--security-opt", "seccomp=unconfined"], + + // Set *default* container specific settings.json values on container create. + "settings": { + "go.toolsManagement.checkForUpdates": "local", + "go.useLanguageServer": true, + "go.gopath": "/go" + }, + + // Add the IDs of extensions you want installed when the container is created. + "extensions": [ + "bradlc.vscode-tailwindcss", + "christian-kohler.npm-intellisense", + "dbaeumer.vscode-eslint", + "eamodio.gitlens", + "EditorConfig.EditorConfig", + "esbenp.prettier-vscode", + "github.copilot", + "github.vscode-pull-request-github", + "golang.go", + "heybourn.headwind", + "rust-lang.rust-analyzer", + "silvenon.mdx", + "windmilleng.vscode-go-autotest", + "yzhang.markdown-all-in-one" + ], + + // Use 'forwardPorts' to make a list of ports inside the container available locally. + // "forwardPorts": [], + + // Use 'postCreateCommand' to run commands after the container is created. + "postCreateCommand": "go version && cargo --version", // Invoking `cargo` will eagerly install the toolchain specified in rust-toolchain file + + // Comment out to connect as root instead. More info: https://aka.ms/vscode-remote/containers/non-root. 
+ "remoteUser": "vscode", + "features": { + "docker-in-docker": "latest", + "git": "latest", + "github-cli": "latest" + } +} diff --git a/.editorconfig b/.editorconfig index 1e022b5..748ec59 100644 --- a/.editorconfig +++ b/.editorconfig @@ -1,6 +1,7 @@ root = true [*] +charset = utf-8 indent_style = space indent_size = 2 end_of_line = lf @@ -8,6 +9,8 @@ charset = utf-8 trim_trailing_whitespace = true insert_final_newline = true +[*.t] +trim_trailing_whitespace = false [*.py] indent_size = 4 @@ -17,3 +20,17 @@ insert_final_newline = false [*.md] trim_trailing_whitespace = false + +[Makefile] +indent_style = tab + +[{go.mod,go.sum,*.go}] +indent_style = tab + +[nginx.conf] +indent_size = 8 + +[*.rs] +# Keep in sync with rustfmt +max_line_length = 100 +indent_size = 4 diff --git a/.eslintignore b/.eslintignore new file mode 100644 index 0000000..ee719fd --- /dev/null +++ b/.eslintignore @@ -0,0 +1,13 @@ +node_modules/ +target/ +.next/ +build/ +dist/ + +/examples/ + +packages/eslint-plugin-turbo/__fixtures__ +packages/create-turbo/templates +packages/turbo-tracing-next-plugin/test/with-mongodb-mongoose +crates/*/tests/** +crates/next-core/js/src/compiled diff --git a/.eslintrc.js b/.eslintrc.js new file mode 100644 index 0000000..af97d78 --- /dev/null +++ b/.eslintrc.js @@ -0,0 +1,20 @@ +module.exports = { + root: true, + extends: ["next", "prettier"], + settings: { + next: { + rootDir: ["docs/", "create-turbo/"], + }, + }, + rules: { + "@next/next/no-html-link-for-pages": "off", + }, + overrides: [ + { + files: "crates/*/js/**", + rules: { + "prefer-const": "error", + }, + }, + ], +}; diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs new file mode 100644 index 0000000..bb1a9c7 --- /dev/null +++ b/.git-blame-ignore-revs @@ -0,0 +1,2 @@ +# Apply EditorConfig to all files +e9a9249db781d6498a7c2ad057149ac1cb432302 diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 0000000..43875a4 --- /dev/null +++ b/.gitattributes @@ -0,0 +1 @@ +crates/turbopack-tests/tests/snapshot/**/output/** linguist-generated=true diff --git a/.husky/pre-commit b/.husky/pre-commit new file mode 100644 index 0000000..58b1861 --- /dev/null +++ b/.husky/pre-commit @@ -0,0 +1,4 @@ +#!/usr/bin/env sh +. 
"$(dirname -- "$0")/_/husky.sh" + +pnpm exec lint-staged diff --git a/.node-version b/.node-version new file mode 100644 index 0000000..6f7f377 --- /dev/null +++ b/.node-version @@ -0,0 +1 @@ +v16 diff --git a/.npmrc b/.npmrc new file mode 100644 index 0000000..f430b45 --- /dev/null +++ b/.npmrc @@ -0,0 +1,2 @@ +auto-install-peers = true +hoist = false diff --git a/.prettierignore b/.prettierignore new file mode 100644 index 0000000..0964dfb --- /dev/null +++ b/.prettierignore @@ -0,0 +1,27 @@ +.next/ +build/ +dist/ +node_modules/ +target/ +coverage/ +snapshot/ +generated/ +pnpm-lock.yaml + +__generated__/ +/docs/public/schema.json +/packages/eslint-plugin-turbo/__tests__/fixtures/ +/packages/turbo-codemod/templates/ +/docs/components/pages/pack-home/benchmark-data/data.json +/examples/with-svelte + + +# crates +crates/next-core/js/src/compiled +crates/turbopack-node/js/src/compiled +crates/turbopack/bench.json +crates/turbopack/tests +crates/turbopack-ecmascript/tests/analyzer/graph +crates/turbopack-ecmascript/tests/tree-shaker +crates/next-transform-strip-page-exports/tests +crates/next-transform-dynamic/tests diff --git a/.rustfmt.toml b/.rustfmt.toml new file mode 100644 index 0000000..c4e4d34 --- /dev/null +++ b/.rustfmt.toml @@ -0,0 +1,11 @@ +# Keep in sync with .editorconfig +max_width = 100 + +tab_spaces = 4 +hard_tabs = false + +format_strings = true +wrap_comments = true + +imports_granularity = "Crate" +group_imports = "StdExternalCrate" diff --git a/.taplo.toml b/.taplo.toml new file mode 100644 index 0000000..10e0215 --- /dev/null +++ b/.taplo.toml @@ -0,0 +1,8 @@ +include = ["./*.toml", "crates/*/*.toml"] + +[[rule]] +keys = ["dependencies", "*-dependencies"] + +[rule.formatting] +reorder_keys = true +indent_tables = true diff --git a/.vscode/settings.json b/.vscode/settings.json index e769a45..23b6ae3 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -1,4 +1,26 @@ { "typescript.tsdk": "node_modules/.pnpm/typescript@4.9.4/node_modules/typescript/lib", - "typescript.enablePromptUseWorkspaceTsdk": true + "typescript.enablePromptUseWorkspaceTsdk": true, + "eslint.validate": [ + "javascript", + "javascriptreact", + "typescript", + "typescriptreact" + ], + "eslint.packageManager": "pnpm", + "debug.javascript.unmapMissingSources": true, + "go.lintTool": "golangci-lint", + "files.associations": { + "libturbo.h": "c", + "turbo.json": "jsonc" + }, + "[cram]": { + "editor.trimAutoWhitespace": false, + "files.trimFinalNewlines": false, + "files.insertFinalNewline": false, + "files.trimTrailingWhitespace": false + }, + "search.exclude": { + "crates/turbopack-tests/tests/snapshot/**": true + } } \ No newline at end of file diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md new file mode 100644 index 0000000..ddccaea --- /dev/null +++ b/CODE_OF_CONDUCT.md @@ -0,0 +1,50 @@ +# Code of Conduct + +## Our Pledge + +We as members, contributors, and leaders pledge to make participation in our community a harassment-free experience for everyone, regardless of age, body size, visible or invisible disability, ethnicity, sex characteristics, gender identity and expression, level of experience, education, socio-economic status, nationality, personal appearance, race, caste, color, religion, or sexual identity and orientation. + +We pledge to act and interact in ways that contribute to an open, welcoming, diverse, inclusive, and healthy community. 
+
+## Our Standards
+
+Examples of behavior that contributes to a positive environment for our community include:
+
+- Demonstrating empathy and kindness toward other people
+- Being respectful of differing opinions, viewpoints, and experiences
+- Giving and gracefully accepting constructive feedback
+- Accepting responsibility and apologizing to those affected by our mistakes, and learning from the experience
+- Focusing on what is best not just for us as individuals, but for the overall community
+
+Examples of unacceptable behavior include:
+
+- The use of sexualized language or imagery, and sexual attention or advances of any kind
+- Trolling, insulting or derogatory comments, and personal or political attacks
+- Public or private harassment
+- Publishing others' private information, such as a physical or email address, without their explicit permission
+- Other conduct which could reasonably be considered inappropriate in a professional setting
+
+## Enforcement Responsibilities
+
+Project maintainers are responsible for clarifying and enforcing our standards of acceptable behavior and will take appropriate and fair corrective action in response to any behavior that they deem inappropriate, threatening, offensive, or harmful.
+
+Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, and will communicate reasons for moderation decisions when appropriate.
+
+## Scope
+
+This Code of Conduct applies within all community spaces, and also applies when an individual is officially representing the community in public spaces. Examples of representing our community include using an official e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event.
+
+## Enforcement
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be reported to the project team responsible for enforcement at [coc@vercel.com](mailto:coc@vercel.com). All complaints will be reviewed and investigated promptly and fairly.
+
+All project maintainers are obligated to respect the privacy and security of the reporter of any incident.
+
+Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership.
+
+## Attribution
+
+This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 2.1, available at [https://www.contributor-covenant.org/version/2/1/code_of_conduct/][version].
+
+[homepage]: https://www.contributor-covenant.org
+[version]: https://www.contributor-covenant.org/version/2/1
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
new file mode 100644
index 0000000..e592a85
--- /dev/null
+++ b/CONTRIBUTING.md
@@ -0,0 +1,268 @@
+# Contributing to Turbo
+
+Thanks for your interest in contributing to Turbo!
+
+**Important note**: At the moment, Turbo is made up of two tools, Turborepo and Turbopack, built with different languages and toolchains. In the future, Turbo will become a single toolchain built on Rust and the Turbo engine. In the meantime, please follow the respective guide when contributing to each tool:
+
+- [Contributing to Turbo](#contributing-to-turbo)
+  - [Contributing to Turborepo](#contributing-to-turborepo)
+    - [Building Turborepo](#building-turborepo)
+    - [TLS Implementation](#tls-implementation)
+    - [Running Turborepo Tests](#running-turborepo-tests)
+      - [Go Tests](#go-tests)
+    - [Debugging Turborepo](#debugging-turborepo)
+    - [Benchmarking Turborepo](#benchmarking-turborepo)
+    - [Updating `turbo`](#updating-turbo)
+    - [Manually testing `turbo`](#manually-testing-turbo)
+    - [Publishing `turbo` to the npm registry](#publishing-turbo-to-the-npm-registry)
+    - [Adding A New Crate](#adding-a-new-crate)
+  - [Contributing to Turbopack](#contributing-to-turbopack)
+    - [Turbopack Architecture](#turbopack-architecture)
+    - [Testing Turbopack](#testing-turbopack)
+    - [Benchmarking Turbopack](#benchmarking-turbopack)
+    - [Profiling Turbopack](#profiling-turbopack)
+  - [Troubleshooting](#troubleshooting)
+
+## Contributing to Turborepo
+
+### Building Turborepo
+
+Dependencies
+
+1. Install [turborepo crate](./crates/turborepo/README.md) build requirements
+
+1. Run `pnpm install` at the root
+
+Building
+
+- Building `turbo` CLI: In `cli` run `make turbo`
+- Using `turbo` to build `turbo` CLI: `./turbow.js`
+
+### TLS Implementation
+
+Turborepo uses `reqwest`, a Rust HTTP client, to make requests to the Turbo API. `reqwest` supports two TLS
+implementations: `rustls` and `native-tls`. `rustls` is a pure Rust implementation of TLS, while `native-tls`
+is a wrapper around OpenSSL. Turborepo allows users to select which implementation they want with the `native-tls`
+and `rustls-tls` features. By default, the `native-tls` feature is selected, so that `cargo build` works
+out of the box. If you wish to select `rustls-tls`, you may do so by passing `--no-default-features --features rustls-tls`
+to the build command. This allows us to build for more platforms, as `native-tls` is not supported everywhere.
+
+### Running Turborepo Tests
+
+Install dependencies
+
+On macOS:
+
+```bash
+brew install moreutils jq zstd # (moreutils is for sponge)
+```
+
+#### Go Tests
+
+First: `npm install -g turbo`.
+
+Then from the root directory, you can run:
+
+- Go unit tests
+  ```bash
+  pnpm test -- --filter=cli
+  ```
+- A single Go unit test (see more [in the Go docs](https://pkg.go.dev/cmd/go#hdr-Test_packages))
+  ```bash
+  cd cli && go test ./[path/to/package/]
+  ```
+- Rust unit tests ([install `nextest` first](https://nexte.st/book/pre-built-binaries.html))
+  ```bash
+  cargo nextest run -p turborepo-lib --features rustls-tls
+  ```
+  You can also use the built-in [`cargo test`](https://doc.rust-lang.org/cargo/commands/cargo-test.html)
+  directly with `cargo test -p turborepo-lib`.
+- CLI Integration tests
+  ```bash
+  pnpm test -- --filter=turborepo-tests-integration
+  ```
+- E2E tests
+  ```bash
+  pnpm -- turbo e2e --filter=cli
+  ```
+- Example tests
+  ```bash
+  pnpm test -- --filter=turborepo-tests-examples --
+  ```
+
+### Debugging Turborepo
+
+1. Install Delve, the Go debugger: `go install github.com/go-delve/delve/cmd/dlv@latest`
+1. In VS Code's "Run and Debug" tab, select `Build Basic` to start debugging the initial launch of `turbo` against the `build` target of the Basic Example. This task is configured in [launch.json](./.vscode/launch.json).
+
+### Benchmarking Turborepo
+
+1. `make turbo-prod`
+2. From the `benchmark/` directory, run `pnpm run benchmark`.
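+
+For example, an end-to-end benchmark run might look like the following (a sketch only; it assumes the `turbo-prod` target lives in `cli/Makefile`, next to the `make turbo` target used in [Building Turborepo](#building-turborepo)):
+
+```bash
+# Build a production-mode turbo binary.
+# Assumption: the turbo-prod target is defined in cli/Makefile.
+make -C cli turbo-prod
+
+# Run the benchmark suite against the freshly built binary.
+cd benchmark
+pnpm run benchmark
+```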
+
+### Updating `turbo`
+
+You might need to update `packages/turbo` in order to support a new platform. When you do that, you will need to link the module in order to be able to continue working. As an example, with `npm link`:
+
+```sh
+cd ~/repos/vercel/turbo/packages/turbo
+npm link
+
+# Run your build, e.g. `go build ./cmd/turbo` if you're on the platform you're adding.
+cd ~/repos/vercel/turbo/cli
+go build ./cmd/turbo
+
+# You can then run the basic example specifying the build asset path.
+cd ~/repos/vercel/turbo/examples/basic
+TURBO_BINARY_PATH=~/repos/vercel/turbo/cli/turbo.exe npm install
+TURBO_BINARY_PATH=~/repos/vercel/turbo/cli/turbo.exe npm link turbo
+```
+
+If you're using a different package manager, replace npm accordingly.
+
+### Manually testing `turbo`
+
+Before releasing, it's recommended to test the `turbo` binary manually.
+Here's a checklist of testing strategies to cover:
+
+- Test `login`, `logout`, `login --sso-team`, `link`, `unlink`
+- Test `prune` (note: `turbo` here is the unreleased turbo binary)
+  - `npx create-turbo --use-pnpm prune-test && cd prune-test`
+  - `turbo --skip-infer prune --scope=docs && cd out && pnpm install --frozen-lockfile`
+  - `turbo --skip-infer build`
+- Test `--dry-run` and `--graph`.
+- Test with and without the daemon.
+
+There are also multiple installation scenarios worth testing:
+
+- Global-only. `turbo` is installed as a global binary, with no local `turbo` in the repository.
+- Local-only. `turbo` is installed as a local binary, with no global `turbo` in PATH; `turbo` is invoked via a root package script.
+- Global + local. `turbo` is installed as a global binary, and a local `turbo` is in the repository. The global `turbo` delegates to the local `turbo`.
+
+Here are a few repositories that you can test on:
+
+- [next.js](https://github.com/vercel/next.js)
+- [tldraw](https://github.com/tldraw/tldraw)
+- [tailwindcss](https://github.com/tailwindlabs/tailwindcss)
+- [vercel](https://github.com/vercel/vercel)
+
+These lists are by no means exhaustive. Feel free to add to them with other strategies.
+
+### Publishing `turbo` to the npm registry
+
+See [the publishing guide](./release.md#release-turborepo).
+
+### Adding A New Crate
+
+When adding a new crate to the repo, it is essential that it is included in or excluded from the
+relevant workflows. This ensures that changes to the crate are tested by the correct workflows
+without triggering unnecessary ones.
+
+First, determine whether the crate is for Turbopack or Turborepo. If it is for Turbopack, then the crate
+should be added to the `default-members` key in the root `Cargo.toml`. If the crate is for Turborepo, the
+crate must be added to the `PATTERNS` list in the "Turborepo related changes" section of the `test.yml`
+workflow file. It must also be excluded from the "Turbopack related changes" section of the
+`test.yml` workflow file.
+
+For instance, if we were adding a `turborepo-foo` crate, we would add the following patterns:
+
+```diff
+  - name: Turbopack related changes
+    id: turbopack
+    uses: technote-space/get-diff-action@v6
+    with:
+      PATTERNS: |
+        pnpm-lock.yaml
+        package.json
+        crates/**
+        xtask/**
+        .cargo/**
+        rust-toolchain
+        !crates/turborepo/**
+        !crates/turborepo-lib/**
+        !crates/turborepo-ffi/**
+        !crates/turbo-updater/**
++       !crates/turborepo-foo/**
+        !**.md
+        !**.mdx
+
+  - name: Turborepo related changes
+    id: turborepo
+    uses: technote-space/get-diff-action@v6
+    with:
+      PATTERNS: |
+        pnpm-lock.yaml
+        package.json
+        crates/turborepo/**
+        crates/turborepo-lib/**
+        crates/turborepo-ffi/**
+        crates/turbo-updater/**
++       crates/turborepo-foo/**
+        .cargo/**
+        rust-toolchain
+        !**.md
+        !**.mdx
+```
+
+The crate must also be explicitly excluded from build commands
+for Turbopack and included in build commands for Turborepo.
+To do so, add a `--exclude turborepo-foo` flag to the Turbopack commands in
+`.cargo/config.toml` such as `tp-test`, and a `-p turborepo-foo` flag to the Turborepo
+commands such as `tr-test`.
+
+Finally, the crate must be added to the Turborepo section of CODEOWNERS:
+
+```diff
+# overrides for crates that are owned by turbo-oss
+ /crates/turborepo @vercel/turbo-oss
+ /crates/turborepo-ffi @vercel/turbo-oss
++ /crates/turborepo-foo @vercel/turbo-oss
+ /crates/turborepo-lib @vercel/turbo-oss
+ /crates/turborepo-scm @vercel/turbo-oss
+ /crates/turbo-updater @vercel/turbo-oss
+```
+
+## Contributing to Turbopack
+
+Turbopack uses [Cargo workspaces][workspaces] in the Turbo monorepo. You'll find
+several workspace crates inside the `crates/` directory. In order to run a particular
+crate, you can use the `cargo run -p [CRATE_NAME]` command. For example, to test the Next.js development server, run `cargo run -p next-dev`.
+
+### Turbopack Architecture
+
+A high-level introduction to Turbopack's architecture, workspace crates, and Turbo engine (the turbo-tasks crates) is available at [crates/turbopack/architecture.md](crates/turbopack/architecture.md).
+
+### Testing Turbopack
+
+Install `cargo-nextest` (https://nexte.st/):
+
+`cargo install cargo-nextest`
+
+Run via:
+
+```shell
+cargo nextest run
+```
+
+For the test cases, you need to run `pnpm install` to install some node_modules. See [Troubleshooting][] for solutions to common problems.
+
+You can also create a little demo app and run:
+
+```shell
+cargo run -p node-file-trace -- print demo/index.js
+```
+
+### Benchmarking Turbopack
+
+See [the benchmarking README for Turbopack](crates/next-dev/benches/README.md) for details.
+
+### Profiling Turbopack
+
+See [the profiling docs for Turbopack](https://turbo.build/pack/docs/advanced/profiling) for details.
+
+## Troubleshooting
+
+See [Troubleshooting][].
+
+[workspaces]: https://doc.rust-lang.org/book/ch14-03-cargo-workspaces.html
+[troubleshooting]: troubleshooting.md
diff --git a/Cargo.lock b/Cargo.lock
new file mode 100644
index 0000000..57e2dab
--- /dev/null
+++ b/Cargo.lock
@@ -0,0 +1,10242 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+version = 3 + +[[package]] +name = "Inflector" +version = "0.11.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fe438c63458706e03479442743baae6c88256498e6431708f6dfc520a26515d3" +dependencies = [ + "lazy_static", + "regex", +] + +[[package]] +name = "addr2line" +version = "0.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a76fd60b23679b7d19bd066031410fb7e458ccc5e958eb5c325888ce4baedc97" +dependencies = [ + "gimli 0.27.2", +] + +[[package]] +name = "adler" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" + +[[package]] +name = "ahash" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fcb51a0695d8f838b1ee009b3fbf66bda078cd64590202a864a8f3e8c4315c47" +dependencies = [ + "getrandom", + "once_cell", + "serde", + "version_check", +] + +[[package]] +name = "ahash" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c99f64d1e06488f620f932677e24bc6e2897582980441ae90a671415bd7ec2f" +dependencies = [ + "cfg-if 1.0.0", + "once_cell", + "version_check", +] + +[[package]] +name = "aho-corasick" +version = "0.7.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc936419f96fa211c1b9166887b38e5e40b19958e5b895be7c1f93adec7071ac" +dependencies = [ + "memchr", +] + +[[package]] +name = "android_system_properties" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" +dependencies = [ + "libc", +] + +[[package]] +name = "anes" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299" + +[[package]] +name = "ansi-str" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "84252a7e1a0df81706ce70bbad85ed1e4916448a4093ccd52dd98c6a44a477cd" +dependencies = [ + "ansitok", +] + +[[package]] +name = "ansi_term" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d52a9bb7ec0cf484c551830a7ce27bd20d67eac647e1befb56b0be4ee39a55d2" +dependencies = [ + "winapi 0.3.9", +] + +[[package]] +name = "ansitok" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "220044e6a1bb31ddee4e3db724d29767f352de47445a6cd75e1a173142136c83" +dependencies = [ + "nom", + "vte", +] + +[[package]] +name = "any_ascii" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70033777eb8b5124a81a1889416543dddef2de240019b674c81285a2635a7e1e" + +[[package]] +name = "anyhow" +version = "1.0.70" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7de8ce5e0f9f8d88245311066a578d72b7af3e7088f32783804676302df237e4" +dependencies = [ + "backtrace", +] + +[[package]] +name = "arbitrary" +version = "0.4.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db55d72333851e17d572bec876e390cd3b11eb1ef53ae821dd9f3b653d2b4569" + +[[package]] +name = "arg_enum_proc_macro" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d7c29b43ee8654590587cd033b3eca2f9c4f8cdff945ec0e6ee91ceb057d87f3" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = 
"arrayref" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6b4930d2cb77ce62f89ee5d5289b4ac049559b1c45539271f5ed4fdc7db34545" + +[[package]] +name = "arrayvec" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23b62fc65de8e4e7f52534fb52b0f3ed04746ae267519eef2a83941e8085068b" + +[[package]] +name = "arrayvec" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8da52d66c7071e2e3fa2a1e5c6d088fec47b593032b254f5e980de8ea54454d6" +dependencies = [ + "serde", +] + +[[package]] +name = "assert-json-diff" +version = "2.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "47e4f2b81832e72834d7518d8487a0396a28cc408186a2e8854c0f98011faf12" +dependencies = [ + "serde", + "serde_json", +] + +[[package]] +name = "assert_cmd" +version = "2.0.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9834fcc22e0874394a010230586367d4a3e9f11b560f469262678547e1d2575e" +dependencies = [ + "bstr", + "doc-comment", + "predicates", + "predicates-core", + "predicates-tree", + "wait-timeout", +] + +[[package]] +name = "ast_node" +version = "0.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "52f7fd7740c5752c16281a1c1f9442b1e69ba41738acde85dc604aaf3ce41890" +dependencies = [ + "pmutil", + "proc-macro2", + "quote", + "swc_macros_common", + "syn 1.0.109", +] + +[[package]] +name = "async-channel" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf46fee83e5ccffc220104713af3292ff9bc7c64c7de289f66dae8e38d826833" +dependencies = [ + "concurrent-queue", + "event-listener", + "futures-core", +] + +[[package]] +name = "async-compression" +version = "0.3.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "942c7cd7ae39e91bde4820d74132e9862e62c2f386c3aa90ccf55949f5bad63a" +dependencies = [ + "flate2", + "futures-core", + "memchr", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "async-executor" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "17adb73da160dfb475c183343c8cccd80721ea5a605d3eb57125f0a7b7a92d0b" +dependencies = [ + "async-lock", + "async-task", + "concurrent-queue", + "fastrand", + "futures-lite", + "slab", +] + +[[package]] +name = "async-global-executor" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1b6f5d7df27bd294849f8eec66ecfc63d11814df7a4f5d74168a2394467b776" +dependencies = [ + "async-channel", + "async-executor", + "async-io", + "async-lock", + "blocking", + "futures-lite", + "once_cell", +] + +[[package]] +name = "async-io" +version = "1.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fc5b45d93ef0529756f812ca52e44c221b35341892d3dcc34132ac02f3dd2af" +dependencies = [ + "async-lock", + "autocfg", + "cfg-if 1.0.0", + "concurrent-queue", + "futures-lite", + "log", + "parking", + "polling", + "rustix 0.37.11", + "slab", + "socket2", + "waker-fn", +] + +[[package]] +name = "async-lock" +version = "2.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa24f727524730b077666307f2734b4a1a1c57acb79193127dcc8914d5242dd7" +dependencies = [ + "event-listener", +] + +[[package]] +name = "async-object-pool" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"aeb901c30ebc2fc4ab46395bbfbdba9542c16559d853645d75190c3056caf3bc" +dependencies = [ + "async-std", +] + +[[package]] +name = "async-process" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6381ead98388605d0d9ff86371043b5aa922a3905824244de40dc263a14fcba4" +dependencies = [ + "async-io", + "async-lock", + "autocfg", + "blocking", + "cfg-if 1.0.0", + "event-listener", + "futures-lite", + "libc", + "signal-hook", + "windows-sys 0.42.0", +] + +[[package]] +name = "async-recursion" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0e97ce7de6cf12de5d7226c73f5ba9811622f4db3a5b91b55c53e987e5f91cba" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.15", +] + +[[package]] +name = "async-std" +version = "1.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62565bb4402e926b29953c785397c6dc0391b7b446e45008b0049eb43cec6f5d" +dependencies = [ + "async-channel", + "async-global-executor", + "async-io", + "async-lock", + "async-process", + "crossbeam-utils", + "futures-channel", + "futures-core", + "futures-io", + "futures-lite", + "gloo-timers", + "kv-log-macro", + "log", + "memchr", + "once_cell", + "pin-project-lite", + "pin-utils", + "slab", + "wasm-bindgen-futures", +] + +[[package]] +name = "async-stream" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ad445822218ce64be7a341abfb0b1ea43b5c23aa83902542a4542e78309d8e5e" +dependencies = [ + "async-stream-impl", + "futures-core", + "pin-project-lite", +] + +[[package]] +name = "async-stream-impl" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e4655ae1a7b0cdf149156f780c5bf3f1352bc53cbd9e0a361a7ef7b22947e965" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "async-task" +version = "4.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a40729d2133846d9ed0ea60a8b9541bccddab49cd30f0715a1da672fe9a2524" + +[[package]] +name = "async-trait" +version = "0.1.68" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9ccdd8f2a161be9bd5c023df56f1b2a0bd1d83872ae53b71a84a12c9bf6e842" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.15", +] + +[[package]] +name = "async-tungstenite" +version = "0.17.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1b71b31561643aa8e7df3effe284fa83ab1a840e52294c5f4bd7bfd8b2becbb" +dependencies = [ + "futures-io", + "futures-util", + "log", + "pin-project-lite", + "tokio", + "tungstenite 0.17.3", +] + +[[package]] +name = "atomic-polyfill" +version = "0.1.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3ff7eb3f316534d83a8a2c3d1674ace8a5a71198eba31e2e2b597833f699b28" +dependencies = [ + "critical-section", +] + +[[package]] +name = "atomic-waker" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "debc29dde2e69f9e47506b525f639ed42300fc014a3e007832592448fa8e4599" + +[[package]] +name = "atty" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" +dependencies = [ + "hermit-abi 0.1.19", + "libc", + "winapi 0.3.9", +] + +[[package]] +name = "auto-hash-map" +version = "0.1.0" +dependencies = [ + "serde", +] + +[[package]] +name = "auto_impl" +version = "0.5.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "7862e21c893d65a1650125d157eaeec691439379a1cee17ee49031b79236ada4" +dependencies = [ + "proc-macro-error", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "autocfg" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" + +[[package]] +name = "av-metrics" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13638b394190295622c0d2493d0c8c39210b92c2110895bfb14c58db213c2b39" +dependencies = [ + "crossbeam", + "itertools", + "lab", + "num-traits", + "rayon", + "thiserror", + "v_frame", +] + +[[package]] +name = "av1-grain" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6f6ca6f0c18c02c2fbfc119df551b8aeb8a385f6d5980f1475ba0255f1e97f1e" +dependencies = [ + "anyhow", + "arrayvec 0.7.2", + "itertools", + "log", + "nom", + "num-rational", + "serde", + "v_frame", +] + +[[package]] +name = "avif-serialize" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "876c75a42f6364451a033496a14c44bffe41f5f4a8236f697391f11024e596d2" +dependencies = [ + "arrayvec 0.7.2", +] + +[[package]] +name = "axum" +version = "0.6.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "349f8ccfd9221ee7d1f3d4b33e1f8319b3a81ed8f61f2ea40b37b859794b4491" +dependencies = [ + "async-trait", + "axum-core", + "bitflags 1.3.2", + "bytes", + "futures-util", + "http", + "http-body", + "hyper", + "itoa", + "matchit", + "memchr", + "mime", + "percent-encoding", + "pin-project-lite", + "rustversion", + "serde", + "serde_json", + "serde_path_to_error", + "serde_urlencoded", + "sync_wrapper", + "tokio", + "tower", + "tower-layer", + "tower-service", +] + +[[package]] +name = "axum-core" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2f958c80c248b34b9a877a643811be8dbca03ca5ba827f2b63baf3a81e5fc4e" +dependencies = [ + "async-trait", + "bytes", + "futures-util", + "http", + "http-body", + "mime", + "rustversion", + "tower-layer", + "tower-service", +] + +[[package]] +name = "axum-server" +version = "0.4.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bace45b270e36e3c27a190c65883de6dfc9f1d18c829907c127464815dc67b24" +dependencies = [ + "bytes", + "futures-util", + "http", + "http-body", + "hyper", + "tokio", + "tower-service", +] + +[[package]] +name = "backtrace" +version = "0.3.67" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "233d376d6d185f2a3093e58f283f60f880315b6c60075b01f36b3b85154564ca" +dependencies = [ + "addr2line", + "cc", + "cfg-if 1.0.0", + "libc", + "miniz_oxide", + "object", + "rustc-demangle", +] + +[[package]] +name = "base-x" +version = "0.2.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4cbbc9d0964165b47557570cce6c952866c2678457aca742aafc9fb771d30270" + +[[package]] +name = "base16" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d27c3610c36aee21ce8ac510e6224498de4228ad772a171ed65643a24693a5a8" + +[[package]] +name = "base64" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" + +[[package]] +name = "base64" +version = "0.21.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4a4ddaa51a5bc52a6948f74c06d20aaaddb71924eab79b8c97a8c556e942d6a" + +[[package]] +name = "better_scoped_tls" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b73e8ecdec39e98aa3b19e8cd0b8ed8f77ccb86a6b0b2dc7cd86d105438a2123" +dependencies = [ + "scoped-tls", +] + +[[package]] +name = "bincode" +version = "1.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1f45e9417d87227c7a56d22e471c6206462cba514c7590c09aff4cf6d1ddcad" +dependencies = [ + "serde", +] + +[[package]] +name = "bindgen" +version = "0.59.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2bd2a9a458e8f4304c52c43ebb0cfbd520289f8379a52e329a38afda99bf8eb8" +dependencies = [ + "bitflags 1.3.2", + "cexpr", + "clang-sys", + "clap 2.34.0", + "env_logger 0.9.3", + "lazy_static", + "lazycell", + "log", + "peeking_take_while", + "proc-macro2", + "quote", + "regex", + "rustc-hash", + "shlex", + "which", +] + +[[package]] +name = "binding_macros" +version = "0.49.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1604d4f9300c26c632f33aeb636dc9843b50d365518168cc2583022699533a8" +dependencies = [ + "anyhow", + "console_error_panic_hook", + "js-sys", + "once_cell", + "serde", + "serde-wasm-bindgen", + "swc", + "swc_common", + "swc_ecma_ast", + "swc_ecma_transforms", + "swc_ecma_visit", + "wasm-bindgen", + "wasm-bindgen-futures", +] + +[[package]] +name = "bitflags" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" + +[[package]] +name = "bitflags" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c70beb79cbb5ce9c4f8e20849978f34225931f665bb49efa6982875a4d5facb3" + +[[package]] +name = "bitreader" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d84ea71c85d1fe98fe67a9b9988b1695bc24c0b0d3bfb18d4c510f44b4b09941" +dependencies = [ + "cfg-if 1.0.0", +] + +[[package]] +name = "bitstream-io" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d28070975aaf4ef1fd0bd1f29b739c06c2cdd9972e090617fb6dca3b2cb564e" + +[[package]] +name = "blake3" +version = "1.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42ae2468a89544a466886840aa467a25b766499f4f04bf7d9fcd10ecee9fccef" +dependencies = [ + "arrayref", + "arrayvec 0.7.2", + "cc", + "cfg-if 1.0.0", + "constant_time_eq", + "digest", +] + +[[package]] +name = "block-buffer" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" +dependencies = [ + "generic-array", +] + +[[package]] +name = "blocking" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c67b173a56acffd6d2326fb7ab938ba0b00a71480e14902b2591c87bc5741e8" +dependencies = [ + "async-channel", + "async-lock", + "async-task", + "atomic-waker", + "fastrand", + "futures-lite", +] + +[[package]] +name = "browserslist-rs" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef956561c9a03c35af46714efd0c135e21768a2a012f900ca8a59b28e75d0cd1" +dependencies = [ + "ahash 0.7.6", + "anyhow", + "chrono", + "either", + "itertools", + "js-sys", + "nom", + "once_cell", 
+ "quote", + "serde", + "serde-wasm-bindgen", + "serde_json", + "string_cache", + "string_cache_codegen", + "thiserror", + "wasm-bindgen", +] + +[[package]] +name = "bstr" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5ffdb39cb703212f3c11973452c2861b972f757b021158f3516ba10f2fa8b2c1" +dependencies = [ + "memchr", + "once_cell", + "regex-automata", + "serde", +] + +[[package]] +name = "build-target" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "832133bbabbbaa9fbdba793456a2827627a7d2b8fb96032fa1e7666d7895832b" + +[[package]] +name = "built" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b9c056b9ed43aee5e064b683aa1ec783e19c6acec7559e3ae931b7490472fbe" +dependencies = [ + "cargo-lock", + "git2 0.15.0", +] + +[[package]] +name = "bumpalo" +version = "3.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0d261e256854913907f67ed06efbc3338dfe6179796deefc1ff763fc1aee5535" + +[[package]] +name = "bytecheck" +version = "0.6.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13fe11640a23eb24562225322cd3e452b93a3d4091d62fab69c70542fcd17d1f" +dependencies = [ + "bytecheck_derive", + "ptr_meta", + "simdutf8", +] + +[[package]] +name = "bytecheck_derive" +version = "0.6.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e31225543cb46f81a7e224762764f4a6a0f097b1db0b175f69e8065efaa42de5" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "bytecount" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c676a478f63e9fa2dd5368a42f28bba0d6c560b775f38583c8bbaa7fcd67c9c" + +[[package]] +name = "bytemuck" +version = "1.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "17febce684fd15d89027105661fec94afb475cb995fbc59d2865198446ba2eea" + +[[package]] +name = "byteorder" +version = "1.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" + +[[package]] +name = "bytes" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "89b2fd2a0dcf38d7971e2194b6b6eebab45ae01067456a7fd93d5547a61b70be" +dependencies = [ + "serde", +] + +[[package]] +name = "cargo-lock" +version = "8.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "031718ddb8f78aa5def78a09e90defe30151d1f6c672f937af4dd916429ed996" +dependencies = [ + "semver 1.0.17", + "serde", + "toml 0.5.11", + "url", +] + +[[package]] +name = "cast" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" + +[[package]] +name = "castaway" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2698f953def977c68f935bb0dfa959375ad4638570e969e2f1e9f433cbf1af6" + +[[package]] +name = "cbindgen" +version = "0.24.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a6358dedf60f4d9b8db43ad187391afe959746101346fe51bb978126bec61dfb" +dependencies = [ + "clap 3.2.23", + "heck 0.4.1", + "indexmap", + "log", + "proc-macro2", + "quote", + "serde", + "serde_json", + "syn 1.0.109", + "tempfile", + "toml 0.5.11", +] + +[[package]] +name = "cc" +version = "1.0.79" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "50d30906286121d95be3d479533b458f87493b30a4b5f79a607db8f5d11aa91f" +dependencies = [ + "jobserver", +] + +[[package]] +name = "cesu8" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d43a04d8753f35258c91f8ec639f792891f748a1edbd759cf1dcea3382ad83c" + +[[package]] +name = "cexpr" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6fac387a98bb7c37292057cffc56d62ecb629900026402633ae9160df93a8766" +dependencies = [ + "nom", +] + +[[package]] +name = "cfg-expr" +version = "0.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a35b255461940a32985c627ce82900867c61db1659764d3675ea81963f72a4c6" +dependencies = [ + "smallvec", +] + +[[package]] +name = "cfg-if" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822" + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "chromiumoxide" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c5506e432f602b1747e8a0d60ac6607c6977af4ee9720237764170305323e62" +dependencies = [ + "async-tungstenite", + "base64 0.13.1", + "cfg-if 1.0.0", + "chromiumoxide_cdp", + "chromiumoxide_types", + "fnv", + "futures", + "futures-timer", + "pin-project-lite", + "serde", + "serde_json", + "thiserror", + "tokio", + "tracing", + "url", + "which", + "winreg", +] + +[[package]] +name = "chromiumoxide_cdp" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "88b6988af5c6bbf097999e7db879729dd7b27a62010c482d4922fddeb4f220d4" +dependencies = [ + "chromiumoxide_pdl", + "chromiumoxide_types", + "serde", + "serde_json", +] + +[[package]] +name = "chromiumoxide_pdl" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4cdf6513e24d260548345a5ef13a04110f5915b7764c274933e10f9363a43e3b" +dependencies = [ + "chromiumoxide_types", + "either", + "heck 0.4.1", + "once_cell", + "proc-macro2", + "quote", + "regex", + "serde", + "serde_json", +] + +[[package]] +name = "chromiumoxide_types" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1af9c183b5aac7f09639cc7b4ddde8a8551850d2c9bf36530830cb10e28e676f" +dependencies = [ + "serde", + "serde_json", +] + +[[package]] +name = "chrono" +version = "0.4.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4e3c5919066adf22df73762e50cffcde3a758f2a848b113b586d1f86728b673b" +dependencies = [ + "iana-time-zone", + "js-sys", + "num-integer", + "num-traits", + "serde", + "time 0.1.45", + "wasm-bindgen", + "winapi 0.3.9", +] + +[[package]] +name = "ciborium" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0c137568cc60b904a7724001b35ce2630fd00d5d84805fbb608ab89509d788f" +dependencies = [ + "ciborium-io", + "ciborium-ll", + "serde", +] + +[[package]] +name = "ciborium-io" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "346de753af073cc87b52b2083a506b38ac176a44cfb05497b622e27be899b369" + +[[package]] +name = "ciborium-ll" +version = "0.2.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "213030a2b5a4e0c0892b6652260cf6ccac84827b83a85a534e178e3906c4cf1b" +dependencies = [ + "ciborium-io", + "half", +] + +[[package]] +name = "clang-sys" +version = "1.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c688fc74432808e3eb684cae8830a86be1d66a2bd58e1f248ed0960a590baf6f" +dependencies = [ + "glob", + "libc", + "libloading", +] + +[[package]] +name = "clap" +version = "2.34.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a0610544180c38b88101fecf2dd634b174a62eef6946f84dfc6a7127512b381c" +dependencies = [ + "ansi_term", + "atty", + "bitflags 1.3.2", + "strsim 0.8.0", + "textwrap 0.11.0", + "unicode-width", + "vec_map", +] + +[[package]] +name = "clap" +version = "3.2.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "71655c45cb9845d3270c9d6df84ebe72b4dad3c2ba3f7023ad47c144e4e473a5" +dependencies = [ + "atty", + "bitflags 1.3.2", + "clap_lex 0.2.4", + "indexmap", + "strsim 0.10.0", + "termcolor", + "textwrap 0.16.0", +] + +[[package]] +name = "clap" +version = "4.1.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42dfd32784433290c51d92c438bb72ea5063797fc3cc9a21a8c4346bebbb2098" +dependencies = [ + "bitflags 2.1.0", + "clap_derive", + "clap_lex 0.3.3", + "is-terminal", + "once_cell", + "strsim 0.10.0", + "termcolor", + "terminal_size 0.2.6", +] + +[[package]] +name = "clap_complete" +version = "4.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "501ff0a401473ea1d4c3b125ff95506b62c5bc5768d818634195fbb7c4ad5ff4" +dependencies = [ + "clap 4.1.11", +] + +[[package]] +name = "clap_derive" +version = "4.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fddf67631444a3a3e3e5ac51c36a5e01335302de677bd78759eaa90ab1f46644" +dependencies = [ + "heck 0.4.1", + "proc-macro-error", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "clap_lex" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2850f2f5a82cbf437dd5af4d49848fbdfc27c157c3d010345776f952765261c5" +dependencies = [ + "os_str_bytes", +] + +[[package]] +name = "clap_lex" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "033f6b7a4acb1f358c742aaca805c939ee73b4c6209ae4318ec7aca81c42e646" +dependencies = [ + "os_str_bytes", +] + +[[package]] +name = "cmake" +version = "0.1.49" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db34956e100b30725f2eb215f90d4871051239535632f84fea3bc92722c66b7c" +dependencies = [ + "cc", +] + +[[package]] +name = "codespan-reporting" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3538270d33cc669650c4b093848450d380def10c331d38c768e34cac80576e6e" +dependencies = [ + "termcolor", + "unicode-width", +] + +[[package]] +name = "color_quant" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d7b894f5411737b7867f4827955924d7c254fc9f4d91a6aad6b097804b1018b" + +[[package]] +name = "combine" +version = "4.6.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "35ed6e9d84f0b51a7f52daf1c7d71dd136fd7a3f41a8462b8cdb8c78d920fad4" +dependencies = [ + "bytes", + "memchr", +] + +[[package]] +name = "command-group" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum 
= "5080df6b0f0ecb76cab30808f00d937ba725cebe266a3da8cd89dff92f2a9916" +dependencies = [ + "async-trait", + "nix", + "tokio", + "winapi 0.3.9", +] + +[[package]] +name = "concurrent-queue" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c278839b831783b70278b14df4d45e1beb1aad306c07bb796637de9a0e323e8e" +dependencies = [ + "crossbeam-utils", +] + +[[package]] +name = "config" +version = "0.13.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d379af7f68bfc21714c6c7dea883544201741d2ce8274bb12fa54f89507f52a7" +dependencies = [ + "async-trait", + "json5", + "lazy_static", + "nom", + "pathdiff", + "ron", + "rust-ini", + "serde", + "serde_json", + "toml 0.5.11", + "yaml-rust", +] + +[[package]] +name = "console" +version = "0.15.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3d79fbe8970a77e3e34151cc13d3b3e248aa0faaecb9f6091fa07ebefe5ad60" +dependencies = [ + "encode_unicode", + "lazy_static", + "libc", + "unicode-width", + "windows-sys 0.42.0", +] + +[[package]] +name = "console-api" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e57ff02e8ad8e06ab9731d5dc72dc23bef9200778eae1a89d555d8c42e5d4a86" +dependencies = [ + "prost", + "prost-types", + "tonic", + "tracing-core", +] + +[[package]] +name = "console-subscriber" +version = "0.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22a3a81dfaf6b66bce5d159eddae701e3a002f194d378cbf7be5f053c281d9be" +dependencies = [ + "console-api", + "crossbeam-channel", + "crossbeam-utils", + "futures", + "hdrhistogram", + "humantime", + "prost-types", + "serde", + "serde_json", + "thread_local", + "tokio", + "tokio-stream", + "tonic", + "tracing", + "tracing-core", + "tracing-subscriber", +] + +[[package]] +name = "console_error_panic_hook" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a06aeb73f470f66dcdbf7223caeebb85984942f22f1adb2a088cf9668146bbbc" +dependencies = [ + "cfg-if 1.0.0", + "wasm-bindgen", +] + +[[package]] +name = "const-cstr" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed3d0b5ff30645a68f35ece8cea4556ca14ef8a1651455f789a099a0513532a6" + +[[package]] +name = "const_fn" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fbdcdcb6d86f71c5e97409ad45898af11cbc995b4ee8112d59095a28d376c935" + +[[package]] +name = "const_fn_assert" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "27d614f23f34f7b5165a77dc1591f497e2518f9cec4b4f4b92bfc4dc6cf7a190" + +[[package]] +name = "const_format" +version = "0.2.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7309d9b4d3d2c0641e018d449232f2e28f1b22933c137f157d3dbc14228b8c0e" +dependencies = [ + "const_format_proc_macros", +] + +[[package]] +name = "const_format_proc_macros" +version = "0.2.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d897f47bf7270cf70d370f8f98c1abb6d2d4cf60a6845d30e05bfb90c6568650" +dependencies = [ + "proc-macro2", + "quote", + "unicode-xid", +] + +[[package]] +name = "constant_time_eq" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13418e745008f7349ec7e449155f419a61b92b58a99cc3616942b926825ec76b" + +[[package]] +name = "convert_case" +version = "0.5.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "fb4a24b1aaf0fd0ce8b45161144d6f42cd91677fd5940fd431183eb023b3a2b8" + +[[package]] +name = "convert_case" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec182b0ca2f35d8fc196cf3404988fd8b8c739a4d270ff118a398feb0cbec1ca" +dependencies = [ + "unicode-segmentation", +] + +[[package]] +name = "cooked-waker" +version = "5.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "147be55d677052dabc6b22252d5dd0fd4c29c8c27aa4f2fbef0f94aa003b406f" + +[[package]] +name = "core-foundation" +version = "0.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "194a7a9e6de53fa55116934067c844d9d749312f75c6f6d0980e8c252f8c2146" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "core-foundation-sys" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e496a50fda8aacccc86d7529e2c1e0892dbd0f898a6b5645b5561b89c3210efa" + +[[package]] +name = "core-graphics" +version = "0.22.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2581bbab3b8ffc6fcbd550bf46c355135d16e9ff2a6ea032ad6b9bf1d7efe4fb" +dependencies = [ + "bitflags 1.3.2", + "core-foundation", + "core-graphics-types", + "foreign-types", + "libc", +] + +[[package]] +name = "core-graphics-types" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3a68b68b3446082644c91ac778bf50cd4104bfb002b5a6a7c44cca5a2c70788b" +dependencies = [ + "bitflags 1.3.2", + "core-foundation", + "foreign-types", + "libc", +] + +[[package]] +name = "core-text" +version = "19.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "99d74ada66e07c1cefa18f8abfba765b486f250de2e4a999e5727fc0dd4b4a25" +dependencies = [ + "core-foundation", + "core-graphics", + "foreign-types", + "libc", +] + +[[package]] +name = "corosensei" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9847f90f32a50b0dcbd68bc23ff242798b13080b97b0569f6ed96a45ce4cf2cd" +dependencies = [ + "autocfg", + "cfg-if 1.0.0", + "libc", + "scopeguard", + "windows-sys 0.33.0", +] + +[[package]] +name = "cpufeatures" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "280a9f2d8b3a38871a3c8a46fb80db65e5e5ed97da80c4d08bf27fb63e35e181" +dependencies = [ + "libc", +] + +[[package]] +name = "cranelift-bforest" +version = "0.91.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a2ab4512dfd3a6f4be184403a195f76e81a8a9f9e6c898e19d2dc3ce20e0115" +dependencies = [ + "cranelift-entity", +] + +[[package]] +name = "cranelift-codegen" +version = "0.91.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "98b022ed2a5913a38839dfbafe6cf135342661293b08049843362df4301261dc" +dependencies = [ + "arrayvec 0.7.2", + "bumpalo", + "cranelift-bforest", + "cranelift-codegen-meta", + "cranelift-codegen-shared", + "cranelift-egraph", + "cranelift-entity", + "cranelift-isle", + "gimli 0.26.2", + "log", + "regalloc2", + "smallvec", + "target-lexicon", +] + +[[package]] +name = "cranelift-codegen-meta" +version = "0.91.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "639307b45434ad112a98f8300c0f0ab085cbefcd767efcdef9ef19d4c0756e74" +dependencies = [ + "cranelift-codegen-shared", +] + +[[package]] +name = "cranelift-codegen-shared" 
+version = "0.91.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "278e52e29c53fcf32431ef08406c295699a70306d05a0715c5b1bf50e33a9ab7" + +[[package]] +name = "cranelift-egraph" +version = "0.91.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "624b54323b06e675293939311943ba82d323bb340468ce1889be5da7932c8d73" +dependencies = [ + "cranelift-entity", + "fxhash", + "hashbrown 0.12.3", + "indexmap", + "log", + "smallvec", +] + +[[package]] +name = "cranelift-entity" +version = "0.91.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a59bcbca89c3f1b70b93ab3cbba5e5e0cbf3e63dadb23c7525cb142e21a9d4c" + +[[package]] +name = "cranelift-frontend" +version = "0.91.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0d70abacb8cfef3dc8ff7e8836e9c1d70f7967dfdac824a4cd5e30223415aca6" +dependencies = [ + "cranelift-codegen", + "log", + "smallvec", + "target-lexicon", +] + +[[package]] +name = "cranelift-isle" +version = "0.91.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "393bc73c451830ff8dbb3a07f61843d6cb41a084f9996319917c0b291ed785bb" + +[[package]] +name = "crc" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49fc9a695bca7f35f5f4c15cddc84415f66a74ea78eef08e90c5024f2b540e23" +dependencies = [ + "crc-catalog", +] + +[[package]] +name = "crc-catalog" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ccaeedb56da03b09f598226e25e80088cb4cd25f316e6e4df7d695f0feeb1403" + +[[package]] +name = "crc32fast" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b540bd8bc810d3885c6ea91e2018302f68baba2129ab3e88f32389ee9370880d" +dependencies = [ + "cfg-if 1.0.0", +] + +[[package]] +name = "criterion" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7c76e09c1aae2bc52b3d2f29e13c6572553b30c4aa1b8a49fd70de6412654cb" +dependencies = [ + "anes", + "atty", + "cast", + "ciborium", + "clap 3.2.23", + "criterion-plot", + "futures", + "itertools", + "lazy_static", + "num-traits", + "oorandom", + "plotters", + "rayon", + "regex", + "serde", + "serde_derive", + "serde_json", + "tinytemplate", + "tokio", + "walkdir", +] + +[[package]] +name = "criterion-plot" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6b50826342786a51a89e2da3a28f1c32b06e387201bc2d19791f622c673706b1" +dependencies = [ + "cast", + "itertools", +] + +[[package]] +name = "critical-section" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6548a0ad5d2549e111e1f6a11a6c2e2d00ce6a3dafe22948d67c2b443f775e52" + +[[package]] +name = "crossbeam" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2801af0d36612ae591caa9568261fddce32ce6e08a7275ea334a06a4ad021a2c" +dependencies = [ + "cfg-if 1.0.0", + "crossbeam-channel", + "crossbeam-deque", + "crossbeam-epoch", + "crossbeam-queue", + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-channel" +version = "0.5.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a33c2bf77f2df06183c3aa30d1e96c0695a313d4f9c453cc3762a6db39f99200" +dependencies = [ + "cfg-if 1.0.0", + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-deque" +version = "0.8.3" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce6fd6f855243022dcecf8702fef0c297d4338e226845fe067f6341ad9fa0cef" +dependencies = [ + "cfg-if 1.0.0", + "crossbeam-epoch", + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-epoch" +version = "0.9.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "46bd5f3f85273295a9d14aedfb86f6aadbff6d8f5295c4a9edb08e819dcf5695" +dependencies = [ + "autocfg", + "cfg-if 1.0.0", + "crossbeam-utils", + "memoffset 0.8.0", + "scopeguard", +] + +[[package]] +name = "crossbeam-queue" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d1cfb3ea8a53f37c40dea2c7bedcbd88bdfae54f5e2175d6ecaff1c988353add" +dependencies = [ + "cfg-if 1.0.0", + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-utils" +version = "0.8.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c063cd8cc95f5c377ed0d4b49a4b21f632396ff690e8470c29b3359b346984b" +dependencies = [ + "cfg-if 1.0.0", +] + +[[package]] +name = "crossterm" +version = "0.25.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e64e6c0fbe2c17357405f7c758c1ef960fce08bdfb2c03d88d2a18d7e09c4b67" +dependencies = [ + "bitflags 1.3.2", + "crossterm_winapi", + "libc", + "mio 0.8.6", + "parking_lot", + "signal-hook", + "signal-hook-mio", + "winapi 0.3.9", +] + +[[package]] +name = "crossterm" +version = "0.26.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a84cda67535339806297f1b331d6dd6320470d2a0fe65381e79ee9e156dd3d13" +dependencies = [ + "bitflags 1.3.2", + "crossterm_winapi", + "libc", + "mio 0.8.6", + "parking_lot", + "signal-hook", + "signal-hook-mio", + "winapi 0.3.9", +] + +[[package]] +name = "crossterm_winapi" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2ae1b35a484aa10e07fe0638d02301c5ad24de82d310ccbd2f3693da5f09bf1c" +dependencies = [ + "winapi 0.3.9", +] + +[[package]] +name = "crypto-common" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" +dependencies = [ + "generic-array", + "typenum", +] + +[[package]] +name = "ctor" +version = "0.1.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d2301688392eb071b0bf1a37be05c469d3cc4dbbd95df672fe28ab021e6a096" +dependencies = [ + "quote", + "syn 1.0.109", +] + +[[package]] +name = "ctrlc" +version = "3.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbcf33c2a618cbe41ee43ae6e9f2e48368cd9f9db2896f10167d8d762679f639" +dependencies = [ + "nix", + "windows-sys 0.45.0", +] + +[[package]] +name = "cty" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b365fabc795046672053e29c954733ec3b05e4be654ab130fe8f1f94d7051f35" + +[[package]] +name = "curl" +version = "0.4.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "509bd11746c7ac09ebd19f0b17782eae80aadee26237658a6b4808afb5c11a22" +dependencies = [ + "curl-sys", + "libc", + "openssl-probe", + "openssl-sys", + "schannel", + "socket2", + "winapi 0.3.9", +] + +[[package]] +name = "curl-sys" +version = "0.4.60+curl-7.88.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "717abe2cb465a5da6ce06617388a3980c9a2844196734bec8ccb8e575250f13f" +dependencies = [ + "cc", + "libc", + "libnghttp2-sys", + "libz-sys", + 
"openssl-sys", + "pkg-config", + "vcpkg", + "winapi 0.3.9", +] + +[[package]] +name = "cxx" +version = "1.0.93" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a9c00419335c41018365ddf7e4d5f1c12ee3659ddcf3e01974650ba1de73d038" +dependencies = [ + "cc", + "cxxbridge-flags", + "cxxbridge-macro", + "link-cplusplus", +] + +[[package]] +name = "cxx-build" +version = "1.0.93" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fb8307ad413a98fff033c8545ecf133e3257747b3bae935e7602aab8aa92d4ca" +dependencies = [ + "cc", + "codespan-reporting", + "once_cell", + "proc-macro2", + "quote", + "scratch", + "syn 2.0.15", +] + +[[package]] +name = "cxxbridge-flags" +version = "1.0.93" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "edc52e2eb08915cb12596d29d55f0b5384f00d697a646dbd269b6ecb0fbd9d31" + +[[package]] +name = "cxxbridge-macro" +version = "1.0.93" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "631569015d0d8d54e6c241733f944042623ab6df7bc3be7466874b05fcdb1c5f" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.15", +] + +[[package]] +name = "darling" +version = "0.14.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b750cb3417fd1b327431a470f388520309479ab0bf5e323505daf0290cd3850" +dependencies = [ + "darling_core", + "darling_macro", +] + +[[package]] +name = "darling_core" +version = "0.14.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "109c1ca6e6b7f82cc233a97004ea8ed7ca123a9af07a8230878fcfda9b158bf0" +dependencies = [ + "fnv", + "ident_case", + "proc-macro2", + "quote", + "strsim 0.10.0", + "syn 1.0.109", +] + +[[package]] +name = "darling_macro" +version = "0.14.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4aab4dbc9f7611d8b55048a3a16d2d010c2c8334e46304b40ac1cc14bf3b48e" +dependencies = [ + "darling_core", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "dashmap" +version = "5.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "907076dfda823b0b36d2a1bb5f90c96660a5bbcd7729e10727f07858f22c4edc" +dependencies = [ + "cfg-if 1.0.0", + "hashbrown 0.12.3", + "lock_api", + "once_cell", + "parking_lot_core", +] + +[[package]] +name = "data-encoding" +version = "2.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23d8666cb01533c39dde32bcbab8e227b4ed6679b2c925eba05feabea39508fb" + +[[package]] +name = "dav1d" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7284148338177cb1cd0d0cdd7bf26440f8326999063eed294aa7d77b46a7e263" +dependencies = [ + "dav1d-sys", +] + +[[package]] +name = "dav1d-sys" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "88e40c4c77d141a3b70113ee45a1502b9c80e24f176958d39a8361abcf30c883" +dependencies = [ + "bindgen", + "system-deps", +] + +[[package]] +name = "dcv-color-primitives" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1457f4dd8395fef9f61996b5783b82ed7b234b4b55e1843d04e07fded0538005" +dependencies = [ + "paste", + "wasm-bindgen", +] + +[[package]] +name = "derivative" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "dialoguer" +version = 
"0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af3c796f3b0b408d9fd581611b47fa850821fcb84aa640b83a3c1a5be2d691f2" +dependencies = [ + "console", + "fuzzy-matcher", + "shell-words", + "tempfile", + "zeroize", +] + +[[package]] +name = "diff" +version = "0.1.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56254986775e3233ffa9c4d7d3faaf6d36a2c09d30b20687e9f88bc8bafc16c8" + +[[package]] +name = "difference" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "524cbf6897b527295dff137cec09ecf3a05f4fddffd7dfcd1585403449e74198" + +[[package]] +name = "difflib" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6184e33543162437515c2e2b48714794e37845ec9851711914eec9d308f6ebe8" + +[[package]] +name = "digest" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8168378f4e5023e7218c89c891c0fd8ecdb5e5e4f18cb78f38cf245dd021e76f" +dependencies = [ + "block-buffer", + "crypto-common", + "subtle", +] + +[[package]] +name = "directories" +version = "4.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f51c5d4ddabd36886dd3e1438cb358cdcb0d7c499cb99cb4ac2e38e18b5cb210" +dependencies = [ + "dirs-sys", +] + +[[package]] +name = "dirs" +version = "4.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca3aa72a6f96ea37bbc5aa912f6788242832f75369bdfdadcb0e38423f100059" +dependencies = [ + "dirs-sys", +] + +[[package]] +name = "dirs-next" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b98cf8ebf19c3d1b223e151f99a4f9f0690dca41414773390fc824184ac833e1" +dependencies = [ + "cfg-if 1.0.0", + "dirs-sys-next", +] + +[[package]] +name = "dirs-sys" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b1d1d91c932ef41c0f2663aa8b0ca0342d444d842c06914aa0a7e352d0bada6" +dependencies = [ + "libc", + "redox_users", + "winapi 0.3.9", +] + +[[package]] +name = "dirs-sys-next" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ebda144c4fe02d1f7ea1a7d9641b6fc6b580adcfa024ae48797ecdeb6825b4d" +dependencies = [ + "libc", + "redox_users", + "winapi 0.3.9", +] + +[[package]] +name = "discard" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "212d0f5754cb6769937f4501cc0e67f4f4483c8d2c3e1e922ee9edbe4ab4c7c0" + +[[package]] +name = "dlib" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac1b7517328c04c2aa68422fc60a41b92208182142ed04a25879c26c8f878794" +dependencies = [ + "libloading", +] + +[[package]] +name = "dlv-list" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0688c2a7f92e427f44895cd63841bff7b29f8d7a1648b9e7e07a4a365b2e1257" + +[[package]] +name = "doc-comment" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fea41bba32d969b513997752735605054bc0dfa92b4c56bf1189f2e174be7a10" + +[[package]] +name = "dotenvy" +version = "0.15.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1aaf95b3e5c8f23aa320147307562d361db0ae0d51242340f558153b4eb2439b" + +[[package]] +name = "dunce" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"0bd4b30a6560bbd9b4620f4de34c3f14f60848e58a9b7216801afcb4c7b31c3c" + +[[package]] +name = "dwrote" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "439a1c2ba5611ad3ed731280541d36d2e9c4ac5e7fb818a27b604bdc5a6aa65b" +dependencies = [ + "lazy_static", + "libc", + "winapi 0.3.9", + "wio", +] + +[[package]] +name = "dyn-clone" +version = "1.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68b0cf012f1230e43cd00ebb729c6bb58707ecfa8ad08b52ef3a4ccd2697fc30" + +[[package]] +name = "easy-error" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "04cc9717c61d2908f50d16ebb5677c7e82ea2bdf7cb52f66b30fe079f3212e16" + +[[package]] +name = "either" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7fcaabb2fef8c910e7f4c7ce9f67a1283a1715879a7c230ca9d6d1ae31f16d91" + +[[package]] +name = "encode_unicode" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a357d28ed41a50f9c765dbfe56cbc04a64e53e5fc58ba79fbc34c10ef3df831f" + +[[package]] +name = "encoding_rs" +version = "0.8.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "071a31f4ee85403370b58aca746f01041ede6f0da2730960ad001edc2b71b394" +dependencies = [ + "cfg-if 1.0.0", +] + +[[package]] +name = "enum-iterator" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4eeac5c5edb79e4e39fe8439ef35207780a11f69c52cbe424ce3dfad4cb78de6" +dependencies = [ + "enum-iterator-derive 0.7.0", +] + +[[package]] +name = "enum-iterator" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "706d9e7cf1c7664859d79cd524e4e53ea2b67ea03c98cc2870c5e539695d597e" +dependencies = [ + "enum-iterator-derive 1.2.0", +] + +[[package]] +name = "enum-iterator-derive" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c134c37760b27a871ba422106eedbb8247da973a09e82558bf26d619c882b159" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "enum-iterator-derive" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "355f93763ef7b0ae1c43c4d8eccc9d5848d84ad1a1d8ce61c421d1ac85a19d05" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "enumset" +version = "1.0.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19be8061a06ab6f3a6cf21106c873578bf01bd42ad15e0311a9c76161cb1c753" +dependencies = [ + "enumset_derive", +] + +[[package]] +name = "enumset_derive" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "03e7b551eba279bf0fa88b83a46330168c1560a52a94f5126f892f0b364ab3e0" +dependencies = [ + "darling", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "env_logger" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a19187fea3ac7e84da7dacf48de0c45d63c6a76f9490dae389aead16c243fce3" +dependencies = [ + "atty", + "humantime", + "log", + "regex", + "termcolor", +] + +[[package]] +name = "env_logger" +version = "0.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a12e6657c4c97ebab115a42dcee77225f7f482cdd841cf7088c657a42e9e00e7" +dependencies = [ + "atty", + "humantime", + "log", + "regex", + "termcolor", +] + +[[package]] +name = 
"env_logger" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85cdab6a89accf66733ad5a1693a4dcced6aeff64602b634530dd73c1f3ee9f0" +dependencies = [ + "humantime", + "is-terminal", + "log", + "regex", + "termcolor", +] + +[[package]] +name = "erased-serde" +version = "0.3.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4f2b0c2380453a92ea8b6c8e5f64ecaafccddde8ceab55ff7a8ac1029f894569" +dependencies = [ + "serde", +] + +[[package]] +name = "errno" +version = "0.2.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f639046355ee4f37944e44f60642c6f3a7efa3cf6b78c78a0d989a8ce6c396a1" +dependencies = [ + "errno-dragonfly", + "libc", + "winapi 0.3.9", +] + +[[package]] +name = "errno" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4bcfec3a70f97c962c307b2d2c56e358cf1d00b558d74262b5f929ee8cc7e73a" +dependencies = [ + "errno-dragonfly", + "libc", + "windows-sys 0.48.0", +] + +[[package]] +name = "errno-dragonfly" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa68f1b12764fab894d2755d2518754e71b4fd80ecfb822714a1206c2aab39bf" +dependencies = [ + "cc", + "libc", +] + +[[package]] +name = "event-listener" +version = "2.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" + +[[package]] +name = "fallible-iterator" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4443176a9f2c162692bd3d352d745ef9413eec5782a80d8fd6f8a1ac692a07f7" + +[[package]] +name = "fallible_collections" +version = "0.4.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9acf77205554f3cfeca94a4b910e159ad9824e8c2d164de02b3f12495cc1074d" +dependencies = [ + "hashbrown 0.13.2", +] + +[[package]] +name = "fastrand" +version = "1.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e51093e27b0797c359783294ca4f0a911c270184cb10f85783b118614a1501be" +dependencies = [ + "instant", +] + +[[package]] +name = "fern" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9f0c14694cbd524c8720dd69b0e3179344f04ebb5f90f2e4a440c6ea3b2f1ee" +dependencies = [ + "log", +] + +[[package]] +name = "filetime" +version = "0.2.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5cbc844cecaee9d4443931972e1289c8ff485cb4cc2767cb03ca139ed6885153" +dependencies = [ + "cfg-if 1.0.0", + "libc", + "redox_syscall 0.2.16", + "windows-sys 0.48.0", +] + +[[package]] +name = "fixedbitset" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" + +[[package]] +name = "flate2" +version = "1.0.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8a2db397cb1c8772f31494cb8917e48cd1e64f0fa7efac59fbd741a0a8ce841" +dependencies = [ + "crc32fast", + "miniz_oxide", +] + +[[package]] +name = "float-ord" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7bad48618fdb549078c333a7a8528acb57af271d0433bdecd523eb620628364e" + +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + 
+[[package]]
+name = "font-kit"
+version = "0.11.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "21fe28504d371085fae9ac7a3450f0b289ab71e07c8e57baa3fb68b9e57d6ce5"
+dependencies = [
+ "bitflags 1.3.2",
+ "byteorder",
+ "core-foundation",
+ "core-graphics",
+ "core-text",
+ "dirs-next",
+ "dwrote",
+ "float-ord",
+ "freetype",
+ "lazy_static",
+ "libc",
+ "log",
+ "pathfinder_geometry",
+ "pathfinder_simd",
+ "walkdir",
+ "winapi 0.3.9",
+ "yeslogic-fontconfig-sys",
+]
+
+[[package]]
+name = "foreign-types"
+version = "0.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1"
+dependencies = [
+ "foreign-types-shared",
+]
+
+[[package]]
+name = "foreign-types-shared"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b"
+
+[[package]]
+name = "form_urlencoded"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a9c384f161156f5260c24a097c56119f9be8c798586aecc13afbcbe7b7e26bf8"
+dependencies = [
+ "percent-encoding",
+]
+
+[[package]]
+name = "freetype"
+version = "0.7.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bee38378a9e3db1cc693b4f88d166ae375338a0ff75cb8263e1c601d51f35dc6"
+dependencies = [
+ "freetype-sys",
+ "libc",
+]
+
+[[package]]
+name = "freetype-sys"
+version = "0.13.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a37d4011c0cc628dfa766fcc195454f4b068d7afdc2adfd28861191d866e731a"
+dependencies = [
+ "cmake",
+ "libc",
+ "pkg-config",
+]
+
+[[package]]
+name = "from_variant"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1d449976075322384507443937df2f1d5577afbf4282f12a5a66ef29fa3e6307"
+dependencies = [
+ "pmutil",
+ "proc-macro2",
+ "swc_macros_common",
+ "syn 1.0.109",
+]
+
+[[package]]
+name = "fs_extra"
+version = "1.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "42703706b716c37f96a77aea830392ad231f44c9e9a67872fa5548707e11b11c"
+
+[[package]]
+name = "fsevent"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5ab7d1bd1bd33cc98b0889831b72da23c0aa4df9cec7e0702f46ecea04b35db6"
+dependencies = [
+ "bitflags 1.3.2",
+ "fsevent-sys",
+]
+
+[[package]]
+name = "fsevent-sys"
+version = "2.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f41b048a94555da0f42f1d632e2e19510084fb8e303b0daa2816e733fb3644a0"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "fuchsia-cprng"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba"
+
+[[package]]
+name = "fuchsia-zircon"
+version = "0.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2e9763c69ebaae630ba35f74888db465e49e259ba1bc0eda7d06f4a067615d82"
+dependencies = [
+ "bitflags 1.3.2",
+ "fuchsia-zircon-sys",
+]
+
+[[package]]
+name = "fuchsia-zircon-sys"
+version = "0.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3dcaa9ae7725d12cdb85b3ad99a434db70b468c09ded17e012d86b5c1010f7a7"
+
+[[package]]
+name = "futures"
+version = "0.3.28"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "23342abe12aba583913b2e62f22225ff9c950774065e4bfb61a19cd9770fec40"
"23342abe12aba583913b2e62f22225ff9c950774065e4bfb61a19cd9770fec40" +dependencies = [ + "futures-channel", + "futures-core", + "futures-executor", + "futures-io", + "futures-sink", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-channel" +version = "0.3.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "955518d47e09b25bbebc7a18df10b81f0c766eaf4c4f1cccef2fca5f2a4fb5f2" +dependencies = [ + "futures-core", + "futures-sink", +] + +[[package]] +name = "futures-core" +version = "0.3.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4bca583b7e26f571124fe5b7561d49cb2868d79116cfa0eefce955557c6fee8c" + +[[package]] +name = "futures-executor" +version = "0.3.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ccecee823288125bd88b4d7f565c9e58e41858e47ab72e8ea2d64e93624386e0" +dependencies = [ + "futures-core", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-io" +version = "0.3.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fff74096e71ed47f8e023204cfd0aa1289cd54ae5430a9523be060cdb849964" + +[[package]] +name = "futures-lite" +version = "1.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7694489acd39452c77daa48516b894c153f192c3578d5a839b62c58099fcbf48" +dependencies = [ + "fastrand", + "futures-core", + "futures-io", + "memchr", + "parking", + "pin-project-lite", + "waker-fn", +] + +[[package]] +name = "futures-macro" +version = "0.3.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "89ca545a94061b6365f2c7355b4b32bd20df3ff95f02da9329b34ccc3bd6ee72" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.15", +] + +[[package]] +name = "futures-retry" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fde5a672a61f96552aa5ed9fd9c81c3fbdae4be9b1e205d6eaf17c83705adc0f" +dependencies = [ + "futures", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "futures-sink" +version = "0.3.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f43be4fe21a13b9781a69afa4985b0f6ee0e1afab2c6f454a8cf30e2b2237b6e" + +[[package]] +name = "futures-task" +version = "0.3.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76d3d132be6c0e6aa1534069c705a74a5997a356c0dc2f86a47765e5617c5b65" + +[[package]] +name = "futures-timer" +version = "3.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e64b03909df88034c26dc1547e8970b91f98bdb65165d6a4e9110d94263dbb2c" + +[[package]] +name = "futures-util" +version = "0.3.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26b01e40b772d54cf6c6d721c1d1abd0647a0106a12ecaa1c186273392a69533" +dependencies = [ + "futures-channel", + "futures-core", + "futures-io", + "futures-macro", + "futures-sink", + "futures-task", + "memchr", + "pin-project-lite", + "pin-utils", + "slab", +] + +[[package]] +name = "fuzzy-matcher" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "54614a3312934d066701a80f20f15fa3b56d67ac7722b39eea5b4c9dd1d66c94" +dependencies = [ + "thread_local", +] + +[[package]] +name = "fxhash" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c31b6d751ae2c7f11320402d34e41349dd1016f8d5d45e48c4312bc8625af50c" +dependencies = [ + "byteorder", +] + +[[package]] +name = "generic-array" 
+version = "0.14.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" +dependencies = [ + "typenum", + "version_check", +] + +[[package]] +name = "getrandom" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c85e1d9ab2eadba7e5040d4e09cbd6d072b76a557ad64e797c2cb9d4da21d7e4" +dependencies = [ + "cfg-if 1.0.0", + "js-sys", + "libc", + "wasi 0.11.0+wasi-snapshot-preview1", + "wasm-bindgen", +] + +[[package]] +name = "getset" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e45727250e75cc04ff2846a66397da8ef2b3db8e40e0cef4df67950a07621eb9" +dependencies = [ + "proc-macro-error", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "gif" +version = "0.11.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3edd93c6756b4dfaf2709eafcc345ba2636565295c198a9cfbf75fa5e3e00b06" +dependencies = [ + "color_quant", + "weezl", +] + +[[package]] +name = "gimli" +version = "0.26.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22030e2c5a68ec659fde1e949a745124b48e6fa8b045b7ed5bd1fe4ccc5c4e5d" +dependencies = [ + "fallible-iterator", + "indexmap", + "stable_deref_trait", +] + +[[package]] +name = "gimli" +version = "0.27.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ad0a93d233ebf96623465aad4046a8d3aa4da22d4f4beba5388838c8a434bbb4" + +[[package]] +name = "git2" +version = "0.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2994bee4a3a6a51eb90c218523be382fd7ea09b16380b9312e9dbe955ff7c7d1" +dependencies = [ + "bitflags 1.3.2", + "libc", + "libgit2-sys", + "log", + "url", +] + +[[package]] +name = "git2" +version = "0.16.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ccf7f68c2995f392c49fffb4f95ae2c873297830eb25c6bc4c114ce8f4562acc" +dependencies = [ + "bitflags 1.3.2", + "libc", + "libgit2-sys", + "log", + "url", +] + +[[package]] +name = "glob" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" + +[[package]] +name = "glob-match" +version = "0.2.1" +dependencies = [ + "criterion", + "glob", + "globset", + "test-case", + "unic-segment", +] + +[[package]] +name = "glob-match" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9985c9503b412198aa4197559e9a318524ebc4519c229bfa05a535828c950b9d" + +[[package]] +name = "globset" +version = "0.4.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "029d74589adefde59de1a0c4f4732695c32805624aec7b68d91503d4dba79afc" +dependencies = [ + "aho-corasick", + "bstr", + "fnv", + "log", + "regex", +] + +[[package]] +name = "gloo-timers" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b995a66bb87bebce9a0f4a95aed01daca4872c050bfcb21653361c03bc35e5c" +dependencies = [ + "futures-channel", + "futures-core", + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "h2" +version = "0.3.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5be7b54589b581f624f566bf5d8eb2bab1db736c51528720b6bd36b96b55924d" +dependencies = [ + "bytes", + "fnv", + "futures-core", + "futures-sink", + "futures-util", + "http", + "indexmap", + "slab", + "tokio", + "tokio-util", + 
"tracing", +] + +[[package]] +name = "half" +version = "1.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eabb4a44450da02c90444cf74558da904edde8fb4e9035a9a6a4e15445af0bd7" + +[[package]] +name = "handlebars" +version = "4.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "035ef95d03713f2c347a72547b7cd38cbc9af7cd51e6099fb62d586d4a6dee3a" +dependencies = [ + "log", + "pest", + "pest_derive", + "serde", + "serde_json", + "thiserror", +] + +[[package]] +name = "hash32" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0c35f58762feb77d74ebe43bdbc3210f09be9fe6742234d573bacc26ed92b67" +dependencies = [ + "byteorder", +] + +[[package]] +name = "hashbrown" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" +dependencies = [ + "ahash 0.7.6", +] + +[[package]] +name = "hashbrown" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43a3c133739dddd0d2990f9a4bdf8eb4b21ef50e4851ca85ab661199821d510e" +dependencies = [ + "ahash 0.8.3", +] + +[[package]] +name = "hdrhistogram" +version = "7.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f19b9f54f7c7f55e31401bb647626ce0cf0f67b0004982ce815b3ee72a02aa8" +dependencies = [ + "base64 0.13.1", + "byteorder", + "flate2", + "nom", + "num-traits", +] + +[[package]] +name = "heapless" +version = "0.7.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db04bc24a18b9ea980628ecf00e6c0264f3c1426dac36c00cb49b6fbad8b0743" +dependencies = [ + "atomic-polyfill", + "hash32", + "rustc_version 0.4.0", + "spin 0.9.8", + "stable_deref_trait", +] + +[[package]] +name = "heck" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d621efb26863f0e9924c6ac577e8275e5e6b77455db64ffa6c65c904e9e132c" +dependencies = [ + "unicode-segmentation", +] + +[[package]] +name = "heck" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" + +[[package]] +name = "hermit-abi" +version = "0.1.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33" +dependencies = [ + "libc", +] + +[[package]] +name = "hermit-abi" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee512640fe35acbfb4bb779db6f0d80704c2cacfa2e39b601ef3e3f47d1ae4c7" +dependencies = [ + "libc", +] + +[[package]] +name = "hermit-abi" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fed44880c466736ef9a5c5b5facefb5ed0785676d0c02d612db14e54f0d84286" + +[[package]] +name = "hex" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" + +[[package]] +name = "hostname" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c731c3e10504cc8ed35cfe2f1db4c9274c3d35fa486e3b31df46f068ef3e867" +dependencies = [ + "libc", + "match_cfg", + "winapi 0.3.9", +] + +[[package]] +name = "http" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"bd6effc99afb63425aff9b05836f029929e345a6148a14b7ecd5ab67af944482" +dependencies = [ + "bytes", + "fnv", + "itoa", +] + +[[package]] +name = "http-body" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d5f38f16d184e36f2408a55281cd658ecbd3ca05cce6d6510a176eca393e26d1" +dependencies = [ + "bytes", + "http", + "pin-project-lite", +] + +[[package]] +name = "httparse" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d897f394bad6a705d5f4104762e116a75639e470d80901eed05a860a95cb1904" + +[[package]] +name = "httpdate" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4a1e36c821dbe04574f602848a19f742f4fb3c98d40449f11bcad18d6b17421" + +[[package]] +name = "httpmock" +version = "0.6.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c6b56b6265f15908780cbee987912c1e98dbca675361f748291605a8a3a1df09" +dependencies = [ + "assert-json-diff", + "async-object-pool", + "async-trait", + "base64 0.13.1", + "crossbeam-utils", + "form_urlencoded", + "futures-util", + "hyper", + "isahc", + "lazy_static", + "levenshtein", + "log", + "regex", + "serde", + "serde_json", + "serde_regex", + "similar", + "tokio", + "url", +] + +[[package]] +name = "humantime" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" + +[[package]] +name = "hyper" +version = "0.14.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc5e554ff619822309ffd57d8734d77cd5ce6238bc956f037ea06c58238c9899" +dependencies = [ + "bytes", + "futures-channel", + "futures-core", + "futures-util", + "h2", + "http", + "http-body", + "httparse", + "httpdate", + "itoa", + "pin-project-lite", + "socket2", + "tokio", + "tower-service", + "tracing", + "want", +] + +[[package]] +name = "hyper-rustls" +version = "0.23.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1788965e61b367cd03a62950836d5cd41560c3577d90e40e0819373194d1661c" +dependencies = [ + "http", + "hyper", + "rustls", + "tokio", + "tokio-rustls", +] + +[[package]] +name = "hyper-timeout" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbb958482e8c7be4bc3cf272a766a2b0bf1a6755e7a6ae777f017a31d11b13b1" +dependencies = [ + "hyper", + "pin-project-lite", + "tokio", + "tokio-io-timeout", +] + +[[package]] +name = "hyper-tls" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" +dependencies = [ + "bytes", + "hyper", + "native-tls", + "tokio", + "tokio-native-tls", +] + +[[package]] +name = "hyper-tungstenite" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "880b8b1c98a5ec2a505c7c90db6d3f6f1f480af5655d9c5b55facc9382a5a5b5" +dependencies = [ + "hyper", + "pin-project", + "tokio", + "tokio-tungstenite", + "tungstenite 0.18.0", +] + +[[package]] +name = "iana-time-zone" +version = "0.1.56" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0722cd7114b7de04316e7ea5456a0bbb20e4adb46fd27a3697adb812cff0f37c" +dependencies = [ + "android_system_properties", + "core-foundation-sys", + "iana-time-zone-haiku", + "js-sys", + "wasm-bindgen", + "windows", +] + +[[package]] +name = "iana-time-zone-haiku" +version = "0.1.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "0703ae284fc167426161c2e3f1da3ea71d94b21bedbcc9494e92b28e334e3dca" +dependencies = [ + "cxx", + "cxx-build", +] + +[[package]] +name = "id-arena" +version = "2.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "25a2bc672d1148e28034f176e01fffebb08b35768468cc954630da77a1449005" + +[[package]] +name = "ident_case" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" + +[[package]] +name = "idna" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e14ddfc70884202db2244c223200c204c2bda1bc6e0998d11b5e024d657209e6" +dependencies = [ + "unicode-bidi", + "unicode-normalization", +] + +[[package]] +name = "if_chain" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb56e1aa765b4b4f3aadfab769793b7087bb03a4ea4920644a6d238e2df5b9ed" + +[[package]] +name = "image" +version = "0.24.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "527909aa81e20ac3a44803521443a765550f09b5130c2c2fa1ea59c2f8f50a3a" +dependencies = [ + "bytemuck", + "byteorder", + "color_quant", + "dav1d", + "dcv-color-primitives", + "jpeg-decoder", + "mp4parse", + "num-rational", + "num-traits", + "png", + "ravif", + "rgb", + "webp", +] + +[[package]] +name = "imgref" +version = "1.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2cf49df1085dcfb171460e4592597b84abe50d900fb83efb6e41b20fefd6c2c" + +[[package]] +name = "include_dir" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "18762faeff7122e89e0857b02f7ce6fcc0d101d5e9ad2ad7846cc01d61b7f19e" +dependencies = [ + "include_dir_macros", +] + +[[package]] +name = "include_dir_macros" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b139284b5cf57ecfa712bcc66950bb635b31aff41c188e8a4cfc758eca374a3f" +dependencies = [ + "proc-macro2", + "quote", +] + +[[package]] +name = "indexmap" +version = "1.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" +dependencies = [ + "autocfg", + "hashbrown 0.12.3", + "rayon", + "serde", +] + +[[package]] +name = "indicatif" +version = "0.17.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cef509aa9bc73864d6756f0d34d35504af3cf0844373afe9b8669a5b8005a729" +dependencies = [ + "console", + "number_prefix", + "portable-atomic", + "unicode-width", +] + +[[package]] +name = "indoc" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9f2cb48b81b1dc9f39676bf99f5499babfec7cd8fe14307f7b3d747208fb5690" + +[[package]] +name = "inotify" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4816c66d2c8ae673df83366c18341538f234a26d65a9ecea5c348b453ac1d02f" +dependencies = [ + "bitflags 1.3.2", + "inotify-sys", + "libc", +] + +[[package]] +name = "inotify" +version = "0.9.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8069d3ec154eb856955c1c0fbffefbf5f3c40a104ec912d4797314c1801abff" +dependencies = [ + "bitflags 1.3.2", + "inotify-sys", + "libc", +] + +[[package]] +name = "inotify-sys" +version = "0.1.5" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "e05c02b5e89bff3b946cedeca278abc628fe811e604f027c45a8aa3cf793d0eb" +dependencies = [ + "libc", +] + +[[package]] +name = "inquire" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3a94f0659efe59329832ba0452d3ec753145fc1fb12a8e1d60de4ccf99f5364" +dependencies = [ + "bitflags 1.3.2", + "crossterm 0.25.0", + "dyn-clone", + "lazy_static", + "newline-converter", + "thiserror", + "unicode-segmentation", + "unicode-width", +] + +[[package]] +name = "instant" +version = "0.1.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c" +dependencies = [ + "cfg-if 1.0.0", +] + +[[package]] +name = "interpolate_name" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b4b35f4a811037cfdcd44c5db40678464b2d5d248fc1abeeaaa125b370d47f17" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "io-lifetimes" +version = "1.0.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c66c74d2ae7e79a5a8f7ac924adbe38ee42a859c6539ad869eb51f0b52dc220" +dependencies = [ + "hermit-abi 0.3.1", + "libc", + "windows-sys 0.48.0", +] + +[[package]] +name = "iovec" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2b3ea6ff95e175473f8ffe6a7eb7c00d054240321b84c57051175fe3c1e075e" +dependencies = [ + "libc", +] + +[[package]] +name = "ipnet" +version = "2.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30e22bd8629359895450b59ea7a776c850561b96a3b1d31321c1949d9e6c9146" + +[[package]] +name = "is-macro" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a7d079e129b77477a49c5c4f1cfe9ce6c2c909ef52520693e8e811a714c7b20" +dependencies = [ + "Inflector", + "pmutil", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "is-terminal" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8687c819457e979cc940d09cb16e42a1bf70aa6b60a549de6d3a62a0ee90c69e" +dependencies = [ + "hermit-abi 0.3.1", + "io-lifetimes", + "rustix 0.36.11", + "windows-sys 0.45.0", +] + +[[package]] +name = "is_ci" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "616cde7c720bb2bb5824a224687d8f77bfd38922027f01d825cd7453be5099fb" + +[[package]] +name = "isahc" +version = "1.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "334e04b4d781f436dc315cb1e7515bd96826426345d498149e4bde36b67f8ee9" +dependencies = [ + "async-channel", + "castaway", + "crossbeam-utils", + "curl", + "curl-sys", + "encoding_rs", + "event-listener", + "futures-lite", + "http", + "log", + "mime", + "once_cell", + "polling", + "slab", + "sluice", + "tracing", + "tracing-futures", + "url", + "waker-fn", +] + +[[package]] +name = "itertools" +version = "0.10.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" +dependencies = [ + "either", +] + +[[package]] +name = "itoa" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "453ad9f582a441959e5f0d088b02ce04cfe8d51a8eaf077f12ac6d3e94164ca6" + +[[package]] +name = "ivf" +version = "0.1.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fb01c64361a3a67b511439f0dcd54fa3aa5581c861a17e2ede76e46b9c5b7e2" +dependencies = [ + "bitstream-io", +] + +[[package]] +name = "jni" +version = "0.21.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a87aa2bb7d2af34197c04845522473242e1aa17c12f4935d5856491a7fb8c97" +dependencies = [ + "cesu8", + "cfg-if 1.0.0", + "combine", + "jni-sys", + "log", + "thiserror", + "walkdir", + "windows-sys 0.45.0", +] + +[[package]] +name = "jni-sys" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8eaf4bc02d17cbdd7ff4c7438cafcdf7fb9a4613313ad11b4f8fefe7d3fa0130" + +[[package]] +name = "jobserver" +version = "0.1.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "936cfd212a0155903bcbc060e316fb6cc7cbf2e1907329391ebadc1fe0ce77c2" +dependencies = [ + "libc", +] + +[[package]] +name = "jpeg-decoder" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc0000e42512c92e31c2252315bda326620a4e034105e900c98ec492fa077b3e" + +[[package]] +name = "js-sys" +version = "0.3.61" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "445dde2150c55e483f3d8416706b97ec8e8237c307e5b7b4b8dd15e6af2a0730" +dependencies = [ + "wasm-bindgen", +] + +[[package]] +name = "json5" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96b0db21af676c1ce64250b5f40f3ce2cf27e4e47cb91ed91eb6fe9350b430c1" +dependencies = [ + "pest", + "pest_derive", + "serde", +] + +[[package]] +name = "jsonc-parser" +version = "0.21.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3a1853e40333206f9a685358046d13ab200169e3ee573019bddf0ede0dc29307" +dependencies = [ + "serde_json", +] + +[[package]] +name = "kernel32-sys" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d" +dependencies = [ + "winapi 0.2.8", + "winapi-build", +] + +[[package]] +name = "kqueue" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c8fc60ba15bf51257aa9807a48a61013db043fcf3a78cb0d916e8e396dcad98" +dependencies = [ + "kqueue-sys", + "libc", +] + +[[package]] +name = "kqueue-sys" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8367585489f01bc55dd27404dcf56b95e6da061a256a666ab23be9ba96a2e587" +dependencies = [ + "bitflags 1.3.2", + "libc", +] + +[[package]] +name = "kv-log-macro" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0de8b303297635ad57c9f5059fd9cee7a47f8e8daa09df0fcd07dd39fb22977f" +dependencies = [ + "log", +] + +[[package]] +name = "lab" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf36173d4167ed999940f804952e6b08197cae5ad5d572eb4db150ce8ad5d58f" + +[[package]] +name = "lazy_static" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" + +[[package]] +name = "lazycell" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" + +[[package]] +name = "leb128" +version = "0.2.5" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "884e2677b40cc8c339eaefcb701c32ef1fd2493d71118dc0ca4b6a736c93bd67" + +[[package]] +name = "levenshtein" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db13adb97ab515a3691f56e4dbab09283d0b86cb45abd991d8634a9d6f501760" + +[[package]] +name = "lexical" +version = "6.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c7aefb36fd43fef7003334742cbf77b243fcd36418a1d1bdd480d613a67968f6" +dependencies = [ + "lexical-core", +] + +[[package]] +name = "lexical-core" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2cde5de06e8d4c2faabc400238f9ae1c74d5412d03a7bd067645ccbc47070e46" +dependencies = [ + "lexical-parse-float", + "lexical-parse-integer", + "lexical-util", + "lexical-write-float", + "lexical-write-integer", +] + +[[package]] +name = "lexical-parse-float" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "683b3a5ebd0130b8fb52ba0bdc718cc56815b6a097e28ae5a6997d0ad17dc05f" +dependencies = [ + "lexical-parse-integer", + "lexical-util", + "static_assertions", +] + +[[package]] +name = "lexical-parse-integer" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d0994485ed0c312f6d965766754ea177d07f9c00c9b82a5ee62ed5b47945ee9" +dependencies = [ + "lexical-util", + "static_assertions", +] + +[[package]] +name = "lexical-sort" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c09e4591611e231daf4d4c685a66cb0410cc1e502027a20ae55f2bb9e997207a" +dependencies = [ + "any_ascii", +] + +[[package]] +name = "lexical-util" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5255b9ff16ff898710eb9eb63cb39248ea8a5bb036bea8085b1a767ff6c4e3fc" +dependencies = [ + "static_assertions", +] + +[[package]] +name = "lexical-write-float" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "accabaa1c4581f05a3923d1b4cfd124c329352288b7b9da09e766b0668116862" +dependencies = [ + "lexical-util", + "lexical-write-integer", + "static_assertions", +] + +[[package]] +name = "lexical-write-integer" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e1b6f3d1f4422866b68192d62f77bc5c700bee84f3069f2469d7bc8c77852446" +dependencies = [ + "lexical-util", + "static_assertions", +] + +[[package]] +name = "libc" +version = "0.2.141" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3304a64d199bb964be99741b7a14d26972741915b3649639149b2479bb46f4b5" + +[[package]] +name = "libfuzzer-sys" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fcf184a4b6b274f82a5df6b357da6055d3e82272327bba281c28bbba6f1664ef" +dependencies = [ + "arbitrary", + "cc", +] + +[[package]] +name = "libgit2-sys" +version = "0.14.2+1.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f3d95f6b51075fe9810a7ae22c7095f12b98005ab364d8544797a825ce946a4" +dependencies = [ + "cc", + "libc", + "libz-sys", + "pkg-config", +] + +[[package]] +name = "libloading" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b67380fd3b2fbe7527a606e18729d21c6f3951633d0500574c4dc22d2d638b9f" +dependencies = [ + "cfg-if 1.0.0", + "winapi 0.3.9", +] + +[[package]] +name = "libm" 
+version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "348108ab3fba42ec82ff6e9564fc4ca0247bdccdc68dd8af9764bbc79c3c8ffb" + +[[package]] +name = "libmimalloc-sys" +version = "0.1.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dd8c7cbf8b89019683667e347572e6d55a7df7ea36b0c4ce69961b0cde67b174" +dependencies = [ + "cc", + "libc", +] + +[[package]] +name = "libnghttp2-sys" +version = "0.1.7+1.45.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57ed28aba195b38d5ff02b9170cbff627e336a20925e43b4945390401c5dc93f" +dependencies = [ + "cc", + "libc", +] + +[[package]] +name = "libwebp-sys" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "439fd1885aa28937e7edcd68d2e793cb4a22f8733460d2519fbafd2b215672bf" +dependencies = [ + "cc", +] + +[[package]] +name = "libz-sys" +version = "1.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9702761c3935f8cc2f101793272e202c72b99da8f4224a19ddcf1279a6450bbf" +dependencies = [ + "cc", + "libc", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "link-cplusplus" +version = "1.0.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ecd207c9c713c34f95a097a5b029ac2ce6010530c7b49d7fea24d977dede04f5" +dependencies = [ + "cc", +] + +[[package]] +name = "linked-hash-map" +version = "0.5.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f" + +[[package]] +name = "linked_hash_set" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "47186c6da4d81ca383c7c47c1bfc80f4b95f4720514d860a5407aaf4233f9588" +dependencies = [ + "linked-hash-map", +] + +[[package]] +name = "linux-raw-sys" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f051f77a7c8e6957c0696eac88f26b0117e54f52d3fc682ab19397a8812846a4" + +[[package]] +name = "linux-raw-sys" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd550e73688e6d578f0ac2119e32b797a327631a42f9433e59d02e139c8df60d" + +[[package]] +name = "lock_api" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "435011366fe56583b16cf956f9df0095b405b82d76425bc8981c0e22e60ec4df" +dependencies = [ + "autocfg", + "scopeguard", +] + +[[package]] +name = "log" +version = "0.4.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e" +dependencies = [ + "cfg-if 1.0.0", + "value-bag", +] + +[[package]] +name = "loop9" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a703804431e5927454bcaf2b2a162595e95db931130c2728c18d050090f69940" +dependencies = [ + "imgref", +] + +[[package]] +name = "lru" +version = "0.7.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e999beba7b6e8345721bd280141ed958096a2e4abdf74f67ff4ce49b4b54e47a" +dependencies = [ + "hashbrown 0.12.3", +] + +[[package]] +name = "mach" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b823e83b2affd8f40a9ee8c29dbc56404c1e34cd2710921f2801e2cf29527afa" +dependencies = [ + "libc", +] + +[[package]] +name = "malloc_buf" +version = "0.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "62bb907fe88d54d8d9ce32a3cceab4218ed2f6b7d35617cafe9adf84e43919cb" +dependencies = [ + "libc", +] + +[[package]] +name = "markdown" +version = "1.0.0-alpha.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "98de49c677e95e00eaa74c42a0b07ea55e1e0b1ebca5b2cbc7657f288cd714eb" +dependencies = [ + "unicode-id", +] + +[[package]] +name = "match_cfg" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ffbee8634e0d45d258acb448e7eaab3fce7a0a467395d4d9f228e3c1f01fb2e4" + +[[package]] +name = "matchers" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558" +dependencies = [ + "regex-automata", +] + +[[package]] +name = "matchit" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b87248edafb776e59e6ee64a79086f65890d3510f2c656c000bf2a7e8a0aea40" + +[[package]] +name = "maybe-rayon" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ea1f30cedd69f0a2954655f7188c6a834246d2bcf1e315e2ac40c4b24dc9519" +dependencies = [ + "cfg-if 1.0.0", + "rayon", +] + +[[package]] +name = "md4" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7da5ac363534dce5fabf69949225e174fbf111a498bf0ff794c8ea1fba9f3dda" +dependencies = [ + "digest", +] + +[[package]] +name = "mdxjs" +version = "0.1.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fe9bef082151ac4aba3884306e47fd2c1afcc2e208a9cb9a67c4ecfb96bb5d0c" +dependencies = [ + "markdown", + "serde", + "swc_core", +] + +[[package]] +name = "memchr" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d" + +[[package]] +name = "memmap2" +version = "0.5.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "83faa42c0a078c393f6b29d5db232d8be22776a891f8f56e5284faee4a20b327" +dependencies = [ + "libc", +] + +[[package]] +name = "memoffset" +version = "0.6.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5aa361d4faea93603064a027415f07bd8e1d5c88c9fbf68bf56a285428fd79ce" +dependencies = [ + "autocfg", +] + +[[package]] +name = "memoffset" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5de893c32cde5f383baa4c04c5d6dbdd735cfd4a794b0debdb2bb1b421da5ff4" +dependencies = [ + "autocfg", +] + +[[package]] +name = "memoffset" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d61c719bcfbcf5d62b3a09efa6088de8c54bc0bfcd3ea7ae39fcc186108b8de1" +dependencies = [ + "autocfg", +] + +[[package]] +name = "miette" +version = "4.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1c90329e44f9208b55f45711f9558cec15d7ef8295cc65ecd6d4188ae8edc58c" +dependencies = [ + "atty", + "backtrace", + "miette-derive", + "once_cell", + "owo-colors", + "supports-color", + "supports-hyperlinks", + "supports-unicode", + "terminal_size 0.1.17", + "textwrap 0.15.2", + "thiserror", + "unicode-width", +] + +[[package]] +name = "miette-derive" +version = "4.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6b5bc45b761bcf1b5e6e6c4128cd93b84c218721a8d9b894aa0aff4ed180174c" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] 
+ +[[package]] +name = "mimalloc" +version = "0.1.34" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9dcb174b18635f7561a0c6c9fc2ce57218ac7523cf72c50af80e2d79ab8f3ba1" +dependencies = [ + "libmimalloc-sys", +] + +[[package]] +name = "mimalloc-rust" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6973866e0bc6504c03a16b6817b7e70839cc8a1dbd5d6dab00c65d8034868d8b" +dependencies = [ + "cty", + "mimalloc-rust-sys", +] + +[[package]] +name = "mimalloc-rust-sys" +version = "1.7.6-source" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a50daf45336b979a202a19f53b4b382f2c4bd50f392a8dbdb4c6c56ba5dfa64" +dependencies = [ + "cc", + "cty", +] + +[[package]] +name = "mime" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" + +[[package]] +name = "mime_guess" +version = "2.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4192263c238a5f0d0c6bfd21f336a313a4ce1c450542449ca191bb657b4642ef" +dependencies = [ + "mime", + "unicase", +] + +[[package]] +name = "minimal-lexical" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" + +[[package]] +name = "miniz_oxide" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b275950c28b37e794e8c55d88aeb5e139d0ce23fdbbeda68f8d7174abdf9e8fa" +dependencies = [ + "adler", +] + +[[package]] +name = "mio" +version = "0.6.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4afd66f5b91bf2a3bc13fad0e21caedac168ca4c707504e75585648ae80e4cc4" +dependencies = [ + "cfg-if 0.1.10", + "fuchsia-zircon", + "fuchsia-zircon-sys", + "iovec", + "kernel32-sys", + "libc", + "log", + "miow", + "net2", + "slab", + "winapi 0.2.8", +] + +[[package]] +name = "mio" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b9d9a46eff5b4ff64b45a9e316a6d1e0bc719ef429cbec4dc630684212bfdf9" +dependencies = [ + "libc", + "log", + "wasi 0.11.0+wasi-snapshot-preview1", + "windows-sys 0.45.0", +] + +[[package]] +name = "mio-extras" +version = "2.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "52403fe290012ce777c4626790c8951324a2b9e3316b3143779c72b029742f19" +dependencies = [ + "lazycell", + "log", + "mio 0.6.23", + "slab", +] + +[[package]] +name = "miow" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebd808424166322d4a38da87083bfddd3ac4c131334ed55856112eb06d46944d" +dependencies = [ + "kernel32-sys", + "net2", + "winapi 0.2.8", + "ws2_32-sys", +] + +[[package]] +name = "modularize_imports" +version = "0.27.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "82b62c65fb82bdb1a50f16b05fd477977c9a384224455a20753aa07c90f06a2c" +dependencies = [ + "convert_case 0.5.0", + "handlebars", + "once_cell", + "regex", + "serde", + "swc_core", +] + +[[package]] +name = "mopa" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a785740271256c230f57462d3b83e52f998433a7062fc18f96d5999474a9f915" + +[[package]] +name = "more-asserts" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"7843ec2de400bcbc6a6328c958dc38e5359da6e93e72e37bc5246bf1ae776389" + +[[package]] +name = "mp4parse" +version = "0.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67d2b6c6932c2cce721e9bd213461412300466d4613d93d25a36fb8881d9f8de" +dependencies = [ + "bitreader", + "byteorder", + "env_logger 0.8.4", + "fallible_collections", + "log", + "num-traits", + "static_assertions", +] + +[[package]] +name = "multimap" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5ce46fe64a9d73be07dcbe690a38ce1b293be448fd8ce1e6c1b8062c9f72c6a" + +[[package]] +name = "napi" +version = "2.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8de689526aff547ad70ad7feef42f1a5ccaa6f768910fd93984dae25a3fc9699" +dependencies = [ + "bitflags 2.1.0", + "ctor", + "napi-derive", + "napi-sys", + "once_cell", + "serde", + "serde_json", +] + +[[package]] +name = "napi-derive" +version = "2.12.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a6bd0beb0ac7e8576bc92d293361a461b42eaf41740bbdec7e0cbf64d8dc89f7" +dependencies = [ + "convert_case 0.6.0", + "napi-derive-backend", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "napi-derive-backend" +version = "1.0.48" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c713ff9ff5baa6d6ad9aedc46fad73c91e2f16ebe5ece0f41983d4e70e020c7c" +dependencies = [ + "convert_case 0.6.0", + "once_cell", + "proc-macro2", + "quote", + "regex", + "semver 1.0.17", + "syn 1.0.109", +] + +[[package]] +name = "napi-sys" +version = "2.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "166b5ef52a3ab5575047a9fe8d4a030cdd0f63c96f071cd6907674453b07bae3" +dependencies = [ + "libloading", +] + +[[package]] +name = "nasm-rs" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fe4d98d0065f4b1daf164b3eafb11974c94662e5e2396cf03f32d0bb5c17da51" +dependencies = [ + "rayon", +] + +[[package]] +name = "native-tls" +version = "0.2.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07226173c32f2926027b63cce4bcd8076c3552846cbe7925f3aaffeac0a3b92e" +dependencies = [ + "lazy_static", + "libc", + "log", + "openssl", + "openssl-probe", + "openssl-sys", + "schannel", + "security-framework", + "security-framework-sys", + "tempfile", +] + +[[package]] +name = "ndk-context" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "27b02d87554356db9e9a873add8782d4ea6e3e58ea071a9adb9a2e8ddb884a8b" + +[[package]] +name = "net2" +version = "0.2.38" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "74d0df99cfcd2530b2e694f6e17e7f37b8e26bb23983ac530c0c97408837c631" +dependencies = [ + "cfg-if 0.1.10", + "libc", + "winapi 0.3.9", +] + +[[package]] +name = "new_debug_unreachable" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e4a24736216ec316047a1fc4252e27dabb04218aa4a3f37c6e7ddbf1f9782b54" + +[[package]] +name = "newline-converter" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f71d09d5c87634207f894c6b31b6a2b2c64ea3bdcf71bd5599fdbbe1600c00f" +dependencies = [ + "unicode-segmentation", +] + +[[package]] +name = "nix" +version = "0.26.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"bfdda3d196821d6af13126e40375cdf7da646a96114af134d5f417a9a1dc8e1a" +dependencies = [ + "bitflags 1.3.2", + "cfg-if 1.0.0", + "libc", + "memoffset 0.7.1", + "pin-utils", + "static_assertions", +] + +[[package]] +name = "node-file-trace" +version = "0.1.0" +dependencies = [ + "anyhow", + "clap 4.1.11", + "console-subscriber", + "serde", + "serde_json", + "tokio", + "turbo-malloc", + "turbo-tasks", + "turbo-tasks-build", + "turbo-tasks-fs", + "turbo-tasks-memory", + "turbopack", + "turbopack-cli-utils", + "turbopack-core", +] + +[[package]] +name = "nohash-hasher" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2bf50223579dc7cdcfb3bfcacf7069ff68243f8c363f62ffa99cf000a6b9c451" + +[[package]] +name = "nom" +version = "7.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a" +dependencies = [ + "memchr", + "minimal-lexical", +] + +[[package]] +name = "noop_proc_macro" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0676bb32a98c1a483ce53e500a81ad9c3d5b3f7c920c28c24e9cb0980d0b5bc8" + +[[package]] +name = "normpath" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a9da8c9922c35a1033d76f7272dfc2e7ee20392083d75aeea6ced23c6266578" +dependencies = [ + "winapi 0.3.9", +] + +[[package]] +name = "notify" +version = "4.0.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae03c8c853dba7bfd23e571ff0cff7bc9dceb40a4cd684cd1681824183f45257" +dependencies = [ + "bitflags 1.3.2", + "filetime", + "fsevent", + "fsevent-sys", + "inotify 0.7.1", + "libc", + "mio 0.6.23", + "mio-extras", + "walkdir", + "winapi 0.3.9", +] + +[[package]] +name = "notify" +version = "5.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "58ea850aa68a06e48fdb069c0ec44d0d64c8dbffa49bf3b6f7f0a901fdea1ba9" +dependencies = [ + "bitflags 1.3.2", + "filetime", + "inotify 0.9.6", + "kqueue", + "libc", + "mio 0.8.6", + "walkdir", + "windows-sys 0.42.0", +] + +[[package]] +name = "ntapi" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc51db7b362b205941f71232e56c625156eb9a929f8cf74a428fd5bc094a4afc" +dependencies = [ + "winapi 0.3.9", +] + +[[package]] +name = "nu-ansi-term" +version = "0.46.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84" +dependencies = [ + "overload", + "winapi 0.3.9", +] + +[[package]] +name = "num-bigint" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f93ab6289c7b344a8a9f60f88d80aa20032336fe78da341afc91c8a2341fc75f" +dependencies = [ + "autocfg", + "num-integer", + "num-traits", + "serde", +] + +[[package]] +name = "num-derive" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "876a53fff98e03a936a674b29568b0e605f06b29372c2489ff4de23f1949743d" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "num-format" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a652d9771a63711fd3c3deb670acfbe5c30a4072e664d7a3bf5a9e1056ac72c3" +dependencies = [ + "arrayvec 0.7.2", + "itoa", +] + +[[package]] +name = "num-integer" +version = "0.1.45" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "225d3389fb3509a24c93f5c29eb6bde2586b98d9f016636dff58d7c6f7569cd9" +dependencies = [ + "autocfg", + "num-traits", +] + +[[package]] +name = "num-rational" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0638a1c9d0a3c0914158145bc76cff373a75a627e6ecbfb71cbe6f453a5a19b0" +dependencies = [ + "autocfg", + "num-bigint", + "num-integer", + "num-traits", +] + +[[package]] +name = "num-traits" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "578ede34cf02f8924ab9447f50c28075b4d3e5b269972345e7e0372b38c6cdcd" +dependencies = [ + "autocfg", +] + +[[package]] +name = "num_cpus" +version = "1.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fac9e2da13b5eb447a6ce3d392f23a29d8694bff781bf03a16cd9ac8697593b" +dependencies = [ + "hermit-abi 0.2.6", + "libc", +] + +[[package]] +name = "num_enum" +version = "0.5.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f646caf906c20226733ed5b1374287eb97e3c2a5c227ce668c1f2ce20ae57c9" +dependencies = [ + "num_enum_derive", +] + +[[package]] +name = "num_enum_derive" +version = "0.5.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dcbff9bc912032c62bf65ef1d5aea88983b420f4f839db1e9b0c281a25c9c799" +dependencies = [ + "proc-macro-crate", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "number_prefix" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "830b246a0e5f20af87141b25c173cd1b609bd7779a4617d6ec582abaf90870f3" + +[[package]] +name = "objc" +version = "0.2.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "915b1b472bc21c53464d6c8461c9d3af805ba1ef837e1cac254428f4a77177b1" +dependencies = [ + "malloc_buf", +] + +[[package]] +name = "object" +version = "0.30.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ea86265d3d3dcb6a27fc51bd29a4bf387fae9d2986b823079d4986af253eb439" +dependencies = [ + "memchr", +] + +[[package]] +name = "once_cell" +version = "1.17.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b7e5500299e16ebb147ae15a00a942af264cf3688f47923b8fc2cd5858f23ad3" + +[[package]] +name = "oorandom" +version = "11.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ab1bc2a289d34bd04a330323ac98a1b4bc82c9d9fcb1e66b63caa84da26b575" + +[[package]] +name = "openssl" +version = "0.10.47" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d8b277f87dacc05a6b709965d1cbafac4649d6ce9f3ce9ceb88508b5666dfec9" +dependencies = [ + "bitflags 1.3.2", + "cfg-if 1.0.0", + "foreign-types", + "libc", + "once_cell", + "openssl-macros", + "openssl-sys", +] + +[[package]] +name = "openssl-macros" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b501e44f11665960c7e7fcf062c7d96a14ade4aa98116c004b2e37b5be7d736c" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "openssl-probe" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" + +[[package]] +name = "openssl-sys" +version = "0.9.82" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a95792af3c4e0153c3914df2261bedd30a98476f94dc892b67dfe1d89d433a04" +dependencies = [ + "autocfg", + "cc", + 
"libc", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "ordered-multimap" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ccd746e37177e1711c20dd619a1620f34f5c8b569c53590a72dedd5344d8924a" +dependencies = [ + "dlv-list", + "hashbrown 0.12.3", +] + +[[package]] +name = "os_str_bytes" +version = "6.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ceedf44fb00f2d1984b0bc98102627ce622e083e49a5bacdb3e514fa4238e267" + +[[package]] +name = "output_vt100" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "628223faebab4e3e40667ee0b2336d34a5b960ff60ea743ddfdbcf7770bcfb66" +dependencies = [ + "winapi 0.3.9", +] + +[[package]] +name = "overload" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" + +[[package]] +name = "owo-colors" +version = "3.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c1b04fb49957986fdce4d6ee7a65027d55d4b6d2265e5848bbb507b58ccfdb6f" + +[[package]] +name = "papergrid" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1526bb6aa9f10ec339fb10360f22c57edf81d5678d0278e93bc12a47ffbe4b01" +dependencies = [ + "ansi-str", + "ansitok", + "bytecount", + "fnv", + "unicode-width", +] + +[[package]] +name = "parking" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "427c3892f9e783d91cc128285287e70a59e206ca452770ece88a76f7a3eddd72" + +[[package]] +name = "parking_lot" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" +dependencies = [ + "lock_api", + "parking_lot_core", +] + +[[package]] +name = "parking_lot_core" +version = "0.9.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9069cbb9f99e3a5083476ccb29ceb1de18b9118cafa53e90c9551235de2b9521" +dependencies = [ + "cfg-if 1.0.0", + "libc", + "redox_syscall 0.2.16", + "smallvec", + "windows-sys 0.45.0", +] + +[[package]] +name = "paste" +version = "1.0.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9f746c4065a8fa3fe23974dd82f15431cc8d40779821001404d10d2e79ca7d79" + +[[package]] +name = "path-clean" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ecba01bf2678719532c5e3059e0b5f0811273d94b397088b82e3bd0a78c78fdd" + +[[package]] +name = "path-slash" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e91099d4268b0e11973f036e885d652fb0b21fedcf69738c627f94db6a44f42" + +[[package]] +name = "pathdiff" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8835116a5c179084a830efb3adc117ab007512b535bc1a21c991d3b32a6b44dd" + +[[package]] +name = "pathfinder_geometry" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b7b7e7b4ea703700ce73ebf128e1450eb69c3a8329199ffbfb9b2a0418e5ad3" +dependencies = [ + "log", + "pathfinder_simd", +] + +[[package]] +name = "pathfinder_simd" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "39fe46acc5503595e5949c17b818714d26fdf9b4920eacf3b2947f0199f4a6ff" +dependencies = [ + "rustc_version 0.3.3", +] + +[[package]] +name = "patricia_tree" 
+version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "062a6297f2cd3969a780156ccb288eafb34bb5ed0f3c9a2b4500dbde869d4b86" +dependencies = [ + "bitflags 1.3.2", +] + +[[package]] +name = "peeking_take_while" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19b17cddbe7ec3f8bc800887bab5e717348c95ea2ca0b1bf0837fb964dc67099" + +[[package]] +name = "percent-encoding" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "478c572c3d73181ff3c2539045f6eb99e5491218eae919370993b890cdbdd98e" + +[[package]] +name = "pest" +version = "2.5.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8cbd939b234e95d72bc393d51788aec68aeeb5d51e748ca08ff3aad58cb722f7" +dependencies = [ + "thiserror", + "ucd-trie", +] + +[[package]] +name = "pest_derive" +version = "2.5.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a81186863f3d0a27340815be8f2078dd8050b14cd71913db9fbda795e5f707d7" +dependencies = [ + "pest", + "pest_generator", +] + +[[package]] +name = "pest_generator" +version = "2.5.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75a1ef20bf3193c15ac345acb32e26b3dc3223aff4d77ae4fc5359567683796b" +dependencies = [ + "pest", + "pest_meta", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "pest_meta" +version = "2.5.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e3b284b1f13a20dc5ebc90aff59a51b8d7137c221131b52a7260c08cbc1cc80" +dependencies = [ + "once_cell", + "pest", + "sha2", +] + +[[package]] +name = "petgraph" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4dd7d28ee937e54fe3080c91faa1c3a46c06de6252988a7f4592ba2310ef22a4" +dependencies = [ + "fixedbitset", + "indexmap", +] + +[[package]] +name = "phf" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fabbf1ead8a5bcbc20f5f8b939ee3f5b0f6f281b6ad3468b84656b658b455259" +dependencies = [ + "phf_macros", + "phf_shared", + "proc-macro-hack", +] + +[[package]] +name = "phf_generator" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d5285893bb5eb82e6aaf5d59ee909a06a16737a8970984dd7746ba9283498d6" +dependencies = [ + "phf_shared", + "rand 0.8.5", +] + +[[package]] +name = "phf_macros" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "58fdf3184dd560f160dd73922bea2d5cd6e8f064bf4b13110abd81b03697b4e0" +dependencies = [ + "phf_generator", + "phf_shared", + "proc-macro-hack", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "phf_shared" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6796ad771acdc0123d2a88dc428b5e38ef24456743ddb1744ed628f9815c096" +dependencies = [ + "siphasher", +] + +[[package]] +name = "pidlock" +version = "0.1.4" +dependencies = [ + "libc", + "log", + "rand 0.8.5", + "tempdir", + "windows-sys 0.45.0", +] + +[[package]] +name = "pin-project" +version = "1.0.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ad29a609b6bcd67fee905812e544992d216af9d755757c05ed2d0e15a74c6ecc" +dependencies = [ + "pin-project-internal", +] + +[[package]] +name = "pin-project-internal" +version = "1.0.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"069bdb1e05adc7a8990dce9cc75370895fbe4e3d58b9b73bf1aee56359344a55" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "pin-project-lite" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e0a7ae3ac2f1173085d398531c705756c94a4c56843785df85a60c1a0afac116" + +[[package]] +name = "pin-utils" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" + +[[package]] +name = "pkg-config" +version = "0.3.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ac9a59f73473f1b8d852421e59e64809f025994837ef743615c6d0c5b305160" + +[[package]] +name = "plotters" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2538b639e642295546c50fcd545198c9d64ee2a38620a628724a3b266d5fbf97" +dependencies = [ + "chrono", + "font-kit", + "image", + "lazy_static", + "num-traits", + "pathfinder_geometry", + "plotters-backend", + "plotters-bitmap", + "plotters-svg", + "ttf-parser", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "plotters-backend" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "193228616381fecdc1224c62e96946dfbc73ff4384fba576e052ff8c1bea8142" + +[[package]] +name = "plotters-bitmap" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c4a1f21490a6cf4a84c272ad20bd7844ed99a3178187a4c5ab7f2051295beef" +dependencies = [ + "gif", + "image", + "plotters-backend", +] + +[[package]] +name = "plotters-svg" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f9a81d2759aae1dae668f783c308bc5c8ebd191ff4184aaa1b37f65a6ae5a56f" +dependencies = [ + "plotters-backend", +] + +[[package]] +name = "pmutil" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3894e5d549cccbe44afecf72922f277f603cd4bb0219c8342631ef18fffbe004" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "png" +version = "0.17.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d708eaf860a19b19ce538740d2b4bdeeb8337fa53f7738455e706623ad5c638" +dependencies = [ + "bitflags 1.3.2", + "crc32fast", + "flate2", + "miniz_oxide", +] + +[[package]] +name = "polling" +version = "2.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e1f879b2998099c2d69ab9605d145d5b661195627eccc680002c4918a7fb6fa" +dependencies = [ + "autocfg", + "bitflags 1.3.2", + "cfg-if 1.0.0", + "concurrent-queue", + "libc", + "log", + "pin-project-lite", + "windows-sys 0.45.0", +] + +[[package]] +name = "port_scanner" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "325a6d2ac5dee293c3b2612d4993b98aec1dff096b0a2dae70ed7d95784a05da" + +[[package]] +name = "portable-atomic" +version = "0.3.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26f6a7b87c2e435a3241addceeeff740ff8b7e76b74c13bf9acb17fa454ea00b" + +[[package]] +name = "portpicker" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be97d76faf1bfab666e1375477b23fde79eccf0276e9b63b92a39d676a889ba9" +dependencies = [ + "rand 0.8.5", +] + +[[package]] +name = "ppv-lite86" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum 
= "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" + +[[package]] +name = "precomputed-hash" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c" + +[[package]] +name = "predicates" +version = "2.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "59230a63c37f3e18569bdb90e4a89cbf5bf8b06fea0b84e65ea10cc4df47addd" +dependencies = [ + "difflib", + "itertools", + "predicates-core", +] + +[[package]] +name = "predicates-core" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72f883590242d3c6fc5bf50299011695fa6590c2c70eac95ee1bdb9a733ad1a2" + +[[package]] +name = "predicates-tree" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "54ff541861505aabf6ea722d2131ee980b8276e10a1297b94e896dd8b621850d" +dependencies = [ + "predicates-core", + "termtree", +] + +[[package]] +name = "preset_env_base" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c963ac17c08dfc36f01b7d2c4426e759ac1cbd181c2a9ed807f3dea5200b90e1" +dependencies = [ + "ahash 0.7.6", + "anyhow", + "browserslist-rs", + "dashmap", + "from_variant", + "once_cell", + "semver 1.0.17", + "serde", + "st-map", + "tracing", +] + +[[package]] +name = "pretty_assertions" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a25e9bcb20aa780fd0bb16b72403a9064d6b3f22f026946029acb941a50af755" +dependencies = [ + "ctor", + "diff", + "output_vt100", + "yansi", +] + +[[package]] +name = "prettyplease" +version = "0.1.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ebcd279d20a4a0a2404a33056388e950504d891c855c7975b9a8fef75f3bf04" +dependencies = [ + "proc-macro2", + "syn 1.0.109", +] + +[[package]] +name = "priority-queue" +version = "1.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5ca9c6be70d989d21a136eb86c2d83e4b328447fac4a88dace2143c179c86267" +dependencies = [ + "autocfg", + "indexmap", +] + +[[package]] +name = "proc-macro-crate" +version = "1.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f4c021e1093a56626774e81216a4ce732a735e5bad4868a03f3ed65ca0c3919" +dependencies = [ + "once_cell", + "toml_edit", +] + +[[package]] +name = "proc-macro-error" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c" +dependencies = [ + "proc-macro-error-attr", + "proc-macro2", + "quote", + "syn 1.0.109", + "version_check", +] + +[[package]] +name = "proc-macro-error-attr" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869" +dependencies = [ + "proc-macro2", + "quote", + "version_check", +] + +[[package]] +name = "proc-macro-hack" +version = "0.5.20+deprecated" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc375e1527247fe1a97d8b7156678dfe7c1af2fc075c9a4db3690ecd2a148068" + +[[package]] +name = "proc-macro2" +version = "1.0.56" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b63bdb0cd06f1f4dedf69b254734f9b45af66e4a031e42a7480257d9898b435" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "prost" +version = "0.11.8" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "e48e50df39172a3e7eb17e14642445da64996989bc212b583015435d39a58537" +dependencies = [ + "bytes", + "prost-derive", +] + +[[package]] +name = "prost-build" +version = "0.11.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c828f93f5ca4826f97fedcbd3f9a536c16b12cff3dbbb4a007f932bbad95b12" +dependencies = [ + "bytes", + "heck 0.4.1", + "itertools", + "lazy_static", + "log", + "multimap", + "petgraph", + "prettyplease", + "prost", + "prost-types", + "regex", + "syn 1.0.109", + "tempfile", + "which", +] + +[[package]] +name = "prost-derive" +version = "0.11.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ea9b0f8cbe5e15a8a042d030bd96668db28ecb567ec37d691971ff5731d2b1b" +dependencies = [ + "anyhow", + "itertools", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "prost-types" +version = "0.11.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "379119666929a1afd7a043aa6cf96fa67a6dce9af60c88095a4686dbce4c9c88" +dependencies = [ + "prost", +] + +[[package]] +name = "psm" +version = "0.1.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5787f7cda34e3033a72192c018bc5883100330f362ef279a8cbccfce8bb4e874" +dependencies = [ + "cc", +] + +[[package]] +name = "ptr_meta" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0738ccf7ea06b608c10564b31debd4f5bc5e197fc8bfe088f68ae5ce81e7a4f1" +dependencies = [ + "ptr_meta_derive", +] + +[[package]] +name = "ptr_meta_derive" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16b845dbfca988fa33db069c0e230574d15a3088f147a87b64c7589eb662c9ac" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "pulldown-cmark" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ffade02495f22453cd593159ea2f59827aae7f53fa8323f756799b670881dcf8" +dependencies = [ + "bitflags 1.3.2", + "memchr", + "unicase", +] + +[[package]] +name = "qstring" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d464fae65fff2680baf48019211ce37aaec0c78e9264c84a3e484717f965104e" +dependencies = [ + "percent-encoding", +] + +[[package]] +name = "quick-error" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a993555f31e5a609f617c12db6250dedcac1b0a85076912c436e6fc9b2c8e6a3" + +[[package]] +name = "quote" +version = "1.0.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4424af4bf778aae2051a77b60283332f386554255d722233d09fbfc7e30da2fc" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "radix_fmt" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce082a9940a7ace2ad4a8b7d0b1eac6aa378895f18be598230c5f2284ac05426" + +[[package]] +name = "rand" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "552840b97013b1a26992c11eac34bdd778e464601a4c2054b5f0bff7c6761293" +dependencies = [ + "fuchsia-cprng", + "libc", + "rand_core 0.3.1", + "rdrand", + "winapi 0.3.9", +] + +[[package]] +name = "rand" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +dependencies = [ + "libc", + "rand_chacha", + 
"rand_core 0.6.4", +] + +[[package]] +name = "rand_chacha" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" +dependencies = [ + "ppv-lite86", + "rand_core 0.6.4", +] + +[[package]] +name = "rand_core" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a6fdeb83b075e8266dcc8762c22776f6877a63111121f5f8c7411e5be7eed4b" +dependencies = [ + "rand_core 0.4.2", +] + +[[package]] +name = "rand_core" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c33a3c44ca05fa6f1807d8e6743f3824e8509beca625669633be0acbdf509dc" + +[[package]] +name = "rand_core" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" +dependencies = [ + "getrandom", +] + +[[package]] +name = "rav1e" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "277898094f0d03c6a609e491324102daf5080e71c06b4b25e5acf8b89d26c945" +dependencies = [ + "arbitrary", + "arg_enum_proc_macro", + "arrayvec 0.7.2", + "av-metrics", + "av1-grain", + "bitstream-io", + "built", + "cc", + "cfg-if 1.0.0", + "clap 4.1.11", + "clap_complete", + "console", + "const_fn_assert", + "fern", + "interpolate_name", + "itertools", + "ivf", + "libc", + "libfuzzer-sys", + "log", + "maybe-rayon", + "nasm-rs", + "new_debug_unreachable", + "nom", + "noop_proc_macro", + "num-derive", + "num-traits", + "once_cell", + "paste", + "rand 0.8.5", + "rand_chacha", + "rust_hawktracer", + "rustc_version 0.4.0", + "scan_fmt", + "signal-hook", + "simd_helpers", + "system-deps", + "thiserror", + "v_frame", + "wasm-bindgen", + "y4m", +] + +[[package]] +name = "ravif" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26cda034bd7ec1f4e799b207ef0444dd28b2fb310877378a5bb5b8b6ef6c4158" +dependencies = [ + "avif-serialize", + "imgref", + "loop9", + "quick-error", + "rav1e", + "rayon", + "rgb", +] + +[[package]] +name = "raw-window-handle" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4f851a03551ceefd30132e447f07f96cb7011d6b658374f3aed847333adb5559" + +[[package]] +name = "rayon" +version = "1.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d2df5196e37bcc87abebc0053e20787d73847bb33134a69841207dd0a47f03b" +dependencies = [ + "either", + "rayon-core", +] + +[[package]] +name = "rayon-core" +version = "1.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4b8f95bd6966f5c87776639160a66bd8ab9895d9d4ab01ddba9fc60661aebe8d" +dependencies = [ + "crossbeam-channel", + "crossbeam-deque", + "crossbeam-utils", + "num_cpus", +] + +[[package]] +name = "rdrand" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "678054eb77286b51581ba43620cc911abf02758c91f93f479767aed0f90458b2" +dependencies = [ + "rand_core 0.3.1", +] + +[[package]] +name = "redox_syscall" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a" +dependencies = [ + "bitflags 1.3.2", +] + +[[package]] +name = "redox_syscall" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"567664f262709473930a4bf9e51bf2ebf3348f2e748ccc50dea20646858f8f29" +dependencies = [ + "bitflags 1.3.2", +] + +[[package]] +name = "redox_users" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b033d837a7cf162d7993aded9304e30a83213c648b6e389db233191f891e5c2b" +dependencies = [ + "getrandom", + "redox_syscall 0.2.16", + "thiserror", +] + +[[package]] +name = "regalloc2" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "300d4fbfb40c1c66a78ba3ddd41c1110247cf52f97b87d0f2fc9209bd49b030c" +dependencies = [ + "fxhash", + "log", + "slice-group-by", + "smallvec", +] + +[[package]] +name = "regex" +version = "1.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b1f693b24f6ac912f4893ef08244d70b6067480d2f1a46e950c9691e6749d1d" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax", +] + +[[package]] +name = "regex-automata" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" +dependencies = [ + "regex-syntax", +] + +[[package]] +name = "regex-syntax" +version = "0.6.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" + +[[package]] +name = "region" +version = "3.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76e189c2369884dce920945e2ddf79b3dff49e071a167dd1817fa9c4c00d512e" +dependencies = [ + "bitflags 1.3.2", + "libc", + "mach", + "winapi 0.3.9", +] + +[[package]] +name = "relative-path" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4bf2521270932c3c7bed1a59151222bd7643c79310f2916f01925e1e16255698" + +[[package]] +name = "remove_dir_all" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3acd125665422973a33ac9d3dd2df85edad0f4ae9b00dafb1a05e43a9f5ef8e7" +dependencies = [ + "winapi 0.3.9", +] + +[[package]] +name = "rend" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "581008d2099240d37fb08d77ad713bcaec2c4d89d50b5b21a8bb1996bbab68ab" +dependencies = [ + "bytecheck", +] + +[[package]] +name = "reqwest" +version = "0.11.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ba30cc2c0cd02af1222ed216ba659cdb2f879dfe3181852fe7c50b1d0005949" +dependencies = [ + "base64 0.21.0", + "bytes", + "encoding_rs", + "futures-core", + "futures-util", + "h2", + "http", + "http-body", + "hyper", + "hyper-rustls", + "hyper-tls", + "ipnet", + "js-sys", + "log", + "mime", + "native-tls", + "once_cell", + "percent-encoding", + "pin-project-lite", + "rustls", + "rustls-pemfile", + "serde", + "serde_json", + "serde_urlencoded", + "tokio", + "tokio-native-tls", + "tokio-rustls", + "tower-service", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", + "webpki-roots", + "winreg", +] + +[[package]] +name = "rgb" +version = "0.8.36" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "20ec2d3e3fc7a92ced357df9cebd5a10b6fb2aa1ee797bf7e9ce2f17dffc8f59" +dependencies = [ + "bytemuck", +] + +[[package]] +name = "ring" +version = "0.16.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3053cf52e236a3ed746dfc745aa9cacf1b791d846bdaf412f60a8d7d6e17c8fc" +dependencies = [ + "cc", + "libc", + "once_cell", + "spin 
0.5.2", + "untrusted", + "web-sys", + "winapi 0.3.9", +] + +[[package]] +name = "rkyv" +version = "0.7.41" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "21499ed91807f07ae081880aabb2ccc0235e9d88011867d984525e9a4c3cfa3e" +dependencies = [ + "bytecheck", + "hashbrown 0.12.3", + "indexmap", + "ptr_meta", + "rend", + "rkyv_derive", + "seahash", +] + +[[package]] +name = "rkyv_derive" +version = "0.7.41" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac1c672430eb41556291981f45ca900a0239ad007242d1cb4b4167af842db666" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "ron" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "88073939a61e5b7680558e6be56b419e208420c2adb92be54921fa6b72283f1a" +dependencies = [ + "base64 0.13.1", + "bitflags 1.3.2", + "serde", +] + +[[package]] +name = "rstest" +version = "0.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b07f2d176c472198ec1e6551dc7da28f1c089652f66a7b722676c2238ebc0edf" +dependencies = [ + "futures", + "futures-timer", + "rstest_macros", + "rustc_version 0.4.0", +] + +[[package]] +name = "rstest_macros" +version = "0.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7229b505ae0706e64f37ffc54a9c163e11022a6636d58fe1f3f52018257ff9f7" +dependencies = [ + "cfg-if 1.0.0", + "proc-macro2", + "quote", + "rustc_version 0.4.0", + "syn 1.0.109", + "unicode-ident", +] + +[[package]] +name = "rstest_reuse" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "45f80dcc84beab3a327bbe161f77db25f336a1452428176787c8c79ac79d7073" +dependencies = [ + "quote", + "rand 0.8.5", + "rustc_version 0.4.0", + "syn 1.0.109", +] + +[[package]] +name = "rust-ini" +version = "0.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6d5f2436026b4f6e79dc829837d467cc7e9a55ee40e750d716713540715a2df" +dependencies = [ + "cfg-if 1.0.0", + "ordered-multimap", +] + +[[package]] +name = "rust_hawktracer" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3480a29b927f66c6e06527be7f49ef4d291a01d694ec1fe85b0de71d6b02ac1" +dependencies = [ + "rust_hawktracer_normal_macro", + "rust_hawktracer_proc_macro", +] + +[[package]] +name = "rust_hawktracer_normal_macro" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a570059949e1dcdc6f35228fa389f54c2c84dfe0c94c05022baacd56eacd2e9" + +[[package]] +name = "rust_hawktracer_proc_macro" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb626abdbed5e93f031baae60d72032f56bc964e11ac2ff65f2ba3ed98d6d3e1" + +[[package]] +name = "rustc-demangle" +version = "0.1.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d4a36c42d1873f9a77c53bde094f9664d9891bc604a45b4798fd2c389ed12e5b" + +[[package]] +name = "rustc-hash" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" + +[[package]] +name = "rustc_version" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a" +dependencies = [ + "semver 0.9.0", +] + +[[package]] +name = "rustc_version" +version = "0.3.3" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0dfe2087c51c460008730de8b57e6a320782fbfb312e1f4d520e6c6fae155ee" +dependencies = [ + "semver 0.11.0", +] + +[[package]] +name = "rustc_version" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" +dependencies = [ + "semver 1.0.17", +] + +[[package]] +name = "rustc_version_runtime" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d31b7153270ebf48bf91c65ae5b0c00e749c4cfad505f66530ac74950249582f" +dependencies = [ + "rustc_version 0.2.3", + "semver 0.9.0", +] + +[[package]] +name = "rustix" +version = "0.36.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db4165c9963ab29e422d6c26fbc1d37f15bace6b2810221f9d925023480fcf0e" +dependencies = [ + "bitflags 1.3.2", + "errno 0.2.8", + "io-lifetimes", + "libc", + "linux-raw-sys 0.1.4", + "windows-sys 0.45.0", +] + +[[package]] +name = "rustix" +version = "0.37.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85597d61f83914ddeba6a47b3b8ffe7365107221c2e557ed94426489fefb5f77" +dependencies = [ + "bitflags 1.3.2", + "errno 0.3.1", + "io-lifetimes", + "libc", + "linux-raw-sys 0.3.0", + "windows-sys 0.48.0", +] + +[[package]] +name = "rustls" +version = "0.20.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fff78fc74d175294f4e83b28343315ffcfb114b156f0185e9741cb5570f50e2f" +dependencies = [ + "log", + "ring", + "sct", + "webpki", +] + +[[package]] +name = "rustls-pemfile" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d194b56d58803a43635bdc398cd17e383d6f71f9182b9a192c127ca42494a59b" +dependencies = [ + "base64 0.21.0", +] + +[[package]] +name = "rustversion" +version = "1.0.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4f3208ce4d8448b3f3e7d168a73f5e0c43a61e32930de3bceeccedb388b6bf06" + +[[package]] +name = "ryu" +version = "1.0.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f91339c0467de62360649f8d3e185ca8de4224ff281f66000de5eb2a77a79041" + +[[package]] +name = "ryu-js" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6518fc26bced4d53678a22d6e423e9d8716377def84545fe328236e3af070e7f" + +[[package]] +name = "same-file" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" +dependencies = [ + "winapi-util", +] + +[[package]] +name = "scan_fmt" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b53b0a5db882a8e2fdaae0a43f7b39e7e9082389e978398bdf223a55b581248" + +[[package]] +name = "schannel" +version = "0.1.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "713cfb06c7059f3588fb8044c0fad1d09e3c01d225e25b9220dbfdcf16dbb1b3" +dependencies = [ + "windows-sys 0.42.0", +] + +[[package]] +name = "scoped-tls" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e1cf6437eb19a8f4a6cc0f7dca544973b0b78843adbfeb3683d1a94a0024a294" + +[[package]] +name = "scopeguard" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" + +[[package]] +name = 
"scratch" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1792db035ce95be60c3f8853017b3999209281c24e2ba5bc8e59bf97a0c590c1" + +[[package]] +name = "sct" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d53dcdb7c9f8158937a7981b48accfd39a43af418591a5d008c7b22b5e1b7ca4" +dependencies = [ + "ring", + "untrusted", +] + +[[package]] +name = "seahash" +version = "4.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1c107b6f4780854c8b126e228ea8869f4d7b71260f962fefb57b996b8959ba6b" + +[[package]] +name = "security-framework" +version = "2.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a332be01508d814fed64bf28f798a146d73792121129962fdf335bb3c49a4254" +dependencies = [ + "bitflags 1.3.2", + "core-foundation", + "core-foundation-sys", + "libc", + "security-framework-sys", +] + +[[package]] +name = "security-framework-sys" +version = "2.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "31c9bb296072e961fcbd8853511dd39c2d8be2deb1e17c6860b1d30732b323b4" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "semver" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d7eb9ef2c18661902cc47e535f9bc51b78acd254da71d375c2f6720d9a40403" +dependencies = [ + "semver-parser 0.7.0", +] + +[[package]] +name = "semver" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f301af10236f6df4160f7c3f04eec6dbc70ace82d23326abad5edee88801c6b6" +dependencies = [ + "semver-parser 0.10.2", +] + +[[package]] +name = "semver" +version = "1.0.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bebd363326d05ec3e2f532ab7660680f3b02130d780c299bca73469d521bc0ed" +dependencies = [ + "serde", +] + +[[package]] +name = "semver-parser" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" + +[[package]] +name = "semver-parser" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00b0bef5b7f9e0df16536d3961cfb6e84331c065b4066afb39768d0e319411f7" +dependencies = [ + "pest", +] + +[[package]] +name = "serde" +version = "1.0.160" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb2f3770c8bce3bcda7e149193a069a0f4365bda1fa5cd88e03bca26afc1216c" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde-wasm-bindgen" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3b4c031cd0d9014307d82b8abf653c0290fbdaeb4c02d00c63cf52f728628bf" +dependencies = [ + "js-sys", + "serde", + "wasm-bindgen", +] + +[[package]] +name = "serde_bytes" +version = "0.11.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "416bda436f9aab92e02c8e10d49a15ddd339cea90b6e340fe51ed97abb548294" +dependencies = [ + "serde", +] + +[[package]] +name = "serde_cbor" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2bef2ebfde456fb76bbcf9f59315333decc4fda0b2b44b420243c11e0f5ec1f5" +dependencies = [ + "half", + "serde", +] + +[[package]] +name = "serde_derive" +version = "1.0.160" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "291a097c63d8497e00160b166a967a4a79c64f3facdd01cbd7502231688d77df" 
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.15",
+]
+
+[[package]]
+name = "serde_json"
+version = "1.0.96"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "057d394a50403bcac12672b2b18fb387ab6d289d957dab67dd201875391e52f1"
+dependencies = [
+ "indexmap",
+ "itoa",
+ "ryu",
+ "serde",
+]
+
+[[package]]
+name = "serde_path_to_error"
+version = "0.1.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f7f05c1d5476066defcdfacce1f52fc3cae3af1d3089727100c02ae92e5abbe0"
+dependencies = [
+ "serde",
+]
+
+[[package]]
+name = "serde_qs"
+version = "0.11.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c679fa27b429f2bb57fd4710257e643e86c966e716037259f8baa33de594a1b6"
+dependencies = [
+ "percent-encoding",
+ "serde",
+ "thiserror",
+]
+
+[[package]]
+name = "serde_regex"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a8136f1a4ea815d7eac4101cfd0b16dc0cb5e1fe1b8609dfd728058656b7badf"
+dependencies = [
+ "regex",
+ "serde",
+]
+
+[[package]]
+name = "serde_spanned"
+version = "0.6.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0efd8caf556a6cebd3b285caf480045fcc1ac04f6bd786b09a6f11af30c4fcf4"
+dependencies = [
+ "serde",
+]
+
+[[package]]
+name = "serde_test"
+version = "1.0.157"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4231c6fab29d02b3cc705db3422aa36f7d23f1e1c096c947c8b8816d7c43dd45"
+dependencies = [
+ "serde",
+]
+
+[[package]]
+name = "serde_urlencoded"
+version = "0.7.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd"
+dependencies = [
+ "form_urlencoded",
+ "itoa",
+ "ryu",
+ "serde",
+]
+
+[[package]]
+name = "serde_with"
+version = "2.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "331bb8c3bf9b92457ab7abecf07078c13f7d270ba490103e84e8b014490cd0b0"
+dependencies = [
+ "base64 0.13.1",
+ "chrono",
+ "hex",
+ "indexmap",
+ "serde",
+ "serde_json",
+ "serde_with_macros",
+ "time 0.3.20",
+]
+
+[[package]]
+name = "serde_with_macros"
+version = "2.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "859011bddcc11f289f07f467cc1fe01c7a941daa4d8f6c40d4d1c92eb6d9319c"
+dependencies = [
+ "darling",
+ "proc-macro2",
+ "quote",
+ "syn 1.0.109",
+]
+
+[[package]]
+name = "serde_yaml"
+version = "0.8.26"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "578a7433b776b56a35785ed5ce9a7e777ac0598aac5a6dd1b4b18a307c7fc71b"
+dependencies = [
+ "indexmap",
+ "ryu",
+ "serde",
+ "yaml-rust",
+]
+
+[[package]]
+name = "serde_yaml"
+version = "0.9.19"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f82e6c8c047aa50a7328632d067bcae6ef38772a79e28daf32f735e0e4f3dd10"
+dependencies = [
+ "indexmap",
+ "itoa",
+ "ryu",
+ "serde",
+ "unsafe-libyaml",
+]
+
+[[package]]
+name = "sha-1"
+version = "0.10.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "028f48d513f9678cda28f6e4064755b3fbb2af6acd672f2c209b62323f7aea0f"
+dependencies = [
+ "cfg-if 1.0.0",
+ "cpufeatures",
+ "digest",
+]
+
+[[package]]
+name = "sha1"
+version = "0.6.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c1da05c97445caa12d05e848c4a4fcbbea29e748ac28f7e80e9b010392063770"
+dependencies = [
+ "sha1_smol",
+]
+
+[[package]]
+name = "sha1"
+version = "0.10.5"
"0.10.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f04293dc80c3993519f2d7f6f511707ee7094fe0c6d3406feb330cdb3540eba3" +dependencies = [ + "cfg-if 1.0.0", + "cpufeatures", + "digest", +] + +[[package]] +name = "sha1_smol" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae1a47186c03a32177042e55dbc5fd5aee900b8e0069a8d70fba96a9375cd012" + +[[package]] +name = "sha2" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "82e6b795fe2e3b1e845bafcb27aa35405c4d47cdfc92af5fc8d3002f76cebdc0" +dependencies = [ + "cfg-if 1.0.0", + "cpufeatures", + "digest", +] + +[[package]] +name = "sharded-slab" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "900fba806f70c630b0a382d0d825e17a0f19fcd059a2ade1ff237bcddf446b31" +dependencies = [ + "lazy_static", +] + +[[package]] +name = "shared_child" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0d94659ad3c2137fef23ae75b03d5241d633f8acded53d672decfa0e6e0caef" +dependencies = [ + "libc", + "winapi 0.3.9", +] + +[[package]] +name = "shell-words" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24188a676b6ae68c3b2cb3a01be17fbf7240ce009799bb56d5b1409051e78fde" + +[[package]] +name = "shellexpand" +version = "2.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ccc8076840c4da029af4f87e4e8daeb0fca6b87bbb02e10cb60b791450e11e4" +dependencies = [ + "dirs", +] + +[[package]] +name = "shlex" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43b2853a4d09f215c24cc5489c992ce46052d359b5109343cbafbf26bc62f8a3" + +[[package]] +name = "signal-hook" +version = "0.3.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "732768f1176d21d09e076c23a93123d40bba92d50c4058da34d45c8de8e682b9" +dependencies = [ + "libc", + "signal-hook-registry", +] + +[[package]] +name = "signal-hook-mio" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29ad2e15f37ec9a6cc544097b78a1ec90001e9f71b81338ca39f430adaca99af" +dependencies = [ + "libc", + "mio 0.8.6", + "signal-hook", +] + +[[package]] +name = "signal-hook-registry" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d8229b473baa5980ac72ef434c4415e70c4b5e71b423043adb4ba059f89c99a1" +dependencies = [ + "libc", +] + +[[package]] +name = "simd_helpers" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95890f873bec569a0362c235787f3aca6e1e887302ba4840839bcc6459c42da6" +dependencies = [ + "quote", +] + +[[package]] +name = "simdutf8" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f27f6278552951f1f2b8cf9da965d10969b2efdea95a6ec47987ab46edfe263a" + +[[package]] +name = "similar" +version = "2.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "420acb44afdae038210c99e69aae24109f32f15500aa708e81d46c9f29d55fcf" + +[[package]] +name = "siphasher" +version = "0.3.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7bd3e3206899af3f8b12af284fafc038cc1dc2b41d1b89dd17297221c5d225de" + +[[package]] +name = "slab" +version = "0.4.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"6528351c9bc8ab22353f9d776db39a20288e8d6c37ef8cfe3317cf875eecfc2d" +dependencies = [ + "autocfg", +] + +[[package]] +name = "slice-group-by" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "03b634d87b960ab1a38c4fe143b508576f075e7c978bfad18217645ebfdfa2ec" + +[[package]] +name = "sluice" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d7400c0eff44aa2fcb5e31a5f24ba9716ed90138769e4977a2ba6014ae63eb5" +dependencies = [ + "async-channel", + "futures-core", + "futures-io", +] + +[[package]] +name = "smallvec" +version = "1.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0" + +[[package]] +name = "smartstring" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3fb72c633efbaa2dd666986505016c32c3044395ceaf881518399d2f4127ee29" +dependencies = [ + "autocfg", + "static_assertions", + "version_check", +] + +[[package]] +name = "smawk" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f67ad224767faa3c7d8b6d91985b78e70a1324408abcb1cfcc2be4c06bc06043" + +[[package]] +name = "socket2" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "64a4a911eed85daf18834cfaa86a79b7d266ff93ff5ba14005426219480ed662" +dependencies = [ + "libc", + "winapi 0.3.9", +] + +[[package]] +name = "sourcemap" +version = "6.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eed16231c92d0a6f0388f56e0ab2be24ecff1173f8e22f0ea5e074d0525631cb" +dependencies = [ + "data-encoding", + "if_chain", + "rustc_version 0.2.3", + "serde", + "serde_json", + "unicode-id", + "url", +] + +[[package]] +name = "spin" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d" + +[[package]] +name = "spin" +version = "0.9.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" +dependencies = [ + "lock_api", +] + +[[package]] +name = "st-map" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc9c9f3a1df5f73b7392bd9773108fef41ad9126f0282412fd5904389f0c0c4f" +dependencies = [ + "arrayvec 0.7.2", + "static-map-macro", +] + +[[package]] +name = "stable_deref_trait" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" + +[[package]] +name = "stacker" +version = "0.1.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c886bd4480155fd3ef527d45e9ac8dd7118a898a46530b7b94c3e21866259fce" +dependencies = [ + "cc", + "cfg-if 1.0.0", + "libc", + "psm", + "winapi 0.3.9", +] + +[[package]] +name = "standback" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e113fb6f3de07a243d434a56ec6f186dfd51cb08448239fe7bcae73f87ff28ff" +dependencies = [ + "version_check", +] + +[[package]] +name = "static-map-macro" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "752564de9cd8937fdbc1c55d47ac391758c352ab3755607cc391b659fe87d56b" +dependencies = [ + "pmutil", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = 
"static_assertions" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" + +[[package]] +name = "stdweb" +version = "0.4.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d022496b16281348b52d0e30ae99e01a73d737b2f45d38fed4edf79f9325a1d5" +dependencies = [ + "discard", + "rustc_version 0.2.3", + "stdweb-derive", + "stdweb-internal-macros", + "stdweb-internal-runtime", + "wasm-bindgen", +] + +[[package]] +name = "stdweb-derive" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c87a60a40fccc84bef0652345bbbbbe20a605bf5d0ce81719fc476f5c03b50ef" +dependencies = [ + "proc-macro2", + "quote", + "serde", + "serde_derive", + "syn 1.0.109", +] + +[[package]] +name = "stdweb-internal-macros" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "58fa5ff6ad0d98d1ffa8cb115892b6e69d67799f6763e162a1c9db421dc22e11" +dependencies = [ + "base-x", + "proc-macro2", + "quote", + "serde", + "serde_derive", + "serde_json", + "sha1 0.6.1", + "syn 1.0.109", +] + +[[package]] +name = "stdweb-internal-runtime" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "213701ba3370744dcd1a12960caa4843b3d68b4d1c0a5d575e0d65b2ee9d16c0" + +[[package]] +name = "string_cache" +version = "0.8.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f91138e76242f575eb1d3b38b4f1362f10d3a43f47d182a5b359af488a02293b" +dependencies = [ + "new_debug_unreachable", + "once_cell", + "parking_lot", + "phf_shared", + "precomputed-hash", + "serde", +] + +[[package]] +name = "string_cache_codegen" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6bb30289b722be4ff74a408c3cc27edeaad656e06cb1fe8fa9231fa59c728988" +dependencies = [ + "phf_generator", + "phf_shared", + "proc-macro2", + "quote", +] + +[[package]] +name = "string_enum" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0090512bdfee4b56d82480d66c0fd8a6f53f0fe0f97e075e949b252acdd482e0" +dependencies = [ + "pmutil", + "proc-macro2", + "quote", + "swc_macros_common", + "syn 1.0.109", +] + +[[package]] +name = "strsim" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ea5119cdb4c55b55d432abb513a0429384878c15dde60cc77b1c99de1a95a6a" + +[[package]] +name = "strsim" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" + +[[package]] +name = "styled_components" +version = "0.54.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "789df3a5407332dadb05bc20e4c83d89dacac0ff3c73e264c25d80c2c295f749" +dependencies = [ + "Inflector", + "once_cell", + "regex", + "serde", + "swc_core", + "tracing", +] + +[[package]] +name = "styled_jsx" +version = "0.31.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4a5ab84e077d9a2fa06ac1c33eb5a055d6a55457e59a777cdce2b51a6bebdf16" +dependencies = [ + "easy-error", + "swc_core", + "tracing", +] + +[[package]] +name = "subtle" +version = "2.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6bdef32e8150c2a081110b42772ffe7d7c9032b606bc226c8260fd97e0976601" + +[[package]] +name = "supports-color" +version = "1.3.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ba6faf2ca7ee42fdd458f4347ae0a9bd6bcc445ad7cb57ad82b383f18870d6f" +dependencies = [ + "atty", + "is_ci", +] + +[[package]] +name = "supports-hyperlinks" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "590b34f7c5f01ecc9d78dba4b3f445f31df750a67621cf31626f3b7441ce6406" +dependencies = [ + "atty", +] + +[[package]] +name = "supports-unicode" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8b945e45b417b125a8ec51f1b7df2f8df7920367700d1f98aedd21e5735f8b2" +dependencies = [ + "atty", +] + +[[package]] +name = "swc" +version = "0.260.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a4874d4b82e0a305e3183c8e332aa812e68b6a70453fa8d61d4a26bd1949ac7" +dependencies = [ + "ahash 0.7.6", + "anyhow", + "base64 0.13.1", + "dashmap", + "either", + "indexmap", + "jsonc-parser", + "lru", + "napi", + "napi-derive", + "once_cell", + "parking_lot", + "pathdiff", + "regex", + "rustc-hash", + "serde", + "serde_json", + "sourcemap", + "swc_atoms", + "swc_cached", + "swc_common", + "swc_config", + "swc_ecma_ast", + "swc_ecma_codegen", + "swc_ecma_ext_transforms", + "swc_ecma_lints", + "swc_ecma_loader", + "swc_ecma_minifier", + "swc_ecma_parser", + "swc_ecma_preset_env", + "swc_ecma_transforms", + "swc_ecma_transforms_base", + "swc_ecma_transforms_compat", + "swc_ecma_transforms_optimization", + "swc_ecma_utils", + "swc_ecma_visit", + "swc_error_reporters", + "swc_node_comments", + "swc_plugin_proxy", + "swc_plugin_runner", + "swc_timer", + "swc_visit", + "tracing", + "url", +] + +[[package]] +name = "swc-ast-explorer" +version = "0.1.0" +dependencies = [ + "anyhow", + "clap 4.1.11", + "owo-colors", + "regex", + "swc_core", +] + +[[package]] +name = "swc_atoms" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "593c2f3e4cea60ddc4179ed731cabebe7eacec209d9e76a3bbcff4b2b020e3f5" +dependencies = [ + "once_cell", + "rkyv", + "rustc-hash", + "serde", + "string_cache", + "string_cache_codegen", + "triomphe", +] + +[[package]] +name = "swc_bundler" +version = "0.213.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "39b0ab52fcef01d39ef7d80aad0bc21374a48cee98b788e25ebf4b520a3047f7" +dependencies = [ + "ahash 0.7.6", + "anyhow", + "crc", + "dashmap", + "indexmap", + "is-macro", + "once_cell", + "parking_lot", + "petgraph", + "radix_fmt", + "rayon", + "relative-path", + "swc_atoms", + "swc_common", + "swc_ecma_ast", + "swc_ecma_codegen", + "swc_ecma_loader", + "swc_ecma_parser", + "swc_ecma_transforms_base", + "swc_ecma_transforms_optimization", + "swc_ecma_utils", + "swc_ecma_visit", + "swc_fast_graph", + "swc_graph_analyzer", + "tracing", +] + +[[package]] +name = "swc_cached" +version = "0.3.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9745d42d167cb60aeb1e85d2ee813ca455c3185bf7417f11fd102d745ae2b9e1" +dependencies = [ + "ahash 0.7.6", + "anyhow", + "dashmap", + "once_cell", + "regex", + "serde", +] + +[[package]] +name = "swc_common" +version = "0.31.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b557014d62318e08070c2a3d5eb0278ff73749dd69db53c39a4de4bcd301d6a" +dependencies = [ + "ahash 0.7.6", + "anyhow", + "ast_node", + "atty", + "better_scoped_tls", + "cfg-if 1.0.0", + "either", + "from_variant", + "new_debug_unreachable", + "num-bigint", + "once_cell", + "parking_lot", + 
"rkyv", + "rustc-hash", + "serde", + "siphasher", + "sourcemap", + "string_cache", + "swc_atoms", + "swc_eq_ignore_macros", + "swc_visit", + "termcolor", + "tracing", + "unicode-width", + "url", +] + +[[package]] +name = "swc_config" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "89c8fc2c12bb1634c7c32fc3c9b6b963ad8f034cc62c4ecddcf215dc4f6f959d" +dependencies = [ + "indexmap", + "serde", + "serde_json", + "swc_config_macro", +] + +[[package]] +name = "swc_config_macro" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7dadb9998d4f5fc36ef558ed5a092579441579ee8c6fcce84a5228cca9df4004" +dependencies = [ + "pmutil", + "proc-macro2", + "quote", + "swc_macros_common", + "syn 1.0.109", +] + +[[package]] +name = "swc_core" +version = "0.75.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0a2a7e4e3a2d9fe7a2c53661657a9c7f032d4c65731ca79a7b29044f9d211c31" +dependencies = [ + "binding_macros", + "swc", + "swc_atoms", + "swc_bundler", + "swc_cached", + "swc_common", + "swc_css_ast", + "swc_css_codegen", + "swc_css_compat", + "swc_css_modules", + "swc_css_parser", + "swc_css_prefixer", + "swc_css_utils", + "swc_css_visit", + "swc_ecma_ast", + "swc_ecma_codegen", + "swc_ecma_loader", + "swc_ecma_minifier", + "swc_ecma_parser", + "swc_ecma_preset_env", + "swc_ecma_quote_macros", + "swc_ecma_transforms_base", + "swc_ecma_transforms_module", + "swc_ecma_transforms_optimization", + "swc_ecma_transforms_proposal", + "swc_ecma_transforms_react", + "swc_ecma_transforms_testing", + "swc_ecma_transforms_typescript", + "swc_ecma_utils", + "swc_ecma_visit", + "swc_node_base", + "swc_nodejs_common", + "swc_plugin_proxy", + "swc_plugin_runner", + "swc_trace_macro", + "testing", + "vergen", +] + +[[package]] +name = "swc_css_ast" +version = "0.137.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f1d3f6e80ad0043504099eeda037651db74ac60ae54a4a0ff3a4c846647b487" +dependencies = [ + "is-macro", + "serde", + "string_enum", + "swc_atoms", + "swc_common", +] + +[[package]] +name = "swc_css_codegen" +version = "0.147.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "51f72afff183f03dd7d06ccdd806612b692adc30650ed95b744c944879a3463f" +dependencies = [ + "auto_impl", + "bitflags 2.1.0", + "rustc-hash", + "serde", + "swc_atoms", + "swc_common", + "swc_css_ast", + "swc_css_codegen_macros", + "swc_css_utils", +] + +[[package]] +name = "swc_css_codegen_macros" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "01c132d9ba562343f7c49d776c4a09b362a4a4104b7cb0a0f7b785986a492e1b" +dependencies = [ + "pmutil", + "proc-macro2", + "quote", + "swc_macros_common", + "syn 1.0.109", +] + +[[package]] +name = "swc_css_compat" +version = "0.23.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7700ec829283ed5192eb3bed831bfdcef422c45232908c404bbb68fbcff680cd" +dependencies = [ + "bitflags 2.1.0", + "once_cell", + "serde", + "serde_json", + "swc_atoms", + "swc_common", + "swc_css_ast", + "swc_css_utils", + "swc_css_visit", +] + +[[package]] +name = "swc_css_modules" +version = "0.25.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5faa4cd3f387ff773c58b495325fc1ee1da7edfd034ea250a75b28f6141ef740" +dependencies = [ + "rustc-hash", + "serde", + "swc_atoms", + "swc_common", + "swc_css_ast", + "swc_css_codegen", + "swc_css_parser", + "swc_css_visit", 
+]
+
+[[package]]
+name = "swc_css_parser"
+version = "0.146.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cd4049a02caf0f814b9517acef8277f74b427b12fe164b311f25ce043c4b0d81"
+dependencies = [
+ "bitflags 2.1.0",
+ "lexical",
+ "serde",
+ "swc_atoms",
+ "swc_common",
+ "swc_css_ast",
+]
+
+[[package]]
+name = "swc_css_prefixer"
+version = "0.149.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e90d802abc1fdcbdf87273a34a08f992bb14b3a09db210b662cf04508988cbc5"
+dependencies = [
+ "once_cell",
+ "preset_env_base",
+ "serde",
+ "serde_json",
+ "swc_atoms",
+ "swc_common",
+ "swc_css_ast",
+ "swc_css_utils",
+ "swc_css_visit",
+]
+
+[[package]]
+name = "swc_css_utils"
+version = "0.134.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cc2ea0e478c4a052e7defadc5b185902027ea0f4c365b385cadeb18eea68d4e1"
+dependencies = [
+ "once_cell",
+ "serde",
+ "serde_json",
+ "swc_atoms",
+ "swc_common",
+ "swc_css_ast",
+ "swc_css_visit",
+]
+
+[[package]]
+name = "swc_css_visit"
+version = "0.136.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8a2de449561e0983bd750fd7a70c0350a35ac76a1336eed3dde78e416db29632"
+dependencies = [
+ "serde",
+ "swc_atoms",
+ "swc_common",
+ "swc_css_ast",
+ "swc_visit",
+]
+
+[[package]]
+name = "swc_ecma_ast"
+version = "0.103.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5206233430a6763e2759da76cfc596a64250793f70cd94cace1f82fdcc4d702c"
+dependencies = [
+ "bitflags 2.1.0",
+ "is-macro",
+ "num-bigint",
+ "rkyv",
+ "scoped-tls",
+ "serde",
+ "string_enum",
+ "swc_atoms",
+ "swc_common",
+ "unicode-id",
+]
+
+[[package]]
+name = "swc_ecma_codegen"
+version = "0.138.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bd67009c5208689787f9fc59265deaeddad68d3e59da909e8db4615bb8c1d4a9"
+dependencies = [
+ "memchr",
+ "num-bigint",
+ "once_cell",
+ "rustc-hash",
+ "serde",
+ "sourcemap",
+ "swc_atoms",
+ "swc_common",
+ "swc_ecma_ast",
+ "swc_ecma_codegen_macros",
+ "tracing",
+]
+
+[[package]]
+name = "swc_ecma_codegen_macros"
+version = "0.7.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bf4ee0caee1018808d94ecd09490cb7affd3d504b19aa11c49238f5fc4b54901"
+dependencies = [
+ "pmutil",
+ "proc-macro2",
+ "quote",
+ "swc_macros_common",
+ "syn 1.0.109",
+]
+
+[[package]]
+name = "swc_ecma_ext_transforms"
+version = "0.102.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d3aece9e36d47a48d49301d358d2c22a52918e8447c7679e2622c9fb366fdc6d"
+dependencies = [
+ "phf",
+ "swc_atoms",
+ "swc_common",
+ "swc_ecma_ast",
+ "swc_ecma_utils",
+ "swc_ecma_visit",
+]
+
+[[package]]
+name = "swc_ecma_lints"
+version = "0.81.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "44afd61c03093baca78f20eb5698f8ac4470afe8891529dcf62ae9ebf6c8c617"
+dependencies = [
+ "ahash 0.7.6",
+ "auto_impl",
+ "dashmap",
+ "parking_lot",
+ "rayon",
+ "regex",
+ "serde",
+ "swc_atoms",
+ "swc_common",
+ "swc_config",
+ "swc_ecma_ast",
+ "swc_ecma_utils",
+ "swc_ecma_visit",
+]
+
+[[package]]
+name = "swc_ecma_loader"
+version = "0.43.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9af852e780a8c33a7215139164472eca9edd3b2c38fb60fb9dc85500239195c8"
+dependencies = [
+ "ahash 0.7.6",
+ "anyhow",
+ "dashmap",
+ "lru",
+ "normpath",
+ "once_cell",
+ "parking_lot",
+ "path-clean",
+ "pathdiff",
+ "serde",
"serde_json", + "swc_cached", + "swc_common", + "tracing", +] + +[[package]] +name = "swc_ecma_minifier" +version = "0.180.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32201be32d654588d34b6b94da1472de0e3274ccd13bd1628e3cf5f8159d0caf" +dependencies = [ + "ahash 0.7.6", + "arrayvec 0.7.2", + "indexmap", + "num-bigint", + "num_cpus", + "once_cell", + "parking_lot", + "radix_fmt", + "rayon", + "regex", + "rustc-hash", + "ryu-js", + "serde", + "serde_json", + "swc_atoms", + "swc_cached", + "swc_common", + "swc_config", + "swc_ecma_ast", + "swc_ecma_codegen", + "swc_ecma_parser", + "swc_ecma_transforms_base", + "swc_ecma_transforms_optimization", + "swc_ecma_usage_analyzer", + "swc_ecma_utils", + "swc_ecma_visit", + "swc_timer", + "tracing", +] + +[[package]] +name = "swc_ecma_parser" +version = "0.133.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5341644ae5e8d143db12c43dcd06d31138c0dbda7db9db31ce751f0a46a58575" +dependencies = [ + "either", + "lexical", + "num-bigint", + "serde", + "smallvec", + "smartstring", + "stacker", + "swc_atoms", + "swc_common", + "swc_ecma_ast", + "tracing", + "typed-arena", +] + +[[package]] +name = "swc_ecma_preset_env" +version = "0.194.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2bd967099e0738d04136e8a877d8acf7e039b8e71ecffba725a9046516712a6f" +dependencies = [ + "ahash 0.7.6", + "anyhow", + "dashmap", + "indexmap", + "once_cell", + "preset_env_base", + "semver 1.0.17", + "serde", + "serde_json", + "st-map", + "string_enum", + "swc_atoms", + "swc_common", + "swc_ecma_ast", + "swc_ecma_transforms", + "swc_ecma_utils", + "swc_ecma_visit", +] + +[[package]] +name = "swc_ecma_quote_macros" +version = "0.44.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d076f99703cba1028030dee5f97fe1b21741a3dd6bfef9f2367770f402c351a0" +dependencies = [ + "anyhow", + "pmutil", + "proc-macro2", + "quote", + "swc_atoms", + "swc_common", + "swc_ecma_ast", + "swc_ecma_parser", + "swc_macros_common", + "syn 1.0.109", +] + +[[package]] +name = "swc_ecma_testing" +version = "0.20.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "25198f96ef93c4bb4cc8fa13c9b22a018cf2c0c7609ee91f7abc7968ebc2e2df" +dependencies = [ + "anyhow", + "hex", + "sha-1", + "tracing", +] + +[[package]] +name = "swc_ecma_transforms" +version = "0.217.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a3eab15d252a9e8aaa3344bb9f28781d8c0b52ded19c873d29e741b8b2291803" +dependencies = [ + "swc_atoms", + "swc_common", + "swc_ecma_ast", + "swc_ecma_transforms_base", + "swc_ecma_transforms_compat", + "swc_ecma_transforms_module", + "swc_ecma_transforms_optimization", + "swc_ecma_transforms_proposal", + "swc_ecma_transforms_react", + "swc_ecma_transforms_typescript", + "swc_ecma_utils", + "swc_ecma_visit", +] + +[[package]] +name = "swc_ecma_transforms_base" +version = "0.126.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "907d73e5a76ddb21f33a3f5907c46c9df968ee52ba0d9f8d6ade43e8f06233a3" +dependencies = [ + "better_scoped_tls", + "bitflags 2.1.0", + "indexmap", + "once_cell", + "phf", + "rayon", + "rustc-hash", + "serde", + "smallvec", + "swc_atoms", + "swc_common", + "swc_ecma_ast", + "swc_ecma_parser", + "swc_ecma_utils", + "swc_ecma_visit", + "tracing", +] + +[[package]] +name = "swc_ecma_transforms_classes" +version = "0.115.10" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "15722bc7895b17b32d114caed125abeca6d556e0afbf6e8c55b43ab6d4448afa" +dependencies = [ + "swc_atoms", + "swc_common", + "swc_ecma_ast", + "swc_ecma_transforms_base", + "swc_ecma_utils", + "swc_ecma_visit", +] + +[[package]] +name = "swc_ecma_transforms_compat" +version = "0.152.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b5185a3ff6fc272cd4fd180265ad621b26b3c74c60b878bf10850a2d9f81eee1" +dependencies = [ + "ahash 0.7.6", + "arrayvec 0.7.2", + "indexmap", + "is-macro", + "num-bigint", + "rayon", + "serde", + "smallvec", + "swc_atoms", + "swc_common", + "swc_config", + "swc_ecma_ast", + "swc_ecma_transforms_base", + "swc_ecma_transforms_classes", + "swc_ecma_transforms_macros", + "swc_ecma_utils", + "swc_ecma_visit", + "swc_trace_macro", + "tracing", +] + +[[package]] +name = "swc_ecma_transforms_macros" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "984d5ac69b681fc5438f9abf82b0fda34fe04e119bc75f8213b7e01128c7c9a2" +dependencies = [ + "pmutil", + "proc-macro2", + "quote", + "swc_macros_common", + "syn 1.0.109", +] + +[[package]] +name = "swc_ecma_transforms_module" +version = "0.169.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa808a92ea3bc645d2021c63d54c4a440b43a2686bc6f3344f87c9ce9cf888ff" +dependencies = [ + "Inflector", + "ahash 0.7.6", + "anyhow", + "bitflags 2.1.0", + "indexmap", + "is-macro", + "path-clean", + "pathdiff", + "regex", + "serde", + "swc_atoms", + "swc_cached", + "swc_common", + "swc_ecma_ast", + "swc_ecma_loader", + "swc_ecma_parser", + "swc_ecma_transforms_base", + "swc_ecma_utils", + "swc_ecma_visit", + "tracing", +] + +[[package]] +name = "swc_ecma_transforms_optimization" +version = "0.186.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d73383687b4382101bc548510747fc84b790d22390833c4f3a83655e593b2a0" +dependencies = [ + "ahash 0.7.6", + "dashmap", + "indexmap", + "once_cell", + "petgraph", + "rayon", + "rustc-hash", + "serde_json", + "swc_atoms", + "swc_common", + "swc_ecma_ast", + "swc_ecma_parser", + "swc_ecma_transforms_base", + "swc_ecma_transforms_macros", + "swc_ecma_utils", + "swc_ecma_visit", + "swc_fast_graph", + "tracing", +] + +[[package]] +name = "swc_ecma_transforms_proposal" +version = "0.160.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eaf64211f97397a84978a0cbdd28227fddf9c804bf3479fad8983c3527ba5b39" +dependencies = [ + "either", + "rustc-hash", + "serde", + "smallvec", + "swc_atoms", + "swc_common", + "swc_ecma_ast", + "swc_ecma_transforms_base", + "swc_ecma_transforms_classes", + "swc_ecma_transforms_macros", + "swc_ecma_utils", + "swc_ecma_visit", +] + +[[package]] +name = "swc_ecma_transforms_react" +version = "0.172.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6aa62447c537a9afb7bad7b4bc74599b3d7b8dcfe9e62035aab08f5e8a610f3f" +dependencies = [ + "ahash 0.7.6", + "base64 0.13.1", + "dashmap", + "indexmap", + "once_cell", + "rayon", + "serde", + "sha-1", + "string_enum", + "swc_atoms", + "swc_common", + "swc_config", + "swc_ecma_ast", + "swc_ecma_parser", + "swc_ecma_transforms_base", + "swc_ecma_transforms_macros", + "swc_ecma_utils", + "swc_ecma_visit", +] + +[[package]] +name = "swc_ecma_transforms_testing" +version = "0.129.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"02fe6a8a1290bbd4fdd9cc1554e3458ffb14e2131936f065bed0a684f89aaf0c" +dependencies = [ + "ansi_term", + "anyhow", + "base64 0.13.1", + "hex", + "serde", + "serde_json", + "sha-1", + "sourcemap", + "swc_common", + "swc_ecma_ast", + "swc_ecma_codegen", + "swc_ecma_parser", + "swc_ecma_testing", + "swc_ecma_transforms_base", + "swc_ecma_utils", + "swc_ecma_visit", + "tempfile", + "testing", +] + +[[package]] +name = "swc_ecma_transforms_typescript" +version = "0.176.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a7376e8b0a1a575c66ba4cc7d14158d4b59a4e605bb2ee1d0dbd6e1ea15f74a1" +dependencies = [ + "serde", + "swc_atoms", + "swc_common", + "swc_ecma_ast", + "swc_ecma_transforms_base", + "swc_ecma_transforms_react", + "swc_ecma_utils", + "swc_ecma_visit", +] + +[[package]] +name = "swc_ecma_usage_analyzer" +version = "0.12.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "784ba3ec77f8c18e251b613074ee7da678e0a30dee37856b70d9a7c7ac636d19" +dependencies = [ + "ahash 0.7.6", + "indexmap", + "rustc-hash", + "swc_atoms", + "swc_common", + "swc_ecma_ast", + "swc_ecma_utils", + "swc_ecma_visit", + "swc_timer", + "tracing", +] + +[[package]] +name = "swc_ecma_utils" +version = "0.116.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba86ab0cf8c64043dcc8ac5cb4f438f6e3666ddac58587925ddc2af6be2ed5d1" +dependencies = [ + "indexmap", + "num_cpus", + "once_cell", + "rayon", + "rustc-hash", + "swc_atoms", + "swc_common", + "swc_ecma_ast", + "swc_ecma_visit", + "tracing", + "unicode-id", +] + +[[package]] +name = "swc_ecma_visit" +version = "0.89.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ecb23a4a1d77997f54e9b3a4e68d1441e5e8a25ad1a476bbb3b5a620d6562a86" +dependencies = [ + "num-bigint", + "swc_atoms", + "swc_common", + "swc_ecma_ast", + "swc_visit", + "tracing", +] + +[[package]] +name = "swc_emotion" +version = "0.30.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d29e57e3f9bfd1586ae0b26ce7a24c7bc5e994cc574760add93d350ae3fb904" +dependencies = [ + "base64 0.13.1", + "byteorder", + "fxhash", + "once_cell", + "radix_fmt", + "regex", + "serde", + "sourcemap", + "swc_core", + "tracing", +] + +[[package]] +name = "swc_eq_ignore_macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c20468634668c2bbab581947bb8c75c97158d5a6959f4ba33df20983b20b4f6" +dependencies = [ + "pmutil", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "swc_error_reporters" +version = "0.15.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf37dae113d98ec257727dce3d746254a2731abc56e609a6f2efa7cf57806705" +dependencies = [ + "anyhow", + "miette", + "once_cell", + "parking_lot", + "swc_common", +] + +[[package]] +name = "swc_fast_graph" +version = "0.19.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "992a92e087f7b2dc9aa626a6bee26530abbffba3572adf3894ccb55d2480f596" +dependencies = [ + "indexmap", + "petgraph", + "rustc-hash", + "swc_common", +] + +[[package]] +name = "swc_graph_analyzer" +version = "0.20.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "66f7d6f4ec40acd00a7620c1627f926947c89d8a6c0a9728120b2396ee7eaa12" +dependencies = [ + "ahash 0.7.6", + "auto_impl", + "petgraph", + "swc_fast_graph", + "tracing", +] + +[[package]] +name = "swc_macros_common" +version = "0.3.7" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "3e582c3e3c2269238524923781df5be49e011dbe29cf7683a2215d600a562ea6" +dependencies = [ + "pmutil", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "swc_node_base" +version = "0.5.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6065892f97ac3f42280d0f3eadc351aeff552e8de4d459604bcd9c56eb799ade" +dependencies = [ + "mimalloc-rust", + "tikv-jemallocator", +] + +[[package]] +name = "swc_node_comments" +version = "0.18.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e723c3796eb5c3e46e68023062facc665c9ccf71df4907d829739388b7d919c" +dependencies = [ + "ahash 0.7.6", + "dashmap", + "swc_atoms", + "swc_common", +] + +[[package]] +name = "swc_nodejs_common" +version = "0.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c00871ef9d32aad437acced2eeffc96a97c5f2776bb90ad6497968a8d626b04" +dependencies = [ + "anyhow", + "napi", + "serde", + "serde_json", + "tracing", + "tracing-subscriber", +] + +[[package]] +name = "swc_plugin_proxy" +version = "0.32.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "01e5a04649cde6c40bd2b746ad7ac8195b9e3316048bf7263380e315907a6fd9" +dependencies = [ + "better_scoped_tls", + "rkyv", + "swc_common", + "swc_ecma_ast", + "swc_trace_macro", + "tracing", +] + +[[package]] +name = "swc_plugin_runner" +version = "0.94.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "533117dcf0b79c2943831147b8951251b675a4429e195ee133b375905ca87b1c" +dependencies = [ + "anyhow", + "enumset", + "once_cell", + "parking_lot", + "serde", + "serde_json", + "swc_common", + "swc_ecma_ast", + "swc_plugin_proxy", + "tracing", + "wasmer", + "wasmer-cache", + "wasmer-compiler-cranelift", + "wasmer-wasix", +] + +[[package]] +name = "swc_relay" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba2838477f82daa684ac765ea1a35920dc0daaf41f69cb4c667b9adf231e4de9" +dependencies = [ + "once_cell", + "regex", + "serde", + "serde_json", + "swc_common", + "swc_core", + "tracing", +] + +[[package]] +name = "swc_timer" +version = "0.19.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd3f57fbbb68655b2e2baadc47bfd96b9b25f179d8925b25b7a866a7ec71e041" +dependencies = [ + "tracing", +] + +[[package]] +name = "swc_trace_macro" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4795c8d23e0de62eef9cac0a20ae52429ee2ffc719768e838490f195b7d7267" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "swc_visit" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d1d5999f23421c8e21a0f2bc53a0b9e8244f3b421de89471561af2fbe40b9cca" +dependencies = [ + "either", + "swc_visit_macros", +] + +[[package]] +name = "swc_visit_macros" +version = "0.5.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebeed7eb0f545f48ad30f5aab314e5208b735bcea1d1464f26e20f06db904989" +dependencies = [ + "Inflector", + "pmutil", + "proc-macro2", + "quote", + "swc_macros_common", + "syn 1.0.109", +] + +[[package]] +name = "syn" +version = "1.0.109" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name 
= "syn" +version = "2.0.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a34fcf3e8b60f57e6a14301a2e916d323af98b0ea63c599441eec8558660c822" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "sync_wrapper" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" + +[[package]] +name = "sysinfo" +version = "0.27.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a902e9050fca0a5d6877550b769abd2bd1ce8c04634b941dbe2809735e1a1e33" +dependencies = [ + "cfg-if 1.0.0", + "core-foundation-sys", + "libc", + "ntapi", + "once_cell", + "rayon", + "winapi 0.3.9", +] + +[[package]] +name = "system-deps" +version = "6.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "555fc8147af6256f3931a36bb83ad0023240ce9cf2b319dec8236fd1f220b05f" +dependencies = [ + "cfg-expr", + "heck 0.4.1", + "pkg-config", + "toml 0.7.3", + "version-compare", +] + +[[package]] +name = "tabled" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56c3ee73732ffceaea7b8f6b719ce3bb17f253fa27461ffeaf568ebd0cdb4b85" +dependencies = [ + "ansi-str", + "papergrid", + "tabled_derive", + "unicode-width", +] + +[[package]] +name = "tabled_derive" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "beca1b4eaceb4f2755df858b88d9b9315b7ccfd1ffd0d7a48a52602301f01a57" +dependencies = [ + "heck 0.4.1", + "proc-macro-error", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "target-lexicon" +version = "0.12.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ae9980cab1db3fceee2f6c6f643d5d8de2997c58ee8d25fb0cc8a9e9e7348e5" + +[[package]] +name = "tempdir" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "15f2b5fb00ccdf689e0149d1b1b3c03fead81c2b37735d812fa8bddbbf41b6d8" +dependencies = [ + "rand 0.4.6", + "remove_dir_all", +] + +[[package]] +name = "tempfile" +version = "3.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9fbec84f381d5795b08656e4912bec604d162bff9291d6189a78f4c8ab87998" +dependencies = [ + "cfg-if 1.0.0", + "fastrand", + "redox_syscall 0.3.5", + "rustix 0.37.11", + "windows-sys 0.45.0", +] + +[[package]] +name = "term_size" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e4129646ca0ed8f45d09b929036bafad5377103edd06e50bf574b353d2b08d9" +dependencies = [ + "libc", + "winapi 0.3.9", +] + +[[package]] +name = "termcolor" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be55cf8942feac5c765c2c993422806843c9a9a45d4d5c407ad6dd2ea95eb9b6" +dependencies = [ + "winapi-util", +] + +[[package]] +name = "terminal_size" +version = "0.1.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "633c1a546cee861a1a6d0dc69ebeca693bf4296661ba7852b9d21d159e0506df" +dependencies = [ + "libc", + "winapi 0.3.9", +] + +[[package]] +name = "terminal_size" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e6bf6f19e9f8ed8d4048dc22981458ebcf406d67e94cd422e5ecd73d63b3237" +dependencies = [ + "rustix 0.37.11", + "windows-sys 0.48.0", +] + +[[package]] +name = "termios" +version = "0.3.3" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "411c5bf740737c7918b8b1fe232dca4dc9f8e754b8ad5e20966814001ed0ac6b" +dependencies = [ + "libc", +] + +[[package]] +name = "termtree" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95059e91184749cb66be6dc994f67f182b6d897cb3df74a5bf66b5e709295fd8" + +[[package]] +name = "test-case" +version = "3.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a1d6e7bde536b0412f20765b76e921028059adfd1b90d8974d33fd3c91b25df" +dependencies = [ + "test-case-macros", +] + +[[package]] +name = "test-case-core" +version = "3.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72dc21b5887f4032c4656502d085dc28f2afbb686f25f216472bb0526f4b1b88" +dependencies = [ + "cfg-if 1.0.0", + "proc-macro-error", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "test-case-macros" +version = "3.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3786898e0be151a96f730fd529b0e8a10f5990fa2a7ea14e37ca27613c05190" +dependencies = [ + "proc-macro-error", + "proc-macro2", + "quote", + "syn 1.0.109", + "test-case-core", +] + +[[package]] +name = "testing" +version = "0.33.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3be7349994b23b7d91beddbfb6d63907651a9d2f6b4e4fa8d2ec65b41094968a" +dependencies = [ + "ansi_term", + "difference", + "once_cell", + "pretty_assertions", + "regex", + "serde_json", + "swc_common", + "swc_error_reporters", + "testing_macros", + "tracing", + "tracing-subscriber", +] + +[[package]] +name = "testing_macros" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a5315a85a7262fe1a8898890b616de62c152dd43cb5974752c0927aaabe48891" +dependencies = [ + "anyhow", + "glob", + "once_cell", + "pmutil", + "proc-macro2", + "quote", + "regex", + "relative-path", + "syn 1.0.109", +] + +[[package]] +name = "textwrap" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d326610f408c7a4eb6f51c37c330e496b08506c9457c9d34287ecc38809fb060" +dependencies = [ + "unicode-width", +] + +[[package]] +name = "textwrap" +version = "0.15.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b7b3e525a49ec206798b40326a44121291b530c963cfb01018f63e135bac543d" +dependencies = [ + "smawk", + "unicode-linebreak", + "unicode-width", +] + +[[package]] +name = "textwrap" +version = "0.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "222a222a5bfe1bba4a77b45ec488a741b3cb8872e5e499451fd7d0129c9c7c3d" + +[[package]] +name = "thiserror" +version = "1.0.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "978c9a314bd8dc99be594bc3c175faaa9794be04a5a5e153caba6915336cebac" +dependencies = [ + "thiserror-impl", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f9456a42c5b0d803c8cd86e73dd7cc9edd429499f37a3550d286d5e86720569f" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.15", +] + +[[package]] +name = "thread_local" +version = "1.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3fdd6f064ccff2d6567adcb3873ca630700f00b5ad3f060c25b5dcfd9a4ce152" +dependencies = [ + "cfg-if 1.0.0", + "once_cell", +] + +[[package]] +name = "tikv-jemalloc-sys" +version = "0.4.3+5.2.1-patched.2" 
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a1792ccb507d955b46af42c123ea8863668fae24d03721e40cad6a41773dbb49"
+dependencies = [
+ "cc",
+ "fs_extra",
+ "libc",
+]
+
+[[package]]
+name = "tikv-jemallocator"
+version = "0.4.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a5b7bcecfafe4998587d636f9ae9d55eb9d0499877b88757767c346875067098"
+dependencies = [
+ "libc",
+ "tikv-jemalloc-sys",
+]
+
+[[package]]
+name = "time"
+version = "0.1.45"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1b797afad3f312d1c66a56d11d0316f916356d11bd158fbc6ca6389ff6bf805a"
+dependencies = [
+ "libc",
+ "wasi 0.10.0+wasi-snapshot-preview1",
+ "winapi 0.3.9",
+]
+
+[[package]]
+name = "time"
+version = "0.2.27"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4752a97f8eebd6854ff91f1c1824cd6160626ac4bd44287f7f4ea2035a02a242"
+dependencies = [
+ "const_fn",
+ "libc",
+ "standback",
+ "stdweb",
+ "time-macros 0.1.1",
+ "version_check",
+ "winapi 0.3.9",
+]
+
+[[package]]
+name = "time"
+version = "0.3.20"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cd0cbfecb4d19b5ea75bb31ad904eb5b9fa13f21079c3b92017ebdf4999a5890"
+dependencies = [
+ "itoa",
+ "serde",
+ "time-core",
+ "time-macros 0.2.8",
+]
+
+[[package]]
+name = "time-core"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2e153e1f1acaef8acc537e68b44906d2db6436e2b35ac2c6b42640fff91f00fd"
+
+[[package]]
+name = "time-macros"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "957e9c6e26f12cb6d0dd7fc776bb67a706312e7299aed74c8dd5b17ebb27e2f1"
+dependencies = [
+ "proc-macro-hack",
+ "time-macros-impl",
+]
+
+[[package]]
+name = "time-macros"
+version = "0.2.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fd80a657e71da814b8e5d60d3374fc6d35045062245d80224748ae522dd76f36"
+dependencies = [
+ "time-core",
+]
+
+[[package]]
+name = "time-macros-impl"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fd3c141a1b43194f3f56a1411225df8646c55781d5f26db825b3d98507eb482f"
+dependencies = [
+ "proc-macro-hack",
+ "proc-macro2",
+ "quote",
+ "standback",
+ "syn 1.0.109",
+]
+
+[[package]]
+name = "tiny-gradient"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8063c572fcc935676f1e01615f201f355a053e88525ec41c1b0c4884ce104847"
+dependencies = [
+ "libm",
+]
+
+[[package]]
+name = "tinytemplate"
+version = "1.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "be4d6b5f19ff7664e8c98d03e2139cb510db9b0a60b55f8e8709b689d939b6bc"
+dependencies = [
+ "serde",
+ "serde_json",
+]
+
+[[package]]
+name = "tinyvec"
+version = "1.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50"
+dependencies = [
+ "tinyvec_macros",
+]
+
+[[package]]
+name = "tinyvec_macros"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
+
+[[package]]
+name = "tokio"
+version = "1.27.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d0de47a4eecbe11f498978a9b29d792f0d2692d1dd003650c24c76510e3bc001"
+dependencies = [
+ "autocfg",
+ "bytes",
+ "libc",
+ "mio 0.8.6",
"num_cpus", + "parking_lot", + "pin-project-lite", + "signal-hook-registry", + "socket2", + "tokio-macros", + "tracing", + "windows-sys 0.45.0", +] + +[[package]] +name = "tokio-io-timeout" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30b74022ada614a1b4834de765f9bb43877f910cc8ce4be40e89042c9223a8bf" +dependencies = [ + "pin-project-lite", + "tokio", +] + +[[package]] +name = "tokio-macros" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "61a573bdc87985e9d6ddeed1b3d864e8a302c847e40d647746df2f1de209d1ce" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.15", +] + +[[package]] +name = "tokio-native-tls" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2" +dependencies = [ + "native-tls", + "tokio", +] + +[[package]] +name = "tokio-rustls" +version = "0.23.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c43ee83903113e03984cb9e5cebe6c04a5116269e900e3ddba8f068a62adda59" +dependencies = [ + "rustls", + "tokio", + "webpki", +] + +[[package]] +name = "tokio-stream" +version = "0.1.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fb52b74f05dbf495a8fba459fdc331812b96aa086d9eb78101fa0d4569c3313" +dependencies = [ + "futures-core", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "tokio-tungstenite" +version = "0.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "54319c93411147bced34cb5609a80e0a8e44c5999c93903a81cd866630ec0bfd" +dependencies = [ + "futures-util", + "log", + "tokio", + "tungstenite 0.18.0", +] + +[[package]] +name = "tokio-util" +version = "0.7.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5427d89453009325de0d8f342c9490009f76e999cb7672d77e46267448f7e6b2" +dependencies = [ + "bytes", + "futures-core", + "futures-io", + "futures-sink", + "pin-project-lite", + "tokio", + "tracing", +] + +[[package]] +name = "toml" +version = "0.5.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4f7f0dd8d50a853a531c426359045b1998f04219d88799810762cd4ad314234" +dependencies = [ + "serde", +] + +[[package]] +name = "toml" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b403acf6f2bb0859c93c7f0d967cb4a75a7ac552100f9322faf64dc047669b21" +dependencies = [ + "serde", + "serde_spanned", + "toml_datetime", + "toml_edit", +] + +[[package]] +name = "toml_datetime" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3ab8ed2edee10b50132aed5f331333428b011c99402b5a534154ed15746f9622" +dependencies = [ + "serde", +] + +[[package]] +name = "toml_edit" +version = "0.19.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "239410c8609e8125456927e6707163a3b1fdb40561e4b803bc041f466ccfdc13" +dependencies = [ + "indexmap", + "serde", + "serde_spanned", + "toml_datetime", + "winnow", +] + +[[package]] +name = "tonic" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f219fad3b929bef19b1f86fbc0358d35daed8f2cac972037ac0dc10bbb8d5fb" +dependencies = [ + "async-stream", + "async-trait", + "axum", + "base64 0.13.1", + "bytes", + "futures-core", + "futures-util", + "h2", + "http", + "http-body", + "hyper", + "hyper-timeout", + "percent-encoding", + "pin-project", + 
"prost", + "prost-derive", + "tokio", + "tokio-stream", + "tokio-util", + "tower", + "tower-layer", + "tower-service", + "tracing", + "tracing-futures", +] + +[[package]] +name = "tonic-build" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5bf5e9b9c0f7e0a7c027dcfaba7b2c60816c7049171f679d99ee2ff65d0de8c4" +dependencies = [ + "prettyplease", + "proc-macro2", + "prost-build", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "tower" +version = "0.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c" +dependencies = [ + "futures-core", + "futures-util", + "indexmap", + "pin-project", + "pin-project-lite", + "rand 0.8.5", + "slab", + "tokio", + "tokio-util", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "tower-layer" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c20c8dbed6283a09604c3e69b4b7eeb54e298b8a600d4d5ecb5ad39de609f1d0" + +[[package]] +name = "tower-service" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52" + +[[package]] +name = "tracing" +version = "0.1.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ce8c33a8d48bd45d624a6e523445fd21ec13d3653cd51f681abf67418f54eb8" +dependencies = [ + "cfg-if 1.0.0", + "log", + "pin-project-lite", + "tracing-attributes", + "tracing-core", +] + +[[package]] +name = "tracing-attributes" +version = "0.1.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4017f8f45139870ca7e672686113917c71c7a6e02d4924eda67186083c03081a" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "tracing-core" +version = "0.1.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24eb03ba0eab1fd845050058ce5e616558e8f8d8fca633e6b163fe25c797213a" +dependencies = [ + "once_cell", + "valuable", +] + +[[package]] +name = "tracing-futures" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97d095ae15e245a057c8e8451bab9b3ee1e1f68e9ba2b4fbc18d0ac5237835f2" +dependencies = [ + "pin-project", + "tracing", +] + +[[package]] +name = "tracing-log" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78ddad33d2d10b1ed7eb9d1f518a5674713876e97e5bb9b7345a7984fbb4f922" +dependencies = [ + "lazy_static", + "log", + "tracing-core", +] + +[[package]] +name = "tracing-subscriber" +version = "0.3.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a6176eae26dd70d0c919749377897b54a9276bd7061339665dd68777926b5a70" +dependencies = [ + "matchers", + "nu-ansi-term", + "once_cell", + "regex", + "sharded-slab", + "smallvec", + "thread_local", + "tracing", + "tracing-core", + "tracing-log", +] + +[[package]] +name = "triomphe" +version = "0.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1ee9bd9239c339d714d657fac840c6d2a4f9c45f4f9ec7b0975113458be78db" +dependencies = [ + "serde", + "stable_deref_trait", +] + +[[package]] +name = "try-lock" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3528ecfd12c466c6f163363caf2d02a71161dd5e1cc6ae7b34207ea2d42d81ed" + +[[package]] +name = "ttf-parser" +version = "0.15.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b3e06c9b9d80ed6b745c7159c40b311ad2916abb34a49e9be2653b90db0d8dd" + +[[package]] +name = "tungstenite" +version = "0.17.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e27992fd6a8c29ee7eef28fc78349aa244134e10ad447ce3b9f0ac0ed0fa4ce0" +dependencies = [ + "base64 0.13.1", + "byteorder", + "bytes", + "http", + "httparse", + "log", + "rand 0.8.5", + "sha-1", + "thiserror", + "url", + "utf-8", +] + +[[package]] +name = "tungstenite" +version = "0.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30ee6ab729cd4cf0fd55218530c4522ed30b7b6081752839b68fcec8d0960788" +dependencies = [ + "base64 0.13.1", + "byteorder", + "bytes", + "http", + "httparse", + "log", + "rand 0.8.5", + "sha1 0.10.5", + "thiserror", + "url", + "utf-8", +] + +[[package]] +name = "turbo" +version = "0.1.0" +dependencies = [ + "anyhow", + "assert_cmd", + "build-target", + "clap 4.1.11", + "clap_complete", + "command-group", + "dunce", + "itertools", + "log", + "pretty_assertions", + "serde", + "serde_json", + "serde_yaml 0.9.19", + "tiny-gradient", + "tokio-util", + "turborepo-lib", + "winapi 0.3.9", +] + +[[package]] +name = "turbo-binding" +version = "0.1.0" +dependencies = [ + "auto-hash-map", + "mdxjs", + "modularize_imports", + "node-file-trace", + "styled_components", + "styled_jsx", + "swc-ast-explorer", + "swc_core", + "swc_emotion", + "swc_relay", + "testing", + "turbo-malloc", + "turbo-tasks", + "turbo-tasks-build", + "turbo-tasks-bytes", + "turbo-tasks-env", + "turbo-tasks-fetch", + "turbo-tasks-fs", + "turbo-tasks-hash", + "turbo-tasks-macros", + "turbo-tasks-macros-shared", + "turbo-tasks-memory", + "turbo-tasks-testing", + "turbo-updater", + "turbopack", + "turbopack-bench", + "turbopack-cli-utils", + "turbopack-core", + "turbopack-create-test-app", + "turbopack-css", + "turbopack-dev", + "turbopack-dev-server", + "turbopack-ecmascript", + "turbopack-env", + "turbopack-image", + "turbopack-json", + "turbopack-mdx", + "turbopack-node", + "turbopack-static", + "turbopack-swc-utils", + "turbopack-test-utils", + "turbopack-tests", +] + +[[package]] +name = "turbo-malloc" +version = "0.1.0" +dependencies = [ + "mimalloc", +] + +[[package]] +name = "turbo-tasks" +version = "0.1.0" +dependencies = [ + "anyhow", + "auto-hash-map", + "concurrent-queue", + "dashmap", + "erased-serde", + "event-listener", + "futures", + "indexmap", + "mopa", + "nohash-hasher", + "once_cell", + "parking_lot", + "pin-project-lite", + "regex", + "serde", + "serde_json", + "serde_regex", + "stable_deref_trait", + "thiserror", + "tokio", + "turbo-tasks-build", + "turbo-tasks-hash", + "turbo-tasks-macros", +] + +[[package]] +name = "turbo-tasks-build" +version = "0.1.0" +dependencies = [ + "anyhow", + "cargo-lock", + "glob", + "syn 1.0.109", + "turbo-tasks-macros-shared", +] + +[[package]] +name = "turbo-tasks-bytes" +version = "0.1.0" +dependencies = [ + "anyhow", + "bytes", + "futures", + "serde", + "serde_bytes", + "serde_test", + "tokio", + "turbo-tasks", + "turbo-tasks-build", +] + +[[package]] +name = "turbo-tasks-env" +version = "0.1.0" +dependencies = [ + "anyhow", + "dotenvy", + "indexmap", + "serde", + "turbo-tasks", + "turbo-tasks-build", + "turbo-tasks-fs", +] + +[[package]] +name = "turbo-tasks-fetch" +version = "0.1.0" +dependencies = [ + "anyhow", + "httpmock", + "indexmap", + "lazy_static", + "reqwest", + "serde", + "tokio", + "turbo-tasks", + "turbo-tasks-build", + "turbo-tasks-fs", + 
"turbo-tasks-memory", + "turbo-tasks-testing", + "turbopack-core", +] + +[[package]] +name = "turbo-tasks-fs" +version = "0.1.0" +dependencies = [ + "anyhow", + "auto-hash-map", + "bitflags 1.3.2", + "bytes", + "concurrent-queue", + "criterion", + "dashmap", + "dunce", + "futures", + "futures-retry", + "include_dir", + "indexmap", + "jsonc-parser", + "mime", + "notify 4.0.17", + "parking_lot", + "rstest", + "serde", + "serde_json", + "serde_path_to_error", + "sha2", + "tempfile", + "tokio", + "turbo-tasks", + "turbo-tasks-build", + "turbo-tasks-hash", + "turbo-tasks-memory", +] + +[[package]] +name = "turbo-tasks-hash" +version = "0.1.0" +dependencies = [ + "base16", + "hex", + "md4", + "turbo-tasks-macros", + "twox-hash", +] + +[[package]] +name = "turbo-tasks-macros" +version = "0.1.0" +dependencies = [ + "anyhow", + "convert_case 0.6.0", + "proc-macro-error", + "proc-macro2", + "quote", + "syn 1.0.109", + "turbo-tasks-macros-shared", +] + +[[package]] +name = "turbo-tasks-macros-shared" +version = "0.1.0" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "turbo-tasks-memory" +version = "0.1.0" +dependencies = [ + "anyhow", + "auto-hash-map", + "concurrent-queue", + "criterion", + "dashmap", + "lazy_static", + "nohash-hasher", + "num_cpus", + "once_cell", + "parking_lot", + "priority-queue", + "rustc-hash", + "serde", + "tokio", + "turbo-malloc", + "turbo-tasks", + "turbo-tasks-build", + "turbo-tasks-hash", + "turbo-tasks-testing", +] + +[[package]] +name = "turbo-tasks-testing" +version = "0.1.0" +dependencies = [ + "anyhow", + "auto-hash-map", + "lazy_static", + "tokio", + "turbo-tasks", +] + +[[package]] +name = "turbo-updater" +version = "0.1.0" +dependencies = [ + "atty", + "console", + "reqwest", + "semver 1.0.17", + "serde", + "thiserror", + "update-informer", +] + +[[package]] +name = "turbopack" +version = "0.1.0" +dependencies = [ + "anyhow", + "async-recursion", + "criterion", + "difference", + "futures", + "indexmap", + "lazy_static", + "regex", + "rstest", + "rstest_reuse", + "serde", + "serde_json", + "tokio", + "turbo-malloc", + "turbo-tasks", + "turbo-tasks-build", + "turbo-tasks-fs", + "turbo-tasks-memory", + "turbopack-core", + "turbopack-css", + "turbopack-ecmascript", + "turbopack-env", + "turbopack-image", + "turbopack-json", + "turbopack-mdx", + "turbopack-node", + "turbopack-static", +] + +[[package]] +name = "turbopack-bench" +version = "0.1.0" +dependencies = [ + "anyhow", + "chromiumoxide", + "clap 4.1.11", + "console-subscriber", + "criterion", + "dunce", + "futures", + "mime", + "nix", + "once_cell", + "owo-colors", + "parking_lot", + "portpicker", + "rand 0.8.5", + "regex", + "serde", + "serde_json", + "tempfile", + "tokio", + "tungstenite 0.17.3", + "turbo-tasks", + "turbo-tasks-testing", + "turbopack-create-test-app", + "url", + "webbrowser", +] + +[[package]] +name = "turbopack-cli" +version = "0.1.0" +dependencies = [ + "anyhow", + "clap 4.1.11", + "console-subscriber", + "criterion", + "dunce", + "futures", + "mime", + "owo-colors", + "regex", + "serde", + "tokio", + "turbo-malloc", + "turbo-tasks", + "turbo-tasks-build", + "turbo-tasks-env", + "turbo-tasks-fetch", + "turbo-tasks-fs", + "turbo-tasks-memory", + "turbopack", + "turbopack-bench", + "turbopack-cli-utils", + "turbopack-core", + "turbopack-dev", + "turbopack-dev-server", + "turbopack-env", + "turbopack-node", + "webbrowser", +] + +[[package]] +name = "turbopack-cli-utils" +version = "0.1.0" +dependencies = [ + "anyhow", + "clap 4.1.11", + "crossterm 
0.26.1", + "owo-colors", + "serde", + "turbo-tasks", + "turbo-tasks-build", + "turbo-tasks-fs", + "turbopack-core", + "turbopack-ecmascript", +] + +[[package]] +name = "turbopack-core" +version = "0.1.0" +dependencies = [ + "anyhow", + "async-trait", + "auto-hash-map", + "browserslist-rs", + "futures", + "indexmap", + "lazy_static", + "patricia_tree", + "qstring", + "regex", + "rstest", + "serde", + "serde_json", + "serde_qs", + "sourcemap", + "swc_core", + "tokio", + "turbo-tasks", + "turbo-tasks-build", + "turbo-tasks-env", + "turbo-tasks-fs", + "turbo-tasks-hash", +] + +[[package]] +name = "turbopack-create-test-app" +version = "0.1.0" +dependencies = [ + "anyhow", + "clap 4.1.11", + "indoc", + "pathdiff", + "serde_json", + "tempfile", +] + +[[package]] +name = "turbopack-css" +version = "0.1.0" +dependencies = [ + "anyhow", + "async-trait", + "indexmap", + "indoc", + "once_cell", + "regex", + "serde", + "swc_core", + "turbo-tasks", + "turbo-tasks-build", + "turbo-tasks-fs", + "turbo-tasks-hash", + "turbopack-core", + "turbopack-ecmascript", + "turbopack-swc-utils", +] + +[[package]] +name = "turbopack-dev" +version = "0.1.0" +dependencies = [ + "anyhow", + "indexmap", + "indoc", + "serde", + "serde_json", + "serde_qs", + "turbo-tasks", + "turbo-tasks-build", + "turbo-tasks-fs", + "turbo-tasks-hash", + "turbopack", + "turbopack-core", + "turbopack-css", + "turbopack-ecmascript", +] + +[[package]] +name = "turbopack-dev-server" +version = "0.1.0" +dependencies = [ + "anyhow", + "async-compression", + "futures", + "hyper", + "hyper-tungstenite", + "indexmap", + "mime", + "mime_guess", + "once_cell", + "parking_lot", + "pin-project-lite", + "serde", + "serde_json", + "serde_qs", + "socket2", + "tokio", + "tokio-stream", + "tokio-util", + "turbo-tasks", + "turbo-tasks-build", + "turbo-tasks-bytes", + "turbo-tasks-fs", + "turbo-tasks-hash", + "turbopack-cli-utils", + "turbopack-core", + "turbopack-ecmascript", + "urlencoding", +] + +[[package]] +name = "turbopack-ecmascript" +version = "0.1.0" +dependencies = [ + "anyhow", + "async-trait", + "criterion", + "indexmap", + "indoc", + "lazy_static", + "num-bigint", + "num-traits", + "once_cell", + "parking_lot", + "petgraph", + "pin-project-lite", + "regex", + "rstest", + "rustc-hash", + "serde", + "serde_json", + "serde_qs", + "styled_components", + "styled_jsx", + "swc_core", + "swc_emotion", + "tokio", + "tracing", + "turbo-tasks", + "turbo-tasks-build", + "turbo-tasks-fs", + "turbo-tasks-hash", + "turbo-tasks-memory", + "turbo-tasks-testing", + "turbopack-core", + "turbopack-swc-utils", + "url", +] + +[[package]] +name = "turbopack-env" +version = "0.1.0" +dependencies = [ + "anyhow", + "indexmap", + "serde", + "turbo-tasks", + "turbo-tasks-build", + "turbo-tasks-env", + "turbo-tasks-fs", + "turbopack-core", + "turbopack-ecmascript", +] + +[[package]] +name = "turbopack-image" +version = "0.1.0" +dependencies = [ + "anyhow", + "base64 0.21.0", + "image", + "indexmap", + "mime", + "serde", + "serde_with", + "turbo-tasks", + "turbo-tasks-build", + "turbo-tasks-fs", + "turbopack-core", +] + +[[package]] +name = "turbopack-json" +version = "0.1.0" +dependencies = [ + "anyhow", + "serde", + "serde_json", + "turbo-tasks", + "turbo-tasks-build", + "turbo-tasks-fs", + "turbopack-core", + "turbopack-ecmascript", +] + +[[package]] +name = "turbopack-mdx" +version = "0.1.0" +dependencies = [ + "anyhow", + "mdxjs", + "serde", + "turbo-tasks", + "turbo-tasks-build", + "turbo-tasks-fs", + "turbopack-core", + "turbopack-ecmascript", +] + +[[package]] +name 
= "turbopack-node" +version = "0.1.0" +dependencies = [ + "anyhow", + "async-stream", + "bytes", + "const_format", + "futures", + "futures-retry", + "indexmap", + "mime", + "once_cell", + "owo-colors", + "parking_lot", + "regex", + "serde", + "serde_json", + "serde_qs", + "tokio", + "turbo-tasks", + "turbo-tasks-build", + "turbo-tasks-bytes", + "turbo-tasks-env", + "turbo-tasks-fs", + "turbopack-cli-utils", + "turbopack-core", + "turbopack-dev-server", + "turbopack-ecmascript", + "url", + "urlencoding", +] + +[[package]] +name = "turbopack-static" +version = "0.1.0" +dependencies = [ + "anyhow", + "serde", + "turbo-tasks", + "turbo-tasks-build", + "turbo-tasks-fs", + "turbo-tasks-hash", + "turbopack-core", + "turbopack-css", + "turbopack-ecmascript", +] + +[[package]] +name = "turbopack-swc-utils" +version = "0.1.0" +dependencies = [ + "swc_core", + "turbo-tasks", + "turbo-tasks-build", + "turbopack-core", +] + +[[package]] +name = "turbopack-test-utils" +version = "0.1.0" +dependencies = [ + "anyhow", + "once_cell", + "similar", + "turbo-tasks", + "turbo-tasks-build", + "turbo-tasks-fs", + "turbo-tasks-hash", + "turbopack-core", +] + +[[package]] +name = "turbopack-tests" +version = "0.1.0" +dependencies = [ + "anyhow", + "dunce", + "once_cell", + "serde", + "serde_json", + "testing", + "tokio", + "turbo-tasks", + "turbo-tasks-build", + "turbo-tasks-env", + "turbo-tasks-fs", + "turbo-tasks-memory", + "turbopack", + "turbopack-core", + "turbopack-dev", + "turbopack-env", + "turbopack-test-utils", +] + +[[package]] +name = "turbopath" +version = "0.1.0" +dependencies = [ + "path-slash", + "serde", + "thiserror", +] + +[[package]] +name = "turborepo-api-client" +version = "0.1.0" +dependencies = [ + "anyhow", + "chrono", + "reqwest", + "rustc_version_runtime", + "serde", + "tokio", +] + +[[package]] +name = "turborepo-ffi" +version = "0.1.0" +dependencies = [ + "cbindgen", + "directories", + "prost", + "prost-build", + "thiserror", + "turborepo-lockfiles", + "turborepo-scm", +] + +[[package]] +name = "turborepo-lib" +version = "0.1.0" +dependencies = [ + "anyhow", + "assert_cmd", + "async-io", + "async-stream", + "atty", + "axum", + "axum-server", + "chrono", + "clap 4.1.11", + "clap_complete", + "command-group", + "config", + "console", + "const_format", + "ctrlc", + "dialoguer", + "dirs-next", + "dunce", + "env_logger 0.10.0", + "futures", + "glob-match 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", + "hex", + "hostname", + "humantime", + "indicatif", + "itertools", + "lazy_static", + "libc", + "log", + "notify 5.1.0", + "pidlock", + "port_scanner", + "pretty_assertions", + "prost", + "rand 0.8.5", + "reqwest", + "rustc_version_runtime", + "semver 1.0.17", + "serde", + "serde_json", + "serde_yaml 0.9.19", + "sha2", + "shared_child", + "sysinfo", + "tempfile", + "test-case", + "thiserror", + "tiny-gradient", + "tokio", + "tokio-stream", + "tokio-util", + "tonic", + "tonic-build", + "tower", + "turbo-updater", + "turbopath", + "turborepo-api-client", + "uds_windows", + "url", + "vercel-api-mock", + "webbrowser", +] + +[[package]] +name = "turborepo-lockfiles" +version = "0.1.0" +dependencies = [ + "serde", + "serde_json", + "thiserror", +] + +[[package]] +name = "turborepo-scm" +version = "0.1.0" +dependencies = [ + "anyhow", + "dunce", + "git2 0.16.1", + "tempfile", + "thiserror", + "turbopath", +] + +[[package]] +name = "twox-hash" +version = "1.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"97fee6b57c6a41524a810daee9286c02d7752c4253064d0b05472833a438f675" +dependencies = [ + "cfg-if 1.0.0", + "rand 0.8.5", + "static_assertions", +] + +[[package]] +name = "typed-arena" +version = "2.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6af6ae20167a9ece4bcb41af5b80f8a1f1df981f6391189ce00fd257af04126a" + +[[package]] +name = "typenum" +version = "1.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "497961ef93d974e23eb6f433eb5fe1b7930b659f06d12dec6fc44a8f554c0bba" + +[[package]] +name = "ucd-trie" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e79c4d996edb816c91e4308506774452e55e95c3c9de07b6729e17e15a5ef81" + +[[package]] +name = "uds_windows" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce65604324d3cce9b966701489fbd0cf318cb1f7bd9dd07ac9a4ee6fb791930d" +dependencies = [ + "tempfile", + "winapi 0.3.9", +] + +[[package]] +name = "unic-char-property" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8c57a407d9b6fa02b4795eb81c5b6652060a15a7903ea981f3d723e6c0be221" +dependencies = [ + "unic-char-range", +] + +[[package]] +name = "unic-char-range" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0398022d5f700414f6b899e10b8348231abf9173fa93144cbc1a43b9793c1fbc" + +[[package]] +name = "unic-common" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "80d7ff825a6a654ee85a63e80f92f054f904f21e7d12da4e22f9834a4aaa35bc" + +[[package]] +name = "unic-segment" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e4ed5d26be57f84f176157270c112ef57b86debac9cd21daaabbe56db0f88f23" +dependencies = [ + "unic-ucd-segment", +] + +[[package]] +name = "unic-ucd-segment" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2079c122a62205b421f499da10f3ee0f7697f012f55b675e002483c73ea34700" +dependencies = [ + "unic-char-property", + "unic-char-range", + "unic-ucd-version", +] + +[[package]] +name = "unic-ucd-version" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96bd2f2237fe450fcd0a1d2f5f4e91711124f7857ba2e964247776ebeeb7b0c4" +dependencies = [ + "unic-common", +] + +[[package]] +name = "unicase" +version = "2.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50f37be617794602aabbeee0be4f259dc1778fabe05e2d67ee8f79326d5cb4f6" +dependencies = [ + "version_check", +] + +[[package]] +name = "unicode-bidi" +version = "0.3.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92888ba5573ff080736b3648696b70cafad7d250551175acbaa4e0385b3e1460" + +[[package]] +name = "unicode-id" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d70b6494226b36008c8366c288d77190b3fad2eb4c10533139c1c1f461127f1a" + +[[package]] +name = "unicode-ident" +version = "1.0.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5464a87b239f13a63a501f2701565754bae92d243d4bb7eb12f6d57d2269bf4" + +[[package]] +name = "unicode-linebreak" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c5faade31a542b8b35855fff6e8def199853b2da8da256da52f52f1316ee3137" +dependencies = [ + "hashbrown 0.12.3", + "regex", +] + +[[package]] 
+name = "unicode-normalization" +version = "0.1.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c5713f0fc4b5db668a2ac63cdb7bb4469d8c9fed047b1d0292cc7b0ce2ba921" +dependencies = [ + "tinyvec", +] + +[[package]] +name = "unicode-segmentation" +version = "1.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1dd624098567895118886609431a7c3b8f516e41d30e0643f03d94592a147e36" + +[[package]] +name = "unicode-width" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b" + +[[package]] +name = "unicode-xid" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c" + +[[package]] +name = "unsafe-libyaml" +version = "0.2.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ad2024452afd3874bf539695e04af6732ba06517424dbf958fdb16a01f3bef6c" + +[[package]] +name = "untrusted" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" + +[[package]] +name = "update-informer" +version = "0.6.0" +source = "git+https://github.com/mgrachev/update-informer?rev=b7a415ac2276e857167b9fe8282044f93155878a#b7a415ac2276e857167b9fe8282044f93155878a" +dependencies = [ + "directories", + "semver 1.0.17", + "serde", + "serde_json", +] + +[[package]] +name = "url" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0d68c799ae75762b8c3fe375feb6600ef5602c883c5d21eb51c09f22b83c4643" +dependencies = [ + "form_urlencoded", + "idna", + "percent-encoding", + "serde", +] + +[[package]] +name = "urlencoding" +version = "2.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e8db7427f936968176eaa7cdf81b7f98b980b18495ec28f1b5791ac3bfe3eea9" + +[[package]] +name = "utf-8" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9" + +[[package]] +name = "utf8parse" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "936e4b492acfd135421d8dca4b1aa80a7bfc26e702ef3af710e0752684df5372" + +[[package]] +name = "v_frame" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "148c23ce3c8dae5562911cba1c264eaa5e31e133e0d5d08455409de9dd540358" +dependencies = [ + "cfg-if 1.0.0", + "new_debug_unreachable", + "noop_proc_macro", + "num-derive", + "num-traits", + "rust_hawktracer", +] + +[[package]] +name = "valuable" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" + +[[package]] +name = "value-bag" +version = "1.0.0-alpha.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2209b78d1249f7e6f3293657c9779fe31ced465df091bbd433a1cf88e916ec55" +dependencies = [ + "ctor", + "version_check", +] + +[[package]] +name = "vcpkg" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" + +[[package]] +name = "vec_map" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"f1bddf1187be692e79c5ffeab891132dfb0f236ed36a43c7ed39f1165ee20191" + +[[package]] +name = "vercel-api-mock" +version = "0.1.0" +dependencies = [ + "anyhow", + "axum", + "axum-server", + "port_scanner", + "tokio", + "turborepo-api-client", +] + +[[package]] +name = "vergen" +version = "7.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f21b881cd6636ece9735721cf03c1fe1e774fe258683d084bb2812ab67435749" +dependencies = [ + "anyhow", + "cfg-if 1.0.0", + "enum-iterator 1.4.0", + "getset", + "rustversion", + "thiserror", + "time 0.3.20", +] + +[[package]] +name = "version-compare" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "579a42fc0b8e0c63b76519a339be31bed574929511fa53c1a3acae26eb258f29" + +[[package]] +name = "version_check" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" + +[[package]] +name = "virtual-fs" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "66ecfcab24d1c4722afb076d89f21a49299fc9b34e36726114413012b48f795c" +dependencies = [ + "anyhow", + "async-trait", + "bytes", + "derivative", + "filetime", + "fs_extra", + "getrandom", + "indexmap", + "lazy_static", + "libc", + "pin-project-lite", + "slab", + "thiserror", + "tokio", + "tracing", + "webc", +] + +[[package]] +name = "virtual-net" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e043eb813b35633445d602acf13df921a8a1ac8833818fb8f891e2f6223fedd7" +dependencies = [ + "async-trait", + "bytes", + "thiserror", + "tracing", +] + +[[package]] +name = "vte" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6cbce692ab4ca2f1f3047fcf732430249c0e971bfdd2b234cf2c47ad93af5983" +dependencies = [ + "arrayvec 0.5.2", + "utf8parse", + "vte_generate_state_changes", +] + +[[package]] +name = "vte_generate_state_changes" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d257817081c7dffcdbab24b9e62d2def62e2ff7d00b1c20062551e6cccc145ff" +dependencies = [ + "proc-macro2", + "quote", +] + +[[package]] +name = "wai-bindgen-gen-core" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1aa3dc41b510811122b3088197234c27e08fcad63ef936306dd8e11e2803876c" +dependencies = [ + "anyhow", + "wai-parser", +] + +[[package]] +name = "wai-bindgen-gen-rust" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19bc05e8380515c4337c40ef03b2ff233e391315b178a320de8640703d522efe" +dependencies = [ + "heck 0.3.3", + "wai-bindgen-gen-core", +] + +[[package]] +name = "wai-bindgen-gen-rust-wasm" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6f35ce5e74086fac87f3a7bd50f643f00fe3559adb75c88521ecaa01c8a6199" +dependencies = [ + "heck 0.3.3", + "wai-bindgen-gen-core", + "wai-bindgen-gen-rust", +] + +[[package]] +name = "wai-bindgen-gen-wasmer" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0f61484185d8c520a86d5a7f7f8265f446617c2f9774b2e20a52de19b6e53432" +dependencies = [ + "heck 0.3.3", + "wai-bindgen-gen-core", + "wai-bindgen-gen-rust", +] + +[[package]] +name = "wai-bindgen-rust" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"4e5601c6f448c063e83a5e931b8fefcdf7e01ada424ad42372c948d2e3d67741" +dependencies = [ + "bitflags 1.3.2", + "wai-bindgen-rust-impl", +] + +[[package]] +name = "wai-bindgen-rust-impl" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bdeeb5c1170246de8425a3e123e7ef260dc05ba2b522a1d369fe2315376efea4" +dependencies = [ + "proc-macro2", + "syn 1.0.109", + "wai-bindgen-gen-core", + "wai-bindgen-gen-rust-wasm", +] + +[[package]] +name = "wai-bindgen-wasmer" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e20321fa5e7f7affba9ba727dbb3f4e0168656af4f1aa78306203ad2d9a0bba7" +dependencies = [ + "anyhow", + "bitflags 1.3.2", + "once_cell", + "thiserror", + "tracing", + "wai-bindgen-wasmer-impl", + "wasmer", +] + +[[package]] +name = "wai-bindgen-wasmer-impl" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4b3488ed88d4dd0e3bf85bad4e27dac6cb31aae5d122a5dda2424803c8dc863a" +dependencies = [ + "proc-macro2", + "syn 1.0.109", + "wai-bindgen-gen-core", + "wai-bindgen-gen-wasmer", +] + +[[package]] +name = "wai-parser" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9bd0acb6d70885ea0c343749019ba74f015f64a9d30542e66db69b49b7e28186" +dependencies = [ + "anyhow", + "id-arena", + "pulldown-cmark", + "unicode-normalization", + "unicode-xid", +] + +[[package]] +name = "wait-timeout" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9f200f5b12eb75f8c1ed65abd4b2db8a6e1b138a20de009dacee265a2498f3f6" +dependencies = [ + "libc", +] + +[[package]] +name = "waker-fn" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d5b2c62b4012a3e1eca5a7e077d13b3bf498c4073e33ccd58626607748ceeca" + +[[package]] +name = "walkdir" +version = "2.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "36df944cda56c7d8d8b7496af378e6b16de9284591917d307c9b4d313c44e698" +dependencies = [ + "same-file", + "winapi-util", +] + +[[package]] +name = "want" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ce8a968cb1cd110d136ff8b819a556d6fb6d919363c61534f6860c7eb172ba0" +dependencies = [ + "log", + "try-lock", +] + +[[package]] +name = "wasi" +version = "0.10.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a143597ca7c7793eff794def352d41792a93c481eb1042423ff7ff72ba2c31f" + +[[package]] +name = "wasi" +version = "0.11.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" + +[[package]] +name = "wasm-bindgen" +version = "0.2.84" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "31f8dcbc21f30d9b8f2ea926ecb58f6b91192c17e9d33594b3df58b2007ca53b" +dependencies = [ + "cfg-if 1.0.0", + "wasm-bindgen-macro", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.84" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95ce90fd5bcc06af55a641a86428ee4229e44e07033963a2290a8e241607ccb9" +dependencies = [ + "bumpalo", + "log", + "once_cell", + "proc-macro2", + "quote", + "syn 1.0.109", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-downcast" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"5dac026d43bcca6e7ce1c0956ba68f59edf6403e8e930a5d891be72c31a44340" +dependencies = [ + "js-sys", + "once_cell", + "wasm-bindgen", + "wasm-bindgen-downcast-macros", +] + +[[package]] +name = "wasm-bindgen-downcast-macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c5020cfa87c7cecefef118055d44e3c1fc122c7ec25701d528ee458a0b45f38f" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "wasm-bindgen-futures" +version = "0.4.34" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f219e0d211ba40266969f6dbdd90636da12f75bee4fc9d6c23d1260dadb51454" +dependencies = [ + "cfg-if 1.0.0", + "js-sys", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.84" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c21f77c0bedc37fd5dc21f897894a5ca01e7bb159884559461862ae90c0b4c5" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.84" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2aff81306fcac3c7515ad4e177f521b5c9a15f2b08f4e32d823066102f35a5f6" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", + "wasm-bindgen-backend", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.84" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0046fef7e28c3804e5e38bfa31ea2a0f73905319b677e57ebe37e49358989b5d" + +[[package]] +name = "wasm-encoder" +version = "0.25.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4eff853c4f09eec94d76af527eddad4e9de13b11d6286a1ef7134bc30135a2b7" +dependencies = [ + "leb128", +] + +[[package]] +name = "wasmer" +version = "3.2.0-beta.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ad0f668c7708f75c74f0d97bcc7269c0bd523d3cf5a4d3db66365eb8092e2e1f" +dependencies = [ + "bytes", + "cfg-if 1.0.0", + "derivative", + "indexmap", + "js-sys", + "more-asserts", + "serde", + "serde-wasm-bindgen", + "target-lexicon", + "thiserror", + "wasm-bindgen", + "wasm-bindgen-downcast", + "wasmer-compiler", + "wasmer-compiler-cranelift", + "wasmer-derive", + "wasmer-types", + "wasmer-vm", + "wasmparser", + "wat", + "winapi 0.3.9", +] + +[[package]] +name = "wasmer-cache" +version = "3.2.0-beta.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "35fb2759671bcaedbd51754f75709903ed5f73e68300cab329b2f064b98a5f2f" +dependencies = [ + "blake3", + "hex", + "thiserror", + "wasmer", +] + +[[package]] +name = "wasmer-compiler" +version = "3.2.0-beta.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d880f91d020ba406020e150a70c68413bd970867f1c47cb1ab407e8cba688c9" +dependencies = [ + "backtrace", + "cfg-if 1.0.0", + "enum-iterator 0.7.0", + "enumset", + "lazy_static", + "leb128", + "memmap2", + "more-asserts", + "region", + "rustc-demangle", + "smallvec", + "thiserror", + "wasmer-types", + "wasmer-vm", + "wasmparser", + "winapi 0.3.9", +] + +[[package]] +name = "wasmer-compiler-cranelift" +version = "3.2.0-beta.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b051cdd601f46de3ba76b44bb5ff850f47df3040d56e2dc8e21a95480a47eed" +dependencies = [ + "cranelift-codegen", + "cranelift-entity", + "cranelift-frontend", + "gimli 0.26.2", + "more-asserts", + "rayon", + "smallvec", + "target-lexicon", + "tracing", + 
"wasmer-compiler", + "wasmer-types", +] + +[[package]] +name = "wasmer-derive" +version = "3.2.0-beta.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23df3f21f629e45da8b9fe1d4266ba3e4af356ac4c22f71071a240563ba55a24" +dependencies = [ + "proc-macro-error", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "wasmer-types" +version = "3.2.0-beta.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa69c0da907df0110d0276273db3e730186b1378f01c111e41c93068142bcc54" +dependencies = [ + "bytecheck", + "enum-iterator 0.7.0", + "enumset", + "indexmap", + "more-asserts", + "rkyv", + "target-lexicon", + "thiserror", +] + +[[package]] +name = "wasmer-vm" +version = "3.2.0-beta.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5bd65399be53ddcb647c323ec73b827890cebf04e853ab01e989425a347b23a" +dependencies = [ + "backtrace", + "cc", + "cfg-if 1.0.0", + "corosensei", + "derivative", + "enum-iterator 0.7.0", + "indexmap", + "lazy_static", + "libc", + "mach", + "memoffset 0.6.5", + "more-asserts", + "region", + "scopeguard", + "thiserror", + "wasmer-types", + "winapi 0.3.9", +] + +[[package]] +name = "wasmer-wasix" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c808ba4a46d03a76a8ed53c15e7f49b42532d272aa814776659d2e21ab920309" +dependencies = [ + "anyhow", + "async-trait", + "bincode", + "bytes", + "cfg-if 1.0.0", + "chrono", + "cooked-waker", + "derivative", + "futures", + "getrandom", + "heapless", + "hex", + "http", + "lazy_static", + "libc", + "linked_hash_set", + "once_cell", + "pin-project", + "rand 0.8.5", + "serde", + "serde_derive", + "serde_json", + "serde_yaml 0.8.26", + "sha2", + "shellexpand", + "term_size", + "termios", + "thiserror", + "tokio", + "tracing", + "urlencoding", + "virtual-fs", + "virtual-net", + "wai-bindgen-wasmer", + "waker-fn", + "wasm-bindgen", + "wasmer", + "wasmer-types", + "wasmer-wasix-types", + "webc", + "weezl", + "winapi 0.3.9", +] + +[[package]] +name = "wasmer-wasix-types" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "79992bb1b873535e16afa77455eed58b922aa4c8b30d591f103ac038b01537de" +dependencies = [ + "anyhow", + "bitflags 1.3.2", + "byteorder", + "cfg-if 1.0.0", + "num_enum", + "time 0.2.27", + "wai-bindgen-gen-core", + "wai-bindgen-gen-rust", + "wai-bindgen-gen-rust-wasm", + "wai-bindgen-rust", + "wai-parser", + "wasmer", + "wasmer-derive", + "wasmer-types", +] + +[[package]] +name = "wasmparser" +version = "0.95.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2ea896273ea99b15132414be1da01ab0d8836415083298ecaffbe308eaac87a" +dependencies = [ + "indexmap", + "url", +] + +[[package]] +name = "wast" +version = "55.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4984d3e1406571f4930ba5cf79bd70f75f41d0e87e17506e0bd19b0e5d085f05" +dependencies = [ + "leb128", + "memchr", + "unicode-width", + "wasm-encoder", +] + +[[package]] +name = "wat" +version = "1.0.61" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af2b53f4da14db05d32e70e9c617abdf6620c575bd5dd972b7400037b4df2091" +dependencies = [ + "wast", +] + +[[package]] +name = "web-sys" +version = "0.3.61" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e33b99f4b23ba3eec1a53ac264e35a755f00e966e0065077d6027c0f575b0b97" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + 
+[[package]] +name = "webbrowser" +version = "0.8.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "579cc485bd5ce5bfa0d738e4921dd0b956eca9800be1fd2e5257ebe95bc4617e" +dependencies = [ + "core-foundation", + "dirs", + "jni", + "log", + "ndk-context", + "objc", + "raw-window-handle", + "url", + "web-sys", +] + +[[package]] +name = "webc" +version = "5.0.0-rc.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "418bfd8fc298ce60295203a6960d53af48c8e10c5a021a5e7db8bc06c4830148" +dependencies = [ + "anyhow", + "base64 0.21.0", + "byteorder", + "bytes", + "indexmap", + "leb128", + "lexical-sort", + "memmap2", + "once_cell", + "path-clean", + "rand 0.8.5", + "serde", + "serde_cbor", + "serde_json", + "sha2", + "thiserror", + "url", + "walkdir", +] + +[[package]] +name = "webp" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf022f821f166079a407d000ab57e84de020e66ffbbf4edde999bc7d6e371cae" +dependencies = [ + "libwebp-sys", +] + +[[package]] +name = "webpki" +version = "0.22.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f095d78192e208183081cc07bc5515ef55216397af48b873e5edcd72637fa1bd" +dependencies = [ + "ring", + "untrusted", +] + +[[package]] +name = "webpki-roots" +version = "0.22.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c71e40d7d2c34a5106301fb632274ca37242cd0c9d3e64dbece371a40a2d87" +dependencies = [ + "webpki", +] + +[[package]] +name = "weezl" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9193164d4de03a926d909d3bc7c30543cecb35400c02114792c2cae20d5e2dbb" + +[[package]] +name = "which" +version = "4.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2441c784c52b289a054b7201fc93253e288f094e2f4be9058343127c4226a269" +dependencies = [ + "either", + "libc", + "once_cell", +] + +[[package]] +name = "winapi" +version = "0.2.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "167dc9d6949a9b857f3451275e911c3f44255842c1f7a76f33c55103a909087a" + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-build" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2d315eee3b34aca4797b2da6b13ed88266e6d612562a0c46390af8299fc699bc" + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-util" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178" +dependencies = [ + "winapi 0.3.9", +] + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + +[[package]] +name = "windows" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e686886bc078bc1b0b600cac0147aadb815089b6e4da64016cbd754b6342700f" +dependencies = [ + 
"windows-targets 0.48.0", +] + +[[package]] +name = "windows-sys" +version = "0.33.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43dbb096663629518eb1dfa72d80243ca5a6aca764cae62a2df70af760a9be75" +dependencies = [ + "windows_aarch64_msvc 0.33.0", + "windows_i686_gnu 0.33.0", + "windows_i686_msvc 0.33.0", + "windows_x86_64_gnu 0.33.0", + "windows_x86_64_msvc 0.33.0", +] + +[[package]] +name = "windows-sys" +version = "0.42.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a3e1820f08b8513f676f7ab6c1f99ff312fb97b553d30ff4dd86f9f15728aa7" +dependencies = [ + "windows_aarch64_gnullvm 0.42.2", + "windows_aarch64_msvc 0.42.2", + "windows_i686_gnu 0.42.2", + "windows_i686_msvc 0.42.2", + "windows_x86_64_gnu 0.42.2", + "windows_x86_64_gnullvm 0.42.2", + "windows_x86_64_msvc 0.42.2", +] + +[[package]] +name = "windows-sys" +version = "0.45.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75283be5efb2831d37ea142365f009c02ec203cd29a3ebecbc093d52315b66d0" +dependencies = [ + "windows-targets 0.42.2", +] + +[[package]] +name = "windows-sys" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" +dependencies = [ + "windows-targets 0.48.0", +] + +[[package]] +name = "windows-targets" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e5180c00cd44c9b1c88adb3693291f1cd93605ded80c250a75d472756b4d071" +dependencies = [ + "windows_aarch64_gnullvm 0.42.2", + "windows_aarch64_msvc 0.42.2", + "windows_i686_gnu 0.42.2", + "windows_i686_msvc 0.42.2", + "windows_x86_64_gnu 0.42.2", + "windows_x86_64_gnullvm 0.42.2", + "windows_x86_64_msvc 0.42.2", +] + +[[package]] +name = "windows-targets" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b1eb6f0cd7c80c79759c929114ef071b87354ce476d9d94271031c0497adfd5" +dependencies = [ + "windows_aarch64_gnullvm 0.48.0", + "windows_aarch64_msvc 0.48.0", + "windows_i686_gnu 0.48.0", + "windows_i686_msvc 0.48.0", + "windows_x86_64_gnu 0.48.0", + "windows_x86_64_gnullvm 0.48.0", + "windows_x86_64_msvc 0.48.0", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "597a5118570b68bc08d8d59125332c54f1ba9d9adeedeef5b99b02ba2b0698f8" + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91ae572e1b79dba883e0d315474df7305d12f569b400fcf90581b06062f7e1bc" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.33.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd761fd3eb9ab8cc1ed81e56e567f02dd82c4c837e48ac3b2181b9ffc5060807" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e08e8864a60f06ef0d0ff4ba04124db8b0fb3be5776a5cd47641e942e58c4d43" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2ef27e0d7bdfcfc7b868b317c1d32c641a6fe4629c171b8928c7b08d98d7cf3" + +[[package]] +name = "windows_i686_gnu" +version = "0.33.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cab0cf703a96bab2dc0c02c0fa748491294bf9b7feb27e1f4f96340f208ada0e" + +[[package]] +name = 
"windows_i686_gnu" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c61d927d8da41da96a81f029489353e68739737d3beca43145c8afec9a31a84f" + +[[package]] +name = "windows_i686_gnu" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "622a1962a7db830d6fd0a69683c80a18fda201879f0f447f065a3b7467daa241" + +[[package]] +name = "windows_i686_msvc" +version = "0.33.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8cfdbe89cc9ad7ce618ba34abc34bbb6c36d99e96cae2245b7943cd75ee773d0" + +[[package]] +name = "windows_i686_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "44d840b6ec649f480a41c8d80f9c65108b92d89345dd94027bfe06ac444d1060" + +[[package]] +name = "windows_i686_msvc" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4542c6e364ce21bf45d69fdd2a8e455fa38d316158cfd43b3ac1c5b1b19f8e00" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.33.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b4dd9b0c0e9ece7bb22e84d70d01b71c6d6248b81a3c60d11869451b4cb24784" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8de912b8b8feb55c064867cf047dda097f92d51efad5b491dfb98f6bbb70cb36" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca2b8a661f7628cbd23440e50b05d705db3686f894fc9580820623656af974b1" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26d41b46a36d453748aedef1486d5c7a85db22e56aff34643984ea85514e94a3" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7896dbc1f41e08872e9d5e8f8baa8fdd2677f29468c4e156210174edc7f7b953" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.33.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff1e4aa646495048ec7f3ffddc411e1d829c026a2ec62b39da15c1055e406eaa" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9aec5da331524158c6d1a4ac0ab1541149c0b9505fde06423b02f5ef0106b9f0" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a" + +[[package]] +name = "winnow" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae8970b36c66498d8ff1d66685dc86b91b29db0c7739899012f63a63814b4b28" +dependencies = [ + "memchr", +] + +[[package]] +name = "winreg" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "80d0f4e272c85def139476380b12f9ac60926689dd2e01d4923222f40580869d" +dependencies = [ + "winapi 0.3.9", +] + +[[package]] +name = "wio" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d129932f4644ac2396cb456385cbf9e63b5b30c6e8dc4820bdca4eb082037a5" +dependencies = [ + "winapi 0.3.9", +] + +[[package]] +name = "ws2_32-sys" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"d59cefebd0c892fa2dd6de581e937301d8552cb44489cdff035c6187cb63fa5e" +dependencies = [ + "winapi 0.2.8", + "winapi-build", +] + +[[package]] +name = "xtask" +version = "0.1.0" +dependencies = [ + "anyhow", + "cargo-lock", + "chrono", + "clap 4.1.11", + "indexmap", + "inquire", + "num-format", + "owo-colors", + "plotters", + "semver 1.0.17", + "serde", + "serde_json", + "tabled", + "walkdir", +] + +[[package]] +name = "y4m" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a72a9921af8237fe25097a1ae31c92a05c1d39b2454653ad48f2f407cf7a0dae" + +[[package]] +name = "yaml-rust" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56c1936c4cc7a1c9ab21a1ebb602eb942ba868cbd44a99cb7cdc5892335e1c85" +dependencies = [ + "linked-hash-map", +] + +[[package]] +name = "yansi" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09041cd90cf85f7f8b2df60c646f853b7f535ce68f85244eb6731cf89fa498ec" + +[[package]] +name = "yeslogic-fontconfig-sys" +version = "3.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2bbd69036d397ebbff671b1b8e4d918610c181c5a16073b96f984a38d08c386" +dependencies = [ + "const-cstr", + "dlib", + "once_cell", + "pkg-config", +] + +[[package]] +name = "zeroize" +version = "1.5.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c394b5bd0c6f669e7275d9c20aa90ae064cb22e75a1cad54e1b34088034b149f" diff --git a/Cargo.toml b/Cargo.toml new file mode 100644 index 0000000..68bdd51 --- /dev/null +++ b/Cargo.toml @@ -0,0 +1,229 @@ +[workspace] +resolver = "2" + +members = [ + "crates/auto-hash-map", + "crates/node-file-trace", + "crates/swc-ast-explorer", + "crates/turbo-binding", + "crates/turbo-malloc", + "crates/turbo-tasks", + "crates/turbo-tasks-build", + "crates/turbo-tasks-bytes", + "crates/turbo-tasks-env", + "crates/turbo-tasks-fetch", + "crates/turbo-tasks-fs", + "crates/turbo-tasks-hash", + "crates/turbo-tasks-macros", + "crates/turbo-tasks-macros-shared", + "crates/turbo-tasks-memory", + "crates/turbo-tasks-testing", + "crates/turbo-updater", + "crates/turbopack", + "crates/turbopack-cli", + "crates/turbopack-cli-utils", + "crates/turbopack-core", + "crates/turbopack-create-test-app", + "crates/turbopack-css", + "crates/turbopack-dev", + "crates/turbopack-dev-server", + "crates/turbopack-ecmascript", + "crates/turbopack-env", + "crates/turbopack-image", + "crates/turbopack-json", + "crates/turbopack-mdx", + "crates/turbopack-node", + "crates/turbopack-static", + "crates/turbopack-swc-utils", + "crates/turbopack-test-utils", + "crates/turbopack-tests", + "crates/turbopath", + "crates/glob-match", + "crates/turborepo", + "crates/turborepo-api-client", + "crates/turborepo-ffi", + "crates/turborepo-lib", + "crates/turborepo-lockfiles", + "crates/turborepo-scm", + "crates/turborepo-vercel-api-mock", + "xtask", +] + +default-members = [ + "crates/auto-hash-map", + "crates/node-file-trace", + "crates/swc-ast-explorer", + "crates/turbo-malloc", + "crates/turbo-tasks", + "crates/turbo-tasks-build", + "crates/turbo-tasks-bytes", + "crates/turbo-tasks-env", + "crates/turbo-tasks-fetch", + "crates/turbo-tasks-fs", + "crates/turbo-tasks-hash", + "crates/turbo-tasks-macros", + "crates/turbo-tasks-macros-shared", + "crates/turbo-tasks-memory", + "crates/turbo-tasks-testing", + "crates/turbopack", + "crates/turbopack-bench", + "crates/turbopack-cli-utils", + "crates/turbopack-core", + 
"crates/turbopack-create-test-app", + "crates/turbopack-css", + "crates/turbopack-dev", + "crates/turbopack-dev-server", + "crates/turbopack-ecmascript", + "crates/turbopack-env", + "crates/turbopack-json", + "crates/turbopack-mdx", + "crates/turbopack-node", + "crates/turbopack-static", + "crates/turbopack-swc-utils", + "crates/turbopack-test-utils", + "crates/turbopack-tests", + "xtask", +] + +[profile.dev.package.turbo-tasks-macros] +opt-level = 3 + +# Set the settings for build scripts and proc-macros. +[profile.dev.build-override] +opt-level = 3 + +# Declare dependencies used across workspace packages requires single version bump. +# ref: https://doc.rust-lang.org/cargo/reference/specifying-dependencies.html#inheriting-a-dependency-from-a-workspace +[workspace.dependencies] +# Keep consistent with preset_env_base through swc_core +browserslist-rs = { version = "0.12.2" } +mdxjs = { version = "0.1.11" } +modularize_imports = { version = "0.27.5" } +styled_components = { version = "0.54.5" } +styled_jsx = { version = "0.31.5" } +swc_core = { version = "0.75.23" } +swc_emotion = { version = "0.30.5" } +swc_relay = { version = "0.2.5" } +testing = { version = "0.33.4" } + +auto-hash-map = { path = "crates/auto-hash-map" } +node-file-trace = { path = "crates/node-file-trace", default-features = false } +swc-ast-explorer = { path = "crates/swc-ast-explorer" } +turbo-malloc = { path = "crates/turbo-malloc", default-features = false } +turbo-tasks = { path = "crates/turbo-tasks" } +turbo-tasks-build = { path = "crates/turbo-tasks-build" } +turbo-tasks-bytes = { path = "crates/turbo-tasks-bytes" } +turbo-tasks-env = { path = "crates/turbo-tasks-env" } +turbo-tasks-fetch = { path = "crates/turbo-tasks-fetch", default-features = false } +turbo-tasks-fs = { path = "crates/turbo-tasks-fs" } +turbo-tasks-hash = { path = "crates/turbo-tasks-hash" } +turbo-tasks-macros = { path = "crates/turbo-tasks-macros" } +turbo-tasks-macros-shared = { path = "crates/turbo-tasks-macros-shared" } +turbo-tasks-memory = { path = "crates/turbo-tasks-memory" } +turbo-tasks-testing = { path = "crates/turbo-tasks-testing" } +turbo-updater = { path = "crates/turbo-updater" } +turbopack = { path = "crates/turbopack" } +turbopack-bench = { path = "crates/turbopack-bench" } +turbopack-cli = { path = "crates/turbopack-cli" } +turbopack-cli-utils = { path = "crates/turbopack-cli-utils" } +turbopack-core = { path = "crates/turbopack-core" } +turbopack-create-test-app = { path = "crates/turbopack-create-test-app" } +turbopack-css = { path = "crates/turbopack-css" } +turbopack-dev = { path = "crates/turbopack-dev" } +turbopack-dev-server = { path = "crates/turbopack-dev-server" } +turbopack-ecmascript = { path = "crates/turbopack-ecmascript" } +turbopack-env = { path = "crates/turbopack-env" } +turbopack-image = { path = "crates/turbopack-image" } +turbopack-json = { path = "crates/turbopack-json" } +turbopack-mdx = { path = "crates/turbopack-mdx" } +turbopack-node = { path = "crates/turbopack-node" } +turbopack-static = { path = "crates/turbopack-static" } +turbopack-swc-utils = { path = "crates/turbopack-swc-utils" } +turbopack-test-utils = { path = "crates/turbopack-test-utils" } +turbopack-tests = { path = "crates/turbopack-tests" } +turbopath = { path = "crates/turbopath" } +turborepo = { path = "crates/turborepo" } +turborepo-api-client = { path = "crates/turborepo-api-client" } +turborepo-ffi = { path = "crates/turborepo-ffi" } +turborepo-lib = { path = "crates/turborepo-lib" } +turborepo-lockfiles = { path = 
"crates/turborepo-lockfiles" } +turborepo-scm = { path = "crates/turborepo-scm" } +vercel-api-mock = { path = "crates/turborepo-vercel-api-mock" } + +# Be careful when selecting tls backend, including change default tls backend. +# If you changed, must verify with ALL build targets with next-swc to ensure +# it works. next-swc have various platforms, some doesn't support native (using openssl-sys) +# and some aren't buildable with rustls. +reqwest = { version = "0.11.14", default-features = false } + +chromiumoxide = { version = "0.4.0", features = [ + "tokio-runtime", +], default-features = false } +# For matching on errors from chromiumoxide. Keep in +# sync with chromiumoxide's tungstenite requirement. +tungstenite = "0.17.3" + +anyhow = "1.0.69" +assert_cmd = "2.0.8" +async-compression = { version = "0.3.13", default-features = false, features = [ + "gzip", + "tokio", +] } +async-trait = "0.1.64" +atty = "0.2.14" +axum = "0.6.2" +axum-server = "0.4.4" +bytes = "1.1.0" +chrono = "0.4.23" +clap = "4.1.6" +clap_complete = "4.1.2" +concurrent-queue = "2.1.0" +console = "0.15.5" +console-subscriber = "0.1.8" +criterion = "0.4.0" +dashmap = "5.4.0" +dialoguer = "0.10.3" +dunce = "1.0.3" +futures = "0.3.26" +futures-retry = "0.6.0" +httpmock = { version = "0.6.7", default-features = false } +image = { version = "0.24.6", default-features = false } +indexmap = "1.9.2" +indicatif = "0.17.3" +indoc = "2.0.0" +itertools = "0.10.5" +lazy_static = "1.4.0" +log = "0.4.17" +mime = "0.3.16" +nohash-hasher = "0.2.0" +once_cell = "1.17.1" +owo-colors = "3.5.0" +parking_lot = "0.12.1" +pathdiff = "0.2.1" +pin-project-lite = "0.2.9" +port_scanner = "0.1.5" +predicates = "2.1.5" +pretty_assertions = "1.3.0" +proc-macro2 = "1.0.51" +qstring = "0.7.2" +quote = "1.0.23" +rand = "0.8.5" +regex = "1.7.0" +rstest = "0.16.0" +rustc-hash = "1.1.0" +semver = "1.0.16" +serde = { version = "1.0.152", features = ["derive"] } +serde_json = "1.0.93" +serde_qs = "0.11.0" +serde_with = "2.3.2" +serde_yaml = "0.9.17" +syn = "1.0.107" +tempfile = "3.3.0" +thiserror = "1.0.38" +tiny-gradient = "0.1.0" +tokio = "1.25.0" +tokio-util = { version = "0.7.7", features = ["io"] } +tracing = "0.1.37" +url = "2.2.2" +urlencoding = "2.1.2" +webbrowser = "0.8.7" diff --git a/NOTICES.md b/NOTICES.md new file mode 100644 index 0000000..cbbf7cc --- /dev/null +++ b/NOTICES.md @@ -0,0 +1,36 @@ +THIRD-PARTY SOFTWARE NOTICES + +This project incorporates components from the projects listed below. The original copyright notices and the licenses under which those components are redistributed are set forth below. + +--- + +- https://cs.opensource.google/go/go/+/refs/tags/go1.18.2:src/encoding/csv/reader.go +- https://cs.opensource.google/go/go/+/refs/tags/go1.18.2:src/encoding/csv/reader_test.go + +Copyright (c) 2009 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +- Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. +- Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following disclaimer + in the documentation and/or other materials provided with the + distribution. +- Neither the name of Google Inc. nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. 
diff --git a/NOTICES.md b/NOTICES.md new file mode 100644 index 0000000..cbbf7cc --- /dev/null +++ b/NOTICES.md @@ -0,0 +1,36 @@ +THIRD-PARTY SOFTWARE NOTICES + +This project incorporates components from the projects listed below. The original copyright notices and the licenses under which those components are redistributed are set forth below. + +--- + +- https://cs.opensource.google/go/go/+/refs/tags/go1.18.2:src/encoding/csv/reader.go +- https://cs.opensource.google/go/go/+/refs/tags/go1.18.2:src/encoding/csv/reader_test.go + +Copyright (c) 2009 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +- Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. +- Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following disclaimer + in the documentation and/or other materials provided with the + distribution. +- Neither the name of Google Inc. nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/SECURITY.md b/SECURITY.md new file mode 100644 index 0000000..294bff1 --- /dev/null +++ b/SECURITY.md @@ -0,0 +1 @@ +Visit https://vercel.com/security to view the disclosure policy. diff --git a/benchmark/.gitignore b/benchmark/.gitignore new file mode 100644 index 0000000..956951a --- /dev/null +++ b/benchmark/.gitignore @@ -0,0 +1,3 @@ +large-monorepo +benchmarks.json +tinybird.ndjson diff --git a/benchmark/README.md b/benchmark/README.md new file mode 100644 index 0000000..2f13f23 --- /dev/null +++ b/benchmark/README.md @@ -0,0 +1,6 @@ +# Turborepo Benchmarking + +To run benchmarks for Turborepo: + +1. From `../cli/`, run `make turbo-prod` to build `turbo` +2. From this directory, run `pnpm run benchmark` diff --git a/benchmark/package.json b/benchmark/package.json new file mode 100644 index 0000000..9a92660 --- /dev/null +++ b/benchmark/package.json @@ -0,0 +1,16 @@ +{ + "name": "benchmark", + "version": "1.0.0", + "dependencies": { + "esbuild": "^0.15.0", + "esbuild-register": "^3.3.2", + "fs-extra": "^10.0.0", + "ndjson": "^2.0.0" + }, + "scripts": { + "benchmark": "node -r esbuild-register src/index.ts" + }, + "devDependencies": { + "@types/node": "^16.11.49" + } +} diff --git a/benchmark/src/index.ts b/benchmark/src/index.ts new file mode 100644 index 0000000..117d6b1 --- /dev/null +++ b/benchmark/src/index.ts @@ -0,0 +1,273 @@ +import cp from "child_process"; +import fs from "fs"; +import fse from "fs-extra"; +import path from "path"; +import ndjson from "ndjson"; + +const REPO_ROOT = "large-monorepo"; +const REPO_ORIGIN = "https://github.com/gsoltis/large-monorepo.git"; +const REPO_PATH = path.join(process.cwd(), REPO_ROOT); +const REPETITIONS = 5; + +const DEFAULT_EXEC_OPTS = { stdio: "ignore" as const, cwd: REPO_PATH }; +const TURBO_BIN = path.resolve(path.join("..", "target", "release", "turbo")); +const DEFAULT_CACHE_PATH = path.join( + REPO_PATH, + "node_modules", + ".cache", + "turbo" +); +const ALT_CACHE_PATH = path.join( + REPO_PATH, + "node_modules", + ".cache", + "turbo-benchmark" +); + +type Timing = number; + +type Benchmark = { + name: string; + unit: string; + value: number; + range?: string; + extra?: string; +}; + +type TBirdEvent = { + commitSha: string; + commitTimestamp: Date; + platform: string; + benchmark: string; + durationMs: number; +}; + +function setup(): void { + // Clone the repo if it doesn't exist; if it does, reset and clean it + if (fs.existsSync(REPO_ROOT)) { + // Reset the repo and remove all changed or untracked files + cp.execSync( + `cd ${REPO_ROOT} && git reset --hard HEAD && git clean -f -d -X`, + { + stdio: "inherit", + } + ); + } else { + cp.execSync(`git clone ${REPO_ORIGIN}`, { stdio: "ignore" }); + } + + // Run install so we aren't benchmarking node_modules ...
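+ // NOTE: DEFAULT_EXEC_OPTS (defined above) runs each command from inside the + // cloned repo with stdio ignored, so install and build output stays out of + // the benchmark logs.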
+ + cp.execSync("yarn install", DEFAULT_EXEC_OPTS); +} + +function cleanTurboCache(): void { + if (fs.existsSync(DEFAULT_CACHE_PATH)) { + console.log("clearing cache"); + fs.rmSync(DEFAULT_CACHE_PATH, { recursive: true }); + } +} + +function cleanBuild(): Timing[] { + const timings: Timing[] = []; + const isLocal = process.argv[process.argv.length - 1] == "--local"; + // We aren't really benchmarking this one, it OOMs if run in full parallel + // on GH actions + const repetitions = isLocal ? REPETITIONS : 1; + const concurrency = isLocal ? "" : " --concurrency=1"; + for (let i = 0; i < repetitions; i++) { + // clean first, we'll leave the cache in place for subsequent builds + cleanTurboCache(); + const start = new Date().getTime(); + cp.execSync(`${TURBO_BIN} run build${concurrency}`, DEFAULT_EXEC_OPTS); + const end = new Date().getTime(); + const timing = end - start; + timings.push(timing); + } + return timings; +} + +function cachedBuild(): Timing[] { + const timings: Timing[] = []; + for (let i = 0; i < REPETITIONS; i++) { + const start = new Date().getTime(); + cp.execSync(`${TURBO_BIN} run build`, DEFAULT_EXEC_OPTS); + const end = new Date().getTime(); + const timing = end - start; + timings.push(timing); + } + return timings; +} + +function saveCache() { + // Remove any existing backup + if (fs.existsSync(ALT_CACHE_PATH)) { + fs.rmSync(ALT_CACHE_PATH, { recursive: true }); + } + // copy the current cache to the backup + if (fs.existsSync(DEFAULT_CACHE_PATH)) { + fse.copySync(DEFAULT_CACHE_PATH, ALT_CACHE_PATH, { recursive: true }); + } else { + // make an empty cache + fs.mkdirSync(ALT_CACHE_PATH, { recursive: true }); + } +} + +function restoreSavedCache() { + // Remove any existing cache + if (fs.existsSync(DEFAULT_CACHE_PATH)) { + fs.rmSync(DEFAULT_CACHE_PATH, { recursive: true }); + } + // Copy the backed-up cache to the real cache + fse.copySync(ALT_CACHE_PATH, DEFAULT_CACHE_PATH, { recursive: true }); +} + +function cachedBuildWithDelta(): Timing[] { + // Save existing cache just once, we'll restore from it each time + saveCache(); + + // Edit a file in place + const file = path.join( + REPO_PATH, + "packages", + "crew", + "important-feature-0", + "src", + "lib", + "important-component-0", + "important-component-0.tsx" + ); + const contents = fs.readFileSync(file).toString("utf-8"); + // make a small edit + const updated = contents.replace("-0!", "-0!!"); + fs.writeFileSync(file, updated); + + const timings: Timing[] = []; + for (let i = 0; i < REPETITIONS; i++) { + // Make sure we're starting with the cache from before we make the source code edit + restoreSavedCache(); + const start = new Date().getTime(); + cp.execSync(`${TURBO_BIN} run build`, DEFAULT_EXEC_OPTS); + const end = new Date().getTime(); + const timing = end - start; + timings.push(timing); + } + return timings; +} + +function cachedBuildWithDependencyChange(): Timing[] { + // Save existing cache just once, we'll restore from it each time + saveCache(); + + // Edit a dependency + const file = path.join(REPO_PATH, "apps", "navigation", "package.json"); + const contents = JSON.parse(fs.readFileSync(file).toString("utf-8")); + contents.dependencies["crew-important-feature-0"] = "*"; + fs.writeFileSync(file, JSON.stringify(contents, null, 2)); + + const timings: Timing[] = []; + for (let i = 0; i < REPETITIONS; i++) { + // Make sure we're starting with the cache from before we made the dependency edit + restoreSavedCache(); + const start = new Date().getTime(); + cp.execSync(`${TURBO_BIN} run build`, 
DEFAULT_EXEC_OPTS); + const end = new Date().getTime(); + const timing = end - start; + timings.push(timing); + } + return timings; +} + +class Benchmarks { + private readonly benchmarks: Benchmark[] = []; + private readonly tbirdEvents: TBirdEvent[] = []; + + constructor( + private readonly benchmarkFile: string, + private readonly tinybirdFile: string, + private readonly commitSha: string, + private readonly commitTimestamp: Date, + private readonly platform: string + ) {} + + run(name: string, b: () => Timing[]) { + console.log(name); + const timings = b(); + const max = Math.max(...timings); + const min = Math.min(...timings); + const avg = timings.reduce((a, b) => a + b, 0) / timings.length; + this.benchmarks.push({ + name, + value: avg, + unit: "ms", + range: String(max - min), + }); + timings.forEach((t) => { + this.tbirdEvents.push({ + commitSha: this.commitSha, + commitTimestamp: this.commitTimestamp, + platform: this.platform, + benchmark: name, + durationMs: t, + }); + }); + } + + flush() { + console.log(JSON.stringify(this.benchmarks, null, 2)); + fs.writeFileSync( + this.benchmarkFile, + JSON.stringify(this.benchmarks, null, 2) + ); + const stream = ndjson.stringify(); + const fd = fs.openSync(this.tinybirdFile, "w"); + stream.on("data", (line) => { + fs.writeSync(fd, line); + }); + this.tbirdEvents.forEach((t) => { + stream.write(t); + }); + stream.end(); + fs.closeSync(fd); + } +} + +cp.execSync(`${TURBO_BIN} --version`, { stdio: "inherit" }); + +function getCommitDetails(): { commitSha: string; commitTimestamp: Date } { + const envSha = process.env["GITHUB_SHA"]; + if (envSha === undefined) { + return { + commitSha: "unknown sha", + commitTimestamp: new Date(), + }; + } + const buf = cp.execSync(`git show -s --format=%ci ${envSha}`); + const dateString = String(buf).trim(); + const commitTimestamp = new Date(dateString); + return { + commitSha: envSha, + commitTimestamp, + }; +} + +const { commitSha, commitTimestamp } = getCommitDetails(); +const platform = process.env["RUNNER_OS"] ?? "unknown"; + +console.log("setup"); +setup(); +const benchmark = new Benchmarks( + "benchmarks.json", + "tinybird.ndjson", + commitSha, + commitTimestamp, + platform +); +benchmark.run("Clean Build", cleanBuild); +benchmark.run("Cached Build - No Change", cachedBuild); +benchmark.run("Cached Build - Code Change", cachedBuildWithDelta); +benchmark.run( + "Cached Build - Dependency Change", + cachedBuildWithDependencyChange +); +benchmark.flush(); diff --git a/cli/.gitignore b/cli/.gitignore new file mode 100644 index 0000000..10e16a2 --- /dev/null +++ b/cli/.gitignore @@ -0,0 +1,21 @@ +/internal/turbodprotocol/*.go + +/demo/ +/dist/ +/dist-* + +# Built binaries. 
+/turbo +/turbo-new +/turbo-new.exe +/turbo.exe +/go-turbo +/go-turbo.exe + +testbed + +# Windows lib files +turbo.h +turbo.lib +libturbo.a +libturbo.h diff --git a/cli/.golangci.yml b/cli/.golangci.yml new file mode 100644 index 0000000..2008477 --- /dev/null +++ b/cli/.golangci.yml @@ -0,0 +1,30 @@ +# Refer to golangci-lint's example config file for more options and information: +# https://github.com/golangci/golangci-lint/blob/master/.golangci.example.yml + +run: + timeout: 5m + modules-download-mode: readonly + go: 1.17 + skip-dirs: + - internal/yaml # vendored upstream library + skip-files: + - internal/chrometracing/chrometracing.go # vendored upstream library + +linters: + enable: + - errcheck + - goimports + - govet + - staticcheck + - revive + - nakedret + +linters-settings: + nakedret: + # Aggressively disallow naked returns + max-func-lines: 3 + +issues: + exclude-use-default: false + max-issues-per-linter: 0 + max-same-issues: 0 diff --git a/cli/LICENSE b/cli/LICENSE new file mode 100644 index 0000000..fa0086a --- /dev/null +++ b/cli/LICENSE @@ -0,0 +1,373 @@ +Mozilla Public License Version 2.0 +================================== + +1. Definitions +-------------- + +1.1. "Contributor" + means each individual or legal entity that creates, contributes to + the creation of, or owns Covered Software. + +1.2. "Contributor Version" + means the combination of the Contributions of others (if any) used + by a Contributor and that particular Contributor's Contribution. + +1.3. "Contribution" + means Covered Software of a particular Contributor. + +1.4. "Covered Software" + means Source Code Form to which the initial Contributor has attached + the notice in Exhibit A, the Executable Form of such Source Code + Form, and Modifications of such Source Code Form, in each case + including portions thereof. + +1.5. "Incompatible With Secondary Licenses" + means + + (a) that the initial Contributor has attached the notice described + in Exhibit B to the Covered Software; or + + (b) that the Covered Software was made available under the terms of + version 1.1 or earlier of the License, but not also under the + terms of a Secondary License. + +1.6. "Executable Form" + means any form of the work other than Source Code Form. + +1.7. "Larger Work" + means a work that combines Covered Software with other material, in + a separate file or files, that is not Covered Software. + +1.8. "License" + means this document. + +1.9. "Licensable" + means having the right to grant, to the maximum extent possible, + whether at the time of the initial grant or subsequently, any and + all of the rights conveyed by this License. + +1.10. "Modifications" + means any of the following: + + (a) any file in Source Code Form that results from an addition to, + deletion from, or modification of the contents of Covered + Software; or + + (b) any new file in Source Code Form that contains any Covered + Software. + +1.11. "Patent Claims" of a Contributor + means any patent claim(s), including without limitation, method, + process, and apparatus claims, in any patent Licensable by such + Contributor that would be infringed, but for the grant of the + License, by the making, using, selling, offering for sale, having + made, import, or transfer of either its Contributions or its + Contributor Version. + +1.12. 
"Secondary License" + means either the GNU General Public License, Version 2.0, the GNU + Lesser General Public License, Version 2.1, the GNU Affero General + Public License, Version 3.0, or any later versions of those + licenses. + +1.13. "Source Code Form" + means the form of the work preferred for making modifications. + +1.14. "You" (or "Your") + means an individual or a legal entity exercising rights under this + License. For legal entities, "You" includes any entity that + controls, is controlled by, or is under common control with You. For + purposes of this definition, "control" means (a) the power, direct + or indirect, to cause the direction or management of such entity, + whether by contract or otherwise, or (b) ownership of more than + fifty percent (50%) of the outstanding shares or beneficial + ownership of such entity. + +2. License Grants and Conditions +-------------------------------- + +2.1. Grants + +Each Contributor hereby grants You a world-wide, royalty-free, +non-exclusive license: + +(a) under intellectual property rights (other than patent or trademark) + Licensable by such Contributor to use, reproduce, make available, + modify, display, perform, distribute, and otherwise exploit its + Contributions, either on an unmodified basis, with Modifications, or + as part of a Larger Work; and + +(b) under Patent Claims of such Contributor to make, use, sell, offer + for sale, have made, import, and otherwise transfer either its + Contributions or its Contributor Version. + +2.2. Effective Date + +The licenses granted in Section 2.1 with respect to any Contribution +become effective for each Contribution on the date the Contributor first +distributes such Contribution. + +2.3. Limitations on Grant Scope + +The licenses granted in this Section 2 are the only rights granted under +this License. No additional rights or licenses will be implied from the +distribution or licensing of Covered Software under this License. +Notwithstanding Section 2.1(b) above, no patent license is granted by a +Contributor: + +(a) for any code that a Contributor has removed from Covered Software; + or + +(b) for infringements caused by: (i) Your and any other third party's + modifications of Covered Software, or (ii) the combination of its + Contributions with other software (except as part of its Contributor + Version); or + +(c) under Patent Claims infringed by Covered Software in the absence of + its Contributions. + +This License does not grant any rights in the trademarks, service marks, +or logos of any Contributor (except as may be necessary to comply with +the notice requirements in Section 3.4). + +2.4. Subsequent Licenses + +No Contributor makes additional grants as a result of Your choice to +distribute the Covered Software under a subsequent version of this +License (see Section 10.2) or under the terms of a Secondary License (if +permitted under the terms of Section 3.3). + +2.5. Representation + +Each Contributor represents that the Contributor believes its +Contributions are its original creation(s) or it has sufficient rights +to grant the rights to its Contributions conveyed by this License. + +2.6. Fair Use + +This License is not intended to limit any rights You have under +applicable copyright doctrines of fair use, fair dealing, or other +equivalents. + +2.7. Conditions + +Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted +in Section 2.1. + +3. Responsibilities +------------------- + +3.1. 
Distribution of Source Form + +All distribution of Covered Software in Source Code Form, including any +Modifications that You create or to which You contribute, must be under +the terms of this License. You must inform recipients that the Source +Code Form of the Covered Software is governed by the terms of this +License, and how they can obtain a copy of this License. You may not +attempt to alter or restrict the recipients' rights in the Source Code +Form. + +3.2. Distribution of Executable Form + +If You distribute Covered Software in Executable Form then: + +(a) such Covered Software must also be made available in Source Code + Form, as described in Section 3.1, and You must inform recipients of + the Executable Form how they can obtain a copy of such Source Code + Form by reasonable means in a timely manner, at a charge no more + than the cost of distribution to the recipient; and + +(b) You may distribute such Executable Form under the terms of this + License, or sublicense it under different terms, provided that the + license for the Executable Form does not attempt to limit or alter + the recipients' rights in the Source Code Form under this License. + +3.3. Distribution of a Larger Work + +You may create and distribute a Larger Work under terms of Your choice, +provided that You also comply with the requirements of this License for +the Covered Software. If the Larger Work is a combination of Covered +Software with a work governed by one or more Secondary Licenses, and the +Covered Software is not Incompatible With Secondary Licenses, this +License permits You to additionally distribute such Covered Software +under the terms of such Secondary License(s), so that the recipient of +the Larger Work may, at their option, further distribute the Covered +Software under the terms of either this License or such Secondary +License(s). + +3.4. Notices + +You may not remove or alter the substance of any license notices +(including copyright notices, patent notices, disclaimers of warranty, +or limitations of liability) contained within the Source Code Form of +the Covered Software, except that You may alter any license notices to +the extent required to remedy known factual inaccuracies. + +3.5. Application of Additional Terms + +You may choose to offer, and to charge a fee for, warranty, support, +indemnity or liability obligations to one or more recipients of Covered +Software. However, You may do so only on Your own behalf, and not on +behalf of any Contributor. You must make it absolutely clear that any +such warranty, support, indemnity, or liability obligation is offered by +You alone, and You hereby agree to indemnify every Contributor for any +liability incurred by such Contributor as a result of warranty, support, +indemnity or liability terms You offer. You may include additional +disclaimers of warranty and limitations of liability specific to any +jurisdiction. + +4. Inability to Comply Due to Statute or Regulation +--------------------------------------------------- + +If it is impossible for You to comply with any of the terms of this +License with respect to some or all of the Covered Software due to +statute, judicial order, or regulation then You must: (a) comply with +the terms of this License to the maximum extent possible; and (b) +describe the limitations and the code they affect. Such description must +be placed in a text file included with all distributions of the Covered +Software under this License. 
Except to the extent prohibited by statute +or regulation, such description must be sufficiently detailed for a +recipient of ordinary skill to be able to understand it. + +5. Termination +-------------- + +5.1. The rights granted under this License will terminate automatically +if You fail to comply with any of its terms. However, if You become +compliant, then the rights granted under this License from a particular +Contributor are reinstated (a) provisionally, unless and until such +Contributor explicitly and finally terminates Your grants, and (b) on an +ongoing basis, if such Contributor fails to notify You of the +non-compliance by some reasonable means prior to 60 days after You have +come back into compliance. Moreover, Your grants from a particular +Contributor are reinstated on an ongoing basis if such Contributor +notifies You of the non-compliance by some reasonable means, this is the +first time You have received notice of non-compliance with this License +from such Contributor, and You become compliant prior to 30 days after +Your receipt of the notice. + +5.2. If You initiate litigation against any entity by asserting a patent +infringement claim (excluding declaratory judgment actions, +counter-claims, and cross-claims) alleging that a Contributor Version +directly or indirectly infringes any patent, then the rights granted to +You by any and all Contributors for the Covered Software under Section +2.1 of this License shall terminate. + +5.3. In the event of termination under Sections 5.1 or 5.2 above, all +end user license agreements (excluding distributors and resellers) which +have been validly granted by You or Your distributors under this License +prior to termination shall survive termination. + +************************************************************************ +* * +* 6. Disclaimer of Warranty * +* ------------------------- * +* * +* Covered Software is provided under this License on an "as is" * +* basis, without warranty of any kind, either expressed, implied, or * +* statutory, including, without limitation, warranties that the * +* Covered Software is free of defects, merchantable, fit for a * +* particular purpose or non-infringing. The entire risk as to the * +* quality and performance of the Covered Software is with You. * +* Should any Covered Software prove defective in any respect, You * +* (not any Contributor) assume the cost of any necessary servicing, * +* repair, or correction. This disclaimer of warranty constitutes an * +* essential part of this License. No use of any Covered Software is * +* authorized under this License except under this disclaimer. * +* * +************************************************************************ + +************************************************************************ +* * +* 7. Limitation of Liability * +* -------------------------- * +* * +* Under no circumstances and under no legal theory, whether tort * +* (including negligence), contract, or otherwise, shall any * +* Contributor, or anyone who distributes Covered Software as * +* permitted above, be liable to You for any direct, indirect, * +* special, incidental, or consequential damages of any character * +* including, without limitation, damages for lost profits, loss of * +* goodwill, work stoppage, computer failure or malfunction, or any * +* and all other commercial damages or losses, even if such party * +* shall have been informed of the possibility of such damages. 
This * +* limitation of liability shall not apply to liability for death or * +* personal injury resulting from such party's negligence to the * +* extent applicable law prohibits such limitation. Some * +* jurisdictions do not allow the exclusion or limitation of * +* incidental or consequential damages, so this exclusion and * +* limitation may not apply to You. * +* * +************************************************************************ + +8. Litigation +------------- + +Any litigation relating to this License may be brought only in the +courts of a jurisdiction where the defendant maintains its principal +place of business and such litigation shall be governed by laws of that +jurisdiction, without reference to its conflict-of-law provisions. +Nothing in this Section shall prevent a party's ability to bring +cross-claims or counter-claims. + +9. Miscellaneous +---------------- + +This License represents the complete agreement concerning the subject +matter hereof. If any provision of this License is held to be +unenforceable, such provision shall be reformed only to the extent +necessary to make it enforceable. Any law or regulation which provides +that the language of a contract shall be construed against the drafter +shall not be used to construe this License against a Contributor. + +10. Versions of the License +--------------------------- + +10.1. New Versions + +Mozilla Foundation is the license steward. Except as provided in Section +10.3, no one other than the license steward has the right to modify or +publish new versions of this License. Each version will be given a +distinguishing version number. + +10.2. Effect of New Versions + +You may distribute the Covered Software under the terms of the version +of the License under which You originally received the Covered Software, +or under the terms of any subsequent version published by the license +steward. + +10.3. Modified Versions + +If you create software not governed by this License, and you want to +create a new license for such software, you may create and use a +modified version of this License if you rename the license and remove +any references to the name of the license steward (except to note that +such modified license differs from this License). + +10.4. Distributing Source Code Form that is Incompatible With Secondary +Licenses + +If You choose to distribute Source Code Form that is Incompatible With +Secondary Licenses under the terms of this version of the License, the +notice described in Exhibit B of this License must be attached. + +Exhibit A - Source Code Form License Notice +------------------------------------------- + + This Source Code Form is subject to the terms of the Mozilla Public + License, v. 2.0. If a copy of the MPL was not distributed with this + file, You can obtain one at http://mozilla.org/MPL/2.0/. + +If it is not possible or desirable to put the notice in a particular +file, then You may include the notice in a location (such as a LICENSE +file in a relevant directory) where a recipient would be likely to look +for such a notice. + +You may add additional accurate notices of copyright ownership. + +Exhibit B - "Incompatible With Secondary Licenses" Notice +--------------------------------------------------------- + + This Source Code Form is "Incompatible With Secondary Licenses", as + defined by the Mozilla Public License, v. 2.0. 
\ No newline at end of file diff --git a/cli/Makefile b/cli/Makefile new file mode 100644 index 0000000..90db191 --- /dev/null +++ b/cli/Makefile @@ -0,0 +1,332 @@ +TURBO_VERSION = $(shell cat ../version.txt | sed -n '1 p') +TURBO_TAG = $(shell cat ../version.txt | sed -n '2 p') + +EXT := +ifeq ($(OS),Windows_NT) + UNAME := Windows + EXT = .exe +else + UNAME := $(shell uname -s) +endif + +GOARCH:=$(shell go env GOARCH | xargs) +GOOS:=$(shell go env GOOS | xargs) + +# Strip debug info +GO_FLAGS += "-ldflags=-s -w" + +# Avoid embedding the build path in the executable for more reproducible builds +GO_FLAGS += -trimpath + +CLI_DIR = $(shell pwd) + +# allow opting in to the rust codepaths +GO_TAG ?= rust + +GO_FILES = $(shell find . -name "*.go") +SRC_FILES = $(shell find . -name "*.go" | grep -v "_test.go") +GENERATED_FILES = internal/turbodprotocol/turbod.pb.go internal/turbodprotocol/turbod_grpc.pb.go + +# We do not set go-turbo as a dependency because the Rust build.rs +# script will call it for us and copy over the binary +turbo: + cargo build -p turbo + +turbo-prod: + cargo build --release --manifest-path ../crates/turborepo/Cargo.toml + +go-turbo$(EXT): $(GENERATED_FILES) $(SRC_FILES) go.mod turborepo-ffi-install + CGO_ENABLED=1 go build -tags $(GO_TAG) -o go-turbo$(EXT) ./cmd/turbo + + +.PHONY: turborepo-ffi-install +turborepo-ffi-install: turborepo-ffi turborepo-ffi-copy-bindings + cp ../crates/turborepo-ffi/target/debug/libturborepo_ffi.a ./internal/ffi/libturborepo_ffi_$(GOOS)_$(GOARCH).a + +.PHONY: turborepo-ffi +turborepo-ffi: + cd ../crates/turborepo-ffi && cargo build --target-dir ./target + +.PHONY: turborepo-ffi-copy-bindings +turborepo-ffi-copy-bindings: + cp ../crates/turborepo-ffi/bindings.h ./internal/ffi/bindings.h + +# +# ffi cross compiling +# +# these targets are used to build the ffi library for each platform +# when doing a release. they _may_ work on your local machine, but +# they're not intended to be used for development. 
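+#
+# as a sketch, a linux-arm64 release library might be produced with
+# (assuming Zig is installed and the musl target was added via rustup):
+#
+#   rustup target add aarch64-unknown-linux-musl
+#   make turborepo-ffi-install-linux-arm64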
+# + +.PHONY: turborepo-ffi-install-windows-amd64 +turborepo-ffi-install-windows-amd64: turborepo-ffi-windows-amd64 turborepo-ffi-copy-bindings + cp ../crates/turborepo-ffi/target/x86_64-pc-windows-gnu/release/libturborepo_ffi.a ./internal/ffi/libturborepo_ffi_windows_amd64.a + +.PHONY: turborepo-ffi-install-darwin-arm64 +turborepo-ffi-install-darwin-arm64: turborepo-ffi-darwin-arm64 turborepo-ffi-copy-bindings + cp ../crates/turborepo-ffi/target/aarch64-apple-darwin/release/libturborepo_ffi.a ./internal/ffi/libturborepo_ffi_darwin_arm64.a + +.PHONY: turborepo-ffi-install-darwin-amd64 +turborepo-ffi-install-darwin-amd64: turborepo-ffi-darwin-amd64 turborepo-ffi-copy-bindings + cp ../crates/turborepo-ffi/target/x86_64-apple-darwin/release/libturborepo_ffi.a ./internal/ffi/libturborepo_ffi_darwin_amd64.a + +.PHONY: turborepo-ffi-install-linux-arm64 +turborepo-ffi-install-linux-arm64: turborepo-ffi-linux-arm64 turborepo-ffi-copy-bindings + cp ../crates/turborepo-ffi/target/aarch64-unknown-linux-musl/release/libturborepo_ffi.a ./internal/ffi/libturborepo_ffi_linux_arm64.a + +.PHONY: turborepo-ffi-install-linux-amd64 +turborepo-ffi-install-linux-amd64: turborepo-ffi-linux-amd64 turborepo-ffi-copy-bindings + cp ../crates/turborepo-ffi/target/x86_64-unknown-linux-musl/release/libturborepo_ffi.a ./internal/ffi/libturborepo_ffi_linux_amd64.a + +.PHONY: turborepo-ffi-windows-amd64 +turborepo-ffi-windows-amd64: + cd ../crates/turborepo-ffi && cargo build --release --target-dir ./target --target x86_64-pc-windows-gnu + +.PHONY: turborepo-ffi-darwin-arm64 +turborepo-ffi-darwin-arm64: + cd ../crates/turborepo-ffi && cargo build --release --target-dir ./target --target aarch64-apple-darwin + +.PHONY: turborepo-ffi-darwin-amd64 +turborepo-ffi-darwin-amd64: + cd ../crates/turborepo-ffi && cargo build --release --target-dir ./target --target x86_64-apple-darwin + +.PHONY: turborepo-ffi-linux-arm64 +turborepo-ffi-linux-arm64: + cd ../crates/turborepo-ffi && CC="zig cc -target aarch64-linux-musl" cargo build --release --target-dir ./target --target aarch64-unknown-linux-musl + +.PHONY: turborepo-ffi-linux-amd64 +turborepo-ffi-linux-amd64: + cd ../crates/turborepo-ffi && CC="zig cc -target x86_64-linux-musl" cargo build --release --target-dir ./target --target x86_64-unknown-linux-musl + +# +# end +# + +.PHONY: turborepo-ffi-proto +turborepo-ffi-proto: + protoc -I../crates/ ../crates/turborepo-ffi/messages.proto --go_out=./internal/ + +protoc: internal/turbodprotocol/turbod.proto + protoc --go_out=. --go_opt=paths=source_relative \ + --go-grpc_out=. --go-grpc_opt=paths=source_relative \ + internal/turbodprotocol/turbod.proto + +$(GENERATED_FILES): internal/turbodprotocol/turbod.proto + make protoc + +compile-protos: $(GENERATED_FILES) + +ewatch: scripts/... + nodemon --exec "make e2e" -e .ts,.go + +check-go-version: + @go version | grep ' go1\.18\.0 ' || (echo 'Please install Go version 1.18.0' && false) + +# This "TURBO_RACE" variable exists at the request of a user on GitHub who +# wants to run "make test-go" on an unsupported version of macOS (version 10.9). +# Go's race detector does not run correctly on that version. With this flag +# you can run "TURBO_RACE= make test-go" to disable the race detector. +TURBO_RACE ?= -race + +ifeq ($(UNAME), Windows) + TURBO_RACE= +endif + +clean-go: + go clean -testcache -r + +test-go: $(GENERATED_FILES) $(GO_FILES) go.mod go.sum turborepo-ffi-install + go test $(TURBO_RACE) -tags $(GO_TAG) ./... 
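+
+# A sketch of testing a single package directly, under the same assumptions as
+# test-go (GO_TAG defaults to rust and the ffi static library is installed):
+#
+#   make turborepo-ffi-install
+#   go test -race -tags rust ./internal/cache/...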
+
+# protos need to be compiled before linting, since linting needs to pick up
+# some types from the generated code
+lint-go: $(GENERATED_FILES) $(GO_FILES) go.mod go.sum
+	golangci-lint run --new-from-rev=main
+
+fmt-go: $(GO_FILES) go.mod go.sum
+	go fmt ./...
+
+install: | ./package.json
+	pnpm install --filter=cli
+
+corepack:
+	which corepack || npm install -g corepack@latest
+	corepack enable
+
+e2e: corepack install turbo
+	node -r esbuild-register scripts/e2e/e2e.ts
+
+# Expects turbo to be built and up to date.
+# Should only be used by CI.
+e2e-prebuilt: corepack install
+	node -r esbuild-register scripts/e2e/e2e.ts
+
+cmd/turbo/version.go: ../version.txt
+	# Update this atomically to avoid issues with this being overwritten during use
+	node -e 'console.log(`package main\n\nconst turboVersion = "$(TURBO_VERSION)"`)' > cmd/turbo/version.go.txt
+	mv cmd/turbo/version.go.txt cmd/turbo/version.go
+
+build: install
+	cd $(CLI_DIR)/../ && pnpm install --filter=create-turbo && pnpm turbo build --filter=create-turbo...
+	cd $(CLI_DIR)/../ && pnpm install --filter=@turbo/codemod && pnpm turbo build --filter=@turbo/codemod...
+	cd $(CLI_DIR)/../ && pnpm install --filter=turbo-ignore && pnpm turbo build --filter=turbo-ignore...
+	cd $(CLI_DIR)/../ && pnpm install --filter=@turbo/workspaces && pnpm turbo build --filter=@turbo/workspaces...
+	cd $(CLI_DIR)/../ && pnpm install --filter=eslint-plugin-turbo && pnpm turbo build --filter=eslint-plugin-turbo...
+	cd $(CLI_DIR)/../ && pnpm install --filter=eslint-config-turbo && pnpm turbo build --filter=eslint-config-turbo...
+
+.PHONY: prepublish
+prepublish: compile-protos cmd/turbo/version.go
+	make -j3 bench/turbo test-go
+
+.PHONY: publish-turbo-cross
+publish-turbo-cross: prepublish
+	goreleaser release --rm-dist -f cross-release.yml
+
+.PHONY: publish-turbo-darwin
+publish-turbo-darwin: prepublish
+	goreleaser release --rm-dist -f darwin-release.yml
+
+.PHONY: snapshot-turbo-cross
+snapshot-turbo-cross:
+	goreleaser release --snapshot --rm-dist -f cross-release.yml
+
+.PHONY: snapshot-turbo-darwin
+snapshot-turbo-darwin:
+	goreleaser release --snapshot --rm-dist -f darwin-release.yml
+
+.PHONY: snapshot-lib-turbo-darwin
+snapshot-lib-turbo-darwin:
+	goreleaser release --snapshot --rm-dist -f darwin-lib.yml
+
+.PHONY: snapshot-lib-turbo-cross
+snapshot-lib-turbo-cross:
+	goreleaser release --snapshot --rm-dist -f cross-lib.yml
+
+.PHONY: build-lib-turbo-darwin
+build-lib-turbo-darwin:
+	goreleaser release --rm-dist -f darwin-lib.yml
+
+.PHONY: build-go-turbo-darwin
+build-go-turbo-darwin:
+	goreleaser release --rm-dist -f darwin-release.yml
+
+.PHONY: build-go-turbo-cross
+build-go-turbo-cross:
+	goreleaser release --rm-dist -f cross-release.yml
+
+.PHONY: build-lib-turbo-cross
+build-lib-turbo-cross:
+	goreleaser release --rm-dist -f cross-lib.yml
+
+.PHONY: stage-release
+stage-release: cmd/turbo/version.go
+	echo "Version: $(TURBO_VERSION)"
+	echo "Tag: $(TURBO_TAG)"
+	cat $(CLI_DIR)/../version.txt
+	git diff -- $(CLI_DIR)/../version.txt
+	git status
+	@test main = "`git rev-parse --abbrev-ref HEAD`" || (echo "Refusing to publish from non-main branch `git rev-parse --abbrev-ref HEAD`" && false)
+	@test "" = "`git cherry`" || (echo "Refusing to publish with unpushed commits" && false)

	# Stop if versions are not updated.
+ @test "" != "`git diff -- $(CLI_DIR)/../version.txt`" || (echo "Refusing to publish with unupdated version.txt" && false) + @test "" != "`git diff -- $(CLI_DIR)/cmd/turbo/version.go`" || (echo "Refusing to publish with unupdated version.go" && false) + + # Prepare the packages. + cd $(CLI_DIR)/../packages/turbo && pnpm version "$(TURBO_VERSION)" --allow-same-version + cd $(CLI_DIR)/../packages/create-turbo && pnpm version "$(TURBO_VERSION)" --allow-same-version + cd $(CLI_DIR)/../packages/turbo-codemod && pnpm version "$(TURBO_VERSION)" --allow-same-version + cd $(CLI_DIR)/../packages/turbo-ignore && pnpm version "$(TURBO_VERSION)" --allow-same-version + cd $(CLI_DIR)/../packages/turbo-workspaces && pnpm version "$(TURBO_VERSION)" --allow-same-version + cd $(CLI_DIR)/../packages/eslint-plugin-turbo && pnpm version "$(TURBO_VERSION)" --allow-same-version + cd $(CLI_DIR)/../packages/eslint-config-turbo && pnpm version "$(TURBO_VERSION)" --allow-same-version + + git checkout -b staging-$(TURBO_VERSION) + git commit -anm "publish $(TURBO_VERSION) to registry" + git tag "v$(TURBO_VERSION)" + git push origin staging-$(TURBO_VERSION) --tags --force + +.PHONY: publish-turbo +publish-turbo: clean build + echo "Version: $(TURBO_VERSION)" + echo "Tag: $(TURBO_TAG)" + + # Include the patch in the log. + git format-patch HEAD~1 --stdout | cat + + npm config set --location=project "//registry.npmjs.org/:_authToken" $(NPM_TOKEN) + + # Publishes the native npm modules. + goreleaser release --rm-dist -f combined-shim.yml $(SKIP_PUBLISH) + + # Split packing from the publish step so that npm locates the correct .npmrc file. + cd $(CLI_DIR)/../packages/turbo && pnpm pack --pack-destination=$(CLI_DIR)/../ + cd $(CLI_DIR)/../packages/create-turbo && pnpm pack --pack-destination=$(CLI_DIR)/../ + cd $(CLI_DIR)/../packages/turbo-codemod && pnpm pack --pack-destination=$(CLI_DIR)/../ + cd $(CLI_DIR)/../packages/turbo-ignore && pnpm pack --pack-destination=$(CLI_DIR)/../ + cd $(CLI_DIR)/../packages/turbo-workspaces && pnpm pack --pack-destination=$(CLI_DIR)/../ + cd $(CLI_DIR)/../packages/eslint-plugin-turbo && pnpm pack --pack-destination=$(CLI_DIR)/../ + cd $(CLI_DIR)/../packages/eslint-config-turbo && pnpm pack --pack-destination=$(CLI_DIR)/../ + +ifneq ($(SKIP_PUBLISH),--skip-publish) + # Publish the remaining JS packages in order to avoid race conditions. 
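+	# (publishes run sequentially from the repo root; -ddd enables npm's most verbose logging)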
cd $(CLI_DIR)/../
+	npm publish -ddd --tag $(TURBO_TAG) $(CLI_DIR)/../turbo-$(TURBO_VERSION).tgz
+	npm publish -ddd --tag $(TURBO_TAG) $(CLI_DIR)/../create-turbo-$(TURBO_VERSION).tgz
+	npm publish -ddd --tag $(TURBO_TAG) $(CLI_DIR)/../turbo-codemod-$(TURBO_VERSION).tgz
+	npm publish -ddd --tag $(TURBO_TAG) $(CLI_DIR)/../turbo-ignore-$(TURBO_VERSION).tgz
+	npm publish -ddd --tag $(TURBO_TAG) $(CLI_DIR)/../turbo-workspaces-$(TURBO_VERSION).tgz
+	npm publish -ddd --tag $(TURBO_TAG) $(CLI_DIR)/../eslint-plugin-turbo-$(TURBO_VERSION).tgz
+	npm publish -ddd --tag $(TURBO_TAG) $(CLI_DIR)/../eslint-config-turbo-$(TURBO_VERSION).tgz
+endif
+
+demo/lage: install
+	node $(CLI_DIR)/scripts/generate.mjs lage
+
+demo/lerna: install
+	node $(CLI_DIR)/scripts/generate.mjs lerna
+
+demo/nx: install
+	node $(CLI_DIR)/scripts/generate.mjs nx
+
+demo/turbo: install
+	node $(CLI_DIR)/scripts/generate.mjs turbo
+
+demo: demo/lage demo/lerna demo/nx demo/turbo
+
+bench/lerna: demo/lerna
+	cd $(CLI_DIR)/demo/lerna && node_modules/.bin/lerna run build
+
+bench/lage: demo/lage
+	cd $(CLI_DIR)/demo/lage && node_modules/.bin/lage build
+
+bench/nx: demo/nx
+	cd $(CLI_DIR)/demo/nx && node_modules/.bin/nx run-many --target=build --all
+
+bench/turbo: demo/turbo turbo
+	cd $(CLI_DIR)/demo/turbo && $(CLI_DIR)/turbo run test
+
+bench: bench/lerna bench/lage bench/nx bench/turbo
+
+clean: clean-go clean-build clean-demo clean-rust
+
+clean-rust:
+	cargo clean
+
+clean-build:
+	rm -f turbo
+
+clean-demo:
+	rm -rf node_modules
+	rm -rf demo
+
+# use target fixture-<name> to set up the testbed directory
+.PHONY: fixture-%
+fixture-%:
+	$(eval $@_FIXTURE := $(@:fixture-%=%))
+	@echo "fixture setup $($@_FIXTURE)"
+	rm -rf testbed
+	mkdir -p testbed
+	../turborepo-tests/integration/tests/_helpers/setup_monorepo.sh ./testbed $($@_FIXTURE)
+
diff --git a/cli/README.md b/cli/README.md
new file mode 100644
index 0000000..0cc7a7b
--- /dev/null
+++ b/cli/README.md
@@ -0,0 +1,3 @@
+# `turbo` CLI
+
+Visit https://turbo.build/repo to view the full documentation.
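+
+For local development, the `Makefile` in this directory wraps the common
+tasks: for example, `make turbo` produces a debug build of the Rust `turbo`
+binary via `cargo build -p turbo`, and `make test-go` runs the Go test suite.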
diff --git a/cli/cmd/turbo/main.go b/cli/cmd/turbo/main.go new file mode 100644 index 0000000..d4155f5 --- /dev/null +++ b/cli/cmd/turbo/main.go @@ -0,0 +1,28 @@ +package main + +import ( + "encoding/json" + "fmt" + "os" + + "github.com/vercel/turbo/cli/internal/cmd" + "github.com/vercel/turbo/cli/internal/turbostate" +) + +func main() { + if len(os.Args) != 2 { + fmt.Printf("go-turbo is expected to be invoked via turbo") + os.Exit(1) + } + + argsString := os.Args[1] + var args turbostate.ParsedArgsFromRust + err := json.Unmarshal([]byte(argsString), &args) + if err != nil { + fmt.Printf("Error unmarshalling CLI args: %v\n Arg string: %v\n", err, argsString) + os.Exit(1) + } + + exitCode := cmd.RunWithArgs(&args, turboVersion) + os.Exit(exitCode) +} diff --git a/cli/cmd/turbo/version.go b/cli/cmd/turbo/version.go new file mode 100644 index 0000000..3febcaa --- /dev/null +++ b/cli/cmd/turbo/version.go @@ -0,0 +1,3 @@ +package main + +const turboVersion = "1.9.4-canary.2" diff --git a/cli/combined-release.yml b/cli/combined-release.yml new file mode 100644 index 0000000..6b85013 --- /dev/null +++ b/cli/combined-release.yml @@ -0,0 +1,78 @@ +project_name: turbo + +dist: dist + +builds: + - id: turbo + builder: prebuilt + tags: + - rust + - staticbinary + goos: + - linux + - windows + - darwin + goarch: + - amd64 + - arm64 + goamd64: + - v1 + prebuilt: + path: dist-combined/turbo_{{ .Os }}_{{ .Arch }}{{ with .Amd64 }}_{{ .}}{{ end }}/bin/turbo{{ .Ext }} + hooks: + pre: + - cmd: ./scripts/npm-native-packages/npm-native-packages.js {{ .Os }} {{ .Arch }} {{ .Version }} + binary: bin/turbo +checksum: + name_template: "checksums.txt" +snapshot: + name_template: "{{ incpatch .Version }}" +archives: + - id: github + name_template: "{{ .ProjectName }}-{{ .Version }}-{{ .Os }}-{{ .Arch }}" + wrap_in_directory: true + replacements: + amd64: 64 + format: tar.gz + format_overrides: + - goos: windows + format: zip + files: + - LICENSE + - README.md + - id: npm + name_template: "{{ .ProjectName }}-{{ .Os }}-{{ .Arch }}" + wrap_in_directory: true + replacements: + amd64: 64 + format: tar.gz + files: + - LICENSE + - src: "scripts/npm-native-packages/build/{{ .ProjectName }}-{{ .Os }}-{{ .Arch }}/package.json" + dst: "workaround/.." + strip_parent: true + - src: "scripts/npm-native-packages/build/{{ .ProjectName }}-{{ .Os }}-{{ .Arch }}/README.md" + dst: "workaround/.." 
+ strip_parent: true + - src: "scripts/npm-native-packages/build/{{ .ProjectName }}-{{ .Os }}-{{ .Arch }}/bin/*" + dst: "bin/" + strip_parent: true +changelog: + sort: asc + filters: + exclude: + - "^docs:" + - "^test:" +release: + github: + owner: vercel + name: turborepo + ids: + - github + prerelease: auto + disable: true +publishers: + - name: npm + ids: + - npm + cmd: "npm publish{{ if .Prerelease }} --tag canary{{ end }} {{ abs .ArtifactPath }}" diff --git a/cli/combined-shim.yml b/cli/combined-shim.yml new file mode 100644 index 0000000..67d9914 --- /dev/null +++ b/cli/combined-shim.yml @@ -0,0 +1,78 @@ +project_name: turbo + +dist: dist + +builds: + - id: turbo + builder: prebuilt + tags: + - rust + - staticbinary + goos: + - linux + - windows + - darwin + goarch: + - amd64 + - arm64 + goamd64: + - v1 + prebuilt: + path: dist-{{ .Os }}-{{ .Arch }}/turbo{{ .Ext }} + hooks: + pre: + - cmd: ./scripts/npm-native-packages/npm-native-packages.js {{ .Os }} {{ .Arch }} {{ .Version }} + binary: bin/turbo +checksum: + name_template: "checksums.txt" +snapshot: + name_template: "{{ incpatch .Version }}" +archives: + - id: github + name_template: "{{ .ProjectName }}-{{ .Version }}-{{ .Os }}-{{ .Arch }}" + wrap_in_directory: true + replacements: + amd64: 64 + format: tar.gz + format_overrides: + - goos: windows + format: zip + files: + - LICENSE + - README.md + - id: npm + name_template: "{{ .ProjectName }}-{{ .Os }}-{{ .Arch }}" + wrap_in_directory: true + replacements: + amd64: 64 + format: tar.gz + files: + - LICENSE + - src: "scripts/npm-native-packages/build/{{ .ProjectName }}-{{ .Os }}-{{ .Arch }}/package.json" + dst: "workaround/.." + strip_parent: true + - src: "scripts/npm-native-packages/build/{{ .ProjectName }}-{{ .Os }}-{{ .Arch }}/README.md" + dst: "workaround/.." 
+ strip_parent: true + - src: "scripts/npm-native-packages/build/{{ .ProjectName }}-{{ .Os }}-{{ .Arch }}/bin/*" + dst: "bin/" + strip_parent: true +changelog: + sort: asc + filters: + exclude: + - "^docs:" + - "^test:" +release: + github: + owner: vercel + name: turborepo + ids: + - github + prerelease: auto + disable: true +publishers: + - name: npm + ids: + - npm + cmd: "npm publish{{ if .Prerelease }} --tag canary{{ end }} {{ abs .ArtifactPath }}" diff --git a/cli/cross-release.yml b/cli/cross-release.yml new file mode 100644 index 0000000..18e449f --- /dev/null +++ b/cli/cross-release.yml @@ -0,0 +1,61 @@ +project_name: turbo +before: + hooks: + - make compile-protos + - go mod tidy + +dist: dist-cross + +builds: + - id: turbo + main: ./cmd/turbo + tags: + - rust + - staticbinary + binary: bin/go-turbo + flags: + - -trimpath + ldflags: + - -s -w -X main.version={{.Version}} -X main.commit={{.Commit}} -X main.date={{.CommitDate}} -X main.builtBy=goreleaser + mod_timestamp: "{{ .CommitTimestamp }}" + env: + - CGO_ENABLED=1 + hooks: + pre: + - cmd: make turborepo-ffi-install-{{ .Os }}-{{ .Arch }} + output: true + targets: + - linux_arm64 + - linux_amd64 + - windows_amd64 + overrides: + - goos: linux + goarch: arm64 + ldflags: -linkmode external -extldflags="-static" -s -w -X main.version={{.Version}} -X main.commit={{.Commit}} -X main.date={{.CommitDate}} -X main.builtBy=goreleaser + env: + - CC=zig cc -target aarch64-linux-musl + - CXX=zig c++ -target aarch64-linux-musl + - goos: linux + goarch: amd64 + goamd64: v1 + ldflags: -linkmode external -extldflags="-static" -s -w -X main.version={{.Version}} -X main.commit={{.Commit}} -X main.date={{.CommitDate}} -X main.builtBy=goreleaser + env: + - CC=zig cc -target x86_64-linux-musl + - CXX=zig c++ -target x86_64-linux-musl + - goos: windows + goarch: arm64 + env: + - CC=/llvm-mingw/llvm-mingw/bin/aarch64-w64-mingw32-gcc + - CXX=/llvm-mingw/llvm-mingw/bin/aarch64-w64-mingw32-g++ + - goos: windows + goarch: amd64 + goamd64: v1 + env: + - CC=x86_64-w64-mingw32-gcc + - CXX=x86_64-w64-mingw32-g++ + +archives: + - format: binary + +release: + disable: true diff --git a/cli/darwin-release.yml b/cli/darwin-release.yml new file mode 100644 index 0000000..ca6e8a0 --- /dev/null +++ b/cli/darwin-release.yml @@ -0,0 +1,35 @@ +project_name: turbo +before: + hooks: + - make compile-protos + - go mod tidy + +dist: dist-darwin + +builds: + - id: turbo + main: ./cmd/turbo + tags: + - rust + - staticbinary + binary: bin/go-turbo + hooks: + pre: + - cmd: make turborepo-ffi-install-{{ .Os }}-{{ .Arch }} + output: true + flags: + - -trimpath + ldflags: + - -s -w -X main.version={{.Version}} -X main.commit={{.Commit}} -X main.date={{.CommitDate}} -X main.builtBy=goreleaser + mod_timestamp: "{{ .CommitTimestamp }}" + env: + - CGO_ENABLED=1 + targets: + - darwin_arm64 + - darwin_amd64 + +archives: + - format: binary + +release: + disable: true diff --git a/cli/fixtures/01-git-hash-object/.gitignore b/cli/fixtures/01-git-hash-object/.gitignore new file mode 100644 index 0000000..d8e1950 --- /dev/null +++ b/cli/fixtures/01-git-hash-object/.gitignore @@ -0,0 +1,2 @@ +"quote" +new*line diff --git a/cli/fixtures/01-git-hash-object/child/child.json b/cli/fixtures/01-git-hash-object/child/child.json new file mode 100644 index 0000000..e69de29 diff --git a/cli/fixtures/01-git-hash-object/child/grandchild/grandchild.json b/cli/fixtures/01-git-hash-object/child/grandchild/grandchild.json new file mode 100644 index 0000000..e69de29 diff --git 
a/cli/fixtures/01-git-hash-object/root.json b/cli/fixtures/01-git-hash-object/root.json new file mode 100644 index 0000000..e69de29 diff --git a/cli/go.mod b/cli/go.mod new file mode 100644 index 0000000..da8b7f4 --- /dev/null +++ b/cli/go.mod @@ -0,0 +1,91 @@ +module github.com/vercel/turbo/cli + +go 1.18 + +require ( + github.com/AlecAivazis/survey/v2 v2.3.5 + github.com/DataDog/zstd v1.5.2 + github.com/Masterminds/semver v1.5.0 + github.com/adrg/xdg v0.3.3 + github.com/andybalholm/crlf v0.0.0-20171020200849-670099aa064f + github.com/briandowns/spinner v1.18.1 + github.com/cenkalti/backoff/v4 v4.1.3 + github.com/deckarep/golang-set v1.8.0 + github.com/fatih/color v1.13.0 + github.com/fsnotify/fsevents v0.1.1 + github.com/fsnotify/fsnotify v1.6.0 + github.com/gobwas/glob v0.2.3 + github.com/google/chrometracing v0.0.0-20210413150014-55fded0163e7 + github.com/google/go-cmp v0.5.8 + github.com/google/uuid v1.3.0 + github.com/grpc-ecosystem/go-grpc-middleware v1.3.0 + github.com/hashicorp/go-gatedio v0.5.0 + github.com/hashicorp/go-hclog v1.2.1 + github.com/hashicorp/go-multierror v1.1.1 + github.com/hashicorp/go-retryablehttp v0.6.8 + github.com/iseki0/go-yarnlock v0.0.2-0.20220905015017-a2a90751cdfa + github.com/karrick/godirwalk v1.16.1 + github.com/mattn/go-isatty v0.0.14 + github.com/mitchellh/cli v1.1.5 + github.com/mitchellh/mapstructure v1.5.0 + github.com/moby/sys/sequential v0.5.0 + github.com/muhammadmuzzammil1998/jsonc v1.0.0 + github.com/nightlyone/lockfile v1.0.0 + github.com/pkg/errors v0.9.1 + github.com/pyr-sh/dag v1.0.0 + github.com/sabhiram/go-gitignore v0.0.0-20201211210132-54b8a0bf510f + github.com/schollz/progressbar/v3 v3.9.0 + github.com/segmentio/ksuid v1.0.4 + github.com/spf13/cobra v1.3.0 + github.com/spf13/viper v1.12.0 + github.com/stretchr/testify v1.8.0 + github.com/yookoala/realpath v1.0.0 + golang.org/x/sync v0.0.0-20210220032951-036812b2e83c + golang.org/x/sys v0.5.0 + google.golang.org/grpc v1.46.2 + google.golang.org/protobuf v1.28.0 + gotest.tools/v3 v3.3.0 +) + +require ( + github.com/Masterminds/goutils v1.1.1 // indirect + github.com/Masterminds/semver/v3 v3.1.1 // indirect + github.com/Masterminds/sprig/v3 v3.2.1 // indirect + github.com/armon/go-radix v1.0.0 // indirect + github.com/bgentry/speakeasy v0.1.0 // indirect + github.com/davecgh/go-spew v1.1.1 // indirect + github.com/golang/protobuf v1.5.2 // indirect + github.com/hashicorp/errwrap v1.1.0 // indirect + github.com/hashicorp/go-cleanhttp v0.5.2 // indirect + github.com/hashicorp/hcl v1.0.0 // indirect + github.com/huandu/xstrings v1.3.2 // indirect + github.com/imdario/mergo v0.3.11 // indirect + github.com/inconshreveable/mousetrap v1.0.0 // indirect + github.com/magiconair/properties v1.8.6 // indirect + github.com/mattn/go-colorable v0.1.12 // indirect + github.com/mattn/go-runewidth v0.0.13 // indirect + github.com/mitchellh/colorstring v0.0.0-20190213212951-d06e56a500db // indirect + github.com/mitchellh/copystructure v1.0.0 // indirect + github.com/mitchellh/reflectwalk v1.0.1 // indirect + github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e // indirect + github.com/pelletier/go-toml v1.9.5 // indirect + github.com/pelletier/go-toml/v2 v2.0.1 // indirect + github.com/pmezard/go-difflib v1.0.0 // indirect + github.com/posener/complete v1.2.3 // indirect + github.com/rivo/uniseg v0.2.0 // indirect + github.com/shopspring/decimal v1.2.0 // indirect + github.com/spf13/afero v1.8.2 // indirect + github.com/spf13/cast v1.5.0 // indirect + github.com/spf13/jwalterweatherman v1.1.0 
// indirect + github.com/spf13/pflag v1.0.5 // indirect + github.com/subosito/gotenv v1.3.0 // indirect + golang.org/x/crypto v0.0.0-20220411220226-7b82a4e95df4 // indirect + golang.org/x/net v0.0.0-20220520000938-2e3eb7b945c2 // indirect + golang.org/x/term v0.0.0-20210927222741-03fcf44c2211 // indirect + golang.org/x/text v0.3.7 // indirect + google.golang.org/genproto v0.0.0-20220519153652-3a47de7e79bd // indirect + gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f // indirect + gopkg.in/ini.v1 v1.66.4 // indirect + gopkg.in/yaml.v2 v2.4.0 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect +) diff --git a/cli/go.sum b/cli/go.sum new file mode 100644 index 0000000..9c993f4 --- /dev/null +++ b/cli/go.sum @@ -0,0 +1,952 @@ +cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU= +cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU= +cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= +cloud.google.com/go v0.44.3/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= +cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc= +cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0= +cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To= +cloud.google.com/go v0.52.0/go.mod h1:pXajvRH/6o3+F9jDHZWQ5PbGhn+o8w9qiu/CffaVdO4= +cloud.google.com/go v0.53.0/go.mod h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6M= +cloud.google.com/go v0.54.0/go.mod h1:1rq2OEkV3YMf6n/9ZvGWI3GWw0VoqH/1x2nd8Is/bPc= +cloud.google.com/go v0.56.0/go.mod h1:jr7tqZxxKOVYizybht9+26Z/gUq7tiRzu+ACVAMbKVk= +cloud.google.com/go v0.57.0/go.mod h1:oXiQ6Rzq3RAkkY7N6t3TcE6jE+CIBBbA36lwQ1JyzZs= +cloud.google.com/go v0.62.0/go.mod h1:jmCYTdRCQuc1PHIIJ/maLInMho30T/Y0M4hTdTShOYc= +cloud.google.com/go v0.65.0/go.mod h1:O5N8zS7uWy9vkA9vayVHs65eM1ubvY4h553ofrNHObY= +cloud.google.com/go v0.72.0/go.mod h1:M+5Vjvlc2wnp6tjzE102Dw08nGShTscUx2nZMufOKPI= +cloud.google.com/go v0.74.0/go.mod h1:VV1xSbzvo+9QJOxLDaJfTjx5e+MePCpCWwvftOeQmWk= +cloud.google.com/go v0.75.0/go.mod h1:VGuuCn7PG0dwsd5XPVm2Mm3wlh3EL55/79EKB6hlPTY= +cloud.google.com/go v0.78.0/go.mod h1:QjdrLG0uq+YwhjoVOLsS1t7TW8fs36kLs4XO5R5ECHg= +cloud.google.com/go v0.79.0/go.mod h1:3bzgcEeQlzbuEAYu4mrWhKqWjmpprinYgKJLgKHnbb8= +cloud.google.com/go v0.81.0/go.mod h1:mk/AM35KwGk/Nm2YSeZbxXdrNK3KZOYHmLkOqC2V6E0= +cloud.google.com/go v0.83.0/go.mod h1:Z7MJUsANfY0pYPdw0lbnivPx4/vhy/e2FEkSkF7vAVY= +cloud.google.com/go v0.84.0/go.mod h1:RazrYuxIK6Kb7YrzzhPoLmCVzl7Sup4NrbKPg8KHSUM= +cloud.google.com/go v0.87.0/go.mod h1:TpDYlFy7vuLzZMMZ+B6iRiELaY7z/gJPaqbMx6mlWcY= +cloud.google.com/go v0.90.0/go.mod h1:kRX0mNRHe0e2rC6oNakvwQqzyDmg57xJ+SZU1eT2aDQ= +cloud.google.com/go v0.93.3/go.mod h1:8utlLll2EF5XMAV15woO4lSbWQlk8rer9aLOfLh7+YI= +cloud.google.com/go v0.94.1/go.mod h1:qAlAugsXlC+JWO+Bke5vCtc9ONxjQT3drlTTnAplMW4= +cloud.google.com/go v0.97.0/go.mod h1:GF7l59pYBVlXQIBLx3a761cZ41F9bBH3JUlihCt2Udc= +cloud.google.com/go v0.98.0/go.mod h1:ua6Ush4NALrHk5QXDWnjvZHN93OuF0HfuEPq9I1X0cM= +cloud.google.com/go v0.99.0/go.mod h1:w0Xx2nLzqWJPuozYQX+hFfCSI8WioryfRDzkoI/Y2ZA= +cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= +cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= +cloud.google.com/go/bigquery 
v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc= +cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg= +cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc= +cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ= +cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= +cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= +cloud.google.com/go/firestore v1.6.1/go.mod h1:asNXNOzBdyVQmEU+ggO8UPodTkEVFW5Qx+rwHnAz+EY= +cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= +cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= +cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA= +cloud.google.com/go/pubsub v1.3.1/go.mod h1:i+ucay31+CNRpDW4Lu78I4xXG+O1r/MAHgjpRVR+TSU= +cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw= +cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos= +cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk= +cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs= +cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= +cloud.google.com/go/storage v1.14.0/go.mod h1:GrKmX003DSIwi9o29oFT7YDnHYwZoctc3fOKtUw0Xmo= +dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= +github.com/AlecAivazis/survey/v2 v2.3.5 h1:A8cYupsAZkjaUmhtTYv3sSqc7LO5mp1XDfqe5E/9wRQ= +github.com/AlecAivazis/survey/v2 v2.3.5/go.mod h1:4AuI9b7RjAR+G7v9+C4YSlX/YL3K3cWNXgWXOhllqvI= +github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= +github.com/DataDog/datadog-go v3.2.0+incompatible/go.mod h1:LButxg5PwREeZtORoXG3tL4fMGNddJ+vMq1mwgfaqoQ= +github.com/DataDog/zstd v1.5.2 h1:vUG4lAyuPCXO0TLbXvPv7EB7cNK1QV/luu55UHLrrn8= +github.com/DataDog/zstd v1.5.2/go.mod h1:g4AWEaM3yOg3HYfnJ3YIawPnVdXJh9QME85blwSAmyw= +github.com/Masterminds/goutils v1.1.1 h1:5nUrii3FMTL5diU80unEVvNevw1nH4+ZV4DSLVJLSYI= +github.com/Masterminds/goutils v1.1.1/go.mod h1:8cTjp+g8YejhMuvIA5y2vz3BpJxksy863GQaJW2MFNU= +github.com/Masterminds/semver v1.5.0 h1:H65muMkzWKEuNDnfl9d70GUjFniHKHRbFPGBuZ3QEww= +github.com/Masterminds/semver v1.5.0/go.mod h1:MB6lktGJrhw8PrUyiEoblNEGEQ+RzHPF078ddwwvV3Y= +github.com/Masterminds/semver/v3 v3.1.1 h1:hLg3sBzpNErnxhQtUy/mmLR2I9foDujNK030IGemrRc= +github.com/Masterminds/semver/v3 v3.1.1/go.mod h1:VPu/7SZ7ePZ3QOrcuXROw5FAcLl4a0cBrbBpGY/8hQs= +github.com/Masterminds/sprig/v3 v3.2.1 h1:n6EPaDyLSvCEa3frruQvAiHuNp2dhBlMSmkEr+HuzGc= +github.com/Masterminds/sprig/v3 v3.2.1/go.mod h1:UoaO7Yp8KlPnJIYWTFkMaqPUYKTfGFPhxNuwnnxkKlk= +github.com/Netflix/go-expect v0.0.0-20220104043353-73e0943537d2/go.mod h1:HBCaDeC1lPdgDeDbhX8XFpy1jqjK0IBG8W5K+xYqA0w= +github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= +github.com/adrg/xdg v0.3.3 h1:s/tV7MdqQnzB1nKY8aqHvAMD+uCiuEDzVB5HLRY849U= +github.com/adrg/xdg v0.3.3/go.mod h1:61xAR2VZcggl2St4O9ohF5qCKe08+JDmE4VNzPFQvOQ= +github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= 
+github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= +github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= +github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= +github.com/andybalholm/crlf v0.0.0-20171020200849-670099aa064f h1:NNJE6p4LchkmNfNskDUaSbrwxZzr7t2/lj2aS+q4oF0= +github.com/andybalholm/crlf v0.0.0-20171020200849-670099aa064f/go.mod h1:k8feO4+kXDxro6ErPXBRTJ/ro2mf0SsFG8s7doP9kJE= +github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= +github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o= +github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY= +github.com/armon/go-metrics v0.3.10/go.mod h1:4O98XIr/9W0sxpJ8UaYkvjk10Iff7SnFrb4QAOwNTFc= +github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= +github.com/armon/go-radix v1.0.0 h1:F4z6KzEeeQIMeLFa97iZU6vupzoecKdU5TX24SNppXI= +github.com/armon/go-radix v1.0.0/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= +github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= +github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= +github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= +github.com/bgentry/speakeasy v0.1.0 h1:ByYyxL9InA1OWqxJqqp2A5pYHUrCiAL6K3J+LKSsQkY= +github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs= +github.com/briandowns/spinner v1.18.1 h1:yhQmQtM1zsqFsouh09Bk/jCjd50pC3EOGsh28gLVvwY= +github.com/briandowns/spinner v1.18.1/go.mod h1:mQak9GHqbspjC/5iUx3qMlIho8xBS/ppAL/hX5SmPJU= +github.com/cenkalti/backoff/v4 v4.1.3 h1:cFAlzYUlVYDysBEH2T5hyJZMh3+5+WCBvSnK6Q8UtC4= +github.com/cenkalti/backoff/v4 v4.1.3/go.mod h1:scbssz8iZGpm3xbr14ovlUdkxfGXNInqkPWOWmG2CLw= +github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= +github.com/census-instrumentation/opencensus-proto v0.3.0/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= +github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc= +github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/cespare/xxhash/v2 v2.1.2/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= +github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= +github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= +github.com/circonus-labs/circonus-gometrics v2.3.1+incompatible/go.mod h1:nmEj6Dob7S7YxXgwXpfOuvO54S+tGdZdw9fuRZt25Ag= +github.com/circonus-labs/circonusllhist v0.1.3/go.mod h1:kMXHVDlOchFAehlya5ePtbp5jckzBHf4XRpQvBOLI+I= +github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= +github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= +github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= +github.com/cncf/udpa/go 
v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= +github.com/cncf/udpa/go v0.0.0-20210930031921-04548b0d99d4/go.mod h1:6pvJx4me5XPnfI9Z40ddWsdw2W/uZgQLFXToKeRcDiI= +github.com/cncf/xds/go v0.0.0-20210312221358-fbca930ec8ed/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20210805033703-aa0b78936158/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20210922020428-25de7278fc84/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20211001041855-01bcc9b48dfe/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20211130200136-a8f946100490/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= +github.com/coreos/go-systemd/v22 v22.3.2/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= +github.com/cpuguy83/go-md2man/v2 v2.0.1/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= +github.com/creack/pty v1.1.17/go.mod h1:MOBLtS5ELjhRRrroQr9kyvTxUAFNvYEK993ew/Vr4O4= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/deckarep/golang-set v1.8.0 h1:sk9/l/KqpunDwP7pSjUg0keiOOLEnOBHzykLrsPppp4= +github.com/deckarep/golang-set v1.8.0/go.mod h1:5nI87KwE7wgsBU1F4GKAw2Qod7p5kyS383rP6+o6qqo= +github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= +github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= +github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= +github.com/envoyproxy/go-control-plane v0.9.7/go.mod h1:cwu0lG7PUMfa9snN8LXBig5ynNVH9qI8YYLbd1fK2po= +github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= +github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= +github.com/envoyproxy/go-control-plane v0.9.9-0.20210512163311-63b5d3c536b0/go.mod h1:hliV/p42l8fGbc6Y9bQ70uLwIvmJyVE5k4iMKlh8wCQ= +github.com/envoyproxy/go-control-plane v0.9.10-0.20210907150352-cf90f659a021/go.mod h1:AFq3mo9L8Lqqiid3OhADV3RfLJnjiw63cSpi+fDTRC0= +github.com/envoyproxy/go-control-plane v0.10.1/go.mod h1:AY7fTTXNdv/aJ2O5jwpxAPOWUZ7hQAEvzN5Pf27BkQQ= +github.com/envoyproxy/go-control-plane v0.10.2-0.20220325020618-49ff273808a1/go.mod h1:KJwIaB5Mv44NWtYuAOFCVOjcI94vtpEz2JU/D2v6IjE= +github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= +github.com/envoyproxy/protoc-gen-validate v0.6.2/go.mod h1:2t7qjJNvHPx8IjnBOzl9E9/baC+qXE/TeeyBRzgJDws= +github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= +github.com/fatih/color v1.9.0/go.mod h1:eQcE1qtQxscV5RaZvpXrrb8Drkc3/DdQ+uUYCNjL+zU= +github.com/fatih/color v1.13.0 h1:8LOYc1KYPPmyKMuN8QV2DNRWNbLo6LZ0iLs8+mlH53w= +github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk= +github.com/frankban/quicktest v1.14.3 h1:FJKSZTDHjyhriyC81FLQ0LY93eSai0ZyR/ZIkd3ZUKE= 
+github.com/fsnotify/fsevents v0.1.1 h1:/125uxJvvoSDDBPen6yUZbil8J9ydKZnnl3TWWmvnkw= +github.com/fsnotify/fsevents v0.1.1/go.mod h1:+d+hS27T6k5J8CRaPLKFgwKYcpS7GwW3Ule9+SC2ZRc= +github.com/fsnotify/fsnotify v1.5.1/go.mod h1:T3375wBYaZdLLcVNkcVbzGHY7f1l/uK5T5Ai1i3InKU= +github.com/fsnotify/fsnotify v1.6.0 h1:n+5WquG0fcWoWp6xPWfHdbskMCQaFnG6PfBrh1Ky4HY= +github.com/fsnotify/fsnotify v1.6.0/go.mod h1:sl3t1tCWJFWoRz9R8WJCbQihKKwmorjAbSClcnxKAGw= +github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= +github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= +github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= +github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= +github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= +github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= +github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= +github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk= +github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= +github.com/gobwas/glob v0.2.3 h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y= +github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8= +github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= +github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= +github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= +github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= +github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= +github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.1/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4= +github.com/golang/mock v1.5.0/go.mod h1:CWnOUgYIOo4TcNZ0wHX3YZCqsaM1I1Jvs6v3mP3KVu8= +github.com/golang/mock v1.6.0/go.mod h1:p6yTPP+5HYm5mzsMV8JkE6ZKdX+/wYM6Hr+LicevLPs= +github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= +github.com/golang/protobuf v1.3.4/go.mod 
h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= +github.com/golang/protobuf v1.3.5/go.mod h1:6O5/vntMXwX2lRkT1hjjk0nAC1IDOTvTlVgjlRvqsdk= +github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= +github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= +github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= +github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= +github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= +github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8= +github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= +github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= +github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= +github.com/golang/protobuf v1.5.1/go.mod h1:DopwsBzvsk0Fs44TXzsVbJyPhcCPeIwnvohx4u74HPM= +github.com/golang/protobuf v1.5.2 h1:ROPKBNFfQgOUMifHyP+KYbvpjbdoFNs+aK7DXlji0Tw= +github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= +github.com/golang/snappy v0.0.3/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/chrometracing v0.0.0-20210413150014-55fded0163e7 h1:mc1AFRocuO7EtVJgn4YIg97gBJ9VjiT4+UbCAM2IS/k= +github.com/google/chrometracing v0.0.0-20210413150014-55fded0163e7/go.mod h1:k2+go54tKjJPxWHxllhAI7WtOaxnnIaB0LjnGEsbyj0= +github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= +github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.4.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.8 h1:e6P7q2lk1O+qJJb4BtCQXlK8vWEO8V1ZeuEdJNOqZyg= +github.com/google/go-cmp v0.5.8/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= +github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= +github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= +github.com/google/martian/v3 v3.1.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= +github.com/google/martian/v3 v3.2.1/go.mod h1:oBOf6HBosgwRXnUGWUB05QECsc6uvmMiJ3+6W4l/CUk= +github.com/google/pprof 
v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= +github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= +github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20201023163331-3e6fc7fc9c4c/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20201218002935-b9804c9f04c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20210122040257-d980be63207e/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20210226084205-cbba55b83ad5/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20210601050228-01bbb1931b22/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20210609004039-a478d1d731e9/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20210720184732-4bb14d4b1be1/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= +github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I= +github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= +github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= +github.com/googleapis/gax-go/v2 v2.1.0/go.mod h1:Q3nei7sK6ybPYH7twZdmQpAd1MKb7pfu6SK+H1/DsU0= +github.com/googleapis/gax-go/v2 v2.1.1/go.mod h1:hddJymUZASv3XPyGkUpKj8pPO47Rmb0eJc8R6ouapiM= +github.com/googleapis/google-cloud-go-testing v0.0.0-20200911160855-bcd43fbb19e8/go.mod h1:dvDLG8qkwmyD9a/MJJN3XJcT3xFxOKAvTZGvuZmac9g= +github.com/grpc-ecosystem/go-grpc-middleware v1.3.0 h1:+9834+KizmvFV7pXQGSXQTsaWhq2GjuNUt0aUU0YBYw= +github.com/grpc-ecosystem/go-grpc-middleware v1.3.0/go.mod h1:z0ButlSOZa5vEBq9m2m2hlwIgKw+rp3sdCBRoJY+30Y= +github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw= +github.com/hashicorp/consul/api v1.11.0/go.mod h1:XjsvQN+RJGWI2TWy1/kqaE16HrR2J/FWgkYjdZQsX9M= +github.com/hashicorp/consul/sdk v0.8.0/go.mod h1:GBvyrGALthsZObzUGsfgHZQDXjg4lOjagTIwIR1vPms= +github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= +github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I= +github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= +github.com/hashicorp/go-cleanhttp v0.5.0/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= +github.com/hashicorp/go-cleanhttp 
v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= +github.com/hashicorp/go-cleanhttp v0.5.2 h1:035FKYIWjmULyFRBKPs8TBQoi0x6d9G4xc9neXJWAZQ= +github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48= +github.com/hashicorp/go-gatedio v0.5.0 h1:Jm1X5yP4yCqqWj5L1TgW7iZwCVPGtVc+mro5r/XX7Tg= +github.com/hashicorp/go-gatedio v0.5.0/go.mod h1:Lr3t8L6IyxD3DAeaUxGcgl2JnRUpWMCsmBl4Omu/2t4= +github.com/hashicorp/go-hclog v0.9.2/go.mod h1:5CU+agLiy3J7N7QjHK5d05KxGsuXiQLrjA0H7acj2lQ= +github.com/hashicorp/go-hclog v0.12.0/go.mod h1:whpDNt7SSdeAju8AWKIWsul05p54N/39EeqMAyrmvFQ= +github.com/hashicorp/go-hclog v1.0.0/go.mod h1:whpDNt7SSdeAju8AWKIWsul05p54N/39EeqMAyrmvFQ= +github.com/hashicorp/go-hclog v1.2.1 h1:YQsLlGDJgwhXFpucSPyVbCBviQtjlHv3jLTlp8YmtEw= +github.com/hashicorp/go-hclog v1.2.1/go.mod h1:W4Qnvbt70Wk/zYJryRzDRU/4r0kIg0PVHBcfoyhpF5M= +github.com/hashicorp/go-immutable-radix v1.0.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60= +github.com/hashicorp/go-immutable-radix v1.3.1/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60= +github.com/hashicorp/go-msgpack v0.5.3/go.mod h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iPBM1vqhUKIvfM= +github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk= +github.com/hashicorp/go-multierror v1.1.0/go.mod h1:spPvp8C1qA32ftKqdAHm4hHTbPw+vmowP0z+KUhOZdA= +github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo= +github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= +github.com/hashicorp/go-retryablehttp v0.5.3/go.mod h1:9B5zBasrRhHXnJnui7y6sL7es7NDiJgTc6Er0maI1Xs= +github.com/hashicorp/go-retryablehttp v0.6.8 h1:92lWxgpa+fF3FozM4B3UZtHZMJX8T5XT+TFdCxsPyWs= +github.com/hashicorp/go-retryablehttp v0.6.8/go.mod h1:vAew36LZh98gCBJNLH42IQ1ER/9wtLZZ8meHqQvEYWY= +github.com/hashicorp/go-rootcerts v1.0.2/go.mod h1:pqUvnprVnM5bf7AOirdbb01K4ccR319Vf4pU3K5EGc8= +github.com/hashicorp/go-sockaddr v1.0.0/go.mod h1:7Xibr9yA9JjQq1JpNB2Vw7kxv8xerXegt+ozgdvDeDU= +github.com/hashicorp/go-syslog v1.0.0/go.mod h1:qPfqrKkXGihmCqbJM2mZgkZGvKG1dFdvsLplgctolz4= +github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= +github.com/hashicorp/go-uuid v1.0.1/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= +github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/golang-lru v0.5.4/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4= +github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= +github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= +github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO+LraFDTW64= +github.com/hashicorp/mdns v1.0.1/go.mod h1:4gW7WsVCke5TE7EPeYliwHlRUyBtfCwuFwuMg2DmyNY= +github.com/hashicorp/mdns v1.0.4/go.mod h1:mtBihi+LeNXGtG8L9dX59gAEa12BDtBQSp4v/YAJqrc= +github.com/hashicorp/memberlist v0.2.2/go.mod h1:MS2lj3INKhZjWNqd3N0m3J+Jxf3DAOnAH9VT3Sh9MUE= +github.com/hashicorp/memberlist v0.3.0/go.mod h1:MS2lj3INKhZjWNqd3N0m3J+Jxf3DAOnAH9VT3Sh9MUE= +github.com/hashicorp/serf v0.9.5/go.mod h1:UWDWwZeL5cuWDJdl0C6wrvrUwEqtQ4ZKBKKENpqIUyk= +github.com/hashicorp/serf v0.9.6/go.mod h1:TXZNMjZQijwlDvp+r0b63xZ45H7JmCmgg4gpTwn9UV4= +github.com/hinshun/vt10x v0.0.0-20220119200601-820417d04eec/go.mod h1:Q48J4R4DvxnHolD5P8pOtXigYlRuPLGl6moFx3ulM68= 
+github.com/huandu/xstrings v1.3.1/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE= +github.com/huandu/xstrings v1.3.2 h1:L18LIDzqlW6xN2rEkpdV8+oL/IXWJ1APd+vsdYy4Wdw= +github.com/huandu/xstrings v1.3.2/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE= +github.com/iancoleman/strcase v0.2.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47ZCWhYzw7ho= +github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= +github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= +github.com/imdario/mergo v0.3.11 h1:3tnifQM4i+fbajXKBHXWEH+KvNHqojZ778UH75j3bGA= +github.com/imdario/mergo v0.3.11/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA= +github.com/inconshreveable/mousetrap v1.0.0 h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NHg9XEKhtSvM= +github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= +github.com/iseki0/go-yarnlock v0.0.2-0.20220905015017-a2a90751cdfa h1:0sf91Q+lIGU9ikSy16mNfFURM52O1RiB2fQCYEiRPBU= +github.com/iseki0/go-yarnlock v0.0.2-0.20220905015017-a2a90751cdfa/go.mod h1:wQ9AjysVf4yTrNMlPgNbVY8s/XZcdjpMLNyNwDIj47o= +github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= +github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= +github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= +github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= +github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= +github.com/k0kubun/go-ansi v0.0.0-20180517002512-3bf9e2903213/go.mod h1:vNUNkEQ1e29fT/6vq2aBdFsgNPmy8qMdSay1npru+Sw= +github.com/karrick/godirwalk v1.16.1 h1:DynhcF+bztK8gooS0+NDJFrdNZjJ3gzVzC545UNA9iw= +github.com/karrick/godirwalk v1.16.1/go.mod h1:j4mkqPuvaLI8mp1DroR3P6ad7cyYd4c1qeJ3RV7ULlk= +github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51/go.mod h1:CzGEWj7cYgsdH8dAjBGEr58BoE7ScuLd+fwFZ44+/x8= +github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= +github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg= +github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= +github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pretty v0.2.0/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= +github.com/kr/pretty v0.3.0 h1:WgNl7dwNpEZ6jJ9k1snq4pZsg7DOEN8hP9Xw0Tsjwk0= +github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= +github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/lyft/protoc-gen-star v0.5.3/go.mod h1:V0xaHgaf5oCCqmcxYcWiDfTiKsZsRc87/1qhoTACD8w= +github.com/magiconair/properties v1.8.5/go.mod h1:y3VJvCyxH9uVvJTWEGAELF3aiYNyPKd5NZ3oSwXrF60= +github.com/magiconair/properties v1.8.6 
h1:5ibWZ6iY0NctNGWo87LalDlEZ6R41TqbbDamhfG/Qzo= +github.com/magiconair/properties v1.8.6/go.mod h1:y3VJvCyxH9uVvJTWEGAELF3aiYNyPKd5NZ3oSwXrF60= +github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= +github.com/mattn/go-colorable v0.1.2/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= +github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= +github.com/mattn/go-colorable v0.1.6/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= +github.com/mattn/go-colorable v0.1.9/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= +github.com/mattn/go-colorable v0.1.12 h1:jF+Du6AlPIjs2BiUiQlKOX0rt3SujHxPnksPKZbaA40= +github.com/mattn/go-colorable v0.1.12/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4= +github.com/mattn/go-isatty v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= +github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= +github.com/mattn/go-isatty v0.0.10/go.mod h1:qgIWMr58cqv1PHHyhnkY9lrL7etaEgOFcMEpPG5Rm84= +github.com/mattn/go-isatty v0.0.11/go.mod h1:PhnuNfih5lzO57/f3n+odYbM4JtupLOxQOAqxQCu2WE= +github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= +github.com/mattn/go-isatty v0.0.14 h1:yVuAays6BHfxijgZPzw+3Zlu5yQgKGP2/hcQbHb7S9Y= +github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= +github.com/mattn/go-runewidth v0.0.13 h1:lTGmDsbAYt5DmK6OnoV7EuIF1wEIFAcxld6ypU4OSgU= +github.com/mattn/go-runewidth v0.0.13/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w= +github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= +github.com/mgutz/ansi v0.0.0-20170206155736-9520e82c474b/go.mod h1:01TrycV0kFyexm33Z7vhZRXopbI8J3TDReVlkTgMUxE= +github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg= +github.com/miekg/dns v1.1.26/go.mod h1:bPDLeHnStXmXAq1m/Ch/hvfNHr14JKNPMBo3VZKjuso= +github.com/miekg/dns v1.1.41/go.mod h1:p6aan82bvRIyn+zDIv9xYNUpwa73JcSh9BKwknJysuI= +github.com/mitchellh/cli v1.1.0/go.mod h1:xcISNoH86gajksDmfB23e/pu+B+GeFRMYmoHXxx3xhI= +github.com/mitchellh/cli v1.1.5 h1:OxRIeJXpAMztws/XHlN2vu6imG5Dpq+j61AzAX5fLng= +github.com/mitchellh/cli v1.1.5/go.mod h1:v8+iFts2sPIKUV1ltktPXMCC8fumSKFItNcD2cLtRR4= +github.com/mitchellh/colorstring v0.0.0-20190213212951-d06e56a500db h1:62I3jR2EmQ4l5rM/4FEfDWcRD+abF5XlKShorW5LRoQ= +github.com/mitchellh/colorstring v0.0.0-20190213212951-d06e56a500db/go.mod h1:l0dey0ia/Uv7NcFFVbCLtqEBQbrT4OCwCSKTEv6enCw= +github.com/mitchellh/copystructure v1.0.0 h1:Laisrj+bAB6b/yJwB5Bt3ITZhGJdqmxquMKeZ+mmkFQ= +github.com/mitchellh/copystructure v1.0.0/go.mod h1:SNtv71yrdKgLRyLFxmLdkAbkKEFWgYaq1OVrnRcwhnw= +github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= +github.com/mitchellh/go-testing-interface v1.0.0/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI= +github.com/mitchellh/mapstructure v0.0.0-20160808181253-ca63d7c062ee/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= +github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= +github.com/mitchellh/mapstructure v1.4.3/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= +github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= +github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= +github.com/mitchellh/reflectwalk v1.0.0/go.mod 
h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= +github.com/mitchellh/reflectwalk v1.0.1 h1:FVzMWA5RllMAKIdUSC8mdWo3XtwoecrH79BY70sEEpE= +github.com/mitchellh/reflectwalk v1.0.1/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= +github.com/moby/sys/sequential v0.5.0 h1:OPvI35Lzn9K04PBbCLW0g4LcFAJgHsvXsRyewg5lXtc= +github.com/moby/sys/sequential v0.5.0/go.mod h1:tH2cOOs5V9MlPiXcQzRC+eEyab644PWKGRYaaV5ZZlo= +github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= +github.com/muhammadmuzzammil1998/jsonc v1.0.0 h1:8o5gBQn4ZA3NBA9DlTujCj2a4w0tqWrPVjDwhzkgTIs= +github.com/muhammadmuzzammil1998/jsonc v1.0.0/go.mod h1:saF2fIVw4banK0H4+/EuqfFLpRnoy5S+ECwTOCcRcSU= +github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= +github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs= +github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= +github.com/nightlyone/lockfile v1.0.0 h1:RHep2cFKK4PonZJDdEl4GmkabuhbsRMgk/k3uAmxBiA= +github.com/nightlyone/lockfile v1.0.0/go.mod h1:rywoIealpdNse2r832aiD9jRk8ErCatROs6LzC841CI= +github.com/opentracing/opentracing-go v1.1.0/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o= +github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= +github.com/pascaldekloe/goe v0.1.0/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= +github.com/pelletier/go-toml v1.9.4/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= +github.com/pelletier/go-toml v1.9.5 h1:4yBQzkHv+7BHq2PQUZF3Mx0IYxG7LsP222s7Agd3ve8= +github.com/pelletier/go-toml v1.9.5/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= +github.com/pelletier/go-toml/v2 v2.0.1 h1:8e3L2cCQzLFi2CR4g7vGFuFxX7Jl1kKX8gW+iV0GUKU= +github.com/pelletier/go-toml/v2 v2.0.1/go.mod h1:r9LEWfGN8R5k0VXJ+0BkIe7MYkRdwZOjgMj2KwnJFUo= +github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= +github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/sftp v1.10.1/go.mod h1:lYOWFsE0bwd1+KfKJaKeuokY15vzFx25BLbzYYoAxZI= +github.com/pkg/sftp v1.13.1/go.mod h1:3HaPG6Dq1ILlpPZRO0HVMrsydcdLt6HRDccSgb87qRg= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI= +github.com/posener/complete v1.2.3 h1:NP0eAhjcjImqslEwo/1hq7gpajME0fTLTezBKDqfXqo= +github.com/posener/complete v1.2.3/go.mod h1:WZIdtGGp+qx0sLrYKtIRAruyNpv6hFCicSgv7Sy7s/s= +github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= +github.com/prometheus/client_golang 
v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo= +github.com/prometheus/client_golang v1.4.0/go.mod h1:e9GMxYsXl05ICDXkRhurwBS4Q3OK1iX/F2sw+iXX5zU= +github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= +github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= +github.com/prometheus/common v0.9.1/go.mod h1:yhUN8i9wzaXS3w1O07YhxHEBxD+W35wd8bs7vj7HSQ4= +github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= +github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= +github.com/prometheus/procfs v0.0.8/go.mod h1:7Qr8sr6344vo1JqZ6HhLceV9o3AJ1Ff+GxbHq6oeK9A= +github.com/pyr-sh/dag v1.0.0 h1:hIyuIe8nvHZuwFUjTXoLJP7cN5Xr3IAQukiX2S8NAS0= +github.com/pyr-sh/dag v1.0.0/go.mod h1:alhhyzDdT3KwVmFc+pF4uhMSfRSTbiAUMcqfrfPSs0Y= +github.com/rivo/uniseg v0.2.0 h1:S1pD9weZBuJdFmowNwbpi7BJ8TNftyUImj/0WQi72jY= +github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= +github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= +github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= +github.com/rogpeppe/go-internal v1.6.1 h1:/FiVV8dS/e+YqF2JvO3yXRFbBLTIuSDkuC7aBOAvL+k= +github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= +github.com/sabhiram/go-gitignore v0.0.0-20201211210132-54b8a0bf510f h1:8P2MkG70G76gnZBOPGwmMIgwBb/rESQuwsJ7K8ds4NE= +github.com/sabhiram/go-gitignore v0.0.0-20201211210132-54b8a0bf510f/go.mod h1:+ePHsJ1keEjQtpvf9HHw0f4ZeJ0TLRsxhunSI2hYJSs= +github.com/sagikazarmark/crypt v0.3.0/go.mod h1:uD/D+6UF4SrIR1uGEv7bBNkNqLGqUr43MRiaGWX1Nig= +github.com/schollz/progressbar/v3 v3.9.0 h1:k9SRNQ8KZyibz1UZOaKxnkUE3iGtmGSDt1YY9KlCYQk= +github.com/schollz/progressbar/v3 v3.9.0/go.mod h1:W5IEwbJecncFGBvuEh4A7HT1nZZ6WNIL2i3qbnI0WKY= +github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc= +github.com/segmentio/ksuid v1.0.4 h1:sBo2BdShXjmcugAMwjugoGUdUV0pcxY5mW4xKRn3v4c= +github.com/segmentio/ksuid v1.0.4/go.mod h1:/XUiZBD3kVx5SmUOl55voK5yeAbBNNIed+2O73XgrPE= +github.com/shopspring/decimal v1.2.0 h1:abSATXmQEYyShuxI4/vyW3tV1MrKAJzCZ/0zLUXYbsQ= +github.com/shopspring/decimal v1.2.0/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= +github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= +github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= +github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= +github.com/spf13/afero v1.3.3/go.mod h1:5KUK8ByomD5Ti5Artl0RtHeI5pTF7MIDuXL3yY520V4= +github.com/spf13/afero v1.6.0/go.mod h1:Ai8FlHk4v/PARR026UzYexafAt9roJ7LcLMAmO6Z93I= +github.com/spf13/afero v1.8.2 h1:xehSyVa0YnHWsJ49JFljMpg1HX19V6NDZ1fkm1Xznbo= +github.com/spf13/afero v1.8.2/go.mod 
h1:CtAatgMJh6bJEIs48Ay/FOnkljP3WeGUG0MC1RfAqwo= +github.com/spf13/cast v1.3.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= +github.com/spf13/cast v1.4.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= +github.com/spf13/cast v1.5.0 h1:rj3WzYc11XZaIZMPKmwP96zkFEnnAmV8s6XbB2aY32w= +github.com/spf13/cast v1.5.0/go.mod h1:SpXXQ5YoyJw6s3/6cMTQuxvgRl3PCJiyaX9p6b155UU= +github.com/spf13/cobra v1.3.0 h1:R7cSvGu+Vv+qX0gW5R/85dx2kmmJT5z5NM8ifdYjdn0= +github.com/spf13/cobra v1.3.0/go.mod h1:BrRVncBjOJa/eUcVVm9CE+oC6as8k+VYr4NY7WCi9V4= +github.com/spf13/jwalterweatherman v1.1.0 h1:ue6voC5bR5F8YxI5S67j9i582FU4Qvo2bmqnqMYADFk= +github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0EXowPYD95IqWIGo= +github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= +github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= +github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/spf13/viper v1.10.0/go.mod h1:SoyBPwAtKDzypXNDFKN5kzH7ppppbGZtls1UpIy5AsM= +github.com/spf13/viper v1.12.0 h1:CZ7eSOd3kZoaYDLbXnmzgQI5RlciuXBMA+18HwHRfZQ= +github.com/spf13/viper v1.12.0/go.mod h1:b6COn30jlNxbm/V2IqWiNWkJ+vZNiMNksliPCiuKtSI= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= +github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= +github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= +github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.2/go.mod h1:R6va5+xMeoiuVRoj+gSkQ7d3FALtqAAGI1FQKckRals= +github.com/stretchr/testify v1.8.0 h1:pSgiaMZlXftHpm5L7V1+rVB+AZJydKsMxsQBIJw4PKk= +github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= +github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw= +github.com/subosito/gotenv v1.3.0 h1:mjC+YW8QpAdXibNi+vNWgzmgBH4+5l5dCXv8cNysBLI= +github.com/subosito/gotenv v1.3.0/go.mod h1:YzJjq/33h7nrwdY+iHMhEOEEbW0ovIz0tB6t6PwAXzs= +github.com/tv42/httpunix v0.0.0-20150427012821-b75d8614f926/go.mod h1:9ESjWnEqriFuLhtthL60Sar/7RFoluCcXsuvEwTV5KM= +github.com/yookoala/realpath v1.0.0 h1:7OA9pj4FZd+oZDsyvXWQvjn5oBdcHRTV44PpdMSuImQ= +github.com/yookoala/realpath v1.0.0/go.mod h1:gJJMA9wuX7AcqLy1+ffPatSCySA1FQ2S8Ya9AIoYBpE= +github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= +go.etcd.io/etcd/api/v3 v3.5.1/go.mod h1:cbVKeC6lCfl7j/8jBhAK6aIYO9XOjdptoxU/nLQcPvs= +go.etcd.io/etcd/client/pkg/v3 v3.5.1/go.mod 
h1:IJHfcCEKxYu1Os13ZdwCwIUTUVGYTSAM3YSwc9/Ac1g= +go.etcd.io/etcd/client/v2 v2.305.1/go.mod h1:pMEacxZW7o8pg4CrFE7pquyCJJzZvkvdD2RibOCCCGs= +go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= +go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= +go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk= +go.opencensus.io v0.23.0/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E= +go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= +go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= +go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= +go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0= +go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU= +go.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= +go.uber.org/zap v1.17.0/go.mod h1:MXVU+bhUf/A7Xi2HNOnopQOrmycQ5Ih87HtOu4q5SSo= +golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +golang.org/x/crypto v0.0.0-20181029021203-45a5f77698d3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190923035154-9ee001bba392/go.mod h1:/lpIB1dKB+9EgE3H3cr1v9wB50oz8l4C4h62xy7jSTY= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20200414173820-0848c9571904/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20200820211705-5c72a883971a/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20210421170649-83a5a9bb288b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= +golang.org/x/crypto v0.0.0-20210817164053-32db794688a5/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.0.0-20211108221036-ceb1ce70b4fa/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.0.0-20220131195533-30dcbda58838/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= +golang.org/x/crypto v0.0.0-20220411220226-7b82a4e95df4 h1:kUhD7nTDoI3fVd9G4ORWrbV5NY0liEs/Jg2pv5f+bBA= +golang.org/x/crypto v0.0.0-20220411220226-7b82a4e95df4/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= +golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= +golang.org/x/exp 
v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek= +golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY= +golang.org/x/exp v0.0.0-20191129062945-2f5052295587/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= +golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= +golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= +golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= +golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= +golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs= +golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/lint v0.0.0-20210508222113-6edffad5e616/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= +golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o= +golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= +golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY= +golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.5.0/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro= +golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod 
h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181023162649-9b4f9f5ad519/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= +golang.org/x/net v0.0.0-20190613194153-d28f0bde5980/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190923162816-aa69164e4478/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200222125558-5a598a2470a0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20201031054903-ff519b6c9102/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20201209123823-ac852fbbde11/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net 
v0.0.0-20201224014010-6772e930b67b/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod h1:RBQZq4jEuRlivfhVLdyRGr576XBO4/greRjx4P4O3yc= +golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= +golang.org/x/net v0.0.0-20210410081132-afb366fc7cd1/go.mod h1:9tjilg8BloeKEkVJvy7fQ90B1CfIiPueXVOjqfkSzI8= +golang.org/x/net v0.0.0-20210503060351-7fd8e65b6420/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20210813160813-60bc85c4be6d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20220520000938-2e3eb7b945c2 h1:NWy5+hlRbC7HK+PmcXVUmW1IMyFce7to56IUvhUFm7Y= +golang.org/x/net v0.0.0-20220520000938-2e3eb7b945c2/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= +golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= +golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20200902213428-5d25da1a8d43/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20201109201403-9fd604954f58/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20201208152858-08078c50e5b5/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210220000619-9bb904979d93/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210313182246-cd4f82c27b84/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210514164344-f6687ab2804c/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210628180205-a41e5a781914/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210805134026-6f1e6394065a/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210819190943-2bc19b11175f/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20211005180243-6b3c2da341f1/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20211104180415-d3ed0bb246c8/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod 
h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20210220032951-036812b2e83c h1:5KslGYwFpkhGh+Q16bwMP3cOontH8FOep7tGV86Y7SQ= +golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181026203630-95b1ffbd15a5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190922100055-0a153f010e69/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190924154521-2837fb4f24fe/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191008105621-543471e840be/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys 
v0.0.0-20200124204421-9fbb57f87de9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200331124033-c3d80250170d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200905004654-be1d3432aa8f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210220050731-9a76102bfb43/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210225134936-a50acf3fe073/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210303074136-134d130e1a04/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210305230114-8fe3ee5dd75b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210315160823-c6e025ad8005/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210403161142-5e06dd20ab57/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210423185535-09eb48e85fd7/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210514084401-e8d321eab015/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210603125802-9665404d3644/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= 
+golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210806184541-e5e7981a1069/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210816183151-1e6c022a8912/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210823070655-63515b42dcdf/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210908233432-aa78b53d3365/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211007075335-d3039528d8ac/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211124211545-fe61309f8881/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211205182925-97ca703d548d/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220128215802-99c3d69c2c27/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220422013727-9388b58f7150/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220503163025-988cb79eb6c6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220908164124-27713097b956/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.5.0 h1:MUK/U/4lj1t1oPg0HfuXDN/Z1wv31ZJ/YcPiGccS4DU= +golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/term v0.0.0-20210503060354-a79de5458b56/go.mod h1:tfny5GFUkzUvx4ps4ajbZsCe5lw1metzhBm9T3x7oIY= +golang.org/x/term v0.0.0-20210927222741-03fcf44c2211 h1:JGgROgKl9N8DuW20oFS5gxc+lE67/N3FcwmBPMe7ArY= +golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.7 h1:olpwvP2KacW1ZWvsR7uQhoyTYvKAupfQrRGBFM352Gk= +golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= +golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= +golang.org/x/tools 
v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20190907020128-2ca718005c18/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200117161641-43d50277825c/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200122220014-bf1340f18c4a/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200204074204-1cc6d1ef6c74/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200207183749-b753a1ba74fa/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200212150539-ea181f53ac56/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200224181240-023911ca70b2/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200227222343-706bc42d1f0d/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200304193943-95d2e580d8eb/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= +golang.org/x/tools v0.0.0-20200312045724-11d5b4c81c7d/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= +golang.org/x/tools v0.0.0-20200331025713-a30bf2db82d4/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= +golang.org/x/tools v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools 
v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200904185747-39188db58858/go.mod h1:Cj7w3i3Rnn0Xh82ur9kSqwfTHTeVxaDqrfMjpcNT6bE= +golang.org/x/tools v0.0.0-20201110124207-079ba7bd75cd/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20210108195828-e2f9c7f1fc8e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= +golang.org/x/tools v0.1.1/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/tools v0.1.2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/tools v0.1.3/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/tools v0.1.4/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= +google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= +google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= +google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= +google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.15.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.17.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.18.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.19.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.20.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.22.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.24.0/go.mod 
h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= +google.golang.org/api v0.28.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= +google.golang.org/api v0.29.0/go.mod h1:Lcubydp8VUV7KeIHD9z2Bys/sm/vGKnG1UHuDBSrHWM= +google.golang.org/api v0.30.0/go.mod h1:QGmEvQ87FHZNiUVJkT14jQNYJ4ZJjdRF23ZXz5138Fc= +google.golang.org/api v0.35.0/go.mod h1:/XrVsuzM0rZmrsbjJutiuftIzeuTQcEeaYcSk/mQ1dg= +google.golang.org/api v0.36.0/go.mod h1:+z5ficQTmoYpPn8LCUNVpK5I7hwkpjbcgqA7I34qYtE= +google.golang.org/api v0.40.0/go.mod h1:fYKFpnQN0DsDSKRVRcQSDQNtqWPfM9i+zNPxepjRCQ8= +google.golang.org/api v0.41.0/go.mod h1:RkxM5lITDfTzmyKFPt+wGrCJbVfniCr2ool8kTBzRTU= +google.golang.org/api v0.43.0/go.mod h1:nQsDGjRXMo4lvh5hP0TKqF244gqhGcr/YSIykhUk/94= +google.golang.org/api v0.47.0/go.mod h1:Wbvgpq1HddcWVtzsVLyfLp8lDg6AA241LmgIL59tHXo= +google.golang.org/api v0.48.0/go.mod h1:71Pr1vy+TAZRPkPs/xlCf5SsU8WjuAWv1Pfjbtukyy4= +google.golang.org/api v0.50.0/go.mod h1:4bNT5pAuq5ji4SRZm+5QIkjny9JAyVD/3gaSihNefaw= +google.golang.org/api v0.51.0/go.mod h1:t4HdrdoNgyN5cbEfm7Lum0lcLDLiise1F8qDKX00sOU= +google.golang.org/api v0.54.0/go.mod h1:7C4bFFOvVDGXjfDTAsgGwDgAxRDeQ4X8NvUedIt6z3k= +google.golang.org/api v0.55.0/go.mod h1:38yMfeP1kfjsl8isn0tliTjIb1rJXcQi4UXlbqivdVE= +google.golang.org/api v0.56.0/go.mod h1:38yMfeP1kfjsl8isn0tliTjIb1rJXcQi4UXlbqivdVE= +google.golang.org/api v0.57.0/go.mod h1:dVPlbZyBo2/OjBpmvNdpn2GRm6rPy75jyU7bmhdrMgI= +google.golang.org/api v0.59.0/go.mod h1:sT2boj7M9YJxZzgeZqXogmhfmRWDtPzT31xkieUbuZU= +google.golang.org/api v0.61.0/go.mod h1:xQRti5UdCmoCEqFxcz93fTl338AVqDgyaDRuOZ3hg9I= +google.golang.org/api v0.62.0/go.mod h1:dKmwPCydfsad4qCH08MSdgWjfHOyfpd4VtDGgRFdavw= +google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= +google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0= +google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= +google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8= +google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191115194625-c23dd37a84c9/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto 
v0.0.0-20191216164720-4f79533eabd1/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191230161307-f3c370f40bfb/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200115191322-ca5a22157cba/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200122232147-0452cf42e150/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200204135345-fa8e72b47b90/go.mod h1:GmwEX6Z4W5gMy59cAlVYjN9JhxgbQH6Gn+gFDQe2lzA= +google.golang.org/genproto v0.0.0-20200212174721-66ed5ce911ce/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200224152610-e50cd9704f63/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200228133532-8c2c7df3a383/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200305110556-506484158171/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200312145019-da6875a35672/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200331122359-1ee6d9798940/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200423170343-7949de9c1215/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200430143042-b979b6f78d84/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200513103714-09dca8ec2884/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U= +google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= +google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA= +google.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200904004341-0bd0a958aa1d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201109203340-2640f1f9cdfb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201201144952-b05cb90ed32e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201210142538-e3217bee35cc/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201214200347-8c77b98c765d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210108203827-ffc7fda8c3d7/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210222152913-aa3ee6e6a81c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210226172003-ab064af71705/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210303154014-9728d6b83eeb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto 
v0.0.0-20210310155132-4ce2db91004e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210319143718-93e7006c17a6/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210402141018-6c239bbf2bb1/go.mod h1:9lPAdzaEmUacj36I+k7YKbEc5CXzPIeORRgDAUOu28A= +google.golang.org/genproto v0.0.0-20210513213006-bf773b8c8384/go.mod h1:P3QM42oQyzQSnHPnZ/vqoCdDmzH28fzWByN9asMeM8A= +google.golang.org/genproto v0.0.0-20210602131652-f16073e35f0c/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= +google.golang.org/genproto v0.0.0-20210604141403-392c879c8b08/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= +google.golang.org/genproto v0.0.0-20210608205507-b6d2f5bf0d7d/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= +google.golang.org/genproto v0.0.0-20210624195500-8bfb893ecb84/go.mod h1:SzzZ/N+nwJDaO1kznhnlzqS8ocJICar6hYhVyhi++24= +google.golang.org/genproto v0.0.0-20210713002101-d411969a0d9a/go.mod h1:AxrInvYm1dci+enl5hChSFPOmmUF1+uAa/UsgNRWd7k= +google.golang.org/genproto v0.0.0-20210716133855-ce7ef5c701ea/go.mod h1:AxrInvYm1dci+enl5hChSFPOmmUF1+uAa/UsgNRWd7k= +google.golang.org/genproto v0.0.0-20210728212813-7823e685a01f/go.mod h1:ob2IJxKrgPT52GcgX759i1sleT07tiKowYBGbczaW48= +google.golang.org/genproto v0.0.0-20210805201207-89edb61ffb67/go.mod h1:ob2IJxKrgPT52GcgX759i1sleT07tiKowYBGbczaW48= +google.golang.org/genproto v0.0.0-20210813162853-db860fec028c/go.mod h1:cFeNkxwySK631ADgubI+/XFU/xp8FD5KIVV4rj8UC5w= +google.golang.org/genproto v0.0.0-20210821163610-241b8fcbd6c8/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= +google.golang.org/genproto v0.0.0-20210828152312-66f60bf46e71/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= +google.golang.org/genproto v0.0.0-20210831024726-fe130286e0e2/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= +google.golang.org/genproto v0.0.0-20210903162649-d08c68adba83/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= +google.golang.org/genproto v0.0.0-20210909211513-a8c4777a87af/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= +google.golang.org/genproto v0.0.0-20210924002016-3dee208752a0/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20211008145708-270636b82663/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20211028162531-8db9c33dc351/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20211118181313-81c1377c94b1/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20211129164237-f09f9a12af12/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20211203200212-54befc351ae9/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20211206160659-862468c7d6e0/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20211208223120-3a66f561d7aa/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20220519153652-3a47de7e79bd h1:e0TwkXOdbnH/1x5rc5MZ/VYyiZ4v+RdVfrGMqEwT68I= +google.golang.org/genproto v0.0.0-20220519153652-3a47de7e79bd/go.mod h1:RAyBrSAP7Fh3Nc84ghnVLDPuV51xc9agzmm4Ph6i0Q4= +google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= +google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= +google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= +google.golang.org/grpc 
v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= +google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= +google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKal+60= +google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= +google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.31.1/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTpR3n0= +google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= +google.golang.org/grpc v1.34.0/go.mod h1:WotjhfgOW/POjDeRt8vscBtXq+2VjORFy659qA51WJ8= +google.golang.org/grpc v1.35.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= +google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= +google.golang.org/grpc v1.36.1/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= +google.golang.org/grpc v1.37.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= +google.golang.org/grpc v1.37.1/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= +google.golang.org/grpc v1.38.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= +google.golang.org/grpc v1.39.0/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnDzfrE= +google.golang.org/grpc v1.39.1/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnDzfrE= +google.golang.org/grpc v1.40.0/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34= +google.golang.org/grpc v1.40.1/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34= +google.golang.org/grpc v1.42.0/go.mod h1:k+4IHHFw41K8+bbowsex27ge2rCb65oeWqe4jJ590SU= +google.golang.org/grpc v1.46.0/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACuMGWk= +google.golang.org/grpc v1.46.2 h1:u+MLGgVf7vRdjEYZ8wDFhAVNmhkbJ5hmrA1LMWK1CAQ= +google.golang.org/grpc v1.46.2/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACuMGWk= +google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0/go.mod h1:6Kw0yEErY5E/yWrBtf03jp27GLLJujG4z/JK95pnjjw= +google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= +google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= +google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= +google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= +google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= +google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4= +google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= +google.golang.org/protobuf v1.26.0-rc.1/go.mod 
h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= +google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= +google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= +google.golang.org/protobuf v1.28.0 h1:w43yiav+6bVFTBQFZX0r7ipe9JQ1QsbMgHwbBziscLw= +google.golang.org/protobuf v1.28.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= +gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f h1:BLraFXnmrev5lT+xlilqcH8XK9/i0At2xKjWk4p6zsU= +gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= +gopkg.in/ini.v1 v1.66.2/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= +gopkg.in/ini.v1 v1.66.4 h1:SsAcf+mM7mRZo2nJNGt8mZCjG8ZRaNGMURJw7BsIST4= +gopkg.in/ini.v1 v1.66.4/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= +gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= +gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gotest.tools/v3 v3.3.0 h1:MfDY1b1/0xN1CyMlQDac0ziEy9zJQd9CXBRRDHw2jJo= +gotest.tools/v3 v3.3.0/go.mod h1:Mcr9QNxkg0uMvy/YElmo4SpXgJKWgQvYrT7Kw5RzJ1A= +honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= +honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= +honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= +rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= +rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= +rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= diff --git 
a/cli/internal/analytics/analytics.go b/cli/internal/analytics/analytics.go new file mode 100644 index 0000000..8d9a3b6 --- /dev/null +++ b/cli/internal/analytics/analytics.go @@ -0,0 +1,175 @@ +package analytics + +import ( + "context" + "sync" + "time" + + "github.com/google/uuid" + "github.com/hashicorp/go-hclog" + "github.com/mitchellh/mapstructure" + "github.com/vercel/turbo/cli/internal/util" +) + +type Events = []map[string]interface{} + +type EventPayload = interface{} + +type Recorder interface { + LogEvent(payload EventPayload) +} + +type Client interface { + Recorder + Close() + CloseWithTimeout(timeout time.Duration) +} + +type Sink interface { + RecordAnalyticsEvents(events Events) error +} + +type nullSink struct{} + +func (n *nullSink) RecordAnalyticsEvents(events Events) error { + return nil +} + +// NullSink is an analytics sink to use in the event that we don't want to send +// analytics +var NullSink = &nullSink{} + +type client struct { + ch chan<- EventPayload + cancel func() + + worker *worker +} + +type worker struct { + buffer []EventPayload + ch <-chan EventPayload + ctx context.Context + doneSemaphore util.Semaphore + sessionID uuid.UUID + sink Sink + wg sync.WaitGroup + logger hclog.Logger +} + +const bufferThreshold = 10 +const eventTimeout = 200 * time.Millisecond +const noTimeout = 24 * time.Hour + +func newWorker(ctx context.Context, ch <-chan EventPayload, sink Sink, logger hclog.Logger) *worker { + buffer := []EventPayload{} + sessionID := uuid.New() + w := &worker{ + buffer: buffer, + ch: ch, + ctx: ctx, + doneSemaphore: util.NewSemaphore(1), + sessionID: sessionID, + sink: sink, + logger: logger, + } + w.doneSemaphore.Acquire() + go w.analyticsClient() + return w +} + +func NewClient(parent context.Context, sink Sink, logger hclog.Logger) Client { + ch := make(chan EventPayload) + ctx, cancel := context.WithCancel(parent) + // creates and starts the worker + worker := newWorker(ctx, ch, sink, logger) + s := &client{ + ch: ch, + cancel: cancel, + worker: worker, + } + return s +} + +func (s *client) LogEvent(event EventPayload) { + s.ch <- event +} + +func (s *client) Close() { + s.cancel() + s.worker.Wait() +} + +func (s *client) CloseWithTimeout(timeout time.Duration) { + ch := make(chan struct{}) + go func() { + s.Close() + close(ch) + }() + select { + case <-ch: + case <-time.After(timeout): + } +} + +func (w *worker) Wait() { + w.doneSemaphore.Acquire() + w.wg.Wait() +} + +func (w *worker) analyticsClient() { + timeout := time.After(noTimeout) + for { + select { + case e := <-w.ch: + w.buffer = append(w.buffer, e) + if len(w.buffer) == bufferThreshold { + w.flush() + timeout = time.After(noTimeout) + } else { + timeout = time.After(eventTimeout) + } + case <-timeout: + w.flush() + timeout = time.After(noTimeout) + case <-w.ctx.Done(): + w.flush() + w.doneSemaphore.Release() + return + } + } +} + +func (w *worker) flush() { + if len(w.buffer) > 0 { + w.sendEvents(w.buffer) + w.buffer = []EventPayload{} + } +} + +func (w *worker) sendEvents(events []EventPayload) { + w.wg.Add(1) + go func() { + payload, err := addSessionID(w.sessionID.String(), events) + if err != nil { + w.logger.Debug("failed to encode cache usage analytics", "error", err) + } + err = w.sink.RecordAnalyticsEvents(payload) + if err != nil { + w.logger.Debug("failed to record cache usage analytics", "error", err) + } + w.wg.Done() + }() +} + +func addSessionID(sessionID string, events []EventPayload) (Events, error) { + eventMaps := []map[string]interface{}{} + err := 
mapstructure.Decode(events, &eventMaps) + if err != nil { + return nil, err + } + for _, event := range eventMaps { + event["sessionId"] = sessionID + } + return eventMaps, nil +} diff --git a/cli/internal/analytics/analytics_test.go b/cli/internal/analytics/analytics_test.go new file mode 100644 index 0000000..0715fda --- /dev/null +++ b/cli/internal/analytics/analytics_test.go @@ -0,0 +1,192 @@ +package analytics + +import ( + "context" + "sync" + "testing" + "time" + + "github.com/hashicorp/go-hclog" +) + +type dummySink struct { + events []*Events + err error + mu sync.Mutex + ch chan struct{} +} + +type evt struct { + I int +} + +func newDummySink() *dummySink { + return &dummySink{ + events: []*Events{}, + ch: make(chan struct{}, 1), + } +} + +func (d *dummySink) RecordAnalyticsEvents(events Events) error { + d.mu.Lock() + defer d.mu.Unlock() + // Make a copy in case a test is holding a copy too + eventsCopy := make([]*Events, len(d.events)) + copy(eventsCopy, d.events) + d.events = append(eventsCopy, &events) + d.ch <- struct{}{} + return d.err +} + +func (d *dummySink) Events() []*Events { + d.mu.Lock() + defer d.mu.Unlock() + return d.events +} + +func (d *dummySink) ExpectImmediateMessage(t *testing.T) { + select { + case <-time.After(150 * time.Millisecond): + t.Errorf("expected to not wait out the flush timeout") + case <-d.ch: + } +} + +func (d *dummySink) ExpectTimeoutThenMessage(t *testing.T) { + select { + case <-d.ch: + t.Errorf("Expected to wait out the flush timeout") + case <-time.After(150 * time.Millisecond): + } + <-d.ch +} + +func Test_batching(t *testing.T) { + d := newDummySink() + ctx := context.Background() + c := NewClient(ctx, d, hclog.Default()) + for i := 0; i < 2; i++ { + c.LogEvent(&evt{i}) + } + found := d.Events() + if len(found) != 0 { + t.Errorf("got %v events, want 0 due to batching", len(found)) + } + // Should timeout + d.ExpectTimeoutThenMessage(t) + found = d.Events() + if len(found) != 1 { + t.Errorf("got %v, want 1 batch to have been flushed", len(found)) + } + payloads := *found[0] + if len(payloads) != 2 { + t.Errorf("got %v, want 2 payloads to have been flushed", len(payloads)) + } +} + +func Test_batchingAcrossTwoBatches(t *testing.T) { + d := newDummySink() + ctx := context.Background() + c := NewClient(ctx, d, hclog.Default()) + for i := 0; i < 12; i++ { + c.LogEvent(&evt{i}) + } + // We sent more than the batch size, expect a message immediately + d.ExpectImmediateMessage(t) + found := d.Events() + if len(found) != 1 { + t.Errorf("got %v, want 1 batch to have been flushed", len(found)) + } + payloads := *found[0] + if len(payloads) != 10 { + t.Errorf("got %v, want 10 payloads to have been flushed", len(payloads)) + } + // Should timeout second batch + d.ExpectTimeoutThenMessage(t) + found = d.Events() + if len(found) != 2 { + t.Errorf("got %v, want 2 batches to have been flushed", len(found)) + } + payloads = *found[1] + if len(payloads) != 2 { + t.Errorf("got %v, want 2 payloads to have been flushed", len(payloads)) + } +} + +func Test_closing(t *testing.T) { + d := newDummySink() + ctx := context.Background() + c := NewClient(ctx, d, hclog.Default()) + for i := 0; i < 2; i++ { + c.LogEvent(&evt{i}) + } + found := d.Events() + if len(found) != 0 { + t.Errorf("got %v events, want 0 due to batching", len(found)) + } + c.Close() + found = d.Events() + if len(found) != 1 { + t.Errorf("got %v, want 1 batch to have been flushed", len(found)) + } + payloads := *found[0] + if len(payloads) != 2 { + t.Errorf("got %v, want 2 payloads to have been 
flushed", len(payloads)) + } +} + +func Test_closingByContext(t *testing.T) { + d := newDummySink() + ctx, cancel := context.WithCancel(context.Background()) + c := NewClient(ctx, d, hclog.Default()) + for i := 0; i < 2; i++ { + c.LogEvent(&evt{i}) + } + found := d.Events() + if len(found) != 0 { + t.Errorf("got %v events, want 0 due to batching", len(found)) + } + cancel() + d.ExpectImmediateMessage(t) + found = d.Events() + if len(found) != 1 { + t.Errorf("got %v, want 1 batch to have been flushed", len(found)) + } + payloads := *found[0] + if len(payloads) != 2 { + t.Errorf("got %v, want 2 payloads to have been flushed", len(payloads)) + } +} + +func Test_addSessionId(t *testing.T) { + events := []struct { + Foo string `mapstructure:"foo"` + }{ + { + Foo: "foo1", + }, + { + Foo: "foo2", + }, + } + arr := make([]interface{}, len(events)) + for i, event := range events { + arr[i] = event + } + sessionID := "my-uuid" + output, err := addSessionID(sessionID, arr) + if err != nil { + t.Errorf("failed to encode analytics events: %v", err) + } + if len(output) != 2 { + t.Errorf("len output got %v, want 2", len(output)) + } + if output[0]["foo"] != "foo1" { + t.Errorf("first event foo got %v, want foo1", output[0]["foo"]) + } + for i, event := range output { + if event["sessionId"] != "my-uuid" { + t.Errorf("event %v sessionId got %v, want %v", i, event["sessionId"], sessionID) + } + } +} diff --git a/cli/internal/cache/async_cache.go b/cli/internal/cache/async_cache.go new file mode 100644 index 0000000..0a8f467 --- /dev/null +++ b/cli/internal/cache/async_cache.go @@ -0,0 +1,82 @@ +// Adapted from https://github.com/thought-machine/please +// Copyright Thought Machine, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +package cache + +import ( + "sync" + + "github.com/vercel/turbo/cli/internal/turbopath" +) + +// An asyncCache is a wrapper around a Cache interface that handles incoming +// store requests asynchronously and attempts to return immediately. +// The requests are handled on an internal queue, if that fills up then +// incoming requests will start to block again until it empties. +// Retrieval requests are still handled synchronously. +type asyncCache struct { + requests chan cacheRequest + realCache Cache + wg sync.WaitGroup +} + +// A cacheRequest models an incoming cache request on our queue. 
+type cacheRequest struct { + anchor turbopath.AbsoluteSystemPath + key string + duration int + files []turbopath.AnchoredSystemPath +} + +func newAsyncCache(realCache Cache, opts Opts) Cache { + c := &asyncCache{ + requests: make(chan cacheRequest), + realCache: realCache, + } + c.wg.Add(opts.Workers) + for i := 0; i < opts.Workers; i++ { + go c.run() + } + return c +} + +func (c *asyncCache) Put(anchor turbopath.AbsoluteSystemPath, key string, duration int, files []turbopath.AnchoredSystemPath) error { + c.requests <- cacheRequest{ + anchor: anchor, + key: key, + files: files, + duration: duration, + } + return nil +} + +func (c *asyncCache) Fetch(anchor turbopath.AbsoluteSystemPath, key string, files []string) (ItemStatus, []turbopath.AnchoredSystemPath, int, error) { + return c.realCache.Fetch(anchor, key, files) +} + +func (c *asyncCache) Exists(key string) ItemStatus { + return c.realCache.Exists(key) +} + +func (c *asyncCache) Clean(anchor turbopath.AbsoluteSystemPath) { + c.realCache.Clean(anchor) +} + +func (c *asyncCache) CleanAll() { + c.realCache.CleanAll() +} + +func (c *asyncCache) Shutdown() { + // fmt.Println("Shutting down cache workers...") + close(c.requests) + c.wg.Wait() + // fmt.Println("Shut down all cache workers") +} + +// run implements the actual async logic. +func (c *asyncCache) run() { + for r := range c.requests { + _ = c.realCache.Put(r.anchor, r.key, r.duration, r.files) + } + c.wg.Done() +} diff --git a/cli/internal/cache/cache.go b/cli/internal/cache/cache.go new file mode 100644 index 0000000..8b74272 --- /dev/null +++ b/cli/internal/cache/cache.go @@ -0,0 +1,317 @@ +// Package cache abstracts storing and fetching previously run tasks +// +// Adapted from https://github.com/thought-machine/please +// Copyright Thought Machine, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +package cache + +import ( + "errors" + "sync" + + "github.com/vercel/turbo/cli/internal/analytics" + "github.com/vercel/turbo/cli/internal/fs" + "github.com/vercel/turbo/cli/internal/turbopath" + "github.com/vercel/turbo/cli/internal/util" + "golang.org/x/sync/errgroup" +) + +// Cache is an abstracted way to cache/fetch previously run tasks +type Cache interface { + // Fetch reports whether there is a cache hit.
It is expected to move files + // into their correct position as a side effect + Fetch(anchor turbopath.AbsoluteSystemPath, hash string, files []string) (ItemStatus, []turbopath.AnchoredSystemPath, int, error) + Exists(hash string) ItemStatus + // Put caches files for a given hash + Put(anchor turbopath.AbsoluteSystemPath, hash string, duration int, files []turbopath.AnchoredSystemPath) error + Clean(anchor turbopath.AbsoluteSystemPath) + CleanAll() + Shutdown() +} + +// ItemStatus holds whether artifacts exist for a given hash on the local +// and/or remote caching server +type ItemStatus struct { + Local bool `json:"local"` + Remote bool `json:"remote"` +} + +const ( + // CacheSourceFS is a constant to indicate local cache hit + CacheSourceFS = "LOCAL" + // CacheSourceRemote is a constant to indicate remote cache hit + CacheSourceRemote = "REMOTE" + // CacheEventHit is a constant to indicate a cache hit + CacheEventHit = "HIT" + // CacheEventMiss is a constant to indicate a cache miss + CacheEventMiss = "MISS" +) + +type CacheEvent struct { + Source string `mapstructure:"source"` + Event string `mapstructure:"event"` + Hash string `mapstructure:"hash"` + Duration int `mapstructure:"duration"` +} + +// DefaultLocation returns the default filesystem cache location, given a repo root +func DefaultLocation(repoRoot turbopath.AbsoluteSystemPath) turbopath.AbsoluteSystemPath { + return repoRoot.UntypedJoin("node_modules", ".cache", "turbo") +} + +// OnCacheRemoved defines a callback that the cache system calls if a particular cache +// needs to be removed. In practice, this happens when Remote Caching has been disabled +// but the CLI continues to try to use it. +type OnCacheRemoved = func(cache Cache, err error) + +// ErrNoCachesEnabled is returned when both the filesystem and http cache are unavailable +var ErrNoCachesEnabled = errors.New("no caches are enabled") + +// Opts holds configuration options for the cache +// TODO(gsoltis): further refactor this into fs cache opts and http cache opts +type Opts struct { + OverrideDir string + SkipRemote bool + SkipFilesystem bool + Workers int + RemoteCacheOpts fs.RemoteCacheOptions +} + +// resolveCacheDir calculates the location turbo should use to cache artifacts, +// based on the options supplied by the user. +func (o *Opts) resolveCacheDir(repoRoot turbopath.AbsoluteSystemPath) turbopath.AbsoluteSystemPath { + if o.OverrideDir != "" { + return fs.ResolveUnknownPath(repoRoot, o.OverrideDir) + } + return DefaultLocation(repoRoot) +} + +var _remoteOnlyHelp = `Ignore the local filesystem cache for all tasks. Only +allow reading and caching artifacts using the remote cache.` + +// New creates a new cache +func New(opts Opts, repoRoot turbopath.AbsoluteSystemPath, client client, recorder analytics.Recorder, onCacheRemoved OnCacheRemoved) (Cache, error) { + c, err := newSyncCache(opts, repoRoot, client, recorder, onCacheRemoved) + if err != nil && !errors.Is(err, ErrNoCachesEnabled) { + return nil, err + } + if opts.Workers > 0 { + return newAsyncCache(c, opts), err + } + return c, err +} + +// newSyncCache can return an error with a usable noopCache. +func newSyncCache(opts Opts, repoRoot turbopath.AbsoluteSystemPath, client client, recorder analytics.Recorder, onCacheRemoved OnCacheRemoved) (Cache, error) { + // Check to see if the user has turned off particular cache implementations.
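+ // Depending on these flags we end up with one or two real caches, with a noopCache standing in when everything else has been turned off.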
+ useFsCache := !opts.SkipFilesystem + useHTTPCache := !opts.SkipRemote + + // Since the above two flags are not mutually exclusive it is possible to configure + // yourself out of having a cache. We should tell you about it but we shouldn't fail + // your build for that reason. + // + // Further, since the httpCache can be removed at runtime, we need to insert a noopCache + // as a backup if you are configured to have *just* an httpCache. + // + // This is reduced from (!useFsCache && !useHTTPCache) || (!useFsCache && useHTTPCache) + useNoopCache := !useFsCache + + // Build up an array of cache implementations; we can only ever have 1 or 2. + cacheImplementations := make([]Cache, 0, 2) + + if useFsCache { + implementation, err := newFsCache(opts, recorder, repoRoot) + if err != nil { + return nil, err + } + cacheImplementations = append(cacheImplementations, implementation) + } + + if useHTTPCache { + implementation := newHTTPCache(opts, client, recorder) + cacheImplementations = append(cacheImplementations, implementation) + } + + if useNoopCache { + implementation := newNoopCache() + cacheImplementations = append(cacheImplementations, implementation) + } + + // Precisely two cache implementations: + // fsCache and httpCache OR httpCache and noopCache + useMultiplexer := len(cacheImplementations) > 1 + if useMultiplexer { + // We have early-returned any possible errors for this scenario. + return &cacheMultiplexer{ + onCacheRemoved: onCacheRemoved, + opts: opts, + caches: cacheImplementations, + }, nil + } + + // Precisely one cache implementation: fsCache OR noopCache + implementation := cacheImplementations[0] + _, isNoopCache := implementation.(*noopCache) + + // We want to let the user know something is wonky, but we don't want + // to trigger their build to fail. + if isNoopCache { + return implementation, ErrNoCachesEnabled + } + return implementation, nil +} + +// A cacheMultiplexer multiplexes several caches into one. +// Used when we have several active (e.g. http, dir). +type cacheMultiplexer struct { + caches []Cache + opts Opts + mu sync.RWMutex + onCacheRemoved OnCacheRemoved +} + +func (mplex *cacheMultiplexer) Put(anchor turbopath.AbsoluteSystemPath, key string, duration int, files []turbopath.AnchoredSystemPath) error { + return mplex.storeUntil(anchor, key, duration, files, len(mplex.caches)) +} + +type cacheRemoval struct { + cache Cache + err *util.CacheDisabledError +} + +// storeUntil stores artifacts into higher priority caches than the given one. +// Used after artifact retrieval to ensure we have them in e.g. the directory cache after +// downloading from the RPC cache. +func (mplex *cacheMultiplexer) storeUntil(anchor turbopath.AbsoluteSystemPath, key string, duration int, files []turbopath.AnchoredSystemPath, stopAt int) error { + // Attempt to store on all caches simultaneously.
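+ // A cache that reports itself as disabled is queued for removal rather than failing the whole group.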
+ toRemove := make([]*cacheRemoval, stopAt) + g := &errgroup.Group{} + mplex.mu.RLock() + for i, cache := range mplex.caches { + if i == stopAt { + break + } + c := cache + i := i + g.Go(func() error { + err := c.Put(anchor, key, duration, files) + if err != nil { + cd := &util.CacheDisabledError{} + if errors.As(err, &cd) { + toRemove[i] = &cacheRemoval{ + cache: c, + err: cd, + } + // we don't want this to cancel other cache actions + return nil + } + return err + } + return nil + }) + } + mplex.mu.RUnlock() + + if err := g.Wait(); err != nil { + return err + } + + for _, removal := range toRemove { + if removal != nil { + mplex.removeCache(removal) + } + } + return nil +} + +// removeCache takes a requested removal and tries to actually remove it. However, +// multiple requests could result in concurrent requests to remove the same cache. +// Let one of them win and propagate the error; the rest will no-op. +func (mplex *cacheMultiplexer) removeCache(removal *cacheRemoval) { + mplex.mu.Lock() + defer mplex.mu.Unlock() + for i, cache := range mplex.caches { + if cache == removal.cache { + mplex.caches = append(mplex.caches[:i], mplex.caches[i+1:]...) + mplex.onCacheRemoved(cache, removal.err) + break + } + } +} + +func (mplex *cacheMultiplexer) Fetch(anchor turbopath.AbsoluteSystemPath, key string, files []string) (ItemStatus, []turbopath.AnchoredSystemPath, int, error) { + // Make a shallow copy of the caches, since storeUntil can call removeCache + mplex.mu.RLock() + caches := make([]Cache, len(mplex.caches)) + copy(caches, mplex.caches) + mplex.mu.RUnlock() + + // We need to return a composite cache status from multiple caches + // Initialize the empty struct so we can assign values to it. This is similar + // to how the Exists() method works. + combinedCacheState := ItemStatus{} + + // Retrieve from caches sequentially; if we did them simultaneously we could + // easily write the same file from two goroutines at once. + for i, cache := range caches { + itemStatus, actualFiles, duration, err := cache.Fetch(anchor, key, files) + ok := itemStatus.Local || itemStatus.Remote + + if err != nil { + cd := &util.CacheDisabledError{} + if errors.As(err, &cd) { + mplex.removeCache(&cacheRemoval{ + cache: cache, + err: cd, + }) + } + // We're ignoring the error in the else case, since with this cache + // abstraction, we want to check lower priority caches rather than fail + // the operation. Future work that plumbs UI / Logging into the cache system + // should probably log this at least. + } + if ok { + // Store this into other caches. We can ignore errors here because we know + // we have previously successfully stored in a higher-priority cache, and so the overall + // result is a success at fetching. Storing in lower-priority caches is an optimization.
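+ // Passing i as stopAt limits the backfill to the caches ahead of the one that produced this hit.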
_ = mplex.storeUntil(anchor, key, duration, actualFiles, i) + + // If another cache had already set this to true, we don't need to set it again from this cache + combinedCacheState.Local = combinedCacheState.Local || itemStatus.Local + combinedCacheState.Remote = combinedCacheState.Remote || itemStatus.Remote + return combinedCacheState, actualFiles, duration, err + } + } + + return ItemStatus{Local: false, Remote: false}, nil, 0, nil +} + +func (mplex *cacheMultiplexer) Exists(target string) ItemStatus { + syncCacheState := ItemStatus{} + for _, cache := range mplex.caches { + itemStatus := cache.Exists(target) + syncCacheState.Local = syncCacheState.Local || itemStatus.Local + syncCacheState.Remote = syncCacheState.Remote || itemStatus.Remote + } + + return syncCacheState +} + +func (mplex *cacheMultiplexer) Clean(anchor turbopath.AbsoluteSystemPath) { + for _, cache := range mplex.caches { + cache.Clean(anchor) + } +} + +func (mplex *cacheMultiplexer) CleanAll() { + for _, cache := range mplex.caches { + cache.CleanAll() + } +} + +func (mplex *cacheMultiplexer) Shutdown() { + for _, cache := range mplex.caches { + cache.Shutdown() + } +} diff --git a/cli/internal/cache/cache_fs.go b/cli/internal/cache/cache_fs.go new file mode 100644 index 0000000..fb15a02 --- /dev/null +++ b/cli/internal/cache/cache_fs.go @@ -0,0 +1,174 @@ +// Adapted from https://github.com/thought-machine/please +// Copyright Thought Machine, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +// Package cache implements our cache abstraction. +package cache + +import ( + "encoding/json" + "fmt" + + "github.com/vercel/turbo/cli/internal/analytics" + "github.com/vercel/turbo/cli/internal/cacheitem" + "github.com/vercel/turbo/cli/internal/turbopath" +) + +// fsCache is a local filesystem cache +type fsCache struct { + cacheDirectory turbopath.AbsoluteSystemPath + recorder analytics.Recorder +} + +// newFsCache creates a new filesystem cache +func newFsCache(opts Opts, recorder analytics.Recorder, repoRoot turbopath.AbsoluteSystemPath) (*fsCache, error) { + cacheDir := opts.resolveCacheDir(repoRoot) + if err := cacheDir.MkdirAll(0775); err != nil { + return nil, err + } + return &fsCache{ + cacheDirectory: cacheDir, + recorder: recorder, + }, nil +} + +// Fetch reports whether items are cached. It moves them into position as a side effect.
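+// Both uncompressed (.tar) and compressed (.tar.zst) archives are supported; the duration recorded at Put time is read back from the hash's -meta.json file.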
+func (f *fsCache) Fetch(anchor turbopath.AbsoluteSystemPath, hash string, _ []string) (ItemStatus, []turbopath.AnchoredSystemPath, int, error) { + uncompressedCachePath := f.cacheDirectory.UntypedJoin(hash + ".tar") + compressedCachePath := f.cacheDirectory.UntypedJoin(hash + ".tar.zst") + + var actualCachePath turbopath.AbsoluteSystemPath + if uncompressedCachePath.FileExists() { + actualCachePath = uncompressedCachePath + } else if compressedCachePath.FileExists() { + actualCachePath = compressedCachePath + } else { + // It's not in the cache, bail now + f.logFetch(false, hash, 0) + return ItemStatus{Local: false}, nil, 0, nil + } + + cacheItem, openErr := cacheitem.Open(actualCachePath) + if openErr != nil { + return ItemStatus{Local: false}, nil, 0, openErr + } + + restoredFiles, restoreErr := cacheItem.Restore(anchor) + if restoreErr != nil { + _ = cacheItem.Close() + return ItemStatus{Local: false}, nil, 0, restoreErr + } + + meta, err := ReadCacheMetaFile(f.cacheDirectory.UntypedJoin(hash + "-meta.json")) + if err != nil { + _ = cacheItem.Close() + return ItemStatus{Local: false}, nil, 0, fmt.Errorf("error reading cache metadata: %w", err) + } + f.logFetch(true, hash, meta.Duration) + + // Wait to see what happens with close. + closeErr := cacheItem.Close() + if closeErr != nil { + return ItemStatus{Local: false}, restoredFiles, 0, closeErr + } + return ItemStatus{Local: true}, restoredFiles, meta.Duration, nil +} + +func (f *fsCache) Exists(hash string) ItemStatus { + uncompressedCachePath := f.cacheDirectory.UntypedJoin(hash + ".tar") + compressedCachePath := f.cacheDirectory.UntypedJoin(hash + ".tar.zst") + + if compressedCachePath.FileExists() || uncompressedCachePath.FileExists() { + return ItemStatus{Local: true} + } + + return ItemStatus{Local: false} +} + +func (f *fsCache) logFetch(hit bool, hash string, duration int) { + var event string + if hit { + event = CacheEventHit + } else { + event = CacheEventMiss + } + payload := &CacheEvent{ + Source: CacheSourceFS, + Event: event, + Hash: hash, + Duration: duration, + } + f.recorder.LogEvent(payload) +} + +func (f *fsCache) Put(anchor turbopath.AbsoluteSystemPath, hash string, duration int, files []turbopath.AnchoredSystemPath) error { + cachePath := f.cacheDirectory.UntypedJoin(hash + ".tar.zst") + cacheItem, err := cacheitem.Create(cachePath) + if err != nil { + return err + } + + for _, file := range files { + err := cacheItem.AddFile(anchor, file) + if err != nil { + _ = cacheItem.Close() + return err + } + } + + writeErr := WriteCacheMetaFile(f.cacheDirectory.UntypedJoin(hash+"-meta.json"), &CacheMetadata{ + Duration: duration, + Hash: hash, + }) + + if writeErr != nil { + _ = cacheItem.Close() + return writeErr + } + + return cacheItem.Close() +} + +func (f *fsCache) Clean(_ turbopath.AbsoluteSystemPath) { + fmt.Println("Not implemented yet") +} + +func (f *fsCache) CleanAll() { + fmt.Println("Not implemented yet") +} + +func (f *fsCache) Shutdown() {} + +// CacheMetadata stores duration and hash information for a cache entry so that aggregate Time Saved calculations +// can be made from artifacts from various caches +type CacheMetadata struct { + Hash string `json:"hash"` + Duration int `json:"duration"` +} + +// WriteCacheMetaFile writes cache metadata file at a path +func WriteCacheMetaFile(path turbopath.AbsoluteSystemPath, config *CacheMetadata) error { + jsonBytes, marshalErr := json.Marshal(config) + if marshalErr != nil { + return marshalErr + } + writeFileErr := path.WriteFile(jsonBytes, 0644) + if writeFileErr != nil { + return writeFileErr + } + return nil +} + +// ReadCacheMetaFile reads cache metadata file at a path +func ReadCacheMetaFile(path turbopath.AbsoluteSystemPath) (*CacheMetadata, error) { + jsonBytes, readFileErr := path.ReadFile() + if readFileErr != nil { + return nil, readFileErr + } + var config CacheMetadata + marshalErr := json.Unmarshal(jsonBytes, &config) + if marshalErr != nil { + return nil, marshalErr + } + return &config, nil +} diff --git a/cli/internal/cache/cache_fs_test.go b/cli/internal/cache/cache_fs_test.go new file mode 100644 index 0000000..614ad86 --- /dev/null +++ b/cli/internal/cache/cache_fs_test.go @@ -0,0 +1,253 @@ +package cache + +import ( + "path/filepath" + "testing" + + "github.com/vercel/turbo/cli/internal/analytics" + "github.com/vercel/turbo/cli/internal/cacheitem" + "github.com/vercel/turbo/cli/internal/turbopath" + "gotest.tools/v3/assert" +) + +type dummyRecorder struct{} + +func (dr *dummyRecorder) LogEvent(payload analytics.EventPayload) {} + +func TestPut(t *testing.T) { + // Set up a test source and cache directory + // The "source" directory simulates a package + // + // <src>/ + // b + // child/ + // a + // link -> ../b + // broken -> missing + // circle -> ../child + // + // Ensure we end up with a matching directory under a + // "cache" directory: + // + // <dst>/the-hash/<files>... + + src := turbopath.AbsoluteSystemPath(t.TempDir()) + childDir := src.UntypedJoin("child") + err := childDir.MkdirAll(0775) + assert.NilError(t, err, "Mkdir") + aPath := childDir.UntypedJoin("a") + aFile, err := aPath.Create() + assert.NilError(t, err, "Create") + _, err = aFile.WriteString("hello") + assert.NilError(t, err, "WriteString") + assert.NilError(t, aFile.Close(), "Close") + + bPath := src.UntypedJoin("b") + bFile, err := bPath.Create() + assert.NilError(t, err, "Create") + _, err = bFile.WriteString("bFile") + assert.NilError(t, err, "WriteString") + assert.NilError(t, bFile.Close(), "Close") + + srcLinkPath := childDir.UntypedJoin("link") + linkTarget := filepath.FromSlash("../b") + assert.NilError(t, srcLinkPath.Symlink(linkTarget), "Symlink") + + srcBrokenLinkPath := childDir.Join("broken") + assert.NilError(t, srcBrokenLinkPath.Symlink("missing"), "Symlink") + circlePath := childDir.Join("circle") + assert.NilError(t, circlePath.Symlink(filepath.FromSlash("../child")), "Symlink") + + files := []turbopath.AnchoredSystemPath{ + turbopath.AnchoredUnixPath("child/").ToSystemPath(), // childDir + turbopath.AnchoredUnixPath("child/a").ToSystemPath(), // aPath, + turbopath.AnchoredUnixPath("b").ToSystemPath(), // bPath, + turbopath.AnchoredUnixPath("child/link").ToSystemPath(), // srcLinkPath, + turbopath.AnchoredUnixPath("child/broken").ToSystemPath(), // srcBrokenLinkPath, + turbopath.AnchoredUnixPath("child/circle").ToSystemPath(), // circlePath + } + + dst := turbopath.AbsoluteSystemPath(t.TempDir()) + dr := &dummyRecorder{} + + cache := &fsCache{ + cacheDirectory: dst, + recorder: dr, + } + + hash := "the-hash" + duration := 0 + putErr := cache.Put(src, hash, duration, files) + assert.NilError(t, putErr, "Put") + + // Verify that we got the files that we're expecting + dstCachePath := dst.UntypedJoin(hash) + + // This test checks outputs, so we go ahead and pull things back out, + // aiming to validate the change with as few changes to the tests as possible.
+ cacheItem, openErr := cacheitem.Open(dst.UntypedJoin(hash + ".tar.zst")) + assert.NilError(t, openErr, "Open") + + _, restoreErr := cacheItem.Restore(dstCachePath) + assert.NilError(t, restoreErr, "Restore") + + dstAPath := dstCachePath.UntypedJoin("child", "a") + assertFileMatches(t, aPath, dstAPath) + + dstBPath := dstCachePath.UntypedJoin("b") + assertFileMatches(t, bPath, dstBPath) + + dstLinkPath := dstCachePath.UntypedJoin("child", "link") + target, err := dstLinkPath.Readlink() + assert.NilError(t, err, "Readlink") + if target != linkTarget { + t.Errorf("Readlink got %v, want %v", target, linkTarget) + } + + dstBrokenLinkPath := dstCachePath.UntypedJoin("child", "broken") + target, err = dstBrokenLinkPath.Readlink() + assert.NilError(t, err, "Readlink") + if target != "missing" { + t.Errorf("Readlink got %v, want missing", target) + } + + dstCirclePath := dstCachePath.UntypedJoin("child", "circle") + circleLinkDest, err := dstCirclePath.Readlink() + assert.NilError(t, err, "Readlink") + expectedCircleLinkDest := filepath.FromSlash("../child") + if circleLinkDest != expectedCircleLinkDest { + t.Errorf("Cache link got %v, want %v", circleLinkDest, expectedCircleLinkDest) + } + + assert.NilError(t, cacheItem.Close(), "Close") +} + +func assertFileMatches(t *testing.T, orig turbopath.AbsoluteSystemPath, copy turbopath.AbsoluteSystemPath) { + t.Helper() + origBytes, err := orig.ReadFile() + assert.NilError(t, err, "ReadFile") + copyBytes, err := copy.ReadFile() + assert.NilError(t, err, "ReadFile") + assert.DeepEqual(t, origBytes, copyBytes) + origStat, err := orig.Lstat() + assert.NilError(t, err, "Lstat") + copyStat, err := copy.Lstat() + assert.NilError(t, err, "Lstat") + assert.Equal(t, origStat.Mode(), copyStat.Mode()) +} + +func TestFetch(t *testing.T) { + // Set up a test cache directory and target output directory + // The "cacheDir" directory simulates a cached package + // + // <cacheDir>/ + // the-hash-meta.json + // the-hash/ + // some-package/ + // b + // child/ + // a + // link -> ../b + // broken -> missing + // circle -> ../child + // + // Ensure we end up with a matching directory under a + // "some-package" directory: + // + // "some-package"/...
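+ // The fixture covers regular files as well as relative, broken, and circular symlinks.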
+ + cacheDir := turbopath.AbsoluteSystemPath(t.TempDir()) + hash := "the-hash" + src := cacheDir.UntypedJoin(hash, "some-package") + err := src.MkdirAll(0775) + assert.NilError(t, err, "mkdirAll") + + childDir := src.UntypedJoin("child") + err = childDir.MkdirAll(0775) + assert.NilError(t, err, "Mkdir") + aPath := childDir.UntypedJoin("a") + aFile, err := aPath.Create() + assert.NilError(t, err, "Create") + _, err = aFile.WriteString("hello") + assert.NilError(t, err, "WriteString") + assert.NilError(t, aFile.Close(), "Close") + + bPath := src.UntypedJoin("b") + bFile, err := bPath.Create() + assert.NilError(t, err, "Create") + _, err = bFile.WriteString("bFile") + assert.NilError(t, err, "WriteString") + assert.NilError(t, bFile.Close(), "Close") + + srcLinkPath := childDir.UntypedJoin("link") + linkTarget := filepath.FromSlash("../b") + assert.NilError(t, srcLinkPath.Symlink(linkTarget), "Symlink") + + srcBrokenLinkPath := childDir.UntypedJoin("broken") + srcBrokenLinkTarget := turbopath.AnchoredUnixPath("missing").ToSystemPath() + assert.NilError(t, srcBrokenLinkPath.Symlink(srcBrokenLinkTarget.ToString()), "Symlink") + + circlePath := childDir.Join("circle") + srcCircleLinkTarget := turbopath.AnchoredUnixPath("../child").ToSystemPath() + assert.NilError(t, circlePath.Symlink(srcCircleLinkTarget.ToString()), "Symlink") + + metadataPath := cacheDir.UntypedJoin("the-hash-meta.json") + err = metadataPath.WriteFile([]byte(`{"hash":"the-hash","duration":0}`), 0777) + assert.NilError(t, err, "WriteFile") + + dr := &dummyRecorder{} + + cache := &fsCache{ + cacheDirectory: cacheDir, + recorder: dr, + } + + inputFiles := []turbopath.AnchoredSystemPath{ + turbopath.AnchoredUnixPath("some-package/child/").ToSystemPath(), // childDir + turbopath.AnchoredUnixPath("some-package/child/a").ToSystemPath(), // aPath, + turbopath.AnchoredUnixPath("some-package/b").ToSystemPath(), // bPath, + turbopath.AnchoredUnixPath("some-package/child/link").ToSystemPath(), // srcLinkPath, + turbopath.AnchoredUnixPath("some-package/child/broken").ToSystemPath(), // srcBrokenLinkPath, + turbopath.AnchoredUnixPath("some-package/child/circle").ToSystemPath(), // circlePath + } + + putErr := cache.Put(cacheDir.UntypedJoin(hash), hash, 0, inputFiles) + assert.NilError(t, putErr, "Put") + + outputDir := turbopath.AbsoluteSystemPath(t.TempDir()) + dstOutputPath := "some-package" + cacheStatus, files, _, err := cache.Fetch(outputDir, "the-hash", []string{}) + assert.NilError(t, err, "Fetch") + hit := cacheStatus.Local || cacheStatus.Remote + if !hit { + t.Error("Fetch got false, want true") + } + if len(files) != len(inputFiles) { + t.Errorf("len(files) got %v, want %v", len(files), len(inputFiles)) + } + + dstAPath := outputDir.UntypedJoin(dstOutputPath, "child", "a") + assertFileMatches(t, aPath, dstAPath) + + dstBPath := outputDir.UntypedJoin(dstOutputPath, "b") + assertFileMatches(t, bPath, dstBPath) + + dstLinkPath := outputDir.UntypedJoin(dstOutputPath, "child", "link") + target, err := dstLinkPath.Readlink() + assert.NilError(t, err, "Readlink") + if target != linkTarget { + t.Errorf("Readlink got %v, want %v", target, linkTarget) + } + + // Assert that we restore broken symlinks correctly + dstBrokenLinkPath := outputDir.UntypedJoin(dstOutputPath, "child", "broken") + target, readlinkErr := dstBrokenLinkPath.Readlink() + assert.NilError(t, readlinkErr, "Readlink") + assert.Equal(t, target, srcBrokenLinkTarget.ToString()) + + // Assert that we restore symlinks to directories correctly + dstCirclePath := 
outputDir.UntypedJoin(dstOutputPath, "child", "circle")
+	circleTarget, circleReadlinkErr := dstCirclePath.Readlink()
+	assert.NilError(t, circleReadlinkErr, "Circle Readlink")
+	assert.Equal(t, circleTarget, srcCircleLinkTarget.ToString())
+}
diff --git a/cli/internal/cache/cache_http.go b/cli/internal/cache/cache_http.go
new file mode 100644
index 0000000..1d345bf
--- /dev/null
+++ b/cli/internal/cache/cache_http.go
@@ -0,0 +1,375 @@
+// Adapted from https://github.com/thought-machine/please
+// Copyright Thought Machine, Inc. or its affiliates. All Rights Reserved.
+// SPDX-License-Identifier: Apache-2.0
+package cache
+
+import (
+	"archive/tar"
+	"bytes"
+	"errors"
+	"fmt"
+	"io"
+	"io/ioutil"
+	"log"
+	"net/http"
+	"os"
+	"path/filepath"
+	"strconv"
+	"time"
+
+	"github.com/DataDog/zstd"
+
+	"github.com/vercel/turbo/cli/internal/analytics"
+	"github.com/vercel/turbo/cli/internal/tarpatch"
+	"github.com/vercel/turbo/cli/internal/turbopath"
+)
+
+type client interface {
+	PutArtifact(hash string, body []byte, duration int, tag string) error
+	FetchArtifact(hash string) (*http.Response, error)
+	ArtifactExists(hash string) (*http.Response, error)
+	GetTeamID() string
+}
+
+type httpCache struct {
+	writable       bool
+	client         client
+	requestLimiter limiter
+	recorder       analytics.Recorder
+	signerVerifier *ArtifactSignatureAuthentication
+	repoRoot       turbopath.AbsoluteSystemPath
+}
+
+type limiter chan struct{}
+
+func (l limiter) acquire() {
+	l <- struct{}{}
+}
+
+func (l limiter) release() {
+	<-l
+}
+
+// mtime is the time we attach for the modification time of all files.
+var mtime = time.Date(2000, time.January, 1, 0, 0, 0, 0, time.UTC)
+
+// nobody is the usual uid / gid of the 'nobody' user.
+const nobody = 65534
+
+func (cache *httpCache) Put(_ turbopath.AbsoluteSystemPath, hash string, duration int, files []turbopath.AnchoredSystemPath) error {
+	cache.requestLimiter.acquire()
+	defer cache.requestLimiter.release()
+
+	r, w := io.Pipe()
+	go cache.write(w, hash, files)
+
+	// Read the entire artifact tar into memory so we can easily compute the signature.
+	// Note: retryablehttp.NewRequest reads the files into memory anyway so there's no
+	// additional overhead by doing the ioutil.ReadAll here instead.
+	artifactBody, err := ioutil.ReadAll(r)
+	if err != nil {
+		return fmt.Errorf("failed to store files in HTTP cache: %w", err)
+	}
+	tag := ""
+	if cache.signerVerifier.isEnabled() {
+		tag, err = cache.signerVerifier.generateTag(hash, artifactBody)
+		if err != nil {
+			return fmt.Errorf("failed to store files in HTTP cache: %w", err)
+		}
+	}
+	return cache.client.PutArtifact(hash, artifactBody, duration, tag)
+}
+
+// write writes a series of files into the given Writer.
+func (cache *httpCache) write(w io.WriteCloser, hash string, files []turbopath.AnchoredSystemPath) {
+	defer func() { _ = w.Close() }()
+	zw := zstd.NewWriter(w)
+	defer func() { _ = zw.Close() }()
+	tw := tar.NewWriter(zw)
+	defer func() { _ = tw.Close() }()
+	for _, file := range files {
+		if err := cache.storeFile(tw, file); err != nil {
+			log.Printf("[ERROR] Error uploading artifact %s to HTTP cache due to: %s", file, err)
+			// TODO(jaredpalmer): How can we cancel the request at this point?
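+			// One possible answer (a sketch, not wired up here): keep the returned
+			// *io.PipeWriter and call w.CloseWithError(err) instead of a plain Close.
+			// The ioutil.ReadAll in Put then returns that error, so we would bail
+			// out before ever calling PutArtifact.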
+ } + } +} + +func (cache *httpCache) storeFile(tw *tar.Writer, repoRelativePath turbopath.AnchoredSystemPath) error { + absoluteFilePath := repoRelativePath.RestoreAnchor(cache.repoRoot) + info, err := absoluteFilePath.Lstat() + if err != nil { + return err + } + target := "" + if info.Mode()&os.ModeSymlink != 0 { + target, err = absoluteFilePath.Readlink() + if err != nil { + return err + } + } + hdr, err := tarpatch.FileInfoHeader(repoRelativePath.ToUnixPath(), info, filepath.ToSlash(target)) + if err != nil { + return err + } + // Ensure posix path for filename written in header. + hdr.Name = repoRelativePath.ToUnixPath().ToString() + // Zero out all timestamps. + hdr.ModTime = mtime + hdr.AccessTime = mtime + hdr.ChangeTime = mtime + // Strip user/group ids. + hdr.Uid = nobody + hdr.Gid = nobody + hdr.Uname = "nobody" + hdr.Gname = "nobody" + if err := tw.WriteHeader(hdr); err != nil { + return err + } else if info.IsDir() || target != "" { + return nil // nothing to write + } + f, err := absoluteFilePath.Open() + if err != nil { + return err + } + defer func() { _ = f.Close() }() + _, err = io.Copy(tw, f) + if errors.Is(err, tar.ErrWriteTooLong) { + log.Printf("Error writing %v to tar file, info: %v, mode: %v, is regular: %v", repoRelativePath, info, info.Mode(), info.Mode().IsRegular()) + } + return err +} + +func (cache *httpCache) Fetch(_ turbopath.AbsoluteSystemPath, key string, _ []string) (ItemStatus, []turbopath.AnchoredSystemPath, int, error) { + cache.requestLimiter.acquire() + defer cache.requestLimiter.release() + hit, files, duration, err := cache.retrieve(key) + if err != nil { + // TODO: analytics event? + return ItemStatus{Remote: false}, files, duration, fmt.Errorf("failed to retrieve files from HTTP cache: %w", err) + } + cache.logFetch(hit, key, duration) + return ItemStatus{Remote: hit}, files, duration, err +} + +func (cache *httpCache) Exists(key string) ItemStatus { + cache.requestLimiter.acquire() + defer cache.requestLimiter.release() + hit, err := cache.exists(key) + if err != nil { + return ItemStatus{Remote: false} + } + return ItemStatus{Remote: hit} +} + +func (cache *httpCache) logFetch(hit bool, hash string, duration int) { + var event string + if hit { + event = CacheEventHit + } else { + event = CacheEventMiss + } + payload := &CacheEvent{ + Source: CacheSourceRemote, + Event: event, + Hash: hash, + Duration: duration, + } + cache.recorder.LogEvent(payload) +} + +func (cache *httpCache) exists(hash string) (bool, error) { + resp, err := cache.client.ArtifactExists(hash) + if err != nil { + return false, nil + } + + defer func() { err = resp.Body.Close() }() + + if resp.StatusCode == http.StatusNotFound { + return false, nil + } else if resp.StatusCode != http.StatusOK { + return false, fmt.Errorf("%s", strconv.Itoa(resp.StatusCode)) + } + return true, err +} + +func (cache *httpCache) retrieve(hash string) (bool, []turbopath.AnchoredSystemPath, int, error) { + resp, err := cache.client.FetchArtifact(hash) + if err != nil { + return false, nil, 0, err + } + defer resp.Body.Close() + if resp.StatusCode == http.StatusNotFound { + return false, nil, 0, nil // doesn't exist - not an error + } else if resp.StatusCode != http.StatusOK { + b, _ := ioutil.ReadAll(resp.Body) + return false, nil, 0, fmt.Errorf("%s", string(b)) + } + // If present, extract the duration from the response. 
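+	// A hit that carries a duration looks roughly like:
+	//
+	//   HTTP/1.1 200 OK
+	//   x-artifact-duration: 56
+	//
+	// where the value is assumed to be the task duration recorded at Put time.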
+	duration := 0
+	if resp.Header.Get("x-artifact-duration") != "" {
+		intVar, err := strconv.Atoi(resp.Header.Get("x-artifact-duration"))
+		if err != nil {
+			return false, nil, 0, fmt.Errorf("invalid x-artifact-duration header: %w", err)
+		}
+		duration = intVar
+	}
+	var tarReader io.Reader
+
+	if cache.signerVerifier.isEnabled() {
+		expectedTag := resp.Header.Get("x-artifact-tag")
+		if expectedTag == "" {
+			// If the verifier is enabled all incoming artifact downloads must have a signature
+			return false, nil, 0, errors.New("artifact verification failed: Downloaded artifact is missing required x-artifact-tag header")
+		}
+		b, err := ioutil.ReadAll(resp.Body)
+		if err != nil {
+			return false, nil, 0, fmt.Errorf("artifact verification failed: %w", err)
+		}
+		isValid, err := cache.signerVerifier.validate(hash, b, expectedTag)
+		if err != nil {
+			return false, nil, 0, fmt.Errorf("artifact verification failed: %w", err)
+		}
+		if !isValid {
+			err = fmt.Errorf("artifact verification failed: artifact tag does not match expected tag %s", expectedTag)
+			return false, nil, 0, err
+		}
+		// The artifact has been verified and the body can be read and untarred
+		tarReader = bytes.NewReader(b)
+	} else {
+		tarReader = resp.Body
+	}
+	files, err := restoreTar(cache.repoRoot, tarReader)
+	if err != nil {
+		return false, nil, 0, err
+	}
+	return true, files, duration, nil
+}
+
+// restoreTar returns posix-style repo-relative paths of the files it
+// restored. In the future, these should likely be repo-relative system paths
+// so that they are suitable for being fed into cache.Put for other caches.
+// For now, I think this is working because windows also accepts /-delimited paths.
+func restoreTar(root turbopath.AbsoluteSystemPath, reader io.Reader) (files []turbopath.AnchoredSystemPath, err error) {
+	files = []turbopath.AnchoredSystemPath{}
+	missingLinks := []*tar.Header{}
+	zr := zstd.NewReader(reader)
+	// Close the decompressor on the way out, surfacing its error only when
+	// nothing else has failed first.
+	defer func() {
+		closeErr := zr.Close()
+		if err == nil {
+			err = closeErr
+		}
+	}()
+	tr := tar.NewReader(zr)
+	for {
+		hdr, err := tr.Next()
+		if err != nil {
+			if err == io.EOF {
+				for _, link := range missingLinks {
+					err := restoreSymlink(root, link, true)
+					if err != nil {
+						return nil, err
+					}
+				}
+
+				return files, nil
+			}
+			return nil, err
+		}
+		// hdr.Name is always a posix-style path
+		// FIXME: THIS IS A BUG.
+		restoredName := turbopath.AnchoredUnixPath(hdr.Name)
+		files = append(files, restoredName.ToSystemPath())
+		filename := restoredName.ToSystemPath().RestoreAnchor(root)
+		if isChild, err := root.ContainsPath(filename); err != nil {
+			return nil, err
+		} else if !isChild {
+			return nil, fmt.Errorf("cannot untar file to %v", filename)
+		}
+		switch hdr.Typeflag {
+		case tar.TypeDir:
+			if err := filename.MkdirAll(0775); err != nil {
+				return nil, err
+			}
+		case tar.TypeReg:
+			if dir := filename.Dir(); dir != "."
{ + if err := dir.MkdirAll(0775); err != nil { + return nil, err + } + } + if f, err := filename.OpenFile(os.O_WRONLY|os.O_TRUNC|os.O_CREATE, os.FileMode(hdr.Mode)); err != nil { + return nil, err + } else if _, err := io.Copy(f, tr); err != nil { + return nil, err + } else if err := f.Close(); err != nil { + return nil, err + } + case tar.TypeSymlink: + if err := restoreSymlink(root, hdr, false); errors.Is(err, errNonexistentLinkTarget) { + missingLinks = append(missingLinks, hdr) + } else if err != nil { + return nil, err + } + default: + log.Printf("Unhandled file type %d for %s", hdr.Typeflag, hdr.Name) + } + } +} + +var errNonexistentLinkTarget = errors.New("the link target does not exist") + +func restoreSymlink(root turbopath.AbsoluteSystemPath, hdr *tar.Header, allowNonexistentTargets bool) error { + // Note that hdr.Linkname is really the link target + relativeLinkTarget := filepath.FromSlash(hdr.Linkname) + linkFilename := root.UntypedJoin(hdr.Name) + if err := linkFilename.EnsureDir(); err != nil { + return err + } + + // TODO: check if this is an absolute path, or if we even care + linkTarget := linkFilename.Dir().UntypedJoin(relativeLinkTarget) + if _, err := linkTarget.Lstat(); err != nil { + if os.IsNotExist(err) { + if !allowNonexistentTargets { + return errNonexistentLinkTarget + } + // if we're allowing nonexistent link targets, proceed to creating the link + } else { + return err + } + } + // Ensure that the link we're about to create doesn't already exist + if err := linkFilename.Remove(); err != nil && !errors.Is(err, os.ErrNotExist) { + return err + } + if err := linkFilename.Symlink(relativeLinkTarget); err != nil { + return err + } + return nil +} + +func (cache *httpCache) Clean(_ turbopath.AbsoluteSystemPath) { + // Not possible; this implementation can only clean for a hash. +} + +func (cache *httpCache) CleanAll() { + // Also not possible. +} + +func (cache *httpCache) Shutdown() {} + +func newHTTPCache(opts Opts, client client, recorder analytics.Recorder) *httpCache { + return &httpCache{ + writable: true, + client: client, + requestLimiter: make(limiter, 20), + recorder: recorder, + signerVerifier: &ArtifactSignatureAuthentication{ + // TODO(Gaspar): this should use RemoteCacheOptions.TeamId once we start + // enforcing team restrictions for repositories. + teamId: client.GetTeamID(), + enabled: opts.RemoteCacheOpts.Signature, + }, + } +} diff --git a/cli/internal/cache/cache_http_test.go b/cli/internal/cache/cache_http_test.go new file mode 100644 index 0000000..d187931 --- /dev/null +++ b/cli/internal/cache/cache_http_test.go @@ -0,0 +1,245 @@ +package cache + +import ( + "archive/tar" + "bytes" + "errors" + "net/http" + "testing" + + "github.com/DataDog/zstd" + + "github.com/vercel/turbo/cli/internal/fs" + "github.com/vercel/turbo/cli/internal/turbopath" + "github.com/vercel/turbo/cli/internal/util" + "gotest.tools/v3/assert" +) + +type errorResp struct { + err error +} + +func (sr *errorResp) PutArtifact(hash string, body []byte, duration int, tag string) error { + return sr.err +} + +func (sr *errorResp) FetchArtifact(hash string) (*http.Response, error) { + return nil, sr.err +} + +func (sr *errorResp) ArtifactExists(hash string) (*http.Response, error) { + return nil, sr.err +} + +func (sr *errorResp) GetTeamID() string { + return "" +} + +func TestRemoteCachingDisabled(t *testing.T) { + clientErr := &util.CacheDisabledError{ + Status: util.CachingStatusDisabled, + Message: "Remote Caching has been disabled for this team. 
A team owner can enable it here: $URL",
+	}
+	client := &errorResp{err: clientErr}
+	cache := &httpCache{
+		client:         client,
+		requestLimiter: make(limiter, 20),
+	}
+	cd := &util.CacheDisabledError{}
+	_, _, _, err := cache.Fetch("unused-target", "some-hash", []string{"unused", "outputs"})
+	if !errors.As(err, &cd) {
+		t.Errorf("cache.Fetch err got %v, want a CacheDisabled error", err)
+	}
+	if cd.Status != util.CachingStatusDisabled {
+		t.Errorf("CacheDisabled.Status got %v, want %v", cd.Status, util.CachingStatusDisabled)
+	}
+}
+
+func makeValidTar(t *testing.T) *bytes.Buffer {
+	// <root>/
+	//   my-pkg/
+	//     some-file
+	//     link-to-extra-file -> ../extra-file
+	//     broken-link -> ../../global-dep
+	//   extra-file
+
+	t.Helper()
+	buf := &bytes.Buffer{}
+	zw := zstd.NewWriter(buf)
+	defer func() {
+		if err := zw.Close(); err != nil {
+			t.Fatalf("failed to close zstd: %v", err)
+		}
+	}()
+	tw := tar.NewWriter(zw)
+	defer func() {
+		if err := tw.Close(); err != nil {
+			t.Fatalf("failed to close tar: %v", err)
+		}
+	}()
+
+	// my-pkg
+	h := &tar.Header{
+		Name:     "my-pkg/",
+		Mode:     int64(0644),
+		Typeflag: tar.TypeDir,
+	}
+	if err := tw.WriteHeader(h); err != nil {
+		t.Fatalf("failed to write header: %v", err)
+	}
+	// my-pkg/some-file
+	contents := []byte("some-file-contents")
+	h = &tar.Header{
+		Name:     "my-pkg/some-file",
+		Mode:     int64(0644),
+		Typeflag: tar.TypeReg,
+		Size:     int64(len(contents)),
+	}
+	if err := tw.WriteHeader(h); err != nil {
+		t.Fatalf("failed to write header: %v", err)
+	}
+	if _, err := tw.Write(contents); err != nil {
+		t.Fatalf("failed to write file: %v", err)
+	}
+	// my-pkg/link-to-extra-file
+	h = &tar.Header{
+		Name:     "my-pkg/link-to-extra-file",
+		Mode:     int64(0644),
+		Typeflag: tar.TypeSymlink,
+		Linkname: "../extra-file",
+	}
+	if err := tw.WriteHeader(h); err != nil {
+		t.Fatalf("failed to write header: %v", err)
+	}
+	// my-pkg/broken-link
+	h = &tar.Header{
+		Name:     "my-pkg/broken-link",
+		Mode:     int64(0644),
+		Typeflag: tar.TypeSymlink,
+		Linkname: "../../global-dep",
+	}
+	if err := tw.WriteHeader(h); err != nil {
+		t.Fatalf("failed to write header: %v", err)
+	}
+	// extra-file
+	contents = []byte("extra-file-contents")
+	h = &tar.Header{
+		Name:     "extra-file",
+		Mode:     int64(0644),
+		Typeflag: tar.TypeReg,
+		Size:     int64(len(contents)),
+	}
+	if err := tw.WriteHeader(h); err != nil {
+		t.Fatalf("failed to write header: %v", err)
+	}
+	if _, err := tw.Write(contents); err != nil {
+		t.Fatalf("failed to write file: %v", err)
+	}
+
+	return buf
+}
+
+func makeInvalidTar(t *testing.T) *bytes.Buffer {
+	// contains a single file that traverses out
+	// ../some-file
+
+	t.Helper()
+	buf := &bytes.Buffer{}
+	zw := zstd.NewWriter(buf)
+	defer func() {
+		if err := zw.Close(); err != nil {
+			t.Fatalf("failed to close zstd: %v", err)
+		}
+	}()
+	tw := tar.NewWriter(zw)
+	defer func() {
+		if err := tw.Close(); err != nil {
+			t.Fatalf("failed to close tar: %v", err)
+		}
+	}()
+
+	// ../some-file
+	contents := []byte("some-file-contents")
+	h := &tar.Header{
+		Name:     "../some-file",
+		Mode:     int64(0644),
+		Typeflag: tar.TypeReg,
+		Size:     int64(len(contents)),
+	}
+	if err := tw.WriteHeader(h); err != nil {
+		t.Fatalf("failed to write header: %v", err)
+	}
+	if _, err := tw.Write(contents); err != nil {
+		t.Fatalf("failed to write file: %v", err)
+	}
+	return buf
+}
+
+func TestRestoreTar(t *testing.T) {
+	root := fs.AbsoluteSystemPathFromUpstream(t.TempDir())
+
+	tar := makeValidTar(t)
+
+	expectedFiles := []turbopath.AnchoredSystemPath{
+
turbopath.AnchoredUnixPath("extra-file").ToSystemPath(), + turbopath.AnchoredUnixPath("my-pkg/").ToSystemPath(), + turbopath.AnchoredUnixPath("my-pkg/some-file").ToSystemPath(), + turbopath.AnchoredUnixPath("my-pkg/link-to-extra-file").ToSystemPath(), + turbopath.AnchoredUnixPath("my-pkg/broken-link").ToSystemPath(), + } + files, err := restoreTar(root, tar) + assert.NilError(t, err, "readTar") + + expectedSet := make(util.Set) + for _, file := range expectedFiles { + expectedSet.Add(file.ToString()) + } + gotSet := make(util.Set) + for _, file := range files { + gotSet.Add(file.ToString()) + } + extraFiles := gotSet.Difference(expectedSet) + if extraFiles.Len() > 0 { + t.Errorf("got extra files: %v", extraFiles.UnsafeListOfStrings()) + } + missingFiles := expectedSet.Difference(gotSet) + if missingFiles.Len() > 0 { + t.Errorf("missing expected files: %v", missingFiles.UnsafeListOfStrings()) + } + + // Verify file contents + extraFile := root.UntypedJoin("extra-file") + contents, err := extraFile.ReadFile() + assert.NilError(t, err, "ReadFile") + assert.DeepEqual(t, contents, []byte("extra-file-contents")) + + someFile := root.UntypedJoin("my-pkg", "some-file") + contents, err = someFile.ReadFile() + assert.NilError(t, err, "ReadFile") + assert.DeepEqual(t, contents, []byte("some-file-contents")) +} + +func TestRestoreInvalidTar(t *testing.T) { + root := fs.AbsoluteSystemPathFromUpstream(t.TempDir()) + expectedContents := []byte("important-data") + someFile := root.UntypedJoin("some-file") + err := someFile.WriteFile(expectedContents, 0644) + assert.NilError(t, err, "WriteFile") + + tar := makeInvalidTar(t) + // use a child directory so that blindly untarring will squash the file + // that we just wrote above. + repoRoot := root.UntypedJoin("repo") + _, err = restoreTar(repoRoot, tar) + if err == nil { + t.Error("expected error untarring invalid tar") + } + + contents, err := someFile.ReadFile() + assert.NilError(t, err, "ReadFile") + assert.Equal(t, string(contents), string(expectedContents), "expected to not overwrite file") +} + +// Note that testing Put will require mocking the filesystem and is not currently the most +// interesting test. The current implementation directly returns the error from PutArtifact. +// We should still add the test once feasible to avoid future breakage. 
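
Until that lands, here is a minimal sketch of what such a test could look like, assuming a hypothetical recordingClient that only captures the PutArtifact arguments; everything else reuses declarations and imports already present in this file:

	// recordingClient is a hypothetical stand-in; it is not part of this patch.
	type recordingClient struct {
		errorResp // promotes the no-op FetchArtifact / ArtifactExists / GetTeamID
		gotHash   string
		gotBody   []byte
	}

	func (rc *recordingClient) PutArtifact(hash string, body []byte, duration int, tag string) error {
		rc.gotHash = hash
		rc.gotBody = body
		return nil
	}

	func TestPutRecordsArtifact(t *testing.T) {
		root := fs.AbsoluteSystemPathFromUpstream(t.TempDir())
		rc := &recordingClient{}
		cache := &httpCache{
			client:         rc,
			requestLimiter: make(limiter, 20),
			signerVerifier: &ArtifactSignatureAuthentication{enabled: false},
			repoRoot:       root,
		}
		// Even an empty file list produces a valid (non-empty) zstd-compressed tar body.
		err := cache.Put(root, "some-hash", 0, nil)
		assert.NilError(t, err, "Put")
		if rc.gotHash != "some-hash" {
			t.Errorf("PutArtifact hash got %v, want some-hash", rc.gotHash)
		}
		if len(rc.gotBody) == 0 {
			t.Error("expected a non-empty artifact body")
		}
	}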
diff --git a/cli/internal/cache/cache_noop.go b/cli/internal/cache/cache_noop.go new file mode 100644 index 0000000..80a3c23 --- /dev/null +++ b/cli/internal/cache/cache_noop.go @@ -0,0 +1,23 @@ +package cache + +import "github.com/vercel/turbo/cli/internal/turbopath" + +type noopCache struct{} + +func newNoopCache() *noopCache { + return &noopCache{} +} + +func (c *noopCache) Put(_ turbopath.AbsoluteSystemPath, _ string, _ int, _ []turbopath.AnchoredSystemPath) error { + return nil +} +func (c *noopCache) Fetch(_ turbopath.AbsoluteSystemPath, _ string, _ []string) (ItemStatus, []turbopath.AnchoredSystemPath, int, error) { + return ItemStatus{Local: false, Remote: false}, nil, 0, nil +} +func (c *noopCache) Exists(_ string) ItemStatus { + return ItemStatus{} +} + +func (c *noopCache) Clean(_ turbopath.AbsoluteSystemPath) {} +func (c *noopCache) CleanAll() {} +func (c *noopCache) Shutdown() {} diff --git a/cli/internal/cache/cache_signature_authentication.go b/cli/internal/cache/cache_signature_authentication.go new file mode 100644 index 0000000..f9fe4c0 --- /dev/null +++ b/cli/internal/cache/cache_signature_authentication.go @@ -0,0 +1,88 @@ +// Adapted from https://github.com/thought-machine/please +// Copyright Thought Machine, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +package cache + +import ( + "crypto/hmac" + "crypto/sha256" + "encoding/base64" + "encoding/json" + "errors" + "fmt" + "hash" + "os" +) + +type ArtifactSignatureAuthentication struct { + teamId string + enabled bool +} + +func (asa *ArtifactSignatureAuthentication) isEnabled() bool { + return asa.enabled +} + +// If the secret key is not found or the secret key length is 0, an error is returned +// Preference is given to the environment specified secret key. +func (asa *ArtifactSignatureAuthentication) secretKey() ([]byte, error) { + secret := os.Getenv("TURBO_REMOTE_CACHE_SIGNATURE_KEY") + if len(secret) == 0 { + return nil, errors.New("signature secret key not found. 
You must specify a secret key in the TURBO_REMOTE_CACHE_SIGNATURE_KEY environment variable")
+	}
+	return []byte(secret), nil
+}
+
+func (asa *ArtifactSignatureAuthentication) generateTag(hash string, artifactBody []byte) (string, error) {
+	tag, err := asa.getTagGenerator(hash)
+	if err != nil {
+		return "", err
+	}
+	tag.Write(artifactBody)
+	return base64.StdEncoding.EncodeToString(tag.Sum(nil)), nil
+}
+
+func (asa *ArtifactSignatureAuthentication) getTagGenerator(hash string) (hash.Hash, error) {
+	teamId := asa.teamId
+	secret, err := asa.secretKey()
+	if err != nil {
+		return nil, err
+	}
+	artifactMetadata := &struct {
+		Hash   string `json:"hash"`
+		TeamId string `json:"teamId"`
+	}{
+		Hash:   hash,
+		TeamId: teamId,
+	}
+	metadata, err := json.Marshal(artifactMetadata)
+	if err != nil {
+		return nil, err
+	}
+
+	// TODO(Gaspar) Support additional signing algorithms here
+	h := hmac.New(sha256.New, secret)
+	h.Write(metadata)
+	return h, nil
+}
+
+func (asa *ArtifactSignatureAuthentication) validate(hash string, artifactBody []byte, expectedTag string) (bool, error) {
+	computedTag, err := asa.generateTag(hash, artifactBody)
+	if err != nil {
+		return false, fmt.Errorf("failed to verify artifact tag: %w", err)
+	}
+	return hmac.Equal([]byte(computedTag), []byte(expectedTag)), nil
+}
+
+type StreamValidator struct {
+	currentHash hash.Hash
+}
+
+func (sv *StreamValidator) Validate(expectedTag string) bool {
+	computedTag := base64.StdEncoding.EncodeToString(sv.currentHash.Sum(nil))
+	return hmac.Equal([]byte(computedTag), []byte(expectedTag))
+}
+
+func (sv *StreamValidator) CurrentValue() string {
+	return base64.StdEncoding.EncodeToString(sv.currentHash.Sum(nil))
+}
diff --git a/cli/internal/cache/cache_signature_authentication_test.go b/cli/internal/cache/cache_signature_authentication_test.go
new file mode 100644
index 0000000..7f3f865
--- /dev/null
+++ b/cli/internal/cache/cache_signature_authentication_test.go
@@ -0,0 +1,195 @@
+// Adapted from https://github.com/thought-machine/please
+// Copyright Thought Machine, Inc. or its affiliates. All Rights Reserved.
+// SPDX-License-Identifier: Apache-2.0 +package cache + +import ( + "crypto/hmac" + "crypto/sha256" + "encoding/base64" + "encoding/json" + "testing" + + "github.com/stretchr/testify/assert" +) + +func Test_SecretKeySuccess(t *testing.T) { + teamId := "team_someid" + secretKeyEnvName := "TURBO_REMOTE_CACHE_SIGNATURE_KEY" + secretKeyEnvValue := "my-secret-key-env" + t.Setenv(secretKeyEnvName, secretKeyEnvValue) + + cases := []struct { + name string + asa *ArtifactSignatureAuthentication + expectedSecretKey string + expectedSecretKeyError bool + }{ + { + name: "Accepts secret key", + asa: &ArtifactSignatureAuthentication{ + teamId: teamId, + enabled: true, + }, + expectedSecretKey: secretKeyEnvValue, + expectedSecretKeyError: false, + }, + } + + for _, tc := range cases { + t.Run(tc.name, func(t *testing.T) { + secretKey, err := tc.asa.secretKey() + if tc.expectedSecretKeyError { + assert.Error(t, err) + } else { + assert.NoError(t, err) + assert.Equal(t, tc.expectedSecretKey, string(secretKey)) + } + }) + } +} + +func Test_SecretKeyErrors(t *testing.T) { + teamId := "team_someid" + + // Env secret key TURBO_REMOTE_CACHE_SIGNATURE_KEY is not set + + cases := []struct { + name string + asa *ArtifactSignatureAuthentication + expectedSecretKey string + expectedSecretKeyError bool + }{ + { + name: "Secret key not defined errors", + asa: &ArtifactSignatureAuthentication{ + teamId: teamId, + enabled: true, + }, + expectedSecretKey: "", + expectedSecretKeyError: true, + }, + { + name: "Secret key is empty errors", + asa: &ArtifactSignatureAuthentication{ + teamId: teamId, + enabled: true, + }, + expectedSecretKey: "", + expectedSecretKeyError: true, + }, + } + + for _, tc := range cases { + t.Run(tc.name, func(t *testing.T) { + secretKey, err := tc.asa.secretKey() + if tc.expectedSecretKeyError { + assert.Error(t, err) + } else { + assert.NoError(t, err) + assert.Equal(t, tc.expectedSecretKey, string(secretKey)) + } + }) + } +} + +func Test_GenerateTagAndValidate(t *testing.T) { + teamId := "team_someid" + hash := "the-artifact-hash" + artifactBody := []byte("the artifact body as bytes") + secretKeyEnvName := "TURBO_REMOTE_CACHE_SIGNATURE_KEY" + secretKeyEnvValue := "my-secret-key-env" + t.Setenv(secretKeyEnvName, secretKeyEnvValue) + + cases := []struct { + name string + asa *ArtifactSignatureAuthentication + expectedTagMatches string + expectedTagDoesNotMatch string + }{ + { + name: "Uses hash to generate tag", + asa: &ArtifactSignatureAuthentication{ + teamId: teamId, + enabled: true, + }, + expectedTagMatches: testUtilGetHMACTag(hash, teamId, artifactBody, secretKeyEnvValue), + expectedTagDoesNotMatch: testUtilGetHMACTag("wrong-hash", teamId, artifactBody, secretKeyEnvValue), + }, + { + name: "Uses teamId to generate tag", + asa: &ArtifactSignatureAuthentication{ + teamId: teamId, + enabled: true, + }, + expectedTagMatches: testUtilGetHMACTag(hash, teamId, artifactBody, secretKeyEnvValue), + expectedTagDoesNotMatch: testUtilGetHMACTag(hash, "wrong-teamId", artifactBody, secretKeyEnvValue), + }, + { + name: "Uses artifactBody to generate tag", + asa: &ArtifactSignatureAuthentication{ + teamId: teamId, + enabled: true, + }, + expectedTagMatches: testUtilGetHMACTag(hash, teamId, artifactBody, secretKeyEnvValue), + expectedTagDoesNotMatch: testUtilGetHMACTag(hash, teamId, []byte("wrong-artifact-body"), secretKeyEnvValue), + }, + { + name: "Uses secret to generate tag", + asa: &ArtifactSignatureAuthentication{ + teamId: teamId, + enabled: true, + }, + expectedTagMatches: testUtilGetHMACTag(hash, 
teamId, artifactBody, secretKeyEnvValue), + expectedTagDoesNotMatch: testUtilGetHMACTag(hash, teamId, artifactBody, "wrong-secret"), + }, + } + + for _, tc := range cases { + t.Run(tc.name, func(t *testing.T) { + tag, err := tc.asa.generateTag(hash, artifactBody) + assert.NoError(t, err) + + // validates the tag + assert.Equal(t, tc.expectedTagMatches, tag) + isValid, err := tc.asa.validate(hash, artifactBody, tc.expectedTagMatches) + assert.NoError(t, err) + assert.True(t, isValid) + + // does not validate the tag + assert.NotEqual(t, tc.expectedTagDoesNotMatch, tag) + isValid, err = tc.asa.validate(hash, artifactBody, tc.expectedTagDoesNotMatch) + assert.NoError(t, err) + assert.False(t, isValid) + + }) + } +} + +// Test utils + +// Return the Base64 encoded HMAC given the artifact metadata and artifact body +func testUtilGetHMACTag(hash string, teamId string, artifactBody []byte, secret string) string { + artifactMetadata := &struct { + Hash string `json:"hash"` + TeamId string `json:"teamId"` + }{ + Hash: hash, + TeamId: teamId, + } + metadata, _ := json.Marshal(artifactMetadata) + h := hmac.New(sha256.New, []byte(secret)) + h.Write(metadata) + h.Write(artifactBody) + return base64.StdEncoding.EncodeToString(h.Sum(nil)) +} + +func Test_Utils(t *testing.T) { + teamId := "team_someid" + secret := "my-secret" + hash := "the-artifact-hash" + artifactBody := []byte("the artifact body as bytes") + testTag := testUtilGetHMACTag(hash, teamId, artifactBody, secret) + expectedTag := "9Fu8YniPZ2dEBolTPQoNlFWG0LNMW8EXrBsRmf/fEHk=" + assert.True(t, hmac.Equal([]byte(testTag), []byte(expectedTag))) +} diff --git a/cli/internal/cache/cache_test.go b/cli/internal/cache/cache_test.go new file mode 100644 index 0000000..3f17877 --- /dev/null +++ b/cli/internal/cache/cache_test.go @@ -0,0 +1,318 @@ +package cache + +import ( + "net/http" + "reflect" + "sync/atomic" + "testing" + + "github.com/vercel/turbo/cli/internal/analytics" + "github.com/vercel/turbo/cli/internal/fs" + "github.com/vercel/turbo/cli/internal/turbopath" + "github.com/vercel/turbo/cli/internal/util" +) + +type testCache struct { + disabledErr *util.CacheDisabledError + entries map[string][]turbopath.AnchoredSystemPath +} + +func (tc *testCache) Fetch(_ turbopath.AbsoluteSystemPath, hash string, _ []string) (ItemStatus, []turbopath.AnchoredSystemPath, int, error) { + if tc.disabledErr != nil { + return ItemStatus{}, nil, 0, tc.disabledErr + } + foundFiles, ok := tc.entries[hash] + if ok { + duration := 5 + return ItemStatus{Local: true}, foundFiles, duration, nil + } + return ItemStatus{}, nil, 0, nil +} + +func (tc *testCache) Exists(hash string) ItemStatus { + if tc.disabledErr != nil { + return ItemStatus{} + } + _, ok := tc.entries[hash] + if ok { + return ItemStatus{Local: true} + } + return ItemStatus{} +} + +func (tc *testCache) Put(_ turbopath.AbsoluteSystemPath, hash string, _ int, files []turbopath.AnchoredSystemPath) error { + if tc.disabledErr != nil { + return tc.disabledErr + } + tc.entries[hash] = files + return nil +} + +func (tc *testCache) Clean(_ turbopath.AbsoluteSystemPath) {} +func (tc *testCache) CleanAll() {} +func (tc *testCache) Shutdown() {} + +func newEnabledCache() *testCache { + return &testCache{ + entries: make(map[string][]turbopath.AnchoredSystemPath), + } +} + +func newDisabledCache() *testCache { + return &testCache{ + disabledErr: &util.CacheDisabledError{ + Status: util.CachingStatusDisabled, + Message: "remote caching is disabled", + }, + } +} + +func TestPutCachingDisabled(t *testing.T) { + 
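// The scenario under test: one of the multiplexed caches reports that remote
+	// caching is disabled. The multiplexer should drop that cache, fire
+	// onCacheRemoved exactly once, and keep serving from the remaining caches.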
disabledCache := newDisabledCache()
+	caches := []Cache{
+		newEnabledCache(),
+		disabledCache,
+		newEnabledCache(),
+		newEnabledCache(),
+	}
+	var removeCalled uint64
+	mplex := &cacheMultiplexer{
+		caches: caches,
+		onCacheRemoved: func(cache Cache, err error) {
+			atomic.AddUint64(&removeCalled, 1)
+		},
+	}
+
+	err := mplex.Put("unused-target", "some-hash", 5, []turbopath.AnchoredSystemPath{"a-file"})
+	if err != nil {
+		// don't leak the cache removal
+		t.Errorf("Put got error %v, want <nil>", err)
+	}
+
+	removes := atomic.LoadUint64(&removeCalled)
+	if removes != 1 {
+		t.Errorf("removes count: %v, want 1", removes)
+	}
+
+	mplex.mu.RLock()
+	if len(mplex.caches) != 3 {
+		t.Errorf("found %v caches, expected to have 3 after one was removed", len(mplex.caches))
+	}
+	for _, cache := range mplex.caches {
+		if cache == disabledCache {
+			t.Error("found disabled cache, expected it to be removed")
+		}
+	}
+	mplex.mu.RUnlock()
+
+	// subsequent Fetch should still work
+	cacheStatus, _, _, err := mplex.Fetch("unused-target", "some-hash", []string{"unused", "files"})
+	if err != nil {
+		t.Errorf("got error fetching files: %v", err)
+	}
+	hit := cacheStatus.Local || cacheStatus.Remote
+	if !hit {
+		t.Error("failed to find previously stored files")
+	}
+
+	removes = atomic.LoadUint64(&removeCalled)
+	if removes != 1 {
+		t.Errorf("removes count: %v, want 1", removes)
+	}
+}
+
+func TestExists(t *testing.T) {
+	caches := []Cache{
+		newEnabledCache(),
+	}
+
+	mplex := &cacheMultiplexer{
+		caches: caches,
+	}
+
+	itemStatus := mplex.Exists("some-hash")
+	if itemStatus.Local {
+		t.Error("did not expect file to exist")
+	}
+
+	err := mplex.Put("unused-target", "some-hash", 5, []turbopath.AnchoredSystemPath{"a-file"})
+	if err != nil {
+		// don't leak the cache removal
+		t.Errorf("Put got error %v, want <nil>", err)
+	}
+
+	itemStatus = mplex.Exists("some-hash")
+	if !itemStatus.Local {
+		t.Error("failed to find previously stored files")
+	}
+}
+
+type fakeClient struct{}
+
+// FetchArtifact implements client
+func (*fakeClient) FetchArtifact(hash string) (*http.Response, error) {
+	panic("unimplemented")
+}
+
+func (*fakeClient) ArtifactExists(hash string) (*http.Response, error) {
+	panic("unimplemented")
+}
+
+// GetTeamID implements client
+func (*fakeClient) GetTeamID() string {
+	return "fake-team-id"
+}
+
+// PutArtifact implements client
+func (*fakeClient) PutArtifact(hash string, body []byte, duration int, tag string) error {
+	panic("unimplemented")
+}
+
+var _ client = &fakeClient{}
+
+func TestFetchCachingDisabled(t *testing.T) {
+	disabledCache := newDisabledCache()
+	caches := []Cache{
+		newEnabledCache(),
+		disabledCache,
+		newEnabledCache(),
+		newEnabledCache(),
+	}
+	var removeCalled uint64
+	mplex := &cacheMultiplexer{
+		caches: caches,
+		onCacheRemoved: func(cache Cache, err error) {
+			atomic.AddUint64(&removeCalled, 1)
+		},
+	}
+
+	cacheStatus, _, _, err := mplex.Fetch("unused-target", "some-hash", []string{"unused", "files"})
+	if err != nil {
+		// don't leak the cache removal
+		t.Errorf("Fetch got error %v, want <nil>", err)
+	}
+	hit := cacheStatus.Local || cacheStatus.Remote
+	if hit {
+		t.Error("hit on empty cache, expected miss")
+	}
+
+	removes := atomic.LoadUint64(&removeCalled)
+	if removes != 1 {
+		t.Errorf("removes count: %v, want 1", removes)
+	}
+
+	mplex.mu.RLock()
+	if len(mplex.caches) != 3 {
+		t.Errorf("found %v caches, expected to have 3 after one was removed", len(mplex.caches))
+	}
+	for _, cache := range mplex.caches {
+		if cache == disabledCache {
+			t.Error("found disabled
cache, expected it to be removed") + } + } + mplex.mu.RUnlock() +} + +type nullRecorder struct{} + +func (nullRecorder) LogEvent(analytics.EventPayload) {} + +func TestNew(t *testing.T) { + // Test will bomb if this fails, no need to specially handle the error + repoRoot := fs.AbsoluteSystemPathFromUpstream(t.TempDir()) + type args struct { + opts Opts + recorder analytics.Recorder + onCacheRemoved OnCacheRemoved + client fakeClient + } + tests := []struct { + name string + args args + want Cache + wantErr bool + }{ + { + name: "With no caches configured, new returns a noopCache and an error", + args: args{ + opts: Opts{ + SkipFilesystem: true, + SkipRemote: true, + }, + recorder: &nullRecorder{}, + onCacheRemoved: func(Cache, error) {}, + }, + want: &noopCache{}, + wantErr: true, + }, + { + name: "With just httpCache configured, new returns an httpCache and a noopCache", + args: args{ + opts: Opts{ + SkipFilesystem: true, + RemoteCacheOpts: fs.RemoteCacheOptions{ + Signature: true, + }, + }, + recorder: &nullRecorder{}, + onCacheRemoved: func(Cache, error) {}, + }, + want: &cacheMultiplexer{ + caches: []Cache{&httpCache{}, &noopCache{}}, + }, + wantErr: false, + }, + { + name: "With just fsCache configured, new returns only an fsCache", + args: args{ + opts: Opts{ + SkipRemote: true, + }, + recorder: &nullRecorder{}, + onCacheRemoved: func(Cache, error) {}, + }, + want: &fsCache{}, + }, + { + name: "With both configured, new returns an fsCache and httpCache", + args: args{ + opts: Opts{ + RemoteCacheOpts: fs.RemoteCacheOptions{ + Signature: true, + }, + }, + recorder: &nullRecorder{}, + onCacheRemoved: func(Cache, error) {}, + }, + want: &cacheMultiplexer{ + caches: []Cache{&fsCache{}, &httpCache{}}, + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := New(tt.args.opts, repoRoot, &tt.args.client, tt.args.recorder, tt.args.onCacheRemoved) + if (err != nil) != tt.wantErr { + t.Errorf("New() error = %v, wantErr %v", err, tt.wantErr) + return + } + switch multiplexer := got.(type) { + case *cacheMultiplexer: + want := tt.want.(*cacheMultiplexer) + for i := range multiplexer.caches { + if reflect.TypeOf(multiplexer.caches[i]) != reflect.TypeOf(want.caches[i]) { + t.Errorf("New() = %v, want %v", reflect.TypeOf(multiplexer.caches[i]), reflect.TypeOf(want.caches[i])) + } + } + case *fsCache: + if reflect.TypeOf(got) != reflect.TypeOf(tt.want) { + t.Errorf("New() = %v, want %v", reflect.TypeOf(got), reflect.TypeOf(tt.want)) + } + case *noopCache: + if reflect.TypeOf(got) != reflect.TypeOf(tt.want) { + t.Errorf("New() = %v, want %v", reflect.TypeOf(got), reflect.TypeOf(tt.want)) + } + } + }) + } +} diff --git a/cli/internal/cacheitem/cacheitem.go b/cli/internal/cacheitem/cacheitem.go new file mode 100644 index 0000000..2fb2c3b --- /dev/null +++ b/cli/internal/cacheitem/cacheitem.go @@ -0,0 +1,76 @@ +// Package cacheitem is an abstraction over the creation and restoration of a cache +package cacheitem + +import ( + "archive/tar" + "bufio" + "crypto/sha512" + "errors" + "io" + "os" + + "github.com/vercel/turbo/cli/internal/turbopath" +) + +var ( + errMissingSymlinkTarget = errors.New("symlink restoration is delayed") + errCycleDetected = errors.New("links in the cache are cyclic") + errTraversal = errors.New("tar attempts to write outside of directory") + errNameMalformed = errors.New("file name is malformed") + errNameWindowsUnsafe = errors.New("file name is not Windows-safe") + errUnsupportedFileType = errors.New("attempted to restore unsupported file 
type") +) + +// CacheItem is a `tar` utility with a little bit extra. +type CacheItem struct { + // Path is the location on disk for the CacheItem. + Path turbopath.AbsoluteSystemPath + // Anchor is the position on disk at which the CacheItem will be restored. + Anchor turbopath.AbsoluteSystemPath + + // For creation. + tw *tar.Writer + zw io.WriteCloser + fileBuffer *bufio.Writer + handle *os.File + compressed bool +} + +// Close any open pipes +func (ci *CacheItem) Close() error { + if ci.tw != nil { + if err := ci.tw.Close(); err != nil { + return err + } + } + + if ci.zw != nil { + if err := ci.zw.Close(); err != nil { + return err + } + } + + if ci.fileBuffer != nil { + if err := ci.fileBuffer.Flush(); err != nil { + return err + } + } + + if ci.handle != nil { + if err := ci.handle.Close(); err != nil { + return err + } + } + + return nil +} + +// GetSha returns the SHA-512 hash for the CacheItem. +func (ci *CacheItem) GetSha() ([]byte, error) { + sha := sha512.New() + if _, err := io.Copy(sha, ci.handle); err != nil { + return nil, err + } + + return sha.Sum(nil), nil +} diff --git a/cli/internal/cacheitem/create.go b/cli/internal/cacheitem/create.go new file mode 100644 index 0000000..ce5b1c8 --- /dev/null +++ b/cli/internal/cacheitem/create.go @@ -0,0 +1,119 @@ +package cacheitem + +import ( + "archive/tar" + "bufio" + "io" + "os" + "strings" + "time" + + "github.com/DataDog/zstd" + + "github.com/moby/sys/sequential" + "github.com/vercel/turbo/cli/internal/tarpatch" + "github.com/vercel/turbo/cli/internal/turbopath" +) + +// Create makes a new CacheItem at the specified path. +func Create(path turbopath.AbsoluteSystemPath) (*CacheItem, error) { + handle, err := path.OpenFile(os.O_WRONLY|os.O_CREATE|os.O_TRUNC|os.O_APPEND, 0644) + if err != nil { + return nil, err + } + + cacheItem := &CacheItem{ + Path: path, + handle: handle, + compressed: strings.HasSuffix(path.ToString(), ".zst"), + } + + cacheItem.init() + return cacheItem, nil +} + +// init prepares the CacheItem for writing. +// Wires all the writers end-to-end: +// tar.Writer -> zstd.Writer -> fileBuffer -> file +func (ci *CacheItem) init() { + fileBuffer := bufio.NewWriterSize(ci.handle, 2^20) // Flush to disk in 1mb chunks. + + var tw *tar.Writer + if ci.compressed { + zw := zstd.NewWriter(fileBuffer) + tw = tar.NewWriter(zw) + ci.zw = zw + } else { + tw = tar.NewWriter(fileBuffer) + } + + ci.tw = tw + ci.fileBuffer = fileBuffer +} + +// AddFile adds a user-cached item to the tar. +func (ci *CacheItem) AddFile(fsAnchor turbopath.AbsoluteSystemPath, filePath turbopath.AnchoredSystemPath) error { + // Calculate the fully-qualified path to the file to read it. + sourcePath := filePath.RestoreAnchor(fsAnchor) + + // We grab the FileInfo which tar.FileInfoHeader accepts. + fileInfo, lstatErr := sourcePath.Lstat() + if lstatErr != nil { + return lstatErr + } + + // Determine if we need to populate the additional link argument to tar.FileInfoHeader. + var link string + if fileInfo.Mode()&os.ModeSymlink != 0 { + linkTarget, readlinkErr := sourcePath.Readlink() + if readlinkErr != nil { + return readlinkErr + } + link = linkTarget + } + + // Normalize the path within the cache. + cacheDestinationName := filePath.ToUnixPath() + + // Generate the the header. + // We do not use header generation from stdlib because it can throw an error. 
+ header, headerErr := tarpatch.FileInfoHeader(cacheDestinationName, fileInfo, link) + if headerErr != nil { + return headerErr + } + + // Throw an error if trying to create a cache that contains a type we don't support. + if (header.Typeflag != tar.TypeReg) && (header.Typeflag != tar.TypeDir) && (header.Typeflag != tar.TypeSymlink) { + return errUnsupportedFileType + } + + // Consistent creation. + header.Uid = 0 + header.Gid = 0 + header.AccessTime = time.Unix(0, 0) + header.ModTime = time.Unix(0, 0) + header.ChangeTime = time.Unix(0, 0) + + // Always write the header. + if err := ci.tw.WriteHeader(header); err != nil { + return err + } + + // If there is a body to be written, do so. + if header.Typeflag == tar.TypeReg && header.Size > 0 { + // Windows has a distinct "sequential read" opening mode. + // We use a library that will switch to this mode for Windows. + sourceFile, sourceErr := sequential.OpenFile(sourcePath.ToString(), os.O_RDONLY, 0777) + if sourceErr != nil { + return sourceErr + } + + if _, err := io.Copy(ci.tw, sourceFile); err != nil { + return err + } + + return sourceFile.Close() + } + + return nil +} diff --git a/cli/internal/cacheitem/create_test.go b/cli/internal/cacheitem/create_test.go new file mode 100644 index 0000000..97eeb01 --- /dev/null +++ b/cli/internal/cacheitem/create_test.go @@ -0,0 +1,205 @@ +package cacheitem + +import ( + "encoding/hex" + "io/fs" + "os" + "runtime" + "testing" + + "github.com/vercel/turbo/cli/internal/turbopath" + "gotest.tools/v3/assert" +) + +type createFileDefinition struct { + Path turbopath.AnchoredSystemPath + Linkname string + fs.FileMode +} + +func createEntry(t *testing.T, anchor turbopath.AbsoluteSystemPath, fileDefinition createFileDefinition) error { + t.Helper() + if fileDefinition.FileMode.IsDir() { + return createDir(t, anchor, fileDefinition) + } else if fileDefinition.FileMode&os.ModeSymlink != 0 { + return createSymlink(t, anchor, fileDefinition) + } else if fileDefinition.FileMode&os.ModeNamedPipe != 0 { + return createFifo(t, anchor, fileDefinition) + } else { + return createFile(t, anchor, fileDefinition) + } +} + +func createDir(t *testing.T, anchor turbopath.AbsoluteSystemPath, fileDefinition createFileDefinition) error { + t.Helper() + path := fileDefinition.Path.RestoreAnchor(anchor) + mkdirAllErr := path.MkdirAllMode(fileDefinition.FileMode & 0777) + assert.NilError(t, mkdirAllErr, "MkdirAll") + return mkdirAllErr +} +func createFile(t *testing.T, anchor turbopath.AbsoluteSystemPath, fileDefinition createFileDefinition) error { + t.Helper() + path := fileDefinition.Path.RestoreAnchor(anchor) + writeErr := path.WriteFile([]byte("file contents"), fileDefinition.FileMode&0777) + assert.NilError(t, writeErr, "WriteFile") + return writeErr +} +func createSymlink(t *testing.T, anchor turbopath.AbsoluteSystemPath, fileDefinition createFileDefinition) error { + t.Helper() + path := fileDefinition.Path.RestoreAnchor(anchor) + symlinkErr := path.Symlink(fileDefinition.Linkname) + assert.NilError(t, symlinkErr, "Symlink") + lchmodErr := path.Lchmod(fileDefinition.FileMode & 0777) + assert.NilError(t, lchmodErr, "Lchmod") + return symlinkErr +} + +func TestCreate(t *testing.T) { + tests := []struct { + name string + files []createFileDefinition + wantDarwin string + wantUnix string + wantWindows string + wantErr error + }{ + { + name: "hello world", + files: []createFileDefinition{ + { + Path: turbopath.AnchoredSystemPath("hello world.txt"), + FileMode: 0 | 0644, + }, + }, + wantDarwin: 
"4f39f1cab23906f3b89f313392ef7c26f2586e1c15fa6b577cce640c4781d082817927b4875a5413bc23e1248f0b198218998d70e7336e8b1244542ba446ca07", + wantUnix: "4f39f1cab23906f3b89f313392ef7c26f2586e1c15fa6b577cce640c4781d082817927b4875a5413bc23e1248f0b198218998d70e7336e8b1244542ba446ca07", + wantWindows: "e304d1ba8c51209f97bd11dabf27ca06996b70a850db592343942c49480de47bcbb4b7131fb3dd4d7564021d3bc0e648919e4876572b46ac1da97fca92b009c5", + }, + { + name: "links", + files: []createFileDefinition{ + { + Path: turbopath.AnchoredSystemPath("one"), + Linkname: "two", + FileMode: 0 | os.ModeSymlink | 0777, + }, + { + Path: turbopath.AnchoredSystemPath("two"), + Linkname: "three", + FileMode: 0 | os.ModeSymlink | 0777, + }, + { + Path: turbopath.AnchoredSystemPath("three"), + Linkname: "real", + FileMode: 0 | os.ModeSymlink | 0777, + }, + { + Path: turbopath.AnchoredSystemPath("real"), + FileMode: 0 | 0644, + }, + }, + wantDarwin: "07278fdf37db4b212352367f391377bd6bac8f361dd834ae5522d809539bcf3b34d046873c1b45876d7372251446bb12c32f9fa9824914c4a1a01f6d7a206702", + wantUnix: "07278fdf37db4b212352367f391377bd6bac8f361dd834ae5522d809539bcf3b34d046873c1b45876d7372251446bb12c32f9fa9824914c4a1a01f6d7a206702", + wantWindows: "d4dac527e40860ee1ba3fdf2b9b12a1eba385050cf4f5877558dd531f0ecf2a06952fd5f88b852ad99e010943ed7b7f1437b727796369524e85f0c06f25d62c9", + }, + { + name: "subdirectory", + files: []createFileDefinition{ + { + Path: turbopath.AnchoredSystemPath("parent"), + FileMode: 0 | os.ModeDir | 0755, + }, + { + Path: turbopath.AnchoredSystemPath("parent/child"), + FileMode: 0 | 0644, + }, + }, + wantDarwin: "b513eea231daa84245d1d23d99fc398ccf17166ca49754ffbdcc1a3269cd75b7ad176a9c7095ff2481f71dca9fc350189747035f13d53b3a864e4fe35165233f", + wantUnix: "b513eea231daa84245d1d23d99fc398ccf17166ca49754ffbdcc1a3269cd75b7ad176a9c7095ff2481f71dca9fc350189747035f13d53b3a864e4fe35165233f", + wantWindows: "a8c3cba54e4dc214d3b21c3fa284d4032fe317d2f88943159efd5d16f3551ab53fae5c92ebf8acdd1bdb85d1238510b7938772cb11a0daa1b72b5e0f2700b5c7", + }, + { + name: "symlink permissions", + files: []createFileDefinition{ + { + Path: turbopath.AnchoredSystemPath("one"), + Linkname: "two", + FileMode: 0 | os.ModeSymlink | 0644, + }, + }, + wantDarwin: "3ea9d8a4581a0c2ba77557c72447b240c5ac622edcdac570a0bf597c276c2917b4ea73e6c373bbac593a480e396845651fa4b51e049531ff5d44c0adb807c2d9", + wantUnix: "99d953cbe1c0d8545e6f8382208fcefe14bcbefe39872f7b6310da14ac195b9a1b04b6d7b4b56f01a27216176193344a92488f99e124fcd68693f313f7137a1c", + wantWindows: "a4b1dc5c296f8ac4c9124727c1d84d70f72872c7bb4ced6d83ee312889e822baf1eaa72f88e624fb1aac4339d0a1f766ede77eabd2e4524eb26e89f883dc479d", + }, + { + name: "unsupported types error", + files: []createFileDefinition{ + { + Path: turbopath.AnchoredSystemPath("fifo"), + FileMode: 0 | os.ModeNamedPipe | 0644, + }, + }, + wantErr: errUnsupportedFileType, + }, + } + for _, tt := range tests { + getTestFunc := func(compressed bool) func(t *testing.T) { + return func(t *testing.T) { + inputDir := turbopath.AbsoluteSystemPath(t.TempDir()) + archiveDir := turbopath.AbsoluteSystemPath(t.TempDir()) + var archivePath turbopath.AbsoluteSystemPath + if compressed { + archivePath = turbopath.AnchoredSystemPath("out.tar.zst").RestoreAnchor(archiveDir) + } else { + archivePath = turbopath.AnchoredSystemPath("out.tar").RestoreAnchor(archiveDir) + } + + cacheItem, cacheCreateErr := Create(archivePath) + assert.NilError(t, cacheCreateErr, "Cache Create") + + for _, file := range tt.files { + createErr := createEntry(t, inputDir, file) + if 
createErr != nil {
+					assert.ErrorIs(t, createErr, tt.wantErr)
+					assert.NilError(t, cacheItem.Close(), "Close")
+					return
+				}
+
+				addFileError := cacheItem.AddFile(inputDir, file.Path)
+				if addFileError != nil {
+					assert.ErrorIs(t, addFileError, tt.wantErr)
+					assert.NilError(t, cacheItem.Close(), "Close")
+					return
+				}
+			}
+
+			assert.NilError(t, cacheItem.Close(), "Cache Close")
+
+			// We only check for repeatability on compressed caches.
+			if compressed {
+				openedCacheItem, openedCacheItemErr := Open(archivePath)
+				assert.NilError(t, openedCacheItemErr, "Cache Open")
+
+				// We actually only need to compare the generated SHA.
+				// That ensures we got the same output. (Effectively snapshots.)
+				// This must be called after `Close` because both `tar` and `zstd` have footers.
+				shaOne, shaOneErr := openedCacheItem.GetSha()
+				assert.NilError(t, shaOneErr, "GetSha")
+				snapshot := hex.EncodeToString(shaOne)
+
+				switch runtime.GOOS {
+				case "darwin":
+					assert.Equal(t, snapshot, tt.wantDarwin, "Got expected hash.")
+				case "windows":
+					assert.Equal(t, snapshot, tt.wantWindows, "Got expected hash.")
+				default:
+					assert.Equal(t, snapshot, tt.wantUnix, "Got expected hash.")
+				}
+				assert.NilError(t, openedCacheItem.Close(), "Close")
+			}
+		}
+	}
+	t.Run(tt.name, getTestFunc(false))
+	t.Run(tt.name+"zst", getTestFunc(true))
+	}
+}
diff --git a/cli/internal/cacheitem/create_unix_test.go b/cli/internal/cacheitem/create_unix_test.go
new file mode 100644
index 0000000..812d1eb
--- /dev/null
+++ b/cli/internal/cacheitem/create_unix_test.go
@@ -0,0 +1,20 @@
+//go:build darwin || linux
+// +build darwin linux
+
+package cacheitem
+
+import (
+	"syscall"
+	"testing"
+
+	"github.com/vercel/turbo/cli/internal/turbopath"
+	"gotest.tools/v3/assert"
+)
+
+func createFifo(t *testing.T, anchor turbopath.AbsoluteSystemPath, fileDefinition createFileDefinition) error {
+	t.Helper()
+	path := fileDefinition.Path.RestoreAnchor(anchor)
+	fifoErr := syscall.Mknod(path.ToString(), syscall.S_IFIFO|0666, 0)
+	assert.NilError(t, fifoErr, "FIFO")
+	return fifoErr
+}
diff --git a/cli/internal/cacheitem/create_windows_test.go b/cli/internal/cacheitem/create_windows_test.go
new file mode 100644
index 0000000..2cbb8b9
--- /dev/null
+++ b/cli/internal/cacheitem/create_windows_test.go
@@ -0,0 +1,14 @@
+//go:build windows
+// +build windows
+
+package cacheitem
+
+import (
+	"testing"
+
+	"github.com/vercel/turbo/cli/internal/turbopath"
+)
+
+func createFifo(t *testing.T, anchor turbopath.AbsoluteSystemPath, fileDefinition createFileDefinition) error {
+	return errUnsupportedFileType
+}
diff --git a/cli/internal/cacheitem/filepath.go b/cli/internal/cacheitem/filepath.go
new file mode 100644
index 0000000..4fd1681
--- /dev/null
+++ b/cli/internal/cacheitem/filepath.go
@@ -0,0 +1,162 @@
+// Copyright 2009 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package cacheitem
+
+import "os"
+
+const _separator = os.PathSeparator
+
+// A lazybuf is a lazily constructed path buffer.
+// It supports append, reading previously appended bytes,
+// and retrieving the final string. It does not allocate a buffer
+// to hold the output until that output diverges from s.
+type lazybuf struct { + path string + buf []byte + w int + volAndPath string + volLen int +} + +func (b *lazybuf) index(i int) byte { + if b.buf != nil { + return b.buf[i] + } + return b.path[i] +} + +func (b *lazybuf) append(c byte) { + if b.buf == nil { + if b.w < len(b.path) && b.path[b.w] == c { + b.w++ + return + } + b.buf = make([]byte, len(b.path)) + copy(b.buf, b.path[:b.w]) + } + b.buf[b.w] = c + b.w++ +} + +func (b *lazybuf) string() string { + if b.buf == nil { + return b.volAndPath[:b.volLen+b.w] + } + return b.volAndPath[:b.volLen] + string(b.buf[:b.w]) +} + +// Clean is extracted from stdlib and removes `FromSlash` processing +// of the stdlib version. +// +// Clean returns the shortest path name equivalent to path +// by purely lexical processing. It applies the following rules +// iteratively until no further processing can be done: +// +// 1. Replace multiple Separator elements with a single one. +// 2. Eliminate each . path name element (the current directory). +// 3. Eliminate each inner .. path name element (the parent directory) +// along with the non-.. element that precedes it. +// 4. Eliminate .. elements that begin a rooted path: +// that is, replace "/.." by "/" at the beginning of a path, +// assuming Separator is '/'. +// +// The returned path ends in a slash only if it represents a root directory, +// such as "/" on Unix or `C:\` on Windows. +// +// Finally, any occurrences of slash are replaced by Separator. +// +// If the result of this process is an empty string, Clean +// returns the string ".". +// +// See also Rob Pike, “Lexical File Names in Plan 9 or +// Getting Dot-Dot Right,” +// https://9p.io/sys/doc/lexnames.html +func Clean(path string) string { + originalPath := path + volLen := volumeNameLen(path) + path = path[volLen:] + if path == "" { + if volLen > 1 && originalPath[1] != ':' { + // should be UNC + // ORIGINAL: return FromSlash(originalPath) + return originalPath + } + return originalPath + "." + } + rooted := os.IsPathSeparator(path[0]) + + // Invariants: + // reading from path; r is index of next byte to process. + // writing to buf; w is index of next byte to write. + // dotdot is index in buf where .. must stop, either because + // it is the leading slash or it is a leading ../../.. prefix. + n := len(path) + out := lazybuf{path: path, volAndPath: originalPath, volLen: volLen} + r, dotdot := 0, 0 + if rooted { + out.append(_separator) + r, dotdot = 1, 1 + } + + for r < n { + switch { + case os.IsPathSeparator(path[r]): + // empty path element + r++ + case path[r] == '.' && r+1 == n: + // . element + r++ + case path[r] == '.' && os.IsPathSeparator(path[r+1]): + // ./ element + r++ + + for r < len(path) && os.IsPathSeparator(path[r]) { + r++ + } + if out.w == 0 && volumeNameLen(path[r:]) > 0 { + // When joining prefix "." and an absolute path on Windows, + // the prefix should not be removed. + out.append('.') + } + case path[r] == '.' && path[r+1] == '.' && (r+2 == n || os.IsPathSeparator(path[r+2])): + // .. element: remove to last separator + r += 2 + switch { + case out.w > dotdot: + // can backtrack + out.w-- + for out.w > dotdot && !os.IsPathSeparator(out.index(out.w)) { + out.w-- + } + case !rooted: + // cannot backtrack, but not rooted, so append .. element. + if out.w > 0 { + out.append(_separator) + } + out.append('.') + out.append('.') + dotdot = out.w + } + default: + // real path element. 
+ // add slash if needed + if rooted && out.w != 1 || !rooted && out.w != 0 { + out.append(_separator) + } + // copy element + for ; r < n && !os.IsPathSeparator(path[r]); r++ { + out.append(path[r]) + } + } + } + + // Turn empty string into "." + if out.w == 0 { + out.append('.') + } + + // ORIGINAL: return FromSlash(out.string()) + return out.string() +} diff --git a/cli/internal/cacheitem/filepath_unix.go b/cli/internal/cacheitem/filepath_unix.go new file mode 100644 index 0000000..d0f6786 --- /dev/null +++ b/cli/internal/cacheitem/filepath_unix.go @@ -0,0 +1,14 @@ +//go:build !windows +// +build !windows + +// Copyright 2010 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package cacheitem + +// volumeNameLen returns length of the leading volume name on Windows. +// It returns 0 elsewhere. +func volumeNameLen(path string) int { + return 0 +} diff --git a/cli/internal/cacheitem/filepath_windows.go b/cli/internal/cacheitem/filepath_windows.go new file mode 100644 index 0000000..2c3b852 --- /dev/null +++ b/cli/internal/cacheitem/filepath_windows.go @@ -0,0 +1,50 @@ +//go:build windows +// +build windows + +// Copyright 2010 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package cacheitem + +func isSlash(c uint8) bool { + return c == '\\' || c == '/' +} + +// volumeNameLen returns length of the leading volume name on Windows. +// It returns 0 elsewhere. +func volumeNameLen(path string) int { + if len(path) < 2 { + return 0 + } + // with drive letter + c := path[0] + if path[1] == ':' && ('a' <= c && c <= 'z' || 'A' <= c && c <= 'Z') { + return 2 + } + // is it UNC? https://msdn.microsoft.com/en-us/library/windows/desktop/aa365247(v=vs.85).aspx + if l := len(path); l >= 5 && isSlash(path[0]) && isSlash(path[1]) && + !isSlash(path[2]) && path[2] != '.' { + // first, leading `\\` and next shouldn't be `\`. its server name. + for n := 3; n < l-1; n++ { + // second, next '\' shouldn't be repeated. + if isSlash(path[n]) { + n++ + // third, following something characters. its share name. + if !isSlash(path[n]) { + if path[n] == '.' { + break + } + for ; n < l; n++ { + if isSlash(path[n]) { + break + } + } + return n + } + break + } + } + } + return 0 +} diff --git a/cli/internal/cacheitem/restore.go b/cli/internal/cacheitem/restore.go new file mode 100644 index 0000000..347b996 --- /dev/null +++ b/cli/internal/cacheitem/restore.go @@ -0,0 +1,200 @@ +package cacheitem + +import ( + "archive/tar" + "errors" + "io" + "os" + "runtime" + "strings" + + "github.com/DataDog/zstd" + + "github.com/moby/sys/sequential" + "github.com/vercel/turbo/cli/internal/turbopath" +) + +// Open returns an existing CacheItem at the specified path. +func Open(path turbopath.AbsoluteSystemPath) (*CacheItem, error) { + handle, err := sequential.OpenFile(path.ToString(), os.O_RDONLY, 0777) + if err != nil { + return nil, err + } + + return &CacheItem{ + Path: path, + handle: handle, + compressed: strings.HasSuffix(path.ToString(), ".zst"), + }, nil +} + +// Restore extracts a cache to a specified disk location. +func (ci *CacheItem) Restore(anchor turbopath.AbsoluteSystemPath) ([]turbopath.AnchoredSystemPath, error) { + var tr *tar.Reader + var closeError error + + // We're reading a tar, possibly wrapped in zstd. 
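Since the extracted Clean above ends up load-bearing for symlink-target canonicalization later in this patch, a few spot-checks of its purely lexical behavior may help; this is a minimal package-internal sketch, with expected values assuming a Unix ('/') separator:

package cacheitem

import "testing"

// Sketch: Clean is purely lexical and never consults the filesystem.
// Expected values assume a Unix ('/') separator.
func TestCleanSketch(t *testing.T) {
	cases := map[string]string{
		"a/b/../c":   "a/c",       // inner ".." removes the "b" element
		"./x//y/.":   "x/y",       // "." elements and doubled separators collapse
		"/../escape": "/escape",   // a rooted ".." is dropped
		"../escape":  "../escape", // an unrooted ".." is preserved
	}
	for input, want := range cases {
		if got := Clean(input); got != want {
			t.Errorf("Clean(%q) = %q, want %q", input, got, want)
		}
	}
}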
+	if ci.compressed {
+		zr := zstd.NewReader(ci.handle)
+
+		// The `Close` function for compression effectively just returns the singular
+		// error field on the decompressor instance. This is extremely unlikely to be
+		// set without triggering one of the numerous other errors, but we should still
+		// handle that possible edge case.
+		defer func() { closeError = zr.Close() }()
+		tr = tar.NewReader(zr)
+	} else {
+		tr = tar.NewReader(ci.handle)
+	}
+
+	// On the first attempt to restore a symlink its target may not exist yet.
+	// Save those symlinks and topologically sort them for a second pass.
+	var symlinks []*tar.Header
+
+	restored := make([]turbopath.AnchoredSystemPath, 0)
+
+	restorePointErr := anchor.MkdirAll(0755)
+	if restorePointErr != nil {
+		return nil, restorePointErr
+	}
+
+	// We're going to make the following two assumptions here for "fast" path restoration:
+	// - All directories are enumerated in the `tar`.
+	// - The contents of the tar are enumerated depth-first.
+	//
+	// This allows us to avoid:
+	// - Attempts at recursive creation of directories.
+	// - Repetitive `lstat` on restore of a file.
+	//
+	// Violating these assumptions won't cause things to break, but we only maintain
+	// an `lstat` cache for the current tree. If you violate these assumptions and the
+	// current cache does not apply to your path, it will clobber and re-start from the
+	// common shared prefix.
+	dirCache := &cachedDirTree{
+		anchorAtDepth: []turbopath.AbsoluteSystemPath{anchor},
+	}
+
+	for {
+		header, trErr := tr.Next()
+		if trErr == io.EOF {
+			// The end, time to restore any missing links.
+			symlinksRestored, symlinksErr := topologicallyRestoreSymlinks(dirCache, anchor, symlinks, tr)
+			restored = append(restored, symlinksRestored...)
+			if symlinksErr != nil {
+				return restored, symlinksErr
+			}
+
+			break
+		}
+		if trErr != nil {
+			return restored, trErr
+		}
+
+		// The reader will not advance until tr.Next is called.
+		// We can treat this as file metadata + body reader.
+
+		// Attempt to place the file on disk.
+		file, restoreErr := restoreEntry(dirCache, anchor, header, tr)
+		if restoreErr != nil {
+			if errors.Is(restoreErr, errMissingSymlinkTarget) {
+				// Links get one shot to be valid, then they're accumulated, DAG'd, and restored on delay.
+				symlinks = append(symlinks, header)
+				continue
+			}
+			return restored, restoreErr
+		}
+		restored = append(restored, file)
+	}
+
+	return restored, closeError
+}
+
+// restoreEntry is the entry point for all things read from the tar.
+func restoreEntry(dirCache *cachedDirTree, anchor turbopath.AbsoluteSystemPath, header *tar.Header, reader *tar.Reader) (turbopath.AnchoredSystemPath, error) {
+	// We're permissive on creation, but restrictive on restoration.
+	// There is no need to prevent the cache creation in any case.
+	// And on restoration, if we fail, we simply run the task.
+	switch header.Typeflag {
+	case tar.TypeDir:
+		return restoreDirectory(dirCache, anchor, header)
+	case tar.TypeReg:
+		return restoreRegular(dirCache, anchor, header, reader)
+	case tar.TypeSymlink:
+		return restoreSymlink(dirCache, anchor, header)
+	default:
+		return "", errUnsupportedFileType
+	}
+}
+
+// canonicalizeName returns either an AnchoredSystemPath or an error.
+func canonicalizeName(name string) (turbopath.AnchoredSystemPath, error) {
+	// Assuming this was a `turbo`-created input, we currently have an AnchoredUnixPath.
+	// Assuming this is malicious input we don't really care if we do the wrong thing.
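Zooming out, here is a hedged sketch of the read-side lifecycle that Restore above participates in; it leans on the generateTar helper that restore_test.go defines later in this patch, and the archive contents are hypothetical:

package cacheitem

import (
	"archive/tar"
	"testing"

	"github.com/vercel/turbo/cli/internal/turbopath"
	"gotest.tools/v3/assert"
)

// Sketch of the read-side lifecycle: Open sniffs compression from the file
// extension, Restore extracts under the anchor, Close releases the handle.
func TestRestoreLifecycleSketch(t *testing.T) {
	archivePath := generateTar(t, []tarFile{
		{Header: &tar.Header{Name: "hello", Typeflag: tar.TypeReg, Mode: 0644}, Body: "world"},
	})
	anchor := turbopath.AbsoluteSystemPath(t.TempDir())

	cacheItem, err := Open(archivePath)
	assert.NilError(t, err, "Open")

	restored, restoreErr := cacheItem.Restore(anchor)
	assert.NilError(t, restoreErr, "Restore")
	assert.Equal(t, len(restored), 1, "one entry restored")
	assert.NilError(t, cacheItem.Close(), "Close")
}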
+ wellFormed, windowsSafe := checkName(name) + + // Determine if the future filename is a well-formed AnchoredUnixPath + if !wellFormed { + return "", errNameMalformed + } + + // Determine if the AnchoredUnixPath is safe to be used on Windows + if runtime.GOOS == "windows" && !windowsSafe { + return "", errNameWindowsUnsafe + } + + // Directories will have a trailing slash. Remove it. + noTrailingSlash := strings.TrimSuffix(name, "/") + + // Okay, we're all set here. + return turbopath.AnchoredUnixPathFromUpstream(noTrailingSlash).ToSystemPath(), nil +} + +// checkName returns `wellFormed, windowsSafe` via inspection of separators and traversal +func checkName(name string) (bool, bool) { + length := len(name) + + // Name is of length 0. + if length == 0 { + return false, false + } + + wellFormed := true + windowsSafe := true + + // Name is: + // - "." + // - ".." + if wellFormed && (name == "." || name == "..") { + wellFormed = false + } + + // Name starts with: + // - `/` + // - `./` + // - `../` + if wellFormed && (strings.HasPrefix(name, "/") || strings.HasPrefix(name, "./") || strings.HasPrefix(name, "../")) { + wellFormed = false + } + + // Name ends in: + // - `/.` + // - `/..` + if wellFormed && (strings.HasSuffix(name, "/.") || strings.HasSuffix(name, "/..")) { + wellFormed = false + } + + // Name contains: + // - `//` + // - `/./` + // - `/../` + if wellFormed && (strings.Contains(name, "//") || strings.Contains(name, "/./") || strings.Contains(name, "/../")) { + wellFormed = false + } + + // Name contains: `\` + if strings.ContainsRune(name, '\\') { + windowsSafe = false + } + + return wellFormed, windowsSafe +} diff --git a/cli/internal/cacheitem/restore_directory.go b/cli/internal/cacheitem/restore_directory.go new file mode 100644 index 0000000..4704d66 --- /dev/null +++ b/cli/internal/cacheitem/restore_directory.go @@ -0,0 +1,144 @@ +package cacheitem + +import ( + "archive/tar" + "os" + "path/filepath" + "strings" + + "github.com/vercel/turbo/cli/internal/turbopath" +) + +// restoreDirectory restores a directory. +func restoreDirectory(dirCache *cachedDirTree, anchor turbopath.AbsoluteSystemPath, header *tar.Header) (turbopath.AnchoredSystemPath, error) { + processedName, err := canonicalizeName(header.Name) + if err != nil { + return "", err + } + + // We need to traverse `processedName` from base to root split at + // `os.Separator` to make sure we don't end up following a symlink + // outside of the restore path. + + // Create the directory. + if err := safeMkdirAll(dirCache, anchor, processedName, header.Mode); err != nil { + return "", err + } + + return processedName, nil +} + +type cachedDirTree struct { + anchorAtDepth []turbopath.AbsoluteSystemPath + prefix []turbopath.RelativeSystemPath +} + +func (cr *cachedDirTree) getStartingPoint(path turbopath.AnchoredSystemPath) (turbopath.AbsoluteSystemPath, []turbopath.RelativeSystemPath) { + pathSegmentStrings := strings.Split(path.ToString(), string(os.PathSeparator)) + pathSegments := make([]turbopath.RelativeSystemPath, len(pathSegmentStrings)) + for index, pathSegmentString := range pathSegmentStrings { + pathSegments[index] = turbopath.RelativeSystemPathFromUpstream(pathSegmentString) + } + + i := 0 + for i = 0; i < len(cr.prefix) && i < len(pathSegments); i++ { + if pathSegments[i] != cr.prefix[i] { + break + } + } + + // 0: root anchor, can't remove it. + cr.anchorAtDepth = cr.anchorAtDepth[:i+1] + + // 0: first prefix. 
+	cr.prefix = cr.prefix[:i]
+
+	return cr.anchorAtDepth[i], pathSegments[i:]
+}
+
+func (cr *cachedDirTree) Update(anchor turbopath.AbsoluteSystemPath, newSegment turbopath.RelativeSystemPath) {
+	cr.anchorAtDepth = append(cr.anchorAtDepth, anchor)
+	cr.prefix = append(cr.prefix, newSegment)
+}
+
+// safeMkdirAll creates all directories, assuming that the leaf node is a directory.
+// FIXME: Recheck the symlink cache before creating a directory.
+func safeMkdirAll(dirCache *cachedDirTree, anchor turbopath.AbsoluteSystemPath, processedName turbopath.AnchoredSystemPath, mode int64) error {
+	// Iterate through path segments split at os.Separator, appending them onto the anchor.
+	// Check to see if each path segment is a symlink with a target outside of anchor.
+
+	// Pull the iteration starting point from the directory cache.
+	calculatedAnchor, pathSegments := dirCache.getStartingPoint(processedName)
+	var checkPathErr error
+	for _, segment := range pathSegments {
+		calculatedAnchor, checkPathErr = checkPath(anchor, calculatedAnchor, segment)
+		// We hit an existing directory or absolute path that was invalid.
+		if checkPathErr != nil {
+			return checkPathErr
+		}
+
+		// Otherwise we continue and check the next segment.
+		dirCache.Update(calculatedAnchor, segment)
+	}
+
+	// If we have made it here we know that it is safe to call os.MkdirAll
+	// on the Join of anchor and processedName.
+	//
+	// This could _still_ error, but we don't care.
+	return processedName.RestoreAnchor(anchor).MkdirAll(os.FileMode(mode))
+}
+
+// checkPath ensures that the resolved path, after following any symlinks,
+// never traverses outside of the anchor.
+func checkPath(originalAnchor turbopath.AbsoluteSystemPath, accumulatedAnchor turbopath.AbsoluteSystemPath, segment turbopath.RelativeSystemPath) (turbopath.AbsoluteSystemPath, error) {
+	// Check if the segment itself is sneakily an absolute path...
+	// (looking at you, Windows. CON, AUX...)
+	if filepath.IsAbs(segment.ToString()) {
+		return "", errTraversal
+	}
+
+	// Find out if this portion of the path is a symlink.
+	combinedPath := accumulatedAnchor.Join(segment)
+	fileInfo, err := combinedPath.Lstat()
+
+	// Getting an error here means we failed to stat the path.
+	// Assume that means we're safe and continue.
+	if err != nil {
+		return combinedPath, nil
+	}
+
+	// Find out if we have a symlink.
+	isSymlink := fileInfo.Mode()&os.ModeSymlink != 0
+
+	// If we don't have a symlink it's safe.
+	if !isSymlink {
+		return combinedPath, nil
+	}
+
+	// Check to see if the symlink targets outside of the originalAnchor.
+	// We don't eval symlinks because we could end up in a totally
+	// different place.
+
+	// 1. Get the target.
+	linkTarget, readLinkErr := combinedPath.Readlink()
+	if readLinkErr != nil {
+		return "", readLinkErr
+	}
+
+	// 2. See if the target is absolute. It is only safe if it stays under the anchor.
+	if filepath.IsAbs(linkTarget) {
+		absoluteLinkTarget := turbopath.AbsoluteSystemPathFromUpstream(linkTarget)
+		if absoluteLinkTarget.HasPrefix(originalAnchor) {
+			return absoluteLinkTarget, nil
+		}
+		return "", errTraversal
+	}
+
+	// 3. Target is relative (or absolute Windows on a Unix device).
+	relativeLinkTarget := turbopath.RelativeSystemPathFromUpstream(linkTarget)
+	computedTarget := accumulatedAnchor.UntypedJoin(linkTarget)
+	if computedTarget.HasPrefix(originalAnchor) {
+		// Need to recurse and make sure the target doesn't link out.
+ return checkPath(originalAnchor, accumulatedAnchor, relativeLinkTarget) + } + return "", errTraversal +} diff --git a/cli/internal/cacheitem/restore_directory_test.go b/cli/internal/cacheitem/restore_directory_test.go new file mode 100644 index 0000000..f75bd47 --- /dev/null +++ b/cli/internal/cacheitem/restore_directory_test.go @@ -0,0 +1,103 @@ +package cacheitem + +import ( + "reflect" + "testing" + + "github.com/vercel/turbo/cli/internal/turbopath" +) + +func Test_cachedDirTree_getStartingPoint(t *testing.T) { + testDir := turbopath.AbsoluteSystemPath("") + tests := []struct { + name string + + // STATE + cachedDirTree cachedDirTree + + // INPUT + path turbopath.AnchoredSystemPath + + // OUTPUT + calculatedAnchor turbopath.AbsoluteSystemPath + pathSegments []turbopath.RelativeSystemPath + }{ + { + name: "hello world", + cachedDirTree: cachedDirTree{ + anchorAtDepth: []turbopath.AbsoluteSystemPath{testDir}, + prefix: []turbopath.RelativeSystemPath{}, + }, + path: turbopath.AnchoredUnixPath("hello/world").ToSystemPath(), + calculatedAnchor: testDir, + pathSegments: []turbopath.RelativeSystemPath{"hello", "world"}, + }, + { + name: "has a cache", + cachedDirTree: cachedDirTree{ + anchorAtDepth: []turbopath.AbsoluteSystemPath{ + testDir, + testDir.UntypedJoin("hello"), + }, + prefix: []turbopath.RelativeSystemPath{"hello"}, + }, + path: turbopath.AnchoredUnixPath("hello/world").ToSystemPath(), + calculatedAnchor: testDir.UntypedJoin("hello"), + pathSegments: []turbopath.RelativeSystemPath{"world"}, + }, + { + name: "ask for yourself", + cachedDirTree: cachedDirTree{ + anchorAtDepth: []turbopath.AbsoluteSystemPath{ + testDir, + testDir.UntypedJoin("hello"), + testDir.UntypedJoin("hello", "world"), + }, + prefix: []turbopath.RelativeSystemPath{"hello", "world"}, + }, + path: turbopath.AnchoredUnixPath("hello/world").ToSystemPath(), + calculatedAnchor: testDir.UntypedJoin("hello", "world"), + pathSegments: []turbopath.RelativeSystemPath{}, + }, + { + name: "three layer cake", + cachedDirTree: cachedDirTree{ + anchorAtDepth: []turbopath.AbsoluteSystemPath{ + testDir, + testDir.UntypedJoin("hello"), + testDir.UntypedJoin("hello", "world"), + }, + prefix: []turbopath.RelativeSystemPath{"hello", "world"}, + }, + path: turbopath.AnchoredUnixPath("hello/world/again").ToSystemPath(), + calculatedAnchor: testDir.UntypedJoin("hello", "world"), + pathSegments: []turbopath.RelativeSystemPath{"again"}, + }, + { + name: "outside of cache hierarchy", + cachedDirTree: cachedDirTree{ + anchorAtDepth: []turbopath.AbsoluteSystemPath{ + testDir, + testDir.UntypedJoin("hello"), + testDir.UntypedJoin("hello", "world"), + }, + prefix: []turbopath.RelativeSystemPath{"hello", "world"}, + }, + path: turbopath.AnchoredUnixPath("somewhere/else").ToSystemPath(), + calculatedAnchor: testDir, + pathSegments: []turbopath.RelativeSystemPath{"somewhere", "else"}, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + cr := tt.cachedDirTree + calculatedAnchor, pathSegments := cr.getStartingPoint(tt.path) + if !reflect.DeepEqual(calculatedAnchor, tt.calculatedAnchor) { + t.Errorf("cachedDirTree.getStartingPoint() calculatedAnchor = %v, want %v", calculatedAnchor, tt.calculatedAnchor) + } + if !reflect.DeepEqual(pathSegments, tt.pathSegments) { + t.Errorf("cachedDirTree.getStartingPoint() pathSegments = %v, want %v", pathSegments, tt.pathSegments) + } + }) + } +} diff --git a/cli/internal/cacheitem/restore_regular.go b/cli/internal/cacheitem/restore_regular.go new file mode 100644 index 0000000..ed8946e 
--- /dev/null
+++ b/cli/internal/cacheitem/restore_regular.go
@@ -0,0 +1,46 @@
+package cacheitem
+
+import (
+	"archive/tar"
+	"io"
+	"os"
+
+	"github.com/vercel/turbo/cli/internal/turbopath"
+)
+
+// restoreRegular restores a file.
+func restoreRegular(dirCache *cachedDirTree, anchor turbopath.AbsoluteSystemPath, header *tar.Header, reader *tar.Reader) (turbopath.AnchoredSystemPath, error) {
+	// Assuming this was a `turbo`-created input, we currently have an AnchoredUnixPath.
+	// Assuming this is malicious input we don't really care if we do the wrong thing.
+	processedName, err := canonicalizeName(header.Name)
+	if err != nil {
+		return "", err
+	}
+
+	// We need to traverse `processedName` from base to root split at
+	// `os.Separator` to make sure we don't end up following a symlink
+	// outside of the restore path.
+	if err := safeMkdirFile(dirCache, anchor, processedName, header.Mode); err != nil {
+		return "", err
+	}
+
+	// Create the file.
+	if f, err := processedName.RestoreAnchor(anchor).OpenFile(os.O_WRONLY|os.O_TRUNC|os.O_CREATE, os.FileMode(header.Mode)); err != nil {
+		return "", err
+	} else if _, err := io.Copy(f, reader); err != nil {
+		return "", err
+	} else if err := f.Close(); err != nil {
+		return "", err
+	}
+	return processedName, nil
+}
+
+// safeMkdirFile creates all parent directories, assuming that the leaf node is a file.
+func safeMkdirFile(dirCache *cachedDirTree, anchor turbopath.AbsoluteSystemPath, processedName turbopath.AnchoredSystemPath, mode int64) error {
+	isRootFile := processedName.Dir() == "."
+	if !isRootFile {
+		return safeMkdirAll(dirCache, anchor, processedName.Dir(), 0755)
+	}
+
+	return nil
+}
diff --git a/cli/internal/cacheitem/restore_symlink.go b/cli/internal/cacheitem/restore_symlink.go
new file mode 100644
index 0000000..4cb29f5
--- /dev/null
+++ b/cli/internal/cacheitem/restore_symlink.go
@@ -0,0 +1,180 @@
+package cacheitem
+
+import (
+	"archive/tar"
+	"io/fs"
+	"os"
+	"path/filepath"
+
+	"github.com/pyr-sh/dag"
+	"github.com/vercel/turbo/cli/internal/turbopath"
+)
+
+// restoreSymlink restores a symlink and errors if the target is missing.
+func restoreSymlink(dirCache *cachedDirTree, anchor turbopath.AbsoluteSystemPath, header *tar.Header) (turbopath.AnchoredSystemPath, error) {
+	processedName, canonicalizeNameErr := canonicalizeName(header.Name)
+	if canonicalizeNameErr != nil {
+		return "", canonicalizeNameErr
+	}
+
+	// Check to see if the target exists.
+	processedLinkname := canonicalizeLinkname(anchor, processedName, header.Linkname)
+	if _, err := os.Lstat(processedLinkname); err != nil {
+		return "", errMissingSymlinkTarget
+	}
+
+	return actuallyRestoreSymlink(dirCache, anchor, processedName, header)
+}
+
+// restoreSymlinkMissingTarget restores a symlink and does not error if the target is missing.
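Returning to the traversal guard in restore_directory.go above: a sketch of the escape case checkPath exists for, using a hypothetical fixture (creating the symlink may require extra privileges on Windows):

package cacheitem

import (
	"testing"

	"github.com/vercel/turbo/cli/internal/turbopath"
	"gotest.tools/v3/assert"
)

// Sketch: checkPath refuses to walk through a symlink whose target resolves
// above the anchor.
func TestCheckPathEscapeSketch(t *testing.T) {
	anchor := turbopath.AbsoluteSystemPath(t.TempDir())

	// anchor/escape -> ".." points one level above the anchor.
	assert.NilError(t, anchor.UntypedJoin("escape").Symlink(".."), "Symlink")

	_, err := checkPath(anchor, anchor, turbopath.RelativeSystemPathFromUpstream("escape"))
	assert.ErrorIs(t, err, errTraversal)
}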
+func restoreSymlinkMissingTarget(dirCache *cachedDirTree, anchor turbopath.AbsoluteSystemPath, header *tar.Header) (turbopath.AnchoredSystemPath, error) {
+	processedName, canonicalizeNameErr := canonicalizeName(header.Name)
+	if canonicalizeNameErr != nil {
+		return "", canonicalizeNameErr
+	}
+
+	return actuallyRestoreSymlink(dirCache, anchor, processedName, header)
+}
+
+func actuallyRestoreSymlink(dirCache *cachedDirTree, anchor turbopath.AbsoluteSystemPath, processedName turbopath.AnchoredSystemPath, header *tar.Header) (turbopath.AnchoredSystemPath, error) {
+	// We need to traverse `processedName` from base to root split at
+	// `os.Separator` to make sure we don't end up following a symlink
+	// outside of the restore path.
+	if err := safeMkdirFile(dirCache, anchor, processedName, header.Mode); err != nil {
+		return "", err
+	}
+
+	// Specify where we're restoring this symlink.
+	symlinkFrom := processedName.RestoreAnchor(anchor)
+
+	// Remove any existing object at that location.
+	// If it errors we'll catch it on creation.
+	_ = symlinkFrom.Remove()
+
+	// Create the symlink.
+	// Explicitly uses the _original_ header.Linkname as the target.
+	// This does not support file names with `\` in them in a cross-platform manner.
+	symlinkErr := symlinkFrom.Symlink(header.Linkname)
+	if symlinkErr != nil {
+		return "", symlinkErr
+	}
+
+	// Darwin allows you to change the permissions of a symlink.
+	lchmodErr := symlinkFrom.Lchmod(fs.FileMode(header.Mode))
+	if lchmodErr != nil {
+		return "", lchmodErr
+	}
+
+	return processedName, nil
+}
+
+// topologicallyRestoreSymlinks ensures that targets of symlinks are created in advance
+// of the things that link to them. It does this by topologically sorting all
+// of the symlinks. This also enables us to ensure we do not create cycles.
+func topologicallyRestoreSymlinks(dirCache *cachedDirTree, anchor turbopath.AbsoluteSystemPath, symlinks []*tar.Header, tr *tar.Reader) ([]turbopath.AnchoredSystemPath, error) {
+	restored := make([]turbopath.AnchoredSystemPath, 0)
+	lookup := make(map[string]*tar.Header)
+
+	var g dag.AcyclicGraph
+	for _, header := range symlinks {
+		processedName, err := canonicalizeName(header.Name)
+		if err != nil {
+			return nil, err
+		}
+		processedSourcename := canonicalizeLinkname(anchor, processedName, processedName.ToString())
+		processedLinkname := canonicalizeLinkname(anchor, processedName, header.Linkname)
+		g.Add(processedSourcename)
+		g.Add(processedLinkname)
+		g.Connect(dag.BasicEdge(processedLinkname, processedSourcename))
+		lookup[processedSourcename] = header
+	}
+
+	cycles := g.Cycles()
+	if cycles != nil {
+		return restored, errCycleDetected
+	}
+
+	roots := make(dag.Set)
+	for _, v := range g.Vertices() {
+		if g.UpEdges(v).Len() == 0 {
+			roots.Add(v)
+		}
+	}
+
+	walkFunc := func(vertex dag.Vertex, depth int) error {
+		key, ok := vertex.(string)
+		if !ok {
+			return nil
+		}
+		header, exists := lookup[key]
+		if !exists {
+			return nil
+		}
+
+		file, restoreErr := restoreSymlinkMissingTarget(dirCache, anchor, header)
+		if restoreErr != nil {
+			return restoreErr
+		}
+
+		restored = append(restored, file)
+		return nil
+	}
+
+	walkError := g.DepthFirstWalk(roots, walkFunc)
+	if walkError != nil {
+		return restored, walkError
+	}
+
+	return restored, nil
+}
+
+// canonicalizeLinkname determines (lexically) what the resolved path on the
+// system will be when linkname is restored verbatim.
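The edge direction chosen above (link target pointing at the link that needs it) is what makes the walk restore targets first; a minimal sketch of just that ordering property, assuming the pyr-sh/dag fork keeps the hashicorp-style signatures used in the code above:

package cacheitem

import (
	"testing"

	"github.com/pyr-sh/dag"
)

// Sketch: with edges pointing target -> source, roots are link targets, so a
// depth-first walk visits "real" before the "link" that depends on it.
func TestSymlinkWalkOrderSketch(t *testing.T) {
	var g dag.AcyclicGraph
	g.Add("real")
	g.Add("link")
	g.Connect(dag.BasicEdge("real", "link")) // "link" points at "real"

	roots := make(dag.Set)
	for _, v := range g.Vertices() {
		if g.UpEdges(v).Len() == 0 {
			roots.Add(v)
		}
	}

	var order []string
	err := g.DepthFirstWalk(roots, func(v dag.Vertex, depth int) error {
		order = append(order, v.(string))
		return nil
	})
	if err != nil || len(order) != 2 || order[0] != "real" {
		t.Errorf("expected the walk to visit the target first, got %v (err %v)", order, err)
	}
}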
+func canonicalizeLinkname(anchor turbopath.AbsoluteSystemPath, processedName turbopath.AnchoredSystemPath, linkname string) string { + // We don't know _anything_ about linkname. It could be any of: + // + // - Absolute Unix Path + // - Absolute Windows Path + // - Relative Unix Path + // - Relative Windows Path + // + // We also can't _truly_ distinguish if the path is Unix or Windows. + // Take for example: `/Users/turbobot/weird-filenames/\foo\/lol` + // It is a valid file on Unix, but if we do slash conversion it breaks. + // Or `i\am\a\normal\unix\file\but\super\nested\on\windows`. + // + // We also can't safely assume that paths in link targets on one platform + // should be treated as targets for that platform. The author may be + // generating an artifact that should work on Windows on a Unix device. + // + // Given all of that, our best option is to restore link targets _verbatim_. + // No modification, no slash conversion. + // + // In order to DAG sort them, however, we do need to canonicalize them. + // We canonicalize them as if we're restoring them verbatim. + // + // 0. We've extracted a version of `Clean` from stdlib which does nothing but + // separator and traversal collapsing. + cleanedLinkname := Clean(linkname) + + // 1. Check to see if the link target is absolute _on the current platform_. + // If it is an absolute path it's canonical by rule. + if filepath.IsAbs(cleanedLinkname) { + return cleanedLinkname + } + + // Remaining options: + // - Absolute (other platform) Path + // - Relative Unix Path + // - Relative Windows Path + // + // At this point we simply assume that it's a relative path—no matter + // which separators appear in it and where they appear, We can't do + // anything else because the OS will also treat it like that when it is + // a link target. + // + // We manually join these to avoid calls to stdlib's `Clean`. + source := processedName.RestoreAnchor(anchor) + canonicalized := source.Dir().ToString() + string(os.PathSeparator) + cleanedLinkname + return Clean(canonicalized) +} diff --git a/cli/internal/cacheitem/restore_test.go b/cli/internal/cacheitem/restore_test.go new file mode 100644 index 0000000..a0a33d6 --- /dev/null +++ b/cli/internal/cacheitem/restore_test.go @@ -0,0 +1,1493 @@ +package cacheitem + +import ( + "archive/tar" + "errors" + "fmt" + "io" + "io/fs" + "os" + "path/filepath" + "reflect" + "runtime" + "syscall" + "testing" + + "github.com/DataDog/zstd" + "github.com/vercel/turbo/cli/internal/turbopath" + "gotest.tools/v3/assert" +) + +type tarFile struct { + Body string + *tar.Header +} + +type restoreFile struct { + Name turbopath.AnchoredUnixPath + Linkname string + fs.FileMode +} + +// generateTar is used specifically to generate tar files that Turborepo would +// rarely or never encounter without malicious or pathological inputs. We use it +// to make sure that we respond well in these scenarios during restore attempts. 
+func generateTar(t *testing.T, files []tarFile) turbopath.AbsoluteSystemPath { + t.Helper() + testDir := turbopath.AbsoluteSystemPath(t.TempDir()) + testArchivePath := testDir.UntypedJoin("out.tar") + + handle, handleCreateErr := testArchivePath.Create() + assert.NilError(t, handleCreateErr, "os.Create") + + tw := tar.NewWriter(handle) + + for _, file := range files { + if file.Header.Typeflag == tar.TypeReg { + file.Header.Size = int64(len(file.Body)) + } + + writeHeaderErr := tw.WriteHeader(file.Header) + assert.NilError(t, writeHeaderErr, "tw.WriteHeader") + + _, writeErr := tw.Write([]byte(file.Body)) + assert.NilError(t, writeErr, "tw.Write") + } + + twCloseErr := tw.Close() + assert.NilError(t, twCloseErr, "tw.Close") + + handleCloseErr := handle.Close() + assert.NilError(t, handleCloseErr, "handle.Close") + + return testArchivePath +} + +// compressTar splits the compression of a tar file so that we don't +// accidentally diverge in tar creation while still being able to test +// restoration from tar and from .tar.zst. +func compressTar(t *testing.T, archivePath turbopath.AbsoluteSystemPath) turbopath.AbsoluteSystemPath { + t.Helper() + + inputHandle, inputHandleOpenErr := archivePath.Open() + assert.NilError(t, inputHandleOpenErr, "os.Open") + + outputPath := archivePath + ".zst" + outputHandle, outputHandleCreateErr := outputPath.Create() + assert.NilError(t, outputHandleCreateErr, "os.Create") + + zw := zstd.NewWriter(outputHandle) + _, copyError := io.Copy(zw, inputHandle) + assert.NilError(t, copyError, "io.Copy") + + zwCloseErr := zw.Close() + assert.NilError(t, zwCloseErr, "zw.Close") + + inputHandleCloseErr := inputHandle.Close() + assert.NilError(t, inputHandleCloseErr, "inputHandle.Close") + + outputHandleCloseErr := outputHandle.Close() + assert.NilError(t, outputHandleCloseErr, "outputHandle.Close") + + return outputPath +} + +func generateAnchor(t *testing.T) turbopath.AbsoluteSystemPath { + t.Helper() + testDir := turbopath.AbsoluteSystemPath(t.TempDir()) + anchorPoint := testDir.UntypedJoin("anchor") + + mkdirErr := anchorPoint.Mkdir(0777) + assert.NilError(t, mkdirErr, "Mkdir") + + return anchorPoint +} + +func assertFileExists(t *testing.T, anchor turbopath.AbsoluteSystemPath, diskFile restoreFile) { + t.Helper() + // If we have gotten here we can assume this to be true. + processedName := diskFile.Name.ToSystemPath() + fullName := processedName.RestoreAnchor(anchor) + fileInfo, err := fullName.Lstat() + assert.NilError(t, err, "Lstat") + + assert.Equal(t, fileInfo.Mode()&fs.ModePerm, diskFile.FileMode&fs.ModePerm, "File has the expected permissions: "+processedName) + assert.Equal(t, fileInfo.Mode()|fs.ModePerm, diskFile.FileMode|fs.ModePerm, "File has the expected mode.") + + if diskFile.FileMode&os.ModeSymlink != 0 { + linkname, err := fullName.Readlink() + assert.NilError(t, err, "Readlink") + + // We restore Linkname verbatim. 
+ assert.Equal(t, linkname, diskFile.Linkname, "Link target matches.") + } +} + +func TestOpen(t *testing.T) { + type wantErr struct { + unix error + windows error + } + type wantOutput struct { + unix []turbopath.AnchoredSystemPath + windows []turbopath.AnchoredSystemPath + } + type wantFiles struct { + unix []restoreFile + windows []restoreFile + } + tests := []struct { + name string + tarFiles []tarFile + wantOutput wantOutput + wantFiles wantFiles + wantErr wantErr + }{ + { + name: "cache optimized", + tarFiles: []tarFile{ + { + Header: &tar.Header{ + Name: "one/", + Typeflag: tar.TypeDir, + Mode: 0755, + }, + }, + { + Header: &tar.Header{ + Name: "one/two/", + Typeflag: tar.TypeDir, + Mode: 0755, + }, + }, + { + Header: &tar.Header{ + Name: "one/two/three/", + Typeflag: tar.TypeDir, + Mode: 0755, + }, + }, + { + Header: &tar.Header{ + Name: "one/two/three/file-one", + Typeflag: tar.TypeReg, + Mode: 0644, + }, + }, + { + Header: &tar.Header{ + Name: "one/two/three/file-two", + Typeflag: tar.TypeReg, + Mode: 0644, + }, + }, + { + Header: &tar.Header{ + Name: "one/two/a/", + Typeflag: tar.TypeDir, + Mode: 0755, + }, + }, + { + Header: &tar.Header{ + Name: "one/two/a/file", + Typeflag: tar.TypeReg, + Mode: 0644, + }, + }, + { + Header: &tar.Header{ + Name: "one/two/b/", + Typeflag: tar.TypeDir, + Mode: 0755, + }, + }, + { + Header: &tar.Header{ + Name: "one/two/b/file", + Typeflag: tar.TypeReg, + Mode: 0644, + }, + }, + }, + wantFiles: wantFiles{ + unix: []restoreFile{ + { + Name: "one", + FileMode: 0 | os.ModeDir | 0755, + }, + { + Name: "one/two", + FileMode: 0 | os.ModeDir | 0755, + }, + { + Name: "one/two/three", + FileMode: 0 | os.ModeDir | 0755, + }, + { + Name: "one/two/three/file-one", + FileMode: 0644, + }, + { + Name: "one/two/three/file-two", + FileMode: 0644, + }, + { + Name: "one/two/a", + FileMode: 0 | os.ModeDir | 0755, + }, + { + Name: "one/two/a/file", + FileMode: 0644, + }, + { + Name: "one/two/b", + FileMode: 0 | os.ModeDir | 0755, + }, + { + Name: "one/two/b/file", + FileMode: 0644, + }, + }, + windows: []restoreFile{ + { + Name: "one", + FileMode: 0 | os.ModeDir | 0777, + }, + { + Name: "one/two", + FileMode: 0 | os.ModeDir | 0777, + }, + { + Name: "one/two/three", + FileMode: 0 | os.ModeDir | 0777, + }, + { + Name: "one/two/three/file-one", + FileMode: 0666, + }, + { + Name: "one/two/three/file-two", + FileMode: 0666, + }, + { + Name: "one/two/a", + FileMode: 0 | os.ModeDir | 0777, + }, + { + Name: "one/two/a/file", + FileMode: 0666, + }, + { + Name: "one/two/b", + FileMode: 0 | os.ModeDir | 0777, + }, + { + Name: "one/two/b/file", + FileMode: 0666, + }, + }, + }, + wantOutput: wantOutput{ + unix: turbopath.AnchoredUnixPathArray{ + "one", + "one/two", + "one/two/three", + "one/two/three/file-one", + "one/two/three/file-two", + "one/two/a", + "one/two/a/file", + "one/two/b", + "one/two/b/file", + }.ToSystemPathArray(), + }, + }, + { + name: "pathological cache works", + tarFiles: []tarFile{ + { + Header: &tar.Header{ + Name: "one/", + Typeflag: tar.TypeDir, + Mode: 0755, + }, + }, + { + Header: &tar.Header{ + Name: "one/two/", + Typeflag: tar.TypeDir, + Mode: 0755, + }, + }, + { + Header: &tar.Header{ + Name: "one/two/a/", + Typeflag: tar.TypeDir, + Mode: 0755, + }, + }, + { + Header: &tar.Header{ + Name: "one/two/b/", + Typeflag: tar.TypeDir, + Mode: 0755, + }, + }, + { + Header: &tar.Header{ + Name: "one/two/three/", + Typeflag: tar.TypeDir, + Mode: 0755, + }, + }, + { + Header: &tar.Header{ + Name: "one/two/a/file", + Typeflag: tar.TypeReg, + Mode: 0644, + }, + }, 
+ { + Header: &tar.Header{ + Name: "one/two/b/file", + Typeflag: tar.TypeReg, + Mode: 0644, + }, + }, + { + Header: &tar.Header{ + Name: "one/two/three/file-one", + Typeflag: tar.TypeReg, + Mode: 0644, + }, + }, + { + Header: &tar.Header{ + Name: "one/two/three/file-two", + Typeflag: tar.TypeReg, + Mode: 0644, + }, + }, + }, + wantFiles: wantFiles{ + unix: []restoreFile{ + { + Name: "one", + FileMode: 0 | os.ModeDir | 0755, + }, + { + Name: "one/two", + FileMode: 0 | os.ModeDir | 0755, + }, + { + Name: "one/two/three", + FileMode: 0 | os.ModeDir | 0755, + }, + { + Name: "one/two/three/file-one", + FileMode: 0644, + }, + { + Name: "one/two/three/file-two", + FileMode: 0644, + }, + { + Name: "one/two/a", + FileMode: 0 | os.ModeDir | 0755, + }, + { + Name: "one/two/a/file", + FileMode: 0644, + }, + { + Name: "one/two/b", + FileMode: 0 | os.ModeDir | 0755, + }, + { + Name: "one/two/b/file", + FileMode: 0644, + }, + }, + windows: []restoreFile{ + { + Name: "one", + FileMode: 0 | os.ModeDir | 0777, + }, + { + Name: "one/two", + FileMode: 0 | os.ModeDir | 0777, + }, + { + Name: "one/two/three", + FileMode: 0 | os.ModeDir | 0777, + }, + { + Name: "one/two/three/file-one", + FileMode: 0666, + }, + { + Name: "one/two/three/file-two", + FileMode: 0666, + }, + { + Name: "one/two/a", + FileMode: 0 | os.ModeDir | 0777, + }, + { + Name: "one/two/a/file", + FileMode: 0666, + }, + { + Name: "one/two/b", + FileMode: 0 | os.ModeDir | 0777, + }, + { + Name: "one/two/b/file", + FileMode: 0666, + }, + }, + }, + wantOutput: wantOutput{ + unix: turbopath.AnchoredUnixPathArray{ + "one", + "one/two", + "one/two/a", + "one/two/b", + "one/two/three", + "one/two/a/file", + "one/two/b/file", + "one/two/three/file-one", + "one/two/three/file-two", + }.ToSystemPathArray(), + }, + }, + { + name: "hello world", + tarFiles: []tarFile{ + { + Header: &tar.Header{ + Name: "target", + Typeflag: tar.TypeReg, + Mode: 0644, + }, + Body: "target", + }, + { + Header: &tar.Header{ + Name: "source", + Linkname: "target", + Typeflag: tar.TypeSymlink, + Mode: 0777, + }, + }, + }, + wantFiles: wantFiles{ + unix: []restoreFile{ + { + Name: "source", + Linkname: "target", + FileMode: 0 | os.ModeSymlink | 0777, + }, + { + Name: "target", + FileMode: 0644, + }, + }, + windows: []restoreFile{ + { + Name: "source", + Linkname: "target", + FileMode: 0 | os.ModeSymlink | 0666, + }, + { + Name: "target", + FileMode: 0666, + }, + }, + }, + wantOutput: wantOutput{ + unix: turbopath.AnchoredUnixPathArray{"target", "source"}.ToSystemPathArray(), + }, + }, + { + name: "nested file", + tarFiles: []tarFile{ + { + Header: &tar.Header{ + Name: "folder/", + Typeflag: tar.TypeDir, + Mode: 0755, + }, + }, + { + Header: &tar.Header{ + Name: "folder/file", + Typeflag: tar.TypeReg, + Mode: 0644, + }, + Body: "file", + }, + }, + wantFiles: wantFiles{ + unix: []restoreFile{ + { + Name: "folder", + FileMode: 0 | os.ModeDir | 0755, + }, + { + Name: "folder/file", + FileMode: 0644, + }, + }, + windows: []restoreFile{ + { + Name: "folder", + FileMode: 0 | os.ModeDir | 0777, + }, + { + Name: "folder/file", + FileMode: 0666, + }, + }, + }, + wantOutput: wantOutput{ + unix: turbopath.AnchoredUnixPathArray{"folder", "folder/file"}.ToSystemPathArray(), + }, + }, + { + name: "nested symlink", + tarFiles: []tarFile{ + { + Header: &tar.Header{ + Name: "folder/", + Typeflag: tar.TypeDir, + Mode: 0755, + }, + }, + { + Header: &tar.Header{ + Name: "folder/symlink", + Linkname: "../", + Typeflag: tar.TypeSymlink, + Mode: 0777, + }, + }, + { + Header: &tar.Header{ + Name: 
"folder/symlink/folder-sibling", + Typeflag: tar.TypeReg, + Mode: 0644, + }, + Body: "folder-sibling", + }, + }, + wantFiles: wantFiles{ + unix: []restoreFile{ + { + Name: "folder", + FileMode: 0 | os.ModeDir | 0755, + }, + { + Name: "folder/symlink", + FileMode: 0 | os.ModeSymlink | 0777, + Linkname: "../", + }, + { + Name: "folder/symlink/folder-sibling", + FileMode: 0644, + }, + { + Name: "folder-sibling", + FileMode: 0644, + }, + }, + windows: []restoreFile{ + { + Name: "folder", + FileMode: 0 | os.ModeDir | 0777, + }, + { + Name: "folder/symlink", + FileMode: 0 | os.ModeSymlink | 0666, + Linkname: "..\\", + }, + { + Name: "folder/symlink/folder-sibling", + FileMode: 0666, + }, + { + Name: "folder-sibling", + FileMode: 0666, + }, + }, + }, + wantOutput: wantOutput{ + unix: turbopath.AnchoredUnixPathArray{"folder", "folder/symlink", "folder/symlink/folder-sibling"}.ToSystemPathArray(), + }, + }, + { + name: "pathological symlinks", + tarFiles: []tarFile{ + { + Header: &tar.Header{ + Name: "one", + Linkname: "two", + Typeflag: tar.TypeSymlink, + Mode: 0777, + }, + }, + { + Header: &tar.Header{ + Name: "two", + Linkname: "three", + Typeflag: tar.TypeSymlink, + Mode: 0777, + }, + }, + { + Header: &tar.Header{ + Name: "three", + Linkname: "real", + Typeflag: tar.TypeSymlink, + Mode: 0777, + }, + }, + { + Header: &tar.Header{ + Name: "real", + Typeflag: tar.TypeReg, + Mode: 0755, + }, + Body: "real", + }, + }, + wantFiles: wantFiles{ + unix: []restoreFile{ + { + Name: "one", + Linkname: "two", + FileMode: 0 | os.ModeSymlink | 0777, + }, + { + Name: "two", + Linkname: "three", + FileMode: 0 | os.ModeSymlink | 0777, + }, + { + Name: "three", + Linkname: "real", + FileMode: 0 | os.ModeSymlink | 0777, + }, + { + Name: "real", + FileMode: 0 | 0755, + }, + }, + windows: []restoreFile{ + { + Name: "one", + Linkname: "two", + FileMode: 0 | os.ModeSymlink | 0666, + }, + { + Name: "two", + Linkname: "three", + FileMode: 0 | os.ModeSymlink | 0666, + }, + { + Name: "three", + Linkname: "real", + FileMode: 0 | os.ModeSymlink | 0666, + }, + { + Name: "real", + FileMode: 0 | 0666, + }, + }, + }, + wantOutput: wantOutput{ + unix: turbopath.AnchoredUnixPathArray{"real", "three", "two", "one"}.ToSystemPathArray(), + }, + }, + { + name: "place file at dir location", + tarFiles: []tarFile{ + { + Header: &tar.Header{ + Name: "folder-not-file/", + Typeflag: tar.TypeDir, + Mode: 0755, + }, + }, + { + Header: &tar.Header{ + Name: "folder-not-file/subfile", + Typeflag: tar.TypeReg, + Mode: 0755, + }, + Body: "subfile", + }, + { + Header: &tar.Header{ + Name: "folder-not-file", + Typeflag: tar.TypeReg, + Mode: 0755, + }, + Body: "this shouldn't work", + }, + }, + wantFiles: wantFiles{ + unix: []restoreFile{ + { + Name: "folder-not-file", + FileMode: 0 | os.ModeDir | 0755, + }, + { + Name: "folder-not-file/subfile", + FileMode: 0755, + }, + }, + windows: []restoreFile{ + { + Name: "folder-not-file", + FileMode: 0 | os.ModeDir | 0777, + }, + { + Name: "folder-not-file/subfile", + FileMode: 0666, + }, + }, + }, + wantOutput: wantOutput{ + unix: turbopath.AnchoredUnixPathArray{"folder-not-file", "folder-not-file/subfile"}.ToSystemPathArray(), + }, + wantErr: wantErr{ + unix: syscall.EISDIR, + windows: syscall.EISDIR, + }, + }, + // { + // name: "missing symlink with file at subdir", + // tarFiles: []tarFile{ + // { + // Header: &tar.Header{ + // Name: "one", + // Linkname: "two", + // Typeflag: tar.TypeSymlink, + // Mode: 0777, + // }, + // }, + // { + // Header: &tar.Header{ + // Name: "one/file", + // Typeflag: 
tar.TypeReg, + // Mode: 0755, + // }, + // Body: "file", + // }, + // }, + // wantFiles: wantFiles{ + // unix: []restoreFile{ + // { + // Name: "one", + // Linkname: "two", + // FileMode: 0 | os.ModeSymlink | 0777, + // }, + // }, + // }, + // wantOutput: wantOutput{ + // unix: turbopath.AnchoredUnixPathArray{"one"}.ToSystemPathArray(), + // windows: nil, + // }, + // wantErr: wantErr{ + // unix: os.ErrExist, + // windows: os.ErrExist, + // }, + // }, + { + name: "symlink cycle", + tarFiles: []tarFile{ + { + Header: &tar.Header{ + Name: "one", + Linkname: "two", + Typeflag: tar.TypeSymlink, + Mode: 0777, + }, + }, + { + Header: &tar.Header{ + Name: "two", + Linkname: "three", + Typeflag: tar.TypeSymlink, + Mode: 0777, + }, + }, + { + Header: &tar.Header{ + Name: "three", + Linkname: "one", + Typeflag: tar.TypeSymlink, + Mode: 0777, + }, + }, + }, + wantFiles: wantFiles{ + unix: []restoreFile{}, + }, + wantOutput: wantOutput{ + unix: []turbopath.AnchoredSystemPath{}, + }, + wantErr: wantErr{ + unix: errCycleDetected, + windows: errCycleDetected, + }, + }, + { + name: "symlink clobber", + tarFiles: []tarFile{ + { + Header: &tar.Header{ + Name: "one", + Linkname: "two", + Typeflag: tar.TypeSymlink, + Mode: 0777, + }, + }, + { + Header: &tar.Header{ + Name: "one", + Linkname: "three", + Typeflag: tar.TypeSymlink, + Mode: 0777, + }, + }, + { + Header: &tar.Header{ + Name: "one", + Linkname: "real", + Typeflag: tar.TypeSymlink, + Mode: 0777, + }, + }, + { + Header: &tar.Header{ + Name: "real", + Typeflag: tar.TypeReg, + Mode: 0755, + }, + Body: "real", + }, + }, + wantFiles: wantFiles{ + unix: []restoreFile{ + { + Name: "one", + Linkname: "real", + FileMode: 0 | os.ModeSymlink | 0777, + }, + { + Name: "real", + FileMode: 0755, + }, + }, + windows: []restoreFile{ + { + Name: "one", + Linkname: "real", + FileMode: 0 | os.ModeSymlink | 0666, + }, + { + Name: "real", + FileMode: 0666, + }, + }, + }, + wantOutput: wantOutput{ + unix: turbopath.AnchoredUnixPathArray{"real", "one"}.ToSystemPathArray(), + }, + }, + { + name: "symlink traversal", + tarFiles: []tarFile{ + { + Header: &tar.Header{ + Name: "escape", + Linkname: "../", + Typeflag: tar.TypeSymlink, + Mode: 0777, + }, + }, + { + Header: &tar.Header{ + Name: "escape/file", + Typeflag: tar.TypeReg, + Mode: 0644, + }, + Body: "file", + }, + }, + wantFiles: wantFiles{ + unix: []restoreFile{ + { + Name: "escape", + Linkname: "../", + FileMode: 0 | os.ModeSymlink | 0777, + }, + }, + windows: []restoreFile{ + { + Name: "escape", + Linkname: "..\\", + FileMode: 0 | os.ModeSymlink | 0666, + }, + }, + }, + wantOutput: wantOutput{ + unix: turbopath.AnchoredUnixPathArray{"escape"}.ToSystemPathArray(), + }, + wantErr: wantErr{ + unix: errTraversal, + windows: errTraversal, + }, + }, + { + name: "Double indirection: file", + tarFiles: []tarFile{ + { + Header: &tar.Header{ + Name: "up", + Linkname: "../", + Typeflag: tar.TypeSymlink, + Mode: 0777, + }, + }, + { + Header: &tar.Header{ + Name: "link", + Linkname: "up", + Typeflag: tar.TypeSymlink, + Mode: 0777, + }, + }, + { + Header: &tar.Header{ + Name: "link/outside-file", + Typeflag: tar.TypeReg, + Mode: 0755, + }, + }, + }, + wantErr: wantErr{unix: errTraversal, windows: errTraversal}, + wantOutput: wantOutput{ + unix: turbopath.AnchoredUnixPathArray{ + "up", + "link", + }.ToSystemPathArray(), + }, + }, + { + name: "Double indirection: folder", + tarFiles: []tarFile{ + { + Header: &tar.Header{ + Name: "up", + Linkname: "../", + Typeflag: tar.TypeSymlink, + Mode: 0777, + }, + }, + { + Header: &tar.Header{ 
+ Name: "link", + Linkname: "up", + Typeflag: tar.TypeSymlink, + Mode: 0777, + }, + }, + { + Header: &tar.Header{ + Name: "link/level-one/level-two/", + Typeflag: tar.TypeDir, + Mode: 0755, + }, + }, + }, + wantErr: wantErr{unix: errTraversal, windows: errTraversal}, + wantOutput: wantOutput{ + unix: turbopath.AnchoredUnixPathArray{ + "up", + "link", + }.ToSystemPathArray(), + }, + }, + { + name: "name traversal", + tarFiles: []tarFile{ + { + Header: &tar.Header{ + Name: "../escape", + Typeflag: tar.TypeReg, + Mode: 0644, + }, + Body: "file", + }, + }, + wantFiles: wantFiles{ + unix: []restoreFile{}, + }, + wantOutput: wantOutput{ + unix: []turbopath.AnchoredSystemPath{}, + }, + wantErr: wantErr{ + unix: errNameMalformed, + windows: errNameMalformed, + }, + }, + { + name: "windows unsafe", + tarFiles: []tarFile{ + { + Header: &tar.Header{ + Name: "back\\slash\\file", + Typeflag: tar.TypeReg, + Mode: 0644, + }, + Body: "file", + }, + }, + wantFiles: wantFiles{ + unix: []restoreFile{ + { + Name: "back\\slash\\file", + FileMode: 0644, + }, + }, + windows: []restoreFile{}, + }, + wantOutput: wantOutput{ + unix: turbopath.AnchoredUnixPathArray{"back\\slash\\file"}.ToSystemPathArray(), + windows: turbopath.AnchoredUnixPathArray{}.ToSystemPathArray(), + }, + wantErr: wantErr{ + unix: nil, + windows: errNameWindowsUnsafe, + }, + }, + { + name: "fifo (and others) unsupported", + tarFiles: []tarFile{ + { + Header: &tar.Header{ + Name: "fifo", + Typeflag: tar.TypeFifo, + }, + }, + }, + wantFiles: wantFiles{ + unix: []restoreFile{}, + }, + wantOutput: wantOutput{ + unix: []turbopath.AnchoredSystemPath{}, + }, + wantErr: wantErr{ + unix: errUnsupportedFileType, + windows: errUnsupportedFileType, + }, + }, + } + for _, tt := range tests { + getTestFunc := func(compressed bool) func(t *testing.T) { + return func(t *testing.T) { + var archivePath turbopath.AbsoluteSystemPath + if compressed { + archivePath = compressTar(t, generateTar(t, tt.tarFiles)) + } else { + archivePath = generateTar(t, tt.tarFiles) + } + anchor := generateAnchor(t) + + cacheItem, err := Open(archivePath) + assert.NilError(t, err, "Open") + + restoreOutput, restoreErr := cacheItem.Restore(anchor) + var desiredErr error + if runtime.GOOS == "windows" { + desiredErr = tt.wantErr.windows + } else { + desiredErr = tt.wantErr.unix + } + if desiredErr != nil { + if !errors.Is(restoreErr, desiredErr) { + t.Errorf("wanted err: %v, got err: %v", tt.wantErr, restoreErr) + } + } else { + assert.NilError(t, restoreErr, "Restore") + } + + outputComparison := tt.wantOutput.unix + if runtime.GOOS == "windows" && tt.wantOutput.windows != nil { + outputComparison = tt.wantOutput.windows + } + + if !reflect.DeepEqual(restoreOutput, outputComparison) { + t.Errorf("Restore() = %v, want %v", restoreOutput, outputComparison) + } + + // Check files on disk. 
+ filesComparison := tt.wantFiles.unix + if runtime.GOOS == "windows" && tt.wantFiles.windows != nil { + filesComparison = tt.wantFiles.windows + } + for _, diskFile := range filesComparison { + assertFileExists(t, anchor, diskFile) + } + + assert.NilError(t, cacheItem.Close(), "Close") + } + } + t.Run(tt.name+"zst", getTestFunc(true)) + t.Run(tt.name, getTestFunc(false)) + } +} + +func Test_checkName(t *testing.T) { + tests := []struct { + path string + wellFormed bool + windowsSafe bool + }{ + // Empty + { + path: "", + wellFormed: false, + windowsSafe: false, + }, + // Bad prefix + { + path: ".", + wellFormed: false, + windowsSafe: true, + }, + { + path: "..", + wellFormed: false, + windowsSafe: true, + }, + { + path: "/", + wellFormed: false, + windowsSafe: true, + }, + { + path: "./", + wellFormed: false, + windowsSafe: true, + }, + { + path: "../", + wellFormed: false, + windowsSafe: true, + }, + // Bad prefix, suffixed + { + path: "/a", + wellFormed: false, + windowsSafe: true, + }, + { + path: "./a", + wellFormed: false, + windowsSafe: true, + }, + { + path: "../a", + wellFormed: false, + windowsSafe: true, + }, + // Bad Suffix + { + path: "/.", + wellFormed: false, + windowsSafe: true, + }, + { + path: "/..", + wellFormed: false, + windowsSafe: true, + }, + // Bad Suffix, with prefix + { + path: "a/.", + wellFormed: false, + windowsSafe: true, + }, + { + path: "a/..", + wellFormed: false, + windowsSafe: true, + }, + // Bad middle + { + path: "//", + wellFormed: false, + windowsSafe: true, + }, + { + path: "/./", + wellFormed: false, + windowsSafe: true, + }, + { + path: "/../", + wellFormed: false, + windowsSafe: true, + }, + // Bad middle, prefixed + { + path: "a//", + wellFormed: false, + windowsSafe: true, + }, + { + path: "a/./", + wellFormed: false, + windowsSafe: true, + }, + { + path: "a/../", + wellFormed: false, + windowsSafe: true, + }, + // Bad middle, suffixed + { + path: "//a", + wellFormed: false, + windowsSafe: true, + }, + { + path: "/./a", + wellFormed: false, + windowsSafe: true, + }, + { + path: "/../a", + wellFormed: false, + windowsSafe: true, + }, + // Bad middle, wrapped + { + path: "a//a", + wellFormed: false, + windowsSafe: true, + }, + { + path: "a/./a", + wellFormed: false, + windowsSafe: true, + }, + { + path: "a/../a", + wellFormed: false, + windowsSafe: true, + }, + // False positive tests + { + path: "...", + wellFormed: true, + windowsSafe: true, + }, + { + path: ".../a", + wellFormed: true, + windowsSafe: true, + }, + { + path: "a/...", + wellFormed: true, + windowsSafe: true, + }, + { + path: "a/.../a", + wellFormed: true, + windowsSafe: true, + }, + { + path: ".../...", + wellFormed: true, + windowsSafe: true, + }, + } + for _, tt := range tests { + t.Run(fmt.Sprintf("Path: \"%v\"", tt.path), func(t *testing.T) { + wellFormed, windowsSafe := checkName(tt.path) + if wellFormed != tt.wellFormed || windowsSafe != tt.windowsSafe { + t.Errorf("\nwantOutput: checkName(\"%v\") wellFormed = %v, windowsSafe %v\ngot: checkName(\"%v\") wellFormed = %v, windowsSafe %v", tt.path, tt.wellFormed, tt.windowsSafe, tt.path, wellFormed, windowsSafe) + } + }) + } +} + +func Test_canonicalizeLinkname(t *testing.T) { + // We're lying that this thing is absolute, but that's not relevant for tests. 
+ anchor := turbopath.AbsoluteSystemPath(filepath.Join("path", "to", "anchor")) + + tests := []struct { + name string + processedName turbopath.AnchoredSystemPath + linkname string + canonicalUnix string + canonicalWindows string + }{ + { + name: "hello world", + processedName: turbopath.AnchoredSystemPath("source"), + linkname: "target", + canonicalUnix: "path/to/anchor/target", + canonicalWindows: "path\\to\\anchor\\target", + }, + { + name: "Unix path subdirectory traversal", + processedName: turbopath.AnchoredUnixPath("child/source").ToSystemPath(), + linkname: "../sibling/target", + canonicalUnix: "path/to/anchor/sibling/target", + canonicalWindows: "path\\to\\anchor\\sibling\\target", + }, + { + name: "Windows path subdirectory traversal", + processedName: turbopath.AnchoredUnixPath("child/source").ToSystemPath(), + linkname: "..\\sibling\\target", + canonicalUnix: "path/to/anchor/child/..\\sibling\\target", + canonicalWindows: "path\\to\\anchor\\sibling\\target", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + canonical := tt.canonicalUnix + if runtime.GOOS == "windows" { + canonical = tt.canonicalWindows + } + if got := canonicalizeLinkname(anchor, tt.processedName, tt.linkname); got != canonical { + t.Errorf("canonicalizeLinkname() = %v, want %v", got, canonical) + } + }) + } +} + +func Test_canonicalizeName(t *testing.T) { + tests := []struct { + name string + fileName string + want turbopath.AnchoredSystemPath + wantErr error + }{ + { + name: "hello world", + fileName: "test.txt", + want: "test.txt", + }, + { + name: "directory", + fileName: "something/", + want: "something", + }, + { + name: "malformed name", + fileName: "//", + want: "", + wantErr: errNameMalformed, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := canonicalizeName(tt.fileName) + if tt.wantErr != nil && !errors.Is(err, tt.wantErr) { + t.Errorf("canonicalizeName() error = %v, wantErr %v", err, tt.wantErr) + return + } + if !reflect.DeepEqual(got, tt.want) { + t.Errorf("canonicalizeName() = %v, want %v", got, tt.want) + } + }) + } +} + +func TestCacheItem_Restore(t *testing.T) { + tests := []struct { + name string + tarFiles []tarFile + want []turbopath.AnchoredSystemPath + }{ + { + name: "duplicate restores", + tarFiles: []tarFile{ + { + Header: &tar.Header{ + Name: "target", + Typeflag: tar.TypeReg, + Mode: 0644, + }, + Body: "target", + }, + { + Header: &tar.Header{ + Name: "source", + Linkname: "target", + Typeflag: tar.TypeSymlink, + Mode: 0777, + }, + }, + { + Header: &tar.Header{ + Name: "one/", + Typeflag: tar.TypeDir, + Mode: 0755, + }, + }, + { + Header: &tar.Header{ + Name: "one/two/", + Typeflag: tar.TypeDir, + Mode: 0755, + }, + }, + }, + want: turbopath.AnchoredUnixPathArray{"target", "source", "one", "one/two"}.ToSystemPathArray(), + }, + } + for _, tt := range tests { + getTestFunc := func(compressed bool) func(t *testing.T) { + return func(t *testing.T) { + var archivePath turbopath.AbsoluteSystemPath + if compressed { + archivePath = compressTar(t, generateTar(t, tt.tarFiles)) + } else { + archivePath = generateTar(t, tt.tarFiles) + } + anchor := generateAnchor(t) + + cacheItem, err := Open(archivePath) + assert.NilError(t, err, "Open") + + restoreOutput, restoreErr := cacheItem.Restore(anchor) + if !reflect.DeepEqual(restoreOutput, tt.want) { + t.Errorf("#1 CacheItem.Restore() = %v, want %v", restoreOutput, tt.want) + } + assert.NilError(t, restoreErr, "Restore #1") + assert.NilError(t, cacheItem.Close(), "Close") + + 
cacheItem2, err2 := Open(archivePath) + assert.NilError(t, err2, "Open") + + restoreOutput2, restoreErr2 := cacheItem2.Restore(anchor) + if !reflect.DeepEqual(restoreOutput2, tt.want) { + t.Errorf("#2 CacheItem.Restore() = %v, want %v", restoreOutput2, tt.want) + } + assert.NilError(t, restoreErr2, "Restore #2") + assert.NilError(t, cacheItem2.Close(), "Close") + } + } + t.Run(tt.name+"zst", getTestFunc(true)) + t.Run(tt.name, getTestFunc(false)) + } +} diff --git a/cli/internal/chrometracing/chrometracing.go b/cli/internal/chrometracing/chrometracing.go new file mode 100644 index 0000000..d9325fd --- /dev/null +++ b/cli/internal/chrometracing/chrometracing.go @@ -0,0 +1,227 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package chrometracing writes per-process Chrome trace_event files that can be +// loaded into chrome://tracing. +package chrometracing + +import ( + "encoding/json" + "fmt" + "os" + "path/filepath" + "strings" + "sync" + "time" + + "github.com/google/chrometracing/traceinternal" +) + +var trace = struct { + start time.Time + pid uint64 + + fileMu sync.Mutex + file *os.File +}{ + pid: uint64(os.Getpid()), +} + +var out = setup(false) + +// Path returns the full path of the chrome://tracing trace_event file for +// display in log messages. +func Path() string { return out } + +// EnableTracing turns on tracing, regardless of running in a test or +// not. Tracing is enabled by default if the CHROMETRACING_DIR environment +// variable is present and non-empty. +func EnableTracing() { + trace.fileMu.Lock() + alreadyEnabled := trace.file != nil + trace.fileMu.Unlock() + if alreadyEnabled { + return + } + out = setup(true) +} + +func setup(overrideEnable bool) string { + inTest := os.Getenv("TEST_TMPDIR") != "" + explicitlyEnabled := os.Getenv("CHROMETRACING_DIR") != "" + enableTracing := inTest || explicitlyEnabled || overrideEnable + if !enableTracing { + return "" + } + + var err error + dir := os.Getenv("TEST_UNDECLARED_OUTPUTS_DIR") + if dir == "" { + dir = os.Getenv("CHROMETRACING_DIR") + } + if dir == "" { + dir = os.TempDir() + } + fn := filepath.Join(dir, fmt.Sprintf("%s.%d.trace", filepath.Base(os.Args[0]), trace.pid)) + trace.file, err = os.OpenFile(fn, os.O_WRONLY|os.O_CREATE|os.O_TRUNC|os.O_EXCL, 0644) + if err != nil { + // Using the log package from func init results in an error message + // being printed. + fmt.Fprintf(os.Stderr, "continuing without tracing: %v\n", err) + return "" + } + + // We only ever open a JSON array. Ending the array is optional as per + // go/trace_event so that not cleanly finished traces can still be read. 
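For orientation, a finished trace file comes out roughly like this (hypothetical values; the name/ph/pid/tid/ts keys are the standard Chrome trace_event field names that ViewerEvent marshals to, and the Close function later in this patch rewrites the final comma into the closing bracket):

[{"name":"process_name","ph":"M","pid":4242,"tid":4242,"args":{"name":"turbo run build"}},
{"name":"run build","ph":"B","pid":4242,"tid":0,"ts":12},
{"name":"run build","ph":"E","pid":4242,"tid":0,"ts":95310}]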
+ trace.file.Write([]byte{'['}) + trace.start = time.Now() + + writeEvent(&traceinternal.ViewerEvent{ + Name: "process_name", + Phase: "M", // Metadata Event + Pid: trace.pid, + Tid: trace.pid, + Arg: struct { + Name string `json:"name"` + }{ + Name: strings.Join(os.Args, " "), + }, + }) + return fn +} + +func writeEvent(ev *traceinternal.ViewerEvent) { + b, err := json.Marshal(&ev) + if err != nil { + fmt.Fprintf(os.Stderr, "%v\n", err) + return + } + trace.fileMu.Lock() + defer trace.fileMu.Unlock() + if _, err = trace.file.Write(b); err != nil { + fmt.Fprintf(os.Stderr, "%v\n", err) + return + } + if _, err = trace.file.Write([]byte{',', '\n'}); err != nil { + fmt.Fprintf(os.Stderr, "%v\n", err) + return + } +} + +const ( + begin = "B" + end = "E" +) + +// A PendingEvent represents an ongoing unit of work. The begin trace event has +// already been written, and calling Done will write the end trace event. +type PendingEvent struct { + name string + tid uint64 +} + +// Done writes the end trace event for this unit of work. +func (pe *PendingEvent) Done() { + if pe == nil || pe.name == "" || trace.file == nil { + return + } + writeEvent(&traceinternal.ViewerEvent{ + Name: pe.name, + Phase: end, + Pid: trace.pid, + Tid: pe.tid, + Time: float64(time.Since(trace.start).Microseconds()), + }) + releaseTid(pe.tid) +} + +// Event logs a unit of work. To instrument a Go function, use e.g.: +// +// func calcPi() { +// defer chrometracing.Event("calculate pi").Done() +// // … +// } +// +// For more finely-granular traces, use e.g.: +// +// for _, cmd := range commands { +// ev := chrometracing.Event("initialize " + cmd.Name) +// cmd.Init() +// ev.Done() +// } +func Event(name string) *PendingEvent { + if trace.file == nil { + return &PendingEvent{} + } + tid := tid() + writeEvent(&traceinternal.ViewerEvent{ + Name: name, + Phase: begin, + Pid: trace.pid, + Tid: tid, + Time: float64(time.Since(trace.start).Microseconds()), + }) + return &PendingEvent{ + name: name, + tid: tid, + } +} + +// tids is a chrome://tracing thread id pool. Go does not permit accessing the +// goroutine id, so we need to maintain our own identifier. The chrome://tracing +// file format requires a numeric thread id, so we just increment whenever we +// need a thread id, and reuse the ones no longer in use. +// +// In practice, parallelized sections of the code (many goroutines) end up using +// only as few thread ids as are concurrently in use, and the rest of the events +// mirror the code call stack nicely. See e.g. http://screen/7MPcAcvXQNUE3JZ +var tids struct { + sync.Mutex + + // We allocate chrome://tracing thread ids based on the index of the + // corresponding entry in the used slice. + used []bool + + // next points to the earliest unused tid to consider for the next tid to + // hand out. This is purely a performance optimization to avoid O(n) slice + // iteration. 
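A small sketch pinning down the reuse property described above; it relies only on the package-level pool state, so the concrete ids depend on whatever was allocated before it runs:

package chrometracing

import "testing"

// Sketch: a released thread id is handed out again before a new one is minted.
func TestTidReuseSketch(t *testing.T) {
	a, b, c := tid(), tid(), tid()
	releaseTid(b)
	if got := tid(); got != b {
		t.Errorf("expected released tid %d to be reused, got %d", b, got)
	}
	// Tidy up so other users of the pool see a clean slate.
	releaseTid(a)
	releaseTid(b)
	releaseTid(c)
}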
+ next int +} + +func tid() uint64 { + tids.Lock() + defer tids.Unlock() + // re-use released tids if any + for t := tids.next; t < len(tids.used); t++ { + if !tids.used[t] { + tids.used[t] = true + tids.next = t + 1 + return uint64(t) + } + } + // allocate a new tid + t := len(tids.used) + tids.used = append(tids.used, true) + tids.next = t + 1 + return uint64(t) +} + +func releaseTid(t uint64) { + tids.Lock() + defer tids.Unlock() + tids.used[int(t)] = false + if tids.next > int(t) { + tids.next = int(t) + } +} diff --git a/cli/internal/chrometracing/chrometracing_close.go b/cli/internal/chrometracing/chrometracing_close.go new file mode 100644 index 0000000..1b3a7b9 --- /dev/null +++ b/cli/internal/chrometracing/chrometracing_close.go @@ -0,0 +1,26 @@ +package chrometracing + +// Close overwrites the trailing (,\n) with (]\n) and closes the trace file. +// Close is implemented in a separate file to keep a separation between custom +// code and upstream from github.com/google/chrometracing. Additionally, we can +// enable linting for code we author, while leaving upstream code alone. +func Close() error { + trace.fileMu.Lock() + defer trace.fileMu.Unlock() + // Seek backwards two bytes (,\n) + if _, err := trace.file.Seek(-2, 1); err != nil { + return err + } + // Write 1 byte, ']', leaving the trailing '\n' in place + if _, err := trace.file.Write([]byte{']'}); err != nil { + return err + } + // Force the filesystem to write to disk + if err := trace.file.Sync(); err != nil { + return err + } + if err := trace.file.Close(); err != nil { + return err + } + return nil +} diff --git a/cli/internal/ci/ci.go b/cli/internal/ci/ci.go new file mode 100644 index 0000000..a22ad78 --- /dev/null +++ b/cli/internal/ci/ci.go @@ -0,0 +1,58 @@ +// Package ci is a simple utility to check if a program is being executed in common CI/CD/PaaS vendors. 
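+// A vendor is recognized either by the mere presence of an environment
+// variable (e.g. GITHUB_ACTIONS) or by a specific value of a shared one
+// (e.g. CI_NAME=codeship); see vendors.go.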
+// This is a partial port of https://github.com/watson/ci-info +package ci + +import "os" + +var isCI = os.Getenv("BUILD_ID") != "" || os.Getenv("BUILD_NUMBER") != "" || os.Getenv("CI") != "" || os.Getenv("CI_APP_ID") != "" || os.Getenv("CI_BUILD_ID") != "" || os.Getenv("CI_BUILD_NUMBER") != "" || os.Getenv("CI_NAME") != "" || os.Getenv("CONTINUOUS_INTEGRATION") != "" || os.Getenv("RUN_ID") != "" || os.Getenv("TEAMCITY_VERSION") != "" || false + +// IsCi returns true if the program is executing in a CI/CD environment +func IsCi() bool { + return isCI +} + +// Name returns the name of the CI vendor +func Name() string { + return Info().Name +} + +// Constant returns the name of the CI vendor as a constant +func Constant() string { + return Info().Constant +} + +// Info returns information about a CI vendor +func Info() Vendor { + // check both the env var key and value + for _, env := range Vendors { + if env.EvalEnv != nil { + for name, value := range env.EvalEnv { + if os.Getenv(name) == value { + return env + } + } + } else { + // check for any of the listed env var keys, with any value + if env.Env.Any != nil && len(env.Env.Any) > 0 { + for _, envVar := range env.Env.Any { + if os.Getenv(envVar) != "" { + return env + } + } + // check for all of the listed env var keys, with any value + } else if env.Env.All != nil && len(env.Env.All) > 0 { + all := true + for _, envVar := range env.Env.All { + if os.Getenv(envVar) == "" { + all = false + break + } + } + if all { + return env + } + } + } + } + return Vendor{} +} diff --git a/cli/internal/ci/ci_test.go b/cli/internal/ci/ci_test.go new file mode 100644 index 0000000..333ff61 --- /dev/null +++ b/cli/internal/ci/ci_test.go @@ -0,0 +1,105 @@ +package ci + +import ( + "os" + "reflect" + "strings" + "testing" +) + +func getVendor(name string) Vendor { + for _, v := range Vendors { + if v.Name == name { + return v + } + } + return Vendor{} +} + +func TestInfo(t *testing.T) { + tests := []struct { + name string + setEnv []string + want Vendor + }{ + { + name: "AppVeyor", + setEnv: []string{"APPVEYOR"}, + want: getVendor("AppVeyor"), + }, + { + name: "Vercel", + setEnv: []string{"VERCEL", "NOW_BUILDER"}, + want: getVendor("Vercel"), + }, + { + name: "Render", + setEnv: []string{"RENDER"}, + want: getVendor("Render"), + }, + { + name: "Netlify", + setEnv: []string{"NETLIFY"}, + want: getVendor("Netlify CI"), + }, + { + name: "Jenkins", + setEnv: []string{"BUILD_ID", "JENKINS_URL"}, + want: getVendor("Jenkins"), + }, + { + name: "Jenkins - failing", + setEnv: []string{"BUILD_ID"}, + want: getVendor(""), + }, + { + name: "GitHub Actions", + setEnv: []string{"GITHUB_ACTIONS"}, + want: getVendor("GitHub Actions"), + }, + { + name: "Codeship", + setEnv: []string{"CI_NAME=codeship"}, + want: getVendor("Codeship"), + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + // unset existing envs + liveCi := "" + if Name() == "GitHub Actions" { + liveCi = os.Getenv("GITHUB_ACTIONS") + err := os.Unsetenv("GITHUB_ACTIONS") + if err != nil { + t.Errorf("Error un-setting GITHUB_ACTIONS env: %s", err) + } + } + // set envs + for _, env := range tt.setEnv { + envParts := strings.Split(env, "=") + val := "some value" + if len(envParts) > 1 { + val = envParts[1] + } + err := os.Setenv(envParts[0], val) + if err != nil { + t.Errorf("Error setting %s for %s test", envParts[0], tt.name) + } + defer os.Unsetenv(envParts[0]) //nolint errcheck + + } + // run test + if got := Info(); !reflect.DeepEqual(got, tt.want) { + t.Errorf("Info() = %v, want 
%v", got, tt.want) + } + + // reset env + if Name() == "GitHub Actions" { + err := os.Setenv("GITHUB_ACTIONS", liveCi) + if err != nil { + t.Errorf("Error re-setting GITHUB_ACTIONS env: %s", err) + } + } + }) + } +} diff --git a/cli/internal/ci/vendors.go b/cli/internal/ci/vendors.go new file mode 100644 index 0000000..13bce77 --- /dev/null +++ b/cli/internal/ci/vendors.go @@ -0,0 +1,253 @@ +package ci + +type vendorEnvs struct { + Any []string + All []string +} + +// Vendor describes a CI/CD vendor execution environment +type Vendor struct { + // Name is the name of the vendor + Name string + // Constant is the environment variable prefix used by the vendor + Constant string + // Env is one or many environment variables that can be used to quickly determine the vendor (using simple os.Getenv(env) check) + Env vendorEnvs + // EvalEnv is key/value map of environment variables that can be used to quickly determine the vendor + EvalEnv map[string]string +} + +// Vendors is a list of common CI/CD vendors (from https://github.com/watson/ci-info/blob/master/vendors.json) +var Vendors = []Vendor{ + { + Name: "Appcircle", + Constant: "APPCIRCLE", + Env: vendorEnvs{Any: []string{"AC_APPCIRCLE"}}, + }, + { + Name: "AppVeyor", + Constant: "APPVEYOR", + Env: vendorEnvs{Any: []string{"APPVEYOR"}}, + }, + { + Name: "AWS CodeBuild", + Constant: "CODEBUILD", + Env: vendorEnvs{Any: []string{"CODEBUILD_BUILD_ARN"}}, + }, + { + Name: "Azure Pipelines", + Constant: "AZURE_PIPELINES", + Env: vendorEnvs{Any: []string{"SYSTEM_TEAMFOUNDATIONCOLLECTIONURI"}}, + }, + { + Name: "Bamboo", + Constant: "BAMBOO", + Env: vendorEnvs{Any: []string{"bamboo_planKey"}}, + }, + { + Name: "Bitbucket Pipelines", + Constant: "BITBUCKET", + Env: vendorEnvs{Any: []string{"BITBUCKET_COMMIT"}}, + }, + { + Name: "Bitrise", + Constant: "BITRISE", + Env: vendorEnvs{Any: []string{"BITRISE_IO"}}, + }, + { + Name: "Buddy", + Constant: "BUDDY", + Env: vendorEnvs{Any: []string{"BUDDY_WORKSPACE_ID"}}, + }, + { + Name: "Buildkite", + Constant: "BUILDKITE", + Env: vendorEnvs{Any: []string{"BUILDKITE"}}, + }, + { + Name: "CircleCI", + Constant: "CIRCLE", + Env: vendorEnvs{Any: []string{"CIRCLECI"}}, + }, + { + Name: "Cirrus CI", + Constant: "CIRRUS", + Env: vendorEnvs{Any: []string{"CIRRUS_CI"}}, + }, + { + Name: "Codefresh", + Constant: "CODEFRESH", + Env: vendorEnvs{Any: []string{"CF_BUILD_ID"}}, + }, + { + Name: "Codemagic", + Constant: "CODEMAGIC", + Env: vendorEnvs{Any: []string{"CM_BUILD_ID"}}, + }, + { + Name: "Codeship", + Constant: "CODESHIP", + EvalEnv: map[string]string{ + "CI_NAME": "codeship", + }, + }, + { + Name: "Drone", + Constant: "DRONE", + Env: vendorEnvs{Any: []string{"DRONE"}}, + }, + { + Name: "dsari", + Constant: "DSARI", + Env: vendorEnvs{Any: []string{"DSARI"}}, + }, + { + Name: "Expo Application Services", + Constant: "EAS", + Env: vendorEnvs{Any: []string{"EAS_BUILD"}}, + }, + { + Name: "GitHub Actions", + Constant: "GITHUB_ACTIONS", + Env: vendorEnvs{Any: []string{"GITHUB_ACTIONS"}}, + }, + { + Name: "GitLab CI", + Constant: "GITLAB", + Env: vendorEnvs{Any: []string{"GITLAB_CI"}}, + }, + { + Name: "GoCD", + Constant: "GOCD", + Env: vendorEnvs{Any: []string{"GO_PIPELINE_LABEL"}}, + }, + { + Name: "Google Cloud Build", + Constant: "GOOGLE_CLOUD_BUILD", + Env: vendorEnvs{Any: []string{"BUILDER_OUTPUT"}}, + }, + { + Name: "LayerCI", + Constant: "LAYERCI", + Env: vendorEnvs{Any: []string{"LAYERCI"}}, + }, + { + Name: "Gerrit", + Constant: "GERRIT", + Env: vendorEnvs{Any: []string{"GERRIT_PROJECT"}}, + }, + { + Name: 
"Hudson", + Constant: "HUDSON", + Env: vendorEnvs{Any: []string{"HUDSON"}}, + }, + { + Name: "Jenkins", + Constant: "JENKINS", + Env: vendorEnvs{All: []string{"JENKINS_URL", "BUILD_ID"}}, + }, + { + Name: "Magnum CI", + Constant: "MAGNUM", + Env: vendorEnvs{Any: []string{"MAGNUM"}}, + }, + { + Name: "Netlify CI", + Constant: "NETLIFY", + Env: vendorEnvs{Any: []string{"NETLIFY"}}, + }, + { + Name: "Nevercode", + Constant: "NEVERCODE", + Env: vendorEnvs{Any: []string{"NEVERCODE"}}, + }, + { + Name: "ReleaseHub", + Constant: "RELEASEHUB", + Env: vendorEnvs{Any: []string{"RELEASE_BUILD_ID"}}, + }, + { + Name: "Render", + Constant: "RENDER", + Env: vendorEnvs{Any: []string{"RENDER"}}, + }, + { + Name: "Sail CI", + Constant: "SAIL", + Env: vendorEnvs{Any: []string{"SAILCI"}}, + }, + { + Name: "Screwdriver", + Constant: "SCREWDRIVER", + Env: vendorEnvs{Any: []string{"SCREWDRIVER"}}, + }, + { + Name: "Semaphore", + Constant: "SEMAPHORE", + Env: vendorEnvs{Any: []string{"SEMAPHORE"}}, + }, + { + Name: "Shippable", + Constant: "SHIPPABLE", + Env: vendorEnvs{Any: []string{"SHIPPABLE"}}, + }, + { + Name: "Solano CI", + Constant: "SOLANO", + Env: vendorEnvs{Any: []string{"TDDIUM"}}, + }, + { + Name: "Sourcehut", + Constant: "SOURCEHUT", + EvalEnv: map[string]string{ + "CI_NAME": "sourcehut", + }, + }, + { + Name: "Strider CD", + Constant: "STRIDER", + Env: vendorEnvs{Any: []string{"STRIDER"}}, + }, + { + Name: "TaskCluster", + Constant: "TASKCLUSTER", + Env: vendorEnvs{All: []string{"TASK_ID", "RUN_ID"}}, + }, + { + Name: "TeamCity", + Constant: "TEAMCITY", + Env: vendorEnvs{Any: []string{"TEAMCITY_VERSION"}}, + }, + { + Name: "Travis CI", + Constant: "TRAVIS", + Env: vendorEnvs{Any: []string{"TRAVIS"}}, + }, + { + Name: "Vercel", + Constant: "VERCEL", + Env: vendorEnvs{Any: []string{"NOW_BUILDER", "VERCEL"}}, + }, + { + Name: "Visual Studio App Center", + Constant: "APPCENTER", + Env: vendorEnvs{Any: []string{"APPCENTER"}}, + }, + { + Name: "Woodpecker", + Constant: "WOODPECKER", + EvalEnv: map[string]string{ + "CI": "woodpecker", + }, + }, + { + Name: "Xcode Cloud", + Constant: "XCODE_CLOUD", + Env: vendorEnvs{Any: []string{"CI_XCODE_PROJECT"}}, + }, + { + Name: "Xcode Server", + Constant: "XCODE_SERVER", + Env: vendorEnvs{Any: []string{"XCS"}}, + }, +} diff --git a/cli/internal/client/analytics.go b/cli/internal/client/analytics.go new file mode 100644 index 0000000..71381f0 --- /dev/null +++ b/cli/internal/client/analytics.go @@ -0,0 +1,21 @@ +package client + +import ( + "encoding/json" +) + +// RecordAnalyticsEvents is a specific method for POSTing events to Vercel +func (c *APIClient) RecordAnalyticsEvents(events []map[string]interface{}) error { + body, err := json.Marshal(events) + if err != nil { + return err + + } + + // We don't care about the response here + if _, err := c.JSONPost("/v8/artifacts/events", body); err != nil { + return err + } + + return nil +} diff --git a/cli/internal/client/cache.go b/cli/internal/client/cache.go new file mode 100644 index 0000000..11ad87a --- /dev/null +++ b/cli/internal/client/cache.go @@ -0,0 +1,167 @@ +package client + +import ( + "encoding/json" + "fmt" + "io" + "io/ioutil" + "net/http" + "net/url" + "strings" + + "github.com/hashicorp/go-retryablehttp" + "github.com/vercel/turbo/cli/internal/ci" + "github.com/vercel/turbo/cli/internal/util" +) + +// PutArtifact uploads an artifact associated with a given hash string to the remote cache +func (c *APIClient) PutArtifact(hash string, artifactBody []byte, duration int, tag string) error { + if err := 
c.okToRequest(); err != nil { + return err + } + params := url.Values{} + c.addTeamParam(¶ms) + // only add a ? if it's actually needed (makes logging cleaner) + encoded := params.Encode() + if encoded != "" { + encoded = "?" + encoded + } + + requestURL := c.makeURL("/v8/artifacts/" + hash + encoded) + allowAuth := true + if c.usePreflight { + resp, latestRequestURL, err := c.doPreflight(requestURL, http.MethodPut, "Content-Type, x-artifact-duration, Authorization, User-Agent, x-artifact-tag") + if err != nil { + return fmt.Errorf("pre-flight request failed before trying to store in HTTP cache: %w", err) + } + requestURL = latestRequestURL + headers := resp.Header.Get("Access-Control-Allow-Headers") + allowAuth = strings.Contains(strings.ToLower(headers), strings.ToLower("Authorization")) + } + + req, err := retryablehttp.NewRequest(http.MethodPut, requestURL, artifactBody) + req.Header.Set("Content-Type", "application/octet-stream") + req.Header.Set("x-artifact-duration", fmt.Sprintf("%v", duration)) + if allowAuth { + req.Header.Set("Authorization", "Bearer "+c.token) + } + req.Header.Set("User-Agent", c.userAgent()) + if ci.IsCi() { + req.Header.Set("x-artifact-client-ci", ci.Constant()) + } + if tag != "" { + req.Header.Set("x-artifact-tag", tag) + } + if err != nil { + return fmt.Errorf("[WARNING] Invalid cache URL: %w", err) + } + + resp, err := c.HTTPClient.Do(req) + if err != nil { + return fmt.Errorf("[ERROR] Failed to store files in HTTP cache: %w", err) + } + defer func() { _ = resp.Body.Close() }() + if resp.StatusCode == http.StatusForbidden { + return c.handle403(resp.Body) + } + if resp.StatusCode != http.StatusOK { + return fmt.Errorf("[ERROR] Failed to store files in HTTP cache: %s against URL %s", resp.Status, requestURL) + } + return nil +} + +// FetchArtifact attempts to retrieve the build artifact with the given hash from the remote cache +func (c *APIClient) FetchArtifact(hash string) (*http.Response, error) { + return c.getArtifact(hash, http.MethodGet) +} + +// ArtifactExists attempts to determine if the build artifact with the given hash exists in the Remote Caching server +func (c *APIClient) ArtifactExists(hash string) (*http.Response, error) { + return c.getArtifact(hash, http.MethodHead) +} + +// getArtifact attempts to retrieve the build artifact with the given hash from the remote cache +func (c *APIClient) getArtifact(hash string, httpMethod string) (*http.Response, error) { + if httpMethod != http.MethodHead && httpMethod != http.MethodGet { + return nil, fmt.Errorf("invalid httpMethod %v, expected GET or HEAD", httpMethod) + } + + if err := c.okToRequest(); err != nil { + return nil, err + } + params := url.Values{} + c.addTeamParam(¶ms) + // only add a ? if it's actually needed (makes logging cleaner) + encoded := params.Encode() + if encoded != "" { + encoded = "?" 
+ encoded + } + + requestURL := c.makeURL("/v8/artifacts/" + hash + encoded) + allowAuth := true + if c.usePreflight { + resp, latestRequestURL, err := c.doPreflight(requestURL, http.MethodGet, "Authorization, User-Agent") + if err != nil { + return nil, fmt.Errorf("pre-flight request failed before trying to fetch files in HTTP cache: %w", err) + } + requestURL = latestRequestURL + headers := resp.Header.Get("Access-Control-Allow-Headers") + allowAuth = strings.Contains(strings.ToLower(headers), strings.ToLower("Authorization")) + } + + req, err := retryablehttp.NewRequest(httpMethod, requestURL, nil) + if allowAuth { + req.Header.Set("Authorization", "Bearer "+c.token) + } + req.Header.Set("User-Agent", c.userAgent()) + if err != nil { + return nil, fmt.Errorf("invalid cache URL: %w", err) + } + + resp, err := c.HTTPClient.Do(req) + if err != nil { + return nil, fmt.Errorf("failed to fetch artifact: %v", err) + } else if resp.StatusCode == http.StatusForbidden { + err = c.handle403(resp.Body) + _ = resp.Body.Close() + return nil, err + } + return resp, nil +} + +func (c *APIClient) handle403(body io.Reader) error { + raw, err := ioutil.ReadAll(body) + if err != nil { + return fmt.Errorf("failed to read response %v", err) + } + apiError := &apiError{} + err = json.Unmarshal(raw, apiError) + if err != nil { + return fmt.Errorf("failed to read response (%v): %v", string(raw), err) + } + disabledErr, err := apiError.cacheDisabled() + if err != nil { + return err + } + return disabledErr +} + +type apiError struct { + Code string `json:"code"` + Message string `json:"message"` +} + +func (ae *apiError) cacheDisabled() (*util.CacheDisabledError, error) { + if strings.HasPrefix(ae.Code, "remote_caching_") { + statusString := ae.Code[len("remote_caching_"):] + status, err := util.CachingStatusFromString(statusString) + if err != nil { + return nil, err + } + return &util.CacheDisabledError{ + Status: status, + Message: ae.Message, + }, nil + } + return nil, fmt.Errorf("unknown status %v: %v", ae.Code, ae.Message) +} diff --git a/cli/internal/client/client.go b/cli/internal/client/client.go new file mode 100644 index 0000000..822b2df --- /dev/null +++ b/cli/internal/client/client.go @@ -0,0 +1,309 @@ +// Package client implements some interfaces and convenience methods to interact with Vercel APIs and Remote Cache +package client + +import ( + "context" + "crypto/x509" + "errors" + "fmt" + "io/ioutil" + "net/http" + "net/url" + "runtime" + "strings" + "sync/atomic" + "time" + + "github.com/hashicorp/go-hclog" + "github.com/hashicorp/go-retryablehttp" + "github.com/vercel/turbo/cli/internal/ci" +) + +// APIClient is the main interface for making network requests to Vercel +type APIClient struct { + // The api's base URL + baseURL string + token string + turboVersion string + + // Must be used via atomic package + currentFailCount uint64 + HTTPClient *retryablehttp.Client + teamID string + teamSlug string + // Whether or not to send preflight requests before uploads + usePreflight bool +} + +// ErrTooManyFailures is returned from remote cache API methods after `maxRemoteFailCount` errors have occurred +var ErrTooManyFailures = errors.New("skipping HTTP Request, too many failures have occurred") + +// _maxRemoteFailCount is the number of failed requests before we stop trying to upload/download +// artifacts to the remote cache +const _maxRemoteFailCount = uint64(3) + +// SetToken updates the APIClient's Token +func (c *APIClient) SetToken(token string) { + c.token = token +} + +// RemoteConfig holds 
the authentication and endpoint details for the API client +type RemoteConfig struct { + Token string + TeamID string + TeamSlug string + APIURL string +} + +// Opts holds values for configuring the behavior of the API client +type Opts struct { + UsePreflight bool + Timeout uint64 +} + +// ClientTimeout Exported ClientTimeout used in run.go +const ClientTimeout uint64 = 20 + +// NewClient creates a new APIClient +func NewClient(remoteConfig RemoteConfig, logger hclog.Logger, turboVersion string, opts Opts) *APIClient { + client := &APIClient{ + baseURL: remoteConfig.APIURL, + turboVersion: turboVersion, + HTTPClient: &retryablehttp.Client{ + HTTPClient: &http.Client{ + Timeout: time.Duration(opts.Timeout) * time.Second, + }, + RetryWaitMin: 2 * time.Second, + RetryWaitMax: 10 * time.Second, + RetryMax: 2, + Backoff: retryablehttp.DefaultBackoff, + Logger: logger, + }, + token: remoteConfig.Token, + teamID: remoteConfig.TeamID, + teamSlug: remoteConfig.TeamSlug, + usePreflight: opts.UsePreflight, + } + client.HTTPClient.CheckRetry = client.checkRetry + return client +} + +// hasUser returns true if we have credentials for a user +func (c *APIClient) hasUser() bool { + return c.token != "" +} + +// IsLinked returns true if we have a user and linked team +func (c *APIClient) IsLinked() bool { + return c.hasUser() && (c.teamID != "" || c.teamSlug != "") +} + +// GetTeamID returns the currently configured team id +func (c *APIClient) GetTeamID() string { + return c.teamID +} + +func (c *APIClient) retryCachePolicy(resp *http.Response, err error) (bool, error) { + if err != nil { + if errors.As(err, &x509.UnknownAuthorityError{}) { + // Don't retry if the error was due to TLS cert verification failure. + atomic.AddUint64(&c.currentFailCount, 1) + return false, err + } + atomic.AddUint64(&c.currentFailCount, 1) + return true, nil + } + + // 429 Too Many Requests is recoverable. Sometimes the server puts + // a Retry-After response header to indicate when the server is + // available to start processing request from client. + if resp.StatusCode == http.StatusTooManyRequests { + atomic.AddUint64(&c.currentFailCount, 1) + return true, nil + } + + // Check the response code. We retry on 500-range responses to allow + // the server time to recover, as 500's are typically not permanent + // errors and may relate to outages on the server side. This will catch + // invalid response codes as well, like 0 and 999. + if resp.StatusCode == 0 || (resp.StatusCode >= 500 && resp.StatusCode != 501) { + atomic.AddUint64(&c.currentFailCount, 1) + return true, fmt.Errorf("unexpected HTTP status %s", resp.Status) + } + + // swallow the error and stop retrying + return false, nil +} + +func (c *APIClient) checkRetry(ctx context.Context, resp *http.Response, err error) (bool, error) { + // do not retry on context.Canceled or context.DeadlineExceeded + if ctx.Err() != nil { + atomic.AddUint64(&c.currentFailCount, 1) + return false, ctx.Err() + } + + // we're squashing the error from the request and substituting any error that might come + // from our retry policy. 
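+	// Note that every failure path in retryCachePolicy also increments
+	// currentFailCount, so once _maxRemoteFailCount is reached, okToRequest
+	// starts returning ErrTooManyFailures and requests stop client-wide, not
+	// just for this request.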
+ shouldRetry, err := c.retryCachePolicy(resp, err) + if shouldRetry { + // Our policy says it's ok to retry, but we need to check the failure count + if retryErr := c.okToRequest(); retryErr != nil { + return false, retryErr + } + } + return shouldRetry, err +} + +// okToRequest returns nil if it's ok to make a request, and returns the error to +// return to the caller if a request is not allowed +func (c *APIClient) okToRequest() error { + if atomic.LoadUint64(&c.currentFailCount) < _maxRemoteFailCount { + return nil + } + return ErrTooManyFailures +} + +func (c *APIClient) makeURL(endpoint string) string { + return fmt.Sprintf("%v%v", c.baseURL, endpoint) +} + +func (c *APIClient) userAgent() string { + return fmt.Sprintf("turbo %v %v %v (%v)", c.turboVersion, runtime.Version(), runtime.GOOS, runtime.GOARCH) +} + +// doPreflight returns response with closed body, latest request url, and any errors to the caller +func (c *APIClient) doPreflight(requestURL string, requestMethod string, requestHeaders string) (*http.Response, string, error) { + req, err := retryablehttp.NewRequest(http.MethodOptions, requestURL, nil) + req.Header.Set("User-Agent", c.userAgent()) + req.Header.Set("Access-Control-Request-Method", requestMethod) + req.Header.Set("Access-Control-Request-Headers", requestHeaders) + req.Header.Set("Authorization", "Bearer "+c.token) + if err != nil { + return nil, requestURL, fmt.Errorf("[WARNING] Invalid cache URL: %w", err) + } + + // If resp is not nil, ignore any errors + // because most likely unimportant for preflight to handle. + // Let follow-up request handle potential errors. + resp, err := c.HTTPClient.Do(req) + if resp == nil { + return resp, requestURL, err + } + defer resp.Body.Close() //nolint:golint,errcheck // nothing to do + // The client will continue following 307, 308 redirects until it hits + // max redirects, gets an error, or gets a normal response. + // Get the url from the Location header or get the url used in the last + // request (could have changed after following redirects). + // Note that net/http client does not continue redirecting the preflight + // request with the OPTIONS method for 301, 302, and 303 redirects. + // See golang/go Issue 18570. 
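+	// Illustrative example (hypothetical URLs): a preflight sent to
+	// https://api.example.com/v8/artifacts/<hash> that is answered with a
+	// 307/308 redirect to https://cache.example.com/v8/artifacts/<hash> makes
+	// the follow-up request target the redirected URL rather than the original.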
+ if locationURL, err := resp.Location(); err == nil { + requestURL = locationURL.String() + } else { + requestURL = resp.Request.URL.String() + } + return resp, requestURL, nil +} + +func (c *APIClient) addTeamParam(params *url.Values) { + if c.teamID != "" && strings.HasPrefix(c.teamID, "team_") { + params.Add("teamId", c.teamID) + } + if c.teamSlug != "" { + params.Add("slug", c.teamSlug) + } +} + +// JSONPatch sends a byte array (json.marshalled payload) to a given endpoint with PATCH +func (c *APIClient) JSONPatch(endpoint string, body []byte) ([]byte, error) { + resp, err := c.request(endpoint, http.MethodPatch, body) + if err != nil { + return nil, err + } + + rawResponse, err := ioutil.ReadAll(resp.Body) + if err != nil { + return nil, fmt.Errorf("failed to read response %v", err) + } + if resp.StatusCode != http.StatusOK { + return nil, fmt.Errorf("%s", string(rawResponse)) + } + + return rawResponse, nil +} + +// JSONPost sends a byte array (json.marshalled payload) to a given endpoint with POST +func (c *APIClient) JSONPost(endpoint string, body []byte) ([]byte, error) { + resp, err := c.request(endpoint, http.MethodPost, body) + if err != nil { + return nil, err + } + + rawResponse, err := ioutil.ReadAll(resp.Body) + if err != nil { + return nil, fmt.Errorf("failed to read response %v", err) + } + + // For non 200/201 status codes, return the response body as an error + if resp.StatusCode != http.StatusOK && resp.StatusCode != http.StatusCreated { + return nil, fmt.Errorf("%s", string(rawResponse)) + } + + return rawResponse, nil +} + +func (c *APIClient) request(endpoint string, method string, body []byte) (*http.Response, error) { + if err := c.okToRequest(); err != nil { + return nil, err + } + + params := url.Values{} + c.addTeamParam(¶ms) + encoded := params.Encode() + if encoded != "" { + encoded = "?" 
+ encoded + } + + requestURL := c.makeURL(endpoint + encoded) + + allowAuth := true + if c.usePreflight { + resp, latestRequestURL, err := c.doPreflight(requestURL, method, "Authorization, User-Agent") + if err != nil { + return nil, fmt.Errorf("pre-flight request failed before trying to fetch files in HTTP cache: %w", err) + } + + requestURL = latestRequestURL + headers := resp.Header.Get("Access-Control-Allow-Headers") + allowAuth = strings.Contains(strings.ToLower(headers), strings.ToLower("Authorization")) + } + + req, err := retryablehttp.NewRequest(method, requestURL, body) + if err != nil { + return nil, err + } + + // Set headers + req.Header.Set("Content-Type", "application/json") + req.Header.Set("User-Agent", c.userAgent()) + + if allowAuth { + req.Header.Set("Authorization", "Bearer "+c.token) + } + + if ci.IsCi() { + req.Header.Set("x-artifact-client-ci", ci.Constant()) + } + + resp, err := c.HTTPClient.Do(req) + if err != nil { + return nil, err + } + + // If there isn't a response, something else probably went wrong + if resp == nil { + return nil, fmt.Errorf("response from %s is nil, something went wrong", requestURL) + } + + return resp, nil +} diff --git a/cli/internal/client/client_test.go b/cli/internal/client/client_test.go new file mode 100644 index 0000000..36ff3fb --- /dev/null +++ b/cli/internal/client/client_test.go @@ -0,0 +1,159 @@ +package client + +import ( + "bytes" + "encoding/json" + "errors" + "io/ioutil" + "net/http" + "net/http/httptest" + "reflect" + "testing" + + "github.com/google/uuid" + "github.com/hashicorp/go-hclog" + "github.com/vercel/turbo/cli/internal/util" +) + +func Test_sendToServer(t *testing.T) { + ch := make(chan []byte, 1) + ts := httptest.NewServer( + http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) { + defer req.Body.Close() + b, err := ioutil.ReadAll(req.Body) + if err != nil { + t.Errorf("failed to read request %v", err) + } + ch <- b + w.WriteHeader(200) + w.Write([]byte{}) + })) + defer ts.Close() + + remoteConfig := RemoteConfig{ + TeamSlug: "my-team-slug", + APIURL: ts.URL, + Token: "my-token", + } + apiClient := NewClient(remoteConfig, hclog.Default(), "v1", Opts{}) + + myUUID, err := uuid.NewUUID() + if err != nil { + t.Errorf("failed to create uuid %v", err) + } + events := []map[string]interface{}{ + { + "sessionId": myUUID.String(), + "hash": "foo", + "source": "LOCAL", + "event": "hit", + }, + { + "sessionId": myUUID.String(), + "hash": "bar", + "source": "REMOTE", + "event": "MISS", + }, + } + + apiClient.RecordAnalyticsEvents(events) + + body := <-ch + + result := []map[string]interface{}{} + err = json.Unmarshal(body, &result) + if err != nil { + t.Errorf("unmarshalling body %v", err) + } + if !reflect.DeepEqual(events, result) { + t.Errorf("roundtrip got %v, want %v", result, events) + } +} + +func Test_PutArtifact(t *testing.T) { + ch := make(chan []byte, 1) + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) { + defer req.Body.Close() + b, err := ioutil.ReadAll(req.Body) + if err != nil { + t.Errorf("failed to read request %v", err) + } + ch <- b + w.WriteHeader(200) + w.Write([]byte{}) + })) + defer ts.Close() + + // Set up test expected values + remoteConfig := RemoteConfig{ + TeamSlug: "my-team-slug", + APIURL: ts.URL, + Token: "my-token", + } + apiClient := NewClient(remoteConfig, hclog.Default(), "v1", Opts{}) + expectedArtifactBody := []byte("My string artifact") + + // Test Put Artifact + apiClient.PutArtifact("hash", expectedArtifactBody, 500, "") + 
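+	// Receiving from ch blocks until the handler above has read the uploaded
+	// body, so this both synchronizes with the server goroutine and yields the
+	// bytes for the assertion below.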
+	testBody := <-ch
+	if !bytes.Equal(expectedArtifactBody, testBody) {
+		t.Errorf("Handler read '%v', wants '%v'", testBody, expectedArtifactBody)
+	}
+
+}
+
+func Test_PutWhenCachingDisabled(t *testing.T) {
+	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
+		defer func() { _ = req.Body.Close() }()
+		w.WriteHeader(403)
+		_, _ = w.Write([]byte("{\"code\": \"remote_caching_disabled\",\"message\":\"caching disabled\"}"))
+	}))
+	defer ts.Close()
+
+	// Set up test expected values
+	remoteConfig := RemoteConfig{
+		TeamSlug: "my-team-slug",
+		APIURL:   ts.URL,
+		Token:    "my-token",
+	}
+	apiClient := NewClient(remoteConfig, hclog.Default(), "v1", Opts{})
+	expectedArtifactBody := []byte("My string artifact")
+	// Test Put Artifact
+	err := apiClient.PutArtifact("hash", expectedArtifactBody, 500, "")
+	cd := &util.CacheDisabledError{}
+	if !errors.As(err, &cd) {
+		t.Errorf("expected cache disabled error, got %v", err)
+	}
+	if cd.Status != util.CachingStatusDisabled {
+		t.Errorf("caching status: expected %v, got %v", util.CachingStatusDisabled, cd.Status)
+	}
+}
+
+func Test_FetchWhenCachingDisabled(t *testing.T) {
+	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
+		defer func() { _ = req.Body.Close() }()
+		w.WriteHeader(403)
+		_, _ = w.Write([]byte("{\"code\": \"remote_caching_disabled\",\"message\":\"caching disabled\"}"))
+	}))
+	defer ts.Close()
+
+	// Set up test expected values
+	remoteConfig := RemoteConfig{
+		TeamSlug: "my-team-slug",
+		APIURL:   ts.URL,
+		Token:    "my-token",
+	}
+	apiClient := NewClient(remoteConfig, hclog.Default(), "v1", Opts{})
+	// Test Fetch Artifact
+	resp, err := apiClient.FetchArtifact("hash")
+	cd := &util.CacheDisabledError{}
+	if !errors.As(err, &cd) {
+		t.Errorf("expected cache disabled error, got %v", err)
+	}
+	if cd.Status != util.CachingStatusDisabled {
+		t.Errorf("caching status: expected %v, got %v", util.CachingStatusDisabled, cd.Status)
+	}
+	if resp != nil {
+		t.Errorf("response got %v, want <nil>", resp)
+	}
+}
diff --git a/cli/internal/cmd/root.go b/cli/internal/cmd/root.go
new file mode 100644
index 0000000..d8d0e33
--- /dev/null
+++ b/cli/internal/cmd/root.go
@@ -0,0 +1,157 @@
+// Package cmd holds the root cobra command for turbo
+package cmd
+
+import (
+	"context"
+	"fmt"
+	"os"
+	"runtime/pprof"
+	"runtime/trace"
+
+	"github.com/pkg/errors"
+	"github.com/vercel/turbo/cli/internal/cmdutil"
+	"github.com/vercel/turbo/cli/internal/daemon"
+	"github.com/vercel/turbo/cli/internal/process"
+	"github.com/vercel/turbo/cli/internal/prune"
+	"github.com/vercel/turbo/cli/internal/run"
+	"github.com/vercel/turbo/cli/internal/signals"
+	"github.com/vercel/turbo/cli/internal/turbostate"
+	"github.com/vercel/turbo/cli/internal/util"
+)
+
+func initializeOutputFiles(helper *cmdutil.Helper, parsedArgs *turbostate.ParsedArgsFromRust) error {
+	if parsedArgs.Trace != "" {
+		cleanup, err := createTraceFile(parsedArgs.Trace)
+		if err != nil {
+			return fmt.Errorf("failed to create trace file: %v", err)
+		}
+		helper.RegisterCleanup(cleanup)
+	}
+	if parsedArgs.Heap != "" {
+		cleanup, err := createHeapFile(parsedArgs.Heap)
+		if err != nil {
+			return fmt.Errorf("failed to create heap file: %v", err)
+		}
+		helper.RegisterCleanup(cleanup)
+	}
+	if parsedArgs.CPUProfile != "" {
+		cleanup, err := createCpuprofileFile(parsedArgs.CPUProfile)
+		if err != nil {
+			return fmt.Errorf("failed to create CPU profile file: %v", err)
+		}
+		helper.RegisterCleanup(cleanup)
+	}
+
+	return nil
+}
+
+// RunWithArgs
runs turbo with the ParsedArgsFromRust that is passed from the Rust side. +func RunWithArgs(args *turbostate.ParsedArgsFromRust, turboVersion string) int { + util.InitPrintf() + // TODO: replace this with a context + signalWatcher := signals.NewWatcher() + helper := cmdutil.NewHelper(turboVersion, args) + ctx := context.Background() + + err := initializeOutputFiles(helper, args) + if err != nil { + fmt.Printf("%v", err) + return 1 + } + defer helper.Cleanup(args) + + doneCh := make(chan struct{}) + var execErr error + go func() { + command := args.Command + if command.Daemon != nil { + execErr = daemon.ExecuteDaemon(ctx, helper, signalWatcher, args) + } else if command.Prune != nil { + execErr = prune.ExecutePrune(helper, args) + } else if command.Run != nil { + execErr = run.ExecuteRun(ctx, helper, signalWatcher, args) + } else { + execErr = fmt.Errorf("unknown command: %v", command) + } + + close(doneCh) + }() + + // Wait for either our command to finish, in which case we need to clean up, + // or to receive a signal, in which case the signal handler above does the cleanup + select { + case <-doneCh: + // We finished whatever task we were running + signalWatcher.Close() + exitErr := &process.ChildExit{} + if errors.As(execErr, &exitErr) { + return exitErr.ExitCode + } else if execErr != nil { + fmt.Printf("Turbo error: %v\n", execErr) + return 1 + } + return 0 + case <-signalWatcher.Done(): + // We caught a signal, which already called the close handlers + return 1 + } +} + +type profileCleanup func() error + +// Close implements io.Close for profileCleanup +func (pc profileCleanup) Close() error { + return pc() +} + +// To view a CPU trace, use "go tool trace [file]". Note that the trace +// viewer doesn't work under Windows Subsystem for Linux for some reason. +func createTraceFile(traceFile string) (profileCleanup, error) { + f, err := os.Create(traceFile) + if err != nil { + return nil, errors.Wrapf(err, "failed to create trace file: %v", traceFile) + } + if err := trace.Start(f); err != nil { + return nil, errors.Wrap(err, "failed to start tracing") + } + return func() error { + trace.Stop() + return f.Close() + }, nil +} + +// To view a heap trace, use "go tool pprof [file]" and type "top". You can +// also drop it into https://speedscope.app and use the "left heavy" or +// "sandwich" view modes. +func createHeapFile(heapFile string) (profileCleanup, error) { + f, err := os.Create(heapFile) + if err != nil { + return nil, errors.Wrapf(err, "failed to create heap file: %v", heapFile) + } + return func() error { + if err := pprof.WriteHeapProfile(f); err != nil { + // we don't care if we fail to close the file we just failed to write to + _ = f.Close() + return errors.Wrapf(err, "failed to write heap file: %v", heapFile) + } + return f.Close() + }, nil +} + +// To view a CPU profile, drop the file into https://speedscope.app. +// Note: Running the CPU profiler doesn't work under Windows subsystem for +// Linux. The profiler has to be built for native Windows and run using the +// command prompt instead. 
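+// A typical inspection session, assuming the profile was written to cpu.prof
+// (illustrative):
+//
+//	go tool pprof -top cpu.prof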
+func createCpuprofileFile(cpuprofileFile string) (profileCleanup, error) {
+	f, err := os.Create(cpuprofileFile)
+	if err != nil {
+		return nil, errors.Wrapf(err, "failed to create cpuprofile file: %v", cpuprofileFile)
+	}
+	if err := pprof.StartCPUProfile(f); err != nil {
+		return nil, errors.Wrap(err, "failed to start CPU profiling")
+	}
+	return func() error {
+		pprof.StopCPUProfile()
+		return f.Close()
+	}, nil
+}
diff --git a/cli/internal/cmdutil/cmdutil.go b/cli/internal/cmdutil/cmdutil.go
new file mode 100644
index 0000000..0b02392
--- /dev/null
+++ b/cli/internal/cmdutil/cmdutil.go
@@ -0,0 +1,245 @@
+// Package cmdutil holds functionality to run turbo via cobra. That includes flag parsing and configuration
+// of components common to all subcommands
+package cmdutil
+
+import (
+	"fmt"
+	"io"
+	"io/ioutil"
+	"os"
+	"strconv"
+	"sync"
+
+	"github.com/hashicorp/go-hclog"
+
+	"github.com/fatih/color"
+	"github.com/mitchellh/cli"
+	"github.com/vercel/turbo/cli/internal/client"
+	"github.com/vercel/turbo/cli/internal/config"
+	"github.com/vercel/turbo/cli/internal/fs"
+	"github.com/vercel/turbo/cli/internal/turbopath"
+	"github.com/vercel/turbo/cli/internal/turbostate"
+	"github.com/vercel/turbo/cli/internal/ui"
+)
+
+const (
+	// _envLogLevel is the environment log level
+	_envLogLevel = "TURBO_LOG_LEVEL"
+)
+
+// Helper is a struct used to hold configuration values passed via flag, env vars,
+// config files, etc. It is not intended for direct use by turbo commands; it drives
+// the creation of CmdBase, which is then used by the commands themselves.
+type Helper struct {
+	// TurboVersion is the version of turbo that is currently executing
+	TurboVersion string
+
+	// for logging
+	verbosity int
+
+	rawRepoRoot string
+
+	clientOpts client.Opts
+
+	// UserConfigPath is the path to where we expect to find
+	// a user-specific config file, if one is present. Public
+	// to allow overrides in tests
+	UserConfigPath turbopath.AbsoluteSystemPath
+
+	cleanupsMu sync.Mutex
+	cleanups   []io.Closer
+}
+
+// RegisterCleanup saves a function to be run after turbo execution,
+// even if the command that runs returns an error
+func (h *Helper) RegisterCleanup(cleanup io.Closer) {
+	h.cleanupsMu.Lock()
+	defer h.cleanupsMu.Unlock()
+	h.cleanups = append(h.cleanups, cleanup)
+}
+
+// Cleanup runs the registered cleanup handlers.
It requires the flags +// to the root command so that it can construct a UI if necessary +func (h *Helper) Cleanup(cliConfig *turbostate.ParsedArgsFromRust) { + h.cleanupsMu.Lock() + defer h.cleanupsMu.Unlock() + var ui cli.Ui + for _, cleanup := range h.cleanups { + if err := cleanup.Close(); err != nil { + if ui == nil { + ui = h.getUI(cliConfig) + } + ui.Warn(fmt.Sprintf("failed cleanup: %v", err)) + } + } +} + +func (h *Helper) getUI(cliConfig *turbostate.ParsedArgsFromRust) cli.Ui { + colorMode := ui.GetColorModeFromEnv() + if cliConfig.GetNoColor() { + colorMode = ui.ColorModeSuppressed + } + if cliConfig.GetColor() { + colorMode = ui.ColorModeForced + } + return ui.BuildColoredUi(colorMode) +} + +func (h *Helper) getLogger() (hclog.Logger, error) { + var level hclog.Level + switch h.verbosity { + case 0: + if v := os.Getenv(_envLogLevel); v != "" { + level = hclog.LevelFromString(v) + if level == hclog.NoLevel { + return nil, fmt.Errorf("%s value %q is not a valid log level", _envLogLevel, v) + } + } else { + level = hclog.NoLevel + } + case 1: + level = hclog.Info + case 2: + level = hclog.Debug + case 3: + level = hclog.Trace + default: + level = hclog.Trace + } + // Default output is nowhere unless we enable logging. + output := ioutil.Discard + color := hclog.ColorOff + if level != hclog.NoLevel { + output = os.Stderr + color = hclog.AutoColor + } + + return hclog.New(&hclog.LoggerOptions{ + Name: "turbo", + Level: level, + Color: color, + Output: output, + }), nil +} + +// NewHelper returns a new helper instance to hold configuration values for the root +// turbo command. +func NewHelper(turboVersion string, args *turbostate.ParsedArgsFromRust) *Helper { + return &Helper{ + TurboVersion: turboVersion, + UserConfigPath: config.DefaultUserConfigPath(), + verbosity: args.Verbosity, + } +} + +// GetCmdBase returns a CmdBase instance configured with values from this helper. 
+// It returns an error if any of those components cannot be constructed.
+func (h *Helper) GetCmdBase(cliConfig *turbostate.ParsedArgsFromRust) (*CmdBase, error) {
+	// terminal is for color/no-color output
+	terminal := h.getUI(cliConfig)
+	// logger is configured with verbosity level using --verbosity flag from end users
+	logger, err := h.getLogger()
+	if err != nil {
+		return nil, err
+	}
+	cwdRaw, err := cliConfig.GetCwd()
+	if err != nil {
+		return nil, err
+	}
+	cwd, err := fs.GetCwd(cwdRaw)
+	if err != nil {
+		return nil, err
+	}
+	repoRoot := fs.ResolveUnknownPath(cwd, h.rawRepoRoot)
+	repoRoot, err = repoRoot.EvalSymlinks()
+	if err != nil {
+		return nil, err
+	}
+	repoConfig, err := config.ReadRepoConfigFile(config.GetRepoConfigPath(repoRoot), cliConfig)
+	if err != nil {
+		return nil, err
+	}
+	userConfig, err := config.ReadUserConfigFile(h.UserConfigPath, cliConfig)
+	if err != nil {
+		return nil, err
+	}
+	remoteConfig := repoConfig.GetRemoteConfig(userConfig.Token())
+	if remoteConfig.Token == "" && ui.IsCI {
+		vercelArtifactsToken := os.Getenv("VERCEL_ARTIFACTS_TOKEN")
+		vercelArtifactsOwner := os.Getenv("VERCEL_ARTIFACTS_OWNER")
+		if vercelArtifactsToken != "" {
+			remoteConfig.Token = vercelArtifactsToken
+		}
+		if vercelArtifactsOwner != "" {
+			remoteConfig.TeamID = vercelArtifactsOwner
+		}
+	}
+
+	// Primacy: Arg > Env
+	timeout, err := cliConfig.GetRemoteCacheTimeout()
+	if err == nil {
+		h.clientOpts.Timeout = timeout
+	} else {
+		val, ok := os.LookupEnv("TURBO_REMOTE_CACHE_TIMEOUT")
+		if ok {
+			number, err := strconv.ParseUint(val, 10, 64)
+			if err == nil {
+				h.clientOpts.Timeout = number
+			}
+		}
+	}
+
+	apiClient := client.NewClient(
+		remoteConfig,
+		logger,
+		h.TurboVersion,
+		h.clientOpts,
+	)
+
+	return &CmdBase{
+		UI:           terminal,
+		Logger:       logger,
+		RepoRoot:     repoRoot,
+		APIClient:    apiClient,
+		RepoConfig:   repoConfig,
+		UserConfig:   userConfig,
+		RemoteConfig: remoteConfig,
+		TurboVersion: h.TurboVersion,
+	}, nil
+}
+
+// CmdBase encompasses configured components common to all turbo commands.
+type CmdBase struct {
+	UI           cli.Ui
+	Logger       hclog.Logger
+	RepoRoot     turbopath.AbsoluteSystemPath
+	APIClient    *client.APIClient
+	RepoConfig   *config.RepoConfig
+	UserConfig   *config.UserConfig
+	RemoteConfig client.RemoteConfig
+	TurboVersion string
+}
+
+// LogError prints an error to the UI
+func (b *CmdBase) LogError(format string, args ...interface{}) {
+	err := fmt.Errorf(format, args...)
+	b.Logger.Error("error", err)
+	b.UI.Error(fmt.Sprintf("%s%s", ui.ERROR_PREFIX, color.RedString(" %v", err)))
+}
+
+// LogWarning logs an error and outputs it to the UI.
+func (b *CmdBase) LogWarning(prefix string, err error) {
+	b.Logger.Warn(prefix, "warning", err)
+
+	if prefix != "" {
+		prefix = " " + prefix + ": "
+	}
+
+	b.UI.Warn(fmt.Sprintf("%s%s%s", ui.WARNING_PREFIX, prefix, color.YellowString(" %v", err)))
+}
+
+// LogInfo logs a message and outputs it to the UI.
+func (b *CmdBase) LogInfo(msg string) { + b.Logger.Info(msg) + b.UI.Info(fmt.Sprintf("%s%s", ui.InfoPrefix, color.WhiteString(" %v", msg))) +} diff --git a/cli/internal/cmdutil/cmdutil_test.go b/cli/internal/cmdutil/cmdutil_test.go new file mode 100644 index 0000000..4e6cf70 --- /dev/null +++ b/cli/internal/cmdutil/cmdutil_test.go @@ -0,0 +1,109 @@ +package cmdutil + +import ( + "os" + "testing" + "time" + + "github.com/vercel/turbo/cli/internal/fs" + "github.com/vercel/turbo/cli/internal/turbostate" + "gotest.tools/v3/assert" +) + +func TestTokenEnvVar(t *testing.T) { + // Set up an empty config so we're just testing environment variables + userConfigPath := fs.AbsoluteSystemPathFromUpstream(t.TempDir()).UntypedJoin("turborepo", "config.json") + expectedPrefix := "my-token" + vars := []string{"TURBO_TOKEN", "VERCEL_ARTIFACTS_TOKEN"} + for _, v := range vars { + t.Run(v, func(t *testing.T) { + t.Cleanup(func() { + _ = os.Unsetenv(v) + }) + args := &turbostate.ParsedArgsFromRust{ + CWD: "", + } + h := NewHelper("test-version", args) + h.UserConfigPath = userConfigPath + + expectedToken := expectedPrefix + v + err := os.Setenv(v, expectedToken) + if err != nil { + t.Fatalf("setenv %v", err) + } + + base, err := h.GetCmdBase(args) + if err != nil { + t.Fatalf("failed to get command base %v", err) + } + assert.Equal(t, base.RemoteConfig.Token, expectedToken) + }) + } +} + +func TestRemoteCacheTimeoutEnvVar(t *testing.T) { + key := "TURBO_REMOTE_CACHE_TIMEOUT" + expectedTimeout := "600" + t.Run(key, func(t *testing.T) { + t.Cleanup(func() { + _ = os.Unsetenv(key) + }) + args := &turbostate.ParsedArgsFromRust{ + CWD: "", + } + h := NewHelper("test-version", args) + + err := os.Setenv(key, expectedTimeout) + if err != nil { + t.Fatalf("setenv %v", err) + } + + base, err := h.GetCmdBase(args) + if err != nil { + t.Fatalf("failed to get command base %v", err) + } + assert.Equal(t, base.APIClient.HTTPClient.HTTPClient.Timeout, time.Duration(600)*time.Second) + }) +} + +func TestRemoteCacheTimeoutFlag(t *testing.T) { + args := &turbostate.ParsedArgsFromRust{ + CWD: "", + RemoteCacheTimeout: 599, + } + h := NewHelper("test-version", args) + + base, err := h.GetCmdBase(args) + if err != nil { + t.Fatalf("failed to get command base %v", err) + } + + assert.Equal(t, base.APIClient.HTTPClient.HTTPClient.Timeout, time.Duration(599)*time.Second) +} + +func TestRemoteCacheTimeoutPrimacy(t *testing.T) { + key := "TURBO_REMOTE_CACHE_TIMEOUT" + value := "2" + + t.Run(key, func(t *testing.T) { + t.Cleanup(func() { + _ = os.Unsetenv(key) + }) + args := &turbostate.ParsedArgsFromRust{ + CWD: "", + RemoteCacheTimeout: 1, + } + h := NewHelper("test-version", args) + + err := os.Setenv(key, value) + if err != nil { + t.Fatalf("setenv %v", err) + } + + base, err := h.GetCmdBase(args) + if err != nil { + t.Fatalf("failed to get command base %v", err) + } + assert.Equal(t, base.APIClient.HTTPClient.HTTPClient.Timeout, time.Duration(1)*time.Second) + }) +} diff --git a/cli/internal/colorcache/colorcache.go b/cli/internal/colorcache/colorcache.go new file mode 100644 index 0000000..08a15e8 --- /dev/null +++ b/cli/internal/colorcache/colorcache.go @@ -0,0 +1,56 @@ +package colorcache + +import ( + "sync" + + "github.com/vercel/turbo/cli/internal/util" + + "github.com/fatih/color" +) + +type colorFn = func(format string, a ...interface{}) string + +func getTerminalPackageColors() []colorFn { + return []colorFn{color.CyanString, color.MagentaString, color.GreenString, color.YellowString, color.BlueString} +} + +type 
ColorCache struct { + mu sync.Mutex + index int + TermColors []colorFn + Cache map[interface{}]colorFn +} + +// New creates an instance of ColorCache with helpers for adding colors to task outputs +func New() *ColorCache { + return &ColorCache{ + TermColors: getTerminalPackageColors(), + index: 0, + Cache: make(map[interface{}]colorFn), + } +} + +// colorForKey returns a color function for a given package name +func (c *ColorCache) colorForKey(key string) colorFn { + c.mu.Lock() + defer c.mu.Unlock() + colorFn, ok := c.Cache[key] + if ok { + return colorFn + } + c.index++ + colorFn = c.TermColors[util.PositiveMod(c.index, len(c.TermColors))] // 5 possible colors + c.Cache[key] = colorFn + return colorFn +} + +// PrefixWithColor returns a string consisting of the provided prefix in a consistent +// color based on the cacheKey +func (c *ColorCache) PrefixWithColor(cacheKey string, prefix string) string { + colorFn := c.colorForKey(cacheKey) + if prefix != "" { + return colorFn("%s: ", prefix) + } + + return "" +} diff --git a/cli/internal/config/config_file.go b/cli/internal/config/config_file.go new file mode 100644 index 0000000..d3118b8 --- /dev/null +++ b/cli/internal/config/config_file.go @@ -0,0 +1,192 @@ +package config + +import ( + "os" + + "github.com/spf13/viper" + "github.com/vercel/turbo/cli/internal/client" + "github.com/vercel/turbo/cli/internal/fs" + "github.com/vercel/turbo/cli/internal/turbopath" + "github.com/vercel/turbo/cli/internal/turbostate" +) + +// RepoConfig is a configuration object for the logged-in turborepo.com user +type RepoConfig struct { + repoViper *viper.Viper + path turbopath.AbsoluteSystemPath +} + +// LoginURL returns the configured URL for authenticating the user +func (rc *RepoConfig) LoginURL() string { + return rc.repoViper.GetString("loginurl") +} + +// SetTeamID sets the teamID and clears the slug, since it may have been from an old team +func (rc *RepoConfig) SetTeamID(teamID string) error { + // Note that we can't use viper.Set to set a nil value, we have to merge it in + newVals := map[string]interface{}{ + "teamid": teamID, + "teamslug": nil, + } + if err := rc.repoViper.MergeConfigMap(newVals); err != nil { + return err + } + return rc.write() +} + +// GetRemoteConfig produces the necessary values for an API client configuration +func (rc *RepoConfig) GetRemoteConfig(token string) client.RemoteConfig { + return client.RemoteConfig{ + Token: token, + TeamID: rc.repoViper.GetString("teamid"), + TeamSlug: rc.repoViper.GetString("teamslug"), + APIURL: rc.repoViper.GetString("apiurl"), + } +} + +// Internal call to save this config data to the user config file. +func (rc *RepoConfig) write() error { + if err := rc.path.EnsureDir(); err != nil { + return err + } + return rc.repoViper.WriteConfig() +} + +// Delete deletes the config file. This repo config shouldn't be used +// afterwards, it needs to be re-initialized +func (rc *RepoConfig) Delete() error { + return rc.path.Remove() +} + +// UserConfig is a wrapper around the user-specific configuration values +// for Turborepo. 
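+// The backing store is a small JSON file; schematically (illustrative):
+//
+//	{"token":"<bearer token>"}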
+type UserConfig struct { + userViper *viper.Viper + path turbopath.AbsoluteSystemPath +} + +// Token returns the Bearer token for this user if it exists +func (uc *UserConfig) Token() string { + return uc.userViper.GetString("token") +} + +// SetToken saves a Bearer token for this user, writing it to the +// user config file, creating it if necessary +func (uc *UserConfig) SetToken(token string) error { + // Technically Set works here, due to how overrides work, but use merge for consistency + if err := uc.userViper.MergeConfigMap(map[string]interface{}{"token": token}); err != nil { + return err + } + return uc.write() +} + +// Internal call to save this config data to the user config file. +func (uc *UserConfig) write() error { + if err := uc.path.EnsureDir(); err != nil { + return err + } + return uc.userViper.WriteConfig() +} + +// Delete deletes the config file. This user config shouldn't be used +// afterwards, it needs to be re-initialized +func (uc *UserConfig) Delete() error { + return uc.path.Remove() +} + +// ReadUserConfigFile creates a UserConfig using the +// specified path as the user config file. Note that the path or its parents +// do not need to exist. On a write to this configuration, they will be created. +func ReadUserConfigFile(path turbopath.AbsoluteSystemPath, cliConfig *turbostate.ParsedArgsFromRust) (*UserConfig, error) { + userViper := viper.New() + userViper.SetConfigFile(path.ToString()) + userViper.SetConfigType("json") + userViper.SetEnvPrefix("turbo") + userViper.MustBindEnv("token") + + token, err := cliConfig.GetToken() + if err != nil { + return nil, err + } + if token != "" { + userViper.Set("token", token) + } + + if err := userViper.ReadInConfig(); err != nil && !os.IsNotExist(err) { + return nil, err + } + return &UserConfig{ + userViper: userViper, + path: path, + }, nil +} + +// DefaultUserConfigPath returns the default platform-dependent place that +// we store the user-specific configuration. +func DefaultUserConfigPath() turbopath.AbsoluteSystemPath { + return fs.GetUserConfigDir().UntypedJoin("config.json") +} + +const ( + _defaultAPIURL = "https://vercel.com/api" + _defaultLoginURL = "https://vercel.com" +) + +// ReadRepoConfigFile creates a RepoConfig using the +// specified path as the repo config file. Note that the path or its +// parents do not need to exist. On a write to this configuration, they +// will be created. 
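+// Besides the file itself, values can come from the environment: per the
+// bindings below, TURBO_API, TURBO_LOGIN, TURBO_TEAM, and TURBO_TEAMID
+// override apiurl, loginurl, teamslug, and teamid respectively.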
+func ReadRepoConfigFile(path turbopath.AbsoluteSystemPath, cliConfig *turbostate.ParsedArgsFromRust) (*RepoConfig, error) {
+	repoViper := viper.New()
+	repoViper.SetConfigFile(path.ToString())
+	repoViper.SetConfigType("json")
+	repoViper.SetEnvPrefix("turbo")
+	repoViper.MustBindEnv("apiurl", "TURBO_API")
+	repoViper.MustBindEnv("loginurl", "TURBO_LOGIN")
+	repoViper.MustBindEnv("teamslug", "TURBO_TEAM")
+	repoViper.MustBindEnv("teamid")
+	repoViper.SetDefault("apiurl", _defaultAPIURL)
+	repoViper.SetDefault("loginurl", _defaultLoginURL)
+
+	login, err := cliConfig.GetLogin()
+	if err != nil {
+		return nil, err
+	}
+	if login != "" {
+		repoViper.Set("loginurl", login)
+	}
+
+	api, err := cliConfig.GetAPI()
+	if err != nil {
+		return nil, err
+	}
+	if api != "" {
+		repoViper.Set("apiurl", api)
+	}
+
+	team, err := cliConfig.GetTeam()
+	if err != nil {
+		return nil, err
+	}
+	if team != "" {
+		repoViper.Set("teamslug", team)
+	}
+
+	if err := repoViper.ReadInConfig(); err != nil && !os.IsNotExist(err) {
+		return nil, err
+	}
+	// If team was set via the command line, don't read the teamId from the config file,
+	// as it won't necessarily match.
+	if team != "" {
+		repoViper.Set("teamid", "")
+	}
+	return &RepoConfig{
+		repoViper: repoViper,
+		path:      path,
+	}, nil
+}
+
+// GetRepoConfigPath returns the path to the repository-specific configuration file
+func GetRepoConfigPath(repoRoot turbopath.AbsoluteSystemPath) turbopath.AbsoluteSystemPath {
+	return repoRoot.UntypedJoin(".turbo", "config.json")
+}
diff --git a/cli/internal/config/config_file_test.go b/cli/internal/config/config_file_test.go
new file mode 100644
index 0000000..7a19108
--- /dev/null
+++ b/cli/internal/config/config_file_test.go
@@ -0,0 +1,157 @@
+package config
+
+import (
+	"fmt"
+	"testing"
+
+	"github.com/vercel/turbo/cli/internal/fs"
+	"github.com/vercel/turbo/cli/internal/turbostate"
+	"gotest.tools/v3/assert"
+)
+
+func TestReadRepoConfigWhenMissing(t *testing.T) {
+	testDir := fs.AbsoluteSystemPathFromUpstream(t.TempDir()).UntypedJoin("config.json")
+	args := &turbostate.ParsedArgsFromRust{
+		CWD: "",
+	}
+
+	config, err := ReadRepoConfigFile(testDir, args)
+	if err != nil {
+		t.Errorf("got error reading non-existent config file: %v, want <nil>", err)
+	}
+	if config == nil {
+		t.Error("got <nil>, wanted config value")
+	}
+}
+
+func TestReadRepoConfigSetTeamAndAPIFlag(t *testing.T) {
+	testConfigFile := fs.AbsoluteSystemPathFromUpstream(t.TempDir()).UntypedJoin("turborepo", "config.json")
+
+	slug := "my-team-slug"
+	apiURL := "http://my-login-url"
+	args := &turbostate.ParsedArgsFromRust{
+		CWD:  "",
+		Team: slug,
+		API:  apiURL,
+	}
+
+	teamID := "some-id"
+	assert.NilError(t, testConfigFile.EnsureDir(), "EnsureDir")
+	assert.NilError(t, testConfigFile.WriteFile([]byte(fmt.Sprintf(`{"teamId":"%v"}`, teamID)), 0644), "WriteFile")
+
+	config, err := ReadRepoConfigFile(testConfigFile, args)
+	if err != nil {
+		t.Errorf("ReadRepoConfigFile err got %v, want <nil>", err)
+	}
+	remoteConfig := config.GetRemoteConfig("")
+	if remoteConfig.TeamID != "" {
+		t.Errorf("TeamID got %v, want <empty>", remoteConfig.TeamID)
+	}
+	if remoteConfig.TeamSlug != slug {
+		t.Errorf("TeamSlug got %v, want %v", remoteConfig.TeamSlug, slug)
+	}
+	if remoteConfig.APIURL != apiURL {
+		t.Errorf("APIURL got %v, want %v", remoteConfig.APIURL, apiURL)
+	}
+}
+
+func TestRepoConfigIncludesDefaults(t *testing.T) {
+	testConfigFile := fs.AbsoluteSystemPathFromUpstream(t.TempDir()).UntypedJoin("turborepo", "config.json")
+	args := &turbostate.ParsedArgsFromRust{
+		CWD: "",
+	}
+
+	expectedTeam := "my-team"
+
+	assert.NilError(t, testConfigFile.EnsureDir(), "EnsureDir")
+	assert.NilError(t, testConfigFile.WriteFile([]byte(fmt.Sprintf(`{"teamSlug":"%v"}`, expectedTeam)), 0644), "WriteFile")
+
+	config, err := ReadRepoConfigFile(testConfigFile, args)
+	if err != nil {
+		t.Errorf("ReadRepoConfigFile err got %v, want <nil>", err)
+	}
+
+	remoteConfig := config.GetRemoteConfig("")
+	if remoteConfig.APIURL != _defaultAPIURL {
+		t.Errorf("api url got %v, want %v", remoteConfig.APIURL, _defaultAPIURL)
+	}
+	if remoteConfig.TeamSlug != expectedTeam {
+		t.Errorf("team slug got %v, want %v", remoteConfig.TeamSlug, expectedTeam)
+	}
+}
+
+func TestWriteRepoConfig(t *testing.T) {
+	repoRoot := fs.AbsoluteSystemPathFromUpstream(t.TempDir())
+	testConfigFile := repoRoot.UntypedJoin(".turbo", "config.json")
+	args := &turbostate.ParsedArgsFromRust{
+		CWD: "",
+	}
+
+	expectedTeam := "my-team"
+
+	assert.NilError(t, testConfigFile.EnsureDir(), "EnsureDir")
+	assert.NilError(t, testConfigFile.WriteFile([]byte(fmt.Sprintf(`{"teamSlug":"%v"}`, expectedTeam)), 0644), "WriteFile")
+
+	initial, err := ReadRepoConfigFile(testConfigFile, args)
+	assert.NilError(t, err, "GetRepoConfig")
+	// setting the teamID should clear the slug, since it may have been from an old team
+	expectedTeamID := "my-team-id"
+	err = initial.SetTeamID(expectedTeamID)
+	assert.NilError(t, err, "SetTeamID")
+
+	config, err := ReadRepoConfigFile(testConfigFile, args)
+	if err != nil {
+		t.Errorf("ReadRepoConfig err got %v, want <nil>", err)
+	}
+
+	remoteConfig := config.GetRemoteConfig("")
+	if remoteConfig.TeamSlug != "" {
+		t.Errorf("Expected TeamSlug to be cleared, got %v", remoteConfig.TeamSlug)
+	}
+	if remoteConfig.TeamID != expectedTeamID {
+		t.Errorf("TeamID got %v, want %v", remoteConfig.TeamID, expectedTeamID)
+	}
+}
+
+func TestWriteUserConfig(t *testing.T) {
+	configPath := fs.AbsoluteSystemPathFromUpstream(t.TempDir()).UntypedJoin("turborepo", "config.json")
+	args := &turbostate.ParsedArgsFromRust{
+		CWD: "",
+	}
+
+	// Non-existent config file should get empty values
+	userConfig, err := ReadUserConfigFile(configPath, args)
+	assert.NilError(t, err, "readUserConfigFile")
+	assert.Equal(t, userConfig.Token(), "")
+	assert.Equal(t, userConfig.path, configPath)
+
+	expectedToken := "my-token"
+	err = userConfig.SetToken(expectedToken)
+	assert.NilError(t, err, "SetToken")
+
+	config, err := ReadUserConfigFile(configPath, args)
+	assert.NilError(t, err, "readUserConfigFile")
+	assert.Equal(t, config.Token(), expectedToken)
+
+	err = config.Delete()
+	assert.NilError(t, err, "deleteConfigFile")
+	assert.Equal(t, configPath.FileExists(), false, "config file should be deleted")
+
+	final, err := ReadUserConfigFile(configPath, args)
+	assert.NilError(t, err, "readUserConfigFile")
+	assert.Equal(t, final.Token(), "")
+	assert.Equal(t, configPath.FileExists(), false, "config file should be deleted")
+}
+
+func TestUserConfigFlags(t *testing.T) {
+	configPath := fs.AbsoluteSystemPathFromUpstream(t.TempDir()).UntypedJoin("turborepo", "config.json")
+	args := &turbostate.ParsedArgsFromRust{
+		CWD:   "",
+		Token: "my-token",
+	}
+
+	userConfig, err := ReadUserConfigFile(configPath, args)
+	assert.NilError(t, err, "readUserConfigFile")
+	assert.Equal(t, userConfig.Token(), "my-token")
+	assert.Equal(t, userConfig.path, configPath)
+}
diff --git a/cli/internal/context/context.go b/cli/internal/context/context.go
new file mode 100644
index 0000000..2376d2d
--- /dev/null
+++ b/cli/internal/context/context.go
@@ -0,0 +1,480 @@
+package context + +import ( + "fmt" + "path/filepath" + "sort" + "strings" + "sync" + + "github.com/hashicorp/go-multierror" + "github.com/vercel/turbo/cli/internal/core" + "github.com/vercel/turbo/cli/internal/fs" + "github.com/vercel/turbo/cli/internal/lockfile" + "github.com/vercel/turbo/cli/internal/packagemanager" + "github.com/vercel/turbo/cli/internal/turbopath" + "github.com/vercel/turbo/cli/internal/util" + "github.com/vercel/turbo/cli/internal/workspace" + + "github.com/Masterminds/semver" + mapset "github.com/deckarep/golang-set" + "github.com/pyr-sh/dag" + "golang.org/x/sync/errgroup" +) + +// Warnings Error type for errors that don't prevent the creation of a functional Context +type Warnings struct { + warns *multierror.Error + mu sync.Mutex +} + +var _ error = (*Warnings)(nil) + +func (w *Warnings) Error() string { + return w.warns.Error() +} + +func (w *Warnings) errorOrNil() error { + if w.warns != nil { + return w + } + return nil +} + +func (w *Warnings) append(err error) { + w.mu.Lock() + defer w.mu.Unlock() + w.warns = multierror.Append(w.warns, err) +} + +// Context of the CLI +type Context struct { + // WorkspaceInfos contains the contents of package.json for every workspace + // TODO(gsoltis): should the RootPackageJSON be included in WorkspaceInfos? + WorkspaceInfos workspace.Catalog + + // WorkspaceNames is all the names of the workspaces + WorkspaceNames []string + + // WorkspaceGraph is a graph of workspace dependencies + // (based on package.json dependencies and devDependencies) + WorkspaceGraph dag.AcyclicGraph + + // RootNode is a sigil identifying the root workspace + RootNode string + + // Lockfile is a struct to read the lockfile based on the package manager + Lockfile lockfile.Lockfile + + // PackageManager is an abstraction for all the info a package manager + // can give us about the repo. + PackageManager *packagemanager.PackageManager + + // Used to arbitrate access to the graph. We parallelise most build operations + // and Go maps aren't natively threadsafe so this is needed. + mutex sync.Mutex +} + +// Splits "npm:^1.2.3" and "github:foo/bar.git" into a protocol part and a version part. +func parseDependencyProtocol(version string) (string, string) { + parts := strings.Split(version, ":") + if len(parts) == 1 { + return "", parts[0] + } + + return parts[0], strings.Join(parts[1:], ":") +} + +func isProtocolExternal(protocol string) bool { + // The npm protocol for yarn by default still uses the workspace package if the workspace + // version is in a compatible semver range. See https://github.com/yarnpkg/berry/discussions/4015 + // For now, we will just assume if the npm protocol is being used and the version matches + // its an internal dependency which matches the existing behavior before this additional + // logic was added. + + // TODO: extend this to support the `enableTransparentWorkspaces` yarn option + return protocol != "" && protocol != "npm" +} + +func isWorkspaceReference(packageVersion string, dependencyVersion string, cwd string, rootpath string) bool { + protocol, dependencyVersion := parseDependencyProtocol(dependencyVersion) + + if protocol == "workspace" { + // TODO: Since support at the moment is non-existent for workspaces that contain multiple + // versions of the same package name, just assume its a match and don't check the range + // for an exact match. 
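+        // For illustration (hypothetical manifest entries): this short-circuit
+        // treats "workspace:*", "workspace:^1.0.0", and "workspace:../other-package/"
+        // all as internal references, without comparing the named workspace's
+        // actual version against the range.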
+        return true
+    } else if protocol == "file" || protocol == "link" {
+        abs, err := filepath.Abs(filepath.Join(cwd, dependencyVersion))
+        if err != nil {
+            // Default to internal if we have the package but somehow cannot get the path
+            // TODO(gsoltis): log this?
+            return true
+        }
+        isWithinRepo, err := fs.DirContainsPath(rootpath, filepath.FromSlash(abs))
+        if err != nil {
+            // Default to internal if we have the package but somehow cannot get the path
+            // TODO(gsoltis): log this?
+            return true
+        }
+        return isWithinRepo
+    } else if isProtocolExternal(protocol) {
+        // Other protocols are assumed to be external references ("github:", etc)
+        return false
+    } else if dependencyVersion == "*" {
+        return true
+    }
+
+    // If we got this far, then we need to check whether the workspace package version satisfies
+    // the dependency's range to determine whether it is an internal or external dependency.
+
+    constraint, constraintErr := semver.NewConstraint(dependencyVersion)
+    pkgVersion, packageVersionErr := semver.NewVersion(packageVersion)
+    if constraintErr != nil || packageVersionErr != nil {
+        // For backwards compatibility with existing behavior, if we can't parse the version then we
+        // treat the dependency as an internal package reference and swallow the error.
+
+        // TODO: some package managers also support tags like "latest". Does extra handling need to be
+        // added for this corner-case?
+        return true
+    }
+
+    return constraint.Check(pkgVersion)
+}
+
+// SinglePackageGraph constructs a Context instance from a single package.
+func SinglePackageGraph(repoRoot turbopath.AbsoluteSystemPath, rootPackageJSON *fs.PackageJSON) (*Context, error) {
+    workspaceInfos := workspace.Catalog{
+        PackageJSONs: map[string]*fs.PackageJSON{util.RootPkgName: rootPackageJSON},
+        TurboConfigs: map[string]*fs.TurboJSON{},
+    }
+    c := &Context{
+        WorkspaceInfos: workspaceInfos,
+        RootNode:       core.ROOT_NODE_NAME,
+    }
+    c.WorkspaceGraph.Connect(dag.BasicEdge(util.RootPkgName, core.ROOT_NODE_NAME))
+    packageManager, err := packagemanager.GetPackageManager(repoRoot, rootPackageJSON)
+    if err != nil {
+        return nil, err
+    }
+    c.PackageManager = packageManager
+    return c, nil
+}
+
+// BuildPackageGraph constructs a Context instance with information about the package dependency graph
+func BuildPackageGraph(repoRoot turbopath.AbsoluteSystemPath, rootPackageJSON *fs.PackageJSON) (*Context, error) {
+    c := &Context{}
+    rootpath := repoRoot.ToStringDuringMigration()
+    c.WorkspaceInfos = workspace.Catalog{
+        PackageJSONs: map[string]*fs.PackageJSON{},
+        TurboConfigs: map[string]*fs.TurboJSON{},
+    }
+    c.RootNode = core.ROOT_NODE_NAME
+
+    var warnings Warnings
+
+    packageManager, err := packagemanager.GetPackageManager(repoRoot, rootPackageJSON)
+    if err != nil {
+        return nil, err
+    }
+    c.PackageManager = packageManager
+
+    if lockfile, err := c.PackageManager.ReadLockfile(repoRoot, rootPackageJSON); err != nil {
+        warnings.append(err)
+    } else {
+        c.Lockfile = lockfile
+    }
+
+    if err := c.resolveWorkspaceRootDeps(rootPackageJSON, &warnings); err != nil {
+        // TODO(Gaspar) was this the intended return error?
+        return nil, fmt.Errorf("could not resolve workspaces: %w", err)
+    }
+
+    // Get the workspaces from the package manager.
+    // workspaces are absolute paths
+    workspaces, err := c.PackageManager.GetWorkspaces(repoRoot)
+
+    if err != nil {
+        return nil, fmt.Errorf("workspace configuration error: %w", err)
+    }
+
+    // We will parse all package.json's simultaneously.
We use a + // wait group because we cannot fully populate the graph (the next step) + // until all parsing is complete + parseJSONWaitGroup := &errgroup.Group{} + for _, workspace := range workspaces { + pkgJSONPath := fs.UnsafeToAbsoluteSystemPath(workspace) + parseJSONWaitGroup.Go(func() error { + return c.parsePackageJSON(repoRoot, pkgJSONPath) + }) + } + + if err := parseJSONWaitGroup.Wait(); err != nil { + return nil, err + } + populateGraphWaitGroup := &errgroup.Group{} + for _, pkg := range c.WorkspaceInfos.PackageJSONs { + pkg := pkg + populateGraphWaitGroup.Go(func() error { + return c.populateWorkspaceGraphForPackageJSON(pkg, rootpath, pkg.Name, &warnings) + }) + } + + if err := populateGraphWaitGroup.Wait(); err != nil { + return nil, err + } + // Resolve dependencies for the root package. We override the vertexName in the graph + // for the root package, since it can have an arbitrary name. We need it to have our + // RootPkgName so that we can identify it as the root later on. + err = c.populateWorkspaceGraphForPackageJSON(rootPackageJSON, rootpath, util.RootPkgName, &warnings) + if err != nil { + return nil, fmt.Errorf("failed to resolve dependencies for root package: %v", err) + } + c.WorkspaceInfos.PackageJSONs[util.RootPkgName] = rootPackageJSON + + return c, warnings.errorOrNil() +} + +func (c *Context) resolveWorkspaceRootDeps(rootPackageJSON *fs.PackageJSON, warnings *Warnings) error { + pkg := rootPackageJSON + pkg.UnresolvedExternalDeps = make(map[string]string) + for dep, version := range pkg.DevDependencies { + pkg.UnresolvedExternalDeps[dep] = version + } + for dep, version := range pkg.OptionalDependencies { + pkg.UnresolvedExternalDeps[dep] = version + } + for dep, version := range pkg.Dependencies { + pkg.UnresolvedExternalDeps[dep] = version + } + if c.Lockfile != nil { + depSet, err := lockfile.TransitiveClosure( + pkg.Dir.ToUnixPath(), + pkg.UnresolvedExternalDeps, + c.Lockfile, + ) + if err != nil { + warnings.append(err) + // Return early to skip using results of incomplete dep graph resolution + return nil + } + pkg.TransitiveDeps = make([]lockfile.Package, 0, depSet.Cardinality()) + for _, v := range depSet.ToSlice() { + dep := v.(lockfile.Package) + pkg.TransitiveDeps = append(pkg.TransitiveDeps, dep) + } + sort.Sort(lockfile.ByKey(pkg.TransitiveDeps)) + hashOfExternalDeps, err := fs.HashObject(pkg.TransitiveDeps) + if err != nil { + return err + } + pkg.ExternalDepsHash = hashOfExternalDeps + } else { + pkg.TransitiveDeps = []lockfile.Package{} + pkg.ExternalDepsHash = "" + } + + return nil +} + +// populateWorkspaceGraphForPackageJSON fills in the edges for the dependencies of the given package +// that are within the monorepo, as well as collecting and hashing the dependencies of the package +// that are not within the monorepo. The vertexName is used to override the package name in the graph. +// This can happen when adding the root package, which can have an arbitrary name. 
+func (c *Context) populateWorkspaceGraphForPackageJSON(pkg *fs.PackageJSON, rootpath string, vertexName string, warnings *Warnings) error { + c.mutex.Lock() + defer c.mutex.Unlock() + depMap := make(map[string]string) + internalDepsSet := make(dag.Set) + externalUnresolvedDepsSet := make(dag.Set) + pkg.UnresolvedExternalDeps = make(map[string]string) + + for dep, version := range pkg.DevDependencies { + depMap[dep] = version + } + + for dep, version := range pkg.OptionalDependencies { + depMap[dep] = version + } + + for dep, version := range pkg.Dependencies { + depMap[dep] = version + } + + // split out internal vs. external deps + for depName, depVersion := range depMap { + if item, ok := c.WorkspaceInfos.PackageJSONs[depName]; ok && isWorkspaceReference(item.Version, depVersion, pkg.Dir.ToStringDuringMigration(), rootpath) { + internalDepsSet.Add(depName) + c.WorkspaceGraph.Connect(dag.BasicEdge(vertexName, depName)) + } else { + externalUnresolvedDepsSet.Add(depName) + } + } + + for _, name := range externalUnresolvedDepsSet.List() { + name := name.(string) + if item, ok := pkg.DevDependencies[name]; ok { + pkg.UnresolvedExternalDeps[name] = item + } + + if item, ok := pkg.OptionalDependencies[name]; ok { + pkg.UnresolvedExternalDeps[name] = item + } + + if item, ok := pkg.Dependencies[name]; ok { + pkg.UnresolvedExternalDeps[name] = item + } + } + + externalDeps, err := lockfile.TransitiveClosure( + pkg.Dir.ToUnixPath(), + pkg.UnresolvedExternalDeps, + c.Lockfile, + ) + if err != nil { + warnings.append(err) + // reset external deps to original state + externalDeps = mapset.NewSet() + } + + // when there are no internal dependencies, we need to still add these leafs to the graph + if internalDepsSet.Len() == 0 { + c.WorkspaceGraph.Connect(dag.BasicEdge(pkg.Name, core.ROOT_NODE_NAME)) + } + pkg.TransitiveDeps = make([]lockfile.Package, 0, externalDeps.Cardinality()) + for _, dependency := range externalDeps.ToSlice() { + dependency := dependency.(lockfile.Package) + pkg.TransitiveDeps = append(pkg.TransitiveDeps, dependency) + } + pkg.InternalDeps = make([]string, 0, internalDepsSet.Len()) + for _, v := range internalDepsSet.List() { + pkg.InternalDeps = append(pkg.InternalDeps, fmt.Sprintf("%v", v)) + } + sort.Strings(pkg.InternalDeps) + sort.Sort(lockfile.ByKey(pkg.TransitiveDeps)) + hashOfExternalDeps, err := fs.HashObject(pkg.TransitiveDeps) + if err != nil { + return err + } + pkg.ExternalDepsHash = hashOfExternalDeps + return nil +} + +func (c *Context) parsePackageJSON(repoRoot turbopath.AbsoluteSystemPath, pkgJSONPath turbopath.AbsoluteSystemPath) error { + c.mutex.Lock() + defer c.mutex.Unlock() + + if pkgJSONPath.FileExists() { + pkg, err := fs.ReadPackageJSON(pkgJSONPath) + if err != nil { + return fmt.Errorf("parsing %s: %w", pkgJSONPath, err) + } + + relativePkgJSONPath, err := repoRoot.PathTo(pkgJSONPath) + if err != nil { + return err + } + c.WorkspaceGraph.Add(pkg.Name) + pkg.PackageJSONPath = turbopath.AnchoredSystemPathFromUpstream(relativePkgJSONPath) + pkg.Dir = turbopath.AnchoredSystemPathFromUpstream(filepath.Dir(relativePkgJSONPath)) + if c.WorkspaceInfos.PackageJSONs[pkg.Name] != nil { + existing := c.WorkspaceInfos.PackageJSONs[pkg.Name] + return fmt.Errorf("Failed to add workspace \"%s\" from %s, it already exists at %s", pkg.Name, pkg.Dir, existing.Dir) + } + c.WorkspaceInfos.PackageJSONs[pkg.Name] = pkg + c.WorkspaceNames = append(c.WorkspaceNames, pkg.Name) + } + return nil +} + +// InternalDependencies finds all dependencies required by the slice of 
starting +// packages, as well as the starting packages themselves. +func (c *Context) InternalDependencies(start []string) ([]string, error) { + vertices := make(dag.Set) + for _, v := range start { + vertices.Add(v) + } + s := make(dag.Set) + memoFunc := func(v dag.Vertex, d int) error { + s.Add(v) + return nil + } + + if err := c.WorkspaceGraph.DepthFirstWalk(vertices, memoFunc); err != nil { + return nil, err + } + + // Use for loop so we can coerce to string + // .List() returns a list of interface{} types, but + // we know they are strings. + targets := make([]string, 0, s.Len()) + for _, dep := range s.List() { + targets = append(targets, dep.(string)) + } + sort.Strings(targets) + + return targets, nil +} + +// ChangedPackages returns a list of changed packages based on the contents of a previous lockfile +// This assumes that none of the package.json in the workspace change, it is +// the responsibility of the caller to verify this. +func (c *Context) ChangedPackages(previousLockfile lockfile.Lockfile) ([]string, error) { + if lockfile.IsNil(previousLockfile) || lockfile.IsNil(c.Lockfile) { + return nil, fmt.Errorf("Cannot detect changed packages without previous and current lockfile") + } + + didPackageChange := func(pkgName string, pkg *fs.PackageJSON) bool { + previousDeps, err := lockfile.TransitiveClosure( + pkg.Dir.ToUnixPath(), + pkg.UnresolvedExternalDeps, + previousLockfile, + ) + if err != nil || previousDeps.Cardinality() != len(pkg.TransitiveDeps) { + return true + } + + prevExternalDeps := make([]lockfile.Package, 0, previousDeps.Cardinality()) + for _, d := range previousDeps.ToSlice() { + prevExternalDeps = append(prevExternalDeps, d.(lockfile.Package)) + } + sort.Sort(lockfile.ByKey(prevExternalDeps)) + + for i := range prevExternalDeps { + if prevExternalDeps[i] != pkg.TransitiveDeps[i] { + return true + } + } + return false + } + + changedPkgs := make([]string, 0, len(c.WorkspaceInfos.PackageJSONs)) + + // check if prev and current have "global" changes e.g. 
lockfile bump + globalChange := c.Lockfile.GlobalChange(previousLockfile) + + for pkgName, pkg := range c.WorkspaceInfos.PackageJSONs { + if globalChange { + break + } + if didPackageChange(pkgName, pkg) { + if pkgName == util.RootPkgName { + globalChange = true + } else { + changedPkgs = append(changedPkgs, pkgName) + } + } + } + + if globalChange { + changedPkgs = make([]string, 0, len(c.WorkspaceInfos.PackageJSONs)) + for pkgName := range c.WorkspaceInfos.PackageJSONs { + changedPkgs = append(changedPkgs, pkgName) + } + sort.Strings(changedPkgs) + return changedPkgs, nil + } + + sort.Strings(changedPkgs) + return changedPkgs, nil +} diff --git a/cli/internal/context/context_test.go b/cli/internal/context/context_test.go new file mode 100644 index 0000000..692c0a8 --- /dev/null +++ b/cli/internal/context/context_test.go @@ -0,0 +1,162 @@ +package context + +import ( + "os" + "path/filepath" + "regexp" + "testing" + + testifyAssert "github.com/stretchr/testify/assert" + "github.com/vercel/turbo/cli/internal/fs" + "github.com/vercel/turbo/cli/internal/turbopath" +) + +func Test_isWorkspaceReference(t *testing.T) { + rootpath, err := filepath.Abs(filepath.FromSlash("/some/repo")) + if err != nil { + t.Fatalf("failed to create absolute root path %v", err) + } + pkgDir, err := filepath.Abs(filepath.FromSlash("/some/repo/packages/libA")) + if err != nil { + t.Fatalf("failed to create absolute pkgDir %v", err) + } + tests := []struct { + name string + packageVersion string + dependencyVersion string + want bool + }{ + { + name: "handles exact match", + packageVersion: "1.2.3", + dependencyVersion: "1.2.3", + want: true, + }, + { + name: "handles semver range satisfied", + packageVersion: "1.2.3", + dependencyVersion: "^1.0.0", + want: true, + }, + { + name: "handles semver range not-satisfied", + packageVersion: "2.3.4", + dependencyVersion: "^1.0.0", + want: false, + }, + { + name: "handles workspace protocol with version", + packageVersion: "1.2.3", + dependencyVersion: "workspace:1.2.3", + want: true, + }, + { + name: "handles workspace protocol with relative path", + packageVersion: "1.2.3", + dependencyVersion: "workspace:../other-package/", + want: true, + }, + { + name: "handles npm protocol with satisfied semver range", + packageVersion: "1.2.3", + dependencyVersion: "npm:^1.2.3", + want: true, // default in yarn is to use the workspace version unless `enableTransparentWorkspaces: true`. This isn't currently being checked. + }, + { + name: "handles npm protocol with non-satisfied semver range", + packageVersion: "2.3.4", + dependencyVersion: "npm:^1.2.3", + want: false, + }, + { + name: "handles pre-release versions", + packageVersion: "1.2.3", + dependencyVersion: "1.2.2-alpha-1234abcd.0", + want: false, + }, + { + name: "handles non-semver package version", + packageVersion: "sometag", + dependencyVersion: "1.2.3", + want: true, // for backwards compatability with the code before versions were verified + }, + { + name: "handles non-semver package version", + packageVersion: "1.2.3", + dependencyVersion: "sometag", + want: true, // for backwards compatability with the code before versions were verified + }, + { + name: "handles file:... inside repo", + packageVersion: "1.2.3", + dependencyVersion: "file:../libB", + want: true, // this is a sibling package + }, + { + name: "handles file:... outside repo", + packageVersion: "1.2.3", + dependencyVersion: "file:../../../otherproject", + want: false, // this is not within the repo root + }, + { + name: "handles link:... 
inside repo", + packageVersion: "1.2.3", + dependencyVersion: "link:../libB", + want: true, // this is a sibling package + }, + { + name: "handles link:... outside repo", + packageVersion: "1.2.3", + dependencyVersion: "link:../../../otherproject", + want: false, // this is not within the repo root + }, + { + name: "handles development versions", + packageVersion: "0.0.0-development", + dependencyVersion: "*", + want: true, // "*" should always match + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got := isWorkspaceReference(tt.packageVersion, tt.dependencyVersion, pkgDir, rootpath) + if got != tt.want { + t.Errorf("isWorkspaceReference(%v, %v, %v, %v) got = %v, want %v", tt.packageVersion, tt.dependencyVersion, pkgDir, rootpath, got, tt.want) + } + }) + } +} + +func TestBuildPackageGraph_DuplicateNames(t *testing.T) { + path := getTestDir(t, "dupe-workspace-names") + pkgJSON := &fs.PackageJSON{ + Name: "dupe-workspace-names", + PackageManager: "pnpm@7.15.0", + } + + _, actualErr := BuildPackageGraph(path, pkgJSON) + + // Not asserting the full error message, because it includes a path with slashes and backslashes + // getting the regex incantation to check that is not worth it. + // We have to use regex because the actual error may be different depending on which workspace was + // added first and which one was second, causing the error. + testifyAssert.Regexp(t, regexp.MustCompile("^Failed to add workspace \"same-name\".+$"), actualErr) +} + +// This is duplicated from fs.turbo_json_test.go. +// I wasn't able to pull it into a helper file/package because +// it requires the `fs` package and it would cause cyclical dependencies +// when used in turbo_json_test.go and would require more changes to fix that. +func getTestDir(t *testing.T, testName string) turbopath.AbsoluteSystemPath { + defaultCwd, err := os.Getwd() + if err != nil { + t.Errorf("failed to get cwd: %v", err) + } + cwd, err := fs.CheckedToAbsoluteSystemPath(defaultCwd) + if err != nil { + t.Fatalf("cwd is not an absolute directory %v: %v", defaultCwd, err) + } + + return cwd.UntypedJoin("testdata", testName) +} diff --git a/cli/internal/context/testdata/dupe-workspace-names/apps/a/package.json b/cli/internal/context/testdata/dupe-workspace-names/apps/a/package.json new file mode 100644 index 0000000..94301a3 --- /dev/null +++ b/cli/internal/context/testdata/dupe-workspace-names/apps/a/package.json @@ -0,0 +1,6 @@ +{ + "name": "same-name", + "dependencies": { + "ui": "workspace:*" + } +} diff --git a/cli/internal/context/testdata/dupe-workspace-names/apps/b/package.json b/cli/internal/context/testdata/dupe-workspace-names/apps/b/package.json new file mode 100644 index 0000000..94301a3 --- /dev/null +++ b/cli/internal/context/testdata/dupe-workspace-names/apps/b/package.json @@ -0,0 +1,6 @@ +{ + "name": "same-name", + "dependencies": { + "ui": "workspace:*" + } +} diff --git a/cli/internal/context/testdata/dupe-workspace-names/package.json b/cli/internal/context/testdata/dupe-workspace-names/package.json new file mode 100644 index 0000000..3bf7403 --- /dev/null +++ b/cli/internal/context/testdata/dupe-workspace-names/package.json @@ -0,0 +1,7 @@ +{ + "name": "dupe-workspace-names", + "workspaces": [ + "apps/*" + ], + "packageManager": "pnpm@7.15.0" +} diff --git a/cli/internal/context/testdata/dupe-workspace-names/packages/ui/package.json b/cli/internal/context/testdata/dupe-workspace-names/packages/ui/package.json new file mode 100644 index 0000000..1cd75b5 --- /dev/null +++ 
b/cli/internal/context/testdata/dupe-workspace-names/packages/ui/package.json @@ -0,0 +1,3 @@ +{ + "name": "ui" +} diff --git a/cli/internal/context/testdata/dupe-workspace-names/pnpm-lock.yaml b/cli/internal/context/testdata/dupe-workspace-names/pnpm-lock.yaml new file mode 100644 index 0000000..0909cde --- /dev/null +++ b/cli/internal/context/testdata/dupe-workspace-names/pnpm-lock.yaml @@ -0,0 +1,21 @@ +lockfileVersion: 5.4 + +importers: + + .: + specifiers: {} + + apps/a: + specifiers: + ui: workspace:* + dependencies: + ui: link:../../packages/ui + + apps/b: + specifiers: + ui: workspace:* + dependencies: + ui: link:../../packages/ui + + packages/ui: + specifiers: {} diff --git a/cli/internal/context/testdata/dupe-workspace-names/pnpm-workspace.yaml b/cli/internal/context/testdata/dupe-workspace-names/pnpm-workspace.yaml new file mode 100644 index 0000000..3ff5faa --- /dev/null +++ b/cli/internal/context/testdata/dupe-workspace-names/pnpm-workspace.yaml @@ -0,0 +1,3 @@ +packages: + - "apps/*" + - "packages/*" diff --git a/cli/internal/core/engine.go b/cli/internal/core/engine.go new file mode 100644 index 0000000..7f08ea8 --- /dev/null +++ b/cli/internal/core/engine.go @@ -0,0 +1,591 @@ +package core + +import ( + "errors" + "fmt" + "os" + "sort" + "strings" + "sync/atomic" + + "github.com/vercel/turbo/cli/internal/fs" + "github.com/vercel/turbo/cli/internal/graph" + "github.com/vercel/turbo/cli/internal/util" + + "github.com/pyr-sh/dag" +) + +const ROOT_NODE_NAME = "___ROOT___" + +// Task is a higher level struct that contains the underlying TaskDefinition +// but also some adjustments to it, based on business logic. +type Task struct { + Name string + // TaskDefinition contains the config for the task from turbo.json + TaskDefinition fs.TaskDefinition +} + +type Visitor = func(taskID string) error + +// Engine contains both the DAG for the packages and the tasks and implements the methods to execute tasks in them +type Engine struct { + // TaskGraph is a graph of package-tasks + TaskGraph *dag.AcyclicGraph + PackageTaskDeps map[string][]string + rootEnabledTasks util.Set + + // completeGraph is the CompleteGraph. We need this to look up the Pipeline, etc. + completeGraph *graph.CompleteGraph + // isSinglePackage is used to load turbo.json correctly + isSinglePackage bool +} + +// NewEngine creates a new engine given a topologic graph of workspace package names +func NewEngine( + completeGraph *graph.CompleteGraph, + isSinglePackage bool, +) *Engine { + return &Engine{ + completeGraph: completeGraph, + TaskGraph: &dag.AcyclicGraph{}, + PackageTaskDeps: map[string][]string{}, + rootEnabledTasks: make(util.Set), + isSinglePackage: isSinglePackage, + } +} + +// EngineBuildingOptions help construct the TaskGraph +type EngineBuildingOptions struct { + // Packages in the execution scope, if nil, all packages will be considered in scope + Packages []string + // TaskNames in the execution scope, if nil, all tasks will be executed + TaskNames []string + // Restrict execution to only the listed task names + TasksOnly bool +} + +// EngineExecutionOptions controls a single walk of the task graph +type EngineExecutionOptions struct { + // Parallel is whether to run tasks in parallel + Parallel bool + // Concurrency is the number of concurrent tasks that can be executed + Concurrency int +} + +// Execute executes the pipeline, constructing an internal task graph and walking it accordingly. 
+func (e *Engine) Execute(visitor Visitor, opts EngineExecutionOptions) []error { + var sema = util.NewSemaphore(opts.Concurrency) + var errored int32 + return e.TaskGraph.Walk(func(v dag.Vertex) error { + // If something has already errored, short-circuit. + // There is a race here between concurrent tasks. However, if there is not a + // dependency edge between them, we are not required to have a strict order + // between them, so a failed task can fail to short-circuit a concurrent + // task that happened to be starting at the same time. + if atomic.LoadInt32(&errored) != 0 { + return nil + } + // Each vertex in the graph is a taskID (package#task format) + taskID := dag.VertexName(v) + + // Always return if it is the root node + if strings.Contains(taskID, ROOT_NODE_NAME) { + return nil + } + + // Acquire the semaphore unless parallel + if !opts.Parallel { + sema.Acquire() + defer sema.Release() + } + + if err := visitor(taskID); err != nil { + // We only ever flip from false to true, so we don't need to compare and swap the atomic + atomic.StoreInt32(&errored, 1) + return err + } + return nil + }) +} + +// MissingTaskError is a specialized Error thrown in the case that we can't find a task. +// We want to allow this error when getting task definitions, so we have to special case it. +type MissingTaskError struct { + workspaceName string + taskID string + taskName string +} + +func (m *MissingTaskError) Error() string { + return fmt.Sprintf("Could not find \"%s\" or \"%s\" in workspace \"%s\"", m.taskName, m.taskID, m.workspaceName) +} + +func (e *Engine) getTaskDefinition(pkg string, taskName string, taskID string) (*Task, error) { + pipeline, err := e.completeGraph.GetPipelineFromWorkspace(pkg, e.isSinglePackage) + + if err != nil { + if pkg != util.RootPkgName { + // If there was no turbo.json in the workspace, fallback to the root turbo.json + if errors.Is(err, os.ErrNotExist) { + return e.getTaskDefinition(util.RootPkgName, taskName, taskID) + } + + // otherwise bubble it up + return nil, err + } + + return nil, err + } + + if task, ok := pipeline[taskID]; ok { + return &Task{ + Name: taskName, + TaskDefinition: task.GetTaskDefinition(), + }, nil + } + + if task, ok := pipeline[taskName]; ok { + return &Task{ + Name: taskName, + TaskDefinition: task.GetTaskDefinition(), + }, nil + } + + // An error here means turbo.json exists, but didn't define the task. + // Fallback to the root pipeline to find the task. + if pkg != util.RootPkgName { + return e.getTaskDefinition(util.RootPkgName, taskName, taskID) + } + + // Return this as a custom type so we can ignore it specifically + return nil, &MissingTaskError{ + taskName: taskName, + taskID: taskID, + workspaceName: pkg, + } +} + +// Prepare constructs the Task Graph for a list of packages and tasks +func (e *Engine) Prepare(options *EngineBuildingOptions) error { + pkgs := options.Packages + taskNames := options.TaskNames + tasksOnly := options.TasksOnly + + // If there are no affected packages, we don't need to go through all this work + // we can just exit early. + // TODO(mehulkar): but we still need to validate bad task names? + if len(pkgs) == 0 { + return nil + } + + traversalQueue := []string{} + + // get a set of taskNames passed in. we'll remove the ones that have a definition + missing := util.SetFromStrings(taskNames) + + // Get a list of entry points into our TaskGraph. + // We do this by taking the input taskNames, and pkgs + // and creating a queue of taskIDs that we can traverse and gather dependencies from. 
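+    //
+    // For illustration (hypothetical inputs): with pkgs = ["web", "docs"] and
+    // taskNames = ["build"], the queue is seeded with taskIDs in the
+    // package#task format produced by util.GetTaskId:
+    //
+    //    util.GetTaskId("web", "build")  // "web#build"
+    //    util.GetTaskId("docs", "build") // "docs#build"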
+ for _, pkg := range pkgs { + for _, taskName := range taskNames { + taskID := util.GetTaskId(pkg, taskName) + + // Look up the task in the package + foundTask, err := e.getTaskDefinition(pkg, taskName, taskID) + + // We can skip MissingTaskErrors because we'll validate against them later + // Return all other errors + if err != nil { + var e *MissingTaskError + if errors.As(err, &e) { + // Initially, non-package tasks are not required to exist, as long as some + // package in the list packages defines it as a package-task. Dependencies + // *are* required to have a definition. + continue + } + + return err + } + + // If we found a task definition, remove it from the missing list + if foundTask != nil { + // delete taskName if it was found + missing.Delete(taskName) + + // Even if a task definition was found, we _only_ want to add it as an entry point to + // the task graph (i.e. the traversalQueue), if it's: + // - A task from the non-root workspace (i.e. tasks from every other workspace) + // - A task that we *know* is rootEnabled task (in which case, the root workspace is acceptable) + isRootPkg := pkg == util.RootPkgName + if !isRootPkg || e.rootEnabledTasks.Includes(taskName) { + traversalQueue = append(traversalQueue, taskID) + } + } + } + } + + visited := make(util.Set) + + // validate that all tasks passed were found + missingList := missing.UnsafeListOfStrings() + sort.Strings(missingList) + + if len(missingList) > 0 { + return fmt.Errorf("Could not find the following tasks in project: %s", strings.Join(missingList, ", ")) + } + + // Things get appended to traversalQueue inside this loop, so we use the len() check instead of range. + for len(traversalQueue) > 0 { + // pop off the first item from the traversalQueue + taskID := traversalQueue[0] + traversalQueue = traversalQueue[1:] + + pkg, taskName := util.GetPackageTaskFromId(taskID) + + if pkg == util.RootPkgName && !e.rootEnabledTasks.Includes(taskName) { + return fmt.Errorf("%v needs an entry in turbo.json before it can be depended on because it is a task run from the root package", taskID) + } + + if pkg != ROOT_NODE_NAME { + if _, ok := e.completeGraph.WorkspaceInfos.PackageJSONs[pkg]; !ok { + // If we have a pkg it should be in WorkspaceInfos. + // If we're hitting this error something has gone wrong earlier when building WorkspaceInfos + // or the workspace really doesn't exist and turbo.json is misconfigured. + return fmt.Errorf("Could not find workspace \"%s\" from task \"%s\" in project", pkg, taskID) + } + } + + taskDefinitions, err := e.getTaskDefinitionChain(taskID, taskName) + if err != nil { + return err + } + + taskDefinition, err := fs.MergeTaskDefinitions(taskDefinitions) + if err != nil { + return err + } + + // Skip this iteration of the loop if we've already seen this taskID + if visited.Includes(taskID) { + continue + } + + visited.Add(taskID) + + // Put this taskDefinition into the Graph so we can look it up later during execution. + e.completeGraph.TaskDefinitions[taskID] = taskDefinition + + topoDeps := util.SetFromStrings(taskDefinition.TopologicalDependencies) + deps := make(util.Set) + isPackageTask := util.IsPackageTask(taskName) + + for _, dependency := range taskDefinition.TaskDependencies { + // If the current task is a workspace-specific task (including root Task) + // and its dependency is _also_ a workspace-specific task, we need to add + // a reference to this dependency directly into the engine. + // TODO @mehulkar: Why do we need this? 
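+            //
+            // e.g. (hypothetical pipeline entry) a workspace-specific task
+            // depending on another workspace-specific task:
+            //
+            //    "app#deploy": { "dependsOn": ["app#build"] }
+            //
+            // takes the AddDep branch below, while a bare task name like
+            // "build" is added to deps and resolved within this package.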
+ if isPackageTask && util.IsPackageTask(dependency) { + if err := e.AddDep(dependency, taskName); err != nil { + return err + } + } else { + // For non-workspace-specific dependencies, we attach a reference to + // the task that is added into the engine. + deps.Add(dependency) + } + } + + // Filter down the tasks if there's a filter in place + // https: //turbo.build/repo/docs/reference/command-line-reference#--only + if tasksOnly { + deps = deps.Filter(func(d interface{}) bool { + for _, target := range taskNames { + return fmt.Sprintf("%v", d) == target + } + return false + }) + topoDeps = topoDeps.Filter(func(d interface{}) bool { + for _, target := range taskNames { + return fmt.Sprintf("%v", d) == target + } + return false + }) + } + + toTaskID := taskID + + // hasTopoDeps will be true if the task depends on any tasks from dependency packages + // E.g. `dev: { dependsOn: [^dev] }` + hasTopoDeps := topoDeps.Len() > 0 && e.completeGraph.WorkspaceGraph.DownEdges(pkg).Len() > 0 + + // hasDeps will be true if the task depends on any tasks from its own package + // E.g. `build: { dependsOn: [dev] }` + hasDeps := deps.Len() > 0 + + // hasPackageTaskDeps will be true if this is a workspace-specific task, and + // it depends on another workspace-specific tasks + // E.g. `my-package#build: { dependsOn: [my-package#beforebuild] }`. + hasPackageTaskDeps := false + if _, ok := e.PackageTaskDeps[toTaskID]; ok { + hasPackageTaskDeps = true + } + + if hasTopoDeps { + depPkgs := e.completeGraph.WorkspaceGraph.DownEdges(pkg) + for _, from := range topoDeps.UnsafeListOfStrings() { + // add task dep from all the package deps within repo + for depPkg := range depPkgs { + fromTaskID := util.GetTaskId(depPkg, from) + e.TaskGraph.Add(fromTaskID) + e.TaskGraph.Add(toTaskID) + e.TaskGraph.Connect(dag.BasicEdge(toTaskID, fromTaskID)) + traversalQueue = append(traversalQueue, fromTaskID) + } + } + } + + if hasDeps { + for _, from := range deps.UnsafeListOfStrings() { + fromTaskID := util.GetTaskId(pkg, from) + e.TaskGraph.Add(fromTaskID) + e.TaskGraph.Add(toTaskID) + e.TaskGraph.Connect(dag.BasicEdge(toTaskID, fromTaskID)) + traversalQueue = append(traversalQueue, fromTaskID) + } + } + + if hasPackageTaskDeps { + if pkgTaskDeps, ok := e.PackageTaskDeps[toTaskID]; ok { + for _, fromTaskID := range pkgTaskDeps { + e.TaskGraph.Add(fromTaskID) + e.TaskGraph.Add(toTaskID) + e.TaskGraph.Connect(dag.BasicEdge(toTaskID, fromTaskID)) + traversalQueue = append(traversalQueue, fromTaskID) + } + } + } + + // Add the root node into the graph + if !hasDeps && !hasTopoDeps && !hasPackageTaskDeps { + e.TaskGraph.Add(ROOT_NODE_NAME) + e.TaskGraph.Add(toTaskID) + e.TaskGraph.Connect(dag.BasicEdge(toTaskID, ROOT_NODE_NAME)) + } + } + + return nil +} + +// AddTask adds root tasks to the engine so they can be looked up later. +func (e *Engine) AddTask(taskName string) { + if util.IsPackageTask(taskName) { + pkg, taskName := util.GetPackageTaskFromId(taskName) + if pkg == util.RootPkgName { + e.rootEnabledTasks.Add(taskName) + } + } +} + +// AddDep adds tuples from+to task ID combos in tuple format so they can be looked up later. 
+func (e *Engine) AddDep(fromTaskID string, toTaskID string) error {
+    fromPkg, _ := util.GetPackageTaskFromId(fromTaskID)
+    if fromPkg != ROOT_NODE_NAME && fromPkg != util.RootPkgName && !e.completeGraph.WorkspaceGraph.HasVertex(fromPkg) {
+        return fmt.Errorf("found reference to unknown package: %v in task %v", fromPkg, fromTaskID)
+    }
+
+    if _, ok := e.PackageTaskDeps[toTaskID]; !ok {
+        e.PackageTaskDeps[toTaskID] = []string{}
+    }
+
+    e.PackageTaskDeps[toTaskID] = append(e.PackageTaskDeps[toTaskID], fromTaskID)
+
+    return nil
+}
+
+// ValidatePersistentDependencies checks if any task dependsOn a persistent task and returns
+// an error if that persistent task actually has a script implemented
+func (e *Engine) ValidatePersistentDependencies(graph *graph.CompleteGraph, concurrency int) error {
+    var validationError error
+    persistentCount := 0
+
+    // Adding in a lock because otherwise walking the graph can introduce a data race
+    // (reproducible with `go test -race`)
+    var sema = util.NewSemaphore(1)
+
+    errs := e.TaskGraph.Walk(func(v dag.Vertex) error {
+        vertexName := dag.VertexName(v) // vertexName is a taskID
+
+        // No need to check the root node if that's where we are.
+        if strings.Contains(vertexName, ROOT_NODE_NAME) {
+            return nil
+        }
+
+        // Acquire a lock, because otherwise walking this group can cause a race condition
+        // writing to the same validationError var defined outside the Walk(). This shows
+        // up when running tests with the `-race` flag.
+        sema.Acquire()
+        defer sema.Release()
+
+        currentTaskDefinition, currentTaskExists := e.completeGraph.TaskDefinitions[vertexName]
+        if currentTaskExists && currentTaskDefinition.Persistent {
+            persistentCount++
+        }
+
+        currentPackageName, currentTaskName := util.GetPackageTaskFromId(vertexName)
+
+        // For each "downEdge" (i.e. each task that _this_ task dependsOn)
+        // check if the downEdge is a Persistent task, and if it actually has the script implemented
+        // in that package's package.json
+        for dep := range e.TaskGraph.DownEdges(vertexName) {
+            depTaskID := dep.(string)
+            // No need to check the root node
+            if strings.Contains(depTaskID, ROOT_NODE_NAME) {
+                return nil
+            }
+
+            // Parse the taskID of this dependency task
+            packageName, taskName := util.GetPackageTaskFromId(depTaskID)
+
+            // Get the Task Definition so we can check if it is Persistent
+            depTaskDefinition, taskExists := e.completeGraph.TaskDefinitions[depTaskID]
+
+            if !taskExists {
+                return fmt.Errorf("Cannot find task definition for %v in package %v", depTaskID, packageName)
+            }
+
+            // Get information about the package
+            pkg, pkgExists := graph.WorkspaceInfos.PackageJSONs[packageName]
+            if !pkgExists {
+                return fmt.Errorf("Cannot find package %v", packageName)
+            }
+            _, hasScript := pkg.Scripts[taskName]
+
+            // If both conditions are true set a value and break out of checking the dependencies
+            if depTaskDefinition.Persistent && hasScript {
+                validationError = fmt.Errorf(
+                    "\"%s\" is a persistent task, \"%s\" cannot depend on it",
+                    util.GetTaskId(packageName, taskName),
+                    util.GetTaskId(currentPackageName, currentTaskName),
+                )
+
+                break
+            }
+        }
+
+        return nil
+    })
+
+    for _, err := range errs {
+        return fmt.Errorf("Validation failed: %v", err)
+    }
+
+    if validationError != nil {
+        return validationError
+    } else if persistentCount >= concurrency {
+        return fmt.Errorf("You have %v persistent tasks but `turbo` is configured for concurrency of %v.
Set --concurrency to at least %v", persistentCount, concurrency, persistentCount+1) + } + + return nil +} + +// getTaskDefinitionChain gets a set of TaskDefinitions that apply to the taskID. +// These definitions should be merged by the consumer. +func (e *Engine) getTaskDefinitionChain(taskID string, taskName string) ([]fs.BookkeepingTaskDefinition, error) { + // Start a list of TaskDefinitions we've found for this TaskID + taskDefinitions := []fs.BookkeepingTaskDefinition{} + + rootPipeline, err := e.completeGraph.GetPipelineFromWorkspace(util.RootPkgName, e.isSinglePackage) + if err != nil { + // It should be very unlikely that we can't find a root pipeline. Even for single package repos + // the pipeline is synthesized from package.json, so there should be _something_ here. + return nil, err + } + + // Look for the taskDefinition in the root pipeline. + if rootTaskDefinition, err := rootPipeline.GetTask(taskID, taskName); err == nil { + taskDefinitions = append(taskDefinitions, *rootTaskDefinition) + } + + // If we're in a single package repo, we can just exit with the TaskDefinition in the root pipeline + // since there are no workspaces, and we don't need to follow any extends keys. + if e.isSinglePackage { + if len(taskDefinitions) == 0 { + return nil, fmt.Errorf("Could not find \"%s\" in root turbo.json", taskID) + } + return taskDefinitions, nil + } + + // If the taskID is a root task (e.g. //#build), we don't need to look + // for a workspace task, since these can only be defined in the root turbo.json. + taskIDPackage, _ := util.GetPackageTaskFromId(taskID) + if taskIDPackage != util.RootPkgName && taskIDPackage != ROOT_NODE_NAME { + // If there is an error, we can ignore it, since turbo.json config is not required in the workspace. + if workspaceTurboJSON, err := e.completeGraph.GetTurboConfigFromWorkspace(taskIDPackage, e.isSinglePackage); err != nil { + // swallow the error where the config file doesn't exist, but bubble up other things + if !errors.Is(err, os.ErrNotExist) { + return nil, err + } + } else { + // Run some validations on a workspace turbo.json. Note that these validations are on + // the whole struct, and not relevant to the taskID we're looking at right now. + validationErrors := workspaceTurboJSON.Validate([]fs.TurboJSONValidation{ + validateNoPackageTaskSyntax, + validateExtends, + }) + + if len(validationErrors) > 0 { + fullError := errors.New("Invalid turbo.json") + for _, validationErr := range validationErrors { + fullError = fmt.Errorf("%w\n - %s", fullError, validationErr) + } + + return nil, fullError + } + + // If there are no errors, we can (try to) add the TaskDefinition to our list. + if workspaceDefinition, ok := workspaceTurboJSON.Pipeline[taskName]; ok { + taskDefinitions = append(taskDefinitions, workspaceDefinition) + } + } + } + + if len(taskDefinitions) == 0 { + return nil, fmt.Errorf("Could not find \"%s\" in root turbo.json or \"%s\" workspace", taskID, taskIDPackage) + } + + return taskDefinitions, nil +} + +func validateNoPackageTaskSyntax(turboJSON *fs.TurboJSON) []error { + errors := []error{} + + for taskIDOrName := range turboJSON.Pipeline { + if util.IsPackageTask(taskIDOrName) { + taskName := util.StripPackageName(taskIDOrName) + errors = append(errors, fmt.Errorf("\"%s\". Use \"%s\" instead", taskIDOrName, taskName)) + } + } + + return errors +} + +func validateExtends(turboJSON *fs.TurboJSON) []error { + extendErrors := []error{} + extends := turboJSON.Extends + // TODO(mehulkar): Enable extending from more than one workspace. 
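+    //
+    // For illustration: the only shape these checks currently accept in a
+    // workspace turbo.json is extending from the root workspace, e.g. a
+    // hypothetical workspace config:
+    //
+    //    { "extends": ["//"], "pipeline": { "build": {} } }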
+ if len(extends) > 1 { + extendErrors = append(extendErrors, fmt.Errorf("You can only extend from the root workspace")) + } + + // We don't support this right now + if len(extends) == 0 { + extendErrors = append(extendErrors, fmt.Errorf("No \"extends\" key found")) + } + + // TODO(mehulkar): Enable extending from non-root workspace. + if len(extends) == 1 && extends[0] != util.RootPkgName { + extendErrors = append(extendErrors, fmt.Errorf("You can only extend from the root workspace")) + } + + return extendErrors +} diff --git a/cli/internal/core/engine_test.go b/cli/internal/core/engine_test.go new file mode 100644 index 0000000..a92264d --- /dev/null +++ b/cli/internal/core/engine_test.go @@ -0,0 +1,88 @@ +package core + +import ( + "errors" + "testing" + + "github.com/vercel/turbo/cli/internal/fs" + "github.com/vercel/turbo/cli/internal/graph" + "github.com/vercel/turbo/cli/internal/workspace" + "gotest.tools/v3/assert" + + "github.com/pyr-sh/dag" +) + +func TestShortCircuiting(t *testing.T) { + var workspaceGraph dag.AcyclicGraph + workspaceGraph.Add("a") + workspaceGraph.Add("b") + workspaceGraph.Add("c") + // Dependencies: a -> b -> c + workspaceGraph.Connect(dag.BasicEdge("a", "b")) + workspaceGraph.Connect(dag.BasicEdge("b", "c")) + + buildTask := &fs.BookkeepingTaskDefinition{} + err := buildTask.UnmarshalJSON([]byte("{\"dependsOn\": [\"^build\"]}")) + assert.NilError(t, err, "BookkeepingTaskDefinition unmarshall") + + pipeline := map[string]fs.BookkeepingTaskDefinition{ + "build": *buildTask, + } + + p := NewEngine(&graph.CompleteGraph{ + WorkspaceGraph: workspaceGraph, + Pipeline: pipeline, + TaskDefinitions: map[string]*fs.TaskDefinition{}, + WorkspaceInfos: workspace.Catalog{ + PackageJSONs: map[string]*fs.PackageJSON{ + "//": {}, + "a": {}, + "b": {}, + "c": {}, + }, + TurboConfigs: map[string]*fs.TurboJSON{ + "//": { + Pipeline: pipeline, + }, + }, + }, + }, false) + + p.AddTask("build") + + err = p.Prepare(&EngineBuildingOptions{ + Packages: []string{"a", "b", "c"}, + TaskNames: []string{"build"}, + TasksOnly: false, + }) + + if err != nil { + t.Fatalf("%v", err) + } + + executed := map[string]bool{ + "a#build": false, + "b#build": false, + "c#build": false, + } + expectedErr := errors.New("an error occurred") + // b#build is going to error, we expect to not execute a#build, which depends on b + testVisitor := func(taskID string) error { + println(taskID) + executed[taskID] = true + if taskID == "b#build" { + return expectedErr + } + return nil + } + + errs := p.Execute(testVisitor, EngineExecutionOptions{ + Concurrency: 10, + }) + assert.Equal(t, len(errs), 1) + assert.Equal(t, errs[0], expectedErr) + + assert.Equal(t, executed["c#build"], true) + assert.Equal(t, executed["b#build"], true) + assert.Equal(t, executed["a#build"], false) +} diff --git a/cli/internal/daemon/connector/connector.go b/cli/internal/daemon/connector/connector.go new file mode 100644 index 0000000..d05ef59 --- /dev/null +++ b/cli/internal/daemon/connector/connector.go @@ -0,0 +1,391 @@ +package connector + +import ( + "context" + "fmt" + "io/fs" + "os" + "os/exec" + "time" + + "github.com/cenkalti/backoff/v4" + "github.com/hashicorp/go-hclog" + "github.com/nightlyone/lockfile" + "github.com/pkg/errors" + "github.com/vercel/turbo/cli/internal/turbodprotocol" + "github.com/vercel/turbo/cli/internal/turbopath" + "google.golang.org/grpc" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/credentials/insecure" + "google.golang.org/grpc/status" +) + +var ( + // ErrFailedToStart is returned when 
the daemon process cannot be started + ErrFailedToStart = errors.New("daemon could not be started") + // ErrVersionMismatch is returned when the daemon process was spawned by a different version than the connecting client + ErrVersionMismatch = errors.New("daemon version does not match client version") + errConnectionFailure = errors.New("could not connect to daemon") + // ErrTooManyAttempts is returned when the client fails to connect too many times + ErrTooManyAttempts = errors.New("reached maximum number of attempts contacting daemon") + // ErrDaemonNotRunning is returned when the client cannot contact the daemon and has + // been instructed not to attempt to start a new daemon + ErrDaemonNotRunning = errors.New("the daemon is not running") +) + +// Opts is the set of configurable options for the client connection, +// including some options to be passed through to the daemon process if +// it needs to be started. +type Opts struct { + ServerTimeout time.Duration + DontStart bool // if true, don't attempt to start the daemon + DontKill bool // if true, don't attempt to kill the daemon +} + +// Client represents a connection to the daemon process +type Client struct { + turbodprotocol.TurbodClient + *grpc.ClientConn + SockPath turbopath.AbsoluteSystemPath + PidPath turbopath.AbsoluteSystemPath + LogPath turbopath.AbsoluteSystemPath +} + +// Connector instances are used to create a connection to turbo's daemon process +// The daemon will be started , or killed and restarted, if necessary +type Connector struct { + Logger hclog.Logger + Bin string + Opts Opts + SockPath turbopath.AbsoluteSystemPath + PidPath turbopath.AbsoluteSystemPath + LogPath turbopath.AbsoluteSystemPath + TurboVersion string +} + +// ConnectionError is returned in the error case from connect. It wraps the underlying +// cause and adds a message with the relevant files for the user to check. +type ConnectionError struct { + SockPath turbopath.AbsoluteSystemPath + PidPath turbopath.AbsoluteSystemPath + LogPath turbopath.AbsoluteSystemPath + cause error +} + +func (ce *ConnectionError) Error() string { + return fmt.Sprintf(`connection to turbo daemon process failed. Please ensure the following: + - the process identified by the pid in the file at %v is not running, and remove %v + - check the logs at %v + - the unix domain socket at %v has been removed + You can also run without the daemon process by passing --no-daemon`, ce.PidPath, ce.PidPath, ce.LogPath, ce.SockPath) +} + +// Unwrap allows a connection error to work with standard library "errors" and compatible packages +func (ce *ConnectionError) Unwrap() error { + return ce.cause +} + +func (c *Connector) wrapConnectionError(err error) error { + return &ConnectionError{ + SockPath: c.SockPath, + PidPath: c.PidPath, + LogPath: c.LogPath, + cause: err, + } +} + +// lockFile returns a pointer to where a lockfile should be. +// lockfile.New does not perform IO and the only error it produces +// is in the case a non-absolute path was provided. We're guaranteeing an +// turbopath.AbsoluteSystemPath, so an error here is an indication of a bug and +// we should crash. +func (c *Connector) lockFile() lockfile.Lockfile { + lockFile, err := lockfile.New(c.PidPath.ToString()) + if err != nil { + panic(err) + } + return lockFile +} + +func (c *Connector) addr() string { + // grpc special-cases parsing of unix: urls + // to avoid url.Parse. This lets us pass through our absolute + // paths unmodified, even on windows. 
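+    // For illustration, the resulting dial target looks like
+    // "unix:/path/to/turbod.sock" (hypothetical path).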
+    // See code here: https://github.com/grpc/grpc-go/blob/d83070ec0d9043f713b6a63e1963c593b447208c/internal/transport/http_util.go#L392
+    return fmt.Sprintf("unix:%v", c.SockPath.ToString())
+}
+
+// We defer to the daemon's pid file as the locking mechanism.
+// If it doesn't exist, we will attempt to start the daemon.
+// If the daemon has a different version, ask it to shut down.
+// If the pid file exists but we can't connect, try to kill
+// the daemon.
+// If we can't cause the daemon to remove the pid file, report
+// an error to the user that includes the file location so that
+// they can resolve it.
+const (
+    _maxAttempts       = 3
+    _shutdownTimeout   = 1 * time.Second
+    _socketPollTimeout = 1 * time.Second
+)
+
+// killLiveServer tells a running server to shut down. This method is also responsible
+// for closing this client connection.
+func (c *Connector) killLiveServer(ctx context.Context, client *Client, serverPid int) error {
+    defer func() { _ = client.Close() }()
+
+    _, err := client.Shutdown(ctx, &turbodprotocol.ShutdownRequest{})
+    if err != nil {
+        c.Logger.Error(fmt.Sprintf("failed to shutdown running daemon. attempting to force it closed: %v", err))
+        return c.killDeadServer(serverPid)
+    }
+    // Wait for the server to gracefully exit
+    err = backoff.Retry(func() error {
+        lockFile := c.lockFile()
+        owner, err := lockFile.GetOwner()
+        if os.IsNotExist(err) {
+            // If there is no pid file anymore, we can conclude that the daemon successfully
+            // exited and cleaned up after itself.
+            return nil
+        } else if err != nil {
+            // some other error occurred getting the lockfile owner
+            return backoff.Permanent(err)
+        } else if owner.Pid == serverPid {
+            // We're still waiting for the server to shut down
+            return errNeedsRetry
+        }
+        // if there's no error and the lockfile has a new pid, someone else must've started a new daemon.
+        // Consider the old one killed and move on.
+        return nil
+    }, backoffWithTimeout(_shutdownTimeout))
+    if errors.Is(err, errNeedsRetry) {
+        c.Logger.Error(fmt.Sprintf("daemon did not exit after %v, attempting to force it closed", _shutdownTimeout.String()))
+        return c.killDeadServer(serverPid)
+    } else if err != nil {
+        return err
+    }
+    return nil
+}
+
+func (c *Connector) killDeadServer(pid int) error {
+    // currently the only error that this constructor returns is
+    // in the case that you don't provide an absolute path.
+    // Given that we require an absolute path as input, this should
+    // hopefully never happen.
+    lockFile := c.lockFile()
+    process, err := lockFile.GetOwner()
+    if err == nil {
+        // Check that this is the same process that we failed to connect to.
+        // Otherwise, connectInternal will loop around again and start with whatever
+        // new process has the pid file.
+        if process.Pid == pid {
+            // we have a process that we need to kill
+            // TODO(gsoltis): graceful kill? the process is already not responding to requests,
+            // but it could be in the middle of a graceful shutdown. Probably should let it clean
+            // itself up, and report an error and defer to a force-kill by the user
+            if err := process.Kill(); err != nil {
+                return err
+            }
+        }
+        return nil
+    } else if errors.Is(err, os.ErrNotExist) {
+        // There's no pid file. Someone else killed it. Returning no error will cause the
+        // connectInternal to loop around and try the connection again.
+        return nil
+    }
+    return err
+}
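+
+// A minimal usage sketch for this connector (hypothetical values; the paths
+// and version are whatever the caller has already resolved):
+//
+//    c := &Connector{
+//        Logger:       hclog.Default(),
+//        Bin:          "turbo",
+//        SockPath:     sockPath,
+//        PidPath:      pidPath,
+//        LogPath:      logPath,
+//        TurboVersion: "1.8.0",
+//    }
+//    client, err := c.Connect(ctx)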
+
+// Connect attempts to create a connection to a turbo daemon.
+// Retries and daemon restarts are built in. If this fails,
+// it is unlikely to succeed after an automated retry.
+func (c *Connector) Connect(ctx context.Context) (*Client, error) {
+    client, err := c.connectInternal(ctx)
+    if err != nil {
+        return nil, c.wrapConnectionError(err)
+    }
+    return client, nil
+}
+
+func (c *Connector) connectInternal(ctx context.Context) (*Client, error) {
+    // for each attempt, we:
+    // 1. try to find or start a daemon process, getting its pid
+    // 2. wait for the unix domain socket file to appear
+    // 3. connect to the unix domain socket. Note that this connection is not validated
+    // 4. send a hello message. This validates the connection as a side effect of
+    //    negotiating versions, which currently requires exact match.
+    // In the event of a live, but incompatible server, we attempt to shut it down and start
+    // a new one. In the event of an unresponsive server, we attempt to kill the process
+    // identified by the pid file, with the hope that it will clean up after itself.
+    // Failures include details about where to find logs, the pid file, and the socket file.
+    for i := 0; i < _maxAttempts; i++ {
+        serverPid, err := c.getOrStartDaemon()
+        if err != nil {
+            // If we fail to even start the daemon process, return immediately, we're unlikely
+            // to succeed without user intervention
+            return nil, err
+        }
+        if err := c.waitForSocket(); errors.Is(err, ErrFailedToStart) {
+            // If we didn't see the socket file, try again. It's possible that
+            // the daemon encountered a transitory error
+            continue
+        } else if err != nil {
+            return nil, err
+        }
+        client, err := c.getClientConn()
+        if err != nil {
+            return nil, err
+        }
+        if err := c.sendHello(ctx, client); err == nil {
+            // We connected and negotiated a version, we're all set
+            return client, nil
+        } else if errors.Is(err, ErrVersionMismatch) {
+            // We don't want to knock down a perfectly fine daemon in a status check.
+            if c.Opts.DontKill {
+                return nil, err
+            }
+
+            // We now know we aren't going to return this client,
+            // but killLiveServer still needs it to send the Shutdown request.
+            // killLiveServer will close the client when it is done with it.
+            if err := c.killLiveServer(ctx, client, serverPid); err != nil {
+                return nil, err
+            }
+            // Loops back around and tries again.
+        } else if errors.Is(err, errConnectionFailure) {
+            // close the client, see if we can kill the stale daemon
+            _ = client.Close()
+            if err := c.killDeadServer(serverPid); err != nil {
+                return nil, err
+            }
+            // if we successfully killed the dead server, loop around and try again
+        } else if err != nil {
+            // Some other error occurred, close the client and
+            // report the error to the user
+            if closeErr := client.Close(); closeErr != nil {
+                // In the event that we fail to close the client, bundle that error along also.
+                // Keep the original error in the error chain, as it's more likely to be useful
+                // or needed for matching on later.
+                err = errors.Wrapf(err, "also failed to close client connection: %v", closeErr)
+            }
+            return nil, err
+        }
+    }
+    return nil, ErrTooManyAttempts
+}
+
+// getOrStartDaemon returns the PID of the daemon process on success. It may start
+// the daemon if it doesn't find one running.
+func (c *Connector) getOrStartDaemon() (int, error) {
+    lockFile := c.lockFile()
+    daemonProcess, getDaemonProcessErr := lockFile.GetOwner()
+    if getDaemonProcessErr != nil {
+        // If we're in a clean state this isn't an "error" per se.
+        // We attempt to start a daemon.
+
+// getOrStartDaemon returns the PID of the daemon process on success. It may start
+// the daemon if it doesn't find one running.
+func (c *Connector) getOrStartDaemon() (int, error) {
+	lockFile := c.lockFile()
+	daemonProcess, getDaemonProcessErr := lockFile.GetOwner()
+	if getDaemonProcessErr != nil {
+		// If we're in a clean state this isn't an "error" per se.
+		// We attempt to start a daemon.
+		if errors.Is(getDaemonProcessErr, fs.ErrNotExist) {
+			if c.Opts.DontStart {
+				return 0, ErrDaemonNotRunning
+			}
+			pid, startDaemonErr := c.startDaemon()
+			if startDaemonErr != nil {
+				return 0, startDaemonErr
+			}
+			return pid, nil
+		}
+
+		// We could have hit any number of errors.
+		// - Failed to read the file for permission reasons.
+		// - User emptied the file's contents.
+		// - etc.
+		return 0, errors.Wrapf(getDaemonProcessErr, "An issue was encountered with the pid file. Please remove it and try again: %v", c.PidPath)
+	}
+
+	return daemonProcess.Pid, nil
+}
+
+func (c *Connector) getClientConn() (*Client, error) {
+	creds := insecure.NewCredentials()
+	conn, err := grpc.Dial(c.addr(), grpc.WithTransportCredentials(creds))
+	if err != nil {
+		return nil, err
+	}
+	tc := turbodprotocol.NewTurbodClient(conn)
+	return &Client{
+		TurbodClient: tc,
+		ClientConn:   conn,
+		SockPath:     c.SockPath,
+		PidPath:      c.PidPath,
+		LogPath:      c.LogPath,
+	}, nil
+}
+
+func (c *Connector) sendHello(ctx context.Context, client turbodprotocol.TurbodClient) error {
+	_, err := client.Hello(ctx, &turbodprotocol.HelloRequest{
+		Version: c.TurboVersion,
+		// TODO: add session id
+	})
+	status := status.Convert(err)
+	switch status.Code() {
+	case codes.OK:
+		return nil
+	case codes.FailedPrecondition:
+		return ErrVersionMismatch
+	case codes.Unavailable:
+		return errConnectionFailure
+	default:
+		return err
+	}
+}
+
+var errNeedsRetry = errors.New("retry the operation")
+
+// backoffWithTimeout returns an exponential backoff, starting at 2ms and doubling until
+// the specified timeout has elapsed. Note that backoff instances are stateful, so we need
+// a new one each time we do a Retry.
+func backoffWithTimeout(timeout time.Duration) *backoff.ExponentialBackOff {
+	return &backoff.ExponentialBackOff{
+		Multiplier:      2,
+		InitialInterval: 2 * time.Millisecond,
+		MaxElapsedTime:  timeout,
+		Clock:           backoff.SystemClock,
+		Stop:            backoff.Stop,
+	}
+}
+
+// waitForSocket waits for the unix domain socket to appear
+func (c *Connector) waitForSocket() error {
+	// Note that we don't care if this is our daemon
+	// or not. We started a process, but someone else could beat
+	// us to listening. That's fine, we'll check the version
+	// later.
+	err := backoff.Retry(func() error {
+		if !c.SockPath.FileExists() {
+			return errNeedsRetry
+		}
+		return nil
+	}, backoffWithTimeout(_socketPollTimeout))
+	if errors.Is(err, errNeedsRetry) {
+		return ErrFailedToStart
+	} else if err != nil {
+		return err
+	}
+	return nil
+}
+
+// startDaemon starts the daemon and returns the pid for the new process
+func (c *Connector) startDaemon() (int, error) {
+	args := []string{"daemon"}
+	if c.Opts.ServerTimeout != 0 {
+		args = append(args, fmt.Sprintf("--idle-time=%v", c.Opts.ServerTimeout.String()))
+	}
+	c.Logger.Debug(fmt.Sprintf("starting turbod binary %v", c.Bin))
+	cmd := exec.Command(c.Bin, args...)
+	// We want the daemon to have its own process group id so that any attempts
+	// to kill it and its process tree don't kill this client.
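+	// getSysProcAttrs is platform-specific: on unix it sets Setpgid, and on
+	// Windows it sets CREATE_NEW_PROCESS_GROUP (see fork.go and fork_windows.go
+	// later in this patch).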
+ cmd.SysProcAttr = getSysProcAttrs() + err := cmd.Start() + if err != nil { + return 0, err + } + return cmd.Process.Pid, nil +} diff --git a/cli/internal/daemon/connector/connector_test.go b/cli/internal/daemon/connector/connector_test.go new file mode 100644 index 0000000..62b4504 --- /dev/null +++ b/cli/internal/daemon/connector/connector_test.go @@ -0,0 +1,256 @@ +package connector + +import ( + "context" + "errors" + "net" + "os/exec" + "runtime" + "strconv" + "testing" + + "github.com/hashicorp/go-hclog" + "github.com/nightlyone/lockfile" + "github.com/vercel/turbo/cli/internal/fs" + "github.com/vercel/turbo/cli/internal/turbodprotocol" + "github.com/vercel/turbo/cli/internal/turbopath" + "google.golang.org/grpc" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/credentials/insecure" + "google.golang.org/grpc/status" + "google.golang.org/grpc/test/bufconn" + "gotest.tools/v3/assert" +) + +// testBin returns a platform-appropriate executable to run node. +// Node works here as an arbitrary process to start, since it's +// required for turbo development. It will obviously not implement +// our grpc service, use a mockServer instance where that's needed. +func testBin() string { + if runtime.GOOS == "windows" { + return "node.exe" + } + return "node" +} + +func getUnixSocket(dir turbopath.AbsoluteSystemPath) turbopath.AbsoluteSystemPath { + return dir.UntypedJoin("turbod-test.sock") +} + +func getPidFile(dir turbopath.AbsoluteSystemPath) turbopath.AbsoluteSystemPath { + return dir.UntypedJoin("turbod-test.pid") +} + +func TestGetOrStartDaemonInvalidPIDFile(t *testing.T) { + logger := hclog.Default() + dir := t.TempDir() + dirPath := fs.AbsoluteSystemPathFromUpstream(dir) + + pidPath := getPidFile(dirPath) + writeFileErr := pidPath.WriteFile(nil, 0777) + assert.NilError(t, writeFileErr, "WriteFile") + + c := &Connector{ + Logger: logger, + Opts: Opts{}, + PidPath: pidPath, + } + + pid, err := c.getOrStartDaemon() + assert.Equal(t, pid, 0) + assert.ErrorContains(t, err, "issue was encountered with the pid file") +} + +func TestConnectFailsWithoutGrpcServer(t *testing.T) { + // We aren't starting a server that is going to write + // to our socket file, so we should see a series of connection + // failures, followed by ErrTooManyAttempts + logger := hclog.Default() + dir := t.TempDir() + dirPath := fs.AbsoluteSystemPathFromUpstream(dir) + + sockPath := getUnixSocket(dirPath) + pidPath := getPidFile(dirPath) + ctx := context.Background() + bin := testBin() + c := &Connector{ + Logger: logger, + Bin: bin, + Opts: Opts{}, + SockPath: sockPath, + PidPath: pidPath, + } + // Note that we expect ~3s here, for 3 attempts with a timeout of 1s + _, err := c.connectInternal(ctx) + assert.ErrorIs(t, err, ErrTooManyAttempts) +} + +func TestKillDeadServerNoPid(t *testing.T) { + logger := hclog.Default() + dir := t.TempDir() + dirPath := fs.AbsoluteSystemPathFromUpstream(dir) + + sockPath := getUnixSocket(dirPath) + pidPath := getPidFile(dirPath) + c := &Connector{ + Logger: logger, + Bin: "nonexistent", + Opts: Opts{}, + SockPath: sockPath, + PidPath: pidPath, + } + + err := c.killDeadServer(99999) + assert.NilError(t, err, "killDeadServer") +} + +func TestKillDeadServerNoProcess(t *testing.T) { + logger := hclog.Default() + dir := t.TempDir() + dirPath := fs.AbsoluteSystemPathFromUpstream(dir) + + sockPath := getUnixSocket(dirPath) + pidPath := getPidFile(dirPath) + // Simulate the socket already existing, with no live daemon + err := sockPath.WriteFile([]byte("junk"), 0644) + 
assert.NilError(t, err, "WriteFile") + err = pidPath.WriteFile([]byte("99999"), 0644) + assert.NilError(t, err, "WriteFile") + c := &Connector{ + Logger: logger, + Bin: "nonexistent", + Opts: Opts{}, + SockPath: sockPath, + PidPath: pidPath, + } + + err = c.killDeadServer(99999) + assert.ErrorIs(t, err, lockfile.ErrDeadOwner) + stillExists := pidPath.FileExists() + if !stillExists { + t.Error("pidPath should still exist, expected the user to clean it up") + } +} + +func TestKillDeadServerWithProcess(t *testing.T) { + logger := hclog.Default() + dir := t.TempDir() + dirPath := fs.AbsoluteSystemPathFromUpstream(dir) + + sockPath := getUnixSocket(dirPath) + pidPath := getPidFile(dirPath) + // Simulate the socket already existing, with no live daemon + err := sockPath.WriteFile([]byte("junk"), 0644) + assert.NilError(t, err, "WriteFile") + bin := testBin() + cmd := exec.Command(bin) + err = cmd.Start() + assert.NilError(t, err, "cmd.Start") + pid := cmd.Process.Pid + if pid == 0 { + t.Fatalf("failed to start process %v", bin) + } + + err = pidPath.WriteFile([]byte(strconv.Itoa(pid)), 0644) + assert.NilError(t, err, "WriteFile") + c := &Connector{ + Logger: logger, + Bin: "nonexistent", + Opts: Opts{}, + SockPath: sockPath, + PidPath: pidPath, + } + + err = c.killDeadServer(pid) + assert.NilError(t, err, "killDeadServer") + stillExists := pidPath.FileExists() + if !stillExists { + t.Error("pidPath no longer exists, expected client to not clean it up") + } + err = cmd.Wait() + exitErr := &exec.ExitError{} + if !errors.As(err, &exitErr) { + t.Errorf("expected an exit error from %v, got %v", bin, err) + } +} + +type mockServer struct { + turbodprotocol.UnimplementedTurbodServer + helloErr error + shutdownResp *turbodprotocol.ShutdownResponse + pidFile turbopath.AbsoluteSystemPath +} + +// Simulates server exiting by cleaning up the pid file +func (s *mockServer) Shutdown(ctx context.Context, req *turbodprotocol.ShutdownRequest) (*turbodprotocol.ShutdownResponse, error) { + if err := s.pidFile.Remove(); err != nil { + return nil, err + } + return s.shutdownResp, nil +} + +func (s *mockServer) Hello(ctx context.Context, req *turbodprotocol.HelloRequest) (*turbodprotocol.HelloResponse, error) { + if req.Version == "" { + return nil, errors.New("missing version") + } + return nil, s.helloErr +} + +func TestKillLiveServer(t *testing.T) { + logger := hclog.Default() + dir := t.TempDir() + dirPath := fs.AbsoluteSystemPathFromUpstream(dir) + + sockPath := getUnixSocket(dirPath) + pidPath := getPidFile(dirPath) + err := pidPath.WriteFile([]byte("99999"), 0644) + assert.NilError(t, err, "WriteFile") + + ctx := context.Background() + c := &Connector{ + Logger: logger, + Bin: "nonexistent", + Opts: Opts{}, + SockPath: sockPath, + PidPath: pidPath, + TurboVersion: "some-version", + } + + st := status.New(codes.FailedPrecondition, "version mismatch") + mock := &mockServer{ + shutdownResp: &turbodprotocol.ShutdownResponse{}, + helloErr: st.Err(), + pidFile: pidPath, + } + lis := bufconn.Listen(1024 * 1024) + grpcServer := grpc.NewServer() + turbodprotocol.RegisterTurbodServer(grpcServer, mock) + go func(t *testing.T) { + if err := grpcServer.Serve(lis); err != nil { + t.Logf("server closed: %v", err) + } + }(t) + + conn, err := grpc.DialContext(ctx, "bufnet", grpc.WithContextDialer(func(ctx context.Context, s string) (net.Conn, error) { + return lis.Dial() + }), grpc.WithTransportCredentials(insecure.NewCredentials())) + assert.NilError(t, err, "DialContext") + turboClient := turbodprotocol.NewTurbodClient(conn) 
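+	// The bufconn listener above keeps the gRPC exchange entirely in memory;
+	// no real unix socket is involved, so the mock server can be exercised
+	// without the daemon's socket file ever being created.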
+ client := &Client{ + TurbodClient: turboClient, + ClientConn: conn, + } + err = c.sendHello(ctx, client) + if !errors.Is(err, ErrVersionMismatch) { + t.Errorf("sendHello error got %v, want %v", err, ErrVersionMismatch) + } + err = c.killLiveServer(ctx, client, 99999) + assert.NilError(t, err, "killLiveServer") + // Expect the pid file and socket files to have been cleaned up + if pidPath.FileExists() { + t.Errorf("expected pid file to have been deleted: %v", pidPath) + } + if sockPath.FileExists() { + t.Errorf("expected socket file to have been deleted: %v", sockPath) + } +} diff --git a/cli/internal/daemon/connector/fork.go b/cli/internal/daemon/connector/fork.go new file mode 100644 index 0000000..8a6d01d --- /dev/null +++ b/cli/internal/daemon/connector/fork.go @@ -0,0 +1,15 @@ +//go:build !windows +// +build !windows + +package connector + +import "syscall" + +// getSysProcAttrs returns the platform-specific attributes we want to +// use while forking the daemon process. Currently this is limited to +// forcing a new process group +func getSysProcAttrs() *syscall.SysProcAttr { + return &syscall.SysProcAttr{ + Setpgid: true, + } +} diff --git a/cli/internal/daemon/connector/fork_windows.go b/cli/internal/daemon/connector/fork_windows.go new file mode 100644 index 0000000..b9d6e77 --- /dev/null +++ b/cli/internal/daemon/connector/fork_windows.go @@ -0,0 +1,15 @@ +//go:build windows +// +build windows + +package connector + +import "syscall" + +// getSysProcAttrs returns the platform-specific attributes we want to +// use while forking the daemon process. Currently this is limited to +// forcing a new process group +func getSysProcAttrs() *syscall.SysProcAttr { + return &syscall.SysProcAttr{ + CreationFlags: syscall.CREATE_NEW_PROCESS_GROUP, + } +} diff --git a/cli/internal/daemon/daemon.go b/cli/internal/daemon/daemon.go new file mode 100644 index 0000000..81d5283 --- /dev/null +++ b/cli/internal/daemon/daemon.go @@ -0,0 +1,307 @@ +package daemon + +import ( + "context" + "crypto/sha256" + "encoding/hex" + "fmt" + "io" + "net" + "os" + "path/filepath" + "strings" + "time" + + grpc_recovery "github.com/grpc-ecosystem/go-grpc-middleware/recovery" + "github.com/hashicorp/go-hclog" + "github.com/nightlyone/lockfile" + "github.com/pkg/errors" + "github.com/vercel/turbo/cli/internal/cmdutil" + "github.com/vercel/turbo/cli/internal/daemon/connector" + "github.com/vercel/turbo/cli/internal/fs" + "github.com/vercel/turbo/cli/internal/server" + "github.com/vercel/turbo/cli/internal/signals" + "github.com/vercel/turbo/cli/internal/turbopath" + "github.com/vercel/turbo/cli/internal/turbostate" + "google.golang.org/grpc" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" +) + +type daemon struct { + logger hclog.Logger + repoRoot turbopath.AbsoluteSystemPath + timeout time.Duration + reqCh chan struct{} + timedOutCh chan struct{} +} + +func getRepoHash(repoRoot turbopath.AbsoluteSystemPath) string { + pathHash := sha256.Sum256([]byte(repoRoot.ToString())) + // We grab a substring of the hash because there is a 108-character limit on the length + // of a filepath for unix domain socket. 
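+	// Keeping 16 hex characters (64 bits of the sha256 digest) leaves ample
+	// headroom under that limit while making collisions between repo roots
+	// practically impossible.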
+ return hex.EncodeToString(pathHash[:])[:16] +} + +func getDaemonFileRoot(repoRoot turbopath.AbsoluteSystemPath) turbopath.AbsoluteSystemPath { + tempDir := fs.TempDir("turbod") + hexHash := getRepoHash(repoRoot) + return tempDir.UntypedJoin(hexHash) +} + +func getLogFilePath(repoRoot turbopath.AbsoluteSystemPath) (turbopath.AbsoluteSystemPath, error) { + hexHash := getRepoHash(repoRoot) + base := repoRoot.Base() + logFilename := fmt.Sprintf("%v-%v.log", hexHash, base) + + logsDir := fs.GetTurboDataDir().UntypedJoin("logs") + return logsDir.UntypedJoin(logFilename), nil +} + +func getUnixSocket(repoRoot turbopath.AbsoluteSystemPath) turbopath.AbsoluteSystemPath { + root := getDaemonFileRoot(repoRoot) + return root.UntypedJoin("turbod.sock") +} + +func getPidFile(repoRoot turbopath.AbsoluteSystemPath) turbopath.AbsoluteSystemPath { + root := getDaemonFileRoot(repoRoot) + return root.UntypedJoin("turbod.pid") +} + +// logError logs an error and outputs it to the UI. +func (d *daemon) logError(err error) { + d.logger.Error(fmt.Sprintf("error %v", err)) +} + +// we're only appending, and we're creating the file if it doesn't exist. +// we do not need to read the log file. +var _logFileFlags = os.O_WRONLY | os.O_APPEND | os.O_CREATE + +// ExecuteDaemon executes the root daemon command +func ExecuteDaemon(ctx context.Context, helper *cmdutil.Helper, signalWatcher *signals.Watcher, args *turbostate.ParsedArgsFromRust) error { + base, err := helper.GetCmdBase(args) + if err != nil { + return err + } + if args.TestRun { + base.UI.Info("Daemon test run successful") + return nil + } + + idleTimeout := 4 * time.Hour + if args.Command.Daemon.IdleTimeout != "" { + idleTimeout, err = time.ParseDuration(args.Command.Daemon.IdleTimeout) + if err != nil { + return err + } + } + + logFilePath, err := getLogFilePath(base.RepoRoot) + if err != nil { + return err + } + if err := logFilePath.EnsureDir(); err != nil { + return err + } + logFile, err := logFilePath.OpenFile(_logFileFlags, 0644) + if err != nil { + return err + } + defer func() { _ = logFile.Close() }() + logger := hclog.New(&hclog.LoggerOptions{ + Output: io.MultiWriter(logFile, os.Stdout), + Level: hclog.Info, + Color: hclog.ColorOff, + Name: "turbod", + }) + + d := &daemon{ + logger: logger, + repoRoot: base.RepoRoot, + timeout: idleTimeout, + reqCh: make(chan struct{}), + timedOutCh: make(chan struct{}), + } + serverName := getRepoHash(base.RepoRoot) + turboServer, err := server.New(serverName, d.logger.Named("rpc server"), base.RepoRoot, base.TurboVersion, logFilePath) + if err != nil { + d.logError(err) + return err + } + defer func() { _ = turboServer.Close() }() + err = d.runTurboServer(ctx, turboServer, signalWatcher) + if err != nil { + d.logError(err) + return err + } + return nil +} + +var errInactivityTimeout = errors.New("turbod shut down from inactivity") + +// tryAcquirePidfileLock attempts to ensure that only one daemon is running from the given pid file path +// at a time. If this process fails to write its PID to the lockfile, it must exit. +func tryAcquirePidfileLock(pidPath turbopath.AbsoluteSystemPath) (lockfile.Lockfile, error) { + if err := pidPath.EnsureDir(); err != nil { + return "", err + } + lockFile, err := lockfile.New(pidPath.ToString()) + if err != nil { + // lockfile.New should only return an error if it wasn't given an absolute path. + // We are attempting to use the type system to enforce that we are passing an + // absolute path. An error here likely means a bug, and we should crash. 
+		panic(err)
+	}
+	if err := lockFile.TryLock(); err != nil {
+		return "", err
+	}
+	return lockFile, nil
+}
+
+type rpcServer interface {
+	Register(grpcServer server.GRPCServer)
+}
+
+func (d *daemon) runTurboServer(parentContext context.Context, rpcServer rpcServer, signalWatcher *signals.Watcher) error {
+	ctx, cancel := context.WithCancel(parentContext)
+	defer cancel()
+	pidPath := getPidFile(d.repoRoot)
+	lock, err := tryAcquirePidfileLock(pidPath)
+	if err != nil {
+		return errors.Wrapf(err, "failed to lock the pid file at %v. Is another turbo daemon running?", lock)
+	}
+	// When we're done serving, clean up the pid file.
+	// Also, if *this* goroutine panics, make sure we unlock the pid file.
+	defer func() {
+		if err := lock.Unlock(); err != nil {
+			d.logger.Error(errors.Wrapf(err, "failed unlocking pid file at %v", lock).Error())
+		}
+	}()
+	// This handler runs in request goroutines. If a request causes a panic,
+	// this handler will get called after a call to recover(), meaning we are
+	// no longer panicking. We return a server error and cancel our context,
+	// which triggers a shutdown of the server.
+	panicHandler := func(thePanic interface{}) error {
+		cancel()
+		d.logger.Error(fmt.Sprintf("Caught panic %v", thePanic))
+		return status.Error(codes.Internal, "server panicked")
+	}
+
+	// If we have the lock, assume that we are the owners of the socket file,
+	// whether it already exists or not. That means we are free to remove it.
+	sockPath := getUnixSocket(d.repoRoot)
+	if err := sockPath.Remove(); err != nil && !errors.Is(err, os.ErrNotExist) {
+		return err
+	}
+	d.logger.Debug(fmt.Sprintf("Using socket path %v (%v)\n", sockPath, len(sockPath)))
+	lis, err := net.Listen("unix", sockPath.ToString())
+	if err != nil {
+		return err
+	}
+	// We don't need to explicitly close 'lis', the grpc server will handle that
+	s := grpc.NewServer(
+		grpc.ChainUnaryInterceptor(
+			d.onRequest,
+			grpc_recovery.UnaryServerInterceptor(grpc_recovery.WithRecoveryHandler(panicHandler)),
+		),
+	)
+	go d.timeoutLoop(ctx)
+
+	rpcServer.Register(s)
+	errCh := make(chan error)
+	go func(errCh chan<- error) {
+		if err := s.Serve(lis); err != nil {
+			errCh <- err
+		}
+		close(errCh)
+	}(errCh)
+
+	// Note that we aren't deferring s.GracefulStop here because we also need
+	// to drain the error channel, which isn't guaranteed to happen until
+	// the server has stopped. That in turn may depend on GracefulStop being
+	// called.
+	// Future work could restructure this to make that simpler.
+	var exitErr error
+	select {
+	case err, ok := <-errCh:
+		// The server exited
+		if ok {
+			exitErr = err
+		}
+	case <-d.timedOutCh:
+		// This is the inactivity timeout case
+		exitErr = errInactivityTimeout
+		s.GracefulStop()
+	case <-ctx.Done():
+		// If a request handler panics, it will cancel this context
+		s.GracefulStop()
+	case <-signalWatcher.Done():
+		// This is fired if we caught a signal
+		s.GracefulStop()
+	}
+	// Wait for the server to exit, if it hasn't already.
+	// When it does, this channel will close. We don't
+	// care about the error in this scenario because we've
+	// either requested a close via cancelling the context,
+	// hit an inactivity timeout, or caught a signal.
+	for range errCh {
+	}
+	return exitErr
+}
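+
+// The serve/stop/drain pattern above generalizes; a minimal sketch for
+// illustration only (not part of this patch, listener address is a placeholder):
+//
+//	lis, _ := net.Listen("tcp", "127.0.0.1:0")
+//	s := grpc.NewServer()
+//	errCh := make(chan error)
+//	go func() {
+//		if err := s.Serve(lis); err != nil {
+//			errCh <- err // grpc.ErrServerStopped if the stop raced the call to Serve
+//		}
+//		close(errCh)
+//	}()
+//	s.GracefulStop() // request shutdown...
+//	for range errCh { // ...then drain so the serving goroutine can finish
+//	}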
+
+func (d *daemon) onRequest(ctx context.Context, req interface{}, info *grpc.UnaryServerInfo, handler grpc.UnaryHandler) (resp interface{}, err error) {
+	d.reqCh <- struct{}{}
+	return handler(ctx, req)
+}
+
+func (d *daemon) timeoutLoop(ctx context.Context) {
+	timeoutCh := time.After(d.timeout)
+outer:
+	for {
+		select {
+		case <-d.reqCh:
+			timeoutCh = time.After(d.timeout)
+		case <-timeoutCh:
+			close(d.timedOutCh)
+			break outer
+		case <-ctx.Done():
+			break outer
+		}
+	}
+}
+
+// ClientOpts re-exports connector.Opts to encapsulate the connector package
+type ClientOpts = connector.Opts
+
+// Client re-exports connector.Client to encapsulate the connector package
+type Client = connector.Client
+
+// GetClient returns a client that can be used to interact with the daemon
+func GetClient(ctx context.Context, repoRoot turbopath.AbsoluteSystemPath, logger hclog.Logger, turboVersion string, opts ClientOpts) (*Client, error) {
+	sockPath := getUnixSocket(repoRoot)
+	pidPath := getPidFile(repoRoot)
+	logPath, err := getLogFilePath(repoRoot)
+	if err != nil {
+		return nil, err
+	}
+	bin, err := os.Executable()
+	if err != nil {
+		return nil, err
+	}
+	// The Go binary can no longer be called directly, so we need to route back to the rust wrapper
+	if strings.HasSuffix(bin, "go-turbo") {
+		bin = filepath.Join(filepath.Dir(bin), "turbo")
+	} else if strings.HasSuffix(bin, "go-turbo.exe") {
+		bin = filepath.Join(filepath.Dir(bin), "turbo.exe")
+	}
+	c := &connector.Connector{
+		Logger:       logger.Named("TurbodClient"),
+		Bin:          bin,
+		Opts:         opts,
+		SockPath:     sockPath,
+		PidPath:      pidPath,
+		LogPath:      logPath,
+		TurboVersion: turboVersion,
+	}
+	return c.Connect(ctx)
+}
diff --git a/cli/internal/daemon/daemon_test.go b/cli/internal/daemon/daemon_test.go
new file mode 100644
index 0000000..66a714d
--- /dev/null
+++ b/cli/internal/daemon/daemon_test.go
@@ -0,0 +1,262 @@
+package daemon
+
+import (
+	"context"
+	"errors"
+	"os/exec"
+	"runtime"
+	"strconv"
+	"sync"
+	"testing"
+	"time"
+
+	"github.com/hashicorp/go-hclog"
+	"github.com/nightlyone/lockfile"
+	"github.com/vercel/turbo/cli/internal/fs"
+	"github.com/vercel/turbo/cli/internal/server"
+	"github.com/vercel/turbo/cli/internal/signals"
+	"github.com/vercel/turbo/cli/internal/turbopath"
+	"google.golang.org/grpc"
+	"google.golang.org/grpc/credentials/insecure"
+	"google.golang.org/grpc/test/grpc_testing"
+	"gotest.tools/v3/assert"
+)
+
+// testBin returns a platform-appropriate node binary.
+// We need some process to be running and findable by the
+// lockfile library, and we don't particularly care what it is.
+// Since node is required for turbo development, it makes a decent
+// candidate.
+func testBin() string {
+	if runtime.GOOS == "windows" {
+		return "node.exe"
+	}
+	return "node"
+}
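+
+// The test below leans on a property of the lockfile library worth spelling
+// out: TryLock fails with lockfile.ErrBusy while the pid in the file belongs
+// to a live process, but reclaims the lock once that owner dies. A sketch for
+// illustration only (not part of this patch, path is a placeholder):
+//
+//	lf, _ := lockfile.New("/tmp/example.pid") // must be an absolute path
+//	if err := lf.TryLock(); errors.Is(err, lockfile.ErrBusy) {
+//		// a live process owns the pid file; back off
+//	} else if err == nil {
+//		defer func() { _ = lf.Unlock() }()
+//		// we own the pid file now; a stale entry from a dead owner was reclaimed
+//	}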
+
+func TestPidFileLock(t *testing.T) {
+	repoRoot := fs.AbsoluteSystemPathFromUpstream(t.TempDir())
+
+	pidPath := getPidFile(repoRoot)
+	// the lockfile library handles removing pids from dead owners
+	_, err := tryAcquirePidfileLock(pidPath)
+	assert.NilError(t, err, "acquirePidLock")
+
+	// Start up a node process and fake a pid file for it.
+	// Ensure that we can't start the daemon while the node process is live
+	bin := testBin()
+	node := exec.Command(bin)
+	err = node.Start()
+	assert.NilError(t, err, "Start")
+	stopNode := func() error {
+		if err := node.Process.Kill(); err != nil {
+			return err
+		}
+		// We expect an error from node, we just sent a kill signal
+		_ = node.Wait()
+		return nil
+	}
+	// In case we fail the test, still try to kill the node process
+	t.Cleanup(func() { _ = stopNode() })
+	nodePid := node.Process.Pid
+	err = pidPath.WriteFile([]byte(strconv.Itoa(nodePid)), 0644)
+	assert.NilError(t, err, "WriteFile")
+
+	_, err = tryAcquirePidfileLock(pidPath)
+	assert.ErrorIs(t, err, lockfile.ErrBusy)
+
+	// Stop the node process, but leave the pid file there
+	// This simulates a crash
+	err = stopNode()
+	assert.NilError(t, err, "stopNode")
+	// the lockfile library handles removing pids from dead owners
+	_, err = tryAcquirePidfileLock(pidPath)
+	assert.NilError(t, err, "acquirePidLock")
+}
+
+type testRPCServer struct {
+	grpc_testing.UnimplementedTestServiceServer
+	registered chan struct{}
+}
+
+func (ts *testRPCServer) EmptyCall(ctx context.Context, req *grpc_testing.Empty) (*grpc_testing.Empty, error) {
+	panic("intended to panic")
+}
+
+func (ts *testRPCServer) Register(grpcServer server.GRPCServer) {
+	grpc_testing.RegisterTestServiceServer(grpcServer, ts)
+	ts.registered <- struct{}{}
+}
+
+func newTestRPCServer() *testRPCServer {
+	return &testRPCServer{
+		registered: make(chan struct{}, 1),
+	}
+}
+
+func waitForFile(t *testing.T, filename turbopath.AbsoluteSystemPath, timeout time.Duration) {
+	t.Helper()
+	deadline := time.After(timeout)
+outer:
+	for !filename.FileExists() {
+		select {
+		case <-deadline:
+			break outer
+		case <-time.After(10 * time.Millisecond):
+		}
+	}
+	if !filename.FileExists() {
+		t.Errorf("timed out waiting for %v to exist after %v", filename, timeout)
+	}
+}
+
+func TestDaemonLifecycle(t *testing.T) {
+	logger := hclog.Default()
+	logger.SetLevel(hclog.Debug)
+	repoRoot := fs.AbsoluteSystemPathFromUpstream(t.TempDir())
+
+	ts := newTestRPCServer()
+	watcher := signals.NewWatcher()
+	ctx, cancel := context.WithCancel(context.Background())
+
+	d := &daemon{
+		logger:     logger,
+		repoRoot:   repoRoot,
+		timeout:    10 * time.Second,
+		reqCh:      make(chan struct{}),
+		timedOutCh: make(chan struct{}),
+	}
+
+	var serverErr error
+	wg := &sync.WaitGroup{}
+	wg.Add(1)
+	go func() {
+		serverErr = d.runTurboServer(ctx, ts, watcher)
+		wg.Done()
+	}()
+
+	sockPath := getUnixSocket(repoRoot)
+	waitForFile(t, sockPath, 30*time.Second)
+	pidPath := getPidFile(repoRoot)
+	waitForFile(t, pidPath, 1*time.Second)
+	cancel()
+	wg.Wait()
+	assert.NilError(t, serverErr, "runTurboServer")
+	if sockPath.FileExists() {
+		t.Errorf("%v still exists, should have been cleaned up", sockPath)
+	}
+	if pidPath.FileExists() {
+		t.Errorf("%v still exists, should have been cleaned up", pidPath)
+	}
+}
+
+func TestTimeout(t *testing.T) {
+	logger := hclog.Default()
+	logger.SetLevel(hclog.Debug)
+	repoRoot := fs.AbsoluteSystemPathFromUpstream(t.TempDir())
+
+	ts := newTestRPCServer()
+	watcher := signals.NewWatcher()
+	ctx := context.Background()
+
+	d := &daemon{
+		logger:     logger,
+		repoRoot:   repoRoot,
+		timeout:    5 * time.Millisecond,
+		reqCh:      make(chan struct{}),
+		timedOutCh: make(chan struct{}),
+	}
+	err := d.runTurboServer(ctx, ts, watcher)
+	if !errors.Is(err, errInactivityTimeout) {
+		t.Errorf("server error got %v, want %v", err, errInactivityTimeout)
+	}
+}
+
+func TestCaughtSignal(t *testing.T) {
+	logger
:= hclog.Default() + logger.SetLevel(hclog.Debug) + repoRoot := fs.AbsoluteSystemPathFromUpstream(t.TempDir()) + + ts := newTestRPCServer() + watcher := signals.NewWatcher() + ctx := context.Background() + + d := &daemon{ + logger: logger, + repoRoot: repoRoot, + timeout: 5 * time.Second, + reqCh: make(chan struct{}), + timedOutCh: make(chan struct{}), + } + errCh := make(chan error) + go func() { + err := d.runTurboServer(ctx, ts, watcher) + errCh <- err + }() + <-ts.registered + // grpc doesn't provide a signal to know when the server is serving. + // So while this call to Close can race with the call to grpc.Server.Serve, if we've + // registered with the turboserver, we've registered all of our + // signal handlers as well. We just may or may not be serving when Close() + // is called. It shouldn't matter for the purposes of this test: + // Either we are serving, and Serve will return with nil when GracefulStop is + // called, or we aren't serving yet, and the subsequent call to Serve will + // immediately return with grpc.ErrServerStopped. So, both nil and grpc.ErrServerStopped + // are acceptable outcomes for runTurboServer. Any other error, or a timeout, is a + // failure. + watcher.Close() + + err := <-errCh + pidPath := getPidFile(repoRoot) + if pidPath.FileExists() { + t.Errorf("expected to clean up %v, but it still exists", pidPath) + } + // We'll either get nil or ErrServerStopped, depending on whether + // or not we close the signal watcher before grpc.Server.Serve was + // called. + if err != nil && !errors.Is(err, grpc.ErrServerStopped) { + t.Errorf("runTurboServer got err %v, want nil or ErrServerStopped", err) + } +} + +func TestCleanupOnPanic(t *testing.T) { + logger := hclog.Default() + logger.SetLevel(hclog.Debug) + repoRoot := fs.AbsoluteSystemPathFromUpstream(t.TempDir()) + + ts := newTestRPCServer() + watcher := signals.NewWatcher() + ctx := context.Background() + + d := &daemon{ + logger: logger, + repoRoot: repoRoot, + timeout: 5 * time.Second, + reqCh: make(chan struct{}), + timedOutCh: make(chan struct{}), + } + errCh := make(chan error) + go func() { + err := d.runTurboServer(ctx, ts, watcher) + errCh <- err + }() + <-ts.registered + + creds := insecure.NewCredentials() + sockFile := getUnixSocket(repoRoot) + conn, err := grpc.Dial("unix://"+sockFile.ToString(), grpc.WithTransportCredentials(creds)) + assert.NilError(t, err, "Dial") + + client := grpc_testing.NewTestServiceClient(conn) + _, err = client.EmptyCall(ctx, &grpc_testing.Empty{}) + if err == nil { + t.Error("nil error") + } + // wait for the server to finish + <-errCh + + pidPath := getPidFile(repoRoot) + if pidPath.FileExists() { + t.Errorf("expected to clean up %v, but it still exists", pidPath) + } +} diff --git a/cli/internal/daemonclient/daemonclient.go b/cli/internal/daemonclient/daemonclient.go new file mode 100644 index 0000000..c415cd3 --- /dev/null +++ b/cli/internal/daemonclient/daemonclient.go @@ -0,0 +1,70 @@ +// Package daemonclient is a wrapper around a grpc client +// to talk to turbod +package daemonclient + +import ( + "context" + + "github.com/vercel/turbo/cli/internal/daemon/connector" + "github.com/vercel/turbo/cli/internal/fs" + "github.com/vercel/turbo/cli/internal/turbodprotocol" + "github.com/vercel/turbo/cli/internal/turbopath" +) + +// DaemonClient provides access to higher-level functionality from the daemon to a turbo run. 
+type DaemonClient struct { + client *connector.Client +} + +// Status provides details about the daemon's status +type Status struct { + UptimeMs uint64 `json:"uptimeMs"` + LogFile turbopath.AbsoluteSystemPath `json:"logFile"` + PidFile turbopath.AbsoluteSystemPath `json:"pidFile"` + SockFile turbopath.AbsoluteSystemPath `json:"sockFile"` +} + +// New creates a new instance of a DaemonClient. +func New(client *connector.Client) *DaemonClient { + return &DaemonClient{ + client: client, + } +} + +// GetChangedOutputs implements runcache.OutputWatcher.GetChangedOutputs +func (d *DaemonClient) GetChangedOutputs(ctx context.Context, hash string, repoRelativeOutputGlobs []string) ([]string, error) { + resp, err := d.client.GetChangedOutputs(ctx, &turbodprotocol.GetChangedOutputsRequest{ + Hash: hash, + OutputGlobs: repoRelativeOutputGlobs, + }) + if err != nil { + return nil, err + } + + return resp.ChangedOutputGlobs, nil +} + +// NotifyOutputsWritten implements runcache.OutputWatcher.NotifyOutputsWritten +func (d *DaemonClient) NotifyOutputsWritten(ctx context.Context, hash string, repoRelativeOutputGlobs fs.TaskOutputs) error { + _, err := d.client.NotifyOutputsWritten(ctx, &turbodprotocol.NotifyOutputsWrittenRequest{ + Hash: hash, + OutputGlobs: repoRelativeOutputGlobs.Inclusions, + OutputExclusionGlobs: repoRelativeOutputGlobs.Exclusions, + }) + return err +} + +// Status returns the DaemonStatus from the daemon +func (d *DaemonClient) Status(ctx context.Context) (*Status, error) { + resp, err := d.client.Status(ctx, &turbodprotocol.StatusRequest{}) + if err != nil { + return nil, err + } + daemonStatus := resp.DaemonStatus + return &Status{ + UptimeMs: daemonStatus.UptimeMsec, + LogFile: d.client.LogPath, + PidFile: d.client.PidPath, + SockFile: d.client.SockPath, + }, nil +} diff --git a/cli/internal/doublestar/doublestar.go b/cli/internal/doublestar/doublestar.go new file mode 100644 index 0000000..6fa05f1 --- /dev/null +++ b/cli/internal/doublestar/doublestar.go @@ -0,0 +1,11 @@ +// Package doublestar is adapted from https://github.com/bmatcuk/doublestar +// Copyright Bob Matcuk. All Rights Reserved. +// SPDX-License-Identifier: MIT +package doublestar + +import ( + "path" +) + +// ErrBadPattern indicates a pattern was malformed. +var ErrBadPattern = path.ErrBadPattern diff --git a/cli/internal/doublestar/doublestar_test.go b/cli/internal/doublestar/doublestar_test.go new file mode 100644 index 0000000..512f8b7 --- /dev/null +++ b/cli/internal/doublestar/doublestar_test.go @@ -0,0 +1,557 @@ +// Package doublestar is adapted from https://github.com/bmatcuk/doublestar +// Copyright Bob Matcuk. All Rights Reserved. 
+// SPDX-License-Identifier: MIT + +// This file is mostly copied from Go's path/match_test.go + +package doublestar + +import ( + "io/fs" + "log" + "os" + "path" + "path/filepath" + "runtime" + "strings" + "testing" +) + +type MatchTest struct { + pattern, testPath string // a pattern and path to test the pattern on + shouldMatch bool // true if the pattern should match the path + expectedErr error // an expected error + isStandard bool // pattern doesn't use any doublestar features + testOnDisk bool // true: test pattern against files in "test" directory + numResults int // number of glob results if testing on disk + winNumResults int // number of glob results on Windows +} + +// Tests which contain escapes and symlinks will not work on Windows +var onWindows = runtime.GOOS == "windows" + +var matchTests = []MatchTest{ + {"*", "", true, nil, true, false, 0, 0}, + {"*", "/", false, nil, true, false, 0, 0}, + {"/*", "/", true, nil, true, false, 0, 0}, + {"/*", "/debug/", false, nil, true, false, 0, 0}, + {"/*", "//", false, nil, true, false, 0, 0}, + {"abc", "abc", true, nil, true, true, 1, 1}, + {"*", "abc", true, nil, true, true, 19, 15}, + {"*c", "abc", true, nil, true, true, 2, 2}, + {"*/", "a/", true, nil, true, false, 0, 0}, + {"a*", "a", true, nil, true, true, 9, 9}, + {"a*", "abc", true, nil, true, true, 9, 9}, + {"a*", "ab/c", false, nil, true, true, 9, 9}, + {"a*/b", "abc/b", true, nil, true, true, 2, 2}, + {"a*/b", "a/c/b", false, nil, true, true, 2, 2}, + {"a*b*c*d*e*", "axbxcxdxe", true, nil, true, true, 3, 3}, + {"a*b*c*d*e*/f", "axbxcxdxe/f", true, nil, true, true, 2, 2}, + {"a*b*c*d*e*/f", "axbxcxdxexxx/f", true, nil, true, true, 2, 2}, + {"a*b*c*d*e*/f", "axbxcxdxe/xxx/f", false, nil, true, true, 2, 2}, + {"a*b*c*d*e*/f", "axbxcxdxexxx/fff", false, nil, true, true, 2, 2}, + {"a*b?c*x", "abxbbxdbxebxczzx", true, nil, true, true, 2, 2}, + {"a*b?c*x", "abxbbxdbxebxczzy", false, nil, true, true, 2, 2}, + {"ab[c]", "abc", true, nil, true, true, 1, 1}, + {"ab[b-d]", "abc", true, nil, true, true, 1, 1}, + {"ab[e-g]", "abc", false, nil, true, true, 0, 0}, + {"ab[^c]", "abc", false, nil, true, true, 0, 0}, + {"ab[^b-d]", "abc", false, nil, true, true, 0, 0}, + {"ab[^e-g]", "abc", true, nil, true, true, 1, 1}, + {"a\\*b", "ab", false, nil, true, true, 0, 0}, + {"a?b", "a☺b", true, nil, true, true, 1, 1}, + {"a[^a]b", "a☺b", true, nil, true, true, 1, 1}, + {"a[!a]b", "a☺b", true, nil, false, true, 1, 1}, + {"a???b", "a☺b", false, nil, true, true, 0, 0}, + {"a[^a][^a][^a]b", "a☺b", false, nil, true, true, 0, 0}, + {"[a-ζ]*", "α", true, nil, true, true, 17, 15}, + {"*[a-ζ]", "A", false, nil, true, true, 17, 15}, + {"a?b", "a/b", false, nil, true, true, 1, 1}, + {"a*b", "a/b", false, nil, true, true, 1, 1}, + {"[\\]a]", "]", true, nil, true, !onWindows, 2, 2}, + {"[\\-]", "-", true, nil, true, !onWindows, 1, 1}, + {"[x\\-]", "x", true, nil, true, !onWindows, 2, 2}, + {"[x\\-]", "-", true, nil, true, !onWindows, 2, 2}, + {"[x\\-]", "z", false, nil, true, !onWindows, 2, 2}, + {"[\\-x]", "x", true, nil, true, !onWindows, 2, 2}, + {"[\\-x]", "-", true, nil, true, !onWindows, 2, 2}, + {"[\\-x]", "a", false, nil, true, !onWindows, 2, 2}, + {"[]a]", "]", false, ErrBadPattern, true, true, 0, 0}, + // doublestar, like bash, allows these when path.Match() does not + {"[-]", "-", true, nil, false, !onWindows, 1, 0}, + {"[x-]", "x", true, nil, false, true, 2, 1}, + {"[x-]", "-", true, nil, false, !onWindows, 2, 1}, + {"[x-]", "z", false, nil, false, true, 2, 1}, + {"[-x]", "x", true, nil, false, 
true, 2, 1}, + {"[-x]", "-", true, nil, false, !onWindows, 2, 1}, + {"[-x]", "a", false, nil, false, true, 2, 1}, + {"[a-b-d]", "a", true, nil, false, true, 3, 2}, + {"[a-b-d]", "b", true, nil, false, true, 3, 2}, + {"[a-b-d]", "-", true, nil, false, !onWindows, 3, 2}, + {"[a-b-d]", "c", false, nil, false, true, 3, 2}, + {"[a-b-x]", "x", true, nil, false, true, 4, 3}, + {"\\", "a", false, ErrBadPattern, true, !onWindows, 0, 0}, + {"[", "a", false, ErrBadPattern, true, true, 0, 0}, + {"[^", "a", false, ErrBadPattern, true, true, 0, 0}, + {"[^bc", "a", false, ErrBadPattern, true, true, 0, 0}, + {"a[", "a", false, ErrBadPattern, true, true, 0, 0}, + {"a[", "ab", false, ErrBadPattern, true, true, 0, 0}, + {"ad[", "ab", false, ErrBadPattern, true, true, 0, 0}, + {"*x", "xxx", true, nil, true, true, 4, 4}, + {"[abc]", "b", true, nil, true, true, 3, 3}, + {"**", "", true, nil, false, false, 38, 38}, + {"a/**", "a", true, nil, false, true, 7, 7}, + {"a/**", "a/", true, nil, false, false, 7, 7}, + {"a/**", "a/b", true, nil, false, true, 7, 7}, + {"a/**", "a/b/c", true, nil, false, true, 7, 7}, + // These tests differ since we've disabled walking symlinks + {"**/c", "c", true, nil, false, true, 4, 4}, + {"**/c", "b/c", true, nil, false, true, 4, 4}, + {"**/c", "a/b/c", true, nil, false, true, 4, 4}, + {"**/c", "a/b", false, nil, false, true, 4, 4}, + {"**/c", "abcd", false, nil, false, true, 4, 4}, + {"**/c", "a/abc", false, nil, false, true, 4, 4}, + {"a/**/b", "a/b", true, nil, false, true, 2, 2}, + {"a/**/c", "a/b/c", true, nil, false, true, 2, 2}, + {"a/**/d", "a/b/c/d", true, nil, false, true, 1, 1}, + {"a/\\**", "a/b/c", false, nil, false, !onWindows, 0, 0}, + {"a/\\[*\\]", "a/bc", false, nil, true, !onWindows, 0, 0}, + // this is an odd case: filepath.Glob() will return results + {"a//b/c", "a/b/c", false, nil, true, false, 0, 0}, + {"a/b/c", "a/b//c", false, nil, true, true, 1, 1}, + // also odd: Glob + filepath.Glob return results + {"a/", "a", false, nil, true, false, 0, 0}, + {"ab{c,d}", "abc", true, nil, false, true, 1, 1}, + {"ab{c,d,*}", "abcde", true, nil, false, true, 5, 5}, + {"ab{c,d}[", "abcd", false, ErrBadPattern, false, true, 0, 0}, + {"a{,bc}", "a", true, nil, false, true, 2, 2}, + {"a{,bc}", "abc", true, nil, false, true, 2, 2}, + {"a/{b/c,c/b}", "a/b/c", true, nil, false, true, 2, 2}, + {"a/{b/c,c/b}", "a/c/b", true, nil, false, true, 2, 2}, + {"{a/{b,c},abc}", "a/b", true, nil, false, true, 3, 3}, + {"{a/{b,c},abc}", "a/c", true, nil, false, true, 3, 3}, + {"{a/{b,c},abc}", "abc", true, nil, false, true, 3, 3}, + {"{a/{b,c},abc}", "a/b/c", false, nil, false, true, 3, 3}, + {"{a/ab*}", "a/abc", true, nil, false, true, 1, 1}, + {"{a/*}", "a/b", true, nil, false, true, 3, 3}, + {"{a/abc}", "a/abc", true, nil, false, true, 1, 1}, + {"{a/b,a/c}", "a/c", true, nil, false, true, 2, 2}, + {"abc/**", "abc/b", true, nil, false, true, 3, 3}, + {"**/abc", "abc", true, nil, false, true, 2, 2}, + {"abc**", "abc/b", false, nil, false, true, 3, 3}, + {"**/*.txt", "abc/【test】.txt", true, nil, false, true, 1, 1}, + {"**/【*", "abc/【test】.txt", true, nil, false, true, 1, 1}, + // unfortunately, io/fs can't handle this, so neither can Glob =( + {"broken-symlink", "broken-symlink", true, nil, true, false, 1, 1}, + // We don't care about matching a particular file, we want to verify + // that we don't traverse the symlink + {"working-symlink/c/*", "working-symlink/c/d", true, nil, true, !onWindows, 1, 1}, + {"working-sym*/*", "irrelevant", false, nil, false, !onWindows, 0, 0}, + {"b/**/f", 
"irrelevant", false, nil, false, !onWindows, 0, 0}, +} + +func TestValidatePattern(t *testing.T) { + for idx, tt := range matchTests { + testValidatePatternWith(t, idx, tt) + } +} + +func testValidatePatternWith(t *testing.T, idx int, tt MatchTest) { + defer func() { + if r := recover(); r != nil { + t.Errorf("#%v. Validate(%#q) panicked: %#v", idx, tt.pattern, r) + } + }() + + result := ValidatePattern(tt.pattern) + if result != (tt.expectedErr == nil) { + t.Errorf("#%v. ValidatePattern(%#q) = %v want %v", idx, tt.pattern, result, !result) + } +} + +func TestMatch(t *testing.T) { + for idx, tt := range matchTests { + // Since Match() always uses "/" as the separator, we + // don't need to worry about the tt.testOnDisk flag + testMatchWith(t, idx, tt) + } +} + +func testMatchWith(t *testing.T, idx int, tt MatchTest) { + defer func() { + if r := recover(); r != nil { + t.Errorf("#%v. Match(%#q, %#q) panicked: %#v", idx, tt.pattern, tt.testPath, r) + } + }() + + // Match() always uses "/" as the separator + ok, err := Match(tt.pattern, tt.testPath) + if ok != tt.shouldMatch || err != tt.expectedErr { + t.Errorf("#%v. Match(%#q, %#q) = %v, %v want %v, %v", idx, tt.pattern, tt.testPath, ok, err, tt.shouldMatch, tt.expectedErr) + } + + if tt.isStandard { + stdOk, stdErr := path.Match(tt.pattern, tt.testPath) + if ok != stdOk || !compareErrors(err, stdErr) { + t.Errorf("#%v. Match(%#q, %#q) != path.Match(...). Got %v, %v want %v, %v", idx, tt.pattern, tt.testPath, ok, err, stdOk, stdErr) + } + } +} + +func BenchmarkMatch(b *testing.B) { + b.ReportAllocs() + for i := 0; i < b.N; i++ { + for _, tt := range matchTests { + if tt.isStandard { + _, _ = Match(tt.pattern, tt.testPath) + } + } + } +} + +func BenchmarkGoMatch(b *testing.B) { + b.ReportAllocs() + for i := 0; i < b.N; i++ { + for _, tt := range matchTests { + if tt.isStandard { + _, _ = path.Match(tt.pattern, tt.testPath) + } + } + } +} + +func TestPathMatch(t *testing.T) { + for idx, tt := range matchTests { + // Even though we aren't actually matching paths on disk, we are using + // PathMatch() which will use the system's separator. As a result, any + // patterns that might cause problems on-disk need to also be avoided + // here in this test. + if tt.testOnDisk { + testPathMatchWith(t, idx, tt) + } + } +} + +func testPathMatchWith(t *testing.T, idx int, tt MatchTest) { + defer func() { + if r := recover(); r != nil { + t.Errorf("#%v. Match(%#q, %#q) panicked: %#v", idx, tt.pattern, tt.testPath, r) + } + }() + + pattern := filepath.FromSlash(tt.pattern) + testPath := filepath.FromSlash(tt.testPath) + ok, err := PathMatch(pattern, testPath) + if ok != tt.shouldMatch || err != tt.expectedErr { + t.Errorf("#%v. PathMatch(%#q, %#q) = %v, %v want %v, %v", idx, pattern, testPath, ok, err, tt.shouldMatch, tt.expectedErr) + } + + if tt.isStandard { + stdOk, stdErr := filepath.Match(pattern, testPath) + if ok != stdOk || !compareErrors(err, stdErr) { + t.Errorf("#%v. PathMatch(%#q, %#q) != filepath.Match(...). Got %v, %v want %v, %v", idx, pattern, testPath, ok, err, stdOk, stdErr) + } + } +} + +func TestPathMatchFake(t *testing.T) { + // This test fakes that our path separator is `\\` so we can test what it + // would be like on Windows - obviously, we don't need to do that if we + // actually _are_ on Windows, since TestPathMatch will cover it. + if onWindows { + return + } + + for idx, tt := range matchTests { + // Even though we aren't actually matching paths on disk, we are using + // PathMatch() which will use the system's separator. 
As a result, any + // patterns that might cause problems on-disk need to also be avoided + // here in this test. + if tt.testOnDisk && tt.pattern != "\\" { + testPathMatchFakeWith(t, idx, tt) + } + } +} + +func testPathMatchFakeWith(t *testing.T, idx int, tt MatchTest) { + defer func() { + if r := recover(); r != nil { + t.Errorf("#%v. Match(%#q, %#q) panicked: %#v", idx, tt.pattern, tt.testPath, r) + } + }() + + pattern := strings.ReplaceAll(tt.pattern, "/", "\\") + testPath := strings.ReplaceAll(tt.testPath, "/", "\\") + ok, err := matchWithSeparator(pattern, testPath, '\\', true) + if ok != tt.shouldMatch || err != tt.expectedErr { + t.Errorf("#%v. PathMatch(%#q, %#q) = %v, %v want %v, %v", idx, pattern, testPath, ok, err, tt.shouldMatch, tt.expectedErr) + } +} + +func BenchmarkPathMatch(b *testing.B) { + b.ReportAllocs() + for i := 0; i < b.N; i++ { + for _, tt := range matchTests { + if tt.isStandard && tt.testOnDisk { + pattern := filepath.FromSlash(tt.pattern) + testPath := filepath.FromSlash(tt.testPath) + _, _ = PathMatch(pattern, testPath) + } + } + } +} + +func BenchmarkGoPathMatch(b *testing.B) { + b.ReportAllocs() + for i := 0; i < b.N; i++ { + for _, tt := range matchTests { + if tt.isStandard && tt.testOnDisk { + pattern := filepath.FromSlash(tt.pattern) + testPath := filepath.FromSlash(tt.testPath) + _, _ = filepath.Match(pattern, testPath) + } + } + } +} + +func TestGlob(t *testing.T) { + fsys := os.DirFS("test") + for idx, tt := range matchTests { + if tt.testOnDisk { + testGlobWith(t, idx, tt, fsys) + } + } +} + +func testGlobWith(t *testing.T, idx int, tt MatchTest, fsys fs.FS) { + defer func() { + if r := recover(); r != nil { + t.Errorf("#%v. Glob(%#q) panicked: %#v", idx, tt.pattern, r) + } + }() + + matches, err := Glob(fsys, tt.pattern) + verifyGlobResults(t, idx, "Glob", tt, fsys, matches, err) +} + +func TestGlobWalk(t *testing.T) { + fsys := os.DirFS("test") + for idx, tt := range matchTests { + if tt.testOnDisk { + testGlobWalkWith(t, idx, tt, fsys) + } + } +} + +func testGlobWalkWith(t *testing.T, idx int, tt MatchTest, fsys fs.FS) { + defer func() { + if r := recover(); r != nil { + t.Errorf("#%v. Glob(%#q) panicked: %#v", idx, tt.pattern, r) + } + }() + + var matches []string + err := GlobWalk(fsys, tt.pattern, func(p string, d fs.DirEntry) error { + matches = append(matches, p) + return nil + }) + verifyGlobResults(t, idx, "GlobWalk", tt, fsys, matches, err) +} + +func verifyGlobResults(t *testing.T, idx int, fn string, tt MatchTest, fsys fs.FS, matches []string, err error) { + numResults := tt.numResults + if onWindows { + numResults = tt.winNumResults + } + if len(matches) != numResults { + t.Errorf("#%v. %v(%#q) = %#v - should have %#v results", idx, fn, tt.pattern, matches, tt.numResults) + } + if inSlice(tt.testPath, matches) != tt.shouldMatch { + if tt.shouldMatch { + t.Errorf("#%v. %v(%#q) = %#v - doesn't contain %v, but should", idx, fn, tt.pattern, matches, tt.testPath) + } else { + t.Errorf("#%v. %v(%#q) = %#v - contains %v, but shouldn't", idx, fn, tt.pattern, matches, tt.testPath) + } + } + if err != tt.expectedErr { + t.Errorf("#%v. %v(%#q) has error %v, but should be %v", idx, fn, tt.pattern, err, tt.expectedErr) + } + + if tt.isStandard { + stdMatches, stdErr := fs.Glob(fsys, tt.pattern) + if !compareSlices(matches, stdMatches) || !compareErrors(err, stdErr) { + t.Errorf("#%v. %v(%#q) != fs.Glob(...). 
Got %#v, %v want %#v, %v", idx, fn, tt.pattern, matches, err, stdMatches, stdErr) + } + } +} + +func BenchmarkGlob(b *testing.B) { + fsys := os.DirFS("test") + b.ReportAllocs() + for i := 0; i < b.N; i++ { + for _, tt := range matchTests { + if tt.isStandard && tt.testOnDisk { + _, _ = Glob(fsys, tt.pattern) + } + } + } +} + +func BenchmarkGlobWalk(b *testing.B) { + fsys := os.DirFS("test") + b.ReportAllocs() + for i := 0; i < b.N; i++ { + for _, tt := range matchTests { + if tt.isStandard && tt.testOnDisk { + _ = GlobWalk(fsys, tt.pattern, func(p string, d fs.DirEntry) error { + return nil + }) + } + } + } +} + +func BenchmarkGoGlob(b *testing.B) { + fsys := os.DirFS("test") + b.ReportAllocs() + for i := 0; i < b.N; i++ { + for _, tt := range matchTests { + if tt.isStandard && tt.testOnDisk { + _, _ = fs.Glob(fsys, tt.pattern) + } + } + } +} + +func compareErrors(a, b error) bool { + if a == nil { + return b == nil + } + return b != nil +} + +func inSlice(s string, a []string) bool { + for _, i := range a { + if i == s { + return true + } + } + return false +} + +func compareSlices(a, b []string) bool { + if len(a) != len(b) { + return false + } + + diff := make(map[string]int, len(a)) + + for _, x := range a { + diff[x]++ + } + + for _, y := range b { + if _, ok := diff[y]; !ok { + return false + } + + diff[y]-- + if diff[y] == 0 { + delete(diff, y) + } + } + + return len(diff) == 0 +} + +func mkdirp(parts ...string) { + dirs := path.Join(parts...) + err := os.MkdirAll(dirs, 0755) + if err != nil { + log.Fatalf("Could not create test directories %v: %v\n", dirs, err) + } +} + +func touch(parts ...string) { + filename := path.Join(parts...) + f, err := os.Create(filename) + if err != nil { + log.Fatalf("Could not create test file %v: %v\n", filename, err) + } + _ = f.Close() +} + +func symlink(oldname, newname string) { + // since this will only run on non-windows, we can assume "/" as path separator + err := os.Symlink(oldname, newname) + if err != nil && !os.IsExist(err) { + log.Fatalf("Could not create symlink %v -> %v: %v\n", oldname, newname, err) + } +} + +func TestGlobSorted(t *testing.T) { + fsys := os.DirFS("test") + expected := []string{"a", "abc", "abcd", "abcde", "abxbbxdbxebxczzx", "abxbbxdbxebxczzy", "axbxcxdxe", "axbxcxdxexxx", "a☺b"} + matches, err := Glob(fsys, "a*") + if err != nil { + t.Errorf("Unexpected error %v", err) + return + } + + if len(matches) != len(expected) { + t.Errorf("Glob returned %#v; expected %#v", matches, expected) + return + } + for idx, match := range matches { + if match != expected[idx] { + t.Errorf("Glob returned %#v; expected %#v", matches, expected) + return + } + } +} + +func TestMain(m *testing.M) { + // create the test directory + mkdirp("test", "a", "b", "c") + mkdirp("test", "a", "c") + mkdirp("test", "abc") + mkdirp("test", "axbxcxdxe", "xxx") + mkdirp("test", "axbxcxdxexxx") + mkdirp("test", "b") + + // create test files + touch("test", "a", "abc") + touch("test", "a", "b", "c", "d") + touch("test", "a", "c", "b") + touch("test", "abc", "b") + touch("test", "abcd") + touch("test", "abcde") + touch("test", "abxbbxdbxebxczzx") + touch("test", "abxbbxdbxebxczzy") + touch("test", "axbxcxdxe", "f") + touch("test", "axbxcxdxe", "xxx", "f") + touch("test", "axbxcxdxexxx", "f") + touch("test", "axbxcxdxexxx", "fff") + touch("test", "a☺b") + touch("test", "b", "c") + touch("test", "c") + touch("test", "x") + touch("test", "xxx") + touch("test", "z") + touch("test", "α") + touch("test", "abc", "【test】.txt") + + if !onWindows { + // these 
files/symlinks won't work on Windows + touch("test", "-") + touch("test", "]") + symlink("../axbxcxdxe/", "test/b/symlink-dir") + symlink("/tmp/nonexistant-file-20160902155705", "test/broken-symlink") + symlink("a/b", "test/working-symlink") + } + + // os.Exit(m.Run()) + exitCode := m.Run() + _ = os.RemoveAll("test") + os.Exit(exitCode) +} diff --git a/cli/internal/doublestar/glob.go b/cli/internal/doublestar/glob.go new file mode 100644 index 0000000..eee8920 --- /dev/null +++ b/cli/internal/doublestar/glob.go @@ -0,0 +1,393 @@ +// Package doublestar is adapted from https://github.com/bmatcuk/doublestar +// Copyright Bob Matcuk. All Rights Reserved. +// SPDX-License-Identifier: MIT +package doublestar + +import ( + "io/fs" + "path" +) + +// Glob returns the names of all files matching pattern or nil if there is no +// matching file. The syntax of pattern is the same as in Match(). The pattern +// may describe hierarchical names such as usr/*/bin/ed. +// +// Glob ignores file system errors such as I/O errors reading directories. +// The only possible returned error is ErrBadPattern, reporting that the +// pattern is malformed. +// +// Note: this is meant as a drop-in replacement for io/fs.Glob(). Like +// io/fs.Glob(), this function assumes that your pattern uses `/` as the path +// separator even if that's not correct for your OS (like Windows). If you +// aren't sure if that's the case, you can use filepath.ToSlash() on your +// pattern before calling Glob(). +// +// Like `io/fs.Glob()`, patterns containing `/./`, `/../`, or starting with `/` +// will return no results and no errors. You can use SplitPattern to divide a +// pattern into a base path (to initialize an `FS` object) and pattern. +func Glob(fsys fs.FS, pattern string) ([]string, error) { + if !ValidatePattern(pattern) { + return nil, ErrBadPattern + } + if hasMidDoubleStar(pattern) { + // If the pattern has a `**` anywhere but the very end, GlobWalk is more + // performant because it can get away with less allocations. If the pattern + // ends in a `**`, both methods are pretty much the same, but Glob has a + // _very_ slight advantage because of lower function call overhead. + var matches []string + err := doGlobWalk(fsys, pattern, true, func(p string, d fs.DirEntry) error { + matches = append(matches, p) + return nil + }) + return matches, err + } + return doGlob(fsys, pattern, nil, true) +} + +// Does the actual globbin' +func doGlob(fsys fs.FS, pattern string, m []string, firstSegment bool) ([]string, error) { + matches := m + patternStart := indexMeta(pattern) + if patternStart == -1 { + // pattern doesn't contain any meta characters - does a file matching the + // pattern exist? + if exists(fsys, pattern) { + matches = append(matches, pattern) + } + return matches, nil + } + + dir := "." + splitIdx := lastIndexSlashOrAlt(pattern) + if splitIdx != -1 { + if pattern[splitIdx] == '}' { + openingIdx := indexMatchedOpeningAlt(pattern[:splitIdx]) + if openingIdx == -1 { + // if there's no matching opening index, technically Match() will treat + // an unmatched `}` as nothing special, so... we will, too! + splitIdx = lastIndexSlash(pattern[:splitIdx]) + } else { + // otherwise, we have to handle the alts: + return globAlts(fsys, pattern, openingIdx, splitIdx, matches, firstSegment) + } + } + + dir = pattern[:splitIdx] + pattern = pattern[splitIdx+1:] + } + + // if `splitIdx` is less than `patternStart`, we know `dir` has no meta + // characters. 
They would be equal if they are both -1, which means `dir` + // will be ".", and we know that doesn't have meta characters either. + if splitIdx <= patternStart { + return globDir(fsys, dir, pattern, matches, firstSegment) + } + + var dirs []string + var err error + dirs, err = doGlob(fsys, dir, matches, false) + if err != nil { + return nil, err + } + for _, d := range dirs { + matches, err = globDir(fsys, d, pattern, matches, firstSegment) + if err != nil { + return nil, err + } + } + + return matches, nil +} + +// handle alts in the glob pattern - `openingIdx` and `closingIdx` are the +// indexes of `{` and `}`, respectively +func globAlts(fsys fs.FS, pattern string, openingIdx, closingIdx int, m []string, firstSegment bool) ([]string, error) { + matches := m + + var dirs []string + startIdx := 0 + afterIdx := closingIdx + 1 + splitIdx := lastIndexSlashOrAlt(pattern[:openingIdx]) + if splitIdx == -1 || pattern[splitIdx] == '}' { + // no common prefix + dirs = []string{""} + } else { + // our alts have a common prefix that we can process first + var err error + dirs, err = doGlob(fsys, pattern[:splitIdx], matches, false) + if err != nil { + return nil, err + } + + startIdx = splitIdx + 1 + } + + for _, d := range dirs { + patIdx := openingIdx + 1 + altResultsStartIdx := len(matches) + thisResultStartIdx := altResultsStartIdx + for patIdx < closingIdx { + nextIdx := indexNextAlt(pattern[patIdx:closingIdx], true) + if nextIdx == -1 { + nextIdx = closingIdx + } else { + nextIdx += patIdx + } + + alt := buildAlt(d, pattern, startIdx, openingIdx, patIdx, nextIdx, afterIdx) + var err error + matches, err = doGlob(fsys, alt, matches, firstSegment) + if err != nil { + return nil, err + } + + matchesLen := len(matches) + if altResultsStartIdx != thisResultStartIdx && thisResultStartIdx != matchesLen { + // Alts can result in matches that aren't sorted, or, worse, duplicates + // (consider the trivial pattern `path/to/{a,*}`). Since doGlob returns + // sorted results, we can do a sort of in-place merge and remove + // duplicates. 
But, we only need to do this if this isn't the first alt + // (ie, `altResultsStartIdx != thisResultsStartIdx`) and if the latest + // alt actually added some matches (`thisResultStartIdx != + // len(matches)`) + matches = sortAndRemoveDups(matches, altResultsStartIdx, thisResultStartIdx, matchesLen) + + // length of matches may have changed + thisResultStartIdx = len(matches) + } else { + thisResultStartIdx = matchesLen + } + + patIdx = nextIdx + 1 + } + } + + return matches, nil +} + +// find files/subdirectories in the given `dir` that match `pattern` +func globDir(fsys fs.FS, dir, pattern string, matches []string, canMatchFiles bool) ([]string, error) { + m := matches + + if pattern == "" { + // pattern can be an empty string if the original pattern ended in a slash, + // in which case, we should just return dir, but only if it actually exists + // and it's a directory (or a symlink to a directory) + if isPathDir(fsys, dir) { + m = append(m, dir) + } + return m, nil + } + + if pattern == "**" { + m = globDoubleStar(fsys, dir, m, canMatchFiles) + return m, nil + } + + dirs, err := fs.ReadDir(fsys, dir) + if err != nil { + // ignore IO errors + return m, nil + } + + var matched bool + for _, info := range dirs { + name := info.Name() + if canMatchFiles || isDir(fsys, dir, name, info) { + matched, err = matchWithSeparator(pattern, name, '/', false) + if err != nil { + return nil, err + } + if matched { + m = append(m, path.Join(dir, name)) + } + } + } + + return m, nil +} + +func globDoubleStar(fsys fs.FS, dir string, matches []string, canMatchFiles bool) []string { + dirs, err := fs.ReadDir(fsys, dir) + if err != nil { + // ignore IO errors + return matches + } + + // `**` can match *this* dir, so add it + matches = append(matches, dir) + for _, info := range dirs { + name := info.Name() + if isDir(fsys, dir, name, info) { + matches = globDoubleStar(fsys, path.Join(dir, name), matches, canMatchFiles) + } else if canMatchFiles { + matches = append(matches, path.Join(dir, name)) + } + } + + return matches +} + +// Returns true if the pattern has a doublestar in the middle of the pattern. +// In this case, GlobWalk is faster because it can get away with less +// allocations. However, Glob has a _very_ slight edge if the pattern ends in +// `**`. +func hasMidDoubleStar(p string) bool { + // subtract 3: 2 because we want to return false if the pattern ends in `**` + // (Glob is _very_ slightly faster in that case), and the extra 1 because our + // loop checks p[i] and p[i+1]. + l := len(p) - 3 + for i := 0; i < l; i++ { + if p[i] == '\\' { + // escape next byte + i++ + } else if p[i] == '*' && p[i+1] == '*' { + return true + } + } + return false +} + +// Returns the index of the first unescaped meta character, or negative 1. +func indexMeta(s string) int { + var c byte + l := len(s) + for i := 0; i < l; i++ { + c = s[i] + if c == '*' || c == '?' || c == '[' || c == '{' { + return i + } else if c == '\\' { + // skip next byte + i++ + } + } + return -1 +} + +// Returns the index of the last unescaped slash or closing alt (`}`) in the +// string, or negative 1. +func lastIndexSlashOrAlt(s string) int { + for i := len(s) - 1; i >= 0; i-- { + if (s[i] == '/' || s[i] == '}') && (i == 0 || s[i-1] != '\\') { + return i + } + } + return -1 +} + +// Returns the index of the last unescaped slash in the string, or negative 1. 
+func lastIndexSlash(s string) int { + for i := len(s) - 1; i >= 0; i-- { + if s[i] == '/' && (i == 0 || s[i-1] != '\\') { + return i + } + } + return -1 +} + +// Assuming the byte after the end of `s` is a closing `}`, this function will +// find the index of the matching `{`. That is, it'll skip over any nested `{}` +// and account for escaping. +func indexMatchedOpeningAlt(s string) int { + alts := 1 + for i := len(s) - 1; i >= 0; i-- { + if s[i] == '}' && (i == 0 || s[i-1] != '\\') { + alts++ + } else if s[i] == '{' && (i == 0 || s[i-1] != '\\') { + if alts--; alts == 0 { + return i + } + } + } + return -1 +} + +// Returns true if the path exists +func exists(fsys fs.FS, name string) bool { + if _, err := fs.Stat(fsys, name); err != nil { + return false + } + return true +} + +// Returns true if the path is a directory, or a symlink to a directory +func isPathDir(fsys fs.FS, name string) bool { + info, err := fs.Stat(fsys, name) + if err != nil { + return false + } + return info.IsDir() +} + +// Returns whether or not the given DirEntry is a directory. If the DirEntry +// represents a symbolic link, return false +func isDir(fsys fs.FS, dir string, name string, info fs.DirEntry) bool { + if (info.Type() & fs.ModeSymlink) > 0 { + return false + } + return info.IsDir() +} + +// Builds a string from an alt +func buildAlt(prefix, pattern string, startIdx, openingIdx, currentIdx, nextIdx, afterIdx int) string { + // pattern: + // ignored/start{alts,go,here}remaining - len = 36 + // | | | | ^--- afterIdx = 27 + // | | | \--------- nextIdx = 21 + // | | \----------- currentIdx = 19 + // | \----------------- openingIdx = 13 + // \---------------------- startIdx = 8 + // + // result: + // prefix/startgoremaining - len = 7 + 5 + 2 + 9 = 23 + var buf []byte + patLen := len(pattern) + size := (openingIdx - startIdx) + (nextIdx - currentIdx) + (patLen - afterIdx) + if prefix != "" { + buf = make([]byte, 0, size+len(prefix)+1) + buf = append(buf, prefix...) + buf = append(buf, '/') + } else { + buf = make([]byte, 0, size) + } + buf = append(buf, pattern[startIdx:openingIdx]...) + buf = append(buf, pattern[currentIdx:nextIdx]...) + if afterIdx < patLen { + buf = append(buf, pattern[afterIdx:]...) + } + return string(buf) +} + +// Running alts can produce results that are not sorted, and, worse, can cause +// duplicates (consider the trivial pattern `path/to/{a,*}`). Since we know +// each run of doGlob is sorted, we can basically do the "merge" step of a +// merge sort in-place. +func sortAndRemoveDups(matches []string, idx1, idx2, l int) []string { + var tmp string + for ; idx1 < idx2; idx1++ { + if matches[idx1] < matches[idx2] { + // order is correct + continue + } else if matches[idx1] > matches[idx2] { + // need to swap and then re-sort matches above idx2 + tmp = matches[idx1] + matches[idx1] = matches[idx2] + + shft := idx2 + 1 + for ; shft < l && matches[shft] < tmp; shft++ { + matches[shft-1] = matches[shft] + } + matches[shft-1] = tmp + } else { + // duplicate - shift matches above idx2 down one and decrement l + for shft := idx2 + 1; shft < l; shft++ { + matches[shft-1] = matches[shft] + } + if l--; idx2 == l { + // nothing left to do... 
matches[idx2:] must have been full of dups + break + } + } + } + return matches[:l] +} diff --git a/cli/internal/doublestar/globwalk.go b/cli/internal/doublestar/globwalk.go new file mode 100644 index 0000000..6caec3e --- /dev/null +++ b/cli/internal/doublestar/globwalk.go @@ -0,0 +1,277 @@ +// Package doublestar is adapted from https://github.com/bmatcuk/doublestar +// Copyright Bob Matcuk. All Rights Reserved. +// SPDX-License-Identifier: MIT +package doublestar + +import ( + "io/fs" + "path" +) + +// GlobWalkFunc is a callback function for GlobWalk(). If the function returns an error, GlobWalk +// will end immediately and return the same error. +type GlobWalkFunc func(path string, d fs.DirEntry) error + +// GlobWalk calls the callback function `fn` for every file matching pattern. +// The syntax of pattern is the same as in Match() and the behavior is the same +// as Glob(), with regard to limitations (such as patterns containing `/./`, +// `/../`, or starting with `/`). The pattern may describe hierarchical names +// such as usr/*/bin/ed. +// +// GlobWalk may have a small performance benefit over Glob if you do not need a +// slice of matches because it can avoid allocating memory for the matches. +// Additionally, GlobWalk gives you access to the `fs.DirEntry` objects for +// each match, and lets you quit early by returning a non-nil error from your +// callback function. +// +// GlobWalk ignores file system errors such as I/O errors reading directories. +// GlobWalk may return ErrBadPattern, reporting that the pattern is malformed. +// Additionally, if the callback function `fn` returns an error, GlobWalk will +// exit immediately and return that error. +// +// Like Glob(), this function assumes that your pattern uses `/` as the path +// separator even if that's not correct for your OS (like Windows). If you +// aren't sure if that's the case, you can use filepath.ToSlash() on your +// pattern before calling GlobWalk(). +func GlobWalk(fsys fs.FS, pattern string, fn GlobWalkFunc) error { + if !ValidatePattern(pattern) { + return ErrBadPattern + } + return doGlobWalk(fsys, pattern, true, fn) +} + +// Actually execute GlobWalk +func doGlobWalk(fsys fs.FS, pattern string, firstSegment bool, fn GlobWalkFunc) error { + patternStart := indexMeta(pattern) + if patternStart == -1 { + // pattern doesn't contain any meta characters - does a file matching the + // pattern exist? + info, err := fs.Stat(fsys, pattern) + if err == nil { + err = fn(pattern, newDirEntryFromFileInfo(info)) + return err + } + // ignore IO errors + return nil + } + + dir := "." + splitIdx := lastIndexSlashOrAlt(pattern) + if splitIdx != -1 { + if pattern[splitIdx] == '}' { + openingIdx := indexMatchedOpeningAlt(pattern[:splitIdx]) + if openingIdx == -1 { + // if there's no matching opening index, technically Match() will treat + // an unmatched `}` as nothing special, so... we will, too! + splitIdx = lastIndexSlash(pattern[:splitIdx]) + } else { + // otherwise, we have to handle the alts: + return globAltsWalk(fsys, pattern, openingIdx, splitIdx, firstSegment, fn) + } + } + + dir = pattern[:splitIdx] + pattern = pattern[splitIdx+1:] + } + + // if `splitIdx` is less than `patternStart`, we know `dir` has no meta + // characters. They would be equal if they are both -1, which means `dir` + // will be ".", and we know that doesn't have meta characters either. 
+ if splitIdx <= patternStart { + return globDirWalk(fsys, dir, pattern, firstSegment, fn) + } + + return doGlobWalk(fsys, dir, false, func(p string, d fs.DirEntry) error { + if err := globDirWalk(fsys, p, pattern, firstSegment, fn); err != nil { + return err + } + return nil + }) +} + +// handle alts in the glob pattern - `openingIdx` and `closingIdx` are the +// indexes of `{` and `}`, respectively +func globAltsWalk(fsys fs.FS, pattern string, openingIdx, closingIdx int, firstSegment bool, fn GlobWalkFunc) error { + var matches []dirEntryWithFullPath + startIdx := 0 + afterIdx := closingIdx + 1 + splitIdx := lastIndexSlashOrAlt(pattern[:openingIdx]) + if splitIdx == -1 || pattern[splitIdx] == '}' { + // no common prefix + var err error + matches, err = doGlobAltsWalk(fsys, "", pattern, startIdx, openingIdx, closingIdx, afterIdx, firstSegment, matches) + if err != nil { + return err + } + } else { + // our alts have a common prefix that we can process first + startIdx = splitIdx + 1 + err := doGlobWalk(fsys, pattern[:splitIdx], false, func(p string, d fs.DirEntry) (e error) { + matches, e = doGlobAltsWalk(fsys, p, pattern, startIdx, openingIdx, closingIdx, afterIdx, firstSegment, matches) + return e + }) + if err != nil { + return err + } + } + + for _, m := range matches { + if err := fn(m.Path, m.Entry); err != nil { + return err + } + } + + return nil +} + +// runs actual matching for alts +func doGlobAltsWalk(fsys fs.FS, d, pattern string, startIdx, openingIdx, closingIdx, afterIdx int, firstSegment bool, m []dirEntryWithFullPath) ([]dirEntryWithFullPath, error) { + matches := m + matchesLen := len(m) + patIdx := openingIdx + 1 + for patIdx < closingIdx { + nextIdx := indexNextAlt(pattern[patIdx:closingIdx], true) + if nextIdx == -1 { + nextIdx = closingIdx + } else { + nextIdx += patIdx + } + + alt := buildAlt(d, pattern, startIdx, openingIdx, patIdx, nextIdx, afterIdx) + err := doGlobWalk(fsys, alt, firstSegment, func(p string, d fs.DirEntry) error { + // insertion sort, ignoring dups + insertIdx := matchesLen + for insertIdx > 0 && matches[insertIdx-1].Path > p { + insertIdx-- + } + if insertIdx > 0 && matches[insertIdx-1].Path == p { + // dup + return nil + } + + // append to grow the slice, then insert + entry := dirEntryWithFullPath{d, p} + matches = append(matches, entry) + for i := matchesLen; i > insertIdx; i-- { + matches[i] = matches[i-1] + } + matches[insertIdx] = entry + matchesLen++ + + return nil + }) + if err != nil { + return nil, err + } + + patIdx = nextIdx + 1 + } + + return matches, nil +} + +func globDirWalk(fsys fs.FS, dir, pattern string, canMatchFiles bool, fn GlobWalkFunc) error { + if pattern == "" { + // pattern can be an empty string if the original pattern ended in a slash, + // in which case, we should just return dir, but only if it actually exists + // and it's a directory (or a symlink to a directory) + info, err := fs.Stat(fsys, dir) + if err != nil || !info.IsDir() { + return nil + } + return fn(dir, newDirEntryFromFileInfo(info)) + } + + if pattern == "**" { + // `**` can match *this* dir + info, err := fs.Stat(fsys, dir) + if err != nil || !info.IsDir() { + return nil + } + if err = fn(dir, newDirEntryFromFileInfo(info)); err != nil { + return err + } + return globDoubleStarWalk(fsys, dir, canMatchFiles, fn) + } + + dirs, err := fs.ReadDir(fsys, dir) + if err != nil { + // ignore IO errors + return nil + } + + var matched bool + for _, info := range dirs { + name := info.Name() + if canMatchFiles || isDir(fsys, dir, name, info) { + matched, err = 
matchWithSeparator(pattern, name, '/', false)
+			if err != nil {
+				return err
+			}
+			if matched {
+				if err = fn(path.Join(dir, name), info); err != nil {
+					return err
+				}
+			}
+		}
+	}
+
+	return nil
+}
+
+func globDoubleStarWalk(fsys fs.FS, dir string, canMatchFiles bool, fn GlobWalkFunc) error {
+	dirs, err := fs.ReadDir(fsys, dir)
+	if err != nil {
+		// ignore IO errors
+		return nil
+	}
+
+	// `**` can match *this* dir, but the caller has already passed it to `fn`,
+	// so only walk the children here
+	for _, info := range dirs {
+		name := info.Name()
+		if isDir(fsys, dir, name, info) {
+			p := path.Join(dir, name)
+			if e := fn(p, info); e != nil {
+				return e
+			}
+			if e := globDoubleStarWalk(fsys, p, canMatchFiles, fn); e != nil {
+				return e
+			}
+		} else if canMatchFiles {
+			if e := fn(path.Join(dir, name), info); e != nil {
+				return e
+			}
+		}
+	}
+
+	return nil
+}
+
+type dirEntryFromFileInfo struct {
+	fi fs.FileInfo
+}
+
+func (d *dirEntryFromFileInfo) Name() string {
+	return d.fi.Name()
+}
+
+func (d *dirEntryFromFileInfo) IsDir() bool {
+	return d.fi.IsDir()
+}
+
+func (d *dirEntryFromFileInfo) Type() fs.FileMode {
+	return d.fi.Mode().Type()
+}
+
+func (d *dirEntryFromFileInfo) Info() (fs.FileInfo, error) {
+	return d.fi, nil
+}
+
+func newDirEntryFromFileInfo(fi fs.FileInfo) fs.DirEntry {
+	return &dirEntryFromFileInfo{fi}
+}
+
+type dirEntryWithFullPath struct {
+	Entry fs.DirEntry
+	Path  string
+}
diff --git a/cli/internal/doublestar/match.go b/cli/internal/doublestar/match.go
new file mode 100644
index 0000000..d8c9536
--- /dev/null
+++ b/cli/internal/doublestar/match.go
@@ -0,0 +1,377 @@
+// Package doublestar is adapted from https://github.com/bmatcuk/doublestar
+// Copyright Bob Matcuk. All Rights Reserved.
+// SPDX-License-Identifier: MIT
+package doublestar
+
+import (
+	"path/filepath"
+	"unicode/utf8"
+)
+
+// Match reports whether name matches the shell pattern.
+// The pattern syntax is:
+//
+//	pattern:
+//		{ term }
+//	term:
+//		'*'         matches any sequence of non-path-separators
+//		'/**/'      matches zero or more directories
+//		'?'         matches any single non-path-separator character
+//		'[' [ '^' '!' ] { character-range } ']'
+//		            character class (must be non-empty)
+//		            starting with `^` or `!` negates the class
+//		'{' { term } [ ',' { term } ... ] '}'
+//		            alternatives
+//		c           matches character c (c != '*', '?', '\\', '[')
+//		'\\' c      matches character c
+//
+//	character-range:
+//		c           matches character c (c != '\\', '-', ']')
+//		'\\' c      matches character c
+//		lo '-' hi   matches character c for lo <= c <= hi
+//
+// Match returns true if `name` matches the file name `pattern`. `name` and
+// `pattern` are split on forward slash (`/`) characters and may be relative or
+// absolute.
+//
+// Match requires pattern to match all of name, not just a substring.
+// The only possible returned error is ErrBadPattern, when pattern
+// is malformed.
+//
+// A doublestar (`**`) should appear surrounded by path separators such as
+// `/**/`. A mid-pattern doublestar (`**`) behaves like bash's globstar
+// option: a pattern such as `path/to/**.txt` would return the same results as
+// `path/to/*.txt`. The pattern you're looking for is `path/to/**/*.txt`.
+//
+// Note: this is meant as a drop-in replacement for path.Match() which
+// always uses '/' as the path separator. If you want to support systems
+// which use a different path separator (such as Windows), what you want
+// is PathMatch(). Alternatively, you can run filepath.ToSlash() on both
+// pattern and name and then use this function.
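+//
+// A small usage sketch (editor's illustration, not from the original patch;
+// the literal patterns are assumptions):
+//
+//	ok, _ := Match("path/to/**/*.txt", "path/to/a/b/notes.txt") // true
+//	ok, _ = Match("path/{a,b}/?.go", "path/b/x.go")             // true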
+func Match(pattern, name string) (bool, error) { + return matchWithSeparator(pattern, name, '/', true) +} + +// PathMatch returns true if `name` matches the file name `pattern`. The +// difference between Match and PathMatch is that PathMatch will automatically +// use your system's path separator to split `name` and `pattern`. On systems +// where the path separator is `'\'`, escaping will be disabled. +// +// Note: this is meant as a drop-in replacement for filepath.Match(). It +// assumes that both `pattern` and `name` are using the system's path +// separator. If you can't be sure of that, use filepath.ToSlash() on both +// `pattern` and `name`, and then use the Match() function instead. +func PathMatch(pattern, name string) (bool, error) { + return matchWithSeparator(pattern, name, filepath.Separator, true) +} + +func matchWithSeparator(pattern, name string, separator rune, validate bool) (matched bool, err error) { + doublestarPatternBacktrack := -1 + doublestarNameBacktrack := -1 + starPatternBacktrack := -1 + starNameBacktrack := -1 + patIdx := 0 + nameIdx := 0 + patLen := len(pattern) + nameLen := len(name) + startOfSegment := true +MATCH: + for nameIdx < nameLen { + if patIdx < patLen { + switch pattern[patIdx] { + case '*': + if patIdx++; patIdx < patLen && pattern[patIdx] == '*' { + // doublestar - must begin with a path separator, otherwise we'll + // treat it like a single star like bash + patIdx++ + if startOfSegment { + if patIdx >= patLen { + // pattern ends in `/**`: return true + return true, nil + } + + // doublestar must also end with a path separator, otherwise we're + // just going to treat the doublestar as a single star like bash + patRune, patRuneLen := utf8.DecodeRuneInString(pattern[patIdx:]) + if patRune == separator { + patIdx += patRuneLen + + doublestarPatternBacktrack = patIdx + doublestarNameBacktrack = nameIdx + starPatternBacktrack = -1 + starNameBacktrack = -1 + continue + } + } + } + startOfSegment = false + + starPatternBacktrack = patIdx + starNameBacktrack = nameIdx + continue + + case '?': + startOfSegment = false + nameRune, nameRuneLen := utf8.DecodeRuneInString(name[nameIdx:]) + if nameRune == separator { + // `?` cannot match the separator + break + } + + patIdx++ + nameIdx += nameRuneLen + continue + + case '[': + startOfSegment = false + if patIdx++; patIdx >= patLen { + // class didn't end + return false, ErrBadPattern + } + nameRune, nameRuneLen := utf8.DecodeRuneInString(name[nameIdx:]) + + matched := false + negate := pattern[patIdx] == '!' 
|| pattern[patIdx] == '^' + if negate { + patIdx++ + } + + if patIdx >= patLen || pattern[patIdx] == ']' { + // class didn't end or empty character class + return false, ErrBadPattern + } + + last := utf8.MaxRune + for patIdx < patLen && pattern[patIdx] != ']' { + patRune, patRuneLen := utf8.DecodeRuneInString(pattern[patIdx:]) + patIdx += patRuneLen + + // match a range + if last < utf8.MaxRune && patRune == '-' && patIdx < patLen && pattern[patIdx] != ']' { + if pattern[patIdx] == '\\' { + // next character is escaped + patIdx++ + } + patRune, patRuneLen = utf8.DecodeRuneInString(pattern[patIdx:]) + patIdx += patRuneLen + + if last <= nameRune && nameRune <= patRune { + matched = true + break + } + + // didn't match range - reset `last` + last = utf8.MaxRune + continue + } + + // not a range - check if the next rune is escaped + if patRune == '\\' { + patRune, patRuneLen = utf8.DecodeRuneInString(pattern[patIdx:]) + patIdx += patRuneLen + } + + // check if the rune matches + if patRune == nameRune { + matched = true + break + } + + // no matches yet + last = patRune + } + + if matched == negate { + // failed to match - if we reached the end of the pattern, that means + // we never found a closing `]` + if patIdx >= patLen { + return false, ErrBadPattern + } + break + } + + closingIdx := indexUnescapedByte(pattern[patIdx:], ']', true) + if closingIdx == -1 { + // no closing `]` + return false, ErrBadPattern + } + + patIdx += closingIdx + 1 + nameIdx += nameRuneLen + continue + + case '{': + // Note: removed 'startOfSegment = false' here. + // This block is guaranteed to return, so assigning it was useless + // and triggering a lint error + patIdx++ + closingIdx := indexMatchedClosingAlt(pattern[patIdx:], separator != '\\') + if closingIdx == -1 { + // no closing `}` + return false, ErrBadPattern + } + closingIdx += patIdx + + for { + commaIdx := indexNextAlt(pattern[patIdx:closingIdx], separator != '\\') + if commaIdx == -1 { + break + } + commaIdx += patIdx + + result, err := matchWithSeparator(pattern[patIdx:commaIdx]+pattern[closingIdx+1:], name[nameIdx:], separator, validate) + if result || err != nil { + return result, err + } + + patIdx = commaIdx + 1 + } + return matchWithSeparator(pattern[patIdx:closingIdx]+pattern[closingIdx+1:], name[nameIdx:], separator, validate) + + case '\\': + if separator != '\\' { + // next rune is "escaped" in the pattern - literal match + if patIdx++; patIdx >= patLen { + // pattern ended + return false, ErrBadPattern + } + } + fallthrough + + default: + patRune, patRuneLen := utf8.DecodeRuneInString(pattern[patIdx:]) + nameRune, nameRuneLen := utf8.DecodeRuneInString(name[nameIdx:]) + if patRune != nameRune { + if separator != '\\' && patIdx > 0 && pattern[patIdx-1] == '\\' { + // if this rune was meant to be escaped, we need to move patIdx + // back to the backslash before backtracking or validating below + patIdx-- + } + break + } + + patIdx += patRuneLen + nameIdx += nameRuneLen + startOfSegment = patRune == separator + continue + } + } + + if starPatternBacktrack >= 0 { + // `*` backtrack, but only if the `name` rune isn't the separator + nameRune, nameRuneLen := utf8.DecodeRuneInString(name[starNameBacktrack:]) + if nameRune != separator { + starNameBacktrack += nameRuneLen + patIdx = starPatternBacktrack + nameIdx = starNameBacktrack + startOfSegment = false + continue + } + } + + if doublestarPatternBacktrack >= 0 { + // `**` backtrack, advance `name` past next separator + nameIdx = doublestarNameBacktrack + for nameIdx < nameLen { + nameRune, 
nameRuneLen := utf8.DecodeRuneInString(name[nameIdx:]) + nameIdx += nameRuneLen + if nameRune == separator { + doublestarNameBacktrack = nameIdx + patIdx = doublestarPatternBacktrack + startOfSegment = true + continue MATCH + } + } + } + + if validate && patIdx < patLen && !doValidatePattern(pattern[patIdx:], separator) { + return false, ErrBadPattern + } + return false, nil + } + + if nameIdx < nameLen { + // we reached the end of `pattern` before the end of `name` + return false, nil + } + + // we've reached the end of `name`; we've successfully matched if we've also + // reached the end of `pattern`, or if the rest of `pattern` can match a + // zero-length string + return isZeroLengthPattern(pattern[patIdx:], separator) +} + +func isZeroLengthPattern(pattern string, separator rune) (ret bool, err error) { + // `/**` is a special case - a pattern such as `path/to/a/**` *should* match + // `path/to/a` because `a` might be a directory + if pattern == "" || pattern == "*" || pattern == "**" || pattern == string(separator)+"**" { + return true, nil + } + + if pattern[0] == '{' { + closingIdx := indexMatchedClosingAlt(pattern[1:], separator != '\\') + if closingIdx == -1 { + // no closing '}' + return false, ErrBadPattern + } + closingIdx++ + + patIdx := 1 + for { + commaIdx := indexNextAlt(pattern[patIdx:closingIdx], separator != '\\') + if commaIdx == -1 { + break + } + commaIdx += patIdx + + ret, err = isZeroLengthPattern(pattern[patIdx:commaIdx]+pattern[closingIdx+1:], separator) + if ret || err != nil { + return + } + + patIdx = commaIdx + 1 + } + return isZeroLengthPattern(pattern[patIdx:closingIdx]+pattern[closingIdx+1:], separator) + } + + // no luck - validate the rest of the pattern + if !doValidatePattern(pattern, separator) { + return false, ErrBadPattern + } + return false, nil +} + +// Finds the index of the first unescaped byte `c`, or negative 1. +func indexUnescapedByte(s string, c byte, allowEscaping bool) int { + l := len(s) + for i := 0; i < l; i++ { + if allowEscaping && s[i] == '\\' { + // skip next byte + i++ + } else if s[i] == c { + return i + } + } + return -1 +} + +// Assuming the byte before the beginning of `s` is an opening `{`, this +// function will find the index of the matching `}`. That is, it'll skip over +// any nested `{}` and account for escaping +func indexMatchedClosingAlt(s string, allowEscaping bool) int { + alts := 1 + l := len(s) + for i := 0; i < l; i++ { + if allowEscaping && s[i] == '\\' { + // skip next byte + i++ + } else if s[i] == '{' { + alts++ + } else if s[i] == '}' { + if alts--; alts == 0 { + return i + } + } + } + return -1 +} diff --git a/cli/internal/doublestar/utils.go b/cli/internal/doublestar/utils.go new file mode 100644 index 0000000..7236cd0 --- /dev/null +++ b/cli/internal/doublestar/utils.go @@ -0,0 +1,71 @@ +// Package doublestar is adapted from https://github.com/bmatcuk/doublestar +// Copyright Bob Matcuk. All Rights Reserved. +// SPDX-License-Identifier: MIT +package doublestar + +// SplitPattern is a utility function. Given a pattern, SplitPattern will +// return two strings: the first string is everything up to the last slash +// (`/`) that appears _before_ any unescaped "meta" characters (ie, `*?[{`). +// The second string is everything after that slash. For example, given the +// pattern: +// +// ../../path/to/meta*/** +// ^----------- split here +// +// SplitPattern returns "../../path/to" and "meta*/**". 
This is useful for +// initializing os.DirFS() to call Glob() because Glob() will silently fail if +// your pattern includes `/./` or `/../`. For example: +// +// base, pattern := SplitPattern("../../path/to/meta*/**") +// fsys := os.DirFS(base) +// matches, err := Glob(fsys, pattern) +// +// If SplitPattern cannot find somewhere to split the pattern (for example, +// `meta*/**`), it will return "." and the unaltered pattern (`meta*/**` in +// this example). +// +// Of course, it is your responsibility to decide if the returned base path is +// "safe" in the context of your application. Perhaps you could use Match() to +// validate against a list of approved base directories? +func SplitPattern(p string) (string, string) { + base := "." + pattern := p + + splitIdx := -1 + for i := 0; i < len(p); i++ { + c := p[i] + if c == '\\' { + i++ + } else if c == '/' { + splitIdx = i + } else if c == '*' || c == '?' || c == '[' || c == '{' { + break + } + } + + if splitIdx >= 0 { + return p[:splitIdx], p[splitIdx+1:] + } + + return base, pattern +} + +// Finds the next comma, but ignores any commas that appear inside nested `{}`. +// Assumes that each opening bracket has a corresponding closing bracket. +func indexNextAlt(s string, allowEscaping bool) int { + alts := 1 + l := len(s) + for i := 0; i < l; i++ { + if allowEscaping && s[i] == '\\' { + // skip next byte + i++ + } else if s[i] == '{' { + alts++ + } else if s[i] == '}' { + alts-- + } else if s[i] == ',' && alts == 1 { + return i + } + } + return -1 +} diff --git a/cli/internal/doublestar/validate.go b/cli/internal/doublestar/validate.go new file mode 100644 index 0000000..225fc5e --- /dev/null +++ b/cli/internal/doublestar/validate.go @@ -0,0 +1,83 @@ +// Package doublestar is adapted from https://github.com/bmatcuk/doublestar +// Copyright Bob Matcuk. All Rights Reserved. +// SPDX-License-Identifier: MIT +package doublestar + +import "path/filepath" + +// ValidatePattern validates a pattern. Patterns are validated while they run in Match(), +// PathMatch(), and Glob(), so, you normally wouldn't need to call this. +// However, there are cases where this might be useful: for example, if your +// program allows a user to enter a pattern that you'll run at a later time, +// you might want to validate it. +// +// ValidatePattern assumes your pattern uses '/' as the path separator. +func ValidatePattern(s string) bool { + return doValidatePattern(s, '/') +} + +// ValidatePathPattern only uses your OS path separator. In other words, use +// ValidatePattern if you would normally use Match() or Glob(). Use +// ValidatePathPattern if you would normally use PathMatch(). Keep in mind, +// Glob() requires '/' separators, even if your OS uses something else. +func ValidatePathPattern(s string) bool { + return doValidatePattern(s, filepath.Separator) +} + +func doValidatePattern(s string, separator rune) bool { + altDepth := 0 + l := len(s) +VALIDATE: + for i := 0; i < l; i++ { + switch s[i] { + case '\\': + if separator != '\\' { + // skip the next byte - return false if there is no next byte + if i++; i >= l { + return false + } + } + continue + + case '[': + if i++; i >= l { + // class didn't end + return false + } + if s[i] == '^' || s[i] == '!' 
{
+				i++
+			}
+			if i >= l || s[i] == ']' {
+				// class didn't end or empty character class
+				return false
+			}
+
+			for ; i < l; i++ {
+				if separator != '\\' && s[i] == '\\' {
+					i++
+				} else if s[i] == ']' {
+					// looks good
+					continue VALIDATE
+				}
+			}
+
+			// class didn't end
+			return false
+
+		case '{':
+			altDepth++
+			continue
+
+		case '}':
+			if altDepth == 0 {
+				// alt end without a corresponding start
+				return false
+			}
+			altDepth--
+			continue
+		}
+	}
+
+	// valid as long as all alts are closed
+	return altDepth == 0
+}
diff --git a/cli/internal/encoding/gitoutput/gitoutput.go b/cli/internal/encoding/gitoutput/gitoutput.go
new file mode 100644
index 0000000..1c2ad4f
--- /dev/null
+++ b/cli/internal/encoding/gitoutput/gitoutput.go
@@ -0,0 +1,345 @@
+// Package gitoutput reads the output of calls to `git`.
+package gitoutput
+
+import (
+	"bufio"
+	"bytes"
+	"errors"
+	"fmt"
+	"io"
+)
+
+// These describe the structure of fields in the output of `git` commands.
+var (
+	LsTreeFields  = []Field{ObjectMode, ObjectType, ObjectName, Path}
+	LsFilesFields = []Field{ObjectMode, ObjectName, ObjectStage, Path}
+	StatusFields  = []Field{StatusX, StatusY, Path}
+)
+
+var _lsTreeFieldToIndex = map[Field]int{
+	ObjectMode: 0,
+	ObjectType: 1,
+	ObjectName: 2,
+	Path:       3,
+}
+
+var _lsFilesFieldToIndex = map[Field]int{
+	ObjectMode:  0,
+	ObjectName:  1,
+	ObjectStage: 2,
+	Path:        3,
+}
+
+var _statusFieldToIndex = map[Field]int{
+	StatusX: 0,
+	StatusY: 1,
+	Path:    2,
+}
+
+// Field is the type for fields available in outputs from `git`.
+// Used for naming and sensible call sites.
+type Field int
+
+const (
+	// ObjectMode is the mode field from `git` outputs. e.g. 100644
+	ObjectMode Field = iota + 1
+	// ObjectType is the set of allowed types from `git` outputs: blob, tree, commit
+	ObjectType
+	// ObjectName is the 40-character SHA hash
+	ObjectName
+	// ObjectStage is a value 0-3.
+	ObjectStage
+	// StatusX is the first character of the two-character output from `git status`.
+	StatusX
+	// StatusY is the second character of the two-character output from `git status`.
+	StatusY
+	// Path is the file path under version control in `git`.
+	Path
+)
+
+// LsTreeEntry is the result from calling `git ls-tree`
+type LsTreeEntry []string
+
+// LsFilesEntry is the result from calling `git ls-files`
+type LsFilesEntry []string
+
+// StatusEntry is the result from calling `git status`
+type StatusEntry []string
+
+// GetField returns the value of the specified field.
+func (e LsTreeEntry) GetField(field Field) string {
+	value, exists := _lsTreeFieldToIndex[field]
+	if !exists {
+		panic("Received an invalid field for LsTreeEntry.")
+	}
+	return e[value]
+}
+
+// GetField returns the value of the specified field.
+func (e LsFilesEntry) GetField(field Field) string {
+	value, exists := _lsFilesFieldToIndex[field]
+	if !exists {
+		panic("Received an invalid field for LsFilesEntry.")
+	}
+	return e[value]
+}
+
+// GetField returns the value of the specified field.
+func (e StatusEntry) GetField(field Field) string {
+	value, exists := _statusFieldToIndex[field]
+	if !exists {
+		panic("Received an invalid field for StatusEntry.")
+	}
+	return e[value]
+}
+
+// Separators that appear in the output of `git` commands.
+const (
+	_space = ' '
+	_tab   = '\t'
+	_nul   = '\000'
+)
+
+// A ParseError is returned for parsing errors.
+// Entries and columns are both 1-indexed.
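+//
+// For example (editor's note; the values are hypothetical), a bad object
+// type in the second entry could render as:
+//
+//	parse error on entry 2, column 8: object type is not valid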
+type ParseError struct {
+	Entry  int   // Entry where the error occurred
+	Column int   // Column where the error occurred
+	Err    error // The actual error
+}
+
+// Error creates a string for a parse error.
+func (e *ParseError) Error() string {
+	return fmt.Sprintf("parse error on entry %d, column %d: %v", e.Entry, e.Column, e.Err)
+}
+
+// Unwrap returns the raw error.
+func (e *ParseError) Unwrap() error { return e.Err }
+
+// These are the errors that can be returned in ParseError.Err.
+var (
+	ErrInvalidObjectMode    = errors.New("object mode is not valid")
+	ErrInvalidObjectType    = errors.New("object type is not valid")
+	ErrInvalidObjectName    = errors.New("object name is not valid")
+	ErrInvalidObjectStage   = errors.New("object stage is not valid")
+	ErrInvalidObjectStatusX = errors.New("object status x is not valid")
+	ErrInvalidObjectStatusY = errors.New("object status y is not valid")
+	ErrInvalidPath          = errors.New("path is not valid")
+	ErrUnknownField         = errors.New("unknown field")
+)
+
+// A Reader reads records from `git`'s output.
+type Reader struct {
+	// ReuseRecord controls whether calls to Read may return a slice sharing
+	// the backing array of the previous call's returned slice for performance.
+	// By default, each call to Read returns newly allocated memory owned by the caller.
+	ReuseRecord bool
+
+	// Fields specifies the type of each field.
+	Fields []Field
+
+	reader *bufio.Reader
+
+	// numEntry is the current entry being read in the `git` output.
+	numEntry int
+
+	// rawBuffer is an entry buffer only used by the readEntry method.
+	rawBuffer []byte
+
+	// recordBuffer holds the unescaped fields, one after another.
+	// The fields can be accessed by using the indexes in fieldIndexes.
+	recordBuffer []byte
+
+	// fieldIndexes is an index of fields inside recordBuffer.
+	// The i'th field ends at offset fieldIndexes[i] in recordBuffer.
+	fieldIndexes []int
+
+	// fieldPositions is an index of field positions for the
+	// last record returned by Read.
+	fieldPositions []position
+
+	// lastRecord is a record cache and is only used when ReuseRecord == true.
+	lastRecord []string
+}
+
+// NewLSTreeReader returns a new Reader that reads from reader.
+func NewLSTreeReader(reader io.Reader) *Reader {
+	return &Reader{
+		reader: bufio.NewReader(reader),
+		Fields: LsTreeFields,
+	}
+}
+
+// NewLSFilesReader returns a new Reader that reads from reader.
+func NewLSFilesReader(reader io.Reader) *Reader {
+	return &Reader{
+		reader: bufio.NewReader(reader),
+		Fields: LsFilesFields,
+	}
+}
+
+// NewStatusReader returns a new Reader that reads from reader.
+func NewStatusReader(reader io.Reader) *Reader {
+	return &Reader{
+		reader: bufio.NewReader(reader),
+		Fields: StatusFields,
+	}
+}
+
+// Read reads one record from `reader`.
+// Read always returns either a non-nil record or a non-nil error,
+// but not both.
+//
+// If there is no data left to be read, Read returns nil, io.EOF.
+//
+// If ReuseRecord is true, the returned slice may be shared
+// between multiple calls to Read.
+func (r *Reader) Read() (record []string, err error) {
+	if r.ReuseRecord {
+		record, err = r.readRecord(r.lastRecord)
+		r.lastRecord = record
+	} else {
+		record, err = r.readRecord(nil)
+	}
+	return record, err
+}
+
+// FieldPos returns the entry and column corresponding to
+// the start of the field with the given index in the slice most recently
+// returned by Read. Numbering of entries and columns starts at 1;
+// columns are counted in bytes, not runes.
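+// For instance (an editor's sketch, not part of the original patch):
+//
+//	record, _ := r.Read()
+//	entry, col := r.FieldPos(0) // start position of record[0]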
+//
+// If this is called with an out-of-bounds index, it panics.
+func (r *Reader) FieldPos(field int) (entry int, column int) {
+	if field < 0 || field >= len(r.fieldPositions) {
+		panic("out of range index passed to FieldPos")
+	}
+	p := &r.fieldPositions[field]
+	return p.entry, p.col
+}
+
+// position holds the position of a field in the current entry.
+type position struct {
+	entry, col int
+}
+
+// ReadAll reads all the records from reader until EOF.
+//
+// A successful call returns err == nil, not err == io.EOF. Because ReadAll is
+// defined to read until EOF, it does not treat end of file as an error to be
+// reported.
+func (r *Reader) ReadAll() (records [][]string, err error) {
+	for {
+		record, err := r.readRecord(nil)
+		if err == io.EOF {
+			return records, nil
+		}
+		if err != nil {
+			return nil, err
+		}
+		records = append(records, record)
+	}
+}
+
+// readEntry reads the next entry (with the trailing NUL).
+// If EOF is hit without a trailing NUL, one is appended.
+// If some bytes were read then the error is never io.EOF.
+// The result is only valid until the next call to readEntry.
+func (r *Reader) readEntry() ([]byte, error) {
+	entry, err := r.reader.ReadSlice('\000')
+	if err == bufio.ErrBufferFull {
+		r.rawBuffer = append(r.rawBuffer[:0], entry...)
+		for err == bufio.ErrBufferFull {
+			entry, err = r.reader.ReadSlice('\000')
+			r.rawBuffer = append(r.rawBuffer, entry...)
+		}
+		entry = r.rawBuffer
+	}
+	if len(entry) > 0 && err == io.EOF {
+		entry = append(entry, '\000')
+		err = nil
+	}
+	r.numEntry++
+
+	return entry, err
+}
+
+// getFieldLength returns the field length and the separator length for advancing.
+func getFieldLength(fieldType Field, fieldNumber int, fieldCount int, entry *[]byte) (int, int) {
+	switch fieldType {
+	case StatusX:
+		return 1, 0
+	case StatusY:
+		return 1, 1
+	default:
+		return bytes.IndexRune(*entry, getSeparator(fieldNumber, fieldCount)), 1
+	}
+}
+
+// getSeparator returns the separator between the current field and the next field.
+// Since field separators follow a regular pattern, they are computed rather than
+// hard-coded.
+func getSeparator(fieldNumber int, fieldCount int) rune {
+	remaining := fieldCount - fieldNumber
+
+	switch remaining {
+	default:
+		return _space
+	case 2:
+		return _tab
+	case 1:
+		return _nul
+	}
+}
+
+// readRecord reads a single record.
+func (r *Reader) readRecord(dst []string) ([]string, error) {
+	entry, errRead := r.readEntry()
+	if errRead == io.EOF {
+		return nil, errRead
+	}
+
+	// Parse each field in the record.
+	r.recordBuffer = r.recordBuffer[:0]
+	r.fieldIndexes = r.fieldIndexes[:0]
+	r.fieldPositions = r.fieldPositions[:0]
+	pos := position{entry: r.numEntry, col: 1}
+
+	fieldCount := len(r.Fields)
+
+	for fieldNumber, fieldType := range r.Fields {
+		length, advance := getFieldLength(fieldType, fieldNumber, fieldCount, &entry)
+		field := entry[:length]
+
+		fieldError := checkValid(fieldType, field)
+		if fieldError != nil {
+			return nil, &ParseError{
+				Entry:  pos.entry,
+				Column: pos.col,
+				Err:    fieldError,
+			}
+		}
+
+		offset := length + advance
+		entry = entry[offset:]
+		r.recordBuffer = append(r.recordBuffer, field...)
+		r.fieldIndexes = append(r.fieldIndexes, len(r.recordBuffer))
+		r.fieldPositions = append(r.fieldPositions, pos)
+		pos.col += offset
+	}
+
+	// Create a single string and create slices out of it.
+	// This pins the memory of the fields together, but allocates once.
+ str := string(r.recordBuffer) // Convert to string once to batch allocations + dst = dst[:0] + if cap(dst) < len(r.fieldIndexes) { + dst = make([]string, len(r.fieldIndexes)) + } + dst = dst[:len(r.fieldIndexes)] + var preIdx int + for i, idx := range r.fieldIndexes { + dst[i] = str[preIdx:idx] + preIdx = idx + } + + return dst, nil +} diff --git a/cli/internal/encoding/gitoutput/gitoutput_test.go b/cli/internal/encoding/gitoutput/gitoutput_test.go new file mode 100644 index 0000000..19ab056 --- /dev/null +++ b/cli/internal/encoding/gitoutput/gitoutput_test.go @@ -0,0 +1,377 @@ +package gitoutput + +import ( + "fmt" + "io" + "reflect" + "strings" + "testing" + "unicode/utf8" +) + +type readTest struct { + Name string + Input string + Output [][]string + Reader func(io.Reader) *Reader + Positions [][][2]int + Errors []error + + // These fields are copied into the Reader + ReuseRecord bool +} + +// In these tests, the § and ∑ characters in readTest.Input are used to denote +// the start of a field and the position of an error respectively. +// They are removed before parsing and are used to verify the position +// information reported by FieldPos. + +var lsTreeTests = []readTest{ + { + Name: "simple", + Input: "§100644 §blob §e69de29bb2d1d6434b8b29ae775ad8c2e48c5391\t§package.json\000", + Output: [][]string{{"100644", "blob", "e69de29bb2d1d6434b8b29ae775ad8c2e48c5391", "package.json"}}, + Reader: NewLSTreeReader, + }, + { + Name: "no trailing nul", + Input: "§100644 §blob §e69de29bb2d1d6434b8b29ae775ad8c2e48c5391\t§package.json", + Output: [][]string{{"100644", "blob", "e69de29bb2d1d6434b8b29ae775ad8c2e48c5391", "package.json"}}, + Reader: NewLSTreeReader, + }, + { + Name: "weird file names", + Input: "§100644 §blob §e69de29bb2d1d6434b8b29ae775ad8c2e48c5391\t§\t\000§100644 §blob §e69de29bb2d1d6434b8b29ae775ad8c2e48c5391\t§\"\000§100644 §blob §5b999efa470b056e329b4c23a73904e0794bdc2f\t§\n\000§100644 §blob §f44f57fff95196c5f7139dfa0b96875f1e9650a9\t§.gitignore\000§100644 §blob §33dbaf21275ca2a5f460249d941cbc27d5da3121\t§README.md\000§040000 §tree §7360f2d292aec95907cebdcbb412a6bf2bd10f8a\t§apps\000§100644 §blob §9ec2879b24ce2c817296eebe2cb3846f8e4751ea\t§package.json\000§040000 §tree §5759aadaea2cde55468a61e7104eb0a9d86c1d30\t§packages\000§100644 §blob §33d0621ee2f4da4a2f6f6bdd51a42618d181e337\t§turbo.json\000", + Output: [][]string{ + {"100644", "blob", "e69de29bb2d1d6434b8b29ae775ad8c2e48c5391", "\t"}, + {"100644", "blob", "e69de29bb2d1d6434b8b29ae775ad8c2e48c5391", "\""}, + {"100644", "blob", "5b999efa470b056e329b4c23a73904e0794bdc2f", "\n"}, + {"100644", "blob", "f44f57fff95196c5f7139dfa0b96875f1e9650a9", ".gitignore"}, + {"100644", "blob", "33dbaf21275ca2a5f460249d941cbc27d5da3121", "README.md"}, + {"040000", "tree", "7360f2d292aec95907cebdcbb412a6bf2bd10f8a", "apps"}, + {"100644", "blob", "9ec2879b24ce2c817296eebe2cb3846f8e4751ea", "package.json"}, + {"040000", "tree", "5759aadaea2cde55468a61e7104eb0a9d86c1d30", "packages"}, + {"100644", "blob", "33d0621ee2f4da4a2f6f6bdd51a42618d181e337", "turbo.json"}, + }, + Reader: NewLSTreeReader, + }, + { + Name: "invalid object mode", + Input: "∑888888 §blob §5b999efa470b056e329b4c23a73904e0794bdc2f\t§.eslintrc.js\000", + Output: [][]string{}, + Reader: NewLSTreeReader, + Errors: []error{&ParseError{Err: ErrInvalidObjectMode}}, + }, + { + Name: "invalid object type", + Input: "§100644 ∑bush §5b999efa470b056e329b4c23a73904e0794bdc2f\t§.eslintrc.js\000", + Output: [][]string{}, + Reader: NewLSTreeReader, + Errors: []error{&ParseError{Err: 
ErrInvalidObjectType}}, + }, + { + Name: "invalid object name", + Input: "§100644 §blob ∑Zb999efa470b056e329b4c23a73904e0794bdc2f\t§.eslintrc.js\000", + Output: [][]string{}, + Reader: NewLSTreeReader, + Errors: []error{&ParseError{Err: ErrInvalidObjectName}}, + }, + { + Name: "invalid path", + Input: "§100644 §blob §5b999efa470b056e329b4c23a73904e0794bdc2f\t∑\000", + Output: [][]string{}, + Reader: NewLSTreeReader, + Errors: []error{&ParseError{Err: ErrInvalidPath}}, + }, +} + +var lsFilesTests = []readTest{ + { + Name: "simple", + Input: "§100644 §e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 §0\t§package.json\000", + Output: [][]string{{"100644", "e69de29bb2d1d6434b8b29ae775ad8c2e48c5391", "0", "package.json"}}, + Reader: NewLSFilesReader, + }, + { + Name: "no trailing nul", + Input: "§100644 §e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 §0\t§package.json", + Output: [][]string{{"100644", "e69de29bb2d1d6434b8b29ae775ad8c2e48c5391", "0", "package.json"}}, + Reader: NewLSFilesReader, + }, + { + Name: "invalid object mode", + Input: "∑888888 §e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 §0\t§package.json", + Output: [][]string{}, + Reader: NewLSFilesReader, + Errors: []error{&ParseError{Err: ErrInvalidObjectMode}}, + }, + { + Name: "invalid object name", + Input: "§100644 ∑Z69de29bb2d1d6434b8b29ae775ad8c2e48c5391 §0\t§package.json", + Output: [][]string{}, + Reader: NewLSFilesReader, + Errors: []error{&ParseError{Err: ErrInvalidObjectName}}, + }, + { + Name: "invalid object stage", + Input: "§100644 §e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 ∑4\t§package.json", + Output: [][]string{}, + Reader: NewLSFilesReader, + Errors: []error{&ParseError{Err: ErrInvalidObjectStage}}, + }, + { + Name: "invalid path", + Input: "§100644 §e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 §0\t∑", + Output: [][]string{}, + Reader: NewLSFilesReader, + Errors: []error{&ParseError{Err: ErrInvalidPath}}, + }, +} + +var statusTests = []readTest{ + { + Name: "simple", + Input: "§A§D §package.json\000", + Output: [][]string{{"A", "D", "package.json"}}, + Reader: NewStatusReader, + }, + { + Name: "no trailing nul", + Input: "§A§D §package.json", + Output: [][]string{{"A", "D", "package.json"}}, + Reader: NewStatusReader, + }, + { + Name: "invalid status X", + Input: "∑~§D §package.json\000", + Output: [][]string{}, + Reader: NewStatusReader, + Errors: []error{&ParseError{Err: ErrInvalidObjectStatusX}}, + }, + { + Name: "invalid status Y", + Input: "§D∑~ §package.json\000", + Output: [][]string{}, + Reader: NewStatusReader, + Errors: []error{&ParseError{Err: ErrInvalidObjectStatusY}}, + }, + { + Name: "invalid path", + Input: "§A§D ∑\000", + Output: [][]string{}, + Reader: NewStatusReader, + Errors: []error{&ParseError{Err: ErrInvalidPath}}, + }, +} + +func TestRead(t *testing.T) { + newReader := func(tt readTest) (*Reader, [][][2]int, map[int][2]int) { + positions, errPositions, input := makePositions(tt.Input) + r := tt.Reader(strings.NewReader(input)) + + r.ReuseRecord = tt.ReuseRecord + return r, positions, errPositions + } + + allTests := []readTest{} + allTests = append(allTests, lsTreeTests...) + allTests = append(allTests, lsFilesTests...) + allTests = append(allTests, statusTests...) 
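+
+	// Editor's note: each table entry below is exercised twice — once in bulk
+	// via ReadAll(), and once record-by-record via Read() to check FieldPos
+	// and per-record errors on both paths.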
+
+	for _, tt := range allTests {
+		t.Run(tt.Name, func(t *testing.T) {
+			r, positions, errPositions := newReader(tt)
+			out, err := r.ReadAll()
+			if wantErr := firstError(tt.Errors, positions, errPositions); wantErr != nil {
+				if !reflect.DeepEqual(err, wantErr) {
+					t.Fatalf("ReadAll() error mismatch:\ngot %v (%#v)\nwant %v (%#v)", err, err, wantErr, wantErr)
+				}
+				if out != nil {
+					t.Fatalf("ReadAll() output:\ngot %q\nwant nil", out)
+				}
+			} else {
+				if err != nil {
+					t.Fatalf("unexpected ReadAll() error: %v", err)
+				}
+				if !reflect.DeepEqual(out, tt.Output) {
+					t.Fatalf("ReadAll() output:\ngot %q\nwant %q", out, tt.Output)
+				}
+			}
+
+			// Check field and error positions.
+			r, _, _ = newReader(tt)
+			for recNum := 0; ; recNum++ {
+				rec, err := r.Read()
+				var wantErr error
+				if recNum < len(tt.Errors) && tt.Errors[recNum] != nil {
+					wantErr = errorWithPosition(tt.Errors[recNum], recNum, positions, errPositions)
+				} else if recNum >= len(tt.Output) {
+					wantErr = io.EOF
+				}
+				if !reflect.DeepEqual(err, wantErr) {
+					t.Fatalf("Read() error at record %d:\ngot %v (%#v)\nwant %v (%#v)", recNum, err, err, wantErr, wantErr)
+				}
+				if err != nil {
+					if recNum < len(tt.Output) {
+						t.Fatalf("need more records; got %d want %d", recNum, len(tt.Output))
+					}
+					break
+				}
+				if got, want := rec, tt.Output[recNum]; !reflect.DeepEqual(got, want) {
+					t.Errorf("Read vs ReadAll mismatch;\ngot %q\nwant %q", got, want)
+				}
+				pos := positions[recNum]
+				if len(pos) != len(rec) {
+					t.Fatalf("mismatched position length at record %d", recNum)
+				}
+				for i := range rec {
+					entry, col := r.FieldPos(i)
+					if got, want := [2]int{entry, col}, pos[i]; got != want {
+						t.Errorf("position mismatch at record %d, field %d;\ngot %v\nwant %v", recNum, i, got, want)
+					}
+				}
+			}
+		})
+	}
+}
+
+// firstError returns the first non-nil error in errs,
+// with the position adjusted according to the error's
+// index inside positions.
+func firstError(errs []error, positions [][][2]int, errPositions map[int][2]int) error {
+	for i, err := range errs {
+		if err != nil {
+			return errorWithPosition(err, i, positions, errPositions)
+		}
+	}
+	return nil
+}
+
+func errorWithPosition(err error, recNum int, positions [][][2]int, errPositions map[int][2]int) error {
+	parseErr, ok := err.(*ParseError)
+	if !ok {
+		return err
+	}
+	if recNum >= len(positions) {
+		panic(fmt.Errorf("no positions found for error at record %d", recNum))
+	}
+	errPos, ok := errPositions[recNum]
+	if !ok {
+		panic(fmt.Errorf("no error position found for error at record %d", recNum))
+	}
+	parseErr1 := *parseErr
+	parseErr1.Entry = errPos[0]
+	parseErr1.Column = errPos[1]
+	return &parseErr1
+}
+
+// makePositions returns the expected field positions of all the fields in text,
+// the positions of any errors, and the text with the position markers removed.
+//
+// The start of each field is marked with a § symbol;
+// error positions are marked with ∑ symbols.
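+//
+// For example (editor's illustration, not part of the original patch), the
+// input "§A§D §p\000" yields one record whose three fields start at columns
+// 1, 2, and 4, and no error positions.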
+func makePositions(text string) ([][][2]int, map[int][2]int, string) { + buf := make([]byte, 0, len(text)) + var positions [][][2]int + errPositions := make(map[int][2]int) + entry, col := 1, 1 + recNum := 0 + + for len(text) > 0 { + r, size := utf8.DecodeRuneInString(text) + switch r { + case '\000': + col = 1 + buf = append(buf, '\000') + positions = append(positions, [][2]int{}) + entry++ + recNum++ + case '§': + if len(positions) == 0 { + positions = append(positions, [][2]int{}) + } + positions[len(positions)-1] = append(positions[len(positions)-1], [2]int{entry, col}) + case '∑': + errPositions[recNum] = [2]int{entry, col} + default: + buf = append(buf, text[:size]...) + col += size + } + text = text[size:] + } + return positions, errPositions, string(buf) +} + +// nTimes is an io.Reader which yields the string s n times. +type nTimes struct { + s string + n int + off int +} + +func (r *nTimes) Read(p []byte) (n int, err error) { + for { + if r.n <= 0 || r.s == "" { + return n, io.EOF + } + n0 := copy(p, r.s[r.off:]) + p = p[n0:] + n += n0 + r.off += n0 + if r.off == len(r.s) { + r.off = 0 + r.n-- + } + if len(p) == 0 { + return + } + } +} + +// TODO: track other types. +// benchmarkRead measures reading the provided ls-tree data. +// initReader, if non-nil, modifies the Reader before it's used. +func benchmarkRead(b *testing.B, getReader func(reader io.Reader) *Reader, initReader func(*Reader), rows string) { + b.ReportAllocs() + r := getReader(&nTimes{s: rows, n: b.N}) + if initReader != nil { + initReader(r) + } + for { + _, err := r.Read() + if err == io.EOF { + break + } + if err != nil { + b.Fatal(err) + } + } +} + +const benchmarkLSTreeData = `100644 blob e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 \000100644 blob e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 "\000100644 blob 5b999efa470b056e329b4c23a73904e0794bdc2f .eslintrc.js\000100644 blob f44f57fff95196c5f7139dfa0b96875f1e9650a9 .gitignore\000100644 blob 33dbaf21275ca2a5f460249d941cbc27d5da3121 README.md\000040000 tree 7360f2d292aec95907cebdcbb412a6bf2bd10f8a apps\000100644 blob 9ec2879b24ce2c817296eebe2cb3846f8e4751ea package.json\000040000 tree 5759aadaea2cde55468a61e7104eb0a9d86c1d30 packages\000100644 blob 33d0621ee2f4da4a2f6f6bdd51a42618d181e337 turbo.json\000` +const benchmarkLSFilesData = `100644 13e399637190f1edb7f034b4281ecfafb5dab9e2 0 Makefile\000100644 6c1c500409989499db51f1eff37b38b857547fdc 0 cmd/turbo/main.go\000100644 2d2b9a2c3ba82f6b806f58c7f7d5eb55fefa837e 0 cmd/turbo/main_utils.go\000100644 3329c8a7f6edee487caeeaf56c600f7c85fc69e7 0 cmd/turbo/signals.go\000100644 e81df7b6ed9a277c30dd35e3524d00e8b13cf584 0 cmd/turbo/version.go\000100644 8992ebf37df05fc5ff64c0f811a3259adff10d70 0 go.mod\000100644 3da872301c79986673d6a12914fbd48c924f5999 0 go.sum\000100644 d7b2d20a037aa9bf8b48eef451eb5f9ba5904237 0 internal/analytics/analytics.go\000` +const benchmarkStatusData = ` M cli/internal/encoding/gitoutput/gitoutput.go\000 M cli/internal/encoding/gitoutput/gitoutput_test.go\000?? NOTICES.md\000 M cli/internal/encoding/gitoutput/gitoutput.go\000 M cli/internal/encoding/gitoutput/gitoutput_test.go\000?? NOTICES.md\000 M cli/internal/encoding/gitoutput/gitoutput.go\000 M cli/internal/encoding/gitoutput/gitoutput_test.go\000?? NOTICES.md\000 M cli/internal/encoding/gitoutput/gitoutput.go\000 M cli/internal/encoding/gitoutput/gitoutput_test.go\000?? 
NOTICES.md\000 M cli/internal/encoding/gitoutput/gitoutput.go\000 M cli/internal/encoding/gitoutput/gitoutput_test.go\000` + +func BenchmarkLSTreeRead(b *testing.B) { + benchmarkRead(b, NewLSTreeReader, nil, benchmarkLSTreeData) +} + +func BenchmarkLSTreeReadReuseRecord(b *testing.B) { + benchmarkRead(b, NewLSTreeReader, func(r *Reader) { r.ReuseRecord = true }, benchmarkLSTreeData) +} + +func BenchmarkLSFilesRead(b *testing.B) { + benchmarkRead(b, NewLSFilesReader, nil, benchmarkLSFilesData) +} + +func BenchmarkLSFilesReadReuseRecord(b *testing.B) { + benchmarkRead(b, NewLSFilesReader, func(r *Reader) { r.ReuseRecord = true }, benchmarkLSFilesData) +} + +func BenchmarkStatusRead(b *testing.B) { + benchmarkRead(b, NewStatusReader, nil, benchmarkStatusData) +} + +func BenchmarkStatusReadReuseRecord(b *testing.B) { + benchmarkRead(b, NewStatusReader, func(r *Reader) { r.ReuseRecord = true }, benchmarkStatusData) +} diff --git a/cli/internal/encoding/gitoutput/validators.go b/cli/internal/encoding/gitoutput/validators.go new file mode 100644 index 0000000..e13c2d5 --- /dev/null +++ b/cli/internal/encoding/gitoutput/validators.go @@ -0,0 +1,148 @@ +package gitoutput + +import "bytes" + +var _allowedObjectType = []byte(" blob tree commit ") +var _allowedStatusChars = []byte(" MTADRCU?!") + +// checkValid provides a uniform interface for calling `gitoutput` validators. +func checkValid(fieldType Field, value []byte) error { + switch fieldType { + case ObjectMode: + return checkObjectMode(value) + case ObjectType: + return checkObjectType(value) + case ObjectName: + return CheckObjectName(value) + case ObjectStage: + return checkObjectStage(value) + case StatusX: + return checkStatusX(value) + case StatusY: + return checkStatusY(value) + case Path: + return checkPath(value) + default: + return ErrUnknownField + } +} + +// checkObjectMode asserts that a byte slice is a six digit octal string (100644). +// It does not attempt to ensure that the values in particular positions are reasonable. +func checkObjectMode(value []byte) error { + if len(value) != 6 { + return ErrInvalidObjectMode + } + + // 0-7 are 0x30 - 0x37 + for _, currentByte := range value { + if (currentByte ^ 0x30) > 7 { + return ErrInvalidObjectMode + } + } + + // length of 6, 0-7 + return nil +} + +// checkObjectType asserts that a byte slice is a valid possibility (blob, tree, commit). +func checkObjectType(value []byte) error { + typeLength := len(value) + // Based upon: + // min(len("blob"), len("tree"), len("commit")) + // max(len("blob"), len("tree"), len("commit")) + if typeLength < 4 || typeLength > 6 { + return ErrInvalidObjectType + } + + // Because of the space separator there is no way to pass in a space. + // We use that trick to enable fast lookups in _allowedObjectType. + index := bytes.Index(_allowedObjectType, value) + + // Impossible to match at 0, not found is -1. + if index < 1 { + return ErrInvalidObjectType + } + + // Followed by a space. + if _allowedObjectType[index-1] != byte(_space) { + return ErrInvalidObjectType + } + + // Preceded by a space. + if _allowedObjectType[index+typeLength] != byte(_space) { + return ErrInvalidObjectType + } + return nil +} + +// CheckObjectName asserts that a byte slice looks like a SHA hash. 
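+// It accepts exactly 40 lowercase hex bytes; for example (editor's sketch,
+// not part of the original patch):
+//
+//	CheckObjectName([]byte("e69de29bb2d1d6434b8b29ae775ad8c2e48c5391")) // nil
+//	CheckObjectName([]byte("HEAD"))                                     // ErrInvalidObjectName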
+func CheckObjectName(value []byte) error { + if len(value) != 40 { + return ErrInvalidObjectName + } + + // 0-9 are 0x30 - 0x39 + // a-f are 0x61 - 0x66 + for _, currentByte := range value { + isNumber := (currentByte ^ 0x30) < 10 + numericAlpha := (currentByte ^ 0x60) + isAlpha := (numericAlpha < 7) && (numericAlpha > 0) + if !(isNumber || isAlpha) { + return ErrInvalidObjectName + } + } + + // length of 40, hex + return nil +} + +// checkObjectStage asserts that a byte slice is a valid possibility (0-3). +func checkObjectStage(value []byte) error { + // 0-3 are 0x30 - 0x33 + if len(value) != 1 { + return ErrInvalidObjectStage + } + + currentByte := value[0] + if (currentByte ^ 0x30) >= 4 { + return ErrInvalidObjectStage + } + + return nil +} + +// checkStatusX asserts that a byte slice is a valid possibility (" MTADRCU?!"). +func checkStatusX(value []byte) error { + if len(value) != 1 { + return ErrInvalidObjectStatusX + } + + index := bytes.Index(_allowedStatusChars, value) + if index == -1 { + return ErrInvalidObjectStatusX + } + return nil +} + +// checkStatusY asserts that a byte slice is a valid possibility (" MTADRCU?!"). +func checkStatusY(value []byte) error { + if len(value) != 1 { + return ErrInvalidObjectStatusY + } + + index := bytes.Index(_allowedStatusChars, value) + if index == -1 { + return ErrInvalidObjectStatusY + } + return nil +} + +// checkPath asserts that a byte slice is non-empty. +func checkPath(value []byte) error { + // Exists at all. This is best effort as trying to be fully-compatible is silly. + if len(value) == 0 { + return ErrInvalidPath + } + return nil +} diff --git a/cli/internal/encoding/gitoutput/validators_test.go b/cli/internal/encoding/gitoutput/validators_test.go new file mode 100644 index 0000000..29e1274 --- /dev/null +++ b/cli/internal/encoding/gitoutput/validators_test.go @@ -0,0 +1,514 @@ +package gitoutput + +import ( + "testing" +) + +func Test_checkValid(t *testing.T) { + type args struct { + fieldType Field + value []byte + } + tests := []struct { + name string + args args + wantErr bool + }{ + { + name: "ObjectMode", + args: args{ + fieldType: ObjectMode, + value: []byte("100644"), + }, + wantErr: false, + }, + { + name: "ObjectType", + args: args{ + fieldType: ObjectType, + value: []byte("blob"), + }, + wantErr: false, + }, + { + name: "ObjectName", + args: args{ + fieldType: ObjectName, + value: []byte("8992ebf37df05fc5ff64c0f811a3259adff10d70"), + }, + wantErr: false, + }, + { + name: "ObjectStage", + args: args{ + fieldType: ObjectStage, + value: []byte("0"), + }, + wantErr: false, + }, + { + name: "StatusX", + args: args{ + fieldType: StatusX, + value: []byte("!"), + }, + wantErr: false, + }, + { + name: "StatusY", + args: args{ + fieldType: StatusY, + value: []byte("?"), + }, + wantErr: false, + }, + { + name: "Path", + args: args{ + fieldType: Path, + value: []byte("/hello/world"), + }, + wantErr: false, + }, + { + name: "Unknown", + args: args{ + fieldType: Field(12), + value: []byte("unused"), + }, + wantErr: true, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if err := checkValid(tt.args.fieldType, tt.args.value); (err != nil) != tt.wantErr { + t.Errorf("checkValid() error = %v, wantErr %v", err, tt.wantErr) + } + }) + } +} + +func Test_checkObjectMode(t *testing.T) { + type args struct { + value []byte + } + tests := []struct { + name string + args args + wantErr bool + }{ + { + name: "Simple", + args: args{ + value: []byte("100644"), + }, + wantErr: false, + }, + { + name: "All sevens", + 
args: args{ + value: []byte("777777"), + }, + wantErr: false, + }, + { + name: "All zeroes", + args: args{ + value: []byte("000000"), + }, + wantErr: false, + }, + { + name: "Non-octal chars", + args: args{ + value: []byte("sixsix"), + }, + wantErr: true, + }, + { + name: "nul", + args: args{ + value: []byte("\000\000\000\000\000\000"), + }, + wantErr: true, + }, + { + name: "too long", + args: args{ + value: []byte("1234567"), + }, + wantErr: true, + }, + { + name: "off by plus one", + args: args{ + value: []byte("888888"), + }, + wantErr: true, + }, + { + name: "off by minus one", + args: args{ + value: []byte("//////"), + }, + wantErr: true, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if err := checkObjectMode(tt.args.value); (err != nil) != tt.wantErr { + t.Errorf("checkObjectMode() error = %v, wantErr %v", err, tt.wantErr) + } + }) + } +} + +func Test_checkObjectType(t *testing.T) { + type args struct { + value []byte + } + tests := []struct { + name string + args args + wantErr bool + }{ + { + name: "Finds blob", + args: args{ + value: []byte("blob"), + }, + wantErr: false, + }, + { + name: "Finds tree", + args: args{ + value: []byte("tree"), + }, + wantErr: false, + }, + { + name: "Finds commit", + args: args{ + value: []byte("commit"), + }, + wantErr: false, + }, + { + name: "nonsense input", + args: args{ + value: []byte("input"), + }, + wantErr: true, + }, + { + name: "Knows too much about the implementation details (all 3)", + args: args{ + value: []byte("blob tree commit"), + }, + wantErr: true, + }, + { + name: "Knows too much about the implementation details (first two)", + args: args{ + value: []byte("blob tree"), + }, + wantErr: true, + }, + { + name: "Knows too much about the implementation details (last two)", + args: args{ + value: []byte("tree commit"), + }, + wantErr: true, + }, + { + name: "Knows too much about the implementation details (arbitrary substring)", + args: args{ + value: []byte("tree c"), + }, + wantErr: true, + }, + { + name: "Knows too much about the implementation details (space)", + args: args{ + value: []byte(" "), + }, + wantErr: true, + }, + { + name: "Knows too much about the implementation details (empty string)", + args: args{ + value: []byte(""), + }, + wantErr: true, + }, + { + name: "Knows too much about the implementation details (leading space)", + args: args{ + value: []byte(" tree"), + }, + wantErr: true, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if err := checkObjectType(tt.args.value); (err != nil) != tt.wantErr { + t.Errorf("checkObjectType() error = %v, wantErr %v", err, tt.wantErr) + } + }) + } +} + +func TestCheckObjectName(t *testing.T) { + type args struct { + value []byte + } + tests := []struct { + name string + args args + wantErr bool + }{ + { + name: "Simple", + args: args{ + value: []byte("8992ebf37df05fc5ff64c0f811a3259adff10d70"), + }, + wantErr: false, + }, + { + name: "Too short", + args: args{ + value: []byte("8992ebf37df05fc5ff64"), + }, + wantErr: true, + }, + { + name: "Too long", + args: args{ + value: []byte("8992ebf37df05fc5ff64c0f811a3259adff10d708992ebf37df05fc5ff64c0f811a3259adff10d70"), + }, + wantErr: true, + }, + { + name: "Not hex", + args: args{ + value: []byte("z992ebf37df05fc5ff64c0f811a3259adff10d70"), + }, + wantErr: true, + }, + { + name: "Not lowercase", + args: args{ + value: []byte("8992EBF37DF05FC5FF64C0F811A3259ADFF10D70"), + }, + wantErr: true, + }, + { + name: "Off by plus one in the ASCII table (a-f).", + args: args{ + 
value: []byte("gggggggggggggggggggggggggggggggggggggggg"), + }, + wantErr: true, + }, + { + name: "Off by minus one in the ASCII table (a-f).", + args: args{ + value: []byte("````````````````````````````````````````"), + }, + wantErr: true, + }, + { + name: "Off by minus one in the ASCII table (0-9).", + args: args{ + value: []byte("////////////////////////////////////////"), + }, + wantErr: true, + }, + { + name: "Off by plus one in the ASCII table (0-9).", + args: args{ + value: []byte("::::::::::::::::::::::::::::::::::::::::"), + }, + wantErr: true, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if err := CheckObjectName(tt.args.value); (err != nil) != tt.wantErr { + t.Errorf("CheckObjectName() error = %v, wantErr %v", err, tt.wantErr) + } + }) + } +} + +func Test_checkObjectStage(t *testing.T) { + type args struct { + value []byte + } + tests := []struct { + name string + args args + wantErr bool + }{ + { + name: "0", + args: args{ + value: []byte("0"), + }, + wantErr: false, + }, + { + name: "1", + args: args{ + value: []byte("1"), + }, + wantErr: false, + }, + { + name: "2", + args: args{ + value: []byte("2"), + }, + wantErr: false, + }, + { + name: "3", + args: args{ + value: []byte("3"), + }, + wantErr: false, + }, + { + name: "/", + args: args{ + value: []byte("/"), + }, + wantErr: true, + }, + { + name: "4", + args: args{ + value: []byte("4"), + }, + wantErr: true, + }, + { + name: "00", + args: args{ + value: []byte("00"), + }, + wantErr: true, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if err := checkObjectStage(tt.args.value); (err != nil) != tt.wantErr { + t.Errorf("checkObjectStage() error = %v, wantErr %v", err, tt.wantErr) + } + }) + } +} + +func Test_checkStatus(t *testing.T) { + type args struct { + value []byte + } + tests := []struct { + name string + args args + wantErr bool + }{ + { + name: "Simple", + args: args{ + value: []byte("D"), + }, + wantErr: false, + }, + { + name: "Space", + args: args{ + value: []byte(" "), + }, + wantErr: false, + }, + { + name: "Empty", + args: args{ + value: []byte(""), + }, + wantErr: true, + }, + { + name: "Too long", + args: args{ + value: []byte("?!"), + }, + wantErr: true, + }, + { + name: "nul", + args: args{ + value: []byte("\000"), + }, + wantErr: true, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if err := checkStatusX(tt.args.value); (err != nil) != tt.wantErr { + t.Errorf("checkStatusX() error = %v, wantErr %v", err, tt.wantErr) + } + if err := checkStatusY(tt.args.value); (err != nil) != tt.wantErr { + t.Errorf("checkStatusY() error = %v, wantErr %v", err, tt.wantErr) + } + }) + } +} + +func Test_checkPath(t *testing.T) { + type args struct { + value []byte + } + tests := []struct { + name string + args args + wantErr bool + }{ + { + name: "Simple", + args: args{ + value: []byte("./"), + }, + wantErr: false, + }, + { + name: "newline", + args: args{ + value: []byte("has\nnewline"), + }, + wantErr: false, + }, + { + name: "Empty", + args: args{ + value: []byte(""), + }, + wantErr: true, + }, + { + name: "newline", + args: args{ + value: []byte("\n"), + }, + wantErr: false, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if err := checkPath(tt.args.value); (err != nil) != tt.wantErr { + t.Errorf("checkPath() error = %v, wantErr %v", err, tt.wantErr) + } + }) + } +} diff --git a/cli/internal/ffi/bindings.h b/cli/internal/ffi/bindings.h new file mode 100644 index 0000000..c2bbcea --- /dev/null +++ 
b/cli/internal/ffi/bindings.h @@ -0,0 +1,21 @@
+#include <stdarg.h>
+#include <stdbool.h>
+#include <stdint.h>
+#include <stdlib.h>
+
+typedef struct Buffer {
+  uint32_t len;
+  uint8_t *data;
+} Buffer;
+
+void free_buffer(struct Buffer buffer);
+
+struct Buffer get_turbo_data_dir(void);
+
+struct Buffer changed_files(struct Buffer buffer);
+
+struct Buffer previous_content(struct Buffer buffer);
+
+struct Buffer npm_transitive_closure(struct Buffer buf);
+
+struct Buffer npm_subgraph(struct Buffer buf);
diff --git a/cli/internal/ffi/ffi.go b/cli/internal/ffi/ffi.go
new file mode 100644
index 0000000..7ac15e4
--- /dev/null
+++ b/cli/internal/ffi/ffi.go
@@ -0,0 +1,224 @@
+package ffi
+
+// ffi
+//
+// Please read the notes about safety (marked with `SAFETY`) in both this file,
+// and in turborepo-ffi/lib.rs before modifying this file.
+
+// #include "bindings.h"
+//
+// #cgo darwin,arm64 LDFLAGS: -L${SRCDIR} -lturborepo_ffi_darwin_arm64 -lz -liconv
+// #cgo darwin,amd64 LDFLAGS: -L${SRCDIR} -lturborepo_ffi_darwin_amd64 -lz -liconv
+// #cgo linux,arm64,staticbinary LDFLAGS: -L${SRCDIR} -lturborepo_ffi_linux_arm64 -lunwind
+// #cgo linux,amd64,staticbinary LDFLAGS: -L${SRCDIR} -lturborepo_ffi_linux_amd64 -lunwind
+// #cgo linux,arm64,!staticbinary LDFLAGS: -L${SRCDIR} -lturborepo_ffi_linux_arm64 -lz
+// #cgo linux,amd64,!staticbinary LDFLAGS: -L${SRCDIR} -lturborepo_ffi_linux_amd64 -lz
+// #cgo windows,amd64 LDFLAGS: -L${SRCDIR} -lturborepo_ffi_windows_amd64 -lole32 -lbcrypt -lws2_32 -luserenv
+import "C"
+
+import (
+	"errors"
+	"reflect"
+	"unsafe"
+
+	ffi_proto "github.com/vercel/turbo/cli/internal/ffi/proto"
+	"google.golang.org/protobuf/proto"
+)
+
+// Unmarshal consumes a buffer and parses it into a proto.Message
+func Unmarshal[M proto.Message](b C.Buffer, c M) error {
+	bytes := toBytes(b)
+	if err := proto.Unmarshal(bytes, c); err != nil {
+		return err
+	}
+
+	// free the buffer on the rust side
+	//
+	// SAFETY: do not use `C.free_buffer` to free a buffer that has been allocated
+	// on the go side. If you happen to accidentally use the wrong one, you can
+	// expect a segfault on some platforms. This is the only valid callsite.
+	C.free_buffer(b)
+
+	return nil
+}
+
+// Marshal consumes a proto.Message and returns a buffer
+//
+// NOTE: the buffer must be freed by calling `Free` on it
+func Marshal[M proto.Message](c M) C.Buffer {
+	bytes, err := proto.Marshal(c)
+	if err != nil {
+		panic(err)
+	}
+
+	return toBuffer(bytes)
+}
+
+// Free frees a buffer that has been allocated *on the go side*.
+//
+// SAFETY: this is not the same as `C.free_buffer`, which frees a buffer that
+// has been allocated *on the rust side*. If you happen to accidentally use
+// the wrong one, you can expect a segfault on some platforms.
+//
+// EXAMPLE: it is recommended to use this function via a `defer` statement, like so:
+//
+//	reqBuf := Marshal(&req)
+//	defer reqBuf.Free()
+func (c C.Buffer) Free() {
+	C.free(unsafe.Pointer(c.data))
+}
+
+// rather than use C.GoBytes, we use this function to avoid copying the bytes,
+// since it is going to be immediately Unmarshalled into a proto.Message
+//
+// SAFETY: go slices contain a pointer to an underlying buffer with a length.
+// If the buffer is known to the garbage collector, dropping the last slice will
+// cause the memory to be freed. This memory is owned by the rust side (and is
+// not known to the garbage collector), so dropping the slice will do nothing.
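+//
+// As a consequence, the slice returned by toBytes is only valid until the
+// rust-owned buffer is freed. Unmarshal calls C.free_buffer as soon as
+// decoding finishes, so callers must not retain the slice beyond that point.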
+func toBytes(b C.Buffer) []byte {
+	var out []byte
+
+	len := (uint32)(b.len)
+
+	sh := (*reflect.SliceHeader)(unsafe.Pointer(&out))
+	sh.Data = uintptr(unsafe.Pointer(b.data))
+	sh.Len = int(len)
+	sh.Cap = int(len)
+
+	return out
+}
+
+func toBuffer(bytes []byte) C.Buffer {
+	b := C.Buffer{}
+	b.len = C.uint(len(bytes))
+	b.data = (*C.uchar)(C.CBytes(bytes))
+	return b
+}
+
+// GetTurboDataDir returns the path to the Turbo data directory
+func GetTurboDataDir() string {
+	buffer := C.get_turbo_data_dir()
+	resp := ffi_proto.TurboDataDirResp{}
+	if err := Unmarshal(buffer, resp.ProtoReflect().Interface()); err != nil {
+		panic(err)
+	}
+	return resp.Dir
+}
+
+// Go convention is to use an empty string for an uninitialized or null-valued
+// string. Rust convention is to use an Option<String> for the same purpose, which
+// is encoded on the Go side as *string. This converts between the two.
+func stringToRef(s string) *string {
+	if s == "" {
+		return nil
+	}
+	return &s
+}
+
+// ChangedFiles returns the files changed between two commits, between the
+// working directory and the index, and optionally untracked files
+func ChangedFiles(gitRoot string, turboRoot string, fromCommit string, toCommit string) ([]string, error) {
+	fromCommitRef := stringToRef(fromCommit)
+	toCommitRef := stringToRef(toCommit)
+
+	req := ffi_proto.ChangedFilesReq{
+		GitRoot:    gitRoot,
+		FromCommit: fromCommitRef,
+		ToCommit:   toCommitRef,
+		TurboRoot:  turboRoot,
+	}
+
+	reqBuf := Marshal(&req)
+	defer reqBuf.Free()
+
+	respBuf := C.changed_files(reqBuf)
+
+	resp := ffi_proto.ChangedFilesResp{}
+	if err := Unmarshal(respBuf, resp.ProtoReflect().Interface()); err != nil {
+		panic(err)
+	}
+	if err := resp.GetError(); err != "" {
+		return nil, errors.New(err)
+	}
+
+	return resp.GetFiles().GetFiles(), nil
+}
+
+// PreviousContent returns the content of a file at a previous commit
+func PreviousContent(gitRoot, fromCommit, filePath string) ([]byte, error) {
+	req := ffi_proto.PreviousContentReq{
+		GitRoot:    gitRoot,
+		FromCommit: fromCommit,
+		FilePath:   filePath,
+	}
+
+	reqBuf := Marshal(&req)
+	defer reqBuf.Free()
+
+	respBuf := C.previous_content(reqBuf)
+
+	resp := ffi_proto.PreviousContentResp{}
+	if err := Unmarshal(respBuf, resp.ProtoReflect().Interface()); err != nil {
+		panic(err)
+	}
+	content := resp.GetContent()
+	if err := resp.GetError(); err != "" {
+		return nil, errors.New(err)
+	}
+
+	return []byte(content), nil
+}
+
+// NpmTransitiveDeps returns the transitive external deps of a given package based on the deps and specifiers given
+func NpmTransitiveDeps(content []byte, pkgDir string, unresolvedDeps map[string]string) ([]*ffi_proto.LockfilePackage, error) {
+	return transitiveDeps(npmTransitiveDeps, content, pkgDir, unresolvedDeps)
+}
+
+func npmTransitiveDeps(buf C.Buffer) C.Buffer {
+	return C.npm_transitive_closure(buf)
+}
+
+func transitiveDeps(cFunc func(C.Buffer) C.Buffer, content []byte, pkgDir string, unresolvedDeps map[string]string) ([]*ffi_proto.LockfilePackage, error) {
+	req := ffi_proto.TransitiveDepsRequest{
+		Contents:       content,
+		WorkspaceDir:   pkgDir,
+		UnresolvedDeps: unresolvedDeps,
+	}
+	reqBuf := Marshal(&req)
+	resBuf := cFunc(reqBuf)
+	reqBuf.Free()
+
+	resp := ffi_proto.TransitiveDepsResponse{}
+	if err := Unmarshal(resBuf, resp.ProtoReflect().Interface()); err != nil {
+		panic(err)
+	}
+
+	if err := resp.GetError(); err != "" {
+		return nil, errors.New(err)
+	}
+
+	list :=
resp.GetPackages() + return list.GetList(), nil +} + +// NpmSubgraph returns the contents of a npm lockfile subgraph +func NpmSubgraph(content []byte, workspaces []string, packages []string) ([]byte, error) { + req := ffi_proto.SubgraphRequest{ + Contents: content, + Workspaces: workspaces, + Packages: packages, + } + reqBuf := Marshal(&req) + resBuf := C.npm_subgraph(reqBuf) + reqBuf.Free() + + resp := ffi_proto.SubgraphResponse{} + if err := Unmarshal(resBuf, resp.ProtoReflect().Interface()); err != nil { + panic(err) + } + + if err := resp.GetError(); err != "" { + return nil, errors.New(err) + } + + return resp.GetContents(), nil +} diff --git a/cli/internal/ffi/proto/messages.pb.go b/cli/internal/ffi/proto/messages.pb.go new file mode 100644 index 0000000..22992d3 --- /dev/null +++ b/cli/internal/ffi/proto/messages.pb.go @@ -0,0 +1,1380 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.28.1 +// protoc v3.21.12 +// source: turborepo-ffi/messages.proto + +package proto + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type TurboDataDirResp struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Dir string `protobuf:"bytes,1,opt,name=dir,proto3" json:"dir,omitempty"` +} + +func (x *TurboDataDirResp) Reset() { + *x = TurboDataDirResp{} + if protoimpl.UnsafeEnabled { + mi := &file_turborepo_ffi_messages_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *TurboDataDirResp) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*TurboDataDirResp) ProtoMessage() {} + +func (x *TurboDataDirResp) ProtoReflect() protoreflect.Message { + mi := &file_turborepo_ffi_messages_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use TurboDataDirResp.ProtoReflect.Descriptor instead. 
+func (*TurboDataDirResp) Descriptor() ([]byte, []int) { + return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{0} +} + +func (x *TurboDataDirResp) GetDir() string { + if x != nil { + return x.Dir + } + return "" +} + +type GlobReq struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + BasePath string `protobuf:"bytes,1,opt,name=base_path,json=basePath,proto3" json:"base_path,omitempty"` + IncludePatterns []string `protobuf:"bytes,2,rep,name=include_patterns,json=includePatterns,proto3" json:"include_patterns,omitempty"` + ExcludePatterns []string `protobuf:"bytes,3,rep,name=exclude_patterns,json=excludePatterns,proto3" json:"exclude_patterns,omitempty"` + FilesOnly bool `protobuf:"varint,4,opt,name=files_only,json=filesOnly,proto3" json:"files_only,omitempty"` // note that the default for a bool is false +} + +func (x *GlobReq) Reset() { + *x = GlobReq{} + if protoimpl.UnsafeEnabled { + mi := &file_turborepo_ffi_messages_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *GlobReq) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GlobReq) ProtoMessage() {} + +func (x *GlobReq) ProtoReflect() protoreflect.Message { + mi := &file_turborepo_ffi_messages_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GlobReq.ProtoReflect.Descriptor instead. +func (*GlobReq) Descriptor() ([]byte, []int) { + return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{1} +} + +func (x *GlobReq) GetBasePath() string { + if x != nil { + return x.BasePath + } + return "" +} + +func (x *GlobReq) GetIncludePatterns() []string { + if x != nil { + return x.IncludePatterns + } + return nil +} + +func (x *GlobReq) GetExcludePatterns() []string { + if x != nil { + return x.ExcludePatterns + } + return nil +} + +func (x *GlobReq) GetFilesOnly() bool { + if x != nil { + return x.FilesOnly + } + return false +} + +type GlobResp struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Types that are assignable to Response: + // *GlobResp_Files + // *GlobResp_Error + Response isGlobResp_Response `protobuf_oneof:"response"` +} + +func (x *GlobResp) Reset() { + *x = GlobResp{} + if protoimpl.UnsafeEnabled { + mi := &file_turborepo_ffi_messages_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *GlobResp) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GlobResp) ProtoMessage() {} + +func (x *GlobResp) ProtoReflect() protoreflect.Message { + mi := &file_turborepo_ffi_messages_proto_msgTypes[2] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GlobResp.ProtoReflect.Descriptor instead. 
+func (*GlobResp) Descriptor() ([]byte, []int) { + return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{2} +} + +func (m *GlobResp) GetResponse() isGlobResp_Response { + if m != nil { + return m.Response + } + return nil +} + +func (x *GlobResp) GetFiles() *GlobRespList { + if x, ok := x.GetResponse().(*GlobResp_Files); ok { + return x.Files + } + return nil +} + +func (x *GlobResp) GetError() string { + if x, ok := x.GetResponse().(*GlobResp_Error); ok { + return x.Error + } + return "" +} + +type isGlobResp_Response interface { + isGlobResp_Response() +} + +type GlobResp_Files struct { + Files *GlobRespList `protobuf:"bytes,1,opt,name=files,proto3,oneof"` +} + +type GlobResp_Error struct { + Error string `protobuf:"bytes,2,opt,name=error,proto3,oneof"` +} + +func (*GlobResp_Files) isGlobResp_Response() {} + +func (*GlobResp_Error) isGlobResp_Response() {} + +type GlobRespList struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Files []string `protobuf:"bytes,1,rep,name=files,proto3" json:"files,omitempty"` +} + +func (x *GlobRespList) Reset() { + *x = GlobRespList{} + if protoimpl.UnsafeEnabled { + mi := &file_turborepo_ffi_messages_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *GlobRespList) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GlobRespList) ProtoMessage() {} + +func (x *GlobRespList) ProtoReflect() protoreflect.Message { + mi := &file_turborepo_ffi_messages_proto_msgTypes[3] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GlobRespList.ProtoReflect.Descriptor instead. +func (*GlobRespList) Descriptor() ([]byte, []int) { + return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{3} +} + +func (x *GlobRespList) GetFiles() []string { + if x != nil { + return x.Files + } + return nil +} + +type ChangedFilesReq struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + GitRoot string `protobuf:"bytes,1,opt,name=git_root,json=gitRoot,proto3" json:"git_root,omitempty"` + TurboRoot string `protobuf:"bytes,2,opt,name=turbo_root,json=turboRoot,proto3" json:"turbo_root,omitempty"` + FromCommit *string `protobuf:"bytes,3,opt,name=from_commit,json=fromCommit,proto3,oneof" json:"from_commit,omitempty"` + ToCommit *string `protobuf:"bytes,4,opt,name=to_commit,json=toCommit,proto3,oneof" json:"to_commit,omitempty"` +} + +func (x *ChangedFilesReq) Reset() { + *x = ChangedFilesReq{} + if protoimpl.UnsafeEnabled { + mi := &file_turborepo_ffi_messages_proto_msgTypes[4] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ChangedFilesReq) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ChangedFilesReq) ProtoMessage() {} + +func (x *ChangedFilesReq) ProtoReflect() protoreflect.Message { + mi := &file_turborepo_ffi_messages_proto_msgTypes[4] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ChangedFilesReq.ProtoReflect.Descriptor instead. 
+func (*ChangedFilesReq) Descriptor() ([]byte, []int) { + return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{4} +} + +func (x *ChangedFilesReq) GetGitRoot() string { + if x != nil { + return x.GitRoot + } + return "" +} + +func (x *ChangedFilesReq) GetTurboRoot() string { + if x != nil { + return x.TurboRoot + } + return "" +} + +func (x *ChangedFilesReq) GetFromCommit() string { + if x != nil && x.FromCommit != nil { + return *x.FromCommit + } + return "" +} + +func (x *ChangedFilesReq) GetToCommit() string { + if x != nil && x.ToCommit != nil { + return *x.ToCommit + } + return "" +} + +type ChangedFilesResp struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Types that are assignable to Response: + // *ChangedFilesResp_Files + // *ChangedFilesResp_Error + Response isChangedFilesResp_Response `protobuf_oneof:"response"` +} + +func (x *ChangedFilesResp) Reset() { + *x = ChangedFilesResp{} + if protoimpl.UnsafeEnabled { + mi := &file_turborepo_ffi_messages_proto_msgTypes[5] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ChangedFilesResp) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ChangedFilesResp) ProtoMessage() {} + +func (x *ChangedFilesResp) ProtoReflect() protoreflect.Message { + mi := &file_turborepo_ffi_messages_proto_msgTypes[5] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ChangedFilesResp.ProtoReflect.Descriptor instead. +func (*ChangedFilesResp) Descriptor() ([]byte, []int) { + return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{5} +} + +func (m *ChangedFilesResp) GetResponse() isChangedFilesResp_Response { + if m != nil { + return m.Response + } + return nil +} + +func (x *ChangedFilesResp) GetFiles() *ChangedFilesList { + if x, ok := x.GetResponse().(*ChangedFilesResp_Files); ok { + return x.Files + } + return nil +} + +func (x *ChangedFilesResp) GetError() string { + if x, ok := x.GetResponse().(*ChangedFilesResp_Error); ok { + return x.Error + } + return "" +} + +type isChangedFilesResp_Response interface { + isChangedFilesResp_Response() +} + +type ChangedFilesResp_Files struct { + Files *ChangedFilesList `protobuf:"bytes,1,opt,name=files,proto3,oneof"` +} + +type ChangedFilesResp_Error struct { + Error string `protobuf:"bytes,2,opt,name=error,proto3,oneof"` +} + +func (*ChangedFilesResp_Files) isChangedFilesResp_Response() {} + +func (*ChangedFilesResp_Error) isChangedFilesResp_Response() {} + +type ChangedFilesList struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Files []string `protobuf:"bytes,1,rep,name=files,proto3" json:"files,omitempty"` +} + +func (x *ChangedFilesList) Reset() { + *x = ChangedFilesList{} + if protoimpl.UnsafeEnabled { + mi := &file_turborepo_ffi_messages_proto_msgTypes[6] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ChangedFilesList) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ChangedFilesList) ProtoMessage() {} + +func (x *ChangedFilesList) ProtoReflect() protoreflect.Message { + mi := &file_turborepo_ffi_messages_proto_msgTypes[6] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if 
ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ChangedFilesList.ProtoReflect.Descriptor instead. +func (*ChangedFilesList) Descriptor() ([]byte, []int) { + return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{6} +} + +func (x *ChangedFilesList) GetFiles() []string { + if x != nil { + return x.Files + } + return nil +} + +type PreviousContentReq struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + GitRoot string `protobuf:"bytes,1,opt,name=git_root,json=gitRoot,proto3" json:"git_root,omitempty"` + FromCommit string `protobuf:"bytes,2,opt,name=from_commit,json=fromCommit,proto3" json:"from_commit,omitempty"` + FilePath string `protobuf:"bytes,3,opt,name=file_path,json=filePath,proto3" json:"file_path,omitempty"` +} + +func (x *PreviousContentReq) Reset() { + *x = PreviousContentReq{} + if protoimpl.UnsafeEnabled { + mi := &file_turborepo_ffi_messages_proto_msgTypes[7] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *PreviousContentReq) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*PreviousContentReq) ProtoMessage() {} + +func (x *PreviousContentReq) ProtoReflect() protoreflect.Message { + mi := &file_turborepo_ffi_messages_proto_msgTypes[7] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use PreviousContentReq.ProtoReflect.Descriptor instead. +func (*PreviousContentReq) Descriptor() ([]byte, []int) { + return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{7} +} + +func (x *PreviousContentReq) GetGitRoot() string { + if x != nil { + return x.GitRoot + } + return "" +} + +func (x *PreviousContentReq) GetFromCommit() string { + if x != nil { + return x.FromCommit + } + return "" +} + +func (x *PreviousContentReq) GetFilePath() string { + if x != nil { + return x.FilePath + } + return "" +} + +type PreviousContentResp struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Types that are assignable to Response: + // *PreviousContentResp_Content + // *PreviousContentResp_Error + Response isPreviousContentResp_Response `protobuf_oneof:"response"` +} + +func (x *PreviousContentResp) Reset() { + *x = PreviousContentResp{} + if protoimpl.UnsafeEnabled { + mi := &file_turborepo_ffi_messages_proto_msgTypes[8] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *PreviousContentResp) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*PreviousContentResp) ProtoMessage() {} + +func (x *PreviousContentResp) ProtoReflect() protoreflect.Message { + mi := &file_turborepo_ffi_messages_proto_msgTypes[8] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use PreviousContentResp.ProtoReflect.Descriptor instead. 
+func (*PreviousContentResp) Descriptor() ([]byte, []int) { + return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{8} +} + +func (m *PreviousContentResp) GetResponse() isPreviousContentResp_Response { + if m != nil { + return m.Response + } + return nil +} + +func (x *PreviousContentResp) GetContent() []byte { + if x, ok := x.GetResponse().(*PreviousContentResp_Content); ok { + return x.Content + } + return nil +} + +func (x *PreviousContentResp) GetError() string { + if x, ok := x.GetResponse().(*PreviousContentResp_Error); ok { + return x.Error + } + return "" +} + +type isPreviousContentResp_Response interface { + isPreviousContentResp_Response() +} + +type PreviousContentResp_Content struct { + Content []byte `protobuf:"bytes,1,opt,name=content,proto3,oneof"` +} + +type PreviousContentResp_Error struct { + Error string `protobuf:"bytes,2,opt,name=error,proto3,oneof"` +} + +func (*PreviousContentResp_Content) isPreviousContentResp_Response() {} + +func (*PreviousContentResp_Error) isPreviousContentResp_Response() {} + +type TransitiveDepsRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Contents []byte `protobuf:"bytes,1,opt,name=contents,proto3" json:"contents,omitempty"` + WorkspaceDir string `protobuf:"bytes,2,opt,name=workspace_dir,json=workspaceDir,proto3" json:"workspace_dir,omitempty"` + UnresolvedDeps map[string]string `protobuf:"bytes,3,rep,name=unresolved_deps,json=unresolvedDeps,proto3" json:"unresolved_deps,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` +} + +func (x *TransitiveDepsRequest) Reset() { + *x = TransitiveDepsRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_turborepo_ffi_messages_proto_msgTypes[9] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *TransitiveDepsRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*TransitiveDepsRequest) ProtoMessage() {} + +func (x *TransitiveDepsRequest) ProtoReflect() protoreflect.Message { + mi := &file_turborepo_ffi_messages_proto_msgTypes[9] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use TransitiveDepsRequest.ProtoReflect.Descriptor instead. 
+func (*TransitiveDepsRequest) Descriptor() ([]byte, []int) { + return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{9} +} + +func (x *TransitiveDepsRequest) GetContents() []byte { + if x != nil { + return x.Contents + } + return nil +} + +func (x *TransitiveDepsRequest) GetWorkspaceDir() string { + if x != nil { + return x.WorkspaceDir + } + return "" +} + +func (x *TransitiveDepsRequest) GetUnresolvedDeps() map[string]string { + if x != nil { + return x.UnresolvedDeps + } + return nil +} + +type TransitiveDepsResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Types that are assignable to Response: + // *TransitiveDepsResponse_Packages + // *TransitiveDepsResponse_Error + Response isTransitiveDepsResponse_Response `protobuf_oneof:"response"` +} + +func (x *TransitiveDepsResponse) Reset() { + *x = TransitiveDepsResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_turborepo_ffi_messages_proto_msgTypes[10] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *TransitiveDepsResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*TransitiveDepsResponse) ProtoMessage() {} + +func (x *TransitiveDepsResponse) ProtoReflect() protoreflect.Message { + mi := &file_turborepo_ffi_messages_proto_msgTypes[10] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use TransitiveDepsResponse.ProtoReflect.Descriptor instead. +func (*TransitiveDepsResponse) Descriptor() ([]byte, []int) { + return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{10} +} + +func (m *TransitiveDepsResponse) GetResponse() isTransitiveDepsResponse_Response { + if m != nil { + return m.Response + } + return nil +} + +func (x *TransitiveDepsResponse) GetPackages() *LockfilePackageList { + if x, ok := x.GetResponse().(*TransitiveDepsResponse_Packages); ok { + return x.Packages + } + return nil +} + +func (x *TransitiveDepsResponse) GetError() string { + if x, ok := x.GetResponse().(*TransitiveDepsResponse_Error); ok { + return x.Error + } + return "" +} + +type isTransitiveDepsResponse_Response interface { + isTransitiveDepsResponse_Response() +} + +type TransitiveDepsResponse_Packages struct { + Packages *LockfilePackageList `protobuf:"bytes,1,opt,name=packages,proto3,oneof"` +} + +type TransitiveDepsResponse_Error struct { + Error string `protobuf:"bytes,2,opt,name=error,proto3,oneof"` +} + +func (*TransitiveDepsResponse_Packages) isTransitiveDepsResponse_Response() {} + +func (*TransitiveDepsResponse_Error) isTransitiveDepsResponse_Response() {} + +type LockfilePackage struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Key string `protobuf:"bytes,1,opt,name=key,proto3" json:"key,omitempty"` + Version string `protobuf:"bytes,2,opt,name=version,proto3" json:"version,omitempty"` + Found bool `protobuf:"varint,3,opt,name=found,proto3" json:"found,omitempty"` +} + +func (x *LockfilePackage) Reset() { + *x = LockfilePackage{} + if protoimpl.UnsafeEnabled { + mi := &file_turborepo_ffi_messages_proto_msgTypes[11] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *LockfilePackage) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*LockfilePackage) ProtoMessage() {} + +func 
(x *LockfilePackage) ProtoReflect() protoreflect.Message { + mi := &file_turborepo_ffi_messages_proto_msgTypes[11] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use LockfilePackage.ProtoReflect.Descriptor instead. +func (*LockfilePackage) Descriptor() ([]byte, []int) { + return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{11} +} + +func (x *LockfilePackage) GetKey() string { + if x != nil { + return x.Key + } + return "" +} + +func (x *LockfilePackage) GetVersion() string { + if x != nil { + return x.Version + } + return "" +} + +func (x *LockfilePackage) GetFound() bool { + if x != nil { + return x.Found + } + return false +} + +type LockfilePackageList struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + List []*LockfilePackage `protobuf:"bytes,1,rep,name=list,proto3" json:"list,omitempty"` +} + +func (x *LockfilePackageList) Reset() { + *x = LockfilePackageList{} + if protoimpl.UnsafeEnabled { + mi := &file_turborepo_ffi_messages_proto_msgTypes[12] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *LockfilePackageList) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*LockfilePackageList) ProtoMessage() {} + +func (x *LockfilePackageList) ProtoReflect() protoreflect.Message { + mi := &file_turborepo_ffi_messages_proto_msgTypes[12] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use LockfilePackageList.ProtoReflect.Descriptor instead. +func (*LockfilePackageList) Descriptor() ([]byte, []int) { + return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{12} +} + +func (x *LockfilePackageList) GetList() []*LockfilePackage { + if x != nil { + return x.List + } + return nil +} + +type SubgraphRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Contents []byte `protobuf:"bytes,1,opt,name=contents,proto3" json:"contents,omitempty"` + Workspaces []string `protobuf:"bytes,2,rep,name=workspaces,proto3" json:"workspaces,omitempty"` + Packages []string `protobuf:"bytes,3,rep,name=packages,proto3" json:"packages,omitempty"` +} + +func (x *SubgraphRequest) Reset() { + *x = SubgraphRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_turborepo_ffi_messages_proto_msgTypes[13] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *SubgraphRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SubgraphRequest) ProtoMessage() {} + +func (x *SubgraphRequest) ProtoReflect() protoreflect.Message { + mi := &file_turborepo_ffi_messages_proto_msgTypes[13] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SubgraphRequest.ProtoReflect.Descriptor instead. 
+func (*SubgraphRequest) Descriptor() ([]byte, []int) { + return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{13} +} + +func (x *SubgraphRequest) GetContents() []byte { + if x != nil { + return x.Contents + } + return nil +} + +func (x *SubgraphRequest) GetWorkspaces() []string { + if x != nil { + return x.Workspaces + } + return nil +} + +func (x *SubgraphRequest) GetPackages() []string { + if x != nil { + return x.Packages + } + return nil +} + +type SubgraphResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Types that are assignable to Response: + // *SubgraphResponse_Contents + // *SubgraphResponse_Error + Response isSubgraphResponse_Response `protobuf_oneof:"response"` +} + +func (x *SubgraphResponse) Reset() { + *x = SubgraphResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_turborepo_ffi_messages_proto_msgTypes[14] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *SubgraphResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SubgraphResponse) ProtoMessage() {} + +func (x *SubgraphResponse) ProtoReflect() protoreflect.Message { + mi := &file_turborepo_ffi_messages_proto_msgTypes[14] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SubgraphResponse.ProtoReflect.Descriptor instead. +func (*SubgraphResponse) Descriptor() ([]byte, []int) { + return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{14} +} + +func (m *SubgraphResponse) GetResponse() isSubgraphResponse_Response { + if m != nil { + return m.Response + } + return nil +} + +func (x *SubgraphResponse) GetContents() []byte { + if x, ok := x.GetResponse().(*SubgraphResponse_Contents); ok { + return x.Contents + } + return nil +} + +func (x *SubgraphResponse) GetError() string { + if x, ok := x.GetResponse().(*SubgraphResponse_Error); ok { + return x.Error + } + return "" +} + +type isSubgraphResponse_Response interface { + isSubgraphResponse_Response() +} + +type SubgraphResponse_Contents struct { + Contents []byte `protobuf:"bytes,1,opt,name=contents,proto3,oneof"` +} + +type SubgraphResponse_Error struct { + Error string `protobuf:"bytes,2,opt,name=error,proto3,oneof"` +} + +func (*SubgraphResponse_Contents) isSubgraphResponse_Response() {} + +func (*SubgraphResponse_Error) isSubgraphResponse_Response() {} + +var File_turborepo_ffi_messages_proto protoreflect.FileDescriptor + +var file_turborepo_ffi_messages_proto_rawDesc = []byte{ + 0x0a, 0x1c, 0x74, 0x75, 0x72, 0x62, 0x6f, 0x72, 0x65, 0x70, 0x6f, 0x2d, 0x66, 0x66, 0x69, 0x2f, + 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x24, + 0x0a, 0x10, 0x54, 0x75, 0x72, 0x62, 0x6f, 0x44, 0x61, 0x74, 0x61, 0x44, 0x69, 0x72, 0x52, 0x65, + 0x73, 0x70, 0x12, 0x10, 0x0a, 0x03, 0x64, 0x69, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x03, 0x64, 0x69, 0x72, 0x22, 0x9b, 0x01, 0x0a, 0x07, 0x47, 0x6c, 0x6f, 0x62, 0x52, 0x65, 0x71, + 0x12, 0x1b, 0x0a, 0x09, 0x62, 0x61, 0x73, 0x65, 0x5f, 0x70, 0x61, 0x74, 0x68, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x08, 0x62, 0x61, 0x73, 0x65, 0x50, 0x61, 0x74, 0x68, 0x12, 0x29, 0x0a, + 0x10, 0x69, 0x6e, 0x63, 0x6c, 0x75, 0x64, 0x65, 0x5f, 0x70, 0x61, 0x74, 0x74, 0x65, 0x72, 0x6e, + 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0f, 0x69, 0x6e, 0x63, 
0x6c, 0x75, 0x64, 0x65, + 0x50, 0x61, 0x74, 0x74, 0x65, 0x72, 0x6e, 0x73, 0x12, 0x29, 0x0a, 0x10, 0x65, 0x78, 0x63, 0x6c, + 0x75, 0x64, 0x65, 0x5f, 0x70, 0x61, 0x74, 0x74, 0x65, 0x72, 0x6e, 0x73, 0x18, 0x03, 0x20, 0x03, + 0x28, 0x09, 0x52, 0x0f, 0x65, 0x78, 0x63, 0x6c, 0x75, 0x64, 0x65, 0x50, 0x61, 0x74, 0x74, 0x65, + 0x72, 0x6e, 0x73, 0x12, 0x1d, 0x0a, 0x0a, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x5f, 0x6f, 0x6e, 0x6c, + 0x79, 0x18, 0x04, 0x20, 0x01, 0x28, 0x08, 0x52, 0x09, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x4f, 0x6e, + 0x6c, 0x79, 0x22, 0x55, 0x0a, 0x08, 0x47, 0x6c, 0x6f, 0x62, 0x52, 0x65, 0x73, 0x70, 0x12, 0x25, + 0x0a, 0x05, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0d, 0x2e, + 0x47, 0x6c, 0x6f, 0x62, 0x52, 0x65, 0x73, 0x70, 0x4c, 0x69, 0x73, 0x74, 0x48, 0x00, 0x52, 0x05, + 0x66, 0x69, 0x6c, 0x65, 0x73, 0x12, 0x16, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x42, 0x0a, 0x0a, + 0x08, 0x72, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x24, 0x0a, 0x0c, 0x47, 0x6c, 0x6f, + 0x62, 0x52, 0x65, 0x73, 0x70, 0x4c, 0x69, 0x73, 0x74, 0x12, 0x14, 0x0a, 0x05, 0x66, 0x69, 0x6c, + 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, 0x05, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x22, + 0xb1, 0x01, 0x0a, 0x0f, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x64, 0x46, 0x69, 0x6c, 0x65, 0x73, + 0x52, 0x65, 0x71, 0x12, 0x19, 0x0a, 0x08, 0x67, 0x69, 0x74, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x67, 0x69, 0x74, 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x1d, + 0x0a, 0x0a, 0x74, 0x75, 0x72, 0x62, 0x6f, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x18, 0x02, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x09, 0x74, 0x75, 0x72, 0x62, 0x6f, 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x24, 0x0a, + 0x0b, 0x66, 0x72, 0x6f, 0x6d, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x18, 0x03, 0x20, 0x01, + 0x28, 0x09, 0x48, 0x00, 0x52, 0x0a, 0x66, 0x72, 0x6f, 0x6d, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, + 0x88, 0x01, 0x01, 0x12, 0x20, 0x0a, 0x09, 0x74, 0x6f, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, + 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x48, 0x01, 0x52, 0x08, 0x74, 0x6f, 0x43, 0x6f, 0x6d, 0x6d, + 0x69, 0x74, 0x88, 0x01, 0x01, 0x42, 0x0e, 0x0a, 0x0c, 0x5f, 0x66, 0x72, 0x6f, 0x6d, 0x5f, 0x63, + 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x42, 0x0c, 0x0a, 0x0a, 0x5f, 0x74, 0x6f, 0x5f, 0x63, 0x6f, 0x6d, + 0x6d, 0x69, 0x74, 0x22, 0x61, 0x0a, 0x10, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x64, 0x46, 0x69, + 0x6c, 0x65, 0x73, 0x52, 0x65, 0x73, 0x70, 0x12, 0x29, 0x0a, 0x05, 0x66, 0x69, 0x6c, 0x65, 0x73, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x64, + 0x46, 0x69, 0x6c, 0x65, 0x73, 0x4c, 0x69, 0x73, 0x74, 0x48, 0x00, 0x52, 0x05, 0x66, 0x69, 0x6c, + 0x65, 0x73, 0x12, 0x16, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x09, 0x48, 0x00, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x42, 0x0a, 0x0a, 0x08, 0x72, 0x65, + 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x28, 0x0a, 0x10, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, + 0x64, 0x46, 0x69, 0x6c, 0x65, 0x73, 0x4c, 0x69, 0x73, 0x74, 0x12, 0x14, 0x0a, 0x05, 0x66, 0x69, + 0x6c, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, 0x05, 0x66, 0x69, 0x6c, 0x65, 0x73, + 0x22, 0x6d, 0x0a, 0x12, 0x50, 0x72, 0x65, 0x76, 0x69, 0x6f, 0x75, 0x73, 0x43, 0x6f, 0x6e, 0x74, + 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x12, 0x19, 0x0a, 0x08, 0x67, 0x69, 0x74, 0x5f, 0x72, 0x6f, + 0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x67, 0x69, 0x74, 0x52, 0x6f, 0x6f, + 
0x74, 0x12, 0x1f, 0x0a, 0x0b, 0x66, 0x72, 0x6f, 0x6d, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x66, 0x72, 0x6f, 0x6d, 0x43, 0x6f, 0x6d, 0x6d, + 0x69, 0x74, 0x12, 0x1b, 0x0a, 0x09, 0x66, 0x69, 0x6c, 0x65, 0x5f, 0x70, 0x61, 0x74, 0x68, 0x18, + 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x66, 0x69, 0x6c, 0x65, 0x50, 0x61, 0x74, 0x68, 0x22, + 0x55, 0x0a, 0x13, 0x50, 0x72, 0x65, 0x76, 0x69, 0x6f, 0x75, 0x73, 0x43, 0x6f, 0x6e, 0x74, 0x65, + 0x6e, 0x74, 0x52, 0x65, 0x73, 0x70, 0x12, 0x1a, 0x0a, 0x07, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, + 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x48, 0x00, 0x52, 0x07, 0x63, 0x6f, 0x6e, 0x74, 0x65, + 0x6e, 0x74, 0x12, 0x16, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x09, 0x48, 0x00, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x42, 0x0a, 0x0a, 0x08, 0x72, 0x65, + 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0xf0, 0x01, 0x0a, 0x15, 0x54, 0x72, 0x61, 0x6e, 0x73, + 0x69, 0x74, 0x69, 0x76, 0x65, 0x44, 0x65, 0x70, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x12, 0x1a, 0x0a, 0x08, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x0c, 0x52, 0x08, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x23, 0x0a, 0x0d, + 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x64, 0x69, 0x72, 0x18, 0x02, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x0c, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x44, 0x69, + 0x72, 0x12, 0x53, 0x0a, 0x0f, 0x75, 0x6e, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x64, 0x5f, + 0x64, 0x65, 0x70, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2a, 0x2e, 0x54, 0x72, 0x61, + 0x6e, 0x73, 0x69, 0x74, 0x69, 0x76, 0x65, 0x44, 0x65, 0x70, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x2e, 0x55, 0x6e, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x64, 0x44, 0x65, 0x70, + 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0e, 0x75, 0x6e, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x76, + 0x65, 0x64, 0x44, 0x65, 0x70, 0x73, 0x1a, 0x41, 0x0a, 0x13, 0x55, 0x6e, 0x72, 0x65, 0x73, 0x6f, + 0x6c, 0x76, 0x65, 0x64, 0x44, 0x65, 0x70, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, + 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, + 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0x70, 0x0a, 0x16, 0x54, 0x72, 0x61, + 0x6e, 0x73, 0x69, 0x74, 0x69, 0x76, 0x65, 0x44, 0x65, 0x70, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, + 0x6e, 0x73, 0x65, 0x12, 0x32, 0x0a, 0x08, 0x70, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, 0x73, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x4c, 0x6f, 0x63, 0x6b, 0x66, 0x69, 0x6c, 0x65, + 0x50, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, 0x4c, 0x69, 0x73, 0x74, 0x48, 0x00, 0x52, 0x08, 0x70, + 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, 0x73, 0x12, 0x16, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x42, + 0x0a, 0x0a, 0x08, 0x72, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x53, 0x0a, 0x0f, 0x4c, + 0x6f, 0x63, 0x6b, 0x66, 0x69, 0x6c, 0x65, 0x50, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, 0x12, 0x10, + 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, + 0x12, 0x18, 0x0a, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x14, 0x0a, 0x05, 0x66, 0x6f, + 0x75, 0x6e, 0x64, 0x18, 
0x03, 0x20, 0x01, 0x28, 0x08, 0x52, 0x05, 0x66, 0x6f, 0x75, 0x6e, 0x64, + 0x22, 0x3b, 0x0a, 0x13, 0x4c, 0x6f, 0x63, 0x6b, 0x66, 0x69, 0x6c, 0x65, 0x50, 0x61, 0x63, 0x6b, + 0x61, 0x67, 0x65, 0x4c, 0x69, 0x73, 0x74, 0x12, 0x24, 0x0a, 0x04, 0x6c, 0x69, 0x73, 0x74, 0x18, + 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x4c, 0x6f, 0x63, 0x6b, 0x66, 0x69, 0x6c, 0x65, + 0x50, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, 0x52, 0x04, 0x6c, 0x69, 0x73, 0x74, 0x22, 0x69, 0x0a, + 0x0f, 0x53, 0x75, 0x62, 0x67, 0x72, 0x61, 0x70, 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x12, 0x1a, 0x0a, 0x08, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x0c, 0x52, 0x08, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x1e, 0x0a, 0x0a, + 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x09, + 0x52, 0x0a, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x73, 0x12, 0x1a, 0x0a, 0x08, + 0x70, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x09, 0x52, 0x08, + 0x70, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, 0x73, 0x22, 0x54, 0x0a, 0x10, 0x53, 0x75, 0x62, 0x67, + 0x72, 0x61, 0x70, 0x68, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x1c, 0x0a, 0x08, + 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x48, 0x00, + 0x52, 0x08, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x16, 0x0a, 0x05, 0x65, 0x72, + 0x72, 0x6f, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x05, 0x65, 0x72, 0x72, + 0x6f, 0x72, 0x42, 0x0a, 0x0a, 0x08, 0x72, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x42, 0x0b, + 0x5a, 0x09, 0x66, 0x66, 0x69, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x06, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x33, +} + +var ( + file_turborepo_ffi_messages_proto_rawDescOnce sync.Once + file_turborepo_ffi_messages_proto_rawDescData = file_turborepo_ffi_messages_proto_rawDesc +) + +func file_turborepo_ffi_messages_proto_rawDescGZIP() []byte { + file_turborepo_ffi_messages_proto_rawDescOnce.Do(func() { + file_turborepo_ffi_messages_proto_rawDescData = protoimpl.X.CompressGZIP(file_turborepo_ffi_messages_proto_rawDescData) + }) + return file_turborepo_ffi_messages_proto_rawDescData +} + +var file_turborepo_ffi_messages_proto_msgTypes = make([]protoimpl.MessageInfo, 16) +var file_turborepo_ffi_messages_proto_goTypes = []interface{}{ + (*TurboDataDirResp)(nil), // 0: TurboDataDirResp + (*GlobReq)(nil), // 1: GlobReq + (*GlobResp)(nil), // 2: GlobResp + (*GlobRespList)(nil), // 3: GlobRespList + (*ChangedFilesReq)(nil), // 4: ChangedFilesReq + (*ChangedFilesResp)(nil), // 5: ChangedFilesResp + (*ChangedFilesList)(nil), // 6: ChangedFilesList + (*PreviousContentReq)(nil), // 7: PreviousContentReq + (*PreviousContentResp)(nil), // 8: PreviousContentResp + (*TransitiveDepsRequest)(nil), // 9: TransitiveDepsRequest + (*TransitiveDepsResponse)(nil), // 10: TransitiveDepsResponse + (*LockfilePackage)(nil), // 11: LockfilePackage + (*LockfilePackageList)(nil), // 12: LockfilePackageList + (*SubgraphRequest)(nil), // 13: SubgraphRequest + (*SubgraphResponse)(nil), // 14: SubgraphResponse + nil, // 15: TransitiveDepsRequest.UnresolvedDepsEntry +} +var file_turborepo_ffi_messages_proto_depIdxs = []int32{ + 3, // 0: GlobResp.files:type_name -> GlobRespList + 6, // 1: ChangedFilesResp.files:type_name -> ChangedFilesList + 15, // 2: TransitiveDepsRequest.unresolved_deps:type_name -> TransitiveDepsRequest.UnresolvedDepsEntry + 12, // 3: TransitiveDepsResponse.packages:type_name -> 
LockfilePackageList + 11, // 4: LockfilePackageList.list:type_name -> LockfilePackage + 5, // [5:5] is the sub-list for method output_type + 5, // [5:5] is the sub-list for method input_type + 5, // [5:5] is the sub-list for extension type_name + 5, // [5:5] is the sub-list for extension extendee + 0, // [0:5] is the sub-list for field type_name +} + +func init() { file_turborepo_ffi_messages_proto_init() } +func file_turborepo_ffi_messages_proto_init() { + if File_turborepo_ffi_messages_proto != nil { + return + } + if !protoimpl.UnsafeEnabled { + file_turborepo_ffi_messages_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*TurboDataDirResp); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_turborepo_ffi_messages_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*GlobReq); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_turborepo_ffi_messages_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*GlobResp); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_turborepo_ffi_messages_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*GlobRespList); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_turborepo_ffi_messages_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ChangedFilesReq); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_turborepo_ffi_messages_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ChangedFilesResp); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_turborepo_ffi_messages_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ChangedFilesList); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_turborepo_ffi_messages_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*PreviousContentReq); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_turborepo_ffi_messages_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*PreviousContentResp); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_turborepo_ffi_messages_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*TransitiveDepsRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_turborepo_ffi_messages_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*TransitiveDepsResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + 
file_turborepo_ffi_messages_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*LockfilePackage); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_turborepo_ffi_messages_proto_msgTypes[12].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*LockfilePackageList); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_turborepo_ffi_messages_proto_msgTypes[13].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*SubgraphRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_turborepo_ffi_messages_proto_msgTypes[14].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*SubgraphResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + file_turborepo_ffi_messages_proto_msgTypes[2].OneofWrappers = []interface{}{ + (*GlobResp_Files)(nil), + (*GlobResp_Error)(nil), + } + file_turborepo_ffi_messages_proto_msgTypes[4].OneofWrappers = []interface{}{} + file_turborepo_ffi_messages_proto_msgTypes[5].OneofWrappers = []interface{}{ + (*ChangedFilesResp_Files)(nil), + (*ChangedFilesResp_Error)(nil), + } + file_turborepo_ffi_messages_proto_msgTypes[8].OneofWrappers = []interface{}{ + (*PreviousContentResp_Content)(nil), + (*PreviousContentResp_Error)(nil), + } + file_turborepo_ffi_messages_proto_msgTypes[10].OneofWrappers = []interface{}{ + (*TransitiveDepsResponse_Packages)(nil), + (*TransitiveDepsResponse_Error)(nil), + } + file_turborepo_ffi_messages_proto_msgTypes[14].OneofWrappers = []interface{}{ + (*SubgraphResponse_Contents)(nil), + (*SubgraphResponse_Error)(nil), + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_turborepo_ffi_messages_proto_rawDesc, + NumEnums: 0, + NumMessages: 16, + NumExtensions: 0, + NumServices: 0, + }, + GoTypes: file_turborepo_ffi_messages_proto_goTypes, + DependencyIndexes: file_turborepo_ffi_messages_proto_depIdxs, + MessageInfos: file_turborepo_ffi_messages_proto_msgTypes, + }.Build() + File_turborepo_ffi_messages_proto = out.File + file_turborepo_ffi_messages_proto_rawDesc = nil + file_turborepo_ffi_messages_proto_goTypes = nil + file_turborepo_ffi_messages_proto_depIdxs = nil +} diff --git a/cli/internal/filewatcher/backend.go b/cli/internal/filewatcher/backend.go new file mode 100644 index 0000000..b8b7fa8 --- /dev/null +++ b/cli/internal/filewatcher/backend.go @@ -0,0 +1,209 @@ +//go:build !darwin +// +build !darwin + +package filewatcher + +import ( + "fmt" + "os" + "path/filepath" + "sync" + + "github.com/fsnotify/fsnotify" + "github.com/hashicorp/go-hclog" + "github.com/karrick/godirwalk" + "github.com/pkg/errors" + "github.com/vercel/turbo/cli/internal/doublestar" + "github.com/vercel/turbo/cli/internal/fs" + "github.com/vercel/turbo/cli/internal/turbopath" +) + +// watchAddMode is used to indicate whether watchRecursively should synthesize events +// for existing files. 
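+// The initial AddRoot walk passes dontSynthesizeEvents; onFileAdded passes
+// synthesizeEvents so that the contents of a directory created after the
+// watch was established still produce FileAdded events.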
+type watchAddMode int
+
+const (
+	dontSynthesizeEvents watchAddMode = iota
+	synthesizeEvents
+)
+
+type fsNotifyBackend struct {
+	watcher *fsnotify.Watcher
+	events  chan Event
+	errors  chan error
+	logger  hclog.Logger
+
+	mu          sync.Mutex
+	allExcludes []string
+	closed      bool
+}
+
+func (f *fsNotifyBackend) Events() <-chan Event {
+	return f.events
+}
+
+func (f *fsNotifyBackend) Errors() <-chan error {
+	return f.errors
+}
+
+func (f *fsNotifyBackend) Close() error {
+	f.mu.Lock()
+	defer f.mu.Unlock()
+	if f.closed {
+		return ErrFilewatchingClosed
+	}
+	f.closed = true
+	close(f.events)
+	close(f.errors)
+	if err := f.watcher.Close(); err != nil {
+		return err
+	}
+	return nil
+}
+
+// onFileAdded helps us paper over cross-platform inconsistencies in fsnotify.
+// Some fsnotify backends automatically add the contents of directories. Some do
+// not. Adding a watch is idempotent, so anytime any file we care about gets added,
+// watch it.
+func (f *fsNotifyBackend) onFileAdded(name turbopath.AbsoluteSystemPath) error {
+	info, err := name.Lstat()
+	if err != nil {
+		if errors.Is(err, os.ErrNotExist) {
+			// We can race with a file being added and removed. Ignore it.
+			return nil
+		}
+		return errors.Wrapf(err, "error checking lstat of new file %v", name)
+	}
+	if info.IsDir() {
+		// If a directory has been added, we need to synthesize events for everything it contains
+		if err := f.watchRecursively(name, []string{}, synthesizeEvents); err != nil {
+			return errors.Wrapf(err, "failed recursive watch of %v", name)
+		}
+	} else {
+		if err := f.watcher.Add(name.ToString()); err != nil {
+			return errors.Wrapf(err, "failed adding watch to %v", name)
+		}
+	}
+	return nil
+}
+
+func (f *fsNotifyBackend) watchRecursively(root turbopath.AbsoluteSystemPath, excludePatterns []string, addMode watchAddMode) error {
+	f.mu.Lock()
+	defer f.mu.Unlock()
+	err := fs.WalkMode(root.ToString(), func(name string, isDir bool, info os.FileMode) error {
+		for _, excludePattern := range excludePatterns {
+			excluded, err := doublestar.Match(excludePattern, filepath.ToSlash(name))
+			if err != nil {
+				return err
+			}
+			if excluded {
+				return godirwalk.SkipThis
+			}
+		}
+		if info.IsDir() && (info&os.ModeSymlink == 0) {
+			if err := f.watcher.Add(name); err != nil {
+				return errors.Wrapf(err, "failed adding watch to %v", name)
+			}
+			f.logger.Debug(fmt.Sprintf("watching directory %v", name))
+		}
+		if addMode == synthesizeEvents {
+			f.events <- Event{
+				Path:      fs.AbsoluteSystemPathFromUpstream(name),
+				EventType: FileAdded,
+			}
+		}
+		return nil
+	})
+	if err != nil {
+		return err
+	}
+	f.allExcludes = append(f.allExcludes, excludePatterns...)
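+	// The accumulated exclude patterns are consulted again in Start, which
+	// prunes any already-watched directory that matches one of them.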
+ + return nil +} + +func (f *fsNotifyBackend) watch() { +outer: + for { + select { + case ev, ok := <-f.watcher.Events: + if !ok { + break outer + } + eventType := toFileEvent(ev.Op) + path := fs.AbsoluteSystemPathFromUpstream(ev.Name) + if eventType == FileAdded { + if err := f.onFileAdded(path); err != nil { + f.errors <- err + } + } + f.events <- Event{ + Path: path, + EventType: eventType, + } + case err, ok := <-f.watcher.Errors: + if !ok { + break outer + } + f.errors <- err + } + } +} + +var _modifiedMask = fsnotify.Chmod | fsnotify.Write + +func toFileEvent(op fsnotify.Op) FileEvent { + if op&fsnotify.Create != 0 { + return FileAdded + } else if op&fsnotify.Remove != 0 { + return FileDeleted + } else if op&_modifiedMask != 0 { + return FileModified + } else if op&fsnotify.Rename != 0 { + return FileRenamed + } + return FileOther +} + +func (f *fsNotifyBackend) Start() error { + f.mu.Lock() + defer f.mu.Unlock() + if f.closed { + return ErrFilewatchingClosed + } + for _, dir := range f.watcher.WatchList() { + for _, excludePattern := range f.allExcludes { + excluded, err := doublestar.Match(excludePattern, filepath.ToSlash(dir)) + if err != nil { + return err + } + if excluded { + if err := f.watcher.Remove(dir); err != nil { + return err + } + } + } + } + go f.watch() + return nil +} + +func (f *fsNotifyBackend) AddRoot(root turbopath.AbsoluteSystemPath, excludePatterns ...string) error { + // We don't synthesize events for the initial watch + return f.watchRecursively(root, excludePatterns, dontSynthesizeEvents) +} + +// GetPlatformSpecificBackend returns a filewatching backend appropriate for the OS we are +// running on. +func GetPlatformSpecificBackend(logger hclog.Logger) (Backend, error) { + watcher, err := fsnotify.NewWatcher() + if err != nil { + return nil, err + } + return &fsNotifyBackend{ + watcher: watcher, + events: make(chan Event), + errors: make(chan error), + logger: logger.Named("fsnotify"), + }, nil +} diff --git a/cli/internal/filewatcher/backend_darwin.go b/cli/internal/filewatcher/backend_darwin.go new file mode 100644 index 0000000..4c029c4 --- /dev/null +++ b/cli/internal/filewatcher/backend_darwin.go @@ -0,0 +1,220 @@ +//go:build darwin +// +build darwin + +package filewatcher + +import ( + "fmt" + "strings" + "sync" + "time" + + "github.com/pkg/errors" + "github.com/yookoala/realpath" + + "github.com/fsnotify/fsevents" + "github.com/hashicorp/go-hclog" + "github.com/vercel/turbo/cli/internal/doublestar" + "github.com/vercel/turbo/cli/internal/fs" + "github.com/vercel/turbo/cli/internal/turbopath" +) + +type fseventsBackend struct { + events chan Event + errors chan error + logger hclog.Logger + mu sync.Mutex + streams []*fsevents.EventStream + closed bool +} + +func (f *fseventsBackend) Events() <-chan Event { + return f.events +} + +func (f *fseventsBackend) Errors() <-chan error { + return f.errors +} + +func (f *fseventsBackend) Close() error { + f.mu.Lock() + defer f.mu.Unlock() + if f.closed { + return ErrFilewatchingClosed + } + f.closed = true + for _, stream := range f.streams { + stream.Stop() + } + close(f.events) + close(f.errors) + return nil +} + +func (f *fseventsBackend) Start() error { + return nil +} + +var ( + _eventLatency = 10 * time.Millisecond + _cookieTimeout = 500 * time.Millisecond +) + +// AddRoot starts watching a new directory hierarchy. Events matching the provided excludePatterns +// will not be forwarded. 
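+//
+// A minimal usage sketch (illustrative only; the paths are assumptions):
+//
+//	backend, _ := GetPlatformSpecificBackend(logger)
+//	_ = backend.AddRoot(fs.AbsoluteSystemPathFromUpstream("/repo"), "/repo/node_modules/**")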
+func (f *fseventsBackend) AddRoot(someRoot turbopath.AbsoluteSystemPath, excludePatterns ...string) error {
+	// We need to resolve the real path to the hierarchy that we are going to watch
+	realRoot, err := realpath.Realpath(someRoot.ToString())
+	if err != nil {
+		return err
+	}
+	root := fs.AbsoluteSystemPathFromUpstream(realRoot)
+	dev, err := fsevents.DeviceForPath(root.ToString())
+	if err != nil {
+		return err
+	}
+
+	// Optimistically set up and start a stream, assuming the watch is still valid.
+	s := &fsevents.EventStream{
+		Paths:   []string{root.ToString()},
+		Latency: _eventLatency,
+		Device:  dev,
+		Flags:   fsevents.FileEvents | fsevents.WatchRoot,
+	}
+	s.Start()
+	events := s.Events
+
+	// fsevents delivers events for all existing files first, so use a cookie to detect when we're ready for new events
+	if err := waitForCookie(root, events, _cookieTimeout); err != nil {
+		s.Stop()
+		return err
+	}
+
+	// Now try to persist the stream.
+	f.mu.Lock()
+	defer f.mu.Unlock()
+	if f.closed {
+		s.Stop()
+		return ErrFilewatchingClosed
+	}
+	f.streams = append(f.streams, s)
+	f.logger.Debug(fmt.Sprintf("watching root %v, excluding %v", root, excludePatterns))
+
+	go func() {
+		for evs := range events {
+			for _, ev := range evs {
+				isExcluded := false
+
+				// 1. Ensure that we have a `/`-prefixed path from the event.
+				var eventPath string
+				if !strings.HasPrefix(ev.Path, "/") {
+					eventPath = "/" + ev.Path
+				} else {
+					eventPath = ev.Path
+				}
+
+				// 2. We're getting events from the real path, but we need to translate
+				// back to the path we were provided since that's what the caller will
+				// expect in terms of event paths.
+				watchRootRelativePath := eventPath[len(realRoot):]
+				processedEventPath := someRoot.UntypedJoin(watchRootRelativePath)
+
+				// 3. Compare the event to all exclude patterns, short-circuit if we know
+				// we are not watching this file.
+				processedPathString := processedEventPath.ToString() // loop invariant
+				for _, pattern := range excludePatterns {
+					matches, err := doublestar.Match(pattern, processedPathString)
+					if err != nil {
+						f.errors <- err
+					} else if matches {
+						isExcluded = true
+						break
+					}
+				}
+
+				// 4. Report the file events we care about.
+				if !isExcluded {
+					f.events <- Event{
+						Path:      processedEventPath,
+						EventType: toFileEvent(ev.Flags),
+					}
+				}
+			}
+		}
+	}()
+
+	return nil
+}
+
+func waitForCookie(root turbopath.AbsoluteSystemPath, events <-chan []fsevents.Event, timeout time.Duration) error {
+	// This cookie needs to be in a location that we're watching, and at this point we can't guarantee
+	// what the root is, or if something like "node_modules/.cache/turbo" would make sense. As a compromise, ensure
+	// that we clean it up even in the event of a failure.
+	cookiePath := root.UntypedJoin(".turbo-cookie")
+	if err := cookiePath.WriteFile([]byte("cookie"), 0755); err != nil {
+		return err
+	}
+	expected := cookiePath.ToString()[1:] // trim leading slash
+	if err := waitForEvent(events, expected, fsevents.ItemCreated, timeout); err != nil {
+		// Attempt to not leave the cookie file lying around.
+		// Ignore the error, since there's not much we can do with it.
+ _ = cookiePath.Remove() + return err + } + if err := cookiePath.Remove(); err != nil { + return err + } + if err := waitForEvent(events, expected, fsevents.ItemRemoved, timeout); err != nil { + return err + } + return nil +} + +func waitForEvent(events <-chan []fsevents.Event, path string, flag fsevents.EventFlags, timeout time.Duration) error { + ch := make(chan struct{}) + go func() { + for evs := range events { + for _, ev := range evs { + if ev.Path == path && ev.Flags&flag != 0 { + close(ch) + return + } + } + } + }() + select { + case <-time.After(timeout): + return errors.Wrap(ErrFailedToStart, "timed out waiting for initial fsevents cookie") + case <-ch: + return nil + } +} + +var _modifiedMask = fsevents.ItemModified | fsevents.ItemInodeMetaMod | fsevents.ItemFinderInfoMod | fsevents.ItemChangeOwner | fsevents.ItemXattrMod + +func toFileEvent(flags fsevents.EventFlags) FileEvent { + if flags&fsevents.ItemCreated != 0 { + return FileAdded + } else if flags&fsevents.ItemRemoved != 0 { + return FileDeleted + } else if flags&_modifiedMask != 0 { + return FileModified + } else if flags&fsevents.ItemRenamed != 0 { + return FileRenamed + } else if flags&fsevents.RootChanged != 0 { + // count this as a delete, something affected the path to the root + // of the stream + return FileDeleted + } + return FileOther +} + +// GetPlatformSpecificBackend returns a filewatching backend appropriate for the OS we are +// running on. +func GetPlatformSpecificBackend(logger hclog.Logger) (Backend, error) { + return &fseventsBackend{ + events: make(chan Event), + errors: make(chan error), + logger: logger.Named("fsevents"), + }, nil +} diff --git a/cli/internal/filewatcher/cookie.go b/cli/internal/filewatcher/cookie.go new file mode 100644 index 0000000..7a4931e --- /dev/null +++ b/cli/internal/filewatcher/cookie.go @@ -0,0 +1,160 @@ +package filewatcher + +import ( + "fmt" + "os" + "sync" + "sync/atomic" + "time" + + "github.com/pkg/errors" + "github.com/vercel/turbo/cli/internal/fs" + "github.com/vercel/turbo/cli/internal/turbopath" +) + +// CookieWaiter is the interface used by clients that need to wait +// for a roundtrip through the filewatching API. +type CookieWaiter interface { + WaitForCookie() error +} + +var ( + // ErrCookieTimeout is returned when we did not see our cookie file within the given time constraints + ErrCookieTimeout = errors.New("timed out waiting for cookie") + // ErrCookieWatchingClosed is returned when the underlying filewatching has been closed. + ErrCookieWatchingClosed = errors.New("filewatching has closed, cannot watch cookies") +) + +// CookieJar is used for tracking roundtrips through the filesystem watching API +type CookieJar struct { + timeout time.Duration + dir turbopath.AbsoluteSystemPath + serial uint64 + mu sync.Mutex + cookies map[turbopath.AbsoluteSystemPath]chan error + closed bool +} + +// NewCookieJar returns a new instance of a CookieJar. There should only ever be a single +// instance live per cookieDir, since they expect to have full control over that directory. 
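+//
+// A typical wiring (illustrative; the cookie directory and timeout are assumptions):
+//
+//	jar, _ := NewCookieJar(repoRoot.UntypedJoin(".turbo", "cookies"), 500*time.Millisecond)
+//	fw.AddClient(jar) // jar implements FileWatchClient
+//	_ = jar.WaitForCookie()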
+func NewCookieJar(cookieDir turbopath.AbsoluteSystemPath, timeout time.Duration) (*CookieJar, error) { + if err := cookieDir.RemoveAll(); err != nil { + return nil, err + } + if err := cookieDir.MkdirAll(0775); err != nil { + return nil, err + } + return &CookieJar{ + timeout: timeout, + dir: cookieDir, + cookies: make(map[turbopath.AbsoluteSystemPath]chan error), + }, nil +} + +// removeAllCookiesWithError sends the error to every channel, closes every channel, +// and attempts to remove every cookie file. Must be called while the cj.mu is held. +// If the cookie jar is going to be reused afterwards, the cookies map must be reinitialized. +func (cj *CookieJar) removeAllCookiesWithError(err error) { + for p, ch := range cj.cookies { + _ = p.Remove() + ch <- err + close(ch) + } + // Drop all of the references so they can be cleaned up + cj.cookies = nil +} + +// OnFileWatchClosed handles the case where filewatching had to close for some reason +// We send an error to all of our cookies and stop accepting new ones. +func (cj *CookieJar) OnFileWatchClosed() { + cj.mu.Lock() + defer cj.mu.Unlock() + cj.closed = true + cj.removeAllCookiesWithError(ErrCookieWatchingClosed) + +} + +// OnFileWatchError handles when filewatching has encountered an error. +// In the error case, we remove all cookies and send them errors. We remain +// available for later cookies. +func (cj *CookieJar) OnFileWatchError(err error) { + // We are now in an inconsistent state. Drop all of our cookies, + // but we still allow new ones to be created + cj.mu.Lock() + defer cj.mu.Unlock() + cj.removeAllCookiesWithError(err) + cj.cookies = make(map[turbopath.AbsoluteSystemPath]chan error) +} + +// OnFileWatchEvent determines if the specified event is relevant +// for cookie watching and notifies the appropriate cookie if so. +func (cj *CookieJar) OnFileWatchEvent(ev Event) { + if ev.EventType == FileAdded { + isCookie, err := fs.DirContainsPath(cj.dir.ToStringDuringMigration(), ev.Path.ToStringDuringMigration()) + if err != nil { + cj.OnFileWatchError(errors.Wrapf(err, "failed to determine if path is a cookie: %v", ev.Path)) + } else if isCookie { + cj.notifyCookie(ev.Path, nil) + } + } +} + +// WaitForCookie touches a unique file, then waits for it to show up in filesystem notifications. +// This provides a theoretical bound on filesystem operations, although it's possible +// that underlying filewatch mechanisms don't respect this ordering. +func (cj *CookieJar) WaitForCookie() error { + // we're only ever going to send a single error on the channel, add a buffer so that we never + // block sending it. + ch := make(chan error, 1) + serial := atomic.AddUint64(&cj.serial, 1) + cookiePath := cj.dir.UntypedJoin(fmt.Sprintf("%v.cookie", serial)) + cj.mu.Lock() + if cj.closed { + cj.mu.Unlock() + return ErrCookieWatchingClosed + } + cj.cookies[cookiePath] = ch + cj.mu.Unlock() + if err := touchCookieFile(cookiePath); err != nil { + cj.notifyCookie(cookiePath, err) + return err + } + select { + case <-time.After(cj.timeout): + return ErrCookieTimeout + case err, ok := <-ch: + if !ok { + // the channel closed without an error, we're all set + return nil + } + // the channel didn't close, meaning we got some error. + // We don't need to wait on channel close, it's going to be closed + // immediately by whoever sent the error. 
Return the error directly + return err + } +} + +func (cj *CookieJar) notifyCookie(cookie turbopath.AbsoluteSystemPath, err error) { + cj.mu.Lock() + ch, ok := cj.cookies[cookie] + // delete is a no-op if the key doesn't exist + delete(cj.cookies, cookie) + cj.mu.Unlock() + if ok { + if err != nil { + ch <- err + } + close(ch) + } +} + +func touchCookieFile(cookie turbopath.AbsoluteSystemPath) error { + f, err := cookie.OpenFile(os.O_CREATE|os.O_TRUNC|os.O_WRONLY, 0700) + if err != nil { + return err + } + if err := f.Close(); err != nil { + return err + } + return nil +} diff --git a/cli/internal/filewatcher/cookie_test.go b/cli/internal/filewatcher/cookie_test.go new file mode 100644 index 0000000..96241b4 --- /dev/null +++ b/cli/internal/filewatcher/cookie_test.go @@ -0,0 +1,130 @@ +package filewatcher + +import ( + "testing" + "time" + + "github.com/hashicorp/go-hclog" + "github.com/pkg/errors" + "github.com/vercel/turbo/cli/internal/fs" + "gotest.tools/v3/assert" +) + +func TestWaitForCookie(t *testing.T) { + logger := hclog.Default() + cookieDir := fs.AbsoluteSystemPathFromUpstream(t.TempDir()) + repoRoot := fs.AbsoluteSystemPathFromUpstream(t.TempDir()) + + jar, err := NewCookieJar(cookieDir, 5*time.Second) + assert.NilError(t, err, "NewCookieJar") + + watcher, err := GetPlatformSpecificBackend(logger) + assert.NilError(t, err, "NewWatcher") + fw := New(logger, repoRoot, watcher) + err = fw.Start() + assert.NilError(t, err, "Start") + fw.AddClient(jar) + err = fw.AddRoot(cookieDir) + assert.NilError(t, err, "Add") + + err = jar.WaitForCookie() + assert.NilError(t, err, "failed to roundtrip cookie") +} + +func TestWaitForCookieAfterClose(t *testing.T) { + logger := hclog.Default() + cookieDir := fs.AbsoluteSystemPathFromUpstream(t.TempDir()) + repoRoot := fs.AbsoluteSystemPathFromUpstream(t.TempDir()) + + jar, err := NewCookieJar(cookieDir, 5*time.Second) + assert.NilError(t, err, "NewCookieJar") + + watcher, err := GetPlatformSpecificBackend(logger) + assert.NilError(t, err, "NewWatcher") + fw := New(logger, repoRoot, watcher) + err = fw.Start() + assert.NilError(t, err, "Start") + fw.AddClient(jar) + err = fw.AddRoot(cookieDir) + assert.NilError(t, err, "Add") + + err = fw.Close() + assert.NilError(t, err, "Close") + err = jar.WaitForCookie() + assert.ErrorIs(t, err, ErrCookieWatchingClosed) +} + +func TestWaitForCookieTimeout(t *testing.T) { + logger := hclog.Default() + cookieDir := fs.AbsoluteSystemPathFromUpstream(t.TempDir()) + repoRoot := fs.AbsoluteSystemPathFromUpstream(t.TempDir()) + + jar, err := NewCookieJar(cookieDir, 10*time.Millisecond) + assert.NilError(t, err, "NewCookieJar") + + watcher, err := GetPlatformSpecificBackend(logger) + assert.NilError(t, err, "NewWatcher") + fw := New(logger, repoRoot, watcher) + err = fw.Start() + assert.NilError(t, err, "Start") + fw.AddClient(jar) + + // NOTE: don't call fw.Add here so that no file event gets delivered + + err = jar.WaitForCookie() + assert.ErrorIs(t, err, ErrCookieTimeout) +} + +func TestWaitForCookieWithError(t *testing.T) { + logger := hclog.Default() + cookieDir := fs.AbsoluteSystemPathFromUpstream(t.TempDir()) + repoRoot := fs.AbsoluteSystemPathFromUpstream(t.TempDir()) + + jar, err := NewCookieJar(cookieDir, 10*time.Second) + assert.NilError(t, err, "NewCookieJar") + + watcher, err := GetPlatformSpecificBackend(logger) + assert.NilError(t, err, "NewWatcher") + fw := New(logger, repoRoot, watcher) + err = fw.Start() + assert.NilError(t, err, "Start") + fw.AddClient(jar) + + // NOTE: don't call fw.Add here so 
that no file event gets delivered + myErr := errors.New("an error") + ch := make(chan error) + go func() { + if err := jar.WaitForCookie(); err != nil { + ch <- err + } + close(ch) + }() + // wait for the cookie to be registered in the jar + for { + found := false + jar.mu.Lock() + if len(jar.cookies) == 1 { + found = true + } + jar.mu.Unlock() + if found { + break + } + <-time.After(10 * time.Millisecond) + } + jar.OnFileWatchError(myErr) + + err, ok := <-ch + if !ok { + t.Error("expected to get an error from cookie watching") + } + assert.ErrorIs(t, err, myErr) + + // ensure waiting for a new cookie still works. + // Add the filewatch to allow cookies work normally + err = fw.AddRoot(cookieDir) + assert.NilError(t, err, "Add") + + err = jar.WaitForCookie() + assert.NilError(t, err, "WaitForCookie") +} diff --git a/cli/internal/filewatcher/filewatcher.go b/cli/internal/filewatcher/filewatcher.go new file mode 100644 index 0000000..4f79495 --- /dev/null +++ b/cli/internal/filewatcher/filewatcher.go @@ -0,0 +1,167 @@ +// Package filewatcher is used to handle watching for file changes inside the monorepo +package filewatcher + +import ( + "path/filepath" + "strings" + "sync" + + "github.com/hashicorp/go-hclog" + "github.com/pkg/errors" + "github.com/vercel/turbo/cli/internal/turbopath" +) + +// _ignores is the set of paths we exempt from file-watching +var _ignores = []string{".git", "node_modules"} + +// FileWatchClient defines the callbacks used by the file watching loop. +// All methods are called from the same goroutine so they: +// 1) do not need synchronization +// 2) should minimize the work they are doing when called, if possible +type FileWatchClient interface { + OnFileWatchEvent(ev Event) + OnFileWatchError(err error) + OnFileWatchClosed() +} + +// FileEvent is an enum covering the kinds of things that can happen +// to files that we might be interested in +type FileEvent int + +const ( + // FileAdded - this is a new file + FileAdded FileEvent = iota + 1 + // FileDeleted - this file has been removed + FileDeleted + // FileModified - this file has been changed in some way + FileModified + // FileRenamed - a file's name has changed + FileRenamed + // FileOther - some other backend-specific event has happened + FileOther +) + +var ( + // ErrFilewatchingClosed is returned when filewatching has been closed + ErrFilewatchingClosed = errors.New("Close() has already been called for filewatching") + // ErrFailedToStart is returned when filewatching fails to start up + ErrFailedToStart = errors.New("filewatching failed to start") +) + +// Event is the backend-independent information about a file change +type Event struct { + Path turbopath.AbsoluteSystemPath + EventType FileEvent +} + +// Backend is the interface that describes what an underlying filesystem watching backend +// must provide. +type Backend interface { + AddRoot(root turbopath.AbsoluteSystemPath, excludePatterns ...string) error + Events() <-chan Event + Errors() <-chan error + Close() error + Start() error +} + +// FileWatcher handles watching all of the files in the monorepo. +// We currently ignore .git and top-level node_modules. We can revisit +// if necessary. 
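+// A FileWatcher fans a single Backend out to any number of registered
+// FileWatchClients (see AddClient).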
+type FileWatcher struct { + backend Backend + + logger hclog.Logger + repoRoot turbopath.AbsoluteSystemPath + excludePattern string + + clientsMu sync.RWMutex + clients []FileWatchClient + closed bool +} + +// New returns a new FileWatcher instance +func New(logger hclog.Logger, repoRoot turbopath.AbsoluteSystemPath, backend Backend) *FileWatcher { + excludes := make([]string, len(_ignores)) + for i, ignore := range _ignores { + excludes[i] = filepath.ToSlash(repoRoot.UntypedJoin(ignore).ToString() + "/**") + } + excludePattern := "{" + strings.Join(excludes, ",") + "}" + return &FileWatcher{ + backend: backend, + logger: logger, + repoRoot: repoRoot, + excludePattern: excludePattern, + } +} + +// Close shuts down filewatching +func (fw *FileWatcher) Close() error { + return fw.backend.Close() +} + +// Start recursively adds all directories from the repo root, redacts the excluded ones, +// then fires off a goroutine to respond to filesystem events +func (fw *FileWatcher) Start() error { + if err := fw.backend.AddRoot(fw.repoRoot, fw.excludePattern); err != nil { + return err + } + if err := fw.backend.Start(); err != nil { + return err + } + go fw.watch() + return nil +} + +// AddRoot registers the root a filesystem hierarchy to be watched for changes. Events are *not* +// fired for existing files when AddRoot is called, only for subsequent changes. +// NOTE: if it appears helpful, we could change this behavior so that we provide a stream of initial +// events. +func (fw *FileWatcher) AddRoot(root turbopath.AbsoluteSystemPath, excludePatterns ...string) error { + return fw.backend.AddRoot(root, excludePatterns...) +} + +// watch is the main file-watching loop. Watching is not recursive, +// so when new directories are added, they are manually recursively watched. +func (fw *FileWatcher) watch() { +outer: + for { + select { + case ev, ok := <-fw.backend.Events(): + if !ok { + fw.logger.Info("Events channel closed. Exiting watch loop") + break outer + } + fw.clientsMu.RLock() + for _, client := range fw.clients { + client.OnFileWatchEvent(ev) + } + fw.clientsMu.RUnlock() + case err, ok := <-fw.backend.Errors(): + if !ok { + fw.logger.Info("Errors channel closed. 
Exiting watch loop") + break outer + } + fw.clientsMu.RLock() + for _, client := range fw.clients { + client.OnFileWatchError(err) + } + fw.clientsMu.RUnlock() + } + } + fw.clientsMu.Lock() + fw.closed = true + for _, client := range fw.clients { + client.OnFileWatchClosed() + } + fw.clientsMu.Unlock() +} + +// AddClient registers a client for filesystem events +func (fw *FileWatcher) AddClient(client FileWatchClient) { + fw.clientsMu.Lock() + defer fw.clientsMu.Unlock() + fw.clients = append(fw.clients, client) + if fw.closed { + client.OnFileWatchClosed() + } +} diff --git a/cli/internal/filewatcher/filewatcher_test.go b/cli/internal/filewatcher/filewatcher_test.go new file mode 100644 index 0000000..72b48ba --- /dev/null +++ b/cli/internal/filewatcher/filewatcher_test.go @@ -0,0 +1,152 @@ +package filewatcher + +import ( + "fmt" + "sync" + "testing" + "time" + + "github.com/hashicorp/go-hclog" + "github.com/vercel/turbo/cli/internal/fs" + "github.com/vercel/turbo/cli/internal/turbopath" + "gotest.tools/v3/assert" +) + +type testClient struct { + mu sync.Mutex + createEvents []Event + notify chan Event +} + +func (c *testClient) OnFileWatchEvent(ev Event) { + if ev.EventType == FileAdded { + c.mu.Lock() + defer c.mu.Unlock() + c.createEvents = append(c.createEvents, ev) + c.notify <- ev + } +} + +func (c *testClient) OnFileWatchError(err error) {} + +func (c *testClient) OnFileWatchClosed() {} + +func expectFilesystemEvent(t *testing.T, ch <-chan Event, expected Event) { + // mark this method as a helper + t.Helper() + timeout := time.After(1 * time.Second) + for { + select { + case ev := <-ch: + t.Logf("got event %v", ev) + if ev.Path == expected.Path && ev.EventType == expected.EventType { + return + } + case <-timeout: + t.Errorf("Timed out waiting for filesystem event at %v", expected.Path) + return + } + } +} + +func expectNoFilesystemEvent(t *testing.T, ch <-chan Event) { + // mark this method as a helper + t.Helper() + select { + case ev, ok := <-ch: + if ok { + t.Errorf("got unexpected filesystem event %v", ev) + } else { + t.Error("filewatching closed unexpectedly") + } + case <-time.After(500 * time.Millisecond): + return + } +} + +func expectWatching(t *testing.T, c *testClient, dirs []turbopath.AbsoluteSystemPath) { + t.Helper() + now := time.Now() + filename := fmt.Sprintf("test-%v", now.UnixMilli()) + for _, dir := range dirs { + file := dir.UntypedJoin(filename) + err := file.WriteFile([]byte("hello"), 0755) + assert.NilError(t, err, "WriteFile") + expectFilesystemEvent(t, c.notify, Event{ + Path: file, + EventType: FileAdded, + }) + } +} + +func TestFileWatching(t *testing.T) { + logger := hclog.Default() + logger.SetLevel(hclog.Debug) + repoRoot := fs.AbsoluteSystemPathFromUpstream(t.TempDir()) + err := repoRoot.UntypedJoin(".git").MkdirAll(0775) + assert.NilError(t, err, "MkdirAll") + err = repoRoot.UntypedJoin("node_modules", "some-dep").MkdirAll(0775) + assert.NilError(t, err, "MkdirAll") + err = repoRoot.UntypedJoin("parent", "child").MkdirAll(0775) + assert.NilError(t, err, "MkdirAll") + err = repoRoot.UntypedJoin("parent", "sibling").MkdirAll(0775) + assert.NilError(t, err, "MkdirAll") + + // Directory layout: + // / + // .git/ + // node_modules/ + // some-dep/ + // parent/ + // child/ + // sibling/ + + watcher, err := GetPlatformSpecificBackend(logger) + assert.NilError(t, err, "GetPlatformSpecificBackend") + fw := New(logger, repoRoot, watcher) + err = fw.Start() + assert.NilError(t, err, "fw.Start") + + // Add a client + ch := make(chan Event, 1) + c := 
&testClient{ + notify: ch, + } + fw.AddClient(c) + expectedWatching := []turbopath.AbsoluteSystemPath{ + repoRoot, + repoRoot.UntypedJoin("parent"), + repoRoot.UntypedJoin("parent", "child"), + repoRoot.UntypedJoin("parent", "sibling"), + } + expectWatching(t, c, expectedWatching) + + fooPath := repoRoot.UntypedJoin("parent", "child", "foo") + err = fooPath.WriteFile([]byte("hello"), 0644) + assert.NilError(t, err, "WriteFile") + expectFilesystemEvent(t, ch, Event{ + EventType: FileAdded, + Path: fooPath, + }) + + deepPath := repoRoot.UntypedJoin("parent", "sibling", "deep", "path") + err = deepPath.MkdirAll(0775) + assert.NilError(t, err, "MkdirAll") + // We'll catch an event for "deep", but not "deep/path" since + // we don't have a recursive watch + expectFilesystemEvent(t, ch, Event{ + Path: repoRoot.UntypedJoin("parent", "sibling", "deep"), + EventType: FileAdded, + }) + expectFilesystemEvent(t, ch, Event{ + Path: repoRoot.UntypedJoin("parent", "sibling", "deep", "path"), + EventType: FileAdded, + }) + expectedWatching = append(expectedWatching, deepPath, repoRoot.UntypedJoin("parent", "sibling", "deep")) + expectWatching(t, c, expectedWatching) + + gitFilePath := repoRoot.UntypedJoin(".git", "git-file") + err = gitFilePath.WriteFile([]byte("nope"), 0644) + assert.NilError(t, err, "WriteFile") + expectNoFilesystemEvent(t, ch) +} diff --git a/cli/internal/fs/copy_file.go b/cli/internal/fs/copy_file.go new file mode 100644 index 0000000..e7619de --- /dev/null +++ b/cli/internal/fs/copy_file.go @@ -0,0 +1,81 @@ +// Adapted from https://github.com/thought-machine/please +// Copyright Thought Machine, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +package fs + +import ( + "errors" + "os" + "path/filepath" + + "github.com/karrick/godirwalk" +) + +// RecursiveCopy copies either a single file or a directory. +// 'mode' is the mode of the destination file. +func RecursiveCopy(from string, to string) error { + // Verified all callers are passing in absolute paths for from (and to) + statedFrom := LstatCachedFile{Path: UnsafeToAbsoluteSystemPath(from)} + fromType, err := statedFrom.GetType() + if err != nil { + return err + } + + if fromType.IsDir() { + return WalkMode(statedFrom.Path.ToStringDuringMigration(), func(name string, isDir bool, fileType os.FileMode) error { + dest := filepath.Join(to, name[len(statedFrom.Path.ToString()):]) + // name is absolute, (originates from godirwalk) + src := LstatCachedFile{Path: UnsafeToAbsoluteSystemPath(name), fileType: &fileType} + if isDir { + mode, err := src.GetMode() + if err != nil { + return err + } + return os.MkdirAll(dest, mode) + } + return CopyFile(&src, dest) + }) + } + return CopyFile(&statedFrom, to) +} + +// Walk implements an equivalent to filepath.Walk. +// It's implemented over github.com/karrick/godirwalk but the provided interface doesn't use that +// to make it a little easier to handle. +func Walk(rootPath string, callback func(name string, isDir bool) error) error { + return WalkMode(rootPath, func(name string, isDir bool, mode os.FileMode) error { + return callback(name, isDir) + }) +} + +// WalkMode is like Walk but the callback receives an additional type specifying the file mode type. +// N.B. This only includes the bits of the mode that determine the mode type, not the permissions. 
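+//
+// Example (illustrative):
+//
+//	err := WalkMode(root, func(name string, isDir bool, mode os.FileMode) error {
+//		if mode&os.ModeSymlink != 0 {
+//			// decide whether to copy or skip the link
+//		}
+//		return nil
+//	})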
+func WalkMode(rootPath string, callback func(name string, isDir bool, mode os.FileMode) error) error {
+	return godirwalk.Walk(rootPath, &godirwalk.Options{
+		Callback: func(name string, info *godirwalk.Dirent) error {
+			// currently we support symlinked files, but not symlinked directories:
+			// For copying, we Mkdir and bail if we encounter a symlink to a directory
+			// For finding packages, we enumerate the symlink, but don't follow inside
+			isDir, err := info.IsDirOrSymlinkToDir()
+			if err != nil {
+				pathErr := &os.PathError{}
+				if errors.As(err, &pathErr) {
+					// If we have a broken link, skip this entry
+					return godirwalk.SkipThis
+				}
+				return err
+			}
+			return callback(name, isDir, info.ModeType())
+		},
+		ErrorCallback: func(pathname string, err error) godirwalk.ErrorAction {
+			pathErr := &os.PathError{}
+			if errors.As(err, &pathErr) {
+				return godirwalk.SkipNode
+			}
+			return godirwalk.Halt
+		},
+		Unsorted:            true,
+		AllowNonDirectory:   true,
+		FollowSymbolicLinks: false,
+	})
+}
diff --git a/cli/internal/fs/copy_file_test.go b/cli/internal/fs/copy_file_test.go
new file mode 100644
index 0000000..6a61576
--- /dev/null
+++ b/cli/internal/fs/copy_file_test.go
@@ -0,0 +1,198 @@
+package fs
+
+import (
+	"errors"
+	"io/ioutil"
+	"os"
+	"path/filepath"
+	"testing"
+
+	"github.com/vercel/turbo/cli/internal/turbopath"
+	"gotest.tools/v3/assert"
+	"gotest.tools/v3/fs"
+)
+
+func TestCopyFile(t *testing.T) {
+	srcTmpDir := turbopath.AbsoluteSystemPath(t.TempDir())
+	destTmpDir := turbopath.AbsoluteSystemPath(t.TempDir())
+	srcFilePath := srcTmpDir.UntypedJoin("src")
+	destFilePath := destTmpDir.UntypedJoin("dest")
+	from := &LstatCachedFile{Path: srcFilePath}
+
+	// The src file doesn't exist, will error.
+	err := CopyFile(from, destFilePath.ToString())
+	pathErr := &os.PathError{}
+	if !errors.As(err, &pathErr) {
+		t.Errorf("got %v, want PathError", err)
+	}
+
+	// Create the src file.
+	srcFile, err := srcFilePath.Create()
+	assert.NilError(t, err, "Create")
+	_, err = srcFile.WriteString("src")
+	assert.NilError(t, err, "WriteString")
+	assert.NilError(t, srcFile.Close(), "Close")
+
+	// Copy the src to the dest.
+	err = CopyFile(from, destFilePath.ToString())
+	assert.NilError(t, err, "src exists dest does not, should not error.")
+
+	// Now test for symlinks.
+	symlinkSrcDir := turbopath.AbsoluteSystemPath(t.TempDir())
+	symlinkTargetDir := turbopath.AbsoluteSystemPath(t.TempDir())
+	symlinkDestDir := turbopath.AbsoluteSystemPath(t.TempDir())
+	symlinkSrcPath := symlinkSrcDir.UntypedJoin("symlink")
+	symlinkTargetPath := symlinkTargetDir.UntypedJoin("target")
+	symlinkDestPath := symlinkDestDir.UntypedJoin("dest")
+	fromSymlink := &LstatCachedFile{Path: symlinkSrcPath}
+
+	// Create the symlink target.
+	symlinkTargetFile, err := symlinkTargetPath.Create()
+	assert.NilError(t, err, "Create")
+	_, err = symlinkTargetFile.WriteString("Target")
+	assert.NilError(t, err, "WriteString")
+	assert.NilError(t, symlinkTargetFile.Close(), "Close")
+
+	// Link things up.
+	err = symlinkSrcPath.Symlink(symlinkTargetPath.ToString())
+	assert.NilError(t, err, "Symlink")
+
+	// Run the test.
+	err = CopyFile(fromSymlink, symlinkDestPath.ToString())
+	assert.NilError(t, err, "Copying a valid symlink does not error.")
+
+	// Break the symlink.
+	err = symlinkTargetPath.Remove()
+	assert.NilError(t, err, "breaking the symlink")
+
+	// Remove the existing copy.
+	err = symlinkDestPath.Remove()
+	assert.NilError(t, err, "existing copy is removed")
+
+	// Try copying the now-broken symlink.
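+	// CopyFile recreates the link itself rather than following it, so a
+	// dangling target is not an error.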
+	err = CopyFile(fromSymlink, symlinkDestPath.ToString())
+	assert.NilError(t, err, "CopyFile")
+
+	// Confirm that it copied
+	target, err := symlinkDestPath.Readlink()
+	assert.NilError(t, err, "Readlink")
+	assert.Equal(t, target, symlinkTargetPath.ToString())
+}
+
+func TestCopyOrLinkFileWithPerms(t *testing.T) {
+	// Directory layout:
+	//
+	// /
+	//   foo
+	readonlyMode := os.FileMode(0444)
+	srcDir := turbopath.AbsoluteSystemPath(t.TempDir())
+	dstDir := turbopath.AbsoluteSystemPath(t.TempDir())
+	srcFilePath := srcDir.UntypedJoin("src")
+	dstFilePath := dstDir.UntypedJoin("dst")
+	srcFile, err := srcFilePath.Create()
+	defer func() { _ = srcFile.Close() }()
+	assert.NilError(t, err, "Create")
+	err = srcFile.Chmod(readonlyMode)
+	assert.NilError(t, err, "Chmod")
+	err = CopyFile(&LstatCachedFile{Path: srcFilePath}, dstFilePath.ToStringDuringMigration())
+	assert.NilError(t, err, "CopyOrLinkFile")
+	info, err := dstFilePath.Lstat()
+	assert.NilError(t, err, "Lstat")
+	assert.Equal(t, info.Mode(), readonlyMode, "expected dest to have matching permissions")
+}
+
+func TestRecursiveCopy(t *testing.T) {
+	// Directory layout:
+	//
+	// /
+	//   b
+	//   child/
+	//     a
+	//     link -> ../b
+	//     broken -> missing
+	//     circle -> ../child
+	src := fs.NewDir(t, "recursive-copy-or-link")
+	dst := fs.NewDir(t, "recursive-copy-or-link-dist")
+	childDir := filepath.Join(src.Path(), "child")
+	err := os.Mkdir(childDir, os.ModeDir|0777)
+	assert.NilError(t, err, "Mkdir")
+	aPath := filepath.Join(childDir, "a")
+	aFile, err := os.Create(aPath)
+	assert.NilError(t, err, "Create")
+	_, err = aFile.WriteString("hello")
+	assert.NilError(t, err, "WriteString")
+	assert.NilError(t, aFile.Close(), "Close")
+
+	bPath := filepath.Join(src.Path(), "b")
+	bFile, err := os.Create(bPath)
+	assert.NilError(t, err, "Create")
+	_, err = bFile.WriteString("bFile")
+	assert.NilError(t, err, "WriteString")
+	assert.NilError(t, bFile.Close(), "Close")
+
+	srcLinkPath := filepath.Join(childDir, "link")
+	assert.NilError(t, os.Symlink(filepath.FromSlash("../b"), srcLinkPath), "Symlink")
+
+	srcBrokenLinkPath := filepath.Join(childDir, "broken")
+	assert.NilError(t, os.Symlink("missing", srcBrokenLinkPath), "Symlink")
+	circlePath := filepath.Join(childDir, "circle")
+	assert.NilError(t, os.Symlink(filepath.FromSlash("../child"), circlePath), "Symlink")
+
+	err = RecursiveCopy(src.Path(), dst.Path())
+	assert.NilError(t, err, "RecursiveCopy")
+	// Ensure that copying a second time over an existing destination does not break anything
+	err = RecursiveCopy(src.Path(), dst.Path())
+	assert.NilError(t, err, "RecursiveCopy")
+
+	dstChildDir := filepath.Join(dst.Path(), "child")
+	assertDirMatches(t, childDir, dstChildDir)
+	dstAPath := filepath.Join(dst.Path(), "child", "a")
+	assertFileMatches(t, aPath, dstAPath)
+	dstBPath := filepath.Join(dst.Path(), "b")
+	assertFileMatches(t, bPath, dstBPath)
+	dstLinkPath := filepath.Join(dst.Path(), "child", "link")
+	dstLinkDest, err := os.Readlink(dstLinkPath)
+	assert.NilError(t, err, "Readlink")
+	expectedLinkDest := filepath.FromSlash("../b")
+	if dstLinkDest != expectedLinkDest {
+		t.Errorf("Readlink got %v, want %v", dstLinkDest, expectedLinkDest)
+	}
+	dstBrokenLinkPath := filepath.Join(dst.Path(), "child", "broken")
+	brokenLinkExists := PathExists(dstBrokenLinkPath)
+	if brokenLinkExists {
+		t.Errorf("We cached a broken link at %v", dstBrokenLinkPath)
+	}
+	// Currently, we convert symlink-to-directory to empty-directory
+	// This is very likely not ideal behavior, but leaving this test here to verify
+	// that it is what
we expect at this point in time. + dstCirclePath := filepath.Join(dst.Path(), "child", "circle") + circleStat, err := os.Lstat(dstCirclePath) + assert.NilError(t, err, "Lstat") + assert.Equal(t, circleStat.IsDir(), true) + entries, err := os.ReadDir(dstCirclePath) + assert.NilError(t, err, "ReadDir") + assert.Equal(t, len(entries), 0) +} + +func assertFileMatches(t *testing.T, orig string, copy string) { + t.Helper() + origBytes, err := ioutil.ReadFile(orig) + assert.NilError(t, err, "ReadFile") + copyBytes, err := ioutil.ReadFile(copy) + assert.NilError(t, err, "ReadFile") + assert.DeepEqual(t, origBytes, copyBytes) + origStat, err := os.Lstat(orig) + assert.NilError(t, err, "Lstat") + copyStat, err := os.Lstat(copy) + assert.NilError(t, err, "Lstat") + assert.Equal(t, origStat.Mode(), copyStat.Mode()) +} + +func assertDirMatches(t *testing.T, orig string, copy string) { + t.Helper() + origStat, err := os.Lstat(orig) + assert.NilError(t, err, "Lstat") + copyStat, err := os.Lstat(copy) + assert.NilError(t, err, "Lstat") + assert.Equal(t, origStat.Mode(), copyStat.Mode()) +} diff --git a/cli/internal/fs/fs.go b/cli/internal/fs/fs.go new file mode 100644 index 0000000..77804c0 --- /dev/null +++ b/cli/internal/fs/fs.go @@ -0,0 +1,191 @@ +package fs + +import ( + "io" + "io/ioutil" + "log" + "os" + "path/filepath" + "runtime" + "strings" + + "github.com/pkg/errors" + "github.com/vercel/turbo/cli/internal/util" +) + +// https://github.com/thought-machine/please/blob/master/src/fs/fs.go + +// DirPermissions are the default permission bits we apply to directories. +const DirPermissions = os.ModeDir | 0775 + +// EnsureDir ensures that the directory of the given file has been created. +func EnsureDir(filename string) error { + dir := filepath.Dir(filename) + err := os.MkdirAll(dir, DirPermissions) + if err != nil && FileExists(dir) { + // It looks like this is a file and not a directory. Attempt to remove it; this can + // happen in some cases if you change a rule from outputting a file to a directory. + log.Printf("Attempting to remove file %s; a subdirectory is required", dir) + if err2 := os.Remove(dir); err2 == nil { + err = os.MkdirAll(dir, DirPermissions) + } else { + return err + } + } + return err +} + +var nonRelativeSentinel string = ".." + string(filepath.Separator) + +// DirContainsPath returns true if the path 'target' is contained within 'dir' +// Expects both paths to be absolute and does not verify that either path exists. +func DirContainsPath(dir string, target string) (bool, error) { + // On windows, trying to get a relative path between files on different volumes + // is an error. We don't care about the error, it's good enough for us to say + // that one path doesn't contain the other if they're on different volumes. + if runtime.GOOS == "windows" && filepath.VolumeName(dir) != filepath.VolumeName(target) { + return false, nil + } + // In Go, filepath.Rel can return a path that starts with "../" or equivalent. + // Checking filesystem-level contains can get extremely complicated + // (see https://github.com/golang/dep/blob/f13583b555deaa6742f141a9c1185af947720d60/internal/fs/fs.go#L33) + // As a compromise, rely on the stdlib to generate a relative path and then check + // if the first step is "../". + rel, err := filepath.Rel(dir, target) + if err != nil { + return false, err + } + return !strings.HasPrefix(rel, nonRelativeSentinel), nil +} + +// PathExists returns true if the given path exists, as a file or a directory. 
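+// It is based on Lstat, so a dangling symlink is reported as existing.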
+func PathExists(filename string) bool { + _, err := os.Lstat(filename) + return err == nil +} + +// FileExists returns true if the given path exists and is a file. +func FileExists(filename string) bool { + info, err := os.Lstat(filename) + return err == nil && !info.IsDir() +} + +// CopyFile copies a file from 'from' to 'to', with an attempt to perform a copy & rename +// to avoid chaos if anything goes wrong partway. +func CopyFile(from *LstatCachedFile, to string) error { + fromMode, err := from.GetMode() + if err != nil { + return errors.Wrapf(err, "getting mode for %v", from.Path) + } + if fromMode&os.ModeSymlink != 0 { + target, err := from.Path.Readlink() + if err != nil { + return errors.Wrapf(err, "reading link target for %v", from.Path) + } + if err := EnsureDir(to); err != nil { + return err + } + if _, err := os.Lstat(to); err == nil { + // target link file exist, should remove it first + err := os.Remove(to) + if err != nil { + return err + } + } + return os.Symlink(target, to) + } + fromFile, err := from.Path.Open() + if err != nil { + return err + } + defer util.CloseAndIgnoreError(fromFile) + return writeFileFromStream(fromFile, to, fromMode) +} + +// writeFileFromStream writes data from a reader to the file named 'to', with an attempt to perform +// a copy & rename to avoid chaos if anything goes wrong partway. +func writeFileFromStream(fromFile io.Reader, to string, mode os.FileMode) error { + dir, file := filepath.Split(to) + if dir != "" { + if err := os.MkdirAll(dir, DirPermissions); err != nil { + return err + } + } + tempFile, err := ioutil.TempFile(dir, file) + if err != nil { + return err + } + if _, err := io.Copy(tempFile, fromFile); err != nil { + return err + } + if err := tempFile.Close(); err != nil { + return err + } + // OK, now file is written; adjust permissions appropriately. + if mode == 0 { + mode = 0664 + } + if err := os.Chmod(tempFile.Name(), mode); err != nil { + return err + } + // And move it to its final destination. 
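+	// renameFile falls back to copy-and-remove when the rename crosses filesystems.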
+ return renameFile(tempFile.Name(), to) +} + +// IsDirectory checks if a given path is a directory +func IsDirectory(path string) bool { + info, err := os.Stat(path) + return err == nil && info.IsDir() +} + +// Try to gracefully rename the file as the os.Rename does not work across +// filesystems and on most Linux systems /tmp is mounted as tmpfs +func renameFile(from, to string) (err error) { + err = os.Rename(from, to) + if err == nil { + return nil + } + err = copyFile(from, to) + if err != nil { + return err + } + err = os.RemoveAll(from) + if err != nil { + return err + } + return nil +} + +func copyFile(from, to string) (err error) { + in, err := os.Open(from) + if err != nil { + return err + } + defer in.Close() + + out, err := os.Create(to) + if err != nil { + return err + } + defer func() { + if e := out.Close(); e != nil { + err = e + } + }() + + _, err = io.Copy(out, in) + if err != nil { + return err + } + + si, err := os.Stat(from) + if err != nil { + return err + } + err = os.Chmod(to, si.Mode()) + if err != nil { + return err + } + + return nil +} diff --git a/cli/internal/fs/fs_test.go b/cli/internal/fs/fs_test.go new file mode 100644 index 0000000..0598d43 --- /dev/null +++ b/cli/internal/fs/fs_test.go @@ -0,0 +1,60 @@ +package fs + +import ( + "path/filepath" + "testing" +) + +func Test_DirContainsPath(t *testing.T) { + parent, err := filepath.Abs(filepath.Join("some", "path")) + if err != nil { + t.Fatalf("failed to construct parent path %v", err) + } + testcases := []struct { + target []string + want bool + }{ + { + []string{"..", "elsewhere"}, + false, + }, + { + []string{"sibling"}, + false, + }, + { + // The same path as parent + []string{"some", "path"}, + true, + }, + { + []string{"some", "path", "..", "path", "inside", "parent"}, + true, + }, + { + []string{"some", "path", "inside", "..", "inside", "parent"}, + true, + }, + { + []string{"some", "path", "inside", "..", "..", "outside", "parent"}, + false, + }, + { + []string{"some", "pathprefix"}, + false, + }, + } + for _, tc := range testcases { + target, err := filepath.Abs(filepath.Join(tc.target...)) + if err != nil { + t.Fatalf("failed to construct path for %v: %v", tc.target, err) + } + got, err := DirContainsPath(parent, target) + if err != nil { + t.Fatalf("failed to check ") + } + if got != tc.want { + t.Errorf("DirContainsPath(%v, %v) got %v, want %v", parent, target, got, tc.want) + } + } +} diff --git a/cli/internal/fs/fs_windows_test.go b/cli/internal/fs/fs_windows_test.go new file mode 100644 index 0000000..4e71e2c --- /dev/null +++ b/cli/internal/fs/fs_windows_test.go @@ -0,0 +1,18 @@ +//go:build windows +// +build windows + +package fs + +import "testing" + +func TestDifferentVolumes(t *testing.T) { + p1 := "C:\\some\\path" + p2 := "D:\\other\\path" + contains, err := DirContainsPath(p1, p2) + if err != nil { + t.Errorf("DirContainsPath got error %v, want ", err) + } + if contains { + t.Errorf("DirContainsPath got true, want false") + } +} diff --git a/cli/internal/fs/get_turbo_data_dir_go.go b/cli/internal/fs/get_turbo_data_dir_go.go new file mode 100644 index 0000000..2cf459a --- /dev/null +++ b/cli/internal/fs/get_turbo_data_dir_go.go @@ -0,0 +1,16 @@ +//go:build go || !rust +// +build go !rust + +package fs + +import ( + "github.com/adrg/xdg" + "github.com/vercel/turbo/cli/internal/turbopath" +) + +// GetTurboDataDir returns a directory outside of the repo +// where turbo can store data files related to turbo. 
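+// With these build tags it resolves under xdg.DataHome, i.e. $XDG_DATA_HOME/turborepo
+// (typically ~/.local/share/turborepo on Linux).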
+func GetTurboDataDir() turbopath.AbsoluteSystemPath { + dataHome := AbsoluteSystemPathFromUpstream(xdg.DataHome) + return dataHome.UntypedJoin("turborepo") +} diff --git a/cli/internal/fs/get_turbo_data_dir_rust.go b/cli/internal/fs/get_turbo_data_dir_rust.go new file mode 100644 index 0000000..dbc80f3 --- /dev/null +++ b/cli/internal/fs/get_turbo_data_dir_rust.go @@ -0,0 +1,16 @@ +//go:build rust +// +build rust + +package fs + +import ( + "github.com/vercel/turbo/cli/internal/ffi" + "github.com/vercel/turbo/cli/internal/turbopath" +) + +// GetTurboDataDir returns a directory outside of the repo +// where turbo can store data files related to turbo. +func GetTurboDataDir() turbopath.AbsoluteSystemPath { + dir := ffi.GetTurboDataDir() + return turbopath.AbsoluteSystemPathFromUpstream(dir) +} diff --git a/cli/internal/fs/hash.go b/cli/internal/fs/hash.go new file mode 100644 index 0000000..fed7d87 --- /dev/null +++ b/cli/internal/fs/hash.go @@ -0,0 +1,61 @@ +package fs + +import ( + "crypto/sha1" + "encoding/hex" + "fmt" + "io" + "os" + "strconv" + + "github.com/vercel/turbo/cli/internal/xxhash" +) + +func HashObject(i interface{}) (string, error) { + hash := xxhash.New() + + _, err := hash.Write([]byte(fmt.Sprintf("%v", i))) + + return hex.EncodeToString(hash.Sum(nil)), err +} + +func HashFile(filePath string) (string, error) { + file, err := os.Open(filePath) + if err != nil { + return "", err + } + defer file.Close() + + hash := xxhash.New() + if _, err := io.Copy(hash, file); err != nil { + return "", err + } + + return hex.EncodeToString(hash.Sum(nil)), nil +} + +// GitLikeHashFile is a function that mimics how Git +// calculates the SHA1 for a file (or, in Git terms, a "blob") (without git) +func GitLikeHashFile(filePath string) (string, error) { + file, err := os.Open(filePath) + if err != nil { + return "", err + } + defer file.Close() + + stat, err := file.Stat() + if err != nil { + return "", err + } + hash := sha1.New() + hash.Write([]byte("blob")) + hash.Write([]byte(" ")) + hash.Write([]byte(strconv.FormatInt(stat.Size(), 10))) + hash.Write([]byte{0}) + + if _, err := io.Copy(hash, file); err != nil { + return "", err + } + + return hex.EncodeToString(hash.Sum(nil)), nil +} diff --git a/cli/internal/fs/hash_test.go b/cli/internal/fs/hash_test.go new file mode 100644 index 0000000..dd2fa84 --- /dev/null +++ b/cli/internal/fs/hash_test.go @@ -0,0 +1,53 @@ +package fs + +import ( + "testing" + + "gotest.tools/v3/assert" +) + +const _numOfRuns = 20 + +func Test_HashObjectStability(t *testing.T) { + type TestCase struct { + name string + obj interface{} + } + type complexStruct struct { + nested TaskOutputs + foo string + bar []string + } + + testCases := []TestCase{ + { + name: "task object", + obj: TaskOutputs{ + Inclusions: []string{"foo", "bar"}, + Exclusions: []string{"baz"}, + }, + }, + { + name: "complex struct", + obj: complexStruct{ + nested: TaskOutputs{ + Exclusions: []string{"bar", "baz"}, + Inclusions: []string{"foo"}, + }, + foo: "a", + bar: []string{"b", "c"}, + }, + }, + } + + for _, tc := range testCases { + expectedHash, err := HashObject(tc.obj) + assert.NilError(t, err, tc.name) + + for n := 0; n < _numOfRuns; n++ { + hash, err := HashObject(tc.obj) + assert.NilError(t, err, tc.name) + assert.Equal(t, expectedHash, hash, tc.name) + } + } +} diff --git a/cli/internal/fs/lstat.go b/cli/internal/fs/lstat.go new file mode 100644 index 0000000..eff0810 --- /dev/null +++ b/cli/internal/fs/lstat.go @@ -0,0 +1,74 @@ +package fs + +import ( + "io/fs" + "os" + + 
"github.com/vercel/turbo/cli/internal/turbopath" +) + +// LstatCachedFile maintains a cache of file info, mode and type for the given Path +type LstatCachedFile struct { + Path turbopath.AbsoluteSystemPath + fileInfo fs.FileInfo + fileMode *fs.FileMode + fileType *fs.FileMode +} + +// GetInfo returns, and caches the file info for the LstatCachedFile.Path +func (file *LstatCachedFile) GetInfo() (fs.FileInfo, error) { + if file.fileInfo != nil { + return file.fileInfo, nil + } + + err := file.lstat() + if err != nil { + return nil, err + } + + return file.fileInfo, nil +} + +// GetMode returns, and caches the file mode for the LstatCachedFile.Path +func (file *LstatCachedFile) GetMode() (fs.FileMode, error) { + if file.fileMode != nil { + return *file.fileMode, nil + } + + err := file.lstat() + if err != nil { + return 0, err + } + + return *file.fileMode, nil +} + +// GetType returns, and caches the type bits of (FileMode & os.ModeType) for the LstatCachedFile.Path +func (file *LstatCachedFile) GetType() (fs.FileMode, error) { + if file.fileType != nil { + return *file.fileType, nil + } + + err := file.lstat() + if err != nil { + return 0, err + } + + return *file.fileType, nil +} + +func (file *LstatCachedFile) lstat() error { + fileInfo, err := file.Path.Lstat() + if err != nil { + return err + } + + fileMode := fileInfo.Mode() + fileModeType := fileMode & os.ModeType + + file.fileInfo = fileInfo + file.fileMode = &fileMode + file.fileType = &fileModeType + + return nil +} diff --git a/cli/internal/fs/package_json.go b/cli/internal/fs/package_json.go new file mode 100644 index 0000000..883f7a4 --- /dev/null +++ b/cli/internal/fs/package_json.go @@ -0,0 +1,142 @@ +package fs + +import ( + "bytes" + "encoding/json" + "sync" + + "github.com/vercel/turbo/cli/internal/lockfile" + "github.com/vercel/turbo/cli/internal/turbopath" +) + +// PackageJSON represents NodeJS package.json +type PackageJSON struct { + Name string `json:"name"` + Version string `json:"version"` + Scripts map[string]string `json:"scripts"` + Dependencies map[string]string `json:"dependencies"` + DevDependencies map[string]string `json:"devDependencies"` + OptionalDependencies map[string]string `json:"optionalDependencies"` + PeerDependencies map[string]string `json:"peerDependencies"` + PackageManager string `json:"packageManager"` + Os []string `json:"os"` + Workspaces Workspaces `json:"workspaces"` + Private bool `json:"private"` + // Exact JSON object stored in package.json including unknown fields + // During marshalling struct fields will take priority over raw fields + RawJSON map[string]interface{} `json:"-"` + + // relative path from repo root to the package.json file + PackageJSONPath turbopath.AnchoredSystemPath `json:"-"` + // relative path from repo root to the package + Dir turbopath.AnchoredSystemPath `json:"-"` + InternalDeps []string `json:"-"` + UnresolvedExternalDeps map[string]string `json:"-"` + TransitiveDeps []lockfile.Package `json:"-"` + LegacyTurboConfig *TurboJSON `json:"turbo"` + Mu sync.Mutex `json:"-"` + ExternalDepsHash string `json:"-"` +} + +type Workspaces []string + +type WorkspacesAlt struct { + Packages []string `json:"packages,omitempty"` +} + +func (r *Workspaces) UnmarshalJSON(data []byte) error { + var tmp = &WorkspacesAlt{} + if err := json.Unmarshal(data, tmp); err == nil { + *r = Workspaces(tmp.Packages) + return nil + } + var tempstr = []string{} + if err := json.Unmarshal(data, &tempstr); err != nil { + return err + } + *r = tempstr + return nil +} + +// ReadPackageJSON returns 
a struct of package.json +func ReadPackageJSON(path turbopath.AbsoluteSystemPath) (*PackageJSON, error) { + b, err := path.ReadFile() + if err != nil { + return nil, err + } + return UnmarshalPackageJSON(b) +} + +// UnmarshalPackageJSON decodes a byte slice into a PackageJSON struct +func UnmarshalPackageJSON(data []byte) (*PackageJSON, error) { + var rawJSON map[string]interface{} + if err := json.Unmarshal(data, &rawJSON); err != nil { + return nil, err + } + + pkgJSON := &PackageJSON{} + if err := json.Unmarshal(data, &pkgJSON); err != nil { + return nil, err + } + pkgJSON.RawJSON = rawJSON + + return pkgJSON, nil +} + +// MarshalPackageJSON Serialize PackageJSON to a slice of bytes +func MarshalPackageJSON(pkgJSON *PackageJSON) ([]byte, error) { + structuredContent, err := json.Marshal(pkgJSON) + if err != nil { + return nil, err + } + var structuredFields map[string]interface{} + if err := json.Unmarshal(structuredContent, &structuredFields); err != nil { + return nil, err + } + + fieldsToSerialize := make(map[string]interface{}, len(pkgJSON.RawJSON)) + + // copy pkgJSON.RawJSON + for key, value := range pkgJSON.RawJSON { + fieldsToSerialize[key] = value + } + + for key, value := range structuredFields { + if isEmpty(value) { + delete(fieldsToSerialize, key) + } else { + fieldsToSerialize[key] = value + } + } + + var b bytes.Buffer + encoder := json.NewEncoder(&b) + encoder.SetEscapeHTML(false) + encoder.SetIndent("", " ") + if err := encoder.Encode(fieldsToSerialize); err != nil { + return nil, err + } + + return b.Bytes(), nil +} + +func isEmpty(value interface{}) bool { + if value == nil { + return true + } + switch s := value.(type) { + case string: + return s == "" + case bool: + return !s + case []string: + return len(s) == 0 + case map[string]interface{}: + return len(s) == 0 + case Workspaces: + return len(s) == 0 + default: + // Assume any unknown types aren't empty + return false + } +} diff --git a/cli/internal/fs/package_json_test.go b/cli/internal/fs/package_json_test.go new file mode 100644 index 0000000..3c16620 --- /dev/null +++ b/cli/internal/fs/package_json_test.go @@ -0,0 +1,174 @@ +package fs + +import ( + "testing" + + "gotest.tools/v3/assert" +) + +func Test_UnmarshalPackageJSON(t *testing.T) { + type Case struct { + name string + json string + expectedFields *PackageJSON + } + + testCases := []Case{ + { + name: "basic types are in raw and processed", + json: `{"name":"foo","version":"1.2.3"}`, + expectedFields: &PackageJSON{ + Name: "foo", + Version: "1.2.3", + RawJSON: map[string]interface{}{ + "name": "foo", + "version": "1.2.3", + }, + }, + }, + { + name: "map types get copied", + json: `{"dependencies":{"foo":"1.2.3"},"devDependencies":{"bar": "^1.0.0"}}`, + expectedFields: &PackageJSON{ + Dependencies: map[string]string{"foo": "1.2.3"}, + DevDependencies: map[string]string{"bar": "^1.0.0"}, + RawJSON: map[string]interface{}{ + "dependencies": map[string]interface{}{"foo": "1.2.3"}, + "devDependencies": map[string]interface{}{"bar": "^1.0.0"}, + }, + }, + }, + { + name: "array types get copied", + json: `{"os":["linux", "windows"]}`, + expectedFields: &PackageJSON{ + Os: []string{"linux", "windows"}, + RawJSON: map[string]interface{}{ + "os": []interface{}{"linux", "windows"}, + }, + }, + }, + } + + for _, testCase := range testCases { + actual, err := UnmarshalPackageJSON([]byte(testCase.json)) + assert.NilError(t, err, testCase.name) + assertPackageJSONEqual(t, actual, testCase.expectedFields) + } +} + +func Test_MarshalPackageJSON(t *testing.T) { + type 
TestCase struct { + name string + input *PackageJSON + expected *PackageJSON + } + + testCases := []TestCase{ + { + name: "roundtrip should have no effect", + input: &PackageJSON{ + Name: "foo", + Version: "1.2.3", + RawJSON: map[string]interface{}{ + "name": "foo", + "version": "1.2.3", + }, + }, + expected: &PackageJSON{ + Name: "foo", + Version: "1.2.3", + RawJSON: map[string]interface{}{ + "name": "foo", + "version": "1.2.3", + }, + }, + }, + { + name: "structured fields should take priority over raw values", + input: &PackageJSON{ + Name: "foo", + Version: "2.3.4", + RawJSON: map[string]interface{}{ + "name": "foo", + "version": "1.2.3", + }, + }, + expected: &PackageJSON{ + Name: "foo", + Version: "2.3.4", + RawJSON: map[string]interface{}{ + "name": "foo", + "version": "2.3.4", + }, + }, + }, + { + name: "empty structured fields don't get serialized", + input: &PackageJSON{ + Name: "foo", + Version: "", + RawJSON: map[string]interface{}{ + "name": "foo", + "version": "1.2.3", + }, + }, + expected: &PackageJSON{ + Name: "foo", + Version: "", + RawJSON: map[string]interface{}{ + "name": "foo", + }, + }, + }, + { + name: "unstructured fields survive the round trip", + input: &PackageJSON{ + Name: "foo", + RawJSON: map[string]interface{}{ + "name": "foo", + "special-field": "special-value", + "special-config": map[string]interface{}{ + "flag": true, + "value": "toggled", + }, + }, + }, + expected: &PackageJSON{ + Name: "foo", + RawJSON: map[string]interface{}{ + "name": "foo", + "special-field": "special-value", + "special-config": map[string]interface{}{ + "flag": true, + "value": "toggled", + }, + }, + }, + }, + } + + for _, testCase := range testCases { + serializedInput, err := MarshalPackageJSON(testCase.input) + assert.NilError(t, err, testCase.name) + actual, err := UnmarshalPackageJSON(serializedInput) + assert.NilError(t, err, testCase.name) + assertPackageJSONEqual(t, actual, testCase.expected) + } +} + +// Asserts that the data section of two PackageJSON structs are equal +func assertPackageJSONEqual(t *testing.T, x *PackageJSON, y *PackageJSON) { + t.Helper() + assert.Equal(t, x.Name, y.Name) + assert.Equal(t, x.Version, y.Version) + assert.DeepEqual(t, x.Scripts, y.Scripts) + assert.DeepEqual(t, x.Dependencies, y.Dependencies) + assert.DeepEqual(t, x.DevDependencies, y.DevDependencies) + assert.DeepEqual(t, x.OptionalDependencies, y.OptionalDependencies) + assert.DeepEqual(t, x.PeerDependencies, y.PeerDependencies) + assert.Equal(t, x.PackageManager, y.PackageManager) + assert.DeepEqual(t, x.Workspaces, y.Workspaces) + assert.DeepEqual(t, x.Private, y.Private) + assert.DeepEqual(t, x.RawJSON, y.RawJSON) +} diff --git a/cli/internal/fs/path.go b/cli/internal/fs/path.go new file mode 100644 index 0000000..2023d69 --- /dev/null +++ b/cli/internal/fs/path.go @@ -0,0 +1,113 @@ +package fs + +import ( + "fmt" + iofs "io/fs" + "os" + "path/filepath" + "reflect" + + "github.com/adrg/xdg" + "github.com/vercel/turbo/cli/internal/turbopath" +) + +// CheckedToAbsoluteSystemPath inspects a string and determines if it is an absolute path. +func CheckedToAbsoluteSystemPath(s string) (turbopath.AbsoluteSystemPath, error) { + if filepath.IsAbs(s) { + return turbopath.AbsoluteSystemPath(s), nil + } + return "", fmt.Errorf("%v is not an absolute path", s) +} + +// ResolveUnknownPath returns unknown if it is an absolute path, otherwise, it +// assumes unknown is a path relative to the given root. 
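+//
+// For example (illustrative), ResolveUnknownPath("/repo", "sub/pkg") yields
+// "/repo/sub/pkg", while ResolveUnknownPath("/repo", "/abs") yields "/abs".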
+func ResolveUnknownPath(root turbopath.AbsoluteSystemPath, unknown string) turbopath.AbsoluteSystemPath {
+	if filepath.IsAbs(unknown) {
+		return turbopath.AbsoluteSystemPath(unknown)
+	}
+	return root.UntypedJoin(unknown)
+}
+
+// UnsafeToAbsoluteSystemPath directly converts a string to an AbsoluteSystemPath
+func UnsafeToAbsoluteSystemPath(s string) turbopath.AbsoluteSystemPath {
+	return turbopath.AbsoluteSystemPath(s)
+}
+
+// UnsafeToAnchoredSystemPath directly converts a string to an AnchoredSystemPath
+func UnsafeToAnchoredSystemPath(s string) turbopath.AnchoredSystemPath {
+	return turbopath.AnchoredSystemPath(s)
+}
+
+// AbsoluteSystemPathFromUpstream is used to mark return values from APIs that we
+// expect to give us absolute paths. No checking is performed.
+// Prefer to use this over a cast to maintain the search-ability of interfaces
+// into and out of the turbopath.AbsoluteSystemPath type.
+func AbsoluteSystemPathFromUpstream(s string) turbopath.AbsoluteSystemPath {
+	return turbopath.AbsoluteSystemPath(s)
+}
+
+// GetCwd returns the calculated working directory after traversing symlinks.
+func GetCwd(cwdRaw string) (turbopath.AbsoluteSystemPath, error) {
+	if cwdRaw == "" {
+		var err error
+		cwdRaw, err = os.Getwd()
+		if err != nil {
+			return "", err
+		}
+	}
+	// We evaluate symlinks here because the package managers
+	// we support do the same.
+	cwdRaw, err := filepath.EvalSymlinks(cwdRaw)
+	if err != nil {
+		return "", fmt.Errorf("evaluating symlinks in cwd: %w", err)
+	}
+	cwd, err := CheckedToAbsoluteSystemPath(cwdRaw)
+	if err != nil {
+		return "", fmt.Errorf("cwd is not an absolute path %v: %v", cwdRaw, err)
+	}
+	return cwd, nil
+}
+
+// GetVolumeRoot returns the root directory given an absolute path.
+func GetVolumeRoot(absolutePath string) string {
+	return filepath.VolumeName(absolutePath) + string(os.PathSeparator)
+}
+
+// CreateDirFSAtRoot creates an `os.dirFS` instance at the root of the
+// volume containing the specified path.
+func CreateDirFSAtRoot(absolutePath string) iofs.FS {
+	return os.DirFS(GetVolumeRoot(absolutePath))
+}
+
+// GetDirFSRootPath returns the root path of an os.dirFS.
+func GetDirFSRootPath(fsys iofs.FS) string {
+	// We can't typecheck fsys to enforce using an `os.dirFS` because the
+	// type isn't exported from `os`. So instead, reflection. 🤷‍♂️
+
+	fsysType := reflect.TypeOf(fsys).Name()
+	if fsysType != "dirFS" {
+		// This is not a user error, fail fast
+		panic("GetDirFSRootPath must receive an os.dirFS")
+	}
+
+	// The underlying type is a string; this is the original path passed in.
+	return reflect.ValueOf(fsys).String()
+}
+
+// IofsRelativePath calculates a `os.dirFS`-friendly path from an absolute system path.
+func IofsRelativePath(fsysRoot string, absolutePath string) (string, error) {
+	return filepath.Rel(fsysRoot, absolutePath)
+}
+
+// TempDir returns the absolute path of a directory with the given name
+// under the system's default temp directory location
+func TempDir(subDir string) turbopath.AbsoluteSystemPath {
+	return turbopath.AbsoluteSystemPath(os.TempDir()).UntypedJoin(subDir)
+}
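Taken together, these helpers give callers a typed way to go from user input to an absolute path. A small illustrative sketch (the flag value and main wrapper are hypothetical, not part of the patch):

package main

import (
	"fmt"

	"github.com/vercel/turbo/cli/internal/fs"
)

func main() {
	// Resolve a possibly-relative CLI argument against the symlink-resolved
	// working directory, as computed by GetCwd above.
	cwd, err := fs.GetCwd("")
	if err != nil {
		panic(err)
	}
	cacheDir := fs.ResolveUnknownPath(cwd, "node_modules/.cache/turbo")
	fmt.Println(cacheDir) // absolute on every platform
}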
+
+// GetUserConfigDir returns the platform-specific common location
+// for configuration files that belong to a user.
+func GetUserConfigDir() turbopath.AbsoluteSystemPath {
+	configHome := AbsoluteSystemPathFromUpstream(xdg.ConfigHome)
+	return configHome.UntypedJoin("turborepo")
+}
diff --git a/cli/internal/fs/testdata/both/package.json b/cli/internal/fs/testdata/both/package.json
new file mode 100644
index 0000000..03534b7
--- /dev/null
+++ b/cli/internal/fs/testdata/both/package.json
@@ -0,0 +1,7 @@
+{
+  "turbo": {
+    "pipeline": {
+      "build": {}
+    }
+  }
+}
diff --git a/cli/internal/fs/testdata/both/turbo.json b/cli/internal/fs/testdata/both/turbo.json
new file mode 100644
index 0000000..721e897
--- /dev/null
+++ b/cli/internal/fs/testdata/both/turbo.json
@@ -0,0 +1,18 @@
+// mocked test comment
+{
+  "pipeline": {
+    "build": {
+      // mocked test comment
+      "dependsOn": [
+        // mocked test comment
+        "^build"
+      ],
+      "outputs": ["dist/**", ".next/**", "!dist/assets/**"],
+      "outputMode": "new-only"
+    } // mocked test comment
+  },
+  "remoteCache": {
+    "teamId": "team_id",
+    "signature": true
+  }
+}
diff --git a/cli/internal/fs/testdata/correct/turbo.json b/cli/internal/fs/testdata/correct/turbo.json
new file mode 100644
index 0000000..e22cde2
--- /dev/null
+++ b/cli/internal/fs/testdata/correct/turbo.json
@@ -0,0 +1,49 @@
+// mocked test comment
+{
+  "pipeline": {
+    "build": {
+      "experimentalPassthroughEnv": ["GITHUB_TOKEN"],
+      // mocked test comment
+      "dependsOn": [
+        // mocked test comment
+        "^build"
+      ],
+      "outputs": ["dist/**", "!dist/assets/**", ".next/**"],
+      "outputMode": "new-only"
+    }, // mocked test comment
+    "lint": {
+      "outputs": [],
+      "dependsOn": ["$MY_VAR"],
+      "cache": true,
+      "outputMode": "new-only"
+    },
+    "dev": {
+      "cache": false,
+      "outputMode": "full"
+    },
+    /* mocked test comment */
+    "publish": {
+      "outputs": ["dist/**"],
+      "inputs": [
+        /*
+          mocked test comment
+        */
+        "build/**/*"
+      ],
+      "dependsOn": [
+        /* mocked test comment */ "^publish",
+        "^build",
+        "build",
+        "admin#lint"
+      ],
+      "cache": false
+    }
+  },
+  "globalDependencies": ["some-file", "../another-dir/**", "$GLOBAL_ENV_VAR"],
+  "globalEnv": ["SOME_VAR", "ANOTHER_VAR"],
+  "experimentalGlobalPassThroughEnv": ["AWS_SECRET_KEY"],
+  "remoteCache": {
+    "teamId": "team_id",
+    "signature": true
+  }
+}
diff --git a/cli/internal/fs/testdata/invalid-env-1/turbo.json b/cli/internal/fs/testdata/invalid-env-1/turbo.json
new file mode 100644
index 0000000..e4a6517
--- /dev/null
+++ b/cli/internal/fs/testdata/invalid-env-1/turbo.json
@@ -0,0 +1,8 @@
+{
+  "pipeline": {
+    "task1": {
+      // all invalid values
+      "env": ["$A", "$B"]
+    }
+  }
+}
diff --git a/cli/internal/fs/testdata/invalid-env-2/turbo.json b/cli/internal/fs/testdata/invalid-env-2/turbo.json
new file mode 100644
index 0000000..92eec96
--- /dev/null
+++ b/cli/internal/fs/testdata/invalid-env-2/turbo.json
@@ -0,0 +1,8 @@
+{
+  "pipeline": {
+    "task1": {
+      // Mixed values
+      "env": ["$A", "B"]
+    }
+  }
+}
diff --git a/cli/internal/fs/testdata/invalid-global-env/turbo.json b/cli/internal/fs/testdata/invalid-global-env/turbo.json
new file mode 100644
index 0000000..2ae9ff9
--- /dev/null
+++ b/cli/internal/fs/testdata/invalid-global-env/turbo.json
@@ -0,0 +1,11 @@
+{
+  // Both global declarations with duplicates
+  "globalDependencies": ["$FOO", "$BAR", "somefile.txt", "somefile.txt"],
+  // some invalid values
+  "globalEnv": ["FOO", "BAZ", "$QUX"],
+  "pipeline": {
+    "task1": {
+      "dependsOn": ["$A"]
+    }
+  }
+}
diff --git a/cli/internal/fs/testdata/legacy-env/turbo.json b/cli/internal/fs/testdata/legacy-env/turbo.json
new file mode 100644
index 0000000..6b082c4
--- /dev/null
+++ b/cli/internal/fs/testdata/legacy-env/turbo.json
@@ -0,0 +1,34 @@
+// mocked test comment
+{
+  // Both global declarations, with duplicates and with legacy $-prefixed entries
+  "globalDependencies": ["$FOO", "$BAR", "somefile.txt", "somefile.txt"],
+  "globalEnv": ["FOO", "BAZ", "QUX"],
+  "pipeline": {
+    // Only legacy declaration
+    "task1": {
+      "dependsOn": ["$A"]
+    },
+    // Only new declaration
+    "task2": {
+      "env": ["A"]
+    },
+    // Same var declared in both
+    "task3": {
+      "dependsOn": ["$A"],
+      "env": ["A"]
+    },
+    // Different vars declared in both
+    "task4": {
+      "dependsOn": ["$A"],
+      "env": ["B"]
+    },
+
+    // some edge cases
+    "task6": { "env": ["A", "B", "C"], "dependsOn": ["$D", "$E", "$F"] },
+    "task7": { "env": ["A", "B", "C"], "dependsOn": ["$A", "$B", "$C"] },
+    "task8": { "env": ["A", "B", "C"], "dependsOn": ["A", "B", "C"] },
+    "task9": { "env": [], "dependsOn": ["$A"] },
+    "task10": { "env": ["A", "A"], "dependsOn": ["$A", "$A"] },
+    "task11": { "env": ["A", "A"], "dependsOn": ["$B", "$B"] }
+  }
+}
diff --git a/cli/internal/fs/testdata/legacy-only/package.json b/cli/internal/fs/testdata/legacy-only/package.json
new file mode 100644
index 0000000..03534b7
--- /dev/null
+++ b/cli/internal/fs/testdata/legacy-only/package.json
@@ -0,0 +1,7 @@
+{
+  "turbo": {
+    "pipeline": {
+      "build": {}
+    }
+  }
+}
diff --git a/cli/internal/fs/turbo_json.go b/cli/internal/fs/turbo_json.go
new file mode 100644
index 0000000..71ef29d
--- /dev/null
+++ b/cli/internal/fs/turbo_json.go
@@ -0,0 +1,741 @@
+package fs
+
+import (
+	"encoding/json"
+	"fmt"
+	"io/ioutil"
+	"log"
+	"os"
+	"path/filepath"
+	"sort"
+	"strings"
+
+	"github.com/muhammadmuzzammil1998/jsonc"
+	"github.com/pkg/errors"
+	"github.com/vercel/turbo/cli/internal/turbopath"
+	"github.com/vercel/turbo/cli/internal/util"
+)
+
+const (
+	configFile                   = "turbo.json"
+	envPipelineDelimiter         = "$"
+	topologicalPipelineDelimiter = "^"
+)
+
+type rawTurboJSON struct {
+	// Global root filesystem dependencies
+	GlobalDependencies []string `json:"globalDependencies,omitempty"`
+	// Global env
+	GlobalEnv []string `json:"globalEnv,omitempty"`
+
+	// Global passthrough env
+	GlobalPassthroughEnv []string `json:"experimentalGlobalPassThroughEnv,omitempty"`
+
+	// Pipeline is a map of Turbo pipeline entries which define the task graph
+	// and cache behavior on a per task or per package-task basis.
+	Pipeline Pipeline `json:"pipeline"`
+	// Configuration options when interfacing with the remote cache
+	RemoteCacheOptions RemoteCacheOptions `json:"remoteCache,omitempty"`
+
+	// Extends can be the name of another workspace
+	Extends []string `json:"extends,omitempty"`
+}
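To make the raw-versus-structured split concrete: once the custom unmarshaler defined later in this file runs, legacy `$FOO` entries in `globalDependencies` are migrated into `GlobalEnv`, and only real file dependencies remain in `GlobalDeps`. A minimal sketch (the inputs are invented for illustration):

package main

import (
	"encoding/json"
	"fmt"

	"github.com/vercel/turbo/cli/internal/fs"
)

func main() {
	data := []byte(`{"globalDependencies":["$FOO","tsconfig.json"],"globalEnv":["BAR"],"pipeline":{}}`)
	var tj fs.TurboJSON
	// json.Unmarshal dispatches to TurboJSON's custom UnmarshalJSON below.
	if err := json.Unmarshal(data, &tj); err != nil {
		panic(err)
	}
	fmt.Println(tj.GlobalEnv)  // [BAR FOO] (sorted; $FOO was migrated, with a deprecation log)
	fmt.Println(tj.GlobalDeps) // [tsconfig.json]
}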
+
+// pristineTurboJSON is used when marshaling a TurboJSON object into a turbo.json string
+// Notably, it includes a PristinePipeline instead of the regular Pipeline. (i.e. TaskDefinition
+// instead of BookkeepingTaskDefinition.)
+type pristineTurboJSON struct {
+	GlobalDependencies   []string           `json:"globalDependencies,omitempty"`
+	GlobalEnv            []string           `json:"globalEnv,omitempty"`
+	GlobalPassthroughEnv []string           `json:"experimentalGlobalPassThroughEnv,omitempty"`
+	Pipeline             PristinePipeline   `json:"pipeline"`
+	RemoteCacheOptions   RemoteCacheOptions `json:"remoteCache,omitempty"`
+	Extends              []string           `json:"extends,omitempty"`
+}
+
+// TurboJSON represents a turbo.json configuration file
+type TurboJSON struct {
+	GlobalDeps           []string
+	GlobalEnv            []string
+	GlobalPassthroughEnv []string
+	Pipeline             Pipeline
+	RemoteCacheOptions   RemoteCacheOptions
+
+	// A list of Workspace names
+	Extends []string
+}
+
+// RemoteCacheOptions is a struct for deserializing .remoteCache of configFile
+type RemoteCacheOptions struct {
+	TeamID    string `json:"teamId,omitempty"`
+	Signature bool   `json:"signature,omitempty"`
+}
+
+// rawTaskWithDefaults exists to Marshal (i.e. turn a TaskDefinition into json).
+// We use this for printing ResolvedTaskConfiguration, because we _want_ to show
+// the user the default values for keys they have not configured.
+type rawTaskWithDefaults struct {
+	Outputs        []string            `json:"outputs"`
+	Cache          *bool               `json:"cache"`
+	DependsOn      []string            `json:"dependsOn"`
+	Inputs         []string            `json:"inputs"`
+	OutputMode     util.TaskOutputMode `json:"outputMode"`
+	PassthroughEnv []string            `json:"experimentalPassThroughEnv,omitempty"`
+	Env            []string            `json:"env"`
+	Persistent     bool                `json:"persistent"`
+}
+
+// rawTask exists to Unmarshal from json. When fields are omitted, we _want_
+// them to be missing, so that we can distinguish missing from empty value.
+type rawTask struct {
+	Outputs        []string             `json:"outputs,omitempty"`
+	Cache          *bool                `json:"cache,omitempty"`
+	DependsOn      []string             `json:"dependsOn,omitempty"`
+	Inputs         []string             `json:"inputs,omitempty"`
+	OutputMode     *util.TaskOutputMode `json:"outputMode,omitempty"`
+	Env            []string             `json:"env,omitempty"`
+	PassthroughEnv []string             `json:"experimentalPassthroughEnv,omitempty"`
+	Persistent     *bool                `json:"persistent,omitempty"`
+}
+
+// taskDefinitionHashable exists as a definition for PristinePipeline, which is used
+// downstream for calculating the global hash. We want to exclude experimental fields here
+// because we don't want experimental fields to be part of the global hash.
+type taskDefinitionHashable struct {
+	Outputs                 TaskOutputs
+	ShouldCache             bool
+	EnvVarDependencies      []string
+	TopologicalDependencies []string
+	TaskDependencies        []string
+	Inputs                  []string
+	OutputMode              util.TaskOutputMode
+	Persistent              bool
+}
+
+// taskDefinitionExperiments is a list of config fields in a task definition that are considered
+// experimental. We keep these separated so we can compute a global hash without these.
+type taskDefinitionExperiments struct {
+	PassthroughEnv []string
+}
+
+// PristinePipeline is a map of task names to TaskDefinition or taskDefinitionHashable.
+// Depending on whether any experimental fields are defined, we will use either struct.
+// The purpose is to omit experimental fields when making a pristine version, so that
+// it doesn't show up in --dry/--summarize output or affect the global hash.
+type PristinePipeline map[string]interface{}
+
+// Pipeline is a struct for deserializing .pipeline in configFile
+type Pipeline map[string]BookkeepingTaskDefinition
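The pointer fields in rawTask are what make the bookkeeping below possible: encoding/json leaves a pointer nil when a key is absent, which is distinguishable from an explicit zero value. A stripped-down illustration (the `probe` type is hypothetical and mirrors only two of rawTask's fields):

package main

import (
	"encoding/json"
	"fmt"
)

// probe distinguishes "absent" from "set to the zero value" via pointers.
type probe struct {
	Cache     *bool    `json:"cache,omitempty"`
	DependsOn []string `json:"dependsOn,omitempty"`
}

func main() {
	var a, b probe
	_ = json.Unmarshal([]byte(`{}`), &a)
	_ = json.Unmarshal([]byte(`{"cache":false}`), &b)
	fmt.Println(a.Cache == nil) // true: the user said nothing, so the default applies
	fmt.Println(*b.Cache)       // false: the user explicitly opted out of caching
}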
+
+// BookkeepingTaskDefinition holds the underlying TaskDefinition and some bookkeeping data
+// about the TaskDefinition. This wrapper struct allows us to leave TaskDefinition untouched.
+type BookkeepingTaskDefinition struct {
+	definedFields      util.Set
+	experimentalFields util.Set
+	experimental       taskDefinitionExperiments
+	TaskDefinition     taskDefinitionHashable
+}
+
+// TaskDefinition is a representation of the configFile pipeline for further computation.
+type TaskDefinition struct {
+	Outputs     TaskOutputs
+	ShouldCache bool
+
+	// This field is custom-marshalled from rawTask.Env and rawTask.DependsOn
+	EnvVarDependencies []string
+
+	// rawTask.PassthroughEnv
+	PassthroughEnv []string
+
+	// TopologicalDependencies are tasks from package dependencies.
+	// E.g. "build" is a topological dependency in:
+	// dependsOn: ['^build'].
+	// This field is custom-marshalled from rawTask.DependsOn
+	TopologicalDependencies []string
+
+	// TaskDependencies are anything that is not a topological dependency
+	// E.g. both something and //whatever are TaskDependencies in:
+	// dependsOn: ['something', '//whatever']
+	// This field is custom-marshalled from rawTask.DependsOn
+	TaskDependencies []string
+
+	// Inputs indicate the list of files this Task depends on. If any of those files change
+	// we can conclude that any cached outputs or logs for this Task should be invalidated.
+	Inputs []string
+
+	// OutputMode determines how we should log the output.
+	OutputMode util.TaskOutputMode
+
+	// Persistent indicates whether the Task is expected to exit or not
+	// Tasks marked Persistent do not exit (e.g. --watch mode or dev servers)
+	Persistent bool
+}
+
+// GetTask returns a BookkeepingTaskDefinition based on the ID (package#task format) or name (e.g. "build")
+func (pc Pipeline) GetTask(taskID string, taskName string) (*BookkeepingTaskDefinition, error) {
+	// first check for package-tasks
+	taskDefinition, ok := pc[taskID]
+	if !ok {
+		// then check for regular tasks
+		fallbackTaskDefinition, fallbackExists := pc[taskName]
+		// if neither exists, then bail
+		if !fallbackExists {
+			// Return an empty TaskDefinition
+			return nil, fmt.Errorf("Could not find task \"%s\" in pipeline", taskID)
+		}
+
+		// override if we need to...
+		taskDefinition = fallbackTaskDefinition
+	}
+
+	return &taskDefinition, nil
+}
+
+// LoadTurboConfig loads, or optionally synthesizes, a TurboJSON instance
+func LoadTurboConfig(dir turbopath.AbsoluteSystemPath, rootPackageJSON *PackageJSON, includeSynthesizedFromRootPackageJSON bool) (*TurboJSON, error) {
+	// If the root package.json still has a `turbo` key, log a warning and remove it.
+	if rootPackageJSON.LegacyTurboConfig != nil {
+		log.Printf("[WARNING] \"turbo\" in package.json is no longer supported. Migrate to %s by running \"npx @turbo/codemod create-turbo-config\"\n", configFile)
+		rootPackageJSON.LegacyTurboConfig = nil
+	}
+
+	var turboJSON *TurboJSON
+	turboFromFiles, err := readTurboConfig(dir.UntypedJoin(configFile))
+
+	if !includeSynthesizedFromRootPackageJSON && err != nil {
+		// If the file didn't exist, throw a custom error here instead of propagating
+		if errors.Is(err, os.ErrNotExist) {
+			return nil, errors.Wrap(err, fmt.Sprintf("Could not find %s. Follow directions at https://turbo.build/repo/docs to create one", configFile))
+
+		}
+
+		// There was an error, and we don't have any chance of recovering
+		// because we aren't synthesizing anything
+		return nil, err
+	} else if !includeSynthesizedFromRootPackageJSON {
+		// We're not synthesizing anything and there was no error, we're done
+		return turboFromFiles, nil
+	} else if errors.Is(err, os.ErrNotExist) {
+		// turbo.json doesn't exist, but we're going to try to synthesize something
+		turboJSON = &TurboJSON{
+			Pipeline: make(Pipeline),
+		}
+	} else if err != nil {
+		// some other error happened, we can't recover
+		return nil, err
+	} else {
+		// we're synthesizing, but we have a starting point
+		// Note: this will have to change to support task inference in a monorepo
+		// for now, we're going to error on any "root" tasks and turn non-root tasks into root tasks
+		pipeline := make(Pipeline)
+		for taskID, taskDefinition := range turboFromFiles.Pipeline {
+			if util.IsPackageTask(taskID) {
+				return nil, fmt.Errorf("Package tasks (<package>#<task>) are not allowed in single-package repositories: found %v", taskID)
+			}
+			pipeline[util.RootTaskID(taskID)] = taskDefinition
+		}
+		turboJSON = turboFromFiles
+		turboJSON.Pipeline = pipeline
+	}
+
+	for scriptName := range rootPackageJSON.Scripts {
+		if !turboJSON.Pipeline.HasTask(scriptName) {
+			taskName := util.RootTaskID(scriptName)
+			// Explicitly set ShouldCache to false in this definition and add the bookkeeping fields
+			// so downstream we can pretend that it was set on purpose (as if read from a config file)
+			// rather than defaulting to the 0-value of a boolean field.
+			turboJSON.Pipeline[taskName] = BookkeepingTaskDefinition{
+				definedFields: util.SetFromStrings([]string{"ShouldCache"}),
+				TaskDefinition: taskDefinitionHashable{
+					ShouldCache: false,
+				},
+			}
+		}
+	}
+	return turboJSON, nil
+}
+
+// TurboJSONValidation is the signature for a validation function passed to Validate()
+type TurboJSONValidation func(*TurboJSON) []error
+
+// Validate calls an array of validation functions on the TurboJSON struct.
+// The validations can be customized by the caller.
+func (tj *TurboJSON) Validate(validations []TurboJSONValidation) []error {
+	allErrors := []error{}
+	for _, validation := range validations {
+		errors := validation(tj)
+		allErrors = append(allErrors, errors...)
+	}
+
+	return allErrors
+}
+
+// TaskOutputs represents the patterns for including and excluding files from outputs
+type TaskOutputs struct {
+	Inclusions []string
+	Exclusions []string
+}
+
+// Sort returns a sorted copy of the task outputs
+func (to TaskOutputs) Sort() TaskOutputs {
+	inclusions := make([]string, len(to.Inclusions))
+	exclusions := make([]string, len(to.Exclusions))
+	copy(inclusions, to.Inclusions)
+	copy(exclusions, to.Exclusions)
+	sort.Strings(inclusions)
+	sort.Strings(exclusions)
+	return TaskOutputs{Inclusions: inclusions, Exclusions: exclusions}
+}
+
+// readTurboConfig reads turbo.json from a provided path
+func readTurboConfig(turboJSONPath turbopath.AbsoluteSystemPath) (*TurboJSON, error) {
+	// If the configFile exists, use that
+	if turboJSONPath.FileExists() {
+		turboJSON, err := readTurboJSON(turboJSONPath)
+		if err != nil {
+			return nil, fmt.Errorf("%s: %w", configFile, err)
+		}
+
+		return turboJSON, nil
+	}
+
+	// If there's no turbo.json, return an error.
+	return nil, os.ErrNotExist
+}
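Since Validate above deliberately takes its checks as arguments, callers can compose policy without touching this package. A hedged sketch of one such validation (the rule itself is invented for illustration):

package main

import (
	"fmt"

	"github.com/vercel/turbo/cli/internal/fs"
)

func main() {
	// A validation that rejects configs with no pipeline entries.
	nonEmpty := func(tj *fs.TurboJSON) []error {
		if len(tj.Pipeline) == 0 {
			return []error{fmt.Errorf("pipeline must define at least one task")}
		}
		return nil
	}
	tj := &fs.TurboJSON{Pipeline: fs.Pipeline{}}
	for _, err := range tj.Validate([]fs.TurboJSONValidation{nonEmpty}) {
		fmt.Println(err)
	}
}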
+
+// readTurboJSON reads the configFile into a struct
+func readTurboJSON(path turbopath.AbsoluteSystemPath) (*TurboJSON, error) {
+	file, err := path.Open()
+	if err != nil {
+		return nil, err
+	}
+	// Close the handle once we're done; the close error on a read-only file
+	// can be safely ignored.
+	defer func() { _ = file.Close() }()
+
+	var turboJSON *TurboJSON
+	data, err := ioutil.ReadAll(file)
+	if err != nil {
+		return nil, err
+	}
+
+	err = jsonc.Unmarshal(data, &turboJSON)
+
+	if err != nil {
+		return nil, err
+	}
+
+	return turboJSON, nil
+}
+
+// GetTaskDefinition returns a TaskDefinition from a serialized definition in configFile
+func (pc Pipeline) GetTaskDefinition(taskID string) (TaskDefinition, bool) {
+	if entry, ok := pc[taskID]; ok {
+		return entry.GetTaskDefinition(), true
+	}
+	_, task := util.GetPackageTaskFromId(taskID)
+	entry, ok := pc[task]
+	return entry.GetTaskDefinition(), ok
+}
+
+// HasTask returns true if the given task is defined in the pipeline, either directly or
+// via a package task (`pkg#task`)
+func (pc Pipeline) HasTask(task string) bool {
+	for key := range pc {
+		if key == task {
+			return true
+		}
+		if util.IsPackageTask(key) {
+			_, taskName := util.GetPackageTaskFromId(key)
+			if taskName == task {
+				return true
+			}
+		}
+	}
+	return false
+}
+
+// Pristine returns a PristinePipeline; this is used for printing to console and pruning
+func (pc Pipeline) Pristine() PristinePipeline {
+	pristine := PristinePipeline{}
+	for taskName, taskDef := range pc {
+		// If there are any experimental fields, we will include them with 0-values
+		// if there aren't, we will omit them entirely
+		if taskDef.hasExperimentalFields() {
+			pristine[taskName] = taskDef.GetTaskDefinition() // merges experimental fields in
+		} else {
+			pristine[taskName] = taskDef.TaskDefinition // has no experimental fields
+		}
+	}
+	return pristine
+}
+
+// hasField checks the internal bookkeeping definedFields field to
+// see whether a field was actually in the underlying turbo.json
+// or whether it was initialized with its 0-value.
+func (btd BookkeepingTaskDefinition) hasField(fieldName string) bool {
+	return btd.definedFields.Includes(fieldName) || btd.experimentalFields.Includes(fieldName)
+}
+
+// hasExperimentalFields keeps track of whether any experimental fields were found
+func (btd BookkeepingTaskDefinition) hasExperimentalFields() bool {
+	return len(btd.experimentalFields) > 0
+}
+
+// GetTaskDefinition gets a TaskDefinition by merging the experimental and non-experimental fields
+// into a single representation to use downstream.
+func (btd BookkeepingTaskDefinition) GetTaskDefinition() TaskDefinition {
+	return TaskDefinition{
+		Outputs:                 btd.TaskDefinition.Outputs,
+		ShouldCache:             btd.TaskDefinition.ShouldCache,
+		EnvVarDependencies:      btd.TaskDefinition.EnvVarDependencies,
+		TopologicalDependencies: btd.TaskDefinition.TopologicalDependencies,
+		TaskDependencies:        btd.TaskDefinition.TaskDependencies,
+		Inputs:                  btd.TaskDefinition.Inputs,
+		OutputMode:              btd.TaskDefinition.OutputMode,
+		Persistent:              btd.TaskDefinition.Persistent,
+		// From experimental fields
+		PassthroughEnv: btd.experimental.PassthroughEnv,
+	}
+}
+
+// MergeTaskDefinitions accepts an array of BookkeepingTaskDefinitions and merges them into
+// a single TaskDefinition. It uses the bookkeeping definedFields to determine which fields should
+// be overwritten and when 0-values should be respected.
+func MergeTaskDefinitions(taskDefinitions []BookkeepingTaskDefinition) (*TaskDefinition, error) {
+	// Start with an empty definition
+	mergedTaskDefinition := &TaskDefinition{}
+
+	// Set the default, because the 0-value will be false, and if no turbo.jsons had
+	// this field set for this task, we want it to be true.
+	mergedTaskDefinition.ShouldCache = true
+
+	// For each of the TaskDefinitions we know of, merge them in
+	for _, bookkeepingTaskDef := range taskDefinitions {
+		taskDef := bookkeepingTaskDef.GetTaskDefinition()
+
+		if bookkeepingTaskDef.hasField("Outputs") {
+			mergedTaskDefinition.Outputs = taskDef.Outputs
+		}
+
+		if bookkeepingTaskDef.hasField("ShouldCache") {
+			mergedTaskDefinition.ShouldCache = taskDef.ShouldCache
+		}
+
+		if bookkeepingTaskDef.hasField("EnvVarDependencies") {
+			mergedTaskDefinition.EnvVarDependencies = taskDef.EnvVarDependencies
+		}
+
+		if bookkeepingTaskDef.hasField("PassthroughEnv") {
+			mergedTaskDefinition.PassthroughEnv = taskDef.PassthroughEnv
+		}
+
+		if bookkeepingTaskDef.hasField("DependsOn") {
+			mergedTaskDefinition.TopologicalDependencies = taskDef.TopologicalDependencies
+			mergedTaskDefinition.TaskDependencies = taskDef.TaskDependencies
+		}
+
+		if bookkeepingTaskDef.hasField("Inputs") {
+			mergedTaskDefinition.Inputs = taskDef.Inputs
+		}
+
+		if bookkeepingTaskDef.hasField("OutputMode") {
+			mergedTaskDefinition.OutputMode = taskDef.OutputMode
+		}
+		if bookkeepingTaskDef.hasField("Persistent") {
+			mergedTaskDefinition.Persistent = taskDef.Persistent
+		}
+	}
+
+	return mergedTaskDefinition, nil
+}
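For a sense of the merge semantics: later definitions win only for fields they explicitly set, and ShouldCache defaults to true when nobody set it. An illustrative sketch (the two JSON fragments are invented; error handling on Unmarshal is trimmed):

package main

import (
	"encoding/json"
	"fmt"

	"github.com/vercel/turbo/cli/internal/fs"
)

func main() {
	// A base definition and an override, as they would appear in turbo.json.
	var base, override fs.BookkeepingTaskDefinition
	_ = json.Unmarshal([]byte(`{"outputs":["dist/**"]}`), &base)
	_ = json.Unmarshal([]byte(`{"cache":false}`), &override)

	merged, err := fs.MergeTaskDefinitions([]fs.BookkeepingTaskDefinition{base, override})
	if err != nil {
		panic(err)
	}
	fmt.Println(merged.Outputs.Inclusions) // [dist/**]: only base set Outputs
	fmt.Println(merged.ShouldCache)        // false: override set it explicitly
}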
+
+// UnmarshalJSON deserializes a single task definition from
+// turbo.json into a TaskDefinition struct
+func (btd *BookkeepingTaskDefinition) UnmarshalJSON(data []byte) error {
+	task := rawTask{}
+	if err := json.Unmarshal(data, &task); err != nil {
+		return err
+	}
+
+	btd.definedFields = util.Set{}
+	btd.experimentalFields = util.Set{}
+
+	if task.Outputs != nil {
+		var inclusions []string
+		var exclusions []string
+		// Assign a bookkeeping field so we know that there really were
+		// outputs configured in the underlying config file.
+		btd.definedFields.Add("Outputs")
+
+		for _, glob := range task.Outputs {
+			if strings.HasPrefix(glob, "!") {
+				if filepath.IsAbs(glob[1:]) {
+					log.Printf("[WARNING] Using an absolute path in \"outputs\" (%v) will not work and will be an error in a future version", glob)
+				}
+				exclusions = append(exclusions, glob[1:])
+			} else {
+				if filepath.IsAbs(glob) {
+					log.Printf("[WARNING] Using an absolute path in \"outputs\" (%v) will not work and will be an error in a future version", glob)
+				}
+				inclusions = append(inclusions, glob)
+			}
+		}
+
+		btd.TaskDefinition.Outputs = TaskOutputs{
+			Inclusions: inclusions,
+			Exclusions: exclusions,
+		}
+
+		sort.Strings(btd.TaskDefinition.Outputs.Inclusions)
+		sort.Strings(btd.TaskDefinition.Outputs.Exclusions)
+	}
+
+	if task.Cache == nil {
+		btd.TaskDefinition.ShouldCache = true
+	} else {
+		btd.definedFields.Add("ShouldCache")
+		btd.TaskDefinition.ShouldCache = *task.Cache
+	}
+
+	envVarDependencies := make(util.Set)
+	envVarPassthroughs := make(util.Set)
+
+	btd.TaskDefinition.TopologicalDependencies = []string{} // TODO @mehulkar: this should be a set
+	btd.TaskDefinition.TaskDependencies = []string{}        // TODO @mehulkar: this should be a set
+
+	// If there was a dependsOn field, add the bookkeeping
+	// we don't care what's in the field, just that it was there
+	// We'll use this marker to overwrite while merging TaskDefinitions.
+	if task.DependsOn != nil {
+		btd.definedFields.Add("DependsOn")
+	}
+
+	for _, dependency := range task.DependsOn {
+		if strings.HasPrefix(dependency, envPipelineDelimiter) {
+			log.Printf("[DEPRECATED] Declaring an environment variable in \"dependsOn\" is deprecated, found %s. Use the \"env\" key or use `npx @turbo/codemod migrate-env-var-dependencies`.\n", dependency)
+			envVarDependencies.Add(strings.TrimPrefix(dependency, envPipelineDelimiter))
+		} else if strings.HasPrefix(dependency, topologicalPipelineDelimiter) {
+			btd.TaskDefinition.TopologicalDependencies = append(btd.TaskDefinition.TopologicalDependencies, strings.TrimPrefix(dependency, topologicalPipelineDelimiter))
+		} else {
+			btd.TaskDefinition.TaskDependencies = append(btd.TaskDefinition.TaskDependencies, dependency)
+		}
+	}
+
+	sort.Strings(btd.TaskDefinition.TaskDependencies)
+	sort.Strings(btd.TaskDefinition.TopologicalDependencies)
+
+	// Append env key into EnvVarDependencies
+	if task.Env != nil {
+		btd.definedFields.Add("EnvVarDependencies")
+		if err := gatherEnvVars(task.Env, "env", &envVarDependencies); err != nil {
+			return err
+		}
+	}
+
+	btd.TaskDefinition.EnvVarDependencies = envVarDependencies.UnsafeListOfStrings()
+
+	sort.Strings(btd.TaskDefinition.EnvVarDependencies)
+
+	if task.PassthroughEnv != nil {
+		btd.experimentalFields.Add("PassthroughEnv")
+		if err := gatherEnvVars(task.PassthroughEnv, "passthroughEnv", &envVarPassthroughs); err != nil {
+			return err
+		}
+	}
+
+	btd.experimental.PassthroughEnv = envVarPassthroughs.UnsafeListOfStrings()
+	sort.Strings(btd.experimental.PassthroughEnv)
+
+	if task.Inputs != nil {
+		// Note that we don't require Inputs to be sorted, we're going to
+		// hash the resulting files and sort that instead
+		btd.definedFields.Add("Inputs")
+		// TODO: during rust port, this should be moved to a post-parse validation step
+		for _, input := range task.Inputs {
+			if filepath.IsAbs(input) {
+				log.Printf("[WARNING] Using an absolute path in \"inputs\" (%v) will not work and will be an error in a future version", input)
+			}
+		}
btd.TaskDefinition.Inputs = task.Inputs + } + + if task.OutputMode != nil { + btd.definedFields.Add("OutputMode") + btd.TaskDefinition.OutputMode = *task.OutputMode + } + + if task.Persistent != nil { + btd.definedFields.Add("Persistent") + btd.TaskDefinition.Persistent = *task.Persistent + } else { + btd.TaskDefinition.Persistent = false + } + return nil +} + +// MarshalJSON serializes taskDefinitionHashable struct into json +func (c taskDefinitionHashable) MarshalJSON() ([]byte, error) { + task := makeRawTask( + c.Persistent, + c.ShouldCache, + c.OutputMode, + c.Inputs, + c.Outputs, + c.EnvVarDependencies, + c.TaskDependencies, + c.TopologicalDependencies, + ) + return json.Marshal(task) +} + +// MarshalJSON serializes TaskDefinition struct into json +func (c TaskDefinition) MarshalJSON() ([]byte, error) { + task := makeRawTask( + c.Persistent, + c.ShouldCache, + c.OutputMode, + c.Inputs, + c.Outputs, + c.EnvVarDependencies, + c.TaskDependencies, + c.TopologicalDependencies, + ) + + if len(c.PassthroughEnv) > 0 { + task.PassthroughEnv = append(task.PassthroughEnv, c.PassthroughEnv...) + } + sort.Strings(task.PassthroughEnv) + + return json.Marshal(task) +} + +// UnmarshalJSON deserializes the contents of turbo.json into a TurboJSON struct +func (c *TurboJSON) UnmarshalJSON(data []byte) error { + raw := &rawTurboJSON{} + if err := json.Unmarshal(data, &raw); err != nil { + return err + } + + envVarDependencies := make(util.Set) + envVarPassthroughs := make(util.Set) + globalFileDependencies := make(util.Set) + + if err := gatherEnvVars(raw.GlobalEnv, "globalEnv", &envVarDependencies); err != nil { + return err + } + if err := gatherEnvVars(raw.GlobalPassthroughEnv, "experimentalGlobalPassThroughEnv", &envVarPassthroughs); err != nil { + return err + } + + // TODO: In the rust port, warnings should be refactored to a post-parse validation step + for _, value := range raw.GlobalDependencies { + if strings.HasPrefix(value, envPipelineDelimiter) { + log.Printf("[DEPRECATED] Declaring an environment variable in \"globalDependencies\" is deprecated, found %s. Use the \"globalEnv\" key or use `npx @turbo/codemod migrate-env-var-dependencies`.\n", value) + envVarDependencies.Add(strings.TrimPrefix(value, envPipelineDelimiter)) + } else { + if filepath.IsAbs(value) { + log.Printf("[WARNING] Using an absolute path in \"globalDependencies\" (%v) will not work and will be an error in a future version", value) + } + globalFileDependencies.Add(value) + } + } + + // turn the set into an array and assign to the TurboJSON struct fields. + c.GlobalEnv = envVarDependencies.UnsafeListOfStrings() + sort.Strings(c.GlobalEnv) + + if raw.GlobalPassthroughEnv != nil { + c.GlobalPassthroughEnv = envVarPassthroughs.UnsafeListOfStrings() + sort.Strings(c.GlobalPassthroughEnv) + } + + c.GlobalDeps = globalFileDependencies.UnsafeListOfStrings() + sort.Strings(c.GlobalDeps) + + // copy these over, we don't need any changes here. + c.Pipeline = raw.Pipeline + c.RemoteCacheOptions = raw.RemoteCacheOptions + c.Extends = raw.Extends + + return nil +} + +// MarshalJSON converts a TurboJSON into the equivalent json object in bytes +// note: we go via rawTurboJSON so that the output format is correct. 
+// This is used by `turbo prune` to generate a pruned turbo.json +// and also by --summarize & --dry=json to serialize the known config +// into something we can print to screen +func (c *TurboJSON) MarshalJSON() ([]byte, error) { + raw := pristineTurboJSON{} + raw.GlobalDependencies = c.GlobalDeps + raw.GlobalEnv = c.GlobalEnv + raw.GlobalPassthroughEnv = c.GlobalPassthroughEnv + raw.Pipeline = c.Pipeline.Pristine() + raw.RemoteCacheOptions = c.RemoteCacheOptions + + return json.Marshal(&raw) +} + +func makeRawTask(persistent bool, shouldCache bool, outputMode util.TaskOutputMode, inputs []string, outputs TaskOutputs, envVarDependencies []string, taskDependencies []string, topologicalDependencies []string) *rawTaskWithDefaults { + // Initialize with empty arrays, so we get empty arrays serialized into JSON + task := &rawTaskWithDefaults{ + Outputs: []string{}, + Inputs: []string{}, + Env: []string{}, + PassthroughEnv: []string{}, + DependsOn: []string{}, + } + + task.Persistent = persistent + task.Cache = &shouldCache + task.OutputMode = outputMode + + if len(inputs) > 0 { + task.Inputs = inputs + } + + if len(envVarDependencies) > 0 { + task.Env = append(task.Env, envVarDependencies...) + } + + if len(outputs.Inclusions) > 0 { + task.Outputs = append(task.Outputs, outputs.Inclusions...) + } + + for _, i := range outputs.Exclusions { + task.Outputs = append(task.Outputs, "!"+i) + } + + if len(taskDependencies) > 0 { + task.DependsOn = append(task.DependsOn, taskDependencies...) + } + + for _, i := range topologicalDependencies { + task.DependsOn = append(task.DependsOn, "^"+i) + } + + // These _should_ already be sorted when the TaskDefinition struct was unmarshaled, + // but we want to ensure they're sorted on the way out also, just in case something + // in the middle mutates the items. + sort.Strings(task.DependsOn) + sort.Strings(task.Outputs) + sort.Strings(task.Env) + sort.Strings(task.Inputs) + return task +} + +// gatherEnvVars puts env vars into the provided set as long as they don't have an invalid value. +func gatherEnvVars(vars []string, key string, into *util.Set) error { + for _, value := range vars { + if strings.HasPrefix(value, envPipelineDelimiter) { + // Hard error to help people specify this correctly during migration. + // TODO: Remove this error after we have run summary. + return fmt.Errorf("You specified \"%s\" in the \"%s\" key. 
You should not prefix your environment variables with \"%s\"", value, key, envPipelineDelimiter) + } + + into.Add(value) + } + + return nil +} diff --git a/cli/internal/fs/turbo_json_test.go b/cli/internal/fs/turbo_json_test.go new file mode 100644 index 0000000..1d384d5 --- /dev/null +++ b/cli/internal/fs/turbo_json_test.go @@ -0,0 +1,277 @@ +package fs + +import ( + "os" + "reflect" + "sort" + "strings" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/vercel/turbo/cli/internal/turbopath" + "github.com/vercel/turbo/cli/internal/util" + "gotest.tools/v3/assert/cmp" +) + +func assertIsSorted(t *testing.T, arr []string, msg string) { + t.Helper() + if arr == nil { + return + } + + copied := make([]string, len(arr)) + copy(copied, arr) + sort.Strings(copied) + if !reflect.DeepEqual(arr, copied) { + t.Errorf("Expected sorted, got %v: %v", arr, msg) + } +} + +func Test_ReadTurboConfig(t *testing.T) { + testDir := getTestDir(t, "correct") + turboJSON, turboJSONReadErr := readTurboConfig(testDir.UntypedJoin("turbo.json")) + + if turboJSONReadErr != nil { + t.Fatalf("invalid parse: %#v", turboJSONReadErr) + } + + assert.EqualValues(t, []string{"AWS_SECRET_KEY"}, turboJSON.GlobalPassthroughEnv) + + pipelineExpected := map[string]BookkeepingTaskDefinition{ + "build": { + definedFields: util.SetFromStrings([]string{"Outputs", "OutputMode", "DependsOn"}), + experimentalFields: util.SetFromStrings([]string{"PassthroughEnv"}), + experimental: taskDefinitionExperiments{ + PassthroughEnv: []string{"GITHUB_TOKEN"}, + }, + TaskDefinition: taskDefinitionHashable{ + Outputs: TaskOutputs{Inclusions: []string{".next/**", "dist/**"}, Exclusions: []string{"dist/assets/**"}}, + TopologicalDependencies: []string{"build"}, + EnvVarDependencies: []string{}, + TaskDependencies: []string{}, + ShouldCache: true, + OutputMode: util.NewTaskOutput, + }, + }, + "lint": { + definedFields: util.SetFromStrings([]string{"Outputs", "OutputMode", "ShouldCache", "DependsOn"}), + experimentalFields: util.SetFromStrings([]string{}), + experimental: taskDefinitionExperiments{ + PassthroughEnv: []string{}, + }, + TaskDefinition: taskDefinitionHashable{ + Outputs: TaskOutputs{}, + TopologicalDependencies: []string{}, + EnvVarDependencies: []string{"MY_VAR"}, + TaskDependencies: []string{}, + ShouldCache: true, + OutputMode: util.NewTaskOutput, + }, + }, + "dev": { + definedFields: util.SetFromStrings([]string{"OutputMode", "ShouldCache"}), + experimentalFields: util.SetFromStrings([]string{}), + experimental: taskDefinitionExperiments{ + PassthroughEnv: []string{}, + }, + TaskDefinition: taskDefinitionHashable{ + Outputs: TaskOutputs{}, + TopologicalDependencies: []string{}, + EnvVarDependencies: []string{}, + TaskDependencies: []string{}, + ShouldCache: false, + OutputMode: util.FullTaskOutput, + }, + }, + "publish": { + definedFields: util.SetFromStrings([]string{"Inputs", "Outputs", "DependsOn", "ShouldCache"}), + experimentalFields: util.SetFromStrings([]string{}), + experimental: taskDefinitionExperiments{ + PassthroughEnv: []string{}, + }, + TaskDefinition: taskDefinitionHashable{ + Outputs: TaskOutputs{Inclusions: []string{"dist/**"}}, + TopologicalDependencies: []string{"build", "publish"}, + EnvVarDependencies: []string{}, + TaskDependencies: []string{"admin#lint", "build"}, + ShouldCache: false, + Inputs: []string{"build/**/*"}, + OutputMode: util.FullTaskOutput, + }, + }, + } + + validateOutput(t, turboJSON, pipelineExpected) + remoteCacheOptionsExpected := RemoteCacheOptions{"team_id", true} + 
assert.EqualValues(t, remoteCacheOptionsExpected, turboJSON.RemoteCacheOptions) +} + +func Test_LoadTurboConfig_Legacy(t *testing.T) { + testDir := getTestDir(t, "legacy-only") + packageJSONPath := testDir.UntypedJoin("package.json") + rootPackageJSON, pkgJSONReadErr := ReadPackageJSON(packageJSONPath) + + if pkgJSONReadErr != nil { + t.Fatalf("invalid parse: %#v", pkgJSONReadErr) + } + + _, turboJSONReadErr := LoadTurboConfig(testDir, rootPackageJSON, false) + expectedErrorMsg := "Could not find turbo.json. Follow directions at https://turbo.build/repo/docs to create one: file does not exist" + assert.EqualErrorf(t, turboJSONReadErr, expectedErrorMsg, "Error should be: %v, got: %v", expectedErrorMsg, turboJSONReadErr) +} + +func Test_LoadTurboConfig_BothCorrectAndLegacy(t *testing.T) { + testDir := getTestDir(t, "both") + + packageJSONPath := testDir.UntypedJoin("package.json") + rootPackageJSON, pkgJSONReadErr := ReadPackageJSON(packageJSONPath) + + if pkgJSONReadErr != nil { + t.Fatalf("invalid parse: %#v", pkgJSONReadErr) + } + + turboJSON, turboJSONReadErr := LoadTurboConfig(testDir, rootPackageJSON, false) + + if turboJSONReadErr != nil { + t.Fatalf("invalid parse: %#v", turboJSONReadErr) + } + + pipelineExpected := map[string]BookkeepingTaskDefinition{ + "build": { + definedFields: util.SetFromStrings([]string{"Outputs", "OutputMode", "DependsOn"}), + experimentalFields: util.SetFromStrings([]string{}), + experimental: taskDefinitionExperiments{ + PassthroughEnv: []string{}, + }, + TaskDefinition: taskDefinitionHashable{ + Outputs: TaskOutputs{Inclusions: []string{".next/**", "dist/**"}, Exclusions: []string{"dist/assets/**"}}, + TopologicalDependencies: []string{"build"}, + EnvVarDependencies: []string{}, + TaskDependencies: []string{}, + ShouldCache: true, + OutputMode: util.NewTaskOutput, + }, + }, + } + + validateOutput(t, turboJSON, pipelineExpected) + + remoteCacheOptionsExpected := RemoteCacheOptions{"team_id", true} + assert.EqualValues(t, remoteCacheOptionsExpected, turboJSON.RemoteCacheOptions) + assert.Equal(t, rootPackageJSON.LegacyTurboConfig == nil, true) +} + +func Test_ReadTurboConfig_InvalidEnvDeclarations1(t *testing.T) { + testDir := getTestDir(t, "invalid-env-1") + _, turboJSONReadErr := readTurboConfig(testDir.UntypedJoin("turbo.json")) + + expectedErrorMsg := "turbo.json: You specified \"$A\" in the \"env\" key. You should not prefix your environment variables with \"$\"" + assert.EqualErrorf(t, turboJSONReadErr, expectedErrorMsg, "Error should be: %v, got: %v", expectedErrorMsg, turboJSONReadErr) +} + +func Test_ReadTurboConfig_InvalidEnvDeclarations2(t *testing.T) { + testDir := getTestDir(t, "invalid-env-2") + _, turboJSONReadErr := readTurboConfig(testDir.UntypedJoin("turbo.json")) + expectedErrorMsg := "turbo.json: You specified \"$A\" in the \"env\" key. You should not prefix your environment variables with \"$\"" + assert.EqualErrorf(t, turboJSONReadErr, expectedErrorMsg, "Error should be: %v, got: %v", expectedErrorMsg, turboJSONReadErr) +} + +func Test_ReadTurboConfig_InvalidGlobalEnvDeclarations(t *testing.T) { + testDir := getTestDir(t, "invalid-global-env") + _, turboJSONReadErr := readTurboConfig(testDir.UntypedJoin("turbo.json")) + expectedErrorMsg := "turbo.json: You specified \"$QUX\" in the \"globalEnv\" key. 
You should not prefix your environment variables with \"$\"" + assert.EqualErrorf(t, turboJSONReadErr, expectedErrorMsg, "Error should be: %v, got: %v", expectedErrorMsg, turboJSONReadErr) +} + +func Test_ReadTurboConfig_EnvDeclarations(t *testing.T) { + testDir := getTestDir(t, "legacy-env") + turboJSON, turboJSONReadErr := readTurboConfig(testDir.UntypedJoin("turbo.json")) + + if turboJSONReadErr != nil { + t.Fatalf("invalid parse: %#v", turboJSONReadErr) + } + + pipeline := turboJSON.Pipeline + assert.EqualValues(t, pipeline["task1"].TaskDefinition.EnvVarDependencies, sortedArray([]string{"A"})) + assert.EqualValues(t, pipeline["task2"].TaskDefinition.EnvVarDependencies, sortedArray([]string{"A"})) + assert.EqualValues(t, pipeline["task3"].TaskDefinition.EnvVarDependencies, sortedArray([]string{"A"})) + assert.EqualValues(t, pipeline["task4"].TaskDefinition.EnvVarDependencies, sortedArray([]string{"A", "B"})) + assert.EqualValues(t, pipeline["task6"].TaskDefinition.EnvVarDependencies, sortedArray([]string{"A", "B", "C", "D", "E", "F"})) + assert.EqualValues(t, pipeline["task7"].TaskDefinition.EnvVarDependencies, sortedArray([]string{"A", "B", "C"})) + assert.EqualValues(t, pipeline["task8"].TaskDefinition.EnvVarDependencies, sortedArray([]string{"A", "B", "C"})) + assert.EqualValues(t, pipeline["task9"].TaskDefinition.EnvVarDependencies, sortedArray([]string{"A"})) + assert.EqualValues(t, pipeline["task10"].TaskDefinition.EnvVarDependencies, sortedArray([]string{"A"})) + assert.EqualValues(t, pipeline["task11"].TaskDefinition.EnvVarDependencies, sortedArray([]string{"A", "B"})) + + // check global env vars also + assert.EqualValues(t, sortedArray([]string{"FOO", "BAR", "BAZ", "QUX"}), sortedArray(turboJSON.GlobalEnv)) + assert.EqualValues(t, sortedArray([]string{"somefile.txt"}), sortedArray(turboJSON.GlobalDeps)) +} + +func Test_TaskOutputsSort(t *testing.T) { + inclusions := []string{"foo/**", "bar"} + exclusions := []string{"special-file", ".hidden/**"} + taskOutputs := TaskOutputs{Inclusions: inclusions, Exclusions: exclusions} + sortedOutputs := taskOutputs.Sort() + assertIsSorted(t, sortedOutputs.Inclusions, "Inclusions") + assertIsSorted(t, sortedOutputs.Exclusions, "Exclusions") + assert.False(t, cmp.DeepEqual(taskOutputs, sortedOutputs)().Success()) +} + +// Helpers +func validateOutput(t *testing.T, turboJSON *TurboJSON, expectedPipeline Pipeline) { + t.Helper() + assertIsSorted(t, turboJSON.GlobalDeps, "Global Deps") + assertIsSorted(t, turboJSON.GlobalEnv, "Global Env") + validatePipeline(t, turboJSON.Pipeline, expectedPipeline) +} + +func validatePipeline(t *testing.T, actual Pipeline, expected Pipeline) { + t.Helper() + // check top level keys + if len(actual) != len(expected) { + expectedKeys := []string{} + for k := range expected { + expectedKeys = append(expectedKeys, k) + } + actualKeys := []string{} + for k := range actual { + actualKeys = append(actualKeys, k) + } + t.Errorf("pipeline tasks mismatch. 
got %v, want %v", strings.Join(actualKeys, ","), strings.Join(expectedKeys, ","))
+	}
+
+	// check individual task definitions
+	for taskName, expectedTaskDefinition := range expected {
+		bookkeepingTaskDef, ok := actual[taskName]
+		if !ok {
+			t.Errorf("missing expected task: %v", taskName)
+		}
+		actualTaskDefinition := bookkeepingTaskDef.GetTaskDefinition()
+		assertIsSorted(t, actualTaskDefinition.Outputs.Inclusions, "Task output inclusions")
+		assertIsSorted(t, actualTaskDefinition.Outputs.Exclusions, "Task output exclusions")
+		assertIsSorted(t, actualTaskDefinition.EnvVarDependencies, "Task env vars")
+		assertIsSorted(t, actualTaskDefinition.PassthroughEnv, "Task passthrough env vars")
+		assertIsSorted(t, actualTaskDefinition.TopologicalDependencies, "Topo deps")
+		assertIsSorted(t, actualTaskDefinition.TaskDependencies, "Task deps")
+		assert.EqualValuesf(t, expectedTaskDefinition, bookkeepingTaskDef, "task definition mismatch for %v", taskName)
+	}
+}
+
+func getTestDir(t *testing.T, testName string) turbopath.AbsoluteSystemPath {
+	defaultCwd, err := os.Getwd()
+	if err != nil {
+		t.Errorf("failed to get cwd: %v", err)
+	}
+	cwd, err := CheckedToAbsoluteSystemPath(defaultCwd)
+	if err != nil {
+		t.Fatalf("cwd is not an absolute directory %v: %v", defaultCwd, err)
+	}
+
+	return cwd.UntypedJoin("testdata", testName)
+}
+
+func sortedArray(arr []string) []string {
+	sort.Strings(arr)
+	return arr
+}
diff --git a/cli/internal/globby/globby.go b/cli/internal/globby/globby.go
new file mode 100644
index 0000000..14c40d9
--- /dev/null
+++ b/cli/internal/globby/globby.go
@@ -0,0 +1,187 @@
+package globby
+
+import (
+	"fmt"
+	"path/filepath"
+	"sort"
+	"strings"
+
+	iofs "io/fs"
+
+	"github.com/vercel/turbo/cli/internal/fs"
+
+	"github.com/vercel/turbo/cli/internal/doublestar"
+	"github.com/vercel/turbo/cli/internal/util"
+)
+
+// GlobAll returns an array of files and folders that match the specified set of glob patterns.
+// The returned files and folders are absolute paths, assuming that basePath is an absolute path.
+func GlobAll(basePath string, includePatterns []string, excludePatterns []string) ([]string, error) {
+	fsys := fs.CreateDirFSAtRoot(basePath)
+	fsysRoot := fs.GetDirFSRootPath(fsys)
+	output, err := globAllFs(fsys, fsysRoot, basePath, includePatterns, excludePatterns)
+
+	// Because this is coming out of a map, the output is in no way ordered.
+	// Sorting will put the files in a depth-first order.
+	sort.Strings(output)
+	return output, err
+}
+
+// GlobFiles returns an array of files that match the specified set of glob patterns.
+// The returned files are absolute paths, assuming that basePath is an absolute path.
+func GlobFiles(basePath string, includePatterns []string, excludePatterns []string) ([]string, error) {
+	fsys := fs.CreateDirFSAtRoot(basePath)
+	fsysRoot := fs.GetDirFSRootPath(fsys)
+	output, err := globFilesFs(fsys, fsysRoot, basePath, includePatterns, excludePatterns)
+
+	// Because this is coming out of a map, the output is in no way ordered.
+	// Sorting will put the files in a depth-first order.
+	sort.Strings(output)
+	return output, err
+}
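A usage sketch for these two entry points (the paths and patterns are invented for illustration; basePath must be absolute):

package main

import (
	"fmt"

	"github.com/vercel/turbo/cli/internal/globby"
)

func main() {
	// Collect build artifacts while skipping node_modules, roughly the way
	// task outputs are gathered.
	files, err := globby.GlobFiles("/repos/some-app", []string{"dist/**"}, []string{"**/node_modules/**"})
	if err != nil {
		panic(err)
	}
	for _, f := range files {
		fmt.Println(f) // absolute paths, sorted depth-first
	}
}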
+
+// checkRelativePath ensures that the requested file path is a child of `from`.
+func checkRelativePath(from string, to string) error {
+	relativePath, err := filepath.Rel(from, to)
+
+	if err != nil {
+		return err
+	}
+
+	if strings.HasPrefix(relativePath, "..") {
+		return fmt.Errorf("the path you are attempting to specify (%s) is outside of the root", to)
+	}
+
+	return nil
+}
+
+// globFilesFs searches the specified file system to enumerate all files to include.
+func globFilesFs(fsys iofs.FS, fsysRoot string, basePath string, includePatterns []string, excludePatterns []string) ([]string, error) {
+	return globWalkFs(fsys, fsysRoot, basePath, includePatterns, excludePatterns, false)
+}
+
+// globAllFs searches the specified file system to enumerate all files and folders to include.
+func globAllFs(fsys iofs.FS, fsysRoot string, basePath string, includePatterns []string, excludePatterns []string) ([]string, error) {
+	return globWalkFs(fsys, fsysRoot, basePath, includePatterns, excludePatterns, true)
+}
+
+// globWalkFs searches the specified file system to enumerate all files and folders to include.
+func globWalkFs(fsys iofs.FS, fsysRoot string, basePath string, includePatterns []string, excludePatterns []string, includeDirs bool) ([]string, error) {
+	var processedIncludes []string
+	var processedExcludes []string
+	result := make(util.Set)
+
+	for _, includePattern := range includePatterns {
+		includePath := filepath.Join(basePath, includePattern)
+		err := checkRelativePath(basePath, includePath)
+
+		if err != nil {
+			return nil, err
+		}
+
+		// fs.FS paths may not include leading separators. Calculate the
+		// correct path for this relative to the filesystem root.
+		// This will not error as it follows the call to checkRelativePath.
+		iofsRelativePath, _ := fs.IofsRelativePath(fsysRoot, includePath)
+
+		// Includes only operate on files.
+		processedIncludes = append(processedIncludes, iofsRelativePath)
+	}
+
+	for _, excludePattern := range excludePatterns {
+		excludePath := filepath.Join(basePath, excludePattern)
+		err := checkRelativePath(basePath, excludePath)
+
+		if err != nil {
+			return nil, err
+		}
+
+		// fs.FS paths may not include leading separators. Calculate the
+		// correct path for this relative to the filesystem root.
+		// This will not error as it follows the call to checkRelativePath.
+		iofsRelativePath, _ := fs.IofsRelativePath(fsysRoot, excludePath)
+
+		// In case this is a file pattern and not a directory, add the exact pattern.
+		// In the event that the user has already specified /**, adding the exact
+		// pattern as well would be redundant, so we skip it.
+		if !strings.HasSuffix(iofsRelativePath, string(filepath.Separator)+"**") {
+			processedExcludes = append(processedExcludes, iofsRelativePath)
+		}
+		// TODO: we need to either document or change this behavior
+		// Excludes operate on entire folders, so we also exclude everything under this in case it represents a directory
+		processedExcludes = append(processedExcludes, filepath.Join(iofsRelativePath, "**"))
+	}
+
+	// We start from a naive includePattern
+	includePattern := ""
+	includeCount := len(processedIncludes)
+
+	// Do not use alternation if unnecessary.
+	if includeCount == 1 {
+		includePattern = processedIncludes[0]
+	} else if includeCount > 1 {
+		// We use alternation from the very root of the path. This avoids fs.Stat of the basePath.
+		includePattern = "{" + strings.Join(processedIncludes, ",") + "}"
+	}
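	// For example (hypothetical paths): with basePath /repos/app and includes
	// ["dist/**", ".turbo/turbo-build.log"], the processed includes collapse
	// into the single doublestar pattern
	//   {repos/app/dist/**,repos/app/.turbo/turbo-build.log}
	// so the walk below makes a single pass however many patterns were given.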
+
+	// We start with an empty string excludePattern which we only use if excludeCount > 0.
+	excludePattern := ""
+	excludeCount := len(processedExcludes)
+
+	// Do not use alternation if unnecessary.
+	if excludeCount == 1 {
+		excludePattern = processedExcludes[0]
+	} else if excludeCount > 1 {
+		// We use alternation from the very root of the path. This avoids fs.Stat of the basePath.
+		excludePattern = "{" + strings.Join(processedExcludes, ",") + "}"
+	}
+
+	// GlobWalk expects that everything uses Unix path conventions.
+	includePattern = filepath.ToSlash(includePattern)
+	excludePattern = filepath.ToSlash(excludePattern)
+
+	err := doublestar.GlobWalk(fsys, includePattern, func(path string, dirEntry iofs.DirEntry) error {
+		if !includeDirs && dirEntry.IsDir() {
+			return nil
+		}
+
+		// All files that are returned by doublestar.GlobWalk are relative to
+		// the fsys root. Go, however, has decided that `fs.FS` filesystems do
+		// not address the root of the file system using `/` and instead use
+		// paths without leading separators.
+		//
+		// We need to track where the `fsys` root is so that when we hand paths back
+		// we hand them back as the path addressable in the actual OS filesystem.
+		//
+		// As a consequence, when processing, we need to *restore* the original
+		// root to the file path after returning. This works because when we create
+		// the `os.dirFS` filesystem we do so at the root of the current volume.
+		if excludeCount == 0 {
+			// Reconstruct via string concatenation since the root is already pre-composed.
+			result.Add(fsysRoot + path)
+			return nil
+		}
+
+		isExcluded, err := doublestar.Match(excludePattern, filepath.ToSlash(path))
+		if err != nil {
+			return err
+		}
+
+		if !isExcluded {
+			// Reconstruct via string concatenation since the root is already pre-composed.
+			result.Add(fsysRoot + path)
+		}
+
+		return nil
+	})
+
+	// GlobWalk threw an error.
+	if err != nil {
+		return nil, err
+	}
+
+	// Never actually capture the root folder.
+	// This is a risk because of how we rework the globs.
+	result.Delete(strings.TrimSuffix(basePath, "/"))
+
+	return result.UnsafeListOfStrings(), nil
+}
diff --git a/cli/internal/globby/globby_test.go b/cli/internal/globby/globby_test.go
new file mode 100644
index 0000000..2fdd613
--- /dev/null
+++ b/cli/internal/globby/globby_test.go
@@ -0,0 +1,832 @@
+package globby
+
+import (
+	"io/fs"
+	"path/filepath"
+	"reflect"
+	"sort"
+	"testing"
+
+	"testing/fstest"
+)
+
+// setup prepares the test file system contents and returns the file system.
+func setup(fsysRoot string, files []string) fs.FS {
+	fsys := fstest.MapFS{}
+	for _, file := range files {
+		// We're populating a `fs.FS` filesystem which requires paths to have no
+		// leading slash. As a consequence we strip it during creation.
+ iofsRelativePath := file[1:] + + fsys[iofsRelativePath] = &fstest.MapFile{Mode: 0666} + } + + return fsys +} + +func TestGlobFilesFs(t *testing.T) { + type args struct { + basePath string + includePatterns []string + excludePatterns []string + } + tests := []struct { + name string + files []string + args args + wantAll []string + wantFiles []string + wantErr bool + }{ + { + name: "hello world", + files: []string{"/test.txt"}, + args: args{ + basePath: "/", + includePatterns: []string{"*.txt"}, + excludePatterns: []string{}, + }, + wantAll: []string{"/test.txt"}, + wantFiles: []string{"/test.txt"}, + }, + { + name: "bullet files", + files: []string{ + "/test.txt", + "/subdir/test.txt", + "/other/test.txt", + }, + args: args{ + basePath: "/", + includePatterns: []string{"subdir/test.txt", "test.txt"}, + excludePatterns: []string{}, + }, + wantAll: []string{ + "/subdir/test.txt", + "/test.txt", + }, + wantFiles: []string{ + "/subdir/test.txt", + "/test.txt", + }, + }, + { + name: "finding workspace package.json files", + files: []string{ + "/external/file.txt", + "/repos/some-app/apps/docs/package.json", + "/repos/some-app/apps/web/package.json", + "/repos/some-app/bower_components/readline/package.json", + "/repos/some-app/examples/package.json", + "/repos/some-app/node_modules/gulp/bower_components/readline/package.json", + "/repos/some-app/node_modules/react/package.json", + "/repos/some-app/package.json", + "/repos/some-app/packages/colors/package.json", + "/repos/some-app/packages/faker/package.json", + "/repos/some-app/packages/left-pad/package.json", + "/repos/some-app/test/mocks/kitchen-sink/package.json", + "/repos/some-app/tests/mocks/kitchen-sink/package.json", + }, + args: args{ + basePath: "/repos/some-app/", + includePatterns: []string{"packages/*/package.json", "apps/*/package.json"}, + excludePatterns: []string{"**/node_modules/", "**/bower_components/", "**/test/", "**/tests/"}, + }, + wantAll: []string{ + "/repos/some-app/apps/docs/package.json", + "/repos/some-app/apps/web/package.json", + "/repos/some-app/packages/colors/package.json", + "/repos/some-app/packages/faker/package.json", + "/repos/some-app/packages/left-pad/package.json", + }, + wantFiles: []string{ + "/repos/some-app/apps/docs/package.json", + "/repos/some-app/apps/web/package.json", + "/repos/some-app/packages/colors/package.json", + "/repos/some-app/packages/faker/package.json", + "/repos/some-app/packages/left-pad/package.json", + }, + }, + { + name: "excludes unexpected workspace package.json files", + files: []string{ + "/external/file.txt", + "/repos/some-app/apps/docs/package.json", + "/repos/some-app/apps/web/package.json", + "/repos/some-app/bower_components/readline/package.json", + "/repos/some-app/examples/package.json", + "/repos/some-app/node_modules/gulp/bower_components/readline/package.json", + "/repos/some-app/node_modules/react/package.json", + "/repos/some-app/package.json", + "/repos/some-app/packages/colors/package.json", + "/repos/some-app/packages/faker/package.json", + "/repos/some-app/packages/left-pad/package.json", + "/repos/some-app/test/mocks/spanish-inquisition/package.json", + "/repos/some-app/tests/mocks/spanish-inquisition/package.json", + }, + args: args{ + basePath: "/repos/some-app/", + includePatterns: []string{"**/package.json"}, + excludePatterns: []string{"**/node_modules/", "**/bower_components/", "**/test/", "**/tests/"}, + }, + wantAll: []string{ + "/repos/some-app/apps/docs/package.json", + "/repos/some-app/apps/web/package.json", + 
"/repos/some-app/examples/package.json", + "/repos/some-app/package.json", + "/repos/some-app/packages/colors/package.json", + "/repos/some-app/packages/faker/package.json", + "/repos/some-app/packages/left-pad/package.json", + }, + wantFiles: []string{ + "/repos/some-app/apps/docs/package.json", + "/repos/some-app/apps/web/package.json", + "/repos/some-app/examples/package.json", + "/repos/some-app/package.json", + "/repos/some-app/packages/colors/package.json", + "/repos/some-app/packages/faker/package.json", + "/repos/some-app/packages/left-pad/package.json", + }, + }, + { + name: "nested packages work", + files: []string{ + "/external/file.txt", + "/repos/some-app/apps/docs/package.json", + "/repos/some-app/apps/web/package.json", + "/repos/some-app/bower_components/readline/package.json", + "/repos/some-app/examples/package.json", + "/repos/some-app/node_modules/gulp/bower_components/readline/package.json", + "/repos/some-app/node_modules/react/package.json", + "/repos/some-app/package.json", + "/repos/some-app/packages/xzibit/package.json", + "/repos/some-app/packages/xzibit/node_modules/street-legal/package.json", + "/repos/some-app/packages/xzibit/node_modules/paint-colors/package.json", + "/repos/some-app/packages/xzibit/packages/yo-dawg/package.json", + "/repos/some-app/packages/xzibit/packages/yo-dawg/node_modules/meme/package.json", + "/repos/some-app/packages/xzibit/packages/yo-dawg/node_modules/yo-dawg/package.json", + "/repos/some-app/packages/colors/package.json", + "/repos/some-app/packages/faker/package.json", + "/repos/some-app/packages/left-pad/package.json", + "/repos/some-app/test/mocks/spanish-inquisition/package.json", + "/repos/some-app/tests/mocks/spanish-inquisition/package.json", + }, + args: args{ + basePath: "/repos/some-app/", + includePatterns: []string{"packages/**/package.json"}, + excludePatterns: []string{"**/node_modules/", "**/bower_components/", "**/test/", "**/tests/"}, + }, + wantAll: []string{ + "/repos/some-app/packages/colors/package.json", + "/repos/some-app/packages/faker/package.json", + "/repos/some-app/packages/left-pad/package.json", + "/repos/some-app/packages/xzibit/package.json", + "/repos/some-app/packages/xzibit/packages/yo-dawg/package.json", + }, + wantFiles: []string{ + "/repos/some-app/packages/colors/package.json", + "/repos/some-app/packages/faker/package.json", + "/repos/some-app/packages/left-pad/package.json", + "/repos/some-app/packages/xzibit/package.json", + "/repos/some-app/packages/xzibit/packages/yo-dawg/package.json", + }, + }, + { + name: "includes do not override excludes", + files: []string{ + "/external/file.txt", + "/repos/some-app/apps/docs/package.json", + "/repos/some-app/apps/web/package.json", + "/repos/some-app/bower_components/readline/package.json", + "/repos/some-app/examples/package.json", + "/repos/some-app/node_modules/gulp/bower_components/readline/package.json", + "/repos/some-app/node_modules/react/package.json", + "/repos/some-app/package.json", + "/repos/some-app/packages/xzibit/package.json", + "/repos/some-app/packages/xzibit/node_modules/street-legal/package.json", + "/repos/some-app/packages/xzibit/node_modules/paint-colors/package.json", + "/repos/some-app/packages/xzibit/packages/yo-dawg/package.json", + "/repos/some-app/packages/xzibit/packages/yo-dawg/node_modules/meme/package.json", + "/repos/some-app/packages/xzibit/packages/yo-dawg/node_modules/yo-dawg/package.json", + "/repos/some-app/packages/colors/package.json", + "/repos/some-app/packages/faker/package.json", + 
"/repos/some-app/packages/left-pad/package.json", + "/repos/some-app/test/mocks/spanish-inquisition/package.json", + "/repos/some-app/tests/mocks/spanish-inquisition/package.json", + }, + args: args{ + basePath: "/repos/some-app/", + includePatterns: []string{"packages/**/package.json", "tests/mocks/*/package.json"}, + excludePatterns: []string{"**/node_modules/", "**/bower_components/", "**/test/", "**/tests/"}, + }, + wantAll: []string{ + "/repos/some-app/packages/colors/package.json", + "/repos/some-app/packages/faker/package.json", + "/repos/some-app/packages/left-pad/package.json", + "/repos/some-app/packages/xzibit/package.json", + "/repos/some-app/packages/xzibit/packages/yo-dawg/package.json", + }, + wantFiles: []string{ + "/repos/some-app/packages/colors/package.json", + "/repos/some-app/packages/faker/package.json", + "/repos/some-app/packages/left-pad/package.json", + "/repos/some-app/packages/xzibit/package.json", + "/repos/some-app/packages/xzibit/packages/yo-dawg/package.json", + }, + }, + { + name: "output globbing grabs the desired content", + files: []string{ + "/external/file.txt", + "/repos/some-app/src/index.js", + "/repos/some-app/public/src/css/index.css", + "/repos/some-app/.turbo/turbo-build.log", + "/repos/some-app/.turbo/somebody-touched-this-file-into-existence.txt", + "/repos/some-app/.next/log.txt", + "/repos/some-app/.next/cache/db6a76a62043520e7aaadd0bb2104e78.txt", + "/repos/some-app/dist/index.html", + "/repos/some-app/dist/js/index.js", + "/repos/some-app/dist/js/lib.js", + "/repos/some-app/dist/js/node_modules/browserify.js", + "/repos/some-app/public/dist/css/index.css", + "/repos/some-app/public/dist/images/rick_astley.jpg", + }, + args: args{ + basePath: "/repos/some-app/", + includePatterns: []string{".turbo/turbo-build.log", "dist/**", ".next/**", "public/dist/**"}, + excludePatterns: []string{}, + }, + wantAll: []string{ + "/repos/some-app/.next", + "/repos/some-app/.next/cache", + "/repos/some-app/.next/cache/db6a76a62043520e7aaadd0bb2104e78.txt", + "/repos/some-app/.next/log.txt", + "/repos/some-app/.turbo/turbo-build.log", + "/repos/some-app/dist", + "/repos/some-app/dist/index.html", + "/repos/some-app/dist/js", + "/repos/some-app/dist/js/index.js", + "/repos/some-app/dist/js/lib.js", + "/repos/some-app/dist/js/node_modules", + "/repos/some-app/dist/js/node_modules/browserify.js", + "/repos/some-app/public/dist", + "/repos/some-app/public/dist/css", + "/repos/some-app/public/dist/css/index.css", + "/repos/some-app/public/dist/images", + "/repos/some-app/public/dist/images/rick_astley.jpg", + }, + wantFiles: []string{ + "/repos/some-app/.next/cache/db6a76a62043520e7aaadd0bb2104e78.txt", + "/repos/some-app/.next/log.txt", + "/repos/some-app/.turbo/turbo-build.log", + "/repos/some-app/dist/index.html", + "/repos/some-app/dist/js/index.js", + "/repos/some-app/dist/js/lib.js", + "/repos/some-app/dist/js/node_modules/browserify.js", + "/repos/some-app/public/dist/css/index.css", + "/repos/some-app/public/dist/images/rick_astley.jpg", + }, + }, + { + name: "passing ** captures all children", + files: []string{ + "/repos/some-app/dist/index.html", + "/repos/some-app/dist/js/index.js", + "/repos/some-app/dist/js/lib.js", + "/repos/some-app/dist/js/node_modules/browserify.js", + }, + args: args{ + basePath: "/repos/some-app/", + includePatterns: []string{"dist/**"}, + excludePatterns: []string{}, + }, + wantAll: []string{ + "/repos/some-app/dist", + "/repos/some-app/dist/index.html", + "/repos/some-app/dist/js", + "/repos/some-app/dist/js/index.js", + 
"/repos/some-app/dist/js/lib.js", + "/repos/some-app/dist/js/node_modules", + "/repos/some-app/dist/js/node_modules/browserify.js", + }, + wantFiles: []string{ + "/repos/some-app/dist/index.html", + "/repos/some-app/dist/js/index.js", + "/repos/some-app/dist/js/lib.js", + "/repos/some-app/dist/js/node_modules/browserify.js", + }, + }, + { + name: "passing just a directory captures no children", + files: []string{ + "/repos/some-app/dist/index.html", + "/repos/some-app/dist/js/index.js", + "/repos/some-app/dist/js/lib.js", + "/repos/some-app/dist/js/node_modules/browserify.js", + }, + args: args{ + basePath: "/repos/some-app/", + includePatterns: []string{"dist"}, + excludePatterns: []string{}, + }, + wantAll: []string{"/repos/some-app/dist"}, + wantFiles: []string{}, + }, + { + name: "redundant includes do not duplicate", + files: []string{ + "/repos/some-app/dist/index.html", + "/repos/some-app/dist/js/index.js", + "/repos/some-app/dist/js/lib.js", + "/repos/some-app/dist/js/node_modules/browserify.js", + }, + args: args{ + basePath: "/repos/some-app/", + includePatterns: []string{"**/*", "dist/**"}, + excludePatterns: []string{}, + }, + wantAll: []string{ + "/repos/some-app/dist", + "/repos/some-app/dist/index.html", + "/repos/some-app/dist/js", + "/repos/some-app/dist/js/index.js", + "/repos/some-app/dist/js/lib.js", + "/repos/some-app/dist/js/node_modules", + "/repos/some-app/dist/js/node_modules/browserify.js", + }, + wantFiles: []string{ + "/repos/some-app/dist/index.html", + "/repos/some-app/dist/js/index.js", + "/repos/some-app/dist/js/lib.js", + "/repos/some-app/dist/js/node_modules/browserify.js", + }, + }, + { + name: "exclude everything, include everything", + files: []string{ + "/repos/some-app/dist/index.html", + "/repos/some-app/dist/js/index.js", + "/repos/some-app/dist/js/lib.js", + "/repos/some-app/dist/js/node_modules/browserify.js", + }, + args: args{ + basePath: "/repos/some-app/", + includePatterns: []string{"**"}, + excludePatterns: []string{"**"}, + }, + wantAll: []string{}, + wantFiles: []string{}, + }, + { + name: "passing just a directory to exclude prevents capture of children", + files: []string{ + "/repos/some-app/dist/index.html", + "/repos/some-app/dist/js/index.js", + "/repos/some-app/dist/js/lib.js", + "/repos/some-app/dist/js/node_modules/browserify.js", + }, + args: args{ + basePath: "/repos/some-app/", + includePatterns: []string{"dist/**"}, + excludePatterns: []string{"dist/js"}, + }, + wantAll: []string{ + "/repos/some-app/dist", + "/repos/some-app/dist/index.html", + }, + wantFiles: []string{ + "/repos/some-app/dist/index.html", + }, + }, + { + name: "passing ** to exclude prevents capture of children", + files: []string{ + "/repos/some-app/dist/index.html", + "/repos/some-app/dist/js/index.js", + "/repos/some-app/dist/js/lib.js", + "/repos/some-app/dist/js/node_modules/browserify.js", + }, + args: args{ + basePath: "/repos/some-app/", + includePatterns: []string{"dist/**"}, + excludePatterns: []string{"dist/js/**"}, + }, + wantAll: []string{ + "/repos/some-app/dist", + "/repos/some-app/dist/index.html", + "/repos/some-app/dist/js", + }, + wantFiles: []string{ + "/repos/some-app/dist/index.html", + }, + }, + { + name: "exclude everything with folder . 
applies at base path", + files: []string{ + "/repos/some-app/dist/index.html", + "/repos/some-app/dist/js/index.js", + "/repos/some-app/dist/js/lib.js", + "/repos/some-app/dist/js/node_modules/browserify.js", + }, + args: args{ + basePath: "/repos/some-app/", + includePatterns: []string{"**"}, + excludePatterns: []string{"./"}, + }, + wantAll: []string{}, + wantFiles: []string{}, + }, + { + name: "exclude everything with traversal applies at a non-base path", + files: []string{ + "/repos/some-app/dist/index.html", + "/repos/some-app/dist/js/index.js", + "/repos/some-app/dist/js/lib.js", + "/repos/some-app/dist/js/node_modules/browserify.js", + }, + args: args{ + basePath: "/repos/some-app/", + includePatterns: []string{"**"}, + excludePatterns: []string{"./dist"}, + }, + wantAll: []string{}, + wantFiles: []string{}, + }, + { + name: "exclude everything with folder traversal (..) applies at base path", + files: []string{ + "/repos/some-app/dist/index.html", + "/repos/some-app/dist/js/index.js", + "/repos/some-app/dist/js/lib.js", + "/repos/some-app/dist/js/node_modules/browserify.js", + }, + args: args{ + basePath: "/repos/some-app/", + includePatterns: []string{"**"}, + excludePatterns: []string{"dist/../"}, + }, + wantAll: []string{}, + wantFiles: []string{}, + }, + { + name: "how do globs even work bad glob microformat", + files: []string{ + "/repos/some-app/dist/index.html", + "/repos/some-app/dist/js/index.js", + "/repos/some-app/dist/js/lib.js", + "/repos/some-app/dist/js/node_modules/browserify.js", + }, + args: args{ + basePath: "/repos/some-app/", + includePatterns: []string{"**/**/**"}, + excludePatterns: []string{}, + }, + wantAll: []string{ + "/repos/some-app/dist", + "/repos/some-app/dist/index.html", + "/repos/some-app/dist/js", + "/repos/some-app/dist/js/index.js", + "/repos/some-app/dist/js/lib.js", + "/repos/some-app/dist/js/node_modules", + "/repos/some-app/dist/js/node_modules/browserify.js", + }, + wantFiles: []string{ + "/repos/some-app/dist/index.html", + "/repos/some-app/dist/js/index.js", + "/repos/some-app/dist/js/lib.js", + "/repos/some-app/dist/js/node_modules/browserify.js", + }, + }, + { + name: "directory traversal stops at base path", + files: []string{ + "/repos/spanish-inquisition/index.html", + "/repos/some-app/dist/index.html", + "/repos/some-app/dist/js/index.js", + "/repos/some-app/dist/js/lib.js", + "/repos/some-app/dist/js/node_modules/browserify.js", + }, + args: args{ + basePath: "/repos/some-app/", + includePatterns: []string{"../spanish-inquisition/**", "dist/**"}, + excludePatterns: []string{}, + }, + wantAll: []string{}, + wantFiles: []string{}, + wantErr: true, + }, + { + name: "globs and traversal and globs do not cross base path", + files: []string{ + "/repos/spanish-inquisition/index.html", + "/repos/some-app/dist/index.html", + "/repos/some-app/dist/js/index.js", + "/repos/some-app/dist/js/lib.js", + "/repos/some-app/dist/js/node_modules/browserify.js", + }, + args: args{ + basePath: "/repos/some-app/", + includePatterns: []string{"**/../../spanish-inquisition/**"}, + excludePatterns: []string{}, + }, + wantAll: []string{}, + wantFiles: []string{}, + wantErr: true, + }, + { + name: "traversal works within base path", + files: []string{ + "/repos/some-app/dist/index.html", + "/repos/some-app/dist/js/index.js", + "/repos/some-app/dist/js/lib.js", + "/repos/some-app/dist/js/node_modules/browserify.js", + }, + args: args{ + basePath: "/repos/some-app/", + includePatterns: []string{"dist/js/../**"}, + excludePatterns: []string{}, + }, + wantAll: 
[]string{ + "/repos/some-app/dist", + "/repos/some-app/dist/index.html", + "/repos/some-app/dist/js", + "/repos/some-app/dist/js/index.js", + "/repos/some-app/dist/js/lib.js", + "/repos/some-app/dist/js/node_modules", + "/repos/some-app/dist/js/node_modules/browserify.js", + }, + wantFiles: []string{ + "/repos/some-app/dist/index.html", + "/repos/some-app/dist/js/index.js", + "/repos/some-app/dist/js/lib.js", + "/repos/some-app/dist/js/node_modules/browserify.js", + }, + }, + { + name: "self-references (.) work", + files: []string{ + "/repos/some-app/dist/index.html", + "/repos/some-app/dist/js/index.js", + "/repos/some-app/dist/js/lib.js", + "/repos/some-app/dist/js/node_modules/browserify.js", + }, + args: args{ + basePath: "/repos/some-app/", + includePatterns: []string{"dist/./././**"}, + excludePatterns: []string{}, + }, + wantAll: []string{ + "/repos/some-app/dist", + "/repos/some-app/dist/index.html", + "/repos/some-app/dist/js", + "/repos/some-app/dist/js/index.js", + "/repos/some-app/dist/js/lib.js", + "/repos/some-app/dist/js/node_modules", + "/repos/some-app/dist/js/node_modules/browserify.js", + }, + wantFiles: []string{ + "/repos/some-app/dist/index.html", + "/repos/some-app/dist/js/index.js", + "/repos/some-app/dist/js/lib.js", + "/repos/some-app/dist/js/node_modules/browserify.js", + }, + }, + { + name: "depth of 1 includes handles folders properly", + files: []string{ + "/repos/some-app/package.json", + "/repos/some-app/dist/index.html", + "/repos/some-app/dist/js/index.js", + "/repos/some-app/dist/js/lib.js", + "/repos/some-app/dist/js/node_modules/browserify.js", + }, + args: args{ + basePath: "/repos/some-app/", + includePatterns: []string{"*"}, + excludePatterns: []string{}, + }, + wantAll: []string{ + "/repos/some-app/dist", + "/repos/some-app/package.json", + }, + wantFiles: []string{"/repos/some-app/package.json"}, + }, + { + name: "depth of 1 excludes prevents capturing folders", + files: []string{ + "/repos/some-app/package.json", + "/repos/some-app/dist/index.html", + "/repos/some-app/dist/js/index.js", + "/repos/some-app/dist/js/lib.js", + "/repos/some-app/dist/js/node_modules/browserify.js", + }, + args: args{ + basePath: "/repos/some-app/", + includePatterns: []string{"**"}, + excludePatterns: []string{"dist/*"}, + }, + wantAll: []string{ + "/repos/some-app/dist", + "/repos/some-app/package.json", + }, + wantFiles: []string{"/repos/some-app/package.json"}, + }, + { + name: "No-trailing slash basePath works", + files: []string{ + "/repos/some-app/dist/index.html", + "/repos/some-app/dist/js/index.js", + "/repos/some-app/dist/js/lib.js", + "/repos/some-app/dist/js/node_modules/browserify.js", + }, + args: args{ + basePath: "/repos/some-app", + includePatterns: []string{"dist/**"}, + excludePatterns: []string{}, + }, + wantAll: []string{ + "/repos/some-app/dist", + "/repos/some-app/dist/index.html", + "/repos/some-app/dist/js", + "/repos/some-app/dist/js/index.js", + "/repos/some-app/dist/js/lib.js", + "/repos/some-app/dist/js/node_modules", + "/repos/some-app/dist/js/node_modules/browserify.js", + }, + wantFiles: []string{ + "/repos/some-app/dist/index.html", + "/repos/some-app/dist/js/index.js", + "/repos/some-app/dist/js/lib.js", + "/repos/some-app/dist/js/node_modules/browserify.js", + }, + }, + { + name: "exclude single file", + files: []string{ + "/repos/some-app/included.txt", + "/repos/some-app/excluded.txt", + }, + args: args{ + basePath: "/repos/some-app", + includePatterns: []string{"*.txt"}, + excludePatterns: []string{"excluded.txt"}, + }, + wantAll: 
[]string{ + "/repos/some-app/included.txt", + }, + wantFiles: []string{ + "/repos/some-app/included.txt", + }, + }, + { + name: "exclude nested single file", + files: []string{ + "/repos/some-app/one/included.txt", + "/repos/some-app/one/two/included.txt", + "/repos/some-app/one/two/three/included.txt", + "/repos/some-app/one/excluded.txt", + "/repos/some-app/one/two/excluded.txt", + "/repos/some-app/one/two/three/excluded.txt", + }, + args: args{ + basePath: "/repos/some-app", + includePatterns: []string{"**"}, + excludePatterns: []string{"**/excluded.txt"}, + }, + wantAll: []string{ + "/repos/some-app/one/included.txt", + "/repos/some-app/one/two/included.txt", + "/repos/some-app/one/two/three/included.txt", + "/repos/some-app/one", + "/repos/some-app/one/two", + "/repos/some-app/one/two/three", + }, + wantFiles: []string{ + "/repos/some-app/one/included.txt", + "/repos/some-app/one/two/included.txt", + "/repos/some-app/one/two/three/included.txt", + }, + }, + { + name: "exclude everything", + files: []string{ + "/repos/some-app/one/included.txt", + "/repos/some-app/one/two/included.txt", + "/repos/some-app/one/two/three/included.txt", + "/repos/some-app/one/excluded.txt", + "/repos/some-app/one/two/excluded.txt", + "/repos/some-app/one/two/three/excluded.txt", + }, + args: args{ + basePath: "/repos/some-app", + includePatterns: []string{"**"}, + excludePatterns: []string{"**"}, + }, + wantAll: []string{}, + wantFiles: []string{}, + }, + { + name: "exclude everything with slash", + files: []string{ + "/repos/some-app/one/included.txt", + "/repos/some-app/one/two/included.txt", + "/repos/some-app/one/two/three/included.txt", + "/repos/some-app/one/excluded.txt", + "/repos/some-app/one/two/excluded.txt", + "/repos/some-app/one/two/three/excluded.txt", + }, + args: args{ + basePath: "/repos/some-app", + includePatterns: []string{"**"}, + excludePatterns: []string{"**/"}, + }, + wantAll: []string{}, + wantFiles: []string{}, + }, + { + name: "exclude everything with leading **", + files: []string{ + "/repos/some-app/foo/bar", + "/repos/some-app/some-foo", + "/repos/some-app/some-foo/bar", + "/repos/some-app/included", + }, + args: args{ + basePath: "/repos/some-app", + includePatterns: []string{"**"}, + excludePatterns: []string{"**foo"}, + }, + wantAll: []string{ + "/repos/some-app/included", + }, + wantFiles: []string{ + "/repos/some-app/included", + }, + }, + { + name: "exclude everything with trailing **", + files: []string{ + "/repos/some-app/foo/bar", + "/repos/some-app/foo-file", + "/repos/some-app/foo-dir/bar", + "/repos/some-app/included", + }, + args: args{ + basePath: "/repos/some-app", + includePatterns: []string{"**"}, + excludePatterns: []string{"foo**"}, + }, + wantAll: []string{ + "/repos/some-app/included", + }, + wantFiles: []string{ + "/repos/some-app/included", + }, + }, + } + for _, tt := range tests { + fsysRoot := "/" + fsys := setup(fsysRoot, tt.files) + + t.Run(tt.name, func(t *testing.T) { + got, err := globFilesFs(fsys, fsysRoot, tt.args.basePath, tt.args.includePatterns, tt.args.excludePatterns) + + if (err != nil) != tt.wantErr { + t.Errorf("globFilesFs() error = %v, wantErr %v", err, tt.wantErr) + return + } + + gotToSlash := make([]string, len(got)) + for index, path := range got { + gotToSlash[index] = filepath.ToSlash(path) + } + + sort.Strings(gotToSlash) + + if !reflect.DeepEqual(gotToSlash, tt.wantFiles) { + t.Errorf("globFilesFs() = %v, want %v", gotToSlash, tt.wantFiles) + } + }) + + t.Run(tt.name, func(t *testing.T) { + got, err := globAllFs(fsys, 
fsysRoot, tt.args.basePath, tt.args.includePatterns, tt.args.excludePatterns) + + if (err != nil) != tt.wantErr { + t.Errorf("globAllFs() error = %v, wantErr %v", err, tt.wantErr) + return + } + + gotToSlash := make([]string, len(got)) + for index, path := range got { + gotToSlash[index] = filepath.ToSlash(path) + } + + sort.Strings(gotToSlash) + sort.Strings(tt.wantAll) + + if !reflect.DeepEqual(gotToSlash, tt.wantAll) { + t.Errorf("globAllFs() = %v, want %v", gotToSlash, tt.wantAll) + } + }) + } +} diff --git a/cli/internal/globwatcher/globwatcher.go b/cli/internal/globwatcher/globwatcher.go new file mode 100644 index 0000000..9226cfa --- /dev/null +++ b/cli/internal/globwatcher/globwatcher.go @@ -0,0 +1,210 @@ +package globwatcher + +import ( + "errors" + "fmt" + "path/filepath" + "sync" + + "github.com/hashicorp/go-hclog" + "github.com/vercel/turbo/cli/internal/doublestar" + "github.com/vercel/turbo/cli/internal/filewatcher" + "github.com/vercel/turbo/cli/internal/fs" + "github.com/vercel/turbo/cli/internal/turbopath" + "github.com/vercel/turbo/cli/internal/util" +) + +// ErrClosed is returned when attempting to get changed globs after glob watching has closed +var ErrClosed = errors.New("glob watching is closed") + +type globs struct { + Inclusions util.Set + Exclusions util.Set +} + +// GlobWatcher is used to track unchanged globs by hash. Once a glob registers a file change +// it is no longer tracked until a new hash requests it. Once all globs for a particular hash +// have changed, that hash is no longer tracked. +type GlobWatcher struct { + logger hclog.Logger + repoRoot turbopath.AbsoluteSystemPath + cookieWaiter filewatcher.CookieWaiter + + mu sync.RWMutex // protects field below + hashGlobs map[string]globs + globStatus map[string]util.Set // glob -> hashes where this glob hasn't changed + + closed bool +} + +// New returns a new GlobWatcher instance +func New(logger hclog.Logger, repoRoot turbopath.AbsoluteSystemPath, cookieWaiter filewatcher.CookieWaiter) *GlobWatcher { + return &GlobWatcher{ + logger: logger, + repoRoot: repoRoot, + cookieWaiter: cookieWaiter, + hashGlobs: make(map[string]globs), + globStatus: make(map[string]util.Set), + } +} + +func (g *GlobWatcher) setClosed() { + g.mu.Lock() + g.closed = true + g.mu.Unlock() +} + +func (g *GlobWatcher) isClosed() bool { + g.mu.RLock() + defer g.mu.RUnlock() + return g.closed +} + +// WatchGlobs registers the given set of globs to be watched for changes and grouped +// under the given hash. This method pairs with GetChangedGlobs to determine which globs +// out of a set of candidates have changed since WatchGlobs was called for the same hash. +func (g *GlobWatcher) WatchGlobs(hash string, globsToWatch fs.TaskOutputs) error { + if g.isClosed() { + return ErrClosed + } + // Wait for a cookie here + // that will ensure that we have seen all filesystem writes + // *by the calling client*. Other tasks _could_ write to the + // same output directories, however we are relying on task + // execution dependencies to prevent that. 
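+	//
+	// Illustrative contract, inferred from this package's usage: WaitForCookie
+	// blocks until the filewatcher has observed a sentinel "cookie" file created
+	// after the caller's writes, so once it returns, every prior write by the
+	// calling client has been seen. A caller-side sketch:
+	//
+	//	if err := g.cookieWaiter.WaitForCookie(); err == nil {
+	//		// safe to snapshot glob state for this hash
+	//	}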
+ if err := g.cookieWaiter.WaitForCookie(); err != nil { + return err + } + g.mu.Lock() + defer g.mu.Unlock() + g.hashGlobs[hash] = globs{ + Inclusions: util.SetFromStrings(globsToWatch.Inclusions), + Exclusions: util.SetFromStrings(globsToWatch.Exclusions), + } + + for _, glob := range globsToWatch.Inclusions { + existing, ok := g.globStatus[glob] + if !ok { + existing = make(util.Set) + } + existing.Add(hash) + g.globStatus[glob] = existing + } + return nil +} + +// GetChangedGlobs returns the subset of the given candidates that we are not currently +// tracking as "unchanged". +func (g *GlobWatcher) GetChangedGlobs(hash string, candidates []string) ([]string, error) { + if g.isClosed() { + // If filewatching has crashed, return all candidates as changed. + return candidates, nil + } + // Wait for a cookie here + // that will ensure that we have seen all filesystem writes + // *by the calling client*. Other tasks _could_ write to the + // same output directories, however we are relying on task + // execution dependencies to prevent that. + if err := g.cookieWaiter.WaitForCookie(); err != nil { + return nil, err + } + // hashGlobs tracks all of the unchanged globs for a given hash + // If hashGlobs doesn't have our hash, either everything has changed, + // or we were never tracking it. Either way, consider all the candidates + // to be changed globs. + g.mu.RLock() + defer g.mu.RUnlock() + globsToCheck, ok := g.hashGlobs[hash] + if !ok { + return candidates, nil + } + allGlobs := util.SetFromStrings(candidates) + diff := allGlobs.Difference(globsToCheck.Inclusions) + + return diff.UnsafeListOfStrings(), nil +} + +// OnFileWatchEvent implements FileWatchClient.OnFileWatchEvent +// On a file change, check if we have a glob that matches this file. Invalidate +// any matching globs, and remove them from the set of unchanged globs for the corresponding +// hashes. If this is the last glob for a hash, remove the hash from being tracked. +func (g *GlobWatcher) OnFileWatchEvent(ev filewatcher.Event) { + // At this point, we don't care what the Op is, any Op represents a change + // that should invalidate matching globs + g.logger.Trace(fmt.Sprintf("Got fsnotify event %v", ev)) + absolutePath := ev.Path + repoRelativePath, err := g.repoRoot.RelativePathString(absolutePath.ToStringDuringMigration()) + if err != nil { + g.logger.Debug(fmt.Sprintf("could not get relative path from %v to %v: %v", g.repoRoot, absolutePath, err)) + return + } + g.mu.Lock() + defer g.mu.Unlock() + for glob, hashStatus := range g.globStatus { + matches, err := doublestar.Match(glob, filepath.ToSlash(repoRelativePath)) + if err != nil { + g.logger.Error(fmt.Sprintf("failed to check path %v against glob %v: %v", repoRelativePath, glob, err)) + continue + } + // If this glob matches, we know that it has changed for every hash that included this glob + // and is not excluded by a hash's exclusion globs. + // So, we can delete this glob from every hash tracking it as well as stop watching this glob. + // To stop watching, we unref each of the directories corresponding to this glob. 
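+		//
+		// Worked example (hypothetical hashes): with globStatus = {"dist/**": {h1, h2}}
+		// and hashGlobs[h1].Inclusions = {"dist/**"}, a matching, non-excluded change
+		// under dist/ removes h1 and h2 from globStatus["dist/**"] (dropping the glob
+		// entry once empty) and removes "dist/**" from each hash's Inclusions; h1 is
+		// then dropped from hashGlobs entirely because that was its last glob.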
+ if matches { + for hashUntyped := range hashStatus { + hash := hashUntyped.(string) + hashGlobs, ok := g.hashGlobs[hash] + + if !ok { + g.logger.Warn(fmt.Sprintf("failed to find hash %v referenced from glob %v", hash, glob)) + continue + } + + isExcluded := false + // Check if we've excluded this path by going through exclusion globs + for exclusionGlob := range hashGlobs.Exclusions { + matches, err := doublestar.Match(exclusionGlob.(string), filepath.ToSlash(repoRelativePath)) + if err != nil { + g.logger.Error(fmt.Sprintf("failed to check path %v against glob %v: %v", repoRelativePath, glob, err)) + continue + } + + if matches { + isExcluded = true + break + } + } + + // If we have excluded this path, then we skip it + if isExcluded { + continue + } + + // We delete hash from the globStatus entry + g.globStatus[glob].Delete(hash) + + // If we've deleted the last hash for a glob in globStatus, delete the whole glob entry + if len(g.globStatus[glob]) == 0 { + delete(g.globStatus, glob) + } + + hashGlobs.Inclusions.Delete(glob) + // If we've deleted the last glob for a hash, delete the whole hash entry + if hashGlobs.Inclusions.Len() == 0 { + delete(g.hashGlobs, hash) + } + } + } + } +} + +// OnFileWatchError implements FileWatchClient.OnFileWatchError +func (g *GlobWatcher) OnFileWatchError(err error) { + g.logger.Error(fmt.Sprintf("file watching received an error: %v", err)) +} + +// OnFileWatchClosed implements FileWatchClient.OnFileWatchClosed +func (g *GlobWatcher) OnFileWatchClosed() { + g.setClosed() + g.logger.Warn("GlobWatching is closing due to file watching closing") +} diff --git a/cli/internal/globwatcher/globwatcher_test.go b/cli/internal/globwatcher/globwatcher_test.go new file mode 100644 index 0000000..6fb89a7 --- /dev/null +++ b/cli/internal/globwatcher/globwatcher_test.go @@ -0,0 +1,232 @@ +package globwatcher + +import ( + "testing" + + "github.com/hashicorp/go-hclog" + "github.com/vercel/turbo/cli/internal/filewatcher" + "github.com/vercel/turbo/cli/internal/fs" + "github.com/vercel/turbo/cli/internal/turbopath" + "gotest.tools/v3/assert" +) + +func setup(t *testing.T, repoRoot turbopath.AbsoluteSystemPath) { + // Directory layout: + // / + // my-pkg/ + // irrelevant + // dist/ + // dist-file + // distChild/ + // child-file + // .next/ + // next-file + distPath := repoRoot.UntypedJoin("my-pkg", "dist") + childFilePath := distPath.UntypedJoin("distChild", "child-file") + err := childFilePath.EnsureDir() + assert.NilError(t, err, "EnsureDir") + f, err := childFilePath.Create() + assert.NilError(t, err, "Create") + err = f.Close() + assert.NilError(t, err, "Close") + distFilePath := repoRoot.UntypedJoin("my-pkg", "dist", "dist-file") + f, err = distFilePath.Create() + assert.NilError(t, err, "Create") + err = f.Close() + assert.NilError(t, err, "Close") + nextFilePath := repoRoot.UntypedJoin("my-pkg", ".next", "next-file") + err = nextFilePath.EnsureDir() + assert.NilError(t, err, "EnsureDir") + f, err = nextFilePath.Create() + assert.NilError(t, err, "Create") + err = f.Close() + assert.NilError(t, err, "Close") + irrelevantPath := repoRoot.UntypedJoin("my-pkg", "irrelevant") + f, err = irrelevantPath.Create() + assert.NilError(t, err, "Create") + err = f.Close() + assert.NilError(t, err, "Close") +} + +type noopCookieWaiter struct{} + +func (*noopCookieWaiter) WaitForCookie() error { + return nil +} + +var _noopCookieWaiter = &noopCookieWaiter{} + +func TestTrackOutputs(t *testing.T) { + logger := hclog.Default() + + repoRootRaw := t.TempDir() + repoRoot := 
fs.AbsoluteSystemPathFromUpstream(repoRootRaw) + + setup(t, repoRoot) + + globWatcher := New(logger, repoRoot, _noopCookieWaiter) + + globs := fs.TaskOutputs{ + Inclusions: []string{ + "my-pkg/dist/**", + "my-pkg/.next/**", + }, + Exclusions: []string{"my-pkg/.next/cache/**"}, + } + + hash := "the-hash" + err := globWatcher.WatchGlobs(hash, globs) + assert.NilError(t, err, "WatchGlobs") + + changed, err := globWatcher.GetChangedGlobs(hash, globs.Inclusions) + assert.NilError(t, err, "GetChangedGlobs") + assert.Equal(t, 0, len(changed), "Expected no changed paths") + + // Make an irrelevant change + globWatcher.OnFileWatchEvent(filewatcher.Event{ + EventType: filewatcher.FileAdded, + Path: repoRoot.UntypedJoin("my-pkg", "irrelevant"), + }) + + changed, err = globWatcher.GetChangedGlobs(hash, globs.Inclusions) + assert.NilError(t, err, "GetChangedGlobs") + assert.Equal(t, 0, len(changed), "Expected no changed paths") + + // Make an excluded change + globWatcher.OnFileWatchEvent(filewatcher.Event{ + EventType: filewatcher.FileAdded, + Path: repoRoot.Join("my-pkg", ".next", "cache", "foo"), + }) + + changed, err = globWatcher.GetChangedGlobs(hash, globs.Inclusions) + assert.NilError(t, err, "GetChangedGlobs") + assert.Equal(t, 0, len(changed), "Expected no changed paths") + + // Make a relevant change + globWatcher.OnFileWatchEvent(filewatcher.Event{ + EventType: filewatcher.FileAdded, + Path: repoRoot.UntypedJoin("my-pkg", "dist", "foo"), + }) + + changed, err = globWatcher.GetChangedGlobs(hash, globs.Inclusions) + assert.NilError(t, err, "GetChangedGlobs") + assert.Equal(t, 1, len(changed), "Expected one changed path remaining") + expected := "my-pkg/dist/**" + assert.Equal(t, expected, changed[0], "Expected dist glob to have changed") + + // Change a file matching the other glob + globWatcher.OnFileWatchEvent(filewatcher.Event{ + EventType: filewatcher.FileAdded, + Path: repoRoot.UntypedJoin("my-pkg", ".next", "foo"), + }) + // We should no longer be watching anything, since both globs have + // registered changes + if len(globWatcher.hashGlobs) != 0 { + t.Errorf("expected to not track any hashes, found %v", globWatcher.hashGlobs) + } + + // Both globs have changed, we should have stopped tracking + // this hash + changed, err = globWatcher.GetChangedGlobs(hash, globs.Inclusions) + assert.NilError(t, err, "GetChangedGlobs") + assert.DeepEqual(t, globs.Inclusions, changed) +} + +func TestTrackMultipleHashes(t *testing.T) { + logger := hclog.Default() + + repoRootRaw := t.TempDir() + repoRoot := fs.AbsoluteSystemPathFromUpstream(repoRootRaw) + + setup(t, repoRoot) + + globWatcher := New(logger, repoRoot, _noopCookieWaiter) + + globs := fs.TaskOutputs{ + Inclusions: []string{ + "my-pkg/dist/**", + "my-pkg/.next/**", + }, + } + + hash := "the-hash" + err := globWatcher.WatchGlobs(hash, globs) + assert.NilError(t, err, "WatchGlobs") + + secondGlobs := fs.TaskOutputs{ + Inclusions: []string{ + "my-pkg/.next/**", + }, + Exclusions: []string{"my-pkg/.next/cache/**"}, + } + + secondHash := "the-second-hash" + err = globWatcher.WatchGlobs(secondHash, secondGlobs) + assert.NilError(t, err, "WatchGlobs") + + changed, err := globWatcher.GetChangedGlobs(hash, globs.Inclusions) + assert.NilError(t, err, "GetChangedGlobs") + assert.Equal(t, 0, len(changed), "Expected no changed paths") + + changed, err = globWatcher.GetChangedGlobs(secondHash, secondGlobs.Inclusions) + assert.NilError(t, err, "GetChangedGlobs") + assert.Equal(t, 0, len(changed), "Expected no changed paths") + + // Make a change that is 
excluded in one of the hashes but not in the other + globWatcher.OnFileWatchEvent(filewatcher.Event{ + EventType: filewatcher.FileAdded, + Path: repoRoot.UntypedJoin("my-pkg", ".next", "cache", "foo"), + }) + + changed, err = globWatcher.GetChangedGlobs(hash, globs.Inclusions) + assert.NilError(t, err, "GetChangedGlobs") + assert.Equal(t, 1, len(changed), "Expected one changed path remaining") + + changed, err = globWatcher.GetChangedGlobs(secondHash, secondGlobs.Inclusions) + assert.NilError(t, err, "GetChangedGlobs") + assert.Equal(t, 0, len(changed), "Expected no changed paths") + + assert.Equal(t, 1, len(globWatcher.globStatus["my-pkg/.next/**"]), "Expected to be still watching `my-pkg/.next/**`") + + // Make a change for secondHash + globWatcher.OnFileWatchEvent(filewatcher.Event{ + EventType: filewatcher.FileAdded, + Path: repoRoot.UntypedJoin("my-pkg", ".next", "bar"), + }) + + assert.Equal(t, 0, len(globWatcher.globStatus["my-pkg/.next/**"]), "Expected to be no longer watching `my-pkg/.next/**`") +} + +func TestWatchSingleFile(t *testing.T) { + logger := hclog.Default() + + repoRoot := fs.AbsoluteSystemPathFromUpstream(t.TempDir()) + + setup(t, repoRoot) + + //watcher := newTestWatcher() + globWatcher := New(logger, repoRoot, _noopCookieWaiter) + globs := fs.TaskOutputs{ + Inclusions: []string{"my-pkg/.next/next-file"}, + Exclusions: []string{}, + } + hash := "the-hash" + err := globWatcher.WatchGlobs(hash, globs) + assert.NilError(t, err, "WatchGlobs") + + assert.Equal(t, 1, len(globWatcher.hashGlobs)) + + // A change to an irrelevant file + globWatcher.OnFileWatchEvent(filewatcher.Event{ + EventType: filewatcher.FileAdded, + Path: repoRoot.UntypedJoin("my-pkg", ".next", "foo"), + }) + assert.Equal(t, 1, len(globWatcher.hashGlobs)) + + // Change the watched file + globWatcher.OnFileWatchEvent(filewatcher.Event{ + EventType: filewatcher.FileAdded, + Path: repoRoot.UntypedJoin("my-pkg", ".next", "next-file"), + }) + assert.Equal(t, 0, len(globWatcher.hashGlobs)) +} diff --git a/cli/internal/graph/graph.go b/cli/internal/graph/graph.go new file mode 100644 index 0000000..480dec9 --- /dev/null +++ b/cli/internal/graph/graph.go @@ -0,0 +1,274 @@ +// Package graph contains the CompleteGraph struct and some methods around it +package graph + +import ( + gocontext "context" + "fmt" + "path/filepath" + "regexp" + "sort" + "strings" + + "github.com/hashicorp/go-hclog" + "github.com/pyr-sh/dag" + "github.com/vercel/turbo/cli/internal/env" + "github.com/vercel/turbo/cli/internal/fs" + "github.com/vercel/turbo/cli/internal/nodes" + "github.com/vercel/turbo/cli/internal/runsummary" + "github.com/vercel/turbo/cli/internal/taskhash" + "github.com/vercel/turbo/cli/internal/turbopath" + "github.com/vercel/turbo/cli/internal/util" + "github.com/vercel/turbo/cli/internal/workspace" +) + +// CompleteGraph represents the common state inferred from the filesystem and pipeline. +// It is not intended to include information specific to a particular run. 
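+//
+// As an illustrative example, TaskDefinitions below is keyed by task ID in the
+// "package#task" form consumed by util.GetPackageTaskFromId, e.g. "web#build"
+// (hypothetical workspace and task names).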
+type CompleteGraph struct {
+	// WorkspaceGraph expresses the dependencies between packages
+	WorkspaceGraph dag.AcyclicGraph
+
+	// Pipeline is config from turbo.json
+	Pipeline fs.Pipeline
+
+	// WorkspaceInfos stores the package.json contents by package name
+	WorkspaceInfos workspace.Catalog
+
+	// GlobalHash is the hash of all global dependencies
+	GlobalHash string
+
+	RootNode string
+
+	// Map of TaskDefinitions by taskID
+	TaskDefinitions map[string]*fs.TaskDefinition
+	RepoRoot        turbopath.AbsoluteSystemPath
+
+	TaskHashTracker *taskhash.Tracker
+}
+
+// GetPackageTaskVisitor wraps a `visitor` function that is used for walking the TaskGraph
+// during execution (or dry-runs). The function returned here does not execute any tasks itself,
+// but it helps curry some data from the CompleteGraph and pass it into the visitor function.
+func (g *CompleteGraph) GetPackageTaskVisitor(
+	ctx gocontext.Context,
+	taskGraph *dag.AcyclicGraph,
+	globalEnvMode util.EnvMode,
+	getArgs func(taskID string) []string,
+	logger hclog.Logger,
+	execFunc func(ctx gocontext.Context, packageTask *nodes.PackageTask, taskSummary *runsummary.TaskSummary) error,
+) func(taskID string) error {
+	return func(taskID string) error {
+		packageName, taskName := util.GetPackageTaskFromId(taskID)
+		pkg, ok := g.WorkspaceInfos.PackageJSONs[packageName]
+		if !ok {
+			return fmt.Errorf("cannot find package %v for task %v", packageName, taskID)
+		}
+
+		// Check for root task
+		var command string
+		if cmd, ok := pkg.Scripts[taskName]; ok {
+			command = cmd
+		}
+
+		if packageName == util.RootPkgName && commandLooksLikeTurbo(command) {
+			return fmt.Errorf("root task %v (%v) looks like it invokes turbo and might cause a loop", taskName, command)
+		}
+
+		taskDefinition, ok := g.TaskDefinitions[taskID]
+		if !ok {
+			return fmt.Errorf("could not find definition for task %v", taskID)
+		}
+
+		// Task env mode is only independent when global env mode is `infer`.
+		taskEnvMode := globalEnvMode
+		useOldTaskHashable := false
+		if taskEnvMode == util.Infer {
+			if taskDefinition.PassthroughEnv != nil {
+				taskEnvMode = util.Strict
+			} else {
+				// If we're in infer mode we have just detected non-usage of strict env vars.
+				// Since we haven't stabilized this we don't want to break their cache.
+				useOldTaskHashable = true
+
+				// But our old behavior's actual meaning of this state is `loose`.
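+				//
+				// Restating the two branches above: infer + PassthroughEnv set gives
+				// Strict; infer + PassthroughEnv nil gives Loose, plus the legacy
+				// hashable so existing caches keep hitting.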
+ taskEnvMode = util.Loose + } + } + + // TODO: maybe we can remove this PackageTask struct at some point + packageTask := &nodes.PackageTask{ + TaskID: taskID, + Task: taskName, + PackageName: packageName, + Pkg: pkg, + EnvMode: taskEnvMode, + Dir: pkg.Dir.ToString(), + TaskDefinition: taskDefinition, + Outputs: taskDefinition.Outputs.Inclusions, + ExcludedOutputs: taskDefinition.Outputs.Exclusions, + } + + passThruArgs := getArgs(taskName) + hash, err := g.TaskHashTracker.CalculateTaskHash( + packageTask, + taskGraph.DownEdges(taskID), + logger, + passThruArgs, + useOldTaskHashable, + ) + + // Not being able to construct the task hash is a hard error + if err != nil { + return fmt.Errorf("Hashing error: %v", err) + } + + pkgDir := pkg.Dir + packageTask.Hash = hash + envVars := g.TaskHashTracker.GetEnvVars(taskID) + expandedInputs := g.TaskHashTracker.GetExpandedInputs(packageTask) + framework := g.TaskHashTracker.GetFramework(taskID) + + logFile := repoRelativeLogFile(pkgDir, taskName) + packageTask.LogFile = logFile + packageTask.Command = command + + var envVarPassthroughMap env.EnvironmentVariableMap + if taskDefinition.PassthroughEnv != nil { + if envVarPassthroughDetailedMap, err := env.GetHashableEnvVars(taskDefinition.PassthroughEnv, nil, ""); err == nil { + envVarPassthroughMap = envVarPassthroughDetailedMap.BySource.Explicit + } + } + + summary := &runsummary.TaskSummary{ + TaskID: taskID, + Task: taskName, + Hash: hash, + Package: packageName, + Dir: pkgDir.ToString(), + Outputs: taskDefinition.Outputs.Inclusions, + ExcludedOutputs: taskDefinition.Outputs.Exclusions, + LogFile: logFile, + ResolvedTaskDefinition: taskDefinition, + ExpandedInputs: expandedInputs, + ExpandedOutputs: []turbopath.AnchoredSystemPath{}, + Command: command, + CommandArguments: passThruArgs, + Framework: framework, + EnvMode: taskEnvMode, + EnvVars: runsummary.TaskEnvVarSummary{ + Configured: envVars.BySource.Explicit.ToSecretHashable(), + Inferred: envVars.BySource.Matching.ToSecretHashable(), + Passthrough: envVarPassthroughMap.ToSecretHashable(), + }, + ExternalDepsHash: pkg.ExternalDepsHash, + } + + if ancestors, err := g.getTaskGraphAncestors(taskGraph, packageTask.TaskID); err == nil { + summary.Dependencies = ancestors + } + if descendents, err := g.getTaskGraphDescendants(taskGraph, packageTask.TaskID); err == nil { + summary.Dependents = descendents + } + + return execFunc(ctx, packageTask, summary) + } +} + +// GetPipelineFromWorkspace returns the Unmarshaled fs.Pipeline struct from turbo.json in the given workspace. +func (g *CompleteGraph) GetPipelineFromWorkspace(workspaceName string, isSinglePackage bool) (fs.Pipeline, error) { + turboConfig, err := g.GetTurboConfigFromWorkspace(workspaceName, isSinglePackage) + + if err != nil { + return nil, err + } + + return turboConfig.Pipeline, nil +} + +// GetTurboConfigFromWorkspace returns the Unmarshaled fs.TurboJSON from turbo.json in the given workspace. +func (g *CompleteGraph) GetTurboConfigFromWorkspace(workspaceName string, isSinglePackage bool) (*fs.TurboJSON, error) { + cachedTurboConfig, ok := g.WorkspaceInfos.TurboConfigs[workspaceName] + + if ok { + return cachedTurboConfig, nil + } + + var workspacePackageJSON *fs.PackageJSON + if pkgJSON, err := g.GetPackageJSONFromWorkspace(workspaceName); err == nil { + workspacePackageJSON = pkgJSON + } else { + return nil, err + } + + // Note: pkgJSON.Dir for the root workspace will be an empty string, and for + // other workspaces, it will be a relative path. 
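+	//
+	// For example (hypothetical paths): a Dir of "apps/web" anchored at a repo
+	// root of "/repo" restores to "/repo/apps/web", while the root workspace's
+	// empty Dir restores to "/repo" itself.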
+ workspaceAbsolutePath := workspacePackageJSON.Dir.RestoreAnchor(g.RepoRoot) + turboConfig, err := fs.LoadTurboConfig(workspaceAbsolutePath, workspacePackageJSON, isSinglePackage) + + // If we failed to load a TurboConfig, bubble up the error + if err != nil { + return nil, err + } + + // add to cache + g.WorkspaceInfos.TurboConfigs[workspaceName] = turboConfig + + return g.WorkspaceInfos.TurboConfigs[workspaceName], nil +} + +// GetPackageJSONFromWorkspace returns an Unmarshaled struct of the package.json in the given workspace +func (g *CompleteGraph) GetPackageJSONFromWorkspace(workspaceName string) (*fs.PackageJSON, error) { + if pkgJSON, ok := g.WorkspaceInfos.PackageJSONs[workspaceName]; ok { + return pkgJSON, nil + } + + return nil, fmt.Errorf("No package.json for %s", workspaceName) +} + +// repoRelativeLogFile returns the path to the log file for this task execution as a +// relative path from the root of the monorepo. +func repoRelativeLogFile(dir turbopath.AnchoredSystemPath, taskName string) string { + return filepath.Join(dir.ToStringDuringMigration(), ".turbo", fmt.Sprintf("turbo-%v.log", taskName)) +} + +// getTaskGraphAncestors gets all the ancestors for a given task in the graph. +// "ancestors" are all tasks that the given task depends on. +func (g *CompleteGraph) getTaskGraphAncestors(taskGraph *dag.AcyclicGraph, taskID string) ([]string, error) { + ancestors, err := taskGraph.Ancestors(taskID) + if err != nil { + return nil, err + } + stringAncestors := []string{} + for _, dep := range ancestors { + // Don't leak out internal root node name, which are just placeholders + if !strings.Contains(dep.(string), g.RootNode) { + stringAncestors = append(stringAncestors, dep.(string)) + } + } + + sort.Strings(stringAncestors) + return stringAncestors, nil +} + +// getTaskGraphDescendants gets all the descendants for a given task in the graph. +// "descendants" are all tasks that depend on the given taskID. +func (g *CompleteGraph) getTaskGraphDescendants(taskGraph *dag.AcyclicGraph, taskID string) ([]string, error) { + descendents, err := taskGraph.Descendents(taskID) + if err != nil { + return nil, err + } + stringDescendents := []string{} + for _, dep := range descendents { + // Don't leak out internal root node name, which are just placeholders + if !strings.Contains(dep.(string), g.RootNode) { + stringDescendents = append(stringDescendents, dep.(string)) + } + } + sort.Strings(stringDescendents) + return stringDescendents, nil +} + +var _isTurbo = regexp.MustCompile(`(?:^|\s)turbo(?:$|\s)`) + +func commandLooksLikeTurbo(command string) bool { + return _isTurbo.MatchString(command) +} diff --git a/cli/internal/graph/graph_test.go b/cli/internal/graph/graph_test.go new file mode 100644 index 0000000..9323e19 --- /dev/null +++ b/cli/internal/graph/graph_test.go @@ -0,0 +1,50 @@ +package graph + +import ( + "testing" + + "gotest.tools/v3/assert" +) + +func Test_CommandsInvokingTurbo(t *testing.T) { + type testCase struct { + command string + match bool + } + testCases := []testCase{ + { + "turbo run foo", + true, + }, + { + "rm -rf ~/Library/Caches/pnpm && turbo run foo && rm -rf ~/.npm", + true, + }, + { + "FLAG=true turbo run foo", + true, + }, + { + "npx turbo run foo", + true, + }, + { + "echo starting; turbo foo; echo done", + true, + }, + // We don't catch this as if people are going to try to invoke the turbo + // binary directly, they'll always be able to work around us. 
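+		//
+		// This falls out of _isTurbo in graph.go: `(?:^|\s)turbo(?:$|\s)` only
+		// matches "turbo" delimited by whitespace or a string boundary, so the
+		// "/" in "./node_modules/.bin/turbo" and the "." in "~/.turbo" keep the
+		// cases below from matching.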
+ { + "./node_modules/.bin/turbo foo", + false, + }, + { + "rm -rf ~/Library/Caches/pnpm && rm -rf ~/Library/Caches/turbo && rm -rf ~/.npm && rm -rf ~/.pnpm-store && rm -rf ~/.turbo", + false, + }, + } + + for _, tc := range testCases { + assert.Equal(t, commandLooksLikeTurbo(tc.command), tc.match, tc.command) + } +} diff --git a/cli/internal/graphvisualizer/graphvisualizer.go b/cli/internal/graphvisualizer/graphvisualizer.go new file mode 100644 index 0000000..4e134b2 --- /dev/null +++ b/cli/internal/graphvisualizer/graphvisualizer.go @@ -0,0 +1,205 @@ +package graphvisualizer + +import ( + "fmt" + "io" + "math/rand" + "os/exec" + "path/filepath" + "sort" + "strings" + + "github.com/fatih/color" + "github.com/mitchellh/cli" + "github.com/pyr-sh/dag" + "github.com/vercel/turbo/cli/internal/turbopath" + "github.com/vercel/turbo/cli/internal/ui" + "github.com/vercel/turbo/cli/internal/util" + "github.com/vercel/turbo/cli/internal/util/browser" +) + +// GraphVisualizer requirements +type GraphVisualizer struct { + repoRoot turbopath.AbsoluteSystemPath + ui cli.Ui + TaskGraph *dag.AcyclicGraph +} + +// hasGraphViz checks for the presence of https://graphviz.org/ +func hasGraphViz() bool { + err := exec.Command("dot", "-V").Run() + return err == nil +} + +func getRandChar() string { + i := rand.Intn(25) + 65 + return string(rune(i)) +} + +func getRandID() string { + return getRandChar() + getRandChar() + getRandChar() + getRandChar() +} + +// New creates an instance of ColorCache with helpers for adding colors to task outputs +func New(repoRoot turbopath.AbsoluteSystemPath, ui cli.Ui, TaskGraph *dag.AcyclicGraph) *GraphVisualizer { + return &GraphVisualizer{ + repoRoot: repoRoot, + ui: ui, + TaskGraph: TaskGraph, + } +} + +// Converts the TaskGraph dag into a string +func (g *GraphVisualizer) generateDotString() string { + return string(g.TaskGraph.Dot(&dag.DotOpts{ + Verbose: true, + DrawCycles: true, + })) +} + +// Outputs a warning when a file was requested, but graphviz is not available +func (g *GraphVisualizer) graphVizWarnUI() { + g.ui.Warn(color.New(color.FgYellow, color.Bold, color.ReverseVideo).Sprint(" WARNING ") + color.YellowString(" `turbo` uses Graphviz to generate an image of your\ngraph, but Graphviz isn't installed on this machine.\n\nYou can download Graphviz from https://graphviz.org/download.\n\nIn the meantime, you can use this string output with an\nonline Dot graph viewer.")) +} + +// RenderDotGraph renders a dot graph string for the current TaskGraph +func (g *GraphVisualizer) RenderDotGraph() { + g.ui.Output("") + g.ui.Output(g.generateDotString()) +} + +type nameCache map[string]string + +func (nc nameCache) getName(in string) string { + if existing, ok := nc[in]; ok { + return existing + } + newName := getRandID() + nc[in] = newName + return newName +} + +type sortableEdge dag.Edge +type sortableEdges []sortableEdge + +// methods mostly copied from marshalEdges in the dag library +func (e sortableEdges) Less(i, j int) bool { + iSrc := dag.VertexName(e[i].Source()) + jSrc := dag.VertexName(e[j].Source()) + if iSrc < jSrc { + return true + } else if iSrc > jSrc { + return false + } + return dag.VertexName(e[i].Target()) < dag.VertexName(e[j].Target()) +} +func (e sortableEdges) Len() int { return len(e) } +func (e sortableEdges) Swap(i, j int) { e[i], e[j] = e[j], e[i] } + +func (g *GraphVisualizer) generateMermaid(out io.StringWriter) error { + if _, err := out.WriteString("graph TD\n"); err != nil { + return err + } + cache := make(nameCache) + // cast edges to our 
+	// this allows us to generate the same graph every time
+	var edges sortableEdges
+	for _, edge := range g.TaskGraph.Edges() {
+		edges = append(edges, sortableEdge(edge))
+	}
+	sort.Sort(edges)
+	for _, edge := range edges {
+		left := dag.VertexName(edge.Source())
+		right := dag.VertexName(edge.Target())
+		leftName := cache.getName(left)
+		rightName := cache.getName(right)
+		if _, err := out.WriteString(fmt.Sprintf("\t%v(\"%v\") --> %v(\"%v\")\n", leftName, left, rightName, right)); err != nil {
+			return err
+		}
+	}
+	return nil
+}
+
+// GenerateGraphFile saves a visualization of the TaskGraph to a file (or renders a DotGraph as a fallback)
+func (g *GraphVisualizer) GenerateGraphFile(outputName string) error {
+	outputFilename := g.repoRoot.UntypedJoin(outputName)
+	ext := outputFilename.Ext()
+	// use .jpg as default extension if none is provided
+	if ext == "" {
+		ext = ".jpg"
+		outputFilename = g.repoRoot.UntypedJoin(outputName + ext)
+	}
+	if ext == ".mermaid" {
+		f, err := outputFilename.Create()
+		if err != nil {
+			return fmt.Errorf("error creating file: %w", err)
+		}
+		defer util.CloseAndIgnoreError(f)
+		if err := g.generateMermaid(f); err != nil {
+			return err
+		}
+		g.ui.Output(fmt.Sprintf("✔ Generated task graph in %s", ui.Bold(outputFilename.ToString())))
+		return nil
+	}
+	graphString := g.generateDotString()
+	if ext == ".html" {
+		f, err := outputFilename.Create()
+		if err != nil {
+			return fmt.Errorf("error creating file: %w", err)
+		}
+		defer f.Close() //nolint errcheck
+		_, writeErr1 := f.WriteString(`
+Graph
+`)
+		if writeErr1 != nil {
+			return fmt.Errorf("error creating file: %w", writeErr1)
+		}
+
+		g.ui.Output("")
+		g.ui.Output(fmt.Sprintf("✔ Generated task graph in %s", ui.Bold(outputFilename.ToString())))
+		if ui.IsTTY {
+			if err := browser.OpenBrowser(outputFilename.ToString()); err != nil {
+				g.ui.Warn(color.New(color.FgYellow, color.Bold, color.ReverseVideo).Sprintf("failed to open browser. Please navigate to file://%v", filepath.ToSlash(outputFilename.ToString())))
+			}
+		}
+		return nil
+	}
+	hasDot := hasGraphViz()
+	if hasDot {
+		dotArgs := []string{"-T" + ext[1:], "-o", outputFilename.ToString()}
+		cmd := exec.Command("dot", dotArgs...)
+		cmd.Stdin = strings.NewReader(graphString)
+		if err := cmd.Run(); err != nil {
+			return fmt.Errorf("could not generate task graph file %v: %w", outputFilename, err)
+		}
+		g.ui.Output("")
+		g.ui.Output(fmt.Sprintf("✔ Generated task graph in %s", ui.Bold(outputFilename.ToString())))
+
+	} else {
+		g.ui.Output("")
+		// User requested a file, but we're falling back to console here so warn about installing graphViz correctly
+		g.graphVizWarnUI()
+		g.RenderDotGraph()
+	}
+	return nil
+}
diff --git a/cli/internal/hashing/package_deps_hash.go b/cli/internal/hashing/package_deps_hash.go
new file mode 100644
index 0000000..517cddd
--- /dev/null
+++ b/cli/internal/hashing/package_deps_hash.go
@@ -0,0 +1,461 @@
+package hashing
+
+import (
+	"bufio"
+	"fmt"
+	"io"
+	"os/exec"
+	"path/filepath"
+	"strings"
+	"sync"
+
+	"github.com/pkg/errors"
+	"github.com/vercel/turbo/cli/internal/encoding/gitoutput"
+	"github.com/vercel/turbo/cli/internal/fs"
+	"github.com/vercel/turbo/cli/internal/globby"
+	"github.com/vercel/turbo/cli/internal/turbopath"
+	"github.com/vercel/turbo/cli/internal/util"
+)
+
+// PackageDepsOptions are parameters for getting git hashes for a filesystem
+type PackageDepsOptions struct {
+	// PackagePath is the folder path to derive the package dependencies from.
This is typically the folder + // containing package.json. If omitted, the default value is the current working directory. + PackagePath turbopath.AnchoredSystemPath + + InputPatterns []string +} + +// GetPackageDeps Builds an object containing git hashes for the files under the specified `packagePath` folder. +func GetPackageDeps(rootPath turbopath.AbsoluteSystemPath, p *PackageDepsOptions) (map[turbopath.AnchoredUnixPath]string, error) { + pkgPath := rootPath.UntypedJoin(p.PackagePath.ToStringDuringMigration()) + // Add all the checked in hashes. + var result map[turbopath.AnchoredUnixPath]string + + // make a copy of the inputPatterns array, because we may be appending to it later. + calculatedInputs := make([]string, len(p.InputPatterns)) + copy(calculatedInputs, p.InputPatterns) + + if len(calculatedInputs) == 0 { + gitLsTreeOutput, err := gitLsTree(pkgPath) + if err != nil { + return nil, fmt.Errorf("could not get git hashes for files in package %s: %w", p.PackagePath, err) + } + result = gitLsTreeOutput + + // Update the checked in hashes with the current repo status + // The paths returned from this call are anchored at the package directory + gitStatusOutput, err := gitStatus(pkgPath, calculatedInputs) + if err != nil { + return nil, fmt.Errorf("Could not get git hashes from git status: %v", err) + } + + var filesToHash []turbopath.AnchoredSystemPath + for filePath, status := range gitStatusOutput { + if status.isDelete() { + delete(result, filePath) + } else { + filesToHash = append(filesToHash, filePath.ToSystemPath()) + } + } + + hashes, err := gitHashObject(turbopath.AbsoluteSystemPathFromUpstream(pkgPath.ToString()), filesToHash) + if err != nil { + return nil, err + } + + // Zip up file paths and hashes together + for filePath, hash := range hashes { + result[filePath] = hash + } + } else { + // Add in package.json and turbo.json to input patterns. Both file paths are relative to pkgPath + // + // - package.json is an input because if the `scripts` in + // the package.json change (i.e. the tasks that turbo executes), we want + // a cache miss, since any existing cache could be invalid. + // - turbo.json because it's the definition of the tasks themselves. The root turbo.json + // is similarly included in the global hash. This file may not exist in the workspace, but + // that is ok, because it will get ignored downstream. + calculatedInputs = append(calculatedInputs, "package.json") + calculatedInputs = append(calculatedInputs, "turbo.json") + + // The input patterns are relative to the package. + // However, we need to change the globbing to be relative to the repo root. + // Prepend the package path to each of the input patterns. + prefixedInputPatterns := []string{} + prefixedExcludePatterns := []string{} + for _, pattern := range calculatedInputs { + if len(pattern) > 0 && pattern[0] == '!' 
{ + rerooted, err := rootPath.PathTo(pkgPath.UntypedJoin(pattern[1:])) + if err != nil { + return nil, err + } + prefixedExcludePatterns = append(prefixedExcludePatterns, rerooted) + } else { + rerooted, err := rootPath.PathTo(pkgPath.UntypedJoin(pattern)) + if err != nil { + return nil, err + } + prefixedInputPatterns = append(prefixedInputPatterns, rerooted) + } + } + absoluteFilesToHash, err := globby.GlobFiles(rootPath.ToStringDuringMigration(), prefixedInputPatterns, prefixedExcludePatterns) + + if err != nil { + return nil, errors.Wrapf(err, "failed to resolve input globs %v", calculatedInputs) + } + + filesToHash := make([]turbopath.AnchoredSystemPath, len(absoluteFilesToHash)) + for i, rawPath := range absoluteFilesToHash { + relativePathString, err := pkgPath.RelativePathString(rawPath) + + if err != nil { + return nil, errors.Wrapf(err, "not relative to package: %v", rawPath) + } + + filesToHash[i] = turbopath.AnchoredSystemPathFromUpstream(relativePathString) + } + + hashes, err := gitHashObject(turbopath.AbsoluteSystemPathFromUpstream(pkgPath.ToStringDuringMigration()), filesToHash) + if err != nil { + return nil, errors.Wrap(err, "failed hashing resolved inputs globs") + } + result = hashes + // Note that in this scenario, we don't need to check git status, we're using hash-object directly which + // hashes the current state, not state at a commit + } + + return result, nil +} + +func manuallyHashFiles(rootPath turbopath.AbsoluteSystemPath, files []turbopath.AnchoredSystemPath) (map[turbopath.AnchoredUnixPath]string, error) { + hashObject := make(map[turbopath.AnchoredUnixPath]string) + for _, file := range files { + hash, err := fs.GitLikeHashFile(file.ToString()) + if err != nil { + return nil, fmt.Errorf("could not hash file %v. \n%w", file.ToString(), err) + } + + hashObject[file.ToUnixPath()] = hash + } + return hashObject, nil +} + +// GetHashableDeps hashes the list of given files, then returns a map of normalized path to hash +// this map is suitable for cross-platform caching. +func GetHashableDeps(rootPath turbopath.AbsoluteSystemPath, files []turbopath.AbsoluteSystemPath) (map[turbopath.AnchoredUnixPath]string, error) { + output := make([]turbopath.AnchoredSystemPath, len(files)) + convertedRootPath := turbopath.AbsoluteSystemPathFromUpstream(rootPath.ToString()) + + for index, file := range files { + anchoredSystemPath, err := file.RelativeTo(convertedRootPath) + if err != nil { + return nil, err + } + output[index] = anchoredSystemPath + } + hashObject, err := gitHashObject(convertedRootPath, output) + if err != nil { + manuallyHashedObject, err := manuallyHashFiles(convertedRootPath, output) + if err != nil { + return nil, err + } + hashObject = manuallyHashedObject + } + + return hashObject, nil +} + +// gitHashObject returns a map of paths to their SHA hashes calculated by passing the paths to `git hash-object`. +// `git hash-object` expects paths to use Unix separators, even on Windows. +// +// Note: paths of files to hash passed to `git hash-object` are processed as relative to the given anchor. +// For that reason we convert all input paths and make them relative to the anchor prior to passing them +// to `git hash-object`. 
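+//
+// Illustrative exchange (hypothetical paths): writing the quoted stdin lines
+//
+//	"src/a.go"
+//	"src/b.go"
+//
+// produces two newline-terminated 40-character object names on stdout, in the
+// same order as the inputs.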
+func gitHashObject(anchor turbopath.AbsoluteSystemPath, filesToHash []turbopath.AnchoredSystemPath) (map[turbopath.AnchoredUnixPath]string, error) { + fileCount := len(filesToHash) + output := make(map[turbopath.AnchoredUnixPath]string, fileCount) + + if fileCount > 0 { + cmd := exec.Command( + "git", // Using `git` from $PATH, + "hash-object", // hash a file, + "--stdin-paths", // using a list of newline-separated paths from stdin. + ) + cmd.Dir = anchor.ToString() // Start at this directory. + + // The functionality for gitHashObject is different enough that it isn't reasonable to + // generalize the behavior for `runGitCmd`. In fact, it doesn't even use the `gitoutput` + // encoding library, instead relying on its own separate `bufio.Scanner`. + + // We're going to send the list of files in via `stdin`, so we grab that pipe. + // This prevents a huge number of encoding issues and shell compatibility issues + // before they even start. + stdinPipe, stdinPipeError := cmd.StdinPipe() + if stdinPipeError != nil { + return nil, stdinPipeError + } + + // Kick the processing off in a goroutine so while that is doing its thing we can go ahead + // and wire up the consumer of `stdout`. + go func() { + defer util.CloseAndIgnoreError(stdinPipe) + + // `git hash-object` understands all relative paths to be relative to the repository. + // This function's result needs to be relative to `rootPath`. + // We convert all files to absolute paths and assume that they will be inside of the repository. + for _, file := range filesToHash { + converted := file.RestoreAnchor(anchor) + + // `git hash-object` expects paths to use Unix separators, even on Windows. + // `git hash-object` expects paths to be one per line so we must escape newlines. + // In order to understand the escapes, the path must be quoted. + // In order to quote the path, the quotes in the path must be escaped. + // Other than that, we just write everything with full Unicode. + stringPath := converted.ToString() + toSlashed := filepath.ToSlash(stringPath) + escapedNewLines := strings.ReplaceAll(toSlashed, "\n", "\\n") + escapedQuotes := strings.ReplaceAll(escapedNewLines, "\"", "\\\"") + prepared := fmt.Sprintf("\"%s\"\n", escapedQuotes) + _, err := io.WriteString(stdinPipe, prepared) + if err != nil { + return + } + } + }() + + // This gives us an io.ReadCloser so that we never have to read the entire input in + // at a single time. It is doing stream processing instead of string processing. + stdoutPipe, stdoutPipeError := cmd.StdoutPipe() + if stdoutPipeError != nil { + return nil, fmt.Errorf("failed to read `git hash-object`: %w", stdoutPipeError) + } + + startError := cmd.Start() + if startError != nil { + return nil, fmt.Errorf("failed to read `git hash-object`: %w", startError) + } + + // The output of `git hash-object` is a 40-character SHA per input, then a newline. + // We need to track the SHA that corresponds to the input file path. + index := 0 + hashes := make([]string, len(filesToHash)) + scanner := bufio.NewScanner(stdoutPipe) + + // Read the output line-by-line (which is our separator) until exhausted. + for scanner.Scan() { + bytes := scanner.Bytes() + + scanError := scanner.Err() + if scanError != nil { + return nil, fmt.Errorf("failed to read `git hash-object`: %w", scanError) + } + + hashError := gitoutput.CheckObjectName(bytes) + if hashError != nil { + return nil, fmt.Errorf("failed to read `git hash-object`: %s", "invalid hash received") + } + + // Worked, save it off. 
+ hashes[index] = string(bytes) + index++ + } + + // Waits until stdout is closed before proceeding. + waitErr := cmd.Wait() + if waitErr != nil { + return nil, fmt.Errorf("failed to read `git hash-object`: %w", waitErr) + } + + // Make sure we end up with a matching number of files and hashes. + hashCount := len(hashes) + if fileCount != hashCount { + return nil, fmt.Errorf("failed to read `git hash-object`: %d files %d hashes", fileCount, hashCount) + } + + // The API of this method specifies that we return a `map[turbopath.AnchoredUnixPath]string`. + for i, hash := range hashes { + filePath := filesToHash[i] + output[filePath.ToUnixPath()] = hash + } + } + + return output, nil +} + +// runGitCommand provides boilerplate command handling for `ls-tree`, `ls-files`, and `status` +// Rather than doing string processing, it does stream processing of `stdout`. +func runGitCommand(cmd *exec.Cmd, commandName string, handler func(io.Reader) *gitoutput.Reader) ([][]string, error) { + stdoutPipe, pipeError := cmd.StdoutPipe() + if pipeError != nil { + return nil, fmt.Errorf("failed to read `git %s`: %w", commandName, pipeError) + } + + startError := cmd.Start() + if startError != nil { + return nil, fmt.Errorf("failed to read `git %s`: %w", commandName, startError) + } + + reader := handler(stdoutPipe) + entries, readErr := reader.ReadAll() + if readErr != nil { + return nil, fmt.Errorf("failed to read `git %s`: %w", commandName, readErr) + } + + waitErr := cmd.Wait() + if waitErr != nil { + return nil, fmt.Errorf("failed to read `git %s`: %w", commandName, waitErr) + } + + return entries, nil +} + +// gitLsTree returns a map of paths to their SHA hashes starting at a particular directory +// that are present in the `git` index at a particular revision. +func gitLsTree(rootPath turbopath.AbsoluteSystemPath) (map[turbopath.AnchoredUnixPath]string, error) { + cmd := exec.Command( + "git", // Using `git` from $PATH, + "ls-tree", // list the contents of the git index, + "-r", // recursively, + "-z", // with each file path relative to the invocation directory and \000-terminated, + "HEAD", // at this specified version. + ) + cmd.Dir = rootPath.ToString() // Include files only from this directory. + + entries, err := runGitCommand(cmd, "ls-tree", gitoutput.NewLSTreeReader) + if err != nil { + return nil, err + } + + output := make(map[turbopath.AnchoredUnixPath]string, len(entries)) + + for _, entry := range entries { + lsTreeEntry := gitoutput.LsTreeEntry(entry) + output[turbopath.AnchoredUnixPathFromUpstream(lsTreeEntry.GetField(gitoutput.Path))] = lsTreeEntry[2] + } + + return output, nil +} + +// getTraversePath gets the distance of the current working directory to the repository root. +// This is used to convert repo-relative paths to cwd-relative paths. +// +// `git rev-parse --show-cdup` always returns Unix paths, even on Windows. +func getTraversePath(rootPath turbopath.AbsoluteSystemPath) (turbopath.RelativeUnixPath, error) { + cmd := exec.Command("git", "rev-parse", "--show-cdup") + cmd.Dir = rootPath.ToString() + + traversePath, err := cmd.Output() + if err != nil { + return "", err + } + + trimmedTraversePath := strings.TrimSuffix(string(traversePath), "\n") + + return turbopath.RelativeUnixPathFromUpstream(trimmedTraversePath), nil +} + +// Don't shell out if we already know where you are in the repository. +// `memoize` is a good candidate for generics. 
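+//
+// For illustration, a type-parameterized version (Go 1.18+) might look like
+// the following sketch, with memoizedGetTraversePath then simply being
+// memoize(getTraversePath); the hand-rolled, git-specific version below is
+// what actually ships:
+//
+//	func memoize[K comparable, V any](fn func(K) (V, error)) func(K) (V, error) {
+//		mu := &sync.RWMutex{}
+//		values := map[K]V{}
+//		errs := map[K]error{}
+//		return func(key K) (V, error) {
+//			mu.RLock()
+//			value, hasValue := values[key]
+//			err, hasErr := errs[key]
+//			mu.RUnlock()
+//			if hasValue && hasErr {
+//				return value, err
+//			}
+//			value, err = fn(key)
+//			mu.Lock()
+//			values[key] = value
+//			errs[key] = err
+//			mu.Unlock()
+//			return value, err
+//		}
+//	}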
+func memoizeGetTraversePath() func(turbopath.AbsoluteSystemPath) (turbopath.RelativeUnixPath, error) { + cacheMutex := &sync.RWMutex{} + cachedResult := map[turbopath.AbsoluteSystemPath]turbopath.RelativeUnixPath{} + cachedError := map[turbopath.AbsoluteSystemPath]error{} + + return func(rootPath turbopath.AbsoluteSystemPath) (turbopath.RelativeUnixPath, error) { + cacheMutex.RLock() + result, resultExists := cachedResult[rootPath] + err, errExists := cachedError[rootPath] + cacheMutex.RUnlock() + + if resultExists && errExists { + return result, err + } + + invokedResult, invokedErr := getTraversePath(rootPath) + cacheMutex.Lock() + cachedResult[rootPath] = invokedResult + cachedError[rootPath] = invokedErr + cacheMutex.Unlock() + + return invokedResult, invokedErr + } +} + +var memoizedGetTraversePath = memoizeGetTraversePath() + +// statusCode represents the two-letter status code from `git status` with two "named" fields, x & y. +// They have different meanings based upon the actual state of the working tree. Using x & y maps +// to upstream behavior. +type statusCode struct { + x string + y string +} + +func (s statusCode) isDelete() bool { + return s.x == "D" || s.y == "D" +} + +// gitStatus returns a map of paths to their `git` status code. This can be used to identify what should +// be done with files that do not currently match what is in the index. +// +// Note: `git status -z`'s relative path results are relative to the repository's location. +// We need to calculate where the repository's location is in order to determine what the full path is +// before we can return those paths relative to the calling directory, normalizing to the behavior of +// `ls-files` and `ls-tree`. +func gitStatus(rootPath turbopath.AbsoluteSystemPath, patterns []string) (map[turbopath.AnchoredUnixPath]statusCode, error) { + cmd := exec.Command( + "git", // Using `git` from $PATH, + "status", // tell me about the status of the working tree, + "--untracked-files", // including information about untracked files, + "--no-renames", // do not detect renames, + "-z", // with each file path relative to the repository root and \000-terminated, + "--", // and any additional argument you see is a path, promise. + ) + if len(patterns) == 0 { + cmd.Args = append(cmd.Args, ".") // Operate in the current directory instead of the root of the working tree. + } else { + // FIXME: Globbing is using `git`'s globbing rules which are not consistent with `doublestar``. + cmd.Args = append(cmd.Args, patterns...) // Pass in input patterns as arguments. + } + cmd.Dir = rootPath.ToString() // Include files only from this directory. + + entries, err := runGitCommand(cmd, "status", gitoutput.NewStatusReader) + if err != nil { + return nil, err + } + + output := make(map[turbopath.AnchoredUnixPath]statusCode, len(entries)) + convertedRootPath := turbopath.AbsoluteSystemPathFromUpstream(rootPath.ToString()) + + traversePath, err := memoizedGetTraversePath(convertedRootPath) + if err != nil { + return nil, err + } + + for _, entry := range entries { + statusEntry := gitoutput.StatusEntry(entry) + // Anchored at repository. 
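+ // For example (hypothetical layout): with the repository at /repo and
+ // rootPath at /repo/pkg, traversePath is "../", so the repo-relative entry
+ // "pkg/foo.txt" is re-anchored below to "foo.txt" relative to rootPath.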
+ pathFromStatus := turbopath.AnchoredUnixPathFromUpstream(statusEntry.GetField(gitoutput.Path)) + var outputPath turbopath.AnchoredUnixPath + + if len(traversePath) > 0 { + repositoryPath := convertedRootPath.Join(traversePath.ToSystemPath()) + fileFullPath := pathFromStatus.ToSystemPath().RestoreAnchor(repositoryPath) + + relativePath, err := fileFullPath.RelativeTo(convertedRootPath) + if err != nil { + return nil, err + } + + outputPath = relativePath.ToUnixPath() + } else { + outputPath = pathFromStatus + } + + output[outputPath] = statusCode{x: statusEntry.GetField(gitoutput.StatusX), y: statusEntry.GetField(gitoutput.StatusY)} + } + + return output, nil +} diff --git a/cli/internal/hashing/package_deps_hash_test.go b/cli/internal/hashing/package_deps_hash_test.go new file mode 100644 index 0000000..8f68d38 --- /dev/null +++ b/cli/internal/hashing/package_deps_hash_test.go @@ -0,0 +1,386 @@ +package hashing + +import ( + "errors" + "fmt" + "os" + "os/exec" + "path/filepath" + "reflect" + "runtime" + "strings" + "testing" + + "github.com/vercel/turbo/cli/internal/fs" + "github.com/vercel/turbo/cli/internal/turbopath" + "gotest.tools/v3/assert" +) + +func getFixture(id int) turbopath.AbsoluteSystemPath { + cwd, _ := os.Getwd() + root := turbopath.AbsoluteSystemPath(filepath.VolumeName(cwd) + string(os.PathSeparator)) + checking := turbopath.AbsoluteSystemPath(cwd) + + for checking != root { + fixtureDirectory := checking.Join("fixtures") + _, err := os.Stat(fixtureDirectory.ToString()) + if !errors.Is(err, os.ErrNotExist) { + // Found the fixture directory! + files, _ := os.ReadDir(fixtureDirectory.ToString()) + + // Grab the specified fixture. + for _, file := range files { + fileName := turbopath.RelativeSystemPath(file.Name()) + if strings.Index(fileName.ToString(), fmt.Sprintf("%02d-", id)) == 0 { + return turbopath.AbsoluteSystemPath(fixtureDirectory.Join(fileName)) + } + } + } + checking = checking.Join("..") + } + + panic("fixtures not found!") +} + +func TestSpecialCharacters(t *testing.T) { + if runtime.GOOS == "windows" { + return + } + + fixturePath := getFixture(1) + newlinePath := turbopath.AnchoredUnixPath("new\nline").ToSystemPath() + quotePath := turbopath.AnchoredUnixPath("\"quote\"").ToSystemPath() + newline := newlinePath.RestoreAnchor(fixturePath) + quote := quotePath.RestoreAnchor(fixturePath) + + // Setup + one := os.WriteFile(newline.ToString(), []byte{}, 0644) + two := os.WriteFile(quote.ToString(), []byte{}, 0644) + + // Cleanup + defer func() { + one := os.Remove(newline.ToString()) + two := os.Remove(quote.ToString()) + + if one != nil || two != nil { + return + } + }() + + // Setup error check + if one != nil || two != nil { + return + } + + tests := []struct { + name string + rootPath turbopath.AbsoluteSystemPath + filesToHash []turbopath.AnchoredSystemPath + want map[turbopath.AnchoredUnixPath]string + wantErr bool + }{ + { + name: "Quotes", + rootPath: fixturePath, + filesToHash: []turbopath.AnchoredSystemPath{ + quotePath, + }, + want: map[turbopath.AnchoredUnixPath]string{ + quotePath.ToUnixPath(): "e69de29bb2d1d6434b8b29ae775ad8c2e48c5391", + }, + }, + { + name: "Newlines", + rootPath: fixturePath, + filesToHash: []turbopath.AnchoredSystemPath{ + newlinePath, + }, + want: map[turbopath.AnchoredUnixPath]string{ + newlinePath.ToUnixPath(): "e69de29bb2d1d6434b8b29ae775ad8c2e48c5391", + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := gitHashObject(tt.rootPath, tt.filesToHash) + if (err != nil) != tt.wantErr { + 
t.Errorf("gitHashObject() error = %v, wantErr %v", err, tt.wantErr) + return + } + if !reflect.DeepEqual(got, tt.want) { + t.Errorf("gitHashObject() = %v, want %v", got, tt.want) + } + }) + } +} + +func Test_gitHashObject(t *testing.T) { + fixturePath := getFixture(1) + traversePath, err := getTraversePath(fixturePath) + if err != nil { + return + } + + tests := []struct { + name string + rootPath turbopath.AbsoluteSystemPath + filesToHash []turbopath.AnchoredSystemPath + want map[turbopath.AnchoredUnixPath]string + wantErr bool + }{ + { + name: "No paths", + rootPath: fixturePath, + filesToHash: []turbopath.AnchoredSystemPath{}, + want: map[turbopath.AnchoredUnixPath]string{}, + }, + { + name: "Absolute paths come back relative to rootPath", + rootPath: fixturePath.Join("child"), + filesToHash: []turbopath.AnchoredSystemPath{ + turbopath.AnchoredUnixPath("../root.json").ToSystemPath(), + turbopath.AnchoredUnixPath("child.json").ToSystemPath(), + turbopath.AnchoredUnixPath("grandchild/grandchild.json").ToSystemPath(), + }, + want: map[turbopath.AnchoredUnixPath]string{ + "../root.json": "e69de29bb2d1d6434b8b29ae775ad8c2e48c5391", + "child.json": "e69de29bb2d1d6434b8b29ae775ad8c2e48c5391", + "grandchild/grandchild.json": "e69de29bb2d1d6434b8b29ae775ad8c2e48c5391", + }, + }, + { + name: "Traverse outside of the repo", + rootPath: fixturePath.Join(traversePath.ToSystemPath(), ".."), + filesToHash: []turbopath.AnchoredSystemPath{ + turbopath.AnchoredUnixPath("null.json").ToSystemPath(), + }, + want: nil, + wantErr: true, + }, + { + name: "Nonexistent file", + rootPath: fixturePath, + filesToHash: []turbopath.AnchoredSystemPath{ + turbopath.AnchoredUnixPath("nonexistent.json").ToSystemPath(), + }, + want: nil, + wantErr: true, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := gitHashObject(tt.rootPath, tt.filesToHash) + if (err != nil) != tt.wantErr { + t.Errorf("gitHashObject() error = %v, wantErr %v", err, tt.wantErr) + return + } + if !reflect.DeepEqual(got, tt.want) { + t.Errorf("gitHashObject() = %v, want %v", got, tt.want) + } + }) + } +} + +func Test_getTraversePath(t *testing.T) { + fixturePath := getFixture(1) + + tests := []struct { + name string + rootPath turbopath.AbsoluteSystemPath + want turbopath.RelativeUnixPath + wantErr bool + }{ + { + name: "From fixture location", + rootPath: fixturePath, + want: turbopath.RelativeUnixPath("../../../"), + wantErr: false, + }, + { + name: "Traverse out of git repo", + rootPath: fixturePath.UntypedJoin("..", "..", "..", ".."), + want: "", + wantErr: true, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := getTraversePath(tt.rootPath) + if (err != nil) != tt.wantErr { + t.Errorf("getTraversePath() error = %v, wantErr %v", err, tt.wantErr) + return + } + if !reflect.DeepEqual(got, tt.want) { + t.Errorf("getTraversePath() = %v, want %v", got, tt.want) + } + }) + } +} + +func requireGitCmd(t *testing.T, repoRoot turbopath.AbsoluteSystemPath, args ...string) { + t.Helper() + cmd := exec.Command("git", args...) 
+ cmd.Dir = repoRoot.ToString() + out, err := cmd.CombinedOutput() + if err != nil { + t.Fatalf("git commit failed: %v %v", err, string(out)) + } +} + +func TestGetPackageDeps(t *testing.T) { + // Directory structure: + // / + // new-root-file <- new file not added to git + // my-pkg/ + // committed-file + // deleted-file + // uncommitted-file <- new file not added to git + // dir/ + // nested-file + + repoRoot := fs.AbsoluteSystemPathFromUpstream(t.TempDir()) + myPkgDir := repoRoot.UntypedJoin("my-pkg") + + // create the dir first + err := myPkgDir.MkdirAll(0775) + assert.NilError(t, err, "CreateDir") + + // create file 1 + committedFilePath := myPkgDir.UntypedJoin("committed-file") + err = committedFilePath.WriteFile([]byte("committed bytes"), 0644) + assert.NilError(t, err, "WriteFile") + + // create file 2 + deletedFilePath := myPkgDir.UntypedJoin("deleted-file") + err = deletedFilePath.WriteFile([]byte("delete-me"), 0644) + assert.NilError(t, err, "WriteFile") + + // create file 3 + nestedPath := myPkgDir.UntypedJoin("dir", "nested-file") + assert.NilError(t, nestedPath.EnsureDir(), "EnsureDir") + assert.NilError(t, nestedPath.WriteFile([]byte("nested"), 0644), "WriteFile") + + // create a package.json + packageJSONPath := myPkgDir.UntypedJoin("package.json") + err = packageJSONPath.WriteFile([]byte("{}"), 0644) + assert.NilError(t, err, "WriteFile") + + // set up git repo and commit all + requireGitCmd(t, repoRoot, "init", ".") + requireGitCmd(t, repoRoot, "config", "--local", "user.name", "test") + requireGitCmd(t, repoRoot, "config", "--local", "user.email", "test@example.com") + requireGitCmd(t, repoRoot, "add", ".") + requireGitCmd(t, repoRoot, "commit", "-m", "foo") + + // remove a file + err = deletedFilePath.Remove() + assert.NilError(t, err, "Remove") + + // create another untracked file in git + uncommittedFilePath := myPkgDir.UntypedJoin("uncommitted-file") + err = uncommittedFilePath.WriteFile([]byte("uncommitted bytes"), 0644) + assert.NilError(t, err, "WriteFile") + + // create an untracked file in git up a level + rootFilePath := repoRoot.UntypedJoin("new-root-file") + err = rootFilePath.WriteFile([]byte("new-root bytes"), 0644) + assert.NilError(t, err, "WriteFile") + + tests := []struct { + opts *PackageDepsOptions + expected map[turbopath.AnchoredUnixPath]string + }{ + // base case. 
when inputs aren't specified, all files hashes are computed + { + opts: &PackageDepsOptions{ + PackagePath: "my-pkg", + }, + expected: map[turbopath.AnchoredUnixPath]string{ + "committed-file": "3a29e62ea9ba15c4a4009d1f605d391cdd262033", + "uncommitted-file": "4e56ad89387e6379e4e91ddfe9872cf6a72c9976", + "package.json": "9e26dfeeb6e641a33dae4961196235bdb965b21b", + "dir/nested-file": "bfe53d766e64d78f80050b73cd1c88095bc70abb", + }, + }, + // with inputs, only the specified inputs are hashed + { + opts: &PackageDepsOptions{ + PackagePath: "my-pkg", + InputPatterns: []string{"uncommitted-file"}, + }, + expected: map[turbopath.AnchoredUnixPath]string{ + "package.json": "9e26dfeeb6e641a33dae4961196235bdb965b21b", + "uncommitted-file": "4e56ad89387e6379e4e91ddfe9872cf6a72c9976", + }, + }, + // inputs with glob pattern also works + { + opts: &PackageDepsOptions{ + PackagePath: "my-pkg", + InputPatterns: []string{"**/*-file"}, + }, + expected: map[turbopath.AnchoredUnixPath]string{ + "committed-file": "3a29e62ea9ba15c4a4009d1f605d391cdd262033", + "uncommitted-file": "4e56ad89387e6379e4e91ddfe9872cf6a72c9976", + "package.json": "9e26dfeeb6e641a33dae4961196235bdb965b21b", + "dir/nested-file": "bfe53d766e64d78f80050b73cd1c88095bc70abb", + }, + }, + // inputs with traversal work + { + opts: &PackageDepsOptions{ + PackagePath: "my-pkg", + InputPatterns: []string{"../**/*-file"}, + }, + expected: map[turbopath.AnchoredUnixPath]string{ + "../new-root-file": "8906ddcdd634706188bd8ef1c98ac07b9be3425e", + "committed-file": "3a29e62ea9ba15c4a4009d1f605d391cdd262033", + "uncommitted-file": "4e56ad89387e6379e4e91ddfe9872cf6a72c9976", + "package.json": "9e26dfeeb6e641a33dae4961196235bdb965b21b", + "dir/nested-file": "bfe53d766e64d78f80050b73cd1c88095bc70abb", + }, + }, + // inputs with another glob pattern works + { + opts: &PackageDepsOptions{ + PackagePath: "my-pkg", + InputPatterns: []string{"**/{uncommitted,committed}-file"}, + }, + expected: map[turbopath.AnchoredUnixPath]string{ + "committed-file": "3a29e62ea9ba15c4a4009d1f605d391cdd262033", + "package.json": "9e26dfeeb6e641a33dae4961196235bdb965b21b", + "uncommitted-file": "4e56ad89387e6379e4e91ddfe9872cf6a72c9976", + }, + }, + // inputs with another glob pattern + traversal work + { + opts: &PackageDepsOptions{ + PackagePath: "my-pkg", + InputPatterns: []string{"../**/{new-root,uncommitted,committed}-file"}, + }, + expected: map[turbopath.AnchoredUnixPath]string{ + "../new-root-file": "8906ddcdd634706188bd8ef1c98ac07b9be3425e", + "committed-file": "3a29e62ea9ba15c4a4009d1f605d391cdd262033", + "package.json": "9e26dfeeb6e641a33dae4961196235bdb965b21b", + "uncommitted-file": "4e56ad89387e6379e4e91ddfe9872cf6a72c9976", + }, + }, + } + for _, tt := range tests { + got, err := GetPackageDeps(repoRoot, tt.opts) + if err != nil { + t.Errorf("GetPackageDeps got error %v", err) + continue + } + assert.DeepEqual(t, got, tt.expected) + } +} + +func Test_memoizedGetTraversePath(t *testing.T) { + fixturePath := getFixture(1) + + gotOne, _ := memoizedGetTraversePath(fixturePath) + gotTwo, _ := memoizedGetTraversePath(fixturePath) + + assert.Check(t, gotOne == gotTwo, "The strings are identical.") +} diff --git a/cli/internal/inference/inference.go b/cli/internal/inference/inference.go new file mode 100644 index 0000000..5d6d34f --- /dev/null +++ b/cli/internal/inference/inference.go @@ -0,0 +1,167 @@ +package inference + +import "github.com/vercel/turbo/cli/internal/fs" + +// Framework is an identifier for something that we wish to inference against. 
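+// A Framework matches a package when its DependencyMatch is satisfied: the
+// `all` strategy requires every listed dependency to be present, while `some`
+// requires at least one of them (see matcher.match below).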
+type Framework struct { + Slug string + EnvMatcher string + DependencyMatch matcher +} + +type matcher struct { + strategy matchStrategy + dependencies []string +} + +type matchStrategy int + +const ( + all matchStrategy = iota + 1 + some +) + +var _frameworks = []Framework{ + { + Slug: "blitzjs", + EnvMatcher: "^NEXT_PUBLIC_", + DependencyMatch: matcher{ + strategy: all, + dependencies: []string{"blitz"}, + }, + }, + { + Slug: "nextjs", + EnvMatcher: "^NEXT_PUBLIC_", + DependencyMatch: matcher{ + strategy: all, + dependencies: []string{"next"}, + }, + }, + { + Slug: "gatsby", + EnvMatcher: "^GATSBY_", + DependencyMatch: matcher{ + strategy: all, + dependencies: []string{"gatsby"}, + }, + }, + { + Slug: "astro", + EnvMatcher: "^PUBLIC_", + DependencyMatch: matcher{ + strategy: all, + dependencies: []string{"astro"}, + }, + }, + { + Slug: "solidstart", + EnvMatcher: "^VITE_", + DependencyMatch: matcher{ + strategy: all, + dependencies: []string{"solid-js", "solid-start"}, + }, + }, + { + Slug: "vue", + EnvMatcher: "^VUE_APP_", + DependencyMatch: matcher{ + strategy: all, + dependencies: []string{"@vue/cli-service"}, + }, + }, + { + Slug: "sveltekit", + EnvMatcher: "^VITE_", + DependencyMatch: matcher{ + strategy: all, + dependencies: []string{"@sveltejs/kit"}, + }, + }, + { + Slug: "create-react-app", + EnvMatcher: "^REACT_APP_", + DependencyMatch: matcher{ + strategy: some, + dependencies: []string{"react-scripts", "react-dev-utils"}, + }, + }, + { + Slug: "nuxtjs", + EnvMatcher: "^NUXT_ENV_", + DependencyMatch: matcher{ + strategy: some, + dependencies: []string{"nuxt", "nuxt-edge", "nuxt3", "nuxt3-edge"}, + }, + }, + { + Slug: "redwoodjs", + EnvMatcher: "^REDWOOD_ENV_", + DependencyMatch: matcher{ + strategy: all, + dependencies: []string{"@redwoodjs/core"}, + }, + }, + { + Slug: "vite", + EnvMatcher: "^VITE_", + DependencyMatch: matcher{ + strategy: all, + dependencies: []string{"vite"}, + }, + }, + { + Slug: "sanity", + EnvMatcher: "^SANITY_STUDIO_", + DependencyMatch: matcher{ + strategy: all, + dependencies: []string{"@sanity/cli"}, + }, + }, +} + +func (m matcher) match(pkg *fs.PackageJSON) bool { + deps := pkg.UnresolvedExternalDeps + // only check dependencies if we're in a non-monorepo + if pkg.Workspaces != nil && len(pkg.Workspaces) == 0 { + deps = pkg.Dependencies + } + + if m.strategy == all { + for _, dependency := range m.dependencies { + _, exists := deps[dependency] + if !exists { + return false + } + } + return true + } + + // m.strategy == some + for _, dependency := range m.dependencies { + _, exists := deps[dependency] + if exists { + return true + } + } + return false +} + +func (f Framework) match(pkg *fs.PackageJSON) bool { + return f.DependencyMatch.match(pkg) +} + +// InferFramework returns a reference to a matched framework +func InferFramework(pkg *fs.PackageJSON) *Framework { + if pkg == nil { + return nil + } + + for _, candidateFramework := range _frameworks { + if candidateFramework.match(pkg) { + return &candidateFramework + } + } + + return nil +} diff --git a/cli/internal/inference/inference_test.go b/cli/internal/inference/inference_test.go new file mode 100644 index 0000000..ed82ecc --- /dev/null +++ b/cli/internal/inference/inference_test.go @@ -0,0 +1,97 @@ +package inference + +import ( + "reflect" + "testing" + + "github.com/vercel/turbo/cli/internal/fs" +) + +func getFrameworkBySlug(slug string) *Framework { + for _, framework := range _frameworks { + if framework.Slug == slug { + return &framework + } + } + panic("that framework doesn't exist") 
+} + +func TestInferFramework(t *testing.T) { + tests := []struct { + name string + pkg *fs.PackageJSON + want *Framework + }{ + { + name: "Hello world", + pkg: nil, + want: nil, + }, + { + name: "Empty dependencies", + pkg: &fs.PackageJSON{UnresolvedExternalDeps: map[string]string{}}, + want: nil, + }, + { + name: "Finds Blitz", + pkg: &fs.PackageJSON{UnresolvedExternalDeps: map[string]string{ + "blitz": "*", + }}, + want: getFrameworkBySlug("blitzjs"), + }, + { + name: "Order is preserved (returns blitz, not next)", + pkg: &fs.PackageJSON{UnresolvedExternalDeps: map[string]string{ + "blitz": "*", + "next": "*", + }}, + want: getFrameworkBySlug("blitzjs"), + }, + { + name: "Finds next without blitz", + pkg: &fs.PackageJSON{UnresolvedExternalDeps: map[string]string{ + "next": "*", + }}, + want: getFrameworkBySlug("nextjs"), + }, + { + name: "match strategy of all works (solid)", + pkg: &fs.PackageJSON{UnresolvedExternalDeps: map[string]string{ + "solid-js": "*", + "solid-start": "*", + }}, + want: getFrameworkBySlug("solidstart"), + }, + { + name: "match strategy of some works (nuxt)", + pkg: &fs.PackageJSON{UnresolvedExternalDeps: map[string]string{ + "nuxt3": "*", + }}, + want: getFrameworkBySlug("nuxtjs"), + }, + { + name: "match strategy of some works (c-r-a)", + pkg: &fs.PackageJSON{UnresolvedExternalDeps: map[string]string{ + "react-scripts": "*", + }}, + want: getFrameworkBySlug("create-react-app"), + }, + { + name: "Finds next in non monorepo", + pkg: &fs.PackageJSON{ + Dependencies: map[string]string{ + "next": "*", + }, + Workspaces: []string{}, + }, + want: getFrameworkBySlug("nextjs"), + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if got := InferFramework(tt.pkg); !reflect.DeepEqual(got, tt.want) { + t.Errorf("InferFramework() = %v, want %v", got, tt.want) + } + }) + } +} diff --git a/cli/internal/lockfile/berry_lockfile.go b/cli/internal/lockfile/berry_lockfile.go new file mode 100644 index 0000000..e76f230 --- /dev/null +++ b/cli/internal/lockfile/berry_lockfile.go @@ -0,0 +1,709 @@ +package lockfile + +import ( + "bytes" + "encoding/json" + "fmt" + "io" + "reflect" + "regexp" + "sort" + "strconv" + "strings" + + "github.com/Masterminds/semver" + "github.com/andybalholm/crlf" + "github.com/pkg/errors" + "github.com/vercel/turbo/cli/internal/turbopath" + "github.com/vercel/turbo/cli/internal/yaml" +) + +var _multipleKeyRegex = regexp.MustCompile(" *, *") + +// A tag cannot start with a "v" +var _tagRegex = regexp.MustCompile("^[a-zA-Z0-9.-_-[v]][a-zA-Z0-9._-]*$") + +var _metadataKey = "__metadata" + +type _void struct{} + +// BerryLockfileEntry package information from yarn lockfile +// Full Definition at https://github.com/yarnpkg/berry/blob/master/packages/yarnpkg-core/sources/Manifest.ts +// Only a subset of full definition are written to the lockfile +type BerryLockfileEntry struct { + Version string `yaml:"version"` + LanguageName string `yaml:"languageName,omitempty"` + + Dependencies map[string]string `yaml:"dependencies,omitempty"` + PeerDependencies map[string]string `yaml:"peerDependencies,omitempty"` + + DependenciesMeta map[string]BerryDependencyMetaEntry `yaml:"dependenciesMeta,omitempty"` + PeerDependenciesMeta map[string]BerryDependencyMetaEntry `yaml:"peerDependenciesMeta,omitempty"` + + Bin map[string]string `yaml:"bin,omitempty"` + + LinkType string `yaml:"linkType,omitempty"` + Resolution string `yaml:"resolution,omitempty"` + Checksum string `yaml:"checksum,omitempty"` + Conditions string `yaml:"conditions,omitempty"` + + // 
Only used for metadata entry + CacheKey string `yaml:"cacheKey,omitempty"` +} + +// Return a list of descriptors that this entry possibly uses +func (b *BerryLockfileEntry) possibleDescriptors() []_Descriptor { + descriptors := []_Descriptor{} + addDescriptor := func(name, version string) { + descriptors = append(descriptors, berryPossibleKeys(name, version)...) + } + + for dep, version := range b.Dependencies { + addDescriptor(dep, version) + } + + return descriptors +} + +// BerryLockfile representation of berry lockfile +type BerryLockfile struct { + packages map[_Locator]*BerryLockfileEntry + version int + cacheKey string + // Mapping descriptors (lodash@npm:^4.17.21) to their resolutions (lodash@npm:4.17.21) + descriptors map[_Descriptor]_Locator + // Mapping regular package locators to patched package locators + patches map[_Locator]_Locator + // Descriptors that are only used by package extensions + packageExtensions map[_Descriptor]_void + hasCRLF bool +} + +// BerryDependencyMetaEntry Structure for holding if a package is optional or not +type BerryDependencyMetaEntry struct { + Optional bool `yaml:"optional,omitempty"` + Unplugged bool `yaml:"unplugged,omitempty"` +} + +var _ Lockfile = (*BerryLockfile)(nil) + +// ResolvePackage Given a package and version returns the key, resolved version, and if it was found +func (l *BerryLockfile) ResolvePackage(_workspace turbopath.AnchoredUnixPath, name string, version string) (Package, error) { + for _, key := range berryPossibleKeys(name, version) { + if locator, ok := l.descriptors[key]; ok { + entry := l.packages[locator] + return Package{ + Found: true, + Key: locator.String(), + Version: entry.Version, + }, nil + } + } + + return Package{}, nil +} + +// AllDependencies Given a lockfile key return all (dev/optional/peer) dependencies of that package +func (l *BerryLockfile) AllDependencies(key string) (map[string]string, bool) { + deps := map[string]string{} + var locator _Locator + if err := locator.parseLocator(key); err != nil { + // We should never hit this as we have already vetted all entries in the lockfile + // during the creation of the lockfile struct + panic(fmt.Sprintf("invalid locator string: %s", key)) + } + entry, ok := l.packages[locator] + if !ok { + return deps, false + } + + for name, version := range entry.Dependencies { + deps[name] = version + } + + return deps, true +} + +// Subgraph Given a list of lockfile keys returns a Lockfile based off the original one that only contains the packages given +func (l *BerryLockfile) Subgraph(workspacePackages []turbopath.AnchoredSystemPath, packages []string) (Lockfile, error) { + prunedPackages := make(map[_Locator]*BerryLockfileEntry, len(packages)) + prunedDescriptors := make(map[_Descriptor]_Locator, len(prunedPackages)) + patches := make(map[_Locator]_Locator, len(l.patches)) + reverseLookup := l.locatorToDescriptors() + + // add workspace package entries + for locator, pkg := range l.packages { + if locator.reference == "workspace:." 
{ + prunedPackages[locator] = pkg + descriptor := _Descriptor{locator._Ident, locator.reference} + prunedDescriptors[descriptor] = locator + for desc := range reverseLookup[locator] { + prunedDescriptors[desc] = locator + } + } + } + for _, workspacePackage := range workspacePackages { + expectedReference := fmt.Sprintf("workspace:%s", workspacePackage.ToUnixPath().ToString()) + for locator, pkg := range l.packages { + if locator.reference == expectedReference { + prunedPackages[locator] = pkg + descriptor := _Descriptor{locator._Ident, locator.reference} + prunedDescriptors[descriptor] = locator + } + } + } + + for _, key := range packages { + var locator _Locator + if err := locator.parseLocator(key); err != nil { + // We should never hit this as we have already vetted all entries in the lockfile + // during the creation of the lockfile struct + panic(fmt.Sprintf("invalid locator string: %s", key)) + } + entry, ok := l.packages[locator] + if ok { + prunedPackages[locator] = entry + } + // If a package has a patch it should be included in the subgraph + patchLocator, ok := l.patches[locator] + if ok { + patches[locator] = patchLocator + prunedPackages[patchLocator] = l.packages[patchLocator] + } + } + + for _, entry := range prunedPackages { + for _, desc := range entry.possibleDescriptors() { + locator, ok := l.descriptors[desc] + if ok { + prunedDescriptors[desc] = locator + } + } + } + + // For each patch we find all descriptors for the primary package and patched package + for primaryLocator, patchLocator := range patches { + primaryDescriptors := reverseLookup[primaryLocator] + patchDescriptors := reverseLookup[patchLocator] + + // For each patch descriptor we extract the primary descriptor that it targets and, + // if that primary descriptor made it into the pruned map, keep the patch descriptor as well + for patch := range patchDescriptors { + primaryVersion, _ := patch.primaryVersion() + primaryDescriptor := _Descriptor{patch._Ident, primaryVersion} + _, isPresent := primaryDescriptors[primaryDescriptor] + if !isPresent { + panic(fmt.Sprintf("Unable to find primary descriptor %s", &primaryDescriptor)) + } + + if _, ok := prunedDescriptors[primaryDescriptor]; ok { + prunedDescriptors[patch] = patchLocator + } + } + } + + // Add any descriptors used by package extensions + for descriptor := range l.packageExtensions { + locator := l.descriptors[descriptor] + _, ok := prunedPackages[locator] + if ok { + prunedDescriptors[descriptor] = locator + } + } + + // berry only includes a cache key in the lockfile if there are entries with a checksum + cacheKey := "" + for _, entry := range prunedPackages { + if entry.Checksum != "" { + cacheKey = l.cacheKey + break + } + } + + return &BerryLockfile{ + packages: prunedPackages, + version: l.version, + cacheKey: cacheKey, + descriptors: prunedDescriptors, + patches: patches, + packageExtensions: l.packageExtensions, + hasCRLF: l.hasCRLF, + }, nil +} + +// Encode encode the lockfile representation and write it to the given writer +func (l *BerryLockfile) Encode(w io.Writer) error { + // Map all resolved packages to the descriptors that match them + reverseLookup := l.locatorToDescriptors() + + lockfile := make(map[string]*BerryLockfileEntry, len(l.packages)) + + lockfile[_metadataKey] = &BerryLockfileEntry{ + Version: fmt.Sprintf("%d", l.version), + CacheKey: l.cacheKey, + } + + for locator, descriptors := range reverseLookup { + sortedDescriptors :=
make([]string, len(descriptors)) + i := 0 + for descriptor := range descriptors { + sortedDescriptors[i] = descriptor.String() + i++ + } + sort.Strings(sortedDescriptors) + + key := strings.Join(sortedDescriptors, ", ") + + entry, ok := l.packages[locator] + if !ok { + return fmt.Errorf("Unable to find entry for %s", &locator) + } + + lockfile[key] = entry + } + + if l.hasCRLF { + w = crlf.NewWriter(w) + } + + _, err := io.WriteString(w, `# This file is generated by running "yarn install" inside your project. +# Manual changes might be lost - proceed with caution! +`) + if err != nil { + return errors.Wrap(err, "unable to write header to lockfile") + } + + return _writeBerryLockfile(w, lockfile) +} + +// Invert the descriptor to locator map +func (l *BerryLockfile) locatorToDescriptors() map[_Locator]map[_Descriptor]_void { + reverseLookup := make(map[_Locator]map[_Descriptor]_void, len(l.packages)) + for descriptor, locator := range l.descriptors { + descriptors, ok := reverseLookup[locator] + if !ok { + reverseLookup[locator] = map[_Descriptor]_void{descriptor: {}} + } else { + descriptors[descriptor] = _void{} + } + } + + return reverseLookup +} + +// Patches return a list of patches used in the lockfile +func (l *BerryLockfile) Patches() []turbopath.AnchoredUnixPath { + patches := []turbopath.AnchoredUnixPath{} + + for _, patchLocator := range l.patches { + patchPath, isPatch := patchLocator.patchPath() + + if isPatch && !strings.HasPrefix(patchPath, "~") && !_builtinRegexp.MatchString(patchPath) { + patches = append(patches, turbopath.AnchoredUnixPath(patchPath)) + } + } + + if len(patches) == 0 { + return nil + } + + return patches +} + +// DecodeBerryLockfile Takes the contents of a berry lockfile and returns a struct representation +func DecodeBerryLockfile(contents []byte) (*BerryLockfile, error) { + var packages map[string]*BerryLockfileEntry + + hasCRLF := bytes.HasSuffix(contents, _crlfLiteral) + err := yaml.Unmarshal(contents, &packages) + if err != nil { + return &BerryLockfile{}, fmt.Errorf("could not unmarshal lockfile: %w", err) + } + + metadata, ok := packages[_metadataKey] + if !ok { + return nil, errors.New("No __metadata entry found when decoding yarn.lock") + } + version, err := strconv.Atoi(metadata.Version) + if err != nil { + return nil, errors.Wrap(err, "yarn lockfile version isn't valid integer") + } + delete(packages, _metadataKey) + + locatorToPackage := map[_Locator]*BerryLockfileEntry{} + descriptorToLocator := map[_Descriptor]_Locator{} + // A map from packages to their patch entries + patches := map[_Locator]_Locator{} + + for key, data := range packages { + var locator _Locator + if err := locator.parseLocator(data.Resolution); err != nil { + return nil, errors.Wrap(err, "unable to parse entry") + } + + if _, isPatch := locator.patchPath(); isPatch { + // A patch will have the same identifier and version allowing us to construct the non-patch entry + originalLocator := _Locator{locator._Ident, fmt.Sprintf("npm:%s", data.Version)} + patches[originalLocator] = locator + } + + // Before storing cacheKey set it to "" so we know it's invalid + data.CacheKey = "" + + locatorToPackage[locator] = data + + // All descriptors that resolve to a single locator are grouped into a single key + for _, entry := range _multipleKeyRegex.Split(key, -1) { + descriptor := _Descriptor{} + if err := descriptor.parseDescriptor(entry); err != nil { + return nil, errors.Wrap(err, "Bad entry key found") + } + + // Before lockfile version 6 descriptors could be missing the npm 
protocol + if version <= 6 && descriptor.versionRange != "*" { + _, err := semver.NewConstraint(descriptor.versionRange) + if err == nil || _tagRegex.MatchString(descriptor.versionRange) { + descriptor.versionRange = fmt.Sprintf("npm:%s", descriptor.versionRange) + } + } + + descriptorToLocator[descriptor] = locator + } + } + + // Build up list of all descriptors in the file + packageExtensions := make(map[_Descriptor]_void, len(descriptorToLocator)) + for descriptor := range descriptorToLocator { + if descriptor.protocol() == "npm" { + packageExtensions[descriptor] = _void{} + } + } + // Remove any that are found in the lockfile entries + for _, entry := range packages { + for _, descriptor := range entry.possibleDescriptors() { + delete(packageExtensions, descriptor) + } + } + + lockfile := BerryLockfile{ + packages: locatorToPackage, + version: version, + cacheKey: metadata.CacheKey, + descriptors: descriptorToLocator, + patches: patches, + packageExtensions: packageExtensions, + hasCRLF: hasCRLF, + } + return &lockfile, nil +} + +// GlobalChange checks if there are any differences between lockfiles that would completely invalidate +// the cache. +func (l *BerryLockfile) GlobalChange(other Lockfile) bool { + otherBerry, ok := other.(*BerryLockfile) + return !ok || + l.cacheKey != otherBerry.cacheKey || + l.version != otherBerry.version || + // This is probably overly cautious, but getting it correct will be hard + !reflect.DeepEqual(l.patches, otherBerry.patches) +} + +// Fields shared between _Locator and _Descriptor +type _Ident struct { + // Scope of package without leading @ + scope string + // Name of package + name string +} + +type _Locator struct { + _Ident + // Resolved version e.g. 1.2.3 + reference string +} + +type _Descriptor struct { + _Ident + // Version range e.g. ^1.0.0 + // Can be prefixed with the protocol e.g. 
npm, workspace, patch, + versionRange string +} + +func (i _Ident) String() string { + if i.scope == "" { + return i.name + } + return fmt.Sprintf("@%s/%s", i.scope, i.name) +} + +var _locatorRegexp = regexp.MustCompile("^(?:@([^/]+?)/)?([^/]+?)(?:@(.+))$") + +func (l *_Locator) parseLocator(data string) error { + matches := _locatorRegexp.FindStringSubmatch(data) + if len(matches) != 4 { + return fmt.Errorf("%s is not a valid locator string", data) + } + l.scope = matches[1] + l.name = matches[2] + l.reference = matches[3] + + return nil +} + +func (l *_Locator) String() string { + if l.scope == "" { + return fmt.Sprintf("%s@%s", l.name, l.reference) + } + return fmt.Sprintf("@%s/%s@%s", l.scope, l.name, l.reference) +} + +var _builtinRegexp = regexp.MustCompile("^builtin<([^>]+)>$") + +func (l *_Locator) patchPath() (string, bool) { + if strings.HasPrefix(l.reference, "patch:") { + patchFileIndex := strings.Index(l.reference, "#") + paramIndex := strings.LastIndex(l.reference, "::") + if patchFileIndex == -1 || paramIndex == -1 { + // Better error handling + panic("Unable to extract patch file path from lockfile entry") + } + patchPath := strings.TrimPrefix(l.reference[patchFileIndex+1:paramIndex], "./") + + return patchPath, true + } + + return "", false +} + +var _descriptorRegexp = regexp.MustCompile("^(?:@([^/]+?)/)?([^/]+?)(?:@(.+))?$") + +func (d *_Descriptor) parseDescriptor(data string) error { + matches := _descriptorRegexp.FindStringSubmatch(data) + if len(matches) != 4 { + return fmt.Errorf("%s is not a valid descriptor string", data) + } + + d.scope = matches[1] + d.name = matches[2] + d.versionRange = matches[3] + + return nil +} + +// If the descriptor is for a patch it will return the primary descriptor that it patches +func (d *_Descriptor) primaryVersion() (string, bool) { + if !strings.HasPrefix(d.versionRange, "patch:") { + return "", false + } + patchFileIndex := strings.Index(d.versionRange, "#") + versionRangeIndex := strings.Index(d.versionRange, "@") + if patchFileIndex < 0 || versionRangeIndex < 0 { + panic("Patch reference is missing required markers") + } + // The ':' following npm protocol gets encoded as '%3A' in the patch string + version := strings.Replace(d.versionRange[versionRangeIndex+1:patchFileIndex], "%3A", ":", 1) + if !strings.HasPrefix(version, "npm:") { + version = fmt.Sprintf("npm:%s", version) + } + + return version, true +} + +// Returns the protocol of the descriptor +func (d *_Descriptor) protocol() string { + if index := strings.Index(d.versionRange, ":"); index > 0 { + return d.versionRange[:index] + } + return "" +} + +func (d *_Descriptor) String() string { + if d.scope == "" { + return fmt.Sprintf("%s@%s", d.name, d.versionRange) + } + return fmt.Sprintf("@%s/%s@%s", d.scope, d.name, d.versionRange) +} + +func berryPossibleKeys(name string, version string) []_Descriptor { + makeDescriptor := func(protocol string) _Descriptor { + descriptorString := fmt.Sprintf("%s@%s%s", name, protocol, version) + var descriptor _Descriptor + if err := descriptor.parseDescriptor(descriptorString); err != nil { + panic("Generated invalid descriptor") + } + return descriptor + } + return []_Descriptor{ + makeDescriptor(""), + makeDescriptor("npm:"), + makeDescriptor("file:"), + makeDescriptor("workspace:"), + makeDescriptor("yarn:"), + } +} + +func _writeBerryLockfile(w io.Writer, lockfile map[string]*BerryLockfileEntry) error { + keys := make([]string, len(lockfile)) + i := 0 + for key := range lockfile { + keys[i] = key + i++ + } + + // The __metadata 
key gets hoisted to the top + sort.Slice(keys, func(i, j int) bool { + if keys[i] == _metadataKey { + return true + } else if keys[j] == _metadataKey { + return false + } + return keys[i] < keys[j] + }) + + for _, key := range keys { + value, ok := lockfile[key] + if !ok { + panic(fmt.Sprintf("Unable to find entry for %s", key)) + } + + wrappedKey := _wrapString(key) + wrappedValue := _stringifyEntry(*value, 1) + + var keyPart string + // Keys longer than 1024 characters must use YAML's explicit key syntax + if len(wrappedKey) > 1024 { + keyPart = fmt.Sprintf("? %s\n:", wrappedKey) + } else { + keyPart = fmt.Sprintf("%s:", wrappedKey) + } + + _, err := io.WriteString(w, fmt.Sprintf("\n%s\n%s\n", keyPart, wrappedValue)) + if err != nil { + return errors.Wrap(err, "unable to write to lockfile") + } + } + + return nil +} + +var _simpleStringPattern = regexp.MustCompile("^[^-?:,\\][{}#&*!|>'\"%@` \t\r\n]([ \t]*[^,\\][{}:# \t\r\n])*$") + +func _wrapString(str string) string { + if !_simpleStringPattern.MatchString(str) { + var b bytes.Buffer + encoder := json.NewEncoder(&b) + encoder.SetEscapeHTML(false) + err := encoder.Encode(str) + if err != nil { + panic("Unexpected error wrapping key") + } + + return strings.TrimRight(b.String(), "\n") + } + return str +} + +func _stringifyEntry(entry BerryLockfileEntry, indentLevel int) string { + lines := []string{} + addLine := func(field, value string, inline bool) { + var line string + if inline { + line = fmt.Sprintf(" %s: %s", field, value) + } else { + line = fmt.Sprintf(" %s:\n%s", field, value) + } + lines = append(lines, line) + } + + if entry.Version != "" { + addLine("version", _wrapString(entry.Version), true) + } + if entry.Resolution != "" { + addLine("resolution", _wrapString(entry.Resolution), true) + } + if len(entry.Dependencies) > 0 { + addLine("dependencies", _stringifyDeps(entry.Dependencies), false) + } + if len(entry.PeerDependencies) > 0 { + addLine("peerDependencies", _stringifyDeps(entry.PeerDependencies), false) + } + if len(entry.DependenciesMeta) > 0 { + addLine("dependenciesMeta", _stringifyDepsMeta(entry.DependenciesMeta), false) + } + if len(entry.PeerDependenciesMeta) > 0 { + addLine("peerDependenciesMeta", _stringifyDepsMeta(entry.PeerDependenciesMeta), false) + } + + if len(entry.Bin) > 0 { + addLine("bin", _stringifyDeps(entry.Bin), false) + } + if entry.Checksum != "" { + addLine("checksum", _wrapString(entry.Checksum), true) + } + if entry.Conditions != "" { + addLine("conditions", _wrapString(entry.Conditions), true) + } + if entry.LanguageName != "" { + addLine("languageName", _wrapString(entry.LanguageName), true) + } + if entry.LinkType != "" { + addLine("linkType", _wrapString(entry.LinkType), true) + } + if entry.CacheKey != "" { + addLine("cacheKey", _wrapString(entry.CacheKey), true) + } + + return strings.Join(lines, "\n") +} + +func _stringifyDeps(deps map[string]string) string { + keys := make([]string, len(deps)) + i := 0 + for key := range deps { + keys[i] = key + i++ + } + sort.Strings(keys) + + lines := make([]string, 0, len(deps)) + addLine := func(name, version string) { + lines = append(lines, fmt.Sprintf(" %s: %s", _wrapString(name), _wrapString(version))) + } + + for _, name := range keys { + version := deps[name] + addLine(name, version) + } + + return strings.Join(lines, "\n") +} + +func _stringifyDepsMeta(meta map[string]BerryDependencyMetaEntry) string { + keys := make([]string, len(meta)) + i := 0 + for key := range meta { + keys[i] = key + i++ + } + sort.Strings(keys) + + lines := make([]string, 0, len(meta)) + addLine := func(name string, key string) { + lines =
append(lines, fmt.Sprintf(" %s:\n %s: true", _wrapString(name), key)) + } + + for _, name := range keys { + optional := meta[name] + if optional.Optional { + addLine(name, "optional") + } + if optional.Unplugged { + addLine(name, "unplugged") + } + } + + return strings.Join(lines, "\n") +} diff --git a/cli/internal/lockfile/berry_lockfile_test.go b/cli/internal/lockfile/berry_lockfile_test.go new file mode 100644 index 0000000..afcbe46 --- /dev/null +++ b/cli/internal/lockfile/berry_lockfile_test.go @@ -0,0 +1,273 @@ +package lockfile + +import ( + "bytes" + "testing" + + "github.com/vercel/turbo/cli/internal/turbopath" + "gotest.tools/v3/assert" +) + +func getBerryLockfile(t *testing.T, filename string) *BerryLockfile { + content, err := getFixture(t, filename) + if err != nil { + t.Error(err) + } + lockfile, err := DecodeBerryLockfile(content) + if err != nil { + t.Error(err) + } + return lockfile +} + +func Test_DecodingBerryLockfile(t *testing.T) { + lockfile := getBerryLockfile(t, "berry.lock") + assert.Equal(t, lockfile.version, 6) + assert.Equal(t, lockfile.cacheKey, "8c0") +} + +func Test_ResolvePackage(t *testing.T) { + lockfile := getBerryLockfile(t, "berry.lock") + + type Case struct { + name string + semver string + key string + version string + found bool + } + + cases := map[string]Case{ + "can resolve '||' semver syntax": { + name: "js-tokens", + semver: "^3.0.0 || ^4.0.0", + key: "js-tokens@npm:4.0.0", + version: "4.0.0", + found: true, + }, + "handles packages with multiple descriptors": { + name: "js-tokens", + semver: "^4.0.0", + key: "js-tokens@npm:4.0.0", + version: "4.0.0", + found: true, + }, + "doesn't find nonexistent descriptors": { + name: "@babel/code-frame", + semver: "^7.12.11", + found: false, + }, + "handles workspace packages": { + name: "eslint-config-custom", + semver: "*", + key: "eslint-config-custom@workspace:packages/eslint-config-custom", + version: "0.0.0-use.local", + found: true, + }, + } + + for testName, testCase := range cases { + pkg, err := lockfile.ResolvePackage("some-pkg", testCase.name, testCase.semver) + assert.NilError(t, err) + if testCase.found { + assert.Equal(t, pkg.Key, testCase.key, testName) + assert.Equal(t, pkg.Version, testCase.version, testName) + } + assert.Equal(t, pkg.Found, testCase.found, testName) + } +} + +func Test_AllDependencies(t *testing.T) { + lockfile := getBerryLockfile(t, "berry.lock") + + pkg, err := lockfile.ResolvePackage("some-pkg", "react-dom", "18.2.0") + assert.NilError(t, err) + assert.Assert(t, pkg.Found, "expected to find react-dom") + deps, found := lockfile.AllDependencies(pkg.Key) + assert.Assert(t, found, "expected lockfile key for react-dom to be present") + assert.Equal(t, len(deps), 2, "expected to find all react-dom direct dependencies") + for pkgName, version := range deps { + pkg, err := lockfile.ResolvePackage("some-pkg", pkgName, version) + assert.NilError(t, err, "error finding %s@%s", pkgName, version) + assert.Assert(t, pkg.Found, "expected to find lockfile entry for %s@%s", pkgName, version) + } +} + +func Test_BerryPatchList(t *testing.T) { + lockfile := getBerryLockfile(t, "berry.lock") + + var locator _Locator + if err := locator.parseLocator("resolve@npm:2.0.0-next.4"); err != nil { + t.Error(err) + } + + patchLocator, ok := lockfile.patches[locator] + assert.Assert(t, ok, "Expected to find patch locator") + patch, ok := lockfile.packages[patchLocator] + assert.Assert(t, ok, "Expected to find patch") + assert.Equal(t, patch.Version, "2.0.0-next.4") +} + +func 
Test_PackageExtensions(t *testing.T) { + lockfile := getBerryLockfile(t, "berry.lock") + + expectedExtensions := map[_Descriptor]_void{} + for _, extension := range []string{"@babel/types@npm:^7.8.3", "lodash@npm:4.17.21"} { + var extensionDescriptor _Descriptor + if err := extensionDescriptor.parseDescriptor(extension); err != nil { + t.Error(err) + } + expectedExtensions[extensionDescriptor] = _void{} + } + + assert.DeepEqual(t, lockfile.packageExtensions, expectedExtensions) +} + +func Test_StringifyMetadata(t *testing.T) { + metadata := BerryLockfileEntry{ + Version: "6", + CacheKey: "8c0", + } + lockfile := map[string]*BerryLockfileEntry{"__metadata": &metadata} + + var b bytes.Buffer + err := _writeBerryLockfile(&b, lockfile) + assert.Assert(t, err == nil) + assert.Equal(t, b.String(), ` +__metadata: + version: 6 + cacheKey: 8c0 +`) +} + +func Test_BerryRoundtrip(t *testing.T) { + content, err := getFixture(t, "berry.lock") + if err != nil { + t.Error(err) + } + lockfile, err := DecodeBerryLockfile(content) + if err != nil { + t.Error(err) + } + + var b bytes.Buffer + if err := lockfile.Encode(&b); err != nil { + t.Error(err) + } + + assert.Equal(t, b.String(), string(content)) +} + +func Test_PatchPathExtraction(t *testing.T) { + type Case struct { + locator string + patchPath string + isPatch bool + } + cases := []Case{ + { + locator: "lodash@patch:lodash@npm%3A4.17.21#./.yarn/patches/lodash-npm-4.17.21-6382451519.patch::version=4.17.21&hash=2c6e9e&locator=berry-patch%40workspace%3A.", + patchPath: ".yarn/patches/lodash-npm-4.17.21-6382451519.patch", + isPatch: true, + }, + { + locator: "lodash@npm:4.17.21", + isPatch: false, + }, + { + locator: "resolve@patch:resolve@npm%3A2.0.0-next.4#~builtin::version=2.0.0-next.4&hash=07638b", + patchPath: "~builtin", + isPatch: true, + }, + } + + for _, testCase := range cases { + var locator _Locator + err := locator.parseLocator(testCase.locator) + if err != nil { + t.Error(err) + } + patchPath, isPatch := locator.patchPath() + assert.Equal(t, isPatch, testCase.isPatch, locator) + assert.Equal(t, patchPath, testCase.patchPath, locator) + } +} + +func Test_PatchPrimaryVersion(t *testing.T) { + // todo write tests to make sure extraction actually works + type TestCase struct { + descriptor string + version string + isPatch bool + } + testCases := []TestCase{ + { + descriptor: "lodash@patch:lodash@npm%3A4.17.21#./.yarn/patches/lodash-npm-4.17.21-6382451519.patch::locator=berry-patch%40workspace%3A.", + version: "npm:4.17.21", + isPatch: true, + }, + { + descriptor: "typescript@patch:typescript@^4.5.2#~builtin", + version: "npm:^4.5.2", + isPatch: true, + }, + { + descriptor: "react@npm:18.2.0", + isPatch: false, + }, + } + + for _, testCase := range testCases { + var d _Descriptor + err := d.parseDescriptor(testCase.descriptor) + assert.NilError(t, err, testCase.descriptor) + actual, isPatch := d.primaryVersion() + assert.Equal(t, isPatch, testCase.isPatch, testCase) + if testCase.isPatch { + assert.Equal(t, actual, testCase.version, testCase.descriptor) + } + } +} + +func Test_BerryPruneDescriptors(t *testing.T) { + lockfile := getBerryLockfile(t, "minimal-berry.lock") + prunedLockfile, err := lockfile.Subgraph( + []turbopath.AnchoredSystemPath{ + turbopath.AnchoredUnixPath("packages/a").ToSystemPath(), + turbopath.AnchoredUnixPath("packages/c").ToSystemPath(), + }, + []string{"lodash@npm:4.17.21"}, + ) + if err != nil { + t.Error(err) + } + lockfileA := prunedLockfile.(*BerryLockfile) + + prunedLockfile, err = lockfile.Subgraph( + 
[]turbopath.AnchoredSystemPath{ + turbopath.AnchoredUnixPath("packages/b").ToSystemPath(), + turbopath.AnchoredUnixPath("packages/c").ToSystemPath(), + }, + []string{"lodash@npm:4.17.21"}, + ) + if err != nil { + t.Error(err) + } + lockfileB := prunedLockfile.(*BerryLockfile) + + lodashIdent := _Ident{name: "lodash"} + lodashA := _Descriptor{lodashIdent, "npm:^4.17.0"} + lodashB := _Descriptor{lodashIdent, "npm:^3.0.0 || ^4.0.0"} + + lodashEntryA, hasLodashA := lockfileA.descriptors[lodashA] + lodashEntryB, hasLodashB := lockfileB.descriptors[lodashB] + + assert.Assert(t, hasLodashA, "Expected lockfile a to have descriptor used by a") + assert.Assert(t, hasLodashB, "Expected lockfile b to have descriptor used by b") + assert.DeepEqual(t, lodashEntryA.reference, lodashEntryB.reference) + + _, lockfileAHasB := lockfileA.descriptors[lodashB] + _, lockfileBHasA := lockfileB.descriptors[lodashA] + assert.Assert(t, !lockfileAHasB, "Expected lockfile a not to have descriptor used by b") + assert.Assert(t, !lockfileBHasA, "Expected lockfile b not to have descriptor used by a") +} diff --git a/cli/internal/lockfile/lockfile.go b/cli/internal/lockfile/lockfile.go new file mode 100644 index 0000000..bb36eda --- /dev/null +++ b/cli/internal/lockfile/lockfile.go @@ -0,0 +1,135 @@ +// Package lockfile provides the lockfile interface and implementations for the various package managers +package lockfile + +import ( + "fmt" + "io" + "reflect" + "sort" + + mapset "github.com/deckarep/golang-set" + "github.com/vercel/turbo/cli/internal/turbopath" + "golang.org/x/sync/errgroup" +) + +// Lockfile Interface for general operations that work across all lockfiles +type Lockfile interface { + // ResolvePackage Given a workspace, a package it imports and version returns the key, resolved version, and if it was found + ResolvePackage(workspacePath turbopath.AnchoredUnixPath, name string, version string) (Package, error) + // AllDependencies Given a lockfile key return all (dev/optional/peer) dependencies of that package + AllDependencies(key string) (map[string]string, bool) + // Subgraph Given a list of lockfile keys returns a Lockfile based off the original one that only contains the packages given + Subgraph(workspacePackages []turbopath.AnchoredSystemPath, packages []string) (Lockfile, error) + // Encode encode the lockfile representation and write it to the given writer + Encode(w io.Writer) error + // Patches return a list of patches used in the lockfile + Patches() []turbopath.AnchoredUnixPath + // GlobalChange checks if there are any differences between lockfiles that would completely invalidate + // the cache. 
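+ // Implementations compare coarse, lockfile-wide metadata (e.g. the lockfile
+ // format version or cache key) rather than individual package entries.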
+ GlobalChange(other Lockfile) bool +} + +// IsNil checks if lockfile is nil +func IsNil(l Lockfile) bool { + return l == nil || reflect.ValueOf(l).IsNil() +} + +// Package Structure representing a possible Pack +type Package struct { + // Key used to lookup a package in the lockfile + Key string + // The resolved version of a package as it appears in the lockfile + Version string + // Set to true iff Key and Version are set + Found bool +} + +// ByKey sort package structures by key +type ByKey []Package + +func (p ByKey) Len() int { + return len(p) +} + +func (p ByKey) Swap(i, j int) { + p[i], p[j] = p[j], p[i] +} + +func (p ByKey) Less(i, j int) bool { + return p[i].Key+p[i].Version < p[j].Key+p[j].Version +} + +var _ (sort.Interface) = (*ByKey)(nil) + +// TransitiveClosure the set of all lockfile keys that pkg depends on +func TransitiveClosure( + workspaceDir turbopath.AnchoredUnixPath, + unresolvedDeps map[string]string, + lockFile Lockfile, +) (mapset.Set, error) { + if lf, ok := lockFile.(*NpmLockfile); ok { + // We special case as Rust implementations have their own dep crawl + return npmTransitiveDeps(lf, workspaceDir, unresolvedDeps) + } + return transitiveClosure(workspaceDir, unresolvedDeps, lockFile) +} + +func transitiveClosure( + workspaceDir turbopath.AnchoredUnixPath, + unresolvedDeps map[string]string, + lockFile Lockfile, +) (mapset.Set, error) { + if IsNil(lockFile) { + return nil, fmt.Errorf("No lockfile available to do analysis on") + } + + resolvedPkgs := mapset.NewSet() + lockfileEg := &errgroup.Group{} + + transitiveClosureHelper(lockfileEg, workspaceDir, lockFile, unresolvedDeps, resolvedPkgs) + + if err := lockfileEg.Wait(); err != nil { + return nil, err + } + + return resolvedPkgs, nil +} + +func transitiveClosureHelper( + wg *errgroup.Group, + workspacePath turbopath.AnchoredUnixPath, + lockfile Lockfile, + unresolvedDirectDeps map[string]string, + resolvedDeps mapset.Set, +) { + for directDepName, unresolvedVersion := range unresolvedDirectDeps { + directDepName := directDepName + unresolvedVersion := unresolvedVersion + wg.Go(func() error { + + lockfilePkg, err := lockfile.ResolvePackage(workspacePath, directDepName, unresolvedVersion) + + if err != nil { + return err + } + + if !lockfilePkg.Found || resolvedDeps.Contains(lockfilePkg) { + return nil + } + + resolvedDeps.Add(lockfilePkg) + + allDeps, ok := lockfile.AllDependencies(lockfilePkg.Key) + + if !ok { + panic(fmt.Sprintf("Unable to find entry for %s", lockfilePkg.Key)) + } + + if len(allDeps) > 0 { + transitiveClosureHelper(wg, workspacePath, lockfile, allDeps, resolvedDeps) + } + + return nil + }) + } +} diff --git a/cli/internal/lockfile/lockfile_test.go b/cli/internal/lockfile/lockfile_test.go new file mode 100644 index 0000000..7c666cc --- /dev/null +++ b/cli/internal/lockfile/lockfile_test.go @@ -0,0 +1,25 @@ +package lockfile + +import ( + "sort" + "testing" + + "gotest.tools/v3/assert" +) + +func Test_ByKeySortIsStable(t *testing.T) { + packagesA := []Package{ + {"/foo/1.2.3", "1.2.3", true}, + {"/baz/1.0.9", "/baz/1.0.9", true}, + {"/bar/1.2.3", "1.2.3", true}, + {"/foo/1.2.3", "/foo/1.2.3", true}, + {"/baz/1.0.9", "1.0.9", true}, + } + packagesB := make([]Package, len(packagesA)) + copy(packagesB, packagesA) + + sort.Sort(ByKey(packagesA)) + sort.Sort(ByKey(packagesB)) + + assert.DeepEqual(t, packagesA, packagesB) +} diff --git a/cli/internal/lockfile/npm_lockfile.go b/cli/internal/lockfile/npm_lockfile.go new file mode 100644 index 0000000..67cd32a --- /dev/null +++ 
diff --git a/cli/internal/lockfile/npm_lockfile.go b/cli/internal/lockfile/npm_lockfile.go
new file mode 100644
index 0000000..67cd32a
--- /dev/null
+++ b/cli/internal/lockfile/npm_lockfile.go
@@ -0,0 +1,107 @@
+package lockfile
+
+import (
+	"encoding/json"
+	"io"
+
+	mapset "github.com/deckarep/golang-set"
+	"github.com/vercel/turbo/cli/internal/ffi"
+	"github.com/vercel/turbo/cli/internal/turbopath"
+)
+
+// NpmLockfile representation of package-lock.json
+type NpmLockfile struct {
+	// We just store the entire lockfile in memory and pass it for every call
+	contents []byte
+}
+
+// ResolvePackage Given a workspace, a package it imports and version returns the key, resolved version, and if it was found
+func (l *NpmLockfile) ResolvePackage(workspacePath turbopath.AnchoredUnixPath, name string, version string) (Package, error) {
+	// This is only used when calculating the transitive deps, but Rust
+	// implementations do this calculation on the Rust side.
+	panic("Unreachable")
+}
+
+// AllDependencies Given a lockfile key return all (dev/optional/peer) dependencies of that package
+func (l *NpmLockfile) AllDependencies(key string) (map[string]string, bool) {
+	// This is only used when calculating the transitive deps, but Rust
+	// implementations do this calculation on the Rust side.
+	panic("Unreachable")
+}
+
+// Subgraph Given a list of lockfile keys returns a Lockfile based off the original one that only contains the packages given
+func (l *NpmLockfile) Subgraph(workspacePackages []turbopath.AnchoredSystemPath, packages []string) (Lockfile, error) {
+	workspaces := make([]string, len(workspacePackages))
+	for i, workspace := range workspacePackages {
+		workspaces[i] = workspace.ToUnixPath().ToString()
+	}
+	contents, err := ffi.NpmSubgraph(l.contents, workspaces, packages)
+	if err != nil {
+		return nil, err
+	}
+	return &NpmLockfile{contents: contents}, nil
+}
+
+// Encode the lockfile representation and write it to the given writer
+func (l *NpmLockfile) Encode(w io.Writer) error {
+	_, err := w.Write(l.contents)
+	return err
+}
+
+// Patches return a list of patches used in the lockfile
+func (l *NpmLockfile) Patches() []turbopath.AnchoredUnixPath {
+	return nil
+}
+
+// GlobalChange checks if there are any differences between lockfiles that would completely invalidate
+// the cache.
+func (l *NpmLockfile) GlobalChange(other Lockfile) bool {
+	o, ok := other.(*NpmLockfile)
+	if !ok {
+		return true
+	}
+
+	// We just grab the few global fields and check if they've changed
+	type minimalJSON struct {
+		LockfileVersion json.Number `json:"lockfileVersion"`
+		Requires        bool        `json:"requires"`
+	}
+	var self minimalJSON
+	var otherJSON minimalJSON
+	if err := json.Unmarshal(o.contents, &otherJSON); err != nil {
+		return true
+	}
+	if err := json.Unmarshal(l.contents, &self); err != nil {
+		return true
+	}
+
+	return self.LockfileVersion != otherJSON.LockfileVersion ||
+		self.Requires != otherJSON.Requires
+}
+
+var _ Lockfile = (*NpmLockfile)(nil)
+
+// DecodeNpmLockfile Parse contents of package-lock.json into NpmLockfile
+func DecodeNpmLockfile(contents []byte) (Lockfile, error) {
+	return &NpmLockfile{contents: contents}, nil
+}
+
+func npmTransitiveDeps(lockfile *NpmLockfile, workspacePath turbopath.AnchoredUnixPath, unresolvedDeps map[string]string) (mapset.Set, error) {
+	pkgDir := workspacePath.ToString()
+
+	packages, err := ffi.NpmTransitiveDeps(lockfile.contents, pkgDir, unresolvedDeps)
+	if err != nil {
+		return nil, err
+	}
+
+	deps := make([]interface{}, len(packages))
+	for i, pkg := range packages {
+		deps[i] = Package{
+			Found:   pkg.Found,
+			Key:     pkg.Key,
+			Version: pkg.Version,
+		}
+	}
+
+	return mapset.NewSetFromSlice(deps), nil
+}
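GlobalChange is meant as a cheap pre-check before any per-package diffing. A sketch of the call pattern, where prevContents and currContents are hypothetical byte slices holding the old and new package-lock.json:

// Sketch: a change to a top-level lockfile field invalidates everything.
prev, err := DecodeNpmLockfile(prevContents)
if err != nil {
	return err
}
curr, err := DecodeNpmLockfile(currContents)
if err != nil {
	return err
}
if curr.GlobalChange(prev) {
	// e.g. lockfileVersion changed: skip per-package comparison entirely
}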
diff --git a/cli/internal/lockfile/pnpm_lockfile.go b/cli/internal/lockfile/pnpm_lockfile.go
new file mode 100644
index 0000000..a51b36e
--- /dev/null
+++ b/cli/internal/lockfile/pnpm_lockfile.go
@@ -0,0 +1,579 @@
+package lockfile
+
+import (
+	"fmt"
+	"io"
+	"reflect"
+	"sort"
+	"strings"
+
+	"github.com/pkg/errors"
+	"github.com/vercel/turbo/cli/internal/turbopath"
+	"github.com/vercel/turbo/cli/internal/yaml"
+)
+
+// PnpmLockfile Go representation of the contents of 'pnpm-lock.yaml'
+// Reference https://github.com/pnpm/pnpm/blob/main/packages/lockfile-types/src/index.ts
+type PnpmLockfile struct {
+	isV6 bool
+	// Before 6.0 the version was stored as a float, but as of 6.0+ it's a string
+	Version                   interface{}                `yaml:"lockfileVersion"`
+	NeverBuiltDependencies    []string                   `yaml:"neverBuiltDependencies,omitempty"`
+	OnlyBuiltDependencies     []string                   `yaml:"onlyBuiltDependencies,omitempty"`
+	Overrides                 map[string]string          `yaml:"overrides,omitempty"`
+	PackageExtensionsChecksum string                     `yaml:"packageExtensionsChecksum,omitempty"`
+	PatchedDependencies       map[string]PatchFile       `yaml:"patchedDependencies,omitempty"`
+	Importers                 map[string]ProjectSnapshot `yaml:"importers"`
+	Packages                  map[string]PackageSnapshot `yaml:"packages,omitempty"`
+	Time                      map[string]string          `yaml:"time,omitempty"`
+}
+
+var _ Lockfile = (*PnpmLockfile)(nil)
+
+// ProjectSnapshot Snapshot used to represent projects in the importers section
+type ProjectSnapshot struct {
+	// For v6 omitempty is sufficient; for pre-v6 we *need* to omit the empty map,
+	// hence the SpecifierMap wrapper below
+	Specifiers SpecifierMap `yaml:"specifiers,omitempty"`
+
+	// The values of these maps will be string if lockfileVersion <6 or DependencyV6 if 6+
+	Dependencies         map[string]yaml.Node `yaml:"dependencies,omitempty"`
+	OptionalDependencies map[string]yaml.Node `yaml:"optionalDependencies,omitempty"`
+	DevDependencies      map[string]yaml.Node `yaml:"devDependencies,omitempty"`
+
+	DependenciesMeta map[string]DependenciesMeta `yaml:"dependenciesMeta,omitempty"`
+	PublishDirectory string                      `yaml:"publishDirectory,omitempty"`
+}
+
+// SpecifierMap is a type wrapper that overrides IsZero for Golang's map
+// to match the behavior that pnpm expects
+type SpecifierMap map[string]string
+
+// IsZero is used to check whether an object is zero to
+// determine whether it should be omitted when marshaling
+// with the omitempty flag.
+func (m SpecifierMap) IsZero() bool {
+	return m == nil
+}
+
+var _ (yaml.IsZeroer) = (*SpecifierMap)(nil)
+
+// DependencyV6 are dependency entries for lockfileVersion 6.0+
+type DependencyV6 struct {
+	Specifier string `yaml:"specifier"`
+	Version   string `yaml:"version"`
+}
+
+// findResolution will try to find a resolution in any of the dependency fields
+func (p *ProjectSnapshot) findResolution(dependency string) (DependencyV6, bool, error) {
+	var getResolution func(yaml.Node) (DependencyV6, bool, error)
+	if len(p.Specifiers) > 0 {
+		getResolution = func(node yaml.Node) (DependencyV6, bool, error) {
+			specifier, ok := p.Specifiers[dependency]
+			if !ok {
+				return DependencyV6{}, false, nil
+			}
+			var version string
+			if err := node.Decode(&version); err != nil {
+				return DependencyV6{}, false, err
+			}
+			return DependencyV6{Version: version, Specifier: specifier}, true, nil
+		}
+	} else {
+		getResolution = func(node yaml.Node) (DependencyV6, bool, error) {
+			var resolution DependencyV6
+			if err := node.Decode(&resolution); err != nil {
+				return DependencyV6{}, false, err
+			}
+			return resolution, true, nil
+		}
+	}
+	if resolution, ok := p.Dependencies[dependency]; ok {
+		return getResolution(resolution)
+	}
+	if resolution, ok := p.DevDependencies[dependency]; ok {
+		return getResolution(resolution)
+	}
+	if resolution, ok := p.OptionalDependencies[dependency]; ok {
+		return getResolution(resolution)
+	}
+	return DependencyV6{}, false, nil
+}
+
+// PackageSnapshot Snapshot used to represent a package in the packages section
+type PackageSnapshot struct {
+	Resolution PackageResolution `yaml:"resolution,flow"`
+	ID         string            `yaml:"id,omitempty"`
+
+	// only needed for packages that aren't in npm
+	Name    string `yaml:"name,omitempty"`
+	Version string `yaml:"version,omitempty"`
+
+	Engines struct {
+		Node string `yaml:"node"`
+		NPM  string `yaml:"npm,omitempty"`
+	} `yaml:"engines,omitempty,flow"`
+	CPU  []string `yaml:"cpu,omitempty,flow"`
+	Os   []string `yaml:"os,omitempty,flow"`
+	LibC []string `yaml:"libc,omitempty"`
+
+	Deprecated    string `yaml:"deprecated,omitempty"`
+	HasBin        bool   `yaml:"hasBin,omitempty"`
+	Prepare       bool   `yaml:"prepare,omitempty"`
+	RequiresBuild bool   `yaml:"requiresBuild,omitempty"`
+
+	BundledDependencies  []string          `yaml:"bundledDependencies,omitempty"`
+	PeerDependencies     map[string]string `yaml:"peerDependencies,omitempty"`
+	PeerDependenciesMeta map[string]struct {
+		Optional bool `yaml:"optional"`
+	} `yaml:"peerDependenciesMeta,omitempty"`
+
+	Dependencies         map[string]string `yaml:"dependencies,omitempty"`
+	OptionalDependencies map[string]string `yaml:"optionalDependencies,omitempty"`
+
+	TransitivePeerDependencies []string `yaml:"transitivePeerDependencies,omitempty"`
+	Dev                        bool     `yaml:"dev"`
+	Optional                   bool     `yaml:"optional,omitempty"`
+	Patched                    bool     `yaml:"patched,omitempty"`
+}
+
+// PackageResolution Various resolution strategies for packages
+type PackageResolution struct {
+	Type string `yaml:"type,omitempty"`
+	// For npm or tarball
+	Integrity string `yaml:"integrity,omitempty"`
+
+	// For tarball
+	Tarball string `yaml:"tarball,omitempty"`
+
+	// For local directory
+	Dir string `yaml:"directory,omitempty"`
+
+	// For git repo
+	Repo   string `yaml:"repo,omitempty"`
+	Commit string `yaml:"commit,omitempty"`
+}
+
+// PatchFile represents a patch applied to a package
+type PatchFile struct {
+	Path string `yaml:"path"`
+	Hash string `yaml:"hash"`
+}
+
+func isSupportedVersion(version interface{}) error {
+	switch version.(type) {
+	case string:
+		if version == "6.0" {
+			return nil
+		}
+	case float64:
+		if version == 5.3 || version == 5.4 {
+			return nil
+		}
+	default:
+		return fmt.Errorf("lockfileVersion of type %T is invalid", version)
+	}
+	supportedVersions := []string{"5.3", "5.4", "6.0"}
+	return errors.Errorf("Unable to generate pnpm-lock.yaml with lockfileVersion: %v. Supported lockfile versions are %v", version, supportedVersions)
+}
+
+// DependenciesMeta metadata for dependencies
+type DependenciesMeta struct {
+	Injected bool   `yaml:"injected,omitempty"`
+	Node     string `yaml:"node,omitempty"`
+	Patch    string `yaml:"patch,omitempty"`
+}
+
+// DecodePnpmLockfile parse a pnpm lockfile
+func DecodePnpmLockfile(contents []byte) (*PnpmLockfile, error) {
+	var lockfile PnpmLockfile
+	if err := yaml.Unmarshal(contents, &lockfile); err != nil {
+		return nil, errors.Wrap(err, "could not unmarshal lockfile")
+	}
+
+	switch lockfile.Version.(type) {
+	case float64:
+		lockfile.isV6 = false
+	case string:
+		lockfile.isV6 = true
+	default:
+		return nil, fmt.Errorf("Unexpected type of lockfileVersion: '%T', expected float64 or string", lockfile.Version)
+	}
+	return &lockfile, nil
+}
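The type switch above is the entire v5/v6 detection: YAML decodes an unquoted lockfileVersion: 5.4 as a float64 and a quoted '6.0' as a string. A minimal sketch with toy inputs (errors elided for brevity):

// Sketch: isV6 is keyed purely off the decoded Go type of lockfileVersion.
v5, _ := DecodePnpmLockfile([]byte("lockfileVersion: 5.4\nimporters: {}\n"))
v6, _ := DecodePnpmLockfile([]byte("lockfileVersion: '6.0'\nimporters: {}\n"))
// v5.isV6 == false (float64), v6.isV6 == true (string)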
+
+// ResolvePackage Given a package and version returns the key, resolved version, and if it was found
+func (p *PnpmLockfile) ResolvePackage(workspacePath turbopath.AnchoredUnixPath, name string, version string) (Package, error) {
+	// Check if version is a key
+	if _, ok := p.Packages[version]; ok {
+		return Package{Key: version, Version: p.extractVersion(version), Found: true}, nil
+	}
+
+	resolvedVersion, ok, err := p.resolveSpecifier(workspacePath, name, version)
+	if !ok || err != nil {
+		return Package{}, err
+	}
+	key := p.formatKey(name, resolvedVersion)
+	if entry, ok := p.Packages[key]; ok {
+		var version string
+		if entry.Version != "" {
+			version = entry.Version
+		} else {
+			version = resolvedVersion
+		}
+		return Package{Key: key, Version: version, Found: true}, nil
+	}
+
+	if entry, ok := p.Packages[resolvedVersion]; ok {
+		var version string
+		if entry.Version != "" {
+			version = entry.Version
+		} else {
+			// If there isn't a version field in the entry then the version is
+			// encoded in the key, so extract it from there.
+			version = p.extractVersion(resolvedVersion)
+		}
+		return Package{Key: resolvedVersion, Version: version, Found: true}, nil
+	}
+
+	return Package{}, nil
+}
+
+// AllDependencies Given a lockfile key return all (dev/optional/peer) dependencies of that package
+func (p *PnpmLockfile) AllDependencies(key string) (map[string]string, bool) {
+	deps := map[string]string{}
+	entry, ok := p.Packages[key]
+	if !ok {
+		return deps, false
+	}
+
+	for name, version := range entry.Dependencies {
+		deps[name] = version
+	}
+
+	for name, version := range entry.OptionalDependencies {
+		deps[name] = version
+	}
+
+	// Peer dependencies appear in the Dependencies map already resolved
+
+	return deps, true
+}
+
+// Subgraph Given a list of lockfile keys returns a Lockfile based off the original one that only contains the packages given
+func (p *PnpmLockfile) Subgraph(workspacePackages []turbopath.AnchoredSystemPath, packages []string) (Lockfile, error) {
+	lockfilePackages := make(map[string]PackageSnapshot, len(packages))
+	for _, key := range packages {
+		entry, ok := p.Packages[key]
+		if !ok {
+			return nil, fmt.Errorf("Unable to find lockfile entry for %s", key)
+		}
+		lockfilePackages[key] = entry
+	}
+
+	importers, err := pruneImporters(p.Importers, workspacePackages)
+	if err != nil {
+		return nil, err
+	}
+
+	for _, importer := range importers {
+		for dependency, meta := range importer.DependenciesMeta {
+			if meta.Injected {
+				resolution, ok, err := importer.findResolution(dependency)
+				if err != nil {
+					return nil, errors.Wrapf(err, "Unable to decode reference to %s", dependency)
+				}
+				if !ok {
+					return nil, fmt.Errorf("Unable to find %s other than reference in dependenciesMeta", dependency)
+				}
+				entry, ok := p.Packages[resolution.Version]
+				if !ok {
+					return nil, fmt.Errorf("Unable to find package entry for %s", resolution.Version)
+				}
+
+				lockfilePackages[resolution.Version] = entry
+			}
+		}
+	}
+
+	lockfile := PnpmLockfile{
+		Version:                   p.Version,
+		Packages:                  lockfilePackages,
+		NeverBuiltDependencies:    p.NeverBuiltDependencies,
+		OnlyBuiltDependencies:     p.OnlyBuiltDependencies,
+		Overrides:                 p.Overrides,
+		PackageExtensionsChecksum: p.PackageExtensionsChecksum,
+		PatchedDependencies:       p.prunePatches(p.PatchedDependencies, lockfilePackages),
+		Importers:                 importers,
+	}
+
+	return &lockfile, nil
+}
+
+// Prune importers to only those that have all of their dependencies in the packages list
+func pruneImporters(importers map[string]ProjectSnapshot, workspacePackages []turbopath.AnchoredSystemPath) (map[string]ProjectSnapshot, error) {
+	prunedImporters := map[string]ProjectSnapshot{}
+
+	// Copy over root level importer
+	if root, ok := importers["."]; ok {
+		prunedImporters["."] = root
+	}
+
+	for _, workspacePath := range workspacePackages {
+		workspace := workspacePath.ToUnixPath().ToString()
+		importer, ok := importers[workspace]
+
+		// If a workspace has no dependencies *and* it is only depended on by the
+		// workspace root it will not show up as an importer.
+		if ok {
+			prunedImporters[workspace] = importer
+		}
+	}
+
+	return prunedImporters, nil
+}
+
+func (p *PnpmLockfile) prunePatches(patches map[string]PatchFile, packages map[string]PackageSnapshot) map[string]PatchFile {
+	if len(patches) == 0 {
+		return nil
+	}
+
+	patchPackages := make(map[string]PatchFile, len(patches))
+	for dependency := range packages {
+		if p.isV6 {
+			// Internally pnpm partially converts the new path format to the old
+			// format in order for existing parsing logic to work.
+			dependency = convertNewToOldDepPath(dependency)
+		}
+		dp := parseDepPath(dependency)
+		patchKey := fmt.Sprintf("%s@%s", dp.name, dp.version)
+
+		if patch, ok := patches[patchKey]; ok && patch.Hash == dp.patchHash() {
+			patchPackages[patchKey] = patch
+		}
+	}
+
+	return patchPackages
+}
+
+// Encode encodes the lockfile representation and writes it to the given writer
+func (p *PnpmLockfile) Encode(w io.Writer) error {
+	if err := isSupportedVersion(p.Version); err != nil {
+		return err
+	}
+
+	encoder := yaml.NewEncoder(w)
+	encoder.SetIndent(2)
+
+	if err := encoder.Encode(p); err != nil {
+		return errors.Wrap(err, "unable to encode pnpm lockfile")
+	}
+	return nil
+}
+
+// Patches returns a list of patches used in the lockfile
+func (p *PnpmLockfile) Patches() []turbopath.AnchoredUnixPath {
+	if len(p.PatchedDependencies) == 0 {
+		return nil
+	}
+	patches := make([]string, len(p.PatchedDependencies))
+	i := 0
+	for _, patch := range p.PatchedDependencies {
+		patches[i] = patch.Path
+		i++
+	}
+	sort.Strings(patches)
+
+	patchPaths := make([]turbopath.AnchoredUnixPath, len(p.PatchedDependencies))
+	for i, patch := range patches {
+		patchPaths[i] = turbopath.AnchoredUnixPath(patch)
+	}
+	return patchPaths
+}
+
+// GlobalChange checks if there are any differences between lockfiles that would completely invalidate
+// the cache.
+func (p *PnpmLockfile) GlobalChange(other Lockfile) bool {
+	o, ok := other.(*PnpmLockfile)
+	return !ok ||
+		p.Version != o.Version ||
+		p.PackageExtensionsChecksum != o.PackageExtensionsChecksum ||
+		!reflect.DeepEqual(p.Overrides, o.Overrides) ||
+		!reflect.DeepEqual(p.PatchedDependencies, o.PatchedDependencies)
+}
+
+func (p *PnpmLockfile) resolveSpecifier(workspacePath turbopath.AnchoredUnixPath, name string, specifier string) (string, bool, error) {
+	pnpmWorkspacePath := workspacePath.ToString()
+	if pnpmWorkspacePath == "" {
+		// For pnpm, the root is named "."
+		pnpmWorkspacePath = "."
+	}
+	importer, ok := p.Importers[pnpmWorkspacePath]
+	if !ok {
+		return "", false, fmt.Errorf("no workspace '%v' found in lockfile", workspacePath)
+	}
+	resolution, ok, err := importer.findResolution(name)
+	if err != nil {
+		return "", false, err
+	}
+	if !ok {
+		// Check if the specifier is already a resolved version
+		if _, ok := p.Packages[p.formatKey(name, specifier)]; ok {
+			return specifier, true, nil
+		}
+		return "", false, fmt.Errorf("Unable to find resolved version for %s@%s in %s", name, specifier, workspacePath)
+	}
+	// Verify that the specifier in the importer matches the one given
+	overrideSpecifier := p.applyOverrides(name, specifier)
+	if resolution.Specifier != overrideSpecifier {
+		if _, ok := p.Packages[p.formatKey(name, overrideSpecifier)]; ok {
+			return overrideSpecifier, true, nil
+		}
+		return "", false, nil
+	}
+	return resolution.Version, true, nil
+}
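To make the branches in resolveSpecifier concrete, a worked example against a hypothetical importer entry (the YAML and the lf value below are illustrative, not fixtures from this patch):

// Given an importer entry such as:
//
//	importers:
//	  apps/web:
//	    specifiers:
//	      lodash: ^4.17.21
//	    dependencies:
//	      lodash: 4.17.21
//
// the happy path returns the version pinned by the importer:
version, ok, err := lf.resolveSpecifier(turbopath.AnchoredUnixPath("apps/web"), "lodash", "^4.17.21")
// version == "4.17.21", ok == true, err == nil
//
// A specifier that doesn't match the importer's recorded one (after overrides
// are applied) falls back to the direct key lookups and otherwise reports
// ("", false, nil) or an error.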
+
+// applyOverrides applies pnpm overrides to a specifier, see https://pnpm.io/package_json#pnpmoverrides
+// Note this is barebones support and only supports global overrides;
+// future work will support semver ranges and selector filtering.
+func (p *PnpmLockfile) applyOverrides(name string, specifier string) string {
+	if len(p.Overrides) > 0 {
+		if override, ok := p.Overrides[name]; ok {
+			return override
+		}
+	}
+	return specifier
+}
+
+// formatKey formats the lockfile key for a given package name and version
+func (p *PnpmLockfile) formatKey(name string, version string) string {
+	if p.isV6 {
+		return fmt.Sprintf("/%s@%s", name, version)
+	}
+	return fmt.Sprintf("/%s/%s", name, version)
+}
+
+// extractVersion extracts the version from a lockfile key
+func (p *PnpmLockfile) extractVersion(key string) string {
+	if p.isV6 {
+		key = convertNewToOldDepPath(key)
+	}
+	dp := parseDepPath(key)
+	if dp.peerSuffix != "" {
+		sep := ""
+		if !p.isV6 {
+			sep = "_"
+		}
+		return fmt.Sprintf("%s%s%s", dp.version, sep, dp.peerSuffix)
+	}
+	return dp.version
+}
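The two key formats side by side, as produced and reversed by the helpers above (values hypothetical; written in-package since both helpers are unexported):

v5 := &PnpmLockfile{isV6: false}
v6 := &PnpmLockfile{isV6: true}

_ = v5.formatKey("foo", "1.0.0")              // "/foo/1.0.0"
_ = v5.extractVersion("/foo/1.0.0_bar@1.0.0") // "1.0.0_bar@1.0.0" (peer suffix kept, "_" separator)

_ = v6.formatKey("@scope/foo", "1.0.0")        // "/@scope/foo@1.0.0"
_ = v6.extractVersion("/foo@1.0.0(bar@1.0.0)") // "1.0.0(bar@1.0.0)" (peer suffix kept, no separator)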
+
+// depPath is the parsed representation of a pnpm lockfile key
+type depPath struct {
+	host       string
+	name       string
+	version    string
+	peerSuffix string
+}
+
+func parseDepPath(dependency string) depPath {
+	// See https://github.com/pnpm/pnpm/blob/185ab01adfc927ea23d2db08a14723bf51d0025f/packages/dependency-path/src/index.ts#L96
+	var dp depPath
+	parts := strings.Split(dependency, "/")
+	shift := func() string {
+		if len(parts) == 0 {
+			return ""
+		}
+		val := parts[0]
+		parts = parts[1:]
+		return val
+	}
+
+	isAbsolute := dependency[0] != '/'
+	// Skip leading '/'
+	if !isAbsolute {
+		shift()
+	}
+
+	if isAbsolute {
+		dp.host = shift()
+	}
+
+	if len(parts) == 0 {
+		return dp
+	}
+
+	if strings.HasPrefix(parts[0], "@") {
+		dp.name = fmt.Sprintf("%s/%s", shift(), shift())
+	} else {
+		dp.name = shift()
+	}
+
+	version := strings.Join(parts, "/")
+	if len(version) > 0 {
+		var peerSuffixIndex int
+		if strings.Contains(version, "(") && strings.HasSuffix(version, ")") {
+			// v6 encodes peer deps using (peer=version);
+			// also used to encode patches using (patch_hash=hash)
+			peerSuffixIndex = strings.Index(version, "(")
+			dp.peerSuffix = version[peerSuffixIndex:]
+			dp.version = version[0:peerSuffixIndex]
+		} else {
+			// pre v6 uses _ to separate version from peer dependencies;
+			// if a dependency is patched and has peer dependencies its version will
+			// be encoded as version_patchHash_peerDepsHash
+			peerSuffixIndex = strings.Index(version, "_")
+			if peerSuffixIndex != -1 {
+				dp.peerSuffix = version[peerSuffixIndex+1:]
+				dp.version = version[0:peerSuffixIndex]
+			}
+		}
+		if peerSuffixIndex == -1 {
+			dp.version = version
+		}
+	}
+
+	return dp
+}
+
+var _patchHashKey = "patch_hash="
+
+func (d depPath) patchHash() string {
+	if strings.HasPrefix(d.peerSuffix, "(") && strings.HasSuffix(d.peerSuffix, ")") {
+		for _, part := range strings.Split(d.peerSuffix, "(") {
+			if strings.HasPrefix(part, _patchHashKey) {
+				// drop the enclosing ')'
+				return part[len(_patchHashKey) : len(part)-1]
+			}
+		}
+		// no patch entry found
+		return ""
+	}
+
+	sepIndex := strings.Index(d.peerSuffix, "_")
+	if sepIndex != -1 {
+		return d.peerSuffix[:sepIndex]
+	}
+	// if a dependency just has a single suffix we can't tell if it's a patch or peer hash;
+	// return it in case it's a patch hash
+	return d.peerSuffix
+}
+
+// convertNewToOldDepPath converts v6's dep path of /name@version to v5's /name/version
+// See https://github.com/pnpm/pnpm/blob/185ab01adfc927ea23d2db08a14723bf51d0025f/lockfile/lockfile-file/src/experiments/inlineSpecifiersLockfileConverters.ts#L162
+func convertNewToOldDepPath(newPath string) string {
+	if len(newPath) > 2 && !strings.Contains(newPath[2:], "@") {
+		return newPath
+	}
+	searchStartIndex := strings.Index(newPath, "/@") + 2
+	index := strings.Index(newPath[searchStartIndex:], "@") + searchStartIndex
+	if strings.Contains(newPath, "(") && index > strings.Index(newPath, "(") {
+		return newPath
+	}
+	return fmt.Sprintf("%s/%s", newPath[0:index], newPath[index+1:])
+}
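A few traced inputs for convertNewToOldDepPath, since the scoped-name and peer-suffix cases are the easy ones to get wrong (all values hypothetical):

_ = convertNewToOldDepPath("/foo@1.0.0")            // "/foo/1.0.0"
_ = convertNewToOldDepPath("/@scope/child@1.0.0")   // "/@scope/child/1.0.0" (the "/@" scan skips the scope's "@")
_ = convertNewToOldDepPath("/foo@1.0.0(bar@2.0.0)") // "/foo/1.0.0(bar@2.0.0)" (peer suffix untouched)
_ = convertNewToOldDepPath("/foo(bar@1.0.0)")       // unchanged: the only "@" is inside the peer suffix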
diff --git a/cli/internal/lockfile/pnpm_lockfile_test.go b/cli/internal/lockfile/pnpm_lockfile_test.go
new file mode 100644
index 0000000..b4c8475
--- /dev/null
+++ b/cli/internal/lockfile/pnpm_lockfile_test.go
@@ -0,0 +1,405 @@
+package lockfile
+
+import (
+	"bytes"
+	"os"
+	"sort"
+	"testing"
+
+	"github.com/google/go-cmp/cmp/cmpopts"
+	"github.com/pkg/errors"
+	"github.com/vercel/turbo/cli/internal/turbopath"
+	"github.com/vercel/turbo/cli/internal/yaml"
+	"gotest.tools/v3/assert"
+)
+
+func getFixture(t *testing.T, name string) ([]byte, error) {
+	defaultCwd, err := os.Getwd()
+	if err != nil {
+		t.Errorf("failed to get cwd: %v", err)
+	}
+	cwd := turbopath.AbsoluteSystemPath(defaultCwd)
+	lockfilePath := cwd.UntypedJoin("testdata", name)
+	if !lockfilePath.FileExists() {
+		return nil, errors.Errorf("unable to find 'testdata/%s'", name)
+	}
+	return os.ReadFile(lockfilePath.ToString())
+}
+
+func Test_Roundtrip(t *testing.T) {
+	lockfiles := []string{"pnpm6-workspace.yaml", "pnpm7-workspace.yaml", "pnpm8.yaml"}
+
+	for _, lockfilePath := range lockfiles {
+		lockfileContent, err := getFixture(t, lockfilePath)
+		if err != nil {
+			t.Errorf("failure getting fixture: %s", err)
+		}
+		lockfile, err := DecodePnpmLockfile(lockfileContent)
+		if err != nil {
+			t.Errorf("decoding failed %s", err)
+		}
+		var b bytes.Buffer
+		if err := lockfile.Encode(&b); err != nil {
+			t.Errorf("encoding failed %s", err)
+		}
+		newLockfile, err := DecodePnpmLockfile(b.Bytes())
+		if err != nil {
+			t.Errorf("decoding failed %s", err)
+		}
+
+		assert.DeepEqual(
+			t,
+			lockfile,
+			newLockfile,
+			// Skip over fields that don't get serialized
+			cmpopts.IgnoreUnexported(PnpmLockfile{}),
+			cmpopts.IgnoreTypes(yaml.Node{}),
+		)
+	}
+}
+
+func Test_SpecifierResolution(t *testing.T) {
+	contents, err := getFixture(t, "pnpm7-workspace.yaml")
+	if err != nil {
+		t.Error(err)
+	}
+	lockfile, err := DecodePnpmLockfile(contents)
+	if err != nil {
+		t.Errorf("failure decoding lockfile: %v", err)
+	}
+
+	type Case struct {
+		workspacePath turbopath.AnchoredUnixPath
+		pkg           string
+		specifier     string
+		version       string
+		found         bool
+		err           string
+	}
+
+	cases := []Case{
+		{workspacePath: "apps/docs", pkg: "next", specifier: "12.2.5", version: "12.2.5_ir3quccc6i62x6qn6jjhyjjiey", found: true},
+		{workspacePath: "apps/web", pkg: "next", specifier: "12.2.5", version: "12.2.5_ir3quccc6i62x6qn6jjhyjjiey", found: true},
+		{workspacePath: "apps/web", pkg: "typescript", specifier: "^4.5.3", version: "4.8.3", found: true},
+		{workspacePath: "apps/web", pkg: "lodash", specifier: "bad-tag", version: "", found: false},
+		{workspacePath: "apps/web", pkg: "lodash", specifier: "^4.17.21", version: "4.17.21_ehchni3mpmovsvjxesffg2i5a4", found: true},
+		{workspacePath: "apps/docs", pkg: "dashboard-icons", specifier: "github:peerigon/dashboard-icons", version: "github.com/peerigon/dashboard-icons/ce27ef933144e09cef3911025f3649040a8571b6", found: true},
+		{workspacePath: "", pkg: "turbo", specifier: "latest", version: "1.4.6", found: true},
+		{workspacePath: "apps/bad_workspace", pkg: "turbo", specifier: "latest", version: "1.4.6", err: "no workspace 'apps/bad_workspace' found in lockfile"},
+	}
+
+	for _, testCase := range cases {
+		actualVersion, actualFound, err := lockfile.resolveSpecifier(testCase.workspacePath, testCase.pkg, testCase.specifier)
+		if testCase.err != "" {
+			assert.Error(t, err, testCase.err)
+		} else {
+			assert.Equal(t, actualFound, testCase.found, "%s@%s", testCase.pkg, testCase.version)
+			assert.Equal(t, actualVersion, testCase.version, "%s@%s", testCase.pkg, testCase.version)
+		}
+	}
+}
+
+func Test_SpecifierResolutionV6(t *testing.T) {
+	contents, err := getFixture(t, "pnpm8.yaml")
+	if err != nil {
+		t.Error(err)
+	}
+	lockfile, err := DecodePnpmLockfile(contents)
+	if err != nil {
+		t.Errorf("failure decoding lockfile: %v", err)
+	}
+
+	type Case struct {
+		workspacePath turbopath.AnchoredUnixPath
+		pkg           string
+		specifier     string
+		version       string
+		found         bool
+		err           string
+	}
+
+	cases := []Case{
+		{workspacePath: "packages/a", pkg: "c", specifier: "workspace:*", version: "link:../c", found: true},
+		{workspacePath: "packages/a", pkg: "is-odd", specifier: "^3.0.1", version: "3.0.1", found: true},
+		{workspacePath: "packages/b", pkg: "is-odd", specifier: "^3.0.1", version: "3.0.1", err: "Unable to find resolved version for is-odd@^3.0.1 in packages/b"},
+		{workspacePath: "apps/bad_workspace", pkg: "turbo", specifier: "latest", version: "1.4.6", err: "no workspace 'apps/bad_workspace' found in lockfile"},
+	}
+
+	for _, testCase := range cases {
+		actualVersion, actualFound, err := lockfile.resolveSpecifier(testCase.workspacePath, testCase.pkg, testCase.specifier)
+		if testCase.err != "" {
+			assert.Error(t, err, testCase.err)
+		} else {
+			assert.Equal(t, actualFound, testCase.found, "%s@%s", testCase.pkg, testCase.version)
+			assert.Equal(t, actualVersion, testCase.version, "%s@%s", testCase.pkg, testCase.version)
+		}
+	}
+}
+
+func Test_SubgraphInjectedPackages(t *testing.T) {
+	contents, err := getFixture(t, "pnpm7-workspace.yaml")
+	if err != nil {
+		t.Error(err)
+	}
+	lockfile, err := DecodePnpmLockfile(contents)
+	assert.NilError(t, err, "decode lockfile")
+
+	packageWithInjectedPackage := turbopath.AnchoredUnixPath("apps/docs").ToSystemPath()
+
+	prunedLockfile, err := lockfile.Subgraph([]turbopath.AnchoredSystemPath{packageWithInjectedPackage}, []string{})
+	assert.NilError(t, err, "prune lockfile")
+
+	pnpmLockfile, ok := prunedLockfile.(*PnpmLockfile)
+	assert.Assert(t, ok, "got different lockfile impl")
+
+	_, hasInjectedPackage := pnpmLockfile.Packages["file:packages/ui"]
+
+	assert.Assert(t, hasInjectedPackage, "pruned lockfile is missing injected package")
+}
+
+func Test_GitPackages(t *testing.T) {
+	contents, err := getFixture(t, "pnpm7-workspace.yaml")
+	if err != nil {
+		t.Error(err)
+	}
+	lockfile, err := DecodePnpmLockfile(contents)
+	assert.NilError(t, err, "decode lockfile")
+
+	pkg, err := lockfile.ResolvePackage(turbopath.AnchoredUnixPath("apps/docs"), "dashboard-icons", "github:peerigon/dashboard-icons")
+	assert.NilError(t, err, "failure to find package")
+	assert.Assert(t, pkg.Found)
+	assert.DeepEqual(t, pkg.Key, "github.com/peerigon/dashboard-icons/ce27ef933144e09cef3911025f3649040a8571b6")
+	assert.DeepEqual(t, pkg.Version, "1.0.0")
+	// make sure subgraph produces git dep
+}
+
+func Test_DecodePnpmUnquotedURL(t *testing.T) {
+	resolutionWithQuestionMark := `{integrity: sha512-deadbeef, tarball: path/to/tarball?foo=bar}`
+	var resolution map[string]interface{}
+	err := yaml.Unmarshal([]byte(resolutionWithQuestionMark), &resolution)
+	assert.NilError(t, err, "valid package entry should be able to be decoded")
+	assert.Equal(t, resolution["tarball"], "path/to/tarball?foo=bar")
+}
+
+func Test_PnpmLockfilePatches(t *testing.T) {
+	contents, err := getFixture(t, "pnpm-patch.yaml")
+	assert.NilError(t, err)
+
+	lockfile, err := DecodePnpmLockfile(contents)
+	assert.NilError(t, err)
+
+	patches := lockfile.Patches()
+	assert.Equal(t, len(patches), 3)
+	assert.Equal(t, patches[0], turbopath.AnchoredUnixPath("patches/@babel__core@7.20.12.patch"))
+	assert.Equal(t, patches[1], turbopath.AnchoredUnixPath("patches/is-odd@3.0.1.patch"))
+}
+
+func Test_PnpmPrunePatches(t *testing.T) {
+	contents, err := getFixture(t, "pnpm-patch.yaml")
+	assert.NilError(t, err)
+
+	lockfile, err := DecodePnpmLockfile(contents)
+	assert.NilError(t, err)
+
+	prunedLockfile, err := lockfile.Subgraph(
+		[]turbopath.AnchoredSystemPath{turbopath.AnchoredSystemPath("packages/dependency")},
+		[]string{"/is-odd/3.0.1_nrrwwz7lemethtlvvm75r5bmhq", "/is-number/6.0.0", "/@babel/core/7.20.12_3hyn7hbvzkemudbydlwjmrb65y", "/moleculer/0.14.28_5pk7ojv7qbqha75ozglk4y4f74_kumip57h7zlinbhp4gz3jrbqry"},
+	)
+	assert.NilError(t, err)
+
+	assert.Equal(t, len(prunedLockfile.Patches()), 3)
+}
+
+func Test_PnpmPrunePatchesV6(t *testing.T) {
+	contents, err := getFixture(t, "pnpm-patch-v6.yaml")
+	assert.NilError(t, err)
+
+	lockfile, err := DecodePnpmLockfile(contents)
+	assert.NilError(t, err)
+
+	prunedLockfile, err := lockfile.Subgraph(
+		[]turbopath.AnchoredSystemPath{turbopath.AnchoredSystemPath("packages/a")},
+		[]string{"/lodash@4.17.21(patch_hash=lgum37zgng4nfkynzh3cs7wdeq)"},
+	)
+	assert.NilError(t, err)
+
+	assert.Equal(t, len(prunedLockfile.Patches()), 1)
+
+	prunedLockfile, err = lockfile.Subgraph(
+		[]turbopath.AnchoredSystemPath{turbopath.AnchoredSystemPath("packages/b")},
+		[]string{"/@babel/helper-string-parser@7.19.4(patch_hash=wjhgmpzh47qmycrzgpeyoyh3ce)(@babel/core@7.21.0)"},
+	)
+	assert.NilError(t, err)
+
+	assert.Equal(t, len(prunedLockfile.Patches()), 1)
+}
+
+func Test_PnpmAbsoluteDependency(t *testing.T) {
+	type testCase struct {
+		fixture string
+		key     string
+	}
+	testcases := []testCase{
+		{"pnpm-absolute.yaml", "/@scope/child/1.0.0"},
+		{"pnpm-absolute-v6.yaml", "/@scope/child@1.0.0"},
+	}
+	for _, tc := range testcases {
+		contents, err := getFixture(t, tc.fixture)
+		assert.NilError(t, err, tc.fixture)
+
+		lockfile, err := DecodePnpmLockfile(contents)
+		assert.NilError(t, err, tc.fixture)
+
+		pkg, err := lockfile.ResolvePackage(turbopath.AnchoredUnixPath("packages/a"), "child", tc.key)
+		assert.NilError(t, err, "resolve")
+		assert.Assert(t, pkg.Found, tc.fixture)
+		assert.DeepEqual(t, pkg.Key, tc.key)
+		assert.DeepEqual(t, pkg.Version, "1.0.0")
+	}
+}
+
+func Test_LockfilePeer(t *testing.T) {
+	contents, err := getFixture(t, "pnpm-peer-v6.yaml")
+	if err != nil {
+		t.Error(err)
+	}
+	assert.NilError(t, err, "read fixture")
+	lockfile, err := DecodePnpmLockfile(contents)
+	assert.NilError(t, err, "parse lockfile")
+
+	pkg, err := lockfile.ResolvePackage(turbopath.AnchoredUnixPath("apps/web"), "next", "13.0.4")
+	assert.NilError(t, err, "read lockfile")
+	assert.Assert(t, pkg.Found)
+	assert.DeepEqual(t, pkg.Version, "13.0.4(react-dom@18.2.0)(react@18.2.0)")
+	assert.DeepEqual(t, pkg.Key, "/next@13.0.4(react-dom@18.2.0)(react@18.2.0)")
+}
+
+func Test_LockfileTopLevelOverride(t *testing.T) {
+	contents, err := getFixture(t, "pnpm-top-level-dupe.yaml")
+	if err != nil {
+		t.Error(err)
+	}
+	lockfile, err := DecodePnpmLockfile(contents)
+	assert.NilError(t, err, "decode lockfile")
+
+	pkg, err := lockfile.ResolvePackage(turbopath.AnchoredUnixPath("packages/a"), "ci-info", "3.7.1")
+	assert.NilError(t, err, "resolve package")
+
+	assert.Assert(t, pkg.Found)
+	assert.DeepEqual(t, pkg.Key, "/ci-info/3.7.1")
+	assert.DeepEqual(t, pkg.Version, "3.7.1")
+}
+
+func Test_PnpmOverride(t *testing.T) {
+	contents, err := getFixture(t, "pnpm_override.yaml")
+	if err != nil {
+		t.Error(err)
+	}
+	lockfile, err := DecodePnpmLockfile(contents)
+	assert.NilError(t, err, "decode lockfile")
+
+	pkg, err := lockfile.ResolvePackage(
+		turbopath.AnchoredUnixPath("config/hardhat"),
+		"@nomiclabs/hardhat-ethers",
+		"npm:hardhat-deploy-ethers@0.3.0-beta.13",
+	)
+	assert.NilError(t, err, "failure to find package")
+	assert.Assert(t, pkg.Found)
+	assert.DeepEqual(t, pkg.Key, "/hardhat-deploy-ethers/0.3.0-beta.13_yab2ug5tvye2kp6e24l5x3z7uy")
+	assert.DeepEqual(t, pkg.Version, "0.3.0-beta.13_yab2ug5tvye2kp6e24l5x3z7uy")
+}
+
+func Test_DepPathParsing(t *testing.T) {
+	type testCase struct {
+		input string
+		dp    depPath
+	}
+	testCases := []testCase{
+		{
+			"/foo/1.0.0",
+			depPath{
+				name:    "foo",
+				version: "1.0.0",
+			},
+		},
+		{
+			"/@foo/bar/1.0.0",
+			depPath{
+				name:    "@foo/bar",
+				version: "1.0.0",
+			},
+		},
+		{
+			"example.org/foo/1.0.0",
+			depPath{
+				host:    "example.org",
+				name:    "foo",
+				version: "1.0.0",
+			},
+		},
+		{
+			"/foo/1.0.0_bar@1.0.0",
+			depPath{
+				name:       "foo",
+				version:    "1.0.0",
+				peerSuffix: "bar@1.0.0",
+			},
+		},
+		{
+			"/foo/1.0.0(bar@1.0.0)",
+			depPath{
+				name:       "foo",
+				version:    "1.0.0",
+				peerSuffix: "(bar@1.0.0)",
+			},
+		},
+		{
+			"/foo/1.0.0_patchHash_peerHash",
+			depPath{
+				name:       "foo",
+				version:    "1.0.0",
+				peerSuffix: "patchHash_peerHash",
+			},
+		},
+		{
+			"/@babel/helper-string-parser/7.19.4(patch_hash=wjhgmpzh47qmycrzgpeyoyh3ce)(@babel/core@7.21.0)",
+			depPath{
+				name:       "@babel/helper-string-parser",
+				version:    "7.19.4",
+				peerSuffix: "(patch_hash=wjhgmpzh47qmycrzgpeyoyh3ce)(@babel/core@7.21.0)",
+			},
+		},
+	}
+
+	for _, tc := range testCases {
+		assert.Equal(t, parseDepPath(tc.input), tc.dp, tc.input)
+	}
+}
+
+func Test_PnpmAliasesOverlap(t *testing.T) {
+	contents, err := getFixture(t, "pnpm-absolute.yaml")
+	assert.NilError(t, err)
+
+	lockfile, err := DecodePnpmLockfile(contents)
+	assert.NilError(t, err)
+
+	closure, err := transitiveClosure("packages/a", map[string]string{"@scope/parent": "^1.0.0", "another": "^1.0.0", "special": "npm:Special@1.2.3"}, lockfile)
+	assert.NilError(t, err)
+
+	deps := []Package{}
+
+	for _, v := range closure.ToSlice() {
+		dep := v.(Package)
+		deps = append(deps, dep)
+	}
+	sort.Sort(ByKey(deps))
+
+	assert.DeepEqual(t, deps, []Package{
+		{"/@scope/child/1.0.0", "1.0.0", true},
+		{"/@scope/parent/1.0.0", "1.0.0", true},
+		{"/Special/1.2.3", "1.2.3", true},
+		{"/another/1.0.0", "1.0.0", true},
+		{"/foo/1.0.0", "1.0.0", true},
+	})
+}
diff --git a/cli/internal/lockfile/testdata/berry.lock b/cli/internal/lockfile/testdata/berry.lock
new file mode 100644
index 0000000..f4436e4
--- /dev/null
+++ b/cli/internal/lockfile/testdata/berry.lock
@@ -0,0 +1,3283 @@
+# This file is generated by running "yarn install" inside your project.
+# Manual changes might be lost - proceed with caution!
+ +__metadata: + version: 6 + cacheKey: 8c0 + +"@ampproject/remapping@npm:^2.1.0": + version: 2.2.0 + resolution: "@ampproject/remapping@npm:2.2.0" + dependencies: + "@jridgewell/gen-mapping": ^0.1.0 + "@jridgewell/trace-mapping": ^0.3.9 + checksum: d74d170d06468913921d72430259424b7e4c826b5a7d39ff839a29d547efb97dc577caa8ba3fb5cf023624e9af9d09651afc3d4112a45e2050328abc9b3a2292 + languageName: node + linkType: hard + +"@babel/code-frame@npm:7.12.11": + version: 7.12.11 + resolution: "@babel/code-frame@npm:7.12.11" + dependencies: + "@babel/highlight": ^7.10.4 + checksum: 3963eff3ebfb0e091c7e6f99596ef4b258683e4ba8a134e4e95f77afe85be5c931e184fff6435fb4885d12eba04a5e25532f7fbc292ca13b48e7da943474e2f3 + languageName: node + linkType: hard + +"@babel/code-frame@npm:^7.18.6": + version: 7.18.6 + resolution: "@babel/code-frame@npm:7.18.6" + dependencies: + "@babel/highlight": ^7.18.6 + checksum: 195e2be3172d7684bf95cff69ae3b7a15a9841ea9d27d3c843662d50cdd7d6470fd9c8e64be84d031117e4a4083486effba39f9aef6bbb2c89f7f21bcfba33ba + languageName: node + linkType: hard + +"@babel/compat-data@npm:^7.19.1": + version: 7.19.1 + resolution: "@babel/compat-data@npm:7.19.1" + checksum: f985887ea08a140e4af87a94d3fb17af0345491eb97f5a85b1840255c2e2a97429f32a8fd12a7aae9218af5f1024f1eb12a5cd280d2d69b2337583c17ea506ba + languageName: node + linkType: hard + +"@babel/core@npm:^7.0.0": + version: 7.19.1 + resolution: "@babel/core@npm:7.19.1" + dependencies: + "@ampproject/remapping": ^2.1.0 + "@babel/code-frame": ^7.18.6 + "@babel/generator": ^7.19.0 + "@babel/helper-compilation-targets": ^7.19.1 + "@babel/helper-module-transforms": ^7.19.0 + "@babel/helpers": ^7.19.0 + "@babel/parser": ^7.19.1 + "@babel/template": ^7.18.10 + "@babel/traverse": ^7.19.1 + "@babel/types": ^7.19.0 + convert-source-map: ^1.7.0 + debug: ^4.1.0 + gensync: ^1.0.0-beta.2 + json5: ^2.2.1 + semver: ^6.3.0 + checksum: 941c8c119b80bdba5fafc80bbaa424d51146b6d3c30b8fae35879358dd37c11d3d0926bc7e970a0861229656eedaa8c884d4a3a25cc904086eb73b827a2f1168 + languageName: node + linkType: hard + +"@babel/generator@npm:^7.19.0": + version: 7.19.0 + resolution: "@babel/generator@npm:7.19.0" + dependencies: + "@babel/types": ^7.19.0 + "@jridgewell/gen-mapping": ^0.3.2 + jsesc: ^2.5.1 + checksum: aa3d5785cf8f8e81672dcc61aef351188efeadb20d9f66d79113d82cbcf3bbbdeb829989fa14582108572ddbc4e4027bdceb06ccaf5ec40fa93c2dda8fbcd4aa + languageName: node + linkType: hard + +"@babel/helper-compilation-targets@npm:^7.19.1": + version: 7.19.1 + resolution: "@babel/helper-compilation-targets@npm:7.19.1" + dependencies: + "@babel/compat-data": ^7.19.1 + "@babel/helper-validator-option": ^7.18.6 + browserslist: ^4.21.3 + semver: ^6.3.0 + peerDependencies: + "@babel/core": ^7.0.0 + checksum: c2d3039265e498b341a6b597f855f2fcef02659050fefedf36ad4e6815e6aafe1011a761214cc80d98260ed07ab15a8cbe959a0458e97bec5f05a450e1b1741b + languageName: node + linkType: hard + +"@babel/helper-environment-visitor@npm:^7.18.9": + version: 7.18.9 + resolution: "@babel/helper-environment-visitor@npm:7.18.9" + checksum: b25101f6162ddca2d12da73942c08ad203d7668e06663df685634a8fde54a98bc015f6f62938e8554457a592a024108d45b8f3e651fd6dcdb877275b73cc4420 + languageName: node + linkType: hard + +"@babel/helper-function-name@npm:^7.19.0": + version: 7.19.0 + resolution: "@babel/helper-function-name@npm:7.19.0" + dependencies: + "@babel/template": ^7.18.10 + "@babel/types": ^7.19.0 + checksum: 
eac1f5db428ba546270c2b8d750c24eb528b8fcfe50c81de2e0bdebf0e20f24bec688d4331533b782e4a907fad435244621ca2193cfcf80a86731299840e0f6e + languageName: node + linkType: hard + +"@babel/helper-hoist-variables@npm:^7.18.6": + version: 7.18.6 + resolution: "@babel/helper-hoist-variables@npm:7.18.6" + dependencies: + "@babel/types": ^7.18.6 + checksum: fd9c35bb435fda802bf9ff7b6f2df06308a21277c6dec2120a35b09f9de68f68a33972e2c15505c1a1a04b36ec64c9ace97d4a9e26d6097b76b4396b7c5fa20f + languageName: node + linkType: hard + +"@babel/helper-module-imports@npm:^7.18.6": + version: 7.18.6 + resolution: "@babel/helper-module-imports@npm:7.18.6" + dependencies: + "@babel/types": ^7.18.6 + checksum: f393f8a3b3304b1b7a288a38c10989de754f01d29caf62ce7c4e5835daf0a27b81f3ac687d9d2780d39685aae7b55267324b512150e7b2be967b0c493b6a1def + languageName: node + linkType: hard + +"@babel/helper-module-transforms@npm:^7.19.0": + version: 7.19.0 + resolution: "@babel/helper-module-transforms@npm:7.19.0" + dependencies: + "@babel/helper-environment-visitor": ^7.18.9 + "@babel/helper-module-imports": ^7.18.6 + "@babel/helper-simple-access": ^7.18.6 + "@babel/helper-split-export-declaration": ^7.18.6 + "@babel/helper-validator-identifier": ^7.18.6 + "@babel/template": ^7.18.10 + "@babel/traverse": ^7.19.0 + "@babel/types": ^7.19.0 + checksum: 4483276c66f56cf3b5b063634092ad9438c2593725de5c143ba277dda82f1501e6d73b311c1b28036f181dbe36eaeff29f24726cde37a599d4e735af294e5359 + languageName: node + linkType: hard + +"@babel/helper-simple-access@npm:^7.18.6": + version: 7.18.6 + resolution: "@babel/helper-simple-access@npm:7.18.6" + dependencies: + "@babel/types": ^7.18.6 + checksum: 37cd36eef199e0517845763c1e6ff6ea5e7876d6d707a6f59c9267c547a50aa0e84260ba9285d49acfaf2cfa0a74a772d92967f32ac1024c961517d40b6c16a5 + languageName: node + linkType: hard + +"@babel/helper-split-export-declaration@npm:^7.18.6": + version: 7.18.6 + resolution: "@babel/helper-split-export-declaration@npm:7.18.6" + dependencies: + "@babel/types": ^7.18.6 + checksum: c6d3dede53878f6be1d869e03e9ffbbb36f4897c7cc1527dc96c56d127d834ffe4520a6f7e467f5b6f3c2843ea0e81a7819d66ae02f707f6ac057f3d57943a2b + languageName: node + linkType: hard + +"@babel/helper-string-parser@npm:^7.18.10": + version: 7.18.10 + resolution: "@babel/helper-string-parser@npm:7.18.10" + checksum: d554a4393365b624916b5c00a4cc21c990c6617e7f3fe30be7d9731f107f12c33229a7a3db9d829bfa110d2eb9f04790745d421640e3bd245bb412dc0ea123c1 + languageName: node + linkType: hard + +"@babel/helper-validator-identifier@npm:^7.18.6": + version: 7.19.1 + resolution: "@babel/helper-validator-identifier@npm:7.19.1" + checksum: 0eca5e86a729162af569b46c6c41a63e18b43dbe09fda1d2a3c8924f7d617116af39cac5e4cd5d431bb760b4dca3c0970e0c444789b1db42bcf1fa41fbad0a3a + languageName: node + linkType: hard + +"@babel/helper-validator-option@npm:^7.18.6": + version: 7.18.6 + resolution: "@babel/helper-validator-option@npm:7.18.6" + checksum: f9cc6eb7cc5d759c5abf006402180f8d5e4251e9198197428a97e05d65eb2f8ae5a0ce73b1dfd2d35af41d0eb780627a64edf98a4e71f064eeeacef8de58f2cf + languageName: node + linkType: hard + +"@babel/helpers@npm:^7.19.0": + version: 7.19.0 + resolution: "@babel/helpers@npm:7.19.0" + dependencies: + "@babel/template": ^7.18.10 + "@babel/traverse": ^7.19.0 + "@babel/types": ^7.19.0 + checksum: e50e78e0dbb0435075fa3f85021a6bcae529589800bca0292721afd7f7c874bea54508d6dc57eca16e5b8224f8142c6b0e32e3b0140029dc09865da747da4623 + languageName: node + linkType: hard + +"@babel/highlight@npm:^7.10.4, @babel/highlight@npm:^7.18.6": + 
version: 7.18.6 + resolution: "@babel/highlight@npm:7.18.6" + dependencies: + "@babel/helper-validator-identifier": ^7.18.6 + chalk: ^2.0.0 + js-tokens: ^4.0.0 + checksum: 92d8ee61549de5ff5120e945e774728e5ccd57fd3b2ed6eace020ec744823d4a98e242be1453d21764a30a14769ecd62170fba28539b211799bbaf232bbb2789 + languageName: node + linkType: hard + +"@babel/parser@npm:^7.18.10, @babel/parser@npm:^7.19.1": + version: 7.19.1 + resolution: "@babel/parser@npm:7.19.1" + bin: + parser: ./bin/babel-parser.js + checksum: b1e0acb346b2a533c857e1e97ac0886cdcbd76aafef67835a2b23f760c10568eb53ad8a27dd5f862d8ba4e583742e6067f107281ccbd68959d61bc61e4ddaa51 + languageName: node + linkType: hard + +"@babel/runtime-corejs3@npm:^7.10.2": + version: 7.19.1 + resolution: "@babel/runtime-corejs3@npm:7.19.1" + dependencies: + core-js-pure: ^3.25.1 + regenerator-runtime: ^0.13.4 + checksum: 38a1e8fcd2ba1f76c951259c98a5a11052123923adbf30ec8b2fec202dbbe38c6db61658ef9398e00c30f799e2e54ea036e56a09f43229261918bf5ec1b7d03a + languageName: node + linkType: hard + +"@babel/runtime@npm:^7.10.2, @babel/runtime@npm:^7.18.9": + version: 7.19.0 + resolution: "@babel/runtime@npm:7.19.0" + dependencies: + regenerator-runtime: ^0.13.4 + checksum: fa69c351bb05e1db3ceb9a02fdcf620c234180af68cdda02152d3561015f6d55277265d3109815992f96d910f3db709458cae4f8df1c3def66f32e0867d82294 + languageName: node + linkType: hard + +"@babel/template@npm:^7.18.10": + version: 7.18.10 + resolution: "@babel/template@npm:7.18.10" + dependencies: + "@babel/code-frame": ^7.18.6 + "@babel/parser": ^7.18.10 + "@babel/types": ^7.18.10 + checksum: 93a6aa094af5f355a72bd55f67fa1828a046c70e46f01b1606e6118fa1802b6df535ca06be83cc5a5e834022be95c7b714f0a268b5f20af984465a71e28f1473 + languageName: node + linkType: hard + +"@babel/traverse@npm:^7.19.0, @babel/traverse@npm:^7.19.1": + version: 7.19.1 + resolution: "@babel/traverse@npm:7.19.1" + dependencies: + "@babel/code-frame": ^7.18.6 + "@babel/generator": ^7.19.0 + "@babel/helper-environment-visitor": ^7.18.9 + "@babel/helper-function-name": ^7.19.0 + "@babel/helper-hoist-variables": ^7.18.6 + "@babel/helper-split-export-declaration": ^7.18.6 + "@babel/parser": ^7.19.1 + "@babel/types": ^7.19.0 + debug: ^4.1.0 + globals: ^11.1.0 + checksum: 9d782b5089ebc989e54c2406814ed1206cb745ed2734e6602dee3e23d4b6ebbb703ff86e536276630f8de83fda6cde99f0634e3c3d847ddb40572d0303ba8800 + languageName: node + linkType: hard + +"@babel/types@npm:^7.18.10, @babel/types@npm:^7.18.6, @babel/types@npm:^7.19.0, @babel/types@npm:^7.8.3": + version: 7.19.0 + resolution: "@babel/types@npm:7.19.0" + dependencies: + "@babel/helper-string-parser": ^7.18.10 + "@babel/helper-validator-identifier": ^7.18.6 + to-fast-properties: ^2.0.0 + checksum: 9b346715a68aeede70ba9c685a144b0b26c53bcd595d448e24c8fa8df4d5956a5712e56ebadb7c85dcc32f218ee42788e37b93d50d3295c992072224cb3ef3fe + languageName: node + linkType: hard + +"@eslint/eslintrc@npm:^0.4.3": + version: 0.4.3 + resolution: "@eslint/eslintrc@npm:0.4.3" + dependencies: + ajv: ^6.12.4 + debug: ^4.1.1 + espree: ^7.3.0 + globals: ^13.9.0 + ignore: ^4.0.6 + import-fresh: ^3.2.1 + js-yaml: ^3.13.1 + minimatch: ^3.0.4 + strip-json-comments: ^3.1.1 + checksum: 03a7704150b868c318aab6a94d87a33d30dc2ec579d27374575014f06237ba1370ae11178db772f985ef680d469dc237e7b16a1c5d8edaaeb8c3733e7a95a6d3 + languageName: node + linkType: hard + +"@humanwhocodes/config-array@npm:^0.5.0": + version: 0.5.0 + resolution: "@humanwhocodes/config-array@npm:0.5.0" + dependencies: + "@humanwhocodes/object-schema": ^1.2.0 + debug: ^4.1.1 + 
minimatch: ^3.0.4 + checksum: 44ee6a9f05d93dd9d5935a006b17572328ba9caff8002442f601736cbda79c580cc0f5a49ce9eb88fbacc5c3a6b62098357c2e95326cd17bb9f1a6c61d6e95e7 + languageName: node + linkType: hard + +"@humanwhocodes/object-schema@npm:^1.2.0": + version: 1.2.1 + resolution: "@humanwhocodes/object-schema@npm:1.2.1" + checksum: a824a1ec31591231e4bad5787641f59e9633827d0a2eaae131a288d33c9ef0290bd16fda8da6f7c0fcb014147865d12118df10db57f27f41e20da92369fcb3f1 + languageName: node + linkType: hard + +"@jridgewell/gen-mapping@npm:^0.1.0": + version: 0.1.1 + resolution: "@jridgewell/gen-mapping@npm:0.1.1" + dependencies: + "@jridgewell/set-array": ^1.0.0 + "@jridgewell/sourcemap-codec": ^1.4.10 + checksum: 3bcc21fe786de6ffbf35c399a174faab05eb23ce6a03e8769569de28abbf4facc2db36a9ddb0150545ae23a8d35a7cf7237b2aa9e9356a7c626fb4698287d5cc + languageName: node + linkType: hard + +"@jridgewell/gen-mapping@npm:^0.3.2": + version: 0.3.2 + resolution: "@jridgewell/gen-mapping@npm:0.3.2" + dependencies: + "@jridgewell/set-array": ^1.0.1 + "@jridgewell/sourcemap-codec": ^1.4.10 + "@jridgewell/trace-mapping": ^0.3.9 + checksum: 1832707a1c476afebe4d0fbbd4b9434fdb51a4c3e009ab1e9938648e21b7a97049fa6009393bdf05cab7504108413441df26d8a3c12193996e65493a4efb6882 + languageName: node + linkType: hard + +"@jridgewell/resolve-uri@npm:^3.0.3": + version: 3.1.0 + resolution: "@jridgewell/resolve-uri@npm:3.1.0" + checksum: b5ceaaf9a110fcb2780d1d8f8d4a0bfd216702f31c988d8042e5f8fbe353c55d9b0f55a1733afdc64806f8e79c485d2464680ac48a0d9fcadb9548ee6b81d267 + languageName: node + linkType: hard + +"@jridgewell/set-array@npm:^1.0.0, @jridgewell/set-array@npm:^1.0.1": + version: 1.1.2 + resolution: "@jridgewell/set-array@npm:1.1.2" + checksum: 69a84d5980385f396ff60a175f7177af0b8da4ddb81824cb7016a9ef914eee9806c72b6b65942003c63f7983d4f39a5c6c27185bbca88eb4690b62075602e28e + languageName: node + linkType: hard + +"@jridgewell/sourcemap-codec@npm:^1.4.10": + version: 1.4.14 + resolution: "@jridgewell/sourcemap-codec@npm:1.4.14" + checksum: 61100637b6d173d3ba786a5dff019e1a74b1f394f323c1fee337ff390239f053b87266c7a948777f4b1ee68c01a8ad0ab61e5ff4abb5a012a0b091bec391ab97 + languageName: node + linkType: hard + +"@jridgewell/trace-mapping@npm:^0.3.9": + version: 0.3.15 + resolution: "@jridgewell/trace-mapping@npm:0.3.15" + dependencies: + "@jridgewell/resolve-uri": ^3.0.3 + "@jridgewell/sourcemap-codec": ^1.4.10 + checksum: 38917e9c2b014d469a9f51c016ed506acbe44dd16ec2f6f99b553ebf3764d22abadbf992f2367b6d2b3511f3eae8ed3a8963f6c1030093fda23efd35ecab2bae + languageName: node + linkType: hard + +"@next/env@npm:12.2.5": + version: 12.2.5 + resolution: "@next/env@npm:12.2.5" + checksum: a44939e59b46d5951831529a43dba9daa2e4e467e8680ea96e21ae127d1bf7f11757aaf3a6cff8a51273abfe7af782903e1304405a481361c7ba3e66d47e3238 + languageName: node + linkType: hard + +"@next/eslint-plugin-next@npm:12.3.0": + version: 12.3.0 + resolution: "@next/eslint-plugin-next@npm:12.3.0" + dependencies: + glob: 7.1.7 + checksum: f08582b36ff01a776183b3c33d6d81be3a110c1c3c39c81a33aff91277ea822aa4a952d4f2271a08ce56692ca5c58c9e958aaf4e08348c10cc45a85213b208f0 + languageName: node + linkType: hard + +"@next/swc-android-arm-eabi@npm:12.2.5": + version: 12.2.5 + resolution: "@next/swc-android-arm-eabi@npm:12.2.5" + conditions: os=android & cpu=arm + languageName: node + linkType: hard + +"@next/swc-android-arm64@npm:12.2.5": + version: 12.2.5 + resolution: "@next/swc-android-arm64@npm:12.2.5" + conditions: os=android & cpu=arm64 + languageName: node + linkType: hard + 
+"@next/swc-darwin-arm64@npm:12.2.5": + version: 12.2.5 + resolution: "@next/swc-darwin-arm64@npm:12.2.5" + conditions: os=darwin & cpu=arm64 + languageName: node + linkType: hard + +"@next/swc-darwin-x64@npm:12.2.5": + version: 12.2.5 + resolution: "@next/swc-darwin-x64@npm:12.2.5" + conditions: os=darwin & cpu=x64 + languageName: node + linkType: hard + +"@next/swc-freebsd-x64@npm:12.2.5": + version: 12.2.5 + resolution: "@next/swc-freebsd-x64@npm:12.2.5" + conditions: os=freebsd & cpu=x64 + languageName: node + linkType: hard + +"@next/swc-linux-arm-gnueabihf@npm:12.2.5": + version: 12.2.5 + resolution: "@next/swc-linux-arm-gnueabihf@npm:12.2.5" + conditions: os=linux & cpu=arm + languageName: node + linkType: hard + +"@next/swc-linux-arm64-gnu@npm:12.2.5": + version: 12.2.5 + resolution: "@next/swc-linux-arm64-gnu@npm:12.2.5" + conditions: os=linux & cpu=arm64 & libc=glibc + languageName: node + linkType: hard + +"@next/swc-linux-arm64-musl@npm:12.2.5": + version: 12.2.5 + resolution: "@next/swc-linux-arm64-musl@npm:12.2.5" + conditions: os=linux & cpu=arm64 & libc=musl + languageName: node + linkType: hard + +"@next/swc-linux-x64-gnu@npm:12.2.5": + version: 12.2.5 + resolution: "@next/swc-linux-x64-gnu@npm:12.2.5" + conditions: os=linux & cpu=x64 & libc=glibc + languageName: node + linkType: hard + +"@next/swc-linux-x64-musl@npm:12.2.5": + version: 12.2.5 + resolution: "@next/swc-linux-x64-musl@npm:12.2.5" + conditions: os=linux & cpu=x64 & libc=musl + languageName: node + linkType: hard + +"@next/swc-win32-arm64-msvc@npm:12.2.5": + version: 12.2.5 + resolution: "@next/swc-win32-arm64-msvc@npm:12.2.5" + conditions: os=win32 & cpu=arm64 + languageName: node + linkType: hard + +"@next/swc-win32-ia32-msvc@npm:12.2.5": + version: 12.2.5 + resolution: "@next/swc-win32-ia32-msvc@npm:12.2.5" + conditions: os=win32 & cpu=ia32 + languageName: node + linkType: hard + +"@next/swc-win32-x64-msvc@npm:12.2.5": + version: 12.2.5 + resolution: "@next/swc-win32-x64-msvc@npm:12.2.5" + conditions: os=win32 & cpu=x64 + languageName: node + linkType: hard + +"@nodelib/fs.scandir@npm:2.1.5": + version: 2.1.5 + resolution: "@nodelib/fs.scandir@npm:2.1.5" + dependencies: + "@nodelib/fs.stat": 2.0.5 + run-parallel: ^1.1.9 + checksum: a970d595bd23c66c880e0ef1817791432dbb7acbb8d44b7e7d0e7a22f4521260d4a83f7f9fd61d44fda4610105577f8f58a60718105fb38352baed612fd79e59 + languageName: node + linkType: hard + +"@nodelib/fs.stat@npm:2.0.5, @nodelib/fs.stat@npm:^2.0.2": + version: 2.0.5 + resolution: "@nodelib/fs.stat@npm:2.0.5" + checksum: 012480b5ca9d97bff9261571dbbec7bbc6033f69cc92908bc1ecfad0792361a5a1994bc48674b9ef76419d056a03efadfce5a6cf6dbc0a36559571a7a483f6f0 + languageName: node + linkType: hard + +"@nodelib/fs.walk@npm:^1.2.3": + version: 1.2.8 + resolution: "@nodelib/fs.walk@npm:1.2.8" + dependencies: + "@nodelib/fs.scandir": 2.1.5 + fastq: ^1.6.0 + checksum: 190c643f156d8f8f277bf2a6078af1ffde1fd43f498f187c2db24d35b4b4b5785c02c7dc52e356497b9a1b65b13edc996de08de0b961c32844364da02986dc53 + languageName: node + linkType: hard + +"@rushstack/eslint-patch@npm:^1.1.3": + version: 1.2.0 + resolution: "@rushstack/eslint-patch@npm:1.2.0" + checksum: faa749faae0e83c26ae9eb00ad36a897ac78f3cf27da8e8ff21c00bcf7973b598d823d8f2b3957ef66079288bcf577f94df831eae2d65f3f68d8ca32f18b6aff + languageName: node + linkType: hard + +"@swc/helpers@npm:0.4.3": + version: 0.4.3 + resolution: "@swc/helpers@npm:0.4.3" + dependencies: + tslib: ^2.4.0 + checksum: 
5c2f173e950dd3929d84ae48b3586a274d5a874e7cf2013b3d8081e4f8c723fa3a4d4e63b263e84bb7f06431f87b640e91a12655410463c81a3dc2bbc15eceda
+  languageName: node
+  linkType: hard
+
+"@types/json5@npm:^0.0.29":
+  version: 0.0.29
+  resolution: "@types/json5@npm:0.0.29"
+  checksum: e60b153664572116dfea673c5bda7778dbff150498f44f998e34b5886d8afc47f16799280e4b6e241c0472aef1bc36add771c569c68fc5125fc2ae519a3eb9ac
+  languageName: node
+  linkType: hard
+
+"@types/node@npm:^17.0.12":
+  version: 17.0.45
+  resolution: "@types/node@npm:17.0.45"
+  checksum: aa04366b9103b7d6cfd6b2ef64182e0eaa7d4462c3f817618486ea0422984c51fc69fd0d436eae6c9e696ddfdbec9ccaa27a917f7c2e8c75c5d57827fe3d95e8
+  languageName: node
+  linkType: hard
+
+"@types/prop-types@npm:*":
+  version: 15.7.5
+  resolution: "@types/prop-types@npm:15.7.5"
+  checksum: 5b43b8b15415e1f298243165f1d44390403bb2bd42e662bca3b5b5633fdd39c938e91b7fce3a9483699db0f7a715d08cef220c121f723a634972fdf596aec980
+  languageName: node
+  linkType: hard
+
+"@types/react-dom@npm:^17.0.11":
+  version: 17.0.17
+  resolution: "@types/react-dom@npm:17.0.17"
+  dependencies:
+    "@types/react": ^17
+  checksum: 23caf98aa03e968811560f92a2c8f451694253ebe16b670929b24eaf0e7fa62ba549abe9db0ac028a9d8a9086acd6ab9c6c773f163fa21224845edbc00ba6232
+  languageName: node
+  linkType: hard
+
+"@types/react@npm:18.0.17":
+  version: 18.0.17
+  resolution: "@types/react@npm:18.0.17"
+  dependencies:
+    "@types/prop-types": "*"
+    "@types/scheduler": "*"
+    csstype: ^3.0.2
+  checksum: 18cae64f5bfd6bb58fbd8ee2ba52ec82de844f114254e26de7b513e4b86621f643f9b71d7066958cd571b0d78cb86cbceda449c5289f9349ca573df29ab69252
+  languageName: node
+  linkType: hard
+
+"@types/react@npm:^17, @types/react@npm:^17.0.37":
+  version: 17.0.50
+  resolution: "@types/react@npm:17.0.50"
+  dependencies:
+    "@types/prop-types": "*"
+    "@types/scheduler": "*"
+    csstype: ^3.0.2
+  checksum: b5629dff7c2f3e9fcba95a19b2b3bfd78d7cacc33ba5fc26413dba653d34afcac3b93ddabe563e8062382688a1eac7db68e93739bb8e712d27637a03aaafbbb8
+  languageName: node
+  linkType: hard
+
+"@types/scheduler@npm:*":
+  version: 0.16.2
+  resolution: "@types/scheduler@npm:0.16.2"
+  checksum: b6b4dcfeae6deba2e06a70941860fb1435730576d3689225a421280b7742318d1548b3d22c1f66ab68e414f346a9542f29240bc955b6332c5b11e561077583bc
+  languageName: node
+  linkType: hard
+
+"@typescript-eslint/parser@npm:^5.21.0":
+  version: 5.37.0
+  resolution: "@typescript-eslint/parser@npm:5.37.0"
+  dependencies:
+    "@typescript-eslint/scope-manager": 5.37.0
+    "@typescript-eslint/types": 5.37.0
+    "@typescript-eslint/typescript-estree": 5.37.0
+    debug: ^4.3.4
+  peerDependencies:
+    eslint: ^6.0.0 || ^7.0.0 || ^8.0.0
+  peerDependenciesMeta:
+    typescript:
+      optional: true
+  checksum: 33343e27c9602820d43ee12de9797365d97a5cf3f716e750fa44de760f2a2c6800f3bc4fa54931ac70c0e0ede77a92224f8151da7f30fed3bf692a029d6659af
+  languageName: node
+  linkType: hard
+
+"@typescript-eslint/scope-manager@npm:5.37.0":
+  version: 5.37.0
+  resolution: "@typescript-eslint/scope-manager@npm:5.37.0"
+  dependencies:
+    "@typescript-eslint/types": 5.37.0
+    "@typescript-eslint/visitor-keys": 5.37.0
+  checksum: 1c439e21ffa63ebaadb8c8363e9d668132a835a28203e5b779366bfa56772f332e5dedb50d63dffb836839b9d9c4e66aa9e3ea47b8c59465b18a0cbd063ec7a3
+  languageName: node
+  linkType: hard
+
+"@typescript-eslint/types@npm:5.37.0":
+  version: 5.37.0
+  resolution: "@typescript-eslint/types@npm:5.37.0"
+  checksum: 899e59e7775fa95c2d9fcac5cc02cc49d83af5f1ffc706df495046c3b3733f79d5489568b01bfaf8c9ae4636e057056866adc783113036f774580086d0189f21
+  languageName: node
+  linkType: hard
+
+"@typescript-eslint/typescript-estree@npm:5.37.0":
+  version: 5.37.0
+  resolution: "@typescript-eslint/typescript-estree@npm:5.37.0"
+  dependencies:
+    "@typescript-eslint/types": 5.37.0
+    "@typescript-eslint/visitor-keys": 5.37.0
+    debug: ^4.3.4
+    globby: ^11.1.0
+    is-glob: ^4.0.3
+    semver: ^7.3.7
+    tsutils: ^3.21.0
+  peerDependenciesMeta:
+    typescript:
+      optional: true
+  checksum: 80365a50fa11ed39bf54d9ef06e264fbbf3bdbcc55b7d7d555ef0be915edae40ec30e98d08b3f6ef048e1874450cbcb1e7d9f429d4f420dacbbde45d3376a7bc
+  languageName: node
+  linkType: hard
+
+"@typescript-eslint/visitor-keys@npm:5.37.0":
+  version: 5.37.0
+  resolution: "@typescript-eslint/visitor-keys@npm:5.37.0"
+  dependencies:
+    "@typescript-eslint/types": 5.37.0
+    eslint-visitor-keys: ^3.3.0
+  checksum: d6193550f77413aead0cb267e058df80b80a488c8fb4e39beb5f0a70b971c41682a6391903fbc5f3dd859a872016288c434d631b8efc3ac5a04edbdb7b63b5f6
+  languageName: node
+  linkType: hard
+
+"acorn-jsx@npm:^5.3.1":
+  version: 5.3.2
+  resolution: "acorn-jsx@npm:5.3.2"
+  peerDependencies:
+    acorn: ^6.0.0 || ^7.0.0 || ^8.0.0
+  checksum: c3d3b2a89c9a056b205b69530a37b972b404ee46ec8e5b341666f9513d3163e2a4f214a71f4dfc7370f5a9c07472d2fd1c11c91c3f03d093e37637d95da98950
+  languageName: node
+  linkType: hard
+
+"acorn@npm:^7.4.0":
+  version: 7.4.1
+  resolution: "acorn@npm:7.4.1"
+  bin:
+    acorn: bin/acorn
+  checksum: 1860f23c2107c910c6177b7b7be71be350db9e1080d814493fae143ae37605189504152d1ba8743ba3178d0b37269ce1ffc42b101547fdc1827078f82671e407
+  languageName: node
+  linkType: hard
+
+"ajv@npm:^6.10.0, ajv@npm:^6.12.4":
+  version: 6.12.6
+  resolution: "ajv@npm:6.12.6"
+  dependencies:
+    fast-deep-equal: ^3.1.1
+    fast-json-stable-stringify: ^2.0.0
+    json-schema-traverse: ^0.4.1
+    uri-js: ^4.2.2
+  checksum: 874972efe5c4202ab0a68379481fbd3d1b5d0a7bd6d3cc21d40d3536ebff3352a2a1fabb632d4fd2cc7fe4cbdcd5ed6782084c9bbf7f32a1536d18f9da5007d4
+  languageName: node
+  linkType: hard
+
+"ajv@npm:^8.0.1":
+  version: 8.11.0
+  resolution: "ajv@npm:8.11.0"
+  dependencies:
+    fast-deep-equal: ^3.1.1
+    json-schema-traverse: ^1.0.0
+    require-from-string: ^2.0.2
+    uri-js: ^4.2.2
+  checksum: 5e0ff226806763be73e93dd7805b634f6f5921e3e90ca04acdf8db81eed9d8d3f0d4c5f1213047f45ebbf8047ffe0c840fa1ef2ec42c3a644899f69aa72b5bef
+  languageName: node
+  linkType: hard
+
+"ansi-colors@npm:^4.1.1":
+  version: 4.1.3
+  resolution: "ansi-colors@npm:4.1.3"
+  checksum: a9c2ec842038a1fabc7db9ece7d3177e2fe1c5dc6f0c51ecfbf5f39911427b89c00b5dc6b8bd95f82a26e9b16aaae2e83d45f060e98070ce4d1333038edceb0e
+  languageName: node
+  linkType: hard
+
+"ansi-regex@npm:^5.0.1":
+  version: 5.0.1
+  resolution: "ansi-regex@npm:5.0.1"
+  checksum: 2aa4bb54caf2d622f1afdad09441695af2a83aa3fe8b8afa581d205e57ed4261c183c4d3877cee25794443fde5876417d859c108078ab788d6af7e4fe52eb66b
+  languageName: node
+  linkType: hard
+
+"ansi-styles@npm:^3.2.1":
+  version: 3.2.1
+  resolution: "ansi-styles@npm:3.2.1"
+  dependencies:
+    color-convert: ^1.9.0
+  checksum: d85ade01c10e5dd77b6c89f34ed7531da5830d2cb5882c645f330079975b716438cd7ebb81d0d6e6b4f9c577f19ae41ab55f07f19786b02f9dfd9e0377395665
+  languageName: node
+  linkType: hard
+
+"ansi-styles@npm:^4.0.0, ansi-styles@npm:^4.1.0":
+  version: 4.3.0
+  resolution: "ansi-styles@npm:4.3.0"
+  dependencies:
+    color-convert: ^2.0.1
+  checksum: 513b44c3b2105dd14cc42a19271e80f386466c4be574bccf60b627432f9198571ebf4ab1e4c3ba17347658f4ee1711c163d574248c0c1cdc2d5917a0ad582ec4
+  languageName: node
+  linkType: hard
+
+"argparse@npm:^1.0.7":
+  version: 1.0.10
+  resolution: "argparse@npm:1.0.10"
+  dependencies:
+    sprintf-js: ~1.0.2
+  checksum: 7ca6e45583a28de7258e39e13d81e925cfa25d7d4aacbf806a382d3c02fcb13403a07fb8aeef949f10a7cfe4a62da0e2e807b348a5980554cc28ee573ef95945
+  languageName: node
+  linkType: hard
+
+"aria-query@npm:^4.2.2":
+  version: 4.2.2
+  resolution: "aria-query@npm:4.2.2"
+  dependencies:
+    "@babel/runtime": ^7.10.2
+    "@babel/runtime-corejs3": ^7.10.2
+  checksum: 38401a9a400f26f3dcc24b84997461a16b32869a9893d323602bed8da40a8bcc0243b8d2880e942249a1496cea7a7de769e93d21c0baa439f01e1ee936fed665
+  languageName: node
+  linkType: hard
+
+"array-includes@npm:^3.1.4, array-includes@npm:^3.1.5":
+  version: 3.1.5
+  resolution: "array-includes@npm:3.1.5"
+  dependencies:
+    call-bind: ^1.0.2
+    define-properties: ^1.1.4
+    es-abstract: ^1.19.5
+    get-intrinsic: ^1.1.1
+    is-string: ^1.0.7
+  checksum: f6f24d834179604656b7bec3e047251d5cc87e9e87fab7c175c61af48e80e75acd296017abcde21fb52292ab6a2a449ab2ee37213ee48c8709f004d75983f9c5
+  languageName: node
+  linkType: hard
+
+"array-union@npm:^2.1.0":
+  version: 2.1.0
+  resolution: "array-union@npm:2.1.0"
+  checksum: 5bee12395cba82da674931df6d0fea23c4aa4660cb3b338ced9f828782a65caa232573e6bf3968f23e0c5eb301764a382cef2f128b170a9dc59de0e36c39f98d
+  languageName: node
+  linkType: hard
+
+"array.prototype.flat@npm:^1.2.5":
+  version: 1.3.0
+  resolution: "array.prototype.flat@npm:1.3.0"
+  dependencies:
+    call-bind: ^1.0.2
+    define-properties: ^1.1.3
+    es-abstract: ^1.19.2
+    es-shim-unscopables: ^1.0.0
+  checksum: 2a652b3e8dc0bebb6117e42a5ab5738af0203a14c27341d7bb2431467bdb4b348e2c5dc555dfcda8af0a5e4075c400b85311ded73861c87290a71a17c3e0a257
+  languageName: node
+  linkType: hard
+
+"array.prototype.flatmap@npm:^1.3.0":
+  version: 1.3.0
+  resolution: "array.prototype.flatmap@npm:1.3.0"
+  dependencies:
+    call-bind: ^1.0.2
+    define-properties: ^1.1.3
+    es-abstract: ^1.19.2
+    es-shim-unscopables: ^1.0.0
+  checksum: 818538f39409c4045d874be85df0dbd195e1446b14d22f95bdcfefea44ae77db44e42dcd89a559254ec5a7c8b338cfc986cc6d641e3472f9a5326b21eb2976a2
+  languageName: node
+  linkType: hard
+
+"ast-types-flow@npm:^0.0.7":
+  version: 0.0.7
+  resolution: "ast-types-flow@npm:0.0.7"
+  checksum: a26dcc2182ffee111cad7c471759b0bda22d3b7ebacf27c348b22c55f16896b18ab0a4d03b85b4020dce7f3e634b8f00b593888f622915096ea1927fa51866c4
+  languageName: node
+  linkType: hard
+
+"astral-regex@npm:^2.0.0":
+  version: 2.0.0
+  resolution: "astral-regex@npm:2.0.0"
+  checksum: 876231688c66400473ba505731df37ea436e574dd524520294cc3bbc54ea40334865e01fa0d074d74d036ee874ee7e62f486ea38bc421ee8e6a871c06f011766
+  languageName: node
+  linkType: hard
+
+"axe-core@npm:^4.4.3":
+  version: 4.4.3
+  resolution: "axe-core@npm:4.4.3"
+  checksum: c3ea000d9ace3ba0bc747c8feafc24b0de62a0f7d93021d0f77b19c73fca15341843510f6170da563d51535d6cfb7a46c5fc0ea36170549dbb44b170208450a2
+  languageName: node
+  linkType: hard
+
+"axobject-query@npm:^2.2.0":
+  version: 2.2.0
+  resolution: "axobject-query@npm:2.2.0"
+  checksum: 96b8c7d807ca525f41ad9b286186e2089b561ba63a6d36c3e7d73dc08150714660995c7ad19cda05784458446a0793b45246db45894631e13853f48c1aa3117f
+  languageName: node
+  linkType: hard
+
+"balanced-match@npm:^1.0.0":
+  version: 1.0.2
+  resolution: "balanced-match@npm:1.0.2"
+  checksum: 9706c088a283058a8a99e0bf91b0a2f75497f185980d9ffa8b304de1d9e58ebda7c72c07ebf01dadedaac5b2907b2c6f566f660d62bd336c3468e960403b9d65
+  languageName: node
+  linkType: hard
+
+"berry-patch@workspace:.":
+  version: 0.0.0-use.local
+  resolution: "berry-patch@workspace:."
+  dependencies:
+    eslint-config-custom: "*"
+    prettier: latest
+    turbo: latest
+  languageName: unknown
+  linkType: soft
+
+"brace-expansion@npm:^1.1.7":
+  version: 1.1.11
+  resolution: "brace-expansion@npm:1.1.11"
+  dependencies:
+    balanced-match: ^1.0.0
+    concat-map: 0.0.1
+  checksum: faf34a7bb0c3fcf4b59c7808bc5d2a96a40988addf2e7e09dfbb67a2251800e0d14cd2bfc1aa79174f2f5095c54ff27f46fb1289fe2d77dac755b5eb3434cc07
+  languageName: node
+  linkType: hard
+
+"braces@npm:^3.0.2":
+  version: 3.0.2
+  resolution: "braces@npm:3.0.2"
+  dependencies:
+    fill-range: ^7.0.1
+  checksum: e2a8e769a863f3d4ee887b5fe21f63193a891c68b612ddb4b68d82d1b5f3ff9073af066c343e9867a393fe4c2555dcb33e89b937195feb9c1613d259edfcd459
+  languageName: node
+  linkType: hard
+
+"browserslist@npm:^4.21.3":
+  version: 4.21.4
+  resolution: "browserslist@npm:4.21.4"
+  dependencies:
+    caniuse-lite: ^1.0.30001400
+    electron-to-chromium: ^1.4.251
+    node-releases: ^2.0.6
+    update-browserslist-db: ^1.0.9
+  bin:
+    browserslist: cli.js
+  checksum: 4af3793704dbb4615bcd29059ab472344dc7961c8680aa6c4bb84f05340e14038d06a5aead58724eae69455b8fade8b8c69f1638016e87e5578969d74c078b79
+  languageName: node
+  linkType: hard
+
+"call-bind@npm:^1.0.0, call-bind@npm:^1.0.2":
+  version: 1.0.2
+  resolution: "call-bind@npm:1.0.2"
+  dependencies:
+    function-bind: ^1.1.1
+    get-intrinsic: ^1.0.2
+  checksum: f8e31de9d19988a4b80f3e704788c4a2d6b6f3d17cfec4f57dc29ced450c53a49270dc66bf0fbd693329ee948dd33e6c90a329519aef17474a4d961e8d6426b0
+  languageName: node
+  linkType: hard
+
+"callsites@npm:^3.0.0":
+  version: 3.1.0
+  resolution: "callsites@npm:3.1.0"
+  checksum: 072d17b6abb459c2ba96598918b55868af677154bec7e73d222ef95a8fdb9bbf7dae96a8421085cdad8cd190d86653b5b6dc55a4484f2e5b2e27d5e0c3fc15b3
+  languageName: node
+  linkType: hard
+
+"caniuse-lite@npm:^1.0.30001332, caniuse-lite@npm:^1.0.30001400":
+  version: 1.0.30001400
+  resolution: "caniuse-lite@npm:1.0.30001400"
+  checksum: 984e29d3c02fd02a59cc92ef4a5e9390fce250de3791056362347cf901f0d91041246961a57cfa8fed800538d03ee341bc4f7eaed19bf7be0ef8a181d94cd848
+  languageName: node
+  linkType: hard
+
+"chalk@npm:^2.0.0":
+  version: 2.4.2
+  resolution: "chalk@npm:2.4.2"
+  dependencies:
+    ansi-styles: ^3.2.1
+    escape-string-regexp: ^1.0.5
+    supports-color: ^5.3.0
+  checksum: ec3661d38fe77f681200f878edbd9448821924e0f93a9cefc0e26a33b145f1027a2084bf19967160d11e1f03bfe4eaffcabf5493b89098b2782c3fe0b03d80c2
+  languageName: node
+  linkType: hard
+
+"chalk@npm:^4.0.0":
+  version: 4.1.2
+  resolution: "chalk@npm:4.1.2"
+  dependencies:
+    ansi-styles: ^4.1.0
+    supports-color: ^7.1.0
+  checksum: fe75c9d5c76a7a98d45495b91b2172fa3b7a09e0cc9370e5c8feb1c567b85c4288e2b3fded7cfdd7359ac28d6b3844feb8b82b8686842e93d23c827c417e83fc
+  languageName: node
+  linkType: hard
+
+"color-convert@npm:^1.9.0":
+  version: 1.9.3
+  resolution: "color-convert@npm:1.9.3"
+  dependencies:
+    color-name: 1.1.3
+  checksum: fd7a64a17cde98fb923b1dd05c5f2e6f7aefda1b60d67e8d449f9328b4e53b228a428fd38bfeaeb2db2ff6b6503a776a996150b80cdf224062af08a5c8a3a203
+  languageName: node
+  linkType: hard
+
+"color-convert@npm:^2.0.1":
+  version: 2.0.1
+  resolution: "color-convert@npm:2.0.1"
+  dependencies:
+    color-name: ~1.1.4
+  checksum: 79e6bdb9fd479a205c71d89574fccfb22bd9053bd98c6c4d870d65c132e5e904e6034978e55b43d69fcaa7433af2016ee203ce76eeba9cfa554b373e7f7db336
+  languageName: node
+  linkType: hard
+
+"color-name@npm:1.1.3":
+  version: 1.1.3
+  resolution: "color-name@npm:1.1.3"
+  checksum: 09c5d3e33d2105850153b14466501f2bfb30324a2f76568a408763a3b7433b0e50e5b4ab1947868e65cb101bb7cb75029553f2c333b6d4b8138a73fcc133d69d
+  languageName: node
+  linkType: hard
+
+"color-name@npm:~1.1.4":
+  version: 1.1.4
+  resolution: "color-name@npm:1.1.4"
+  checksum: b0445859521eb4021cd0fb0cc1a75cecf67fceecae89b63f62b201cca8d345baf8b952c966862a9d9a2632987d4f6581f0ec8d957dfacece86f0a7919316f610
+  languageName: node
+  linkType: hard
+
+"concat-map@npm:0.0.1":
+  version: 0.0.1
+  resolution: "concat-map@npm:0.0.1"
+  checksum: 902a9f5d8967a3e2faf138d5cb784b9979bad2e6db5357c5b21c568df4ebe62bcb15108af1b2253744844eb964fc023fbd9afbbbb6ddd0bcc204c6fb5b7bf3af
+  languageName: node
+  linkType: hard
+
+"convert-source-map@npm:^1.7.0":
+  version: 1.8.0
+  resolution: "convert-source-map@npm:1.8.0"
+  dependencies:
+    safe-buffer: ~5.1.1
+  checksum: 985d974a2d33e1a2543ada51c93e1ba2f73eaed608dc39f229afc78f71dcc4c8b7d7c684aa647e3c6a3a204027444d69e53e169ce94e8d1fa8d7dee80c9c8fed
+  languageName: node
+  linkType: hard
+
+"core-js-pure@npm:^3.25.1":
+  version: 3.25.1
+  resolution: "core-js-pure@npm:3.25.1"
+  checksum: 0123131ec7ab3a1e56f0b4df4ae659de03d9c245ce281637d4d0f18f9839d8e0cfbfa989bd577ce1b67826f889a7dcc734421f697cf1bbe59f605f29c537a678
+  languageName: node
+  linkType: hard
+
+"cross-spawn@npm:^7.0.2":
+  version: 7.0.3
+  resolution: "cross-spawn@npm:7.0.3"
+  dependencies:
+    path-key: ^3.1.0
+    shebang-command: ^2.0.0
+    which: ^2.0.1
+  checksum: 671cc7c7288c3a8406f3c69a3ae2fc85555c04169e9d611def9a675635472614f1c0ed0ef80955d5b6d4e724f6ced67f0ad1bb006c2ea643488fcfef994d7f52
+  languageName: node
+  linkType: hard
+
+"csstype@npm:^3.0.2":
+  version: 3.1.1
+  resolution: "csstype@npm:3.1.1"
+  checksum: 1f7b4f5fdd955b7444b18ebdddf3f5c699159f13e9cf8ac9027ae4a60ae226aef9bbb14a6e12ca7dba3358b007cee6354b116e720262867c398de6c955ea451d
+  languageName: node
+  linkType: hard
+
+"damerau-levenshtein@npm:^1.0.8":
+  version: 1.0.8
+  resolution: "damerau-levenshtein@npm:1.0.8"
+  checksum: d240b7757544460ae0586a341a53110ab0a61126570ef2d8c731e3eab3f0cb6e488e2609e6a69b46727635de49be20b071688698744417ff1b6c1d7ccd03e0de
+  languageName: node
+  linkType: hard
+
+"debug@npm:^2.6.9":
+  version: 2.6.9
+  resolution: "debug@npm:2.6.9"
+  dependencies:
+    ms: 2.0.0
+  checksum: d2f51589ca66df60bf36e1fa6e4386b318c3f1e06772280eea5b1ae9fd3d05e9c2b7fd8a7d862457d00853c75b00451aa2d7459b924629ee385287a650f58fe6
+  languageName: node
+  linkType: hard
+
+"debug@npm:^3.2.7":
+  version: 3.2.7
+  resolution: "debug@npm:3.2.7"
+  dependencies:
+    ms: ^2.1.1
+  checksum: b3d8c5940799914d30314b7c3304a43305fd0715581a919dacb8b3176d024a782062368405b47491516d2091d6462d4d11f2f4974a405048094f8bfebfa3071c
+  languageName: node
+  linkType: hard
+
+"debug@npm:^4.0.1, debug@npm:^4.1.0, debug@npm:^4.1.1, debug@npm:^4.3.4":
+  version: 4.3.4
+  resolution: "debug@npm:4.3.4"
+  dependencies:
+    ms: 2.1.2
+  peerDependenciesMeta:
+    supports-color:
+      optional: true
+  checksum: 3dbad3f94ea64f34431a9cbf0bafb61853eda57bff2880036153438f50fb5a84f27683ba0d8e5426bf41a8c6ff03879488120cf5b3a761e77953169c0600a708
+  languageName: node
+  linkType: hard
+
+"deep-is@npm:^0.1.3":
+  version: 0.1.4
+  resolution: "deep-is@npm:0.1.4"
+  checksum: edb65dd0d7d1b9c40b2f50219aef30e116cedd6fc79290e740972c132c09106d2e80aa0bc8826673dd5a00222d4179c84b36a790eef63a4c4bca75a37ef90804
+  languageName: node
+  linkType: hard
+
+"define-properties@npm:^1.1.3, define-properties@npm:^1.1.4":
+  version: 1.1.4
+  resolution: "define-properties@npm:1.1.4"
+  dependencies:
+    has-property-descriptors: ^1.0.0
+    object-keys: ^1.1.1
+  checksum: ce0aef3f9eb193562b5cfb79b2d2c86b6a109dfc9fdcb5f45d680631a1a908c06824ddcdb72b7573b54e26ace07f0a23420aaba0d5c627b34d2c1de8ef527e2b
+  languageName: node
+  linkType: hard
+
+"dir-glob@npm:^3.0.1":
+  version: 3.0.1
+  resolution: "dir-glob@npm:3.0.1"
+  dependencies:
+    path-type: ^4.0.0
+  checksum: fa05e18324510d7283f55862f3161c6759a3f2f8dbce491a2fc14c8324c498286c54282c1f0e933cb930da8419b30679389499b919122952a4f8592362ef4615
+  languageName: node
+  linkType: hard
+
+"docs@workspace:apps/docs":
+  version: 0.0.0-use.local
+  resolution: "docs@workspace:apps/docs"
+  dependencies:
+    "@babel/core": ^7.0.0
+    "@types/node": ^17.0.12
+    "@types/react": 18.0.17
+    eslint: 7.32.0
+    eslint-config-custom: "*"
+    lodash: ^4.17.21
+    next: 12.2.5
+    next-transpile-modules: 9.0.0
+    react: 18.2.0
+    react-dom: 18.2.0
+    tsconfig: "*"
+    typescript: ^4.5.3
+    ui: "*"
+  languageName: unknown
+  linkType: soft
+
+"doctrine@npm:^2.1.0":
+  version: 2.1.0
+  resolution: "doctrine@npm:2.1.0"
+  dependencies:
+    esutils: ^2.0.2
+  checksum: a45e277f7feaed309fe658ace1ff286c6e2002ac515af0aaf37145b8baa96e49899638c7cd47dccf84c3d32abfc113246625b3ac8f552d1046072adee13b0dc8
+  languageName: node
+  linkType: hard
+
+"doctrine@npm:^3.0.0":
+  version: 3.0.0
+  resolution: "doctrine@npm:3.0.0"
+  dependencies:
+    esutils: ^2.0.2
+  checksum: fd7673ca77fe26cd5cba38d816bc72d641f500f1f9b25b83e8ce28827fe2da7ad583a8da26ab6af85f834138cf8dae9f69b0cd6ab925f52ddab1754db44d99ce
+  languageName: node
+  linkType: hard
+
+"electron-to-chromium@npm:^1.4.251":
+  version: 1.4.251
+  resolution: "electron-to-chromium@npm:1.4.251"
+  checksum: 470a04dfe1d34814f8bc7e1dde606851b6f787a6d78655a57df063844fc71feb64ce793c52a3a130ceac1fc368b8d3e25a4c55c847a1e9c02c3090f9dcbf40ac
+  languageName: node
+  linkType: hard
+
+"emoji-regex@npm:^8.0.0":
+  version: 8.0.0
+  resolution: "emoji-regex@npm:8.0.0"
+  checksum: d4c5c39d5a9868b5fa152f00cada8a936868fd3367f33f71be515ecee4c803132d11b31a6222b2571b1e5f7e13890156a94880345594d0ce7e3c9895f560f192
+  languageName: node
+  linkType: hard
+
+"emoji-regex@npm:^9.2.2":
+  version: 9.2.2
+  resolution: "emoji-regex@npm:9.2.2"
+  checksum: 8487182da74aabd810ac6d6f1994111dfc0e331b01271ae01ec1eb0ad7b5ecc2bbbbd2f053c05cb55a1ac30449527d819bbfbf0e3de1023db308cbcb47f86601
+  languageName: node
+  linkType: hard
+
+"enhanced-resolve@npm:^5.7.0":
+  version: 5.10.0
+  resolution: "enhanced-resolve@npm:5.10.0"
+  dependencies:
+    graceful-fs: ^4.2.4
+    tapable: ^2.2.0
+  checksum: 0bb9830704db271610f900e8d79d70a740ea16f251263362b0c91af545576d09fe50103496606c1300a05e588372d6f9780a9bc2e30ce8ef9b827ec8f44687ff
+  languageName: node
+  linkType: hard
+
+"enquirer@npm:^2.3.5":
+  version: 2.3.6
+  resolution: "enquirer@npm:2.3.6"
+  dependencies:
+    ansi-colors: ^4.1.1
+  checksum: 1c0911e14a6f8d26721c91e01db06092a5f7675159f0261d69c403396a385afd13dd76825e7678f66daffa930cfaa8d45f506fb35f818a2788463d022af1b884
+  languageName: node
+  linkType: hard
+
+"es-abstract@npm:^1.19.0, es-abstract@npm:^1.19.1, es-abstract@npm:^1.19.2, es-abstract@npm:^1.19.5":
+  version: 1.20.2
+  resolution: "es-abstract@npm:1.20.2"
+  dependencies:
+    call-bind: ^1.0.2
+    es-to-primitive: ^1.2.1
+    function-bind: ^1.1.1
+    function.prototype.name: ^1.1.5
+    get-intrinsic: ^1.1.2
+    get-symbol-description: ^1.0.0
+    has: ^1.0.3
+    has-property-descriptors: ^1.0.0
+    has-symbols: ^1.0.3
+    internal-slot: ^1.0.3
+    is-callable: ^1.2.4
+    is-negative-zero: ^2.0.2
+    is-regex: ^1.1.4
+    is-shared-array-buffer: ^1.0.2
+    is-string: ^1.0.7
+    is-weakref: ^1.0.2
+    object-inspect: ^1.12.2
+    object-keys: ^1.1.1
+    object.assign: ^4.1.4
+    regexp.prototype.flags: ^1.4.3
+    string.prototype.trimend: ^1.0.5
+    string.prototype.trimstart: ^1.0.5
+    unbox-primitive: ^1.0.2
+  checksum: ab893dd1f849250f5a2da82656b4e21b511f76429b25a4aea5c8b2a3007ff01cb8e112987d0dd7693b9ad9e6399f8f7be133285d6196a5ebd1b13a4ee2258f70
+  languageName: node
+  linkType: hard
+
+"es-shim-unscopables@npm:^1.0.0":
+  version: 1.0.0
+  resolution: "es-shim-unscopables@npm:1.0.0"
+  dependencies:
+    has: ^1.0.3
+  checksum: 83e95cadbb6ee44d3644dfad60dcad7929edbc42c85e66c3e99aefd68a3a5c5665f2686885cddb47dfeabfd77bd5ea5a7060f2092a955a729bbd8834f0d86fa1
+  languageName: node
+  linkType: hard
+
+"es-to-primitive@npm:^1.2.1":
+  version: 1.2.1
+  resolution: "es-to-primitive@npm:1.2.1"
+  dependencies:
+    is-callable: ^1.1.4
+    is-date-object: ^1.0.1
+    is-symbol: ^1.0.2
+  checksum: 4ead6671a2c1402619bdd77f3503991232ca15e17e46222b0a41a5d81aebc8740a77822f5b3c965008e631153e9ef0580540007744521e72de8e33599fca2eed
+  languageName: node
+  linkType: hard
+
+"escalade@npm:^3.1.1":
+  version: 3.1.1
+  resolution: "escalade@npm:3.1.1"
+  checksum: a3e2a99f07acb74b3ad4989c48ca0c3140f69f923e56d0cba0526240ee470b91010f9d39001f2a4a313841d237ede70a729e92125191ba5d21e74b106800b133
+  languageName: node
+  linkType: hard
+
+"escape-string-regexp@npm:^1.0.5":
+  version: 1.0.5
+  resolution: "escape-string-regexp@npm:1.0.5"
+  checksum: 6092fda75c63b110c706b6a9bfde8a612ad595b628f0bd2147eea1d3406723020810e591effc7db1da91d80a71a737a313567c5abb3813e8d9c71f4aa595b410
+  languageName: node
+  linkType: hard
+
+"escape-string-regexp@npm:^4.0.0":
+  version: 4.0.0
+  resolution: "escape-string-regexp@npm:4.0.0"
+  checksum: 98b48897d93060f2322108bf29db0feba7dd774be96cd069458d1453347b25ce8682ecc39859d4bca2203cc0ab19c237bcc71755eff49a0f8d90beadeeba5cc5
+  languageName: node
+  linkType: hard
+
+"eslint-config-custom@*, eslint-config-custom@workspace:packages/eslint-config-custom":
+  version: 0.0.0-use.local
+  resolution: "eslint-config-custom@workspace:packages/eslint-config-custom"
+  dependencies:
+    eslint: ^7.23.0
+    eslint-config-next: ^12.0.8
+    eslint-config-prettier: ^8.3.0
+    eslint-config-turbo: latest
+    eslint-plugin-react: 7.31.7
+    typescript: ^4.7.4
+  languageName: unknown
+  linkType: soft
+
+"eslint-config-next@npm:^12.0.8":
+  version: 12.3.0
+  resolution: "eslint-config-next@npm:12.3.0"
+  dependencies:
+    "@next/eslint-plugin-next": 12.3.0
+    "@rushstack/eslint-patch": ^1.1.3
+    "@typescript-eslint/parser": ^5.21.0
+    eslint-import-resolver-node: ^0.3.6
+    eslint-import-resolver-typescript: ^2.7.1
+    eslint-plugin-import: ^2.26.0
+    eslint-plugin-jsx-a11y: ^6.5.1
+    eslint-plugin-react: ^7.29.4
+    eslint-plugin-react-hooks: ^4.5.0
+  peerDependencies:
+    eslint: ^7.23.0 || ^8.0.0
+    typescript: ">=3.3.1"
+  peerDependenciesMeta:
+    typescript:
+      optional: true
+  checksum: 50a2e43c515350c689cd848973b953c1d058303b84e05ecba5b5bf0f8feffe3935011de3b574ba35d48de8a5d7d5c42567d21d1a17f02189a701edeb6d76a8e0
+  languageName: node
+  linkType: hard
+
+"eslint-config-prettier@npm:^8.3.0":
+  version: 8.5.0
+  resolution: "eslint-config-prettier@npm:8.5.0"
+  peerDependencies:
+    eslint: ">=7.0.0"
+  bin:
+    eslint-config-prettier: bin/cli.js
+  checksum: 0d0f5c32e7a0ad91249467ce71ca92394ccd343178277d318baf32063b79ea90216f4c81d1065d60f96366fdc60f151d4d68ae7811a58bd37228b84c2083f893
+  languageName: node
+  linkType: hard
+
+eslint-config-turbo@latest:
+  version: 0.0.3
+  resolution: "eslint-config-turbo@npm:0.0.3"
+  dependencies:
+    eslint-plugin-turbo: 0.0.3
+  peerDependencies:
+    eslint: ^7.23.0 || ^8.0.0
+  checksum: c92255e91dd0865faeebc857eb3a862e8ca2ccb37fc54ffce93b73cd41e95ad456826ae6634772450dfa9c705b67c288f476e8e413fab3d8194dc271754528e2
+  languageName: node
+  linkType: hard
+
+"eslint-import-resolver-node@npm:^0.3.6":
+  version: 0.3.6
+  resolution: "eslint-import-resolver-node@npm:0.3.6"
+  dependencies:
+    debug: ^3.2.7
+    resolve: ^1.20.0
+  checksum: 6266733af1e112970e855a5bcc2d2058fb5ae16ad2a6d400705a86b29552b36131ffc5581b744c23d550de844206fb55e9193691619ee4dbf225c4bde526b1c8
+  languageName: node
+  linkType: hard
+
+"eslint-import-resolver-typescript@npm:^2.7.1":
+  version: 2.7.1
+  resolution: "eslint-import-resolver-typescript@npm:2.7.1"
+  dependencies:
+    debug: ^4.3.4
+    glob: ^7.2.0
+    is-glob: ^4.0.3
+    resolve: ^1.22.0
+    tsconfig-paths: ^3.14.1
+  peerDependencies:
+    eslint: "*"
+    eslint-plugin-import: "*"
+  checksum: 1d81b657b1f73bf95b8f0b745c0305574b91630c1db340318f3ca8918e206fce20a933b95e7c419338cc4452cb80bb2b2d92acaf01b6aa315c78a332d832545c
+  languageName: node
+  linkType: hard
+
+"eslint-module-utils@npm:^2.7.3":
+  version: 2.7.4
+  resolution: "eslint-module-utils@npm:2.7.4"
+  dependencies:
+    debug: ^3.2.7
+  dependenciesMeta:
+    debug@4.3.4:
+      unplugged: true
+  peerDependenciesMeta:
+    eslint:
+      optional: true
+  checksum: 5da13645daff145a5c922896b258f8bba560722c3767254e458d894ff5fbb505d6dfd945bffa932a5b0ae06714da2379bd41011c4c20d2d59cc83e23895360f7
+  languageName: node
+  linkType: hard
+
+"eslint-plugin-import@npm:^2.26.0":
+  version: 2.26.0
+  resolution: "eslint-plugin-import@npm:2.26.0"
+  dependencies:
+    array-includes: ^3.1.4
+    array.prototype.flat: ^1.2.5
+    debug: ^2.6.9
+    doctrine: ^2.1.0
+    eslint-import-resolver-node: ^0.3.6
+    eslint-module-utils: ^2.7.3
+    has: ^1.0.3
+    is-core-module: ^2.8.1
+    is-glob: ^4.0.3
+    minimatch: ^3.1.2
+    object.values: ^1.1.5
+    resolve: ^1.22.0
+    tsconfig-paths: ^3.14.1
+  peerDependencies:
+    eslint: ^2 || ^3 || ^4 || ^5 || ^6 || ^7.2.0 || ^8
+  checksum: 0bf77ad80339554481eafa2b1967449e1f816b94c7a6f9614ce33fb4083c4e6c050f10d241dd50b4975d47922880a34de1e42ea9d8e6fd663ebb768baa67e655
+  languageName: node
+  linkType: hard
+
+"eslint-plugin-jsx-a11y@npm:^6.5.1":
+  version: 6.6.1
+  resolution: "eslint-plugin-jsx-a11y@npm:6.6.1"
+  dependencies:
+    "@babel/runtime": ^7.18.9
+    aria-query: ^4.2.2
+    array-includes: ^3.1.5
+    ast-types-flow: ^0.0.7
+    axe-core: ^4.4.3
+    axobject-query: ^2.2.0
+    damerau-levenshtein: ^1.0.8
+    emoji-regex: ^9.2.2
+    has: ^1.0.3
+    jsx-ast-utils: ^3.3.2
+    language-tags: ^1.0.5
+    minimatch: ^3.1.2
+    semver: ^6.3.0
+  peerDependencies:
+    eslint: ^3 || ^4 || ^5 || ^6 || ^7 || ^8
+  checksum: baae7377f0e25a0cc9b34dc333a3dc6ead9ee8365e445451eff554c3ca267a0a6cb88127fe90395c578ab1b92cfed246aef7dc8d2b48b603389e10181799e144
+  languageName: node
+  linkType: hard
+
+"eslint-plugin-react-hooks@npm:^4.5.0":
+  version: 4.6.0
+  resolution: "eslint-plugin-react-hooks@npm:4.6.0"
+  peerDependencies:
+    eslint: ^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0 || ^8.0.0-0
+  checksum: 23001801f14c1d16bf0a837ca7970d9dd94e7b560384b41db378b49b6e32dc43d6e2790de1bd737a652a86f81a08d6a91f402525061b47719328f586a57e86c3
+  languageName: node
+  linkType: hard
+
+"eslint-plugin-react@npm:7.31.7":
+  version: 7.31.7
+  resolution: "eslint-plugin-react@npm:7.31.7"
+  dependencies:
+    array-includes: ^3.1.5
+    array.prototype.flatmap: ^1.3.0
+    doctrine: ^2.1.0
+    estraverse: ^5.3.0
+    jsx-ast-utils: ^2.4.1 || ^3.0.0
+    minimatch: ^3.1.2
+    object.entries: ^1.1.5
+    object.fromentries: ^2.0.5
+    object.hasown: ^1.1.1
+    object.values: ^1.1.5
+    prop-types: ^15.8.1
+    resolve: ^2.0.0-next.3
+    semver: ^6.3.0
+    string.prototype.matchall: ^4.0.7
+  peerDependencies:
+    eslint: ^3 || ^4 || ^5 || ^6 || ^7 || ^8
+  checksum: 582d422f531d7d3894fc09ac941ef8b6ad595782cfca5e1d52af5895ce117def7a0ff8afeea0166bff7b6ceae8baec2313614b1571754f539575cfa9351cd2da
+  languageName: node
+  linkType: hard
+
+"eslint-plugin-react@npm:^7.29.4":
+  version: 7.31.8
+  resolution: "eslint-plugin-react@npm:7.31.8"
+  dependencies:
+    array-includes: ^3.1.5
+    array.prototype.flatmap: ^1.3.0
+    doctrine: ^2.1.0
+    estraverse: ^5.3.0
+    jsx-ast-utils: ^2.4.1 || ^3.0.0
+    minimatch: ^3.1.2
+    object.entries: ^1.1.5
+    object.fromentries: ^2.0.5
+    object.hasown: ^1.1.1
+    object.values: ^1.1.5
+    prop-types: ^15.8.1
+    resolve: ^2.0.0-next.3
+    semver: ^6.3.0
+    string.prototype.matchall: ^4.0.7
+  peerDependencies:
+    eslint: ^3 || ^4 || ^5 || ^6 || ^7 || ^8
+  checksum: 0683e2a624a4df6f08264a3f6bc614a81e8f961c83173bdf2d8d3523f84ed5d234cddc976dbc6815913e007c5984df742ba61be0c0592b27c3daabe0f68165a3
+  languageName: node
+  linkType: hard
+
+"eslint-plugin-turbo@npm:0.0.3":
+  version: 0.0.3
+  resolution: "eslint-plugin-turbo@npm:0.0.3"
+  peerDependencies:
+    eslint: ^7.23.0 || ^8.0.0
+  checksum: 18e2b13ede03eee7635d0c67ca792cf46483e90443143bdc06555bf231045fb5f70b2f6f1d67492365b7fe47620408eea22f7548879f3afcb07ccc070aec5c15
+  languageName: node
+  linkType: hard
+
+"eslint-scope@npm:^5.1.1":
+  version: 5.1.1
+  resolution: "eslint-scope@npm:5.1.1"
+  dependencies:
+    esrecurse: ^4.3.0
+    estraverse: ^4.1.1
+  checksum: 47e4b6a3f0cc29c7feedee6c67b225a2da7e155802c6ea13bbef4ac6b9e10c66cd2dcb987867ef176292bf4e64eccc680a49e35e9e9c669f4a02bac17e86abdb
+  languageName: node
+  linkType: hard
+
+"eslint-utils@npm:^2.1.0":
+  version: 2.1.0
+  resolution: "eslint-utils@npm:2.1.0"
+  dependencies:
+    eslint-visitor-keys: ^1.1.0
+  checksum: 27500938f348da42100d9e6ad03ae29b3de19ba757ae1a7f4a087bdcf83ac60949bbb54286492ca61fac1f5f3ac8692dd21537ce6214240bf95ad0122f24d71d
+  languageName: node
+  linkType: hard
+
+"eslint-visitor-keys@npm:^1.1.0, eslint-visitor-keys@npm:^1.3.0":
+  version: 1.3.0
+  resolution: "eslint-visitor-keys@npm:1.3.0"
+  checksum: 37a19b712f42f4c9027e8ba98c2b06031c17e0c0a4c696cd429bd9ee04eb43889c446f2cd545e1ff51bef9593fcec94ecd2c2ef89129fcbbf3adadbef520376a
+  languageName: node
+  linkType: hard
+
+"eslint-visitor-keys@npm:^2.0.0":
+  version: 2.1.0
+  resolution: "eslint-visitor-keys@npm:2.1.0"
+  checksum: e3081d7dd2611a35f0388bbdc2f5da60b3a3c5b8b6e928daffff7391146b434d691577aa95064c8b7faad0b8a680266bcda0a42439c18c717b80e6718d7e267d
+  languageName: node
+  linkType: hard
+
+"eslint-visitor-keys@npm:^3.3.0":
+  version: 3.3.0
+  resolution: "eslint-visitor-keys@npm:3.3.0"
+  checksum: d59e68a7c5a6d0146526b0eec16ce87fbf97fe46b8281e0d41384224375c4e52f5ffb9e16d48f4ea50785cde93f766b0c898e31ab89978d88b0e1720fbfb7808
+  languageName: node
+  linkType: hard
+
+"eslint@npm:7.32.0, eslint@npm:^7.23.0, eslint@npm:^7.32.0":
+  version: 7.32.0
+  resolution: "eslint@npm:7.32.0"
+  dependencies:
+    "@babel/code-frame": 7.12.11
+    "@eslint/eslintrc": ^0.4.3
+    "@humanwhocodes/config-array": ^0.5.0
+    ajv: ^6.10.0
+    chalk: ^4.0.0
+    cross-spawn: ^7.0.2
+    debug: ^4.0.1
+    doctrine: ^3.0.0
+    enquirer: ^2.3.5
+    escape-string-regexp: ^4.0.0
+    eslint-scope: ^5.1.1
+    eslint-utils: ^2.1.0
+    eslint-visitor-keys: ^2.0.0
+    espree: ^7.3.1
+    esquery: ^1.4.0
+    esutils: ^2.0.2
+    fast-deep-equal: ^3.1.3
+    file-entry-cache: ^6.0.1
+    functional-red-black-tree: ^1.0.1
+    glob-parent: ^5.1.2
+    globals: ^13.6.0
+    ignore: ^4.0.6
+    import-fresh: ^3.0.0
+    imurmurhash: ^0.1.4
+    is-glob: ^4.0.0
+    js-yaml: ^3.13.1
+    json-stable-stringify-without-jsonify: ^1.0.1
+    levn: ^0.4.1
+    lodash.merge: ^4.6.2
+    minimatch: ^3.0.4
+    natural-compare: ^1.4.0
+    optionator: ^0.9.1
+    progress: ^2.0.0
+    regexpp: ^3.1.0
+    semver: ^7.2.1
+    strip-ansi: ^6.0.0
+    strip-json-comments: ^3.1.0
+    table: ^6.0.9
+    text-table: ^0.2.0
+    v8-compile-cache: ^2.0.3
+  bin:
+    eslint: bin/eslint.js
+  checksum: cc85af9985a3a11085c011f3d27abe8111006d34cc274291b3c4d7bea51a4e2ff6135780249becd919ba7f6d6d1ecc38a6b73dacb6a7be08d38453b344dc8d37
+  languageName: node
+  linkType: hard
+
+"espree@npm:^7.3.0, espree@npm:^7.3.1":
+  version: 7.3.1
+  resolution: "espree@npm:7.3.1"
+  dependencies:
+    acorn: ^7.4.0
+    acorn-jsx: ^5.3.1
+    eslint-visitor-keys: ^1.3.0
+  checksum: aa9b50dcce883449af2e23bc2b8d9abb77118f96f4cb313935d6b220f77137eaef7724a83c3f6243b96bc0e4ab14766198e60818caad99f9519ae5a336a39b45
+  languageName: node
+  linkType: hard
+
+"esprima@npm:^4.0.0":
+  version: 4.0.1
+  resolution: "esprima@npm:4.0.1"
+  bin:
+    esparse: ./bin/esparse.js
+    esvalidate: ./bin/esvalidate.js
+  checksum: b45bc805a613dbea2835278c306b91aff6173c8d034223fa81498c77dcbce3b2931bf6006db816f62eacd9fd4ea975dfd85a5b7f3c6402cfd050d4ca3c13a628
+  languageName: node
+  linkType: hard
+
+"esquery@npm:^1.4.0":
+  version: 1.4.0
+  resolution: "esquery@npm:1.4.0"
+  dependencies:
+    estraverse: ^5.1.0
+  checksum: a0807e17abd7fbe5fbd4fab673038d6d8a50675cdae6b04fbaa520c34581be0c5fa24582990e8acd8854f671dd291c78bb2efb9e0ed5b62f33bac4f9cf820210
+  languageName: node
+  linkType: hard
+
+"esrecurse@npm:^4.3.0":
+  version: 4.3.0
+  resolution: "esrecurse@npm:4.3.0"
+  dependencies:
+    estraverse: ^5.2.0
+  checksum: ebc17b1a33c51cef46fdc28b958994b1dc43cd2e86237515cbc3b4e5d2be6a811b2315d0a1a4d9d340b6d2308b15322f5c8291059521cc5f4802f65e7ec32837
+  languageName: node
+  linkType: hard
+
+"estraverse@npm:^4.1.1":
+  version: 4.3.0
+  resolution: "estraverse@npm:4.3.0"
+  checksum: a6299491f9940bb246124a8d44b7b7a413a8336f5436f9837aaa9330209bd9ee8af7e91a654a3545aee9c54b3308e78ee360cef1d777d37cfef77d2fa33b5827
+  languageName: node
+  linkType: hard
+
+"estraverse@npm:^5.1.0, estraverse@npm:^5.2.0, estraverse@npm:^5.3.0":
+  version: 5.3.0
+  resolution: "estraverse@npm:5.3.0"
+  checksum: 072780882dc8416ad144f8fe199628d2b3e7bbc9989d9ed43795d2c90309a2047e6bc5979d7e2322a341163d22cfad9e21f4110597fe487519697389497e4e2b
+  languageName: node
+  linkType: hard
+
+"esutils@npm:^2.0.2":
+  version: 2.0.3
+  resolution: "esutils@npm:2.0.3"
+  checksum: 22b5b08f74737379a840b8ed2036a5fb35826c709ab000683b092d9054e5c2a82c27818f12604bfc2a9a76b90b6834ef081edbc1c7ae30d1627012e067c6ec87
+  languageName: node
+  linkType: hard
+
+"fast-deep-equal@npm:^3.1.1, fast-deep-equal@npm:^3.1.3":
+  version: 3.1.3
+  resolution: "fast-deep-equal@npm:3.1.3"
+  checksum: e21a9d8d84f53493b6aa15efc9cfd53dd5b714a1f23f67fb5dc8f574af80df889b3bce25dc081887c6d25457cce704e636395333abad896ccdec03abaf1f3f9d
+  languageName: node
+  linkType: hard
+
+"fast-glob@npm:^3.2.9":
+  version: 3.2.12
+  resolution: "fast-glob@npm:3.2.12"
+  dependencies:
+    "@nodelib/fs.stat": ^2.0.2
+    "@nodelib/fs.walk": ^1.2.3
+    glob-parent: ^5.1.2
+    merge2: ^1.3.0
+    micromatch: ^4.0.4
+  checksum: 0b1990f6ce831c7e28c4d505edcdaad8e27e88ab9fa65eedadb730438cfc7cde4910d6c975d6b7b8dc8a73da4773702ebcfcd6e3518e73938bb1383badfe01c2
+  languageName: node
+  linkType: hard
+
+"fast-json-stable-stringify@npm:^2.0.0":
+  version: 2.1.0
+  resolution: "fast-json-stable-stringify@npm:2.1.0"
+  checksum: b191531e36c607977e5b1c47811158733c34ccb3bfde92c44798929e9b4154884378536d26ad90dfecd32e1ffc09c545d23535ad91b3161a27ddbb8ebe0cbecb
+  languageName: node
+  linkType: hard
+
+"fast-levenshtein@npm:^2.0.6":
+  version: 2.0.6
+  resolution: "fast-levenshtein@npm:2.0.6"
+  checksum: 92cfec0a8dfafd9c7a15fba8f2cc29cd0b62b85f056d99ce448bbcd9f708e18ab2764bda4dd5158364f4145a7c72788538994f0d1787b956ef0d1062b0f7c24c
+  languageName: node
+  linkType: hard
+
+"fastq@npm:^1.6.0":
+  version: 1.13.0
+  resolution: "fastq@npm:1.13.0"
+  dependencies:
+    reusify: ^1.0.4
+  checksum: 32cf15c29afe622af187d12fc9cd93e160a0cb7c31a3bb6ace86b7dea3b28e7b72acde89c882663f307b2184e14782c6c664fa315973c03626c7d4bff070bb0b
+  languageName: node
+  linkType: hard
+
+"file-entry-cache@npm:^6.0.1":
+  version: 6.0.1
+  resolution: "file-entry-cache@npm:6.0.1"
+  dependencies:
+    flat-cache: ^3.0.4
+  checksum: f49701feaa6314c8127c3c2f6173cfefff17612f5ed2daaafc6da13b5c91fd43e3b2a58fd0d63f9f94478a501b167615931e7200e31485e320f74a33885a9c74
+  languageName: node
+  linkType: hard
+
+"fill-range@npm:^7.0.1":
+  version: 7.0.1
+  resolution: "fill-range@npm:7.0.1"
+  dependencies:
+    to-regex-range: ^5.0.1
+  checksum: cc283f4e65b504259e64fd969bcf4def4eb08d85565e906b7d36516e87819db52029a76b6363d0f02d0d532f0033c9603b9e2d943d56ee3b0d4f7ad3328ff917
+  languageName: node
+  linkType: hard
+
+"flat-cache@npm:^3.0.4":
+  version: 3.0.4
+  resolution: "flat-cache@npm:3.0.4"
+  dependencies:
+    flatted: ^3.1.0
+    rimraf: ^3.0.2
+  checksum: 4fdd10ecbcbf7d520f9040dd1340eb5dfe951e6f0ecf2252edeec03ee68d989ec8b9a20f4434270e71bcfd57800dc09b3344fca3966b2eb8f613072c7d9a2365
+  languageName: node
+  linkType: hard
+
+"flatted@npm:^3.1.0":
+  version: 3.2.7
+  resolution: "flatted@npm:3.2.7"
+  checksum: 427633049d55bdb80201c68f7eb1cbd533e03eac541f97d3aecab8c5526f12a20ccecaeede08b57503e772c769e7f8680b37e8d482d1e5f8d7e2194687f9ea35
+  languageName: node
+  linkType: hard
+
+"fs.realpath@npm:^1.0.0":
+  version: 1.0.0
+  resolution: "fs.realpath@npm:1.0.0"
+  checksum: 99ddea01a7e75aa276c250a04eedeffe5662bce66c65c07164ad6264f9de18fb21be9433ead460e54cff20e31721c811f4fb5d70591799df5f85dce6d6746fd0
+  languageName: node
+  linkType: hard
+
+"function-bind@npm:^1.1.1":
+  version: 1.1.1
+  resolution: "function-bind@npm:1.1.1"
+  checksum: b32fbaebb3f8ec4969f033073b43f5c8befbb58f1a79e12f1d7490358150359ebd92f49e72ff0144f65f2c48ea2a605bff2d07965f548f6474fd8efd95bf361a
+  languageName: node
+  linkType: hard
+
+"function.prototype.name@npm:^1.1.5":
+  version: 1.1.5
+  resolution: "function.prototype.name@npm:1.1.5"
+  dependencies:
+    call-bind: ^1.0.2
+    define-properties: ^1.1.3
+    es-abstract: ^1.19.0
+    functions-have-names: ^1.2.2
+  checksum: acd21d733a9b649c2c442f067567743214af5fa248dbeee69d8278ce7df3329ea5abac572be9f7470b4ec1cd4d8f1040e3c5caccf98ebf2bf861a0deab735c27
+  languageName: node
+  linkType: hard
+
+"functional-red-black-tree@npm:^1.0.1":
+  version: 1.0.1
+  resolution: "functional-red-black-tree@npm:1.0.1"
+  checksum: ca6c170f37640e2d94297da8bb4bf27a1d12bea3e00e6a3e007fd7aa32e37e000f5772acf941b4e4f3cf1c95c3752033d0c509af157ad8f526e7f00723b9eb9f
+  languageName: node
+  linkType: hard
+
+"functions-have-names@npm:^1.2.2":
+  version: 1.2.3
+  resolution: "functions-have-names@npm:1.2.3"
+  checksum: c3f1f5ba20f4e962efb71344ce0a40722163e85bee2101ce25f88214e78182d2d2476aa85ef37950c579eb6cf6ee811c17b3101bb84004bb75655f3e33f3fdb5
+  languageName: node
+  linkType: hard
+
+"gensync@npm:^1.0.0-beta.2":
+  version: 1.0.0-beta.2
+  resolution: "gensync@npm:1.0.0-beta.2"
+  checksum: a7437e58c6be12aa6c90f7730eac7fa9833dc78872b4ad2963d2031b00a3367a93f98aec75f9aaac7220848e4026d67a8655e870b24f20a543d103c0d65952ec
+  languageName: node
+  linkType: hard
+
+"get-intrinsic@npm:^1.0.2, get-intrinsic@npm:^1.1.0, get-intrinsic@npm:^1.1.1, get-intrinsic@npm:^1.1.2":
+  version: 1.1.3
+  resolution: "get-intrinsic@npm:1.1.3"
+  dependencies:
+    function-bind: ^1.1.1
+    has: ^1.0.3
+    has-symbols: ^1.0.3
+  checksum: 152d79e87251d536cf880ba75cfc3d6c6c50e12b3a64e1ea960e73a3752b47c69f46034456eae1b0894359ce3bc64c55c186f2811f8a788b75b638b06fab228a
+  languageName: node
+  linkType: hard
+
+"get-symbol-description@npm:^1.0.0":
+  version: 1.0.0
+  resolution: "get-symbol-description@npm:1.0.0"
+  dependencies:
+    call-bind: ^1.0.2
+    get-intrinsic: ^1.1.1
+  checksum: 9ceff8fe968f9270a37a1f73bf3f1f7bda69ca80f4f80850670e0e7b9444ff99323f7ac52f96567f8b5f5fbe7ac717a0d81d3407c7313e82810c6199446a5247
+  languageName: node
+  linkType: hard
+
+"glob-parent@npm:^5.1.2":
+  version: 5.1.2
+  resolution: "glob-parent@npm:5.1.2"
+  dependencies:
+    is-glob: ^4.0.1
+  checksum: f4f2bfe2425296e8a47e36864e4f42be38a996db40420fe434565e4480e3322f18eb37589617a98640c5dc8fdec1a387007ee18dbb1f3f5553409c34d17f425e
+  languageName: node
+  linkType: hard
+
+"glob@npm:7.1.7":
+  version: 7.1.7
+  resolution: "glob@npm:7.1.7"
+  dependencies:
+    fs.realpath: ^1.0.0
+    inflight: ^1.0.4
+    inherits: 2
+    minimatch: ^3.0.4
+    once: ^1.3.0
+    path-is-absolute: ^1.0.0
+  checksum: b61f48973bbdcf5159997b0874a2165db572b368b931135832599875919c237fc05c12984e38fe828e69aa8a921eb0e8a4997266211c517c9cfaae8a93988bb8
+  languageName: node
+  linkType: hard
+
+"glob@npm:^7.1.3, glob@npm:^7.2.0":
+  version: 7.2.3
+  resolution: "glob@npm:7.2.3"
+  dependencies:
+    fs.realpath: ^1.0.0
+    inflight: ^1.0.4
+    inherits: 2
+    minimatch: ^3.1.1
+    once: ^1.3.0
+    path-is-absolute: ^1.0.0
+  checksum: 29452e97b38fa704dabb1d1045350fb2467cf0277e155aa9ff7077e90ad81d1ea9d53d3ee63bd37c05b09a065e90f16aec4a65f5b8de401d1dac40bc5605d133
+  languageName: node
+  linkType: hard
+
+"globals@npm:^11.1.0":
+  version: 11.12.0
+  resolution: "globals@npm:11.12.0"
+  checksum: 67051a45eca3db904aee189dfc7cd53c20c7d881679c93f6146ddd4c9f4ab2268e68a919df740d39c71f4445d2b38ee360fc234428baea1dbdfe68bbcb46979e
+  languageName: node
+  linkType: hard
+
+"globals@npm:^13.6.0, globals@npm:^13.9.0":
+  version: 13.17.0
+  resolution: "globals@npm:13.17.0"
+  dependencies:
+    type-fest: ^0.20.2
+  checksum: fbaf4112e59b92c9f5575e85ce65e9e17c0b82711196ec5f58beb08599bbd92fd72703d6dfc9b080381fd35b644e1b11dcf25b38cc2341ec21df942594cbc8ce
+  languageName: node
+  linkType: hard
+
+"globby@npm:^11.1.0":
+  version: 11.1.0
+  resolution: "globby@npm:11.1.0"
+  dependencies:
+    array-union: ^2.1.0
+    dir-glob: ^3.0.1
+    fast-glob: ^3.2.9
+    ignore: ^5.2.0
+    merge2: ^1.4.1
+    slash: ^3.0.0
+  checksum: b4be8885e0cfa018fc783792942d53926c35c50b3aefd3fdcfb9d22c627639dc26bd2327a40a0b74b074100ce95bb7187bfeae2f236856aa3de183af7a02aea6
+  languageName: node
+  linkType: hard
+
+"graceful-fs@npm:^4.2.4":
+  version: 4.2.10
+  resolution: "graceful-fs@npm:4.2.10"
+  checksum: 3f109d70ae123951905d85032ebeae3c2a5a7a997430df00ea30df0e3a6c60cf6689b109654d6fdacd28810a053348c4d14642da1d075049e6be1ba5216218da
+  languageName: node
+  linkType: hard
+
+"has-bigints@npm:^1.0.1, has-bigints@npm:^1.0.2":
+  version: 1.0.2
+  resolution: "has-bigints@npm:1.0.2"
+  checksum: 390e31e7be7e5c6fe68b81babb73dfc35d413604d7ee5f56da101417027a4b4ce6a27e46eff97ad040c835b5d228676eae99a9b5c3bc0e23c8e81a49241ff45b
+  languageName: node
+  linkType: hard
+
+"has-flag@npm:^3.0.0":
+  version: 3.0.0
+  resolution: "has-flag@npm:3.0.0"
+  checksum: 4a15638b454bf086c8148979aae044dd6e39d63904cd452d970374fa6a87623423da485dfb814e7be882e05c096a7ccf1ebd48e7e7501d0208d8384ff4dea73b
+  languageName: node
+  linkType: hard
+
+"has-flag@npm:^4.0.0":
+  version: 4.0.0
+  resolution: "has-flag@npm:4.0.0"
+  checksum: 261a1357037ead75e338156b1f9452c016a37dcd3283a972a30d9e4a87441ba372c8b81f818cd0fbcd9c0354b4ae7e18b9e1afa1971164aef6d18c2b6095a8ad
+  languageName: node
+  linkType: hard
+
+"has-property-descriptors@npm:^1.0.0":
+  version: 1.0.0
+  resolution: "has-property-descriptors@npm:1.0.0"
+  dependencies:
+    get-intrinsic: ^1.1.1
+  checksum: a6d3f0a266d0294d972e354782e872e2fe1b6495b321e6ef678c9b7a06a40408a6891817350c62e752adced73a94ac903c54734fee05bf65b1905ee1368194bb
+  languageName: node
+  linkType: hard
+
+"has-symbols@npm:^1.0.2, has-symbols@npm:^1.0.3":
+  version: 1.0.3
+  resolution: "has-symbols@npm:1.0.3"
+  checksum: a054c40c631c0d5741a8285010a0777ea0c068f99ed43e5d6eb12972da223f8af553a455132fdb0801bdcfa0e0f443c0c03a68d8555aa529b3144b446c3f2410
+  languageName: node
+  linkType: hard
+
+"has-tostringtag@npm:^1.0.0":
+  version: 1.0.0
+  resolution: "has-tostringtag@npm:1.0.0"
+  dependencies:
+    has-symbols: ^1.0.2
+  checksum: cc12eb28cb6ae22369ebaad3a8ab0799ed61270991be88f208d508076a1e99abe4198c965935ce85ea90b60c94ddda73693b0920b58e7ead048b4a391b502c1c
+  languageName: node
+  linkType: hard
+
+"has@npm:^1.0.3":
+  version: 1.0.3
+  resolution: "has@npm:1.0.3"
+  dependencies:
+    function-bind: ^1.1.1
+  checksum: b9ad53d53be4af90ce5d1c38331e712522417d017d5ef1ebd0507e07c2fbad8686fffb8e12ddecd4c39ca9b9b47431afbb975b8abf7f3c3b82c98e9aad052792
+  languageName: node
+  linkType: hard
+
+"ignore@npm:^4.0.6":
+  version: 4.0.6
+  resolution: "ignore@npm:4.0.6"
+  checksum: 248f82e50a430906f9ee7f35e1158e3ec4c3971451dd9f99c9bc1548261b4db2b99709f60ac6c6cac9333494384176cc4cc9b07acbe42d52ac6a09cad734d800
+  languageName: node
+  linkType: hard
+
+"ignore@npm:^5.2.0":
+  version: 5.2.0
+  resolution: "ignore@npm:5.2.0"
+  checksum: 6b1f926792d614f64c6c83da3a1f9c83f6196c2839aa41e1e32dd7b8d174cef2e329d75caabb62cb61ce9dc432f75e67d07d122a037312db7caa73166a1bdb77
+  languageName: node
+  linkType: hard
+
+"import-fresh@npm:^3.0.0, import-fresh@npm:^3.2.1":
+  version: 3.3.0
+  resolution: "import-fresh@npm:3.3.0"
+  dependencies:
+    parent-module: ^1.0.0
+    resolve-from: ^4.0.0
+  checksum: 2cacfad06e652b1edc50be650f7ec3be08c5e5a6f6d12d035c440a42a8cc028e60a5b99ca08a77ab4d6b1346da7d971915828f33cdab730d3d42f08242d09baa
+  languageName: node
+  linkType: hard
+
+"imurmurhash@npm:^0.1.4":
+  version: 0.1.4
+  resolution: "imurmurhash@npm:0.1.4"
+  checksum: 7cae75c8cd9a50f57dadd77482359f659eaebac0319dd9368bcd1714f55e65badd6929ca58569da2b6494ef13fdd5598cd700b1eba23f8b79c5f19d195a3ecf7
+  languageName: node
+  linkType: hard
+
+"inflight@npm:^1.0.4":
+  version: 1.0.6
+  resolution: "inflight@npm:1.0.6"
+  dependencies:
+    once: ^1.3.0
+    wrappy: 1
+  checksum: f4f76aa072ce19fae87ce1ef7d221e709afb59d445e05d47fba710e85470923a75de35bfae47da6de1b18afc3ce83d70facf44cfb0aff89f0a3f45c0a0244dfd
+  languageName: node
+  linkType: hard
+
+"inherits@npm:2":
+  version: 2.0.4
+  resolution: "inherits@npm:2.0.4"
+  checksum: 4a48a733847879d6cf6691860a6b1e3f0f4754176e4d71494c41f3475553768b10f84b5ce1d40fbd0e34e6bfbb864ee35858ad4dd2cf31e02fc4a154b724d7f1
+  languageName: node
+  linkType: hard
+
+"internal-slot@npm:^1.0.3":
+  version: 1.0.3
+  resolution: "internal-slot@npm:1.0.3"
+  dependencies:
+    get-intrinsic: ^1.1.0
+    has: ^1.0.3
+    side-channel: ^1.0.4
+  checksum: 1944f92e981e47aebc98a88ff0db579fd90543d937806104d0b96557b10c1f170c51fb777b97740a8b6ddeec585fca8c39ae99fd08a8e058dfc8ab70937238bf
+  languageName: node
+  linkType: hard
+
+"is-bigint@npm:^1.0.1":
+  version: 1.0.4
+  resolution: "is-bigint@npm:1.0.4"
+  dependencies:
+    has-bigints: ^1.0.1
+  checksum: c56edfe09b1154f8668e53ebe8252b6f185ee852a50f9b41e8d921cb2bed425652049fbe438723f6cb48a63ca1aa051e948e7e401e093477c99c84eba244f666
+  languageName: node
+  linkType: hard
+
+"is-boolean-object@npm:^1.1.0":
+  version: 1.1.2
+  resolution: "is-boolean-object@npm:1.1.2"
+  dependencies:
+    call-bind: ^1.0.2
+    has-tostringtag: ^1.0.0
+  checksum: c03b23dbaacadc18940defb12c1c0e3aaece7553ef58b162a0f6bba0c2a7e1551b59f365b91e00d2dbac0522392d576ef322628cb1d036a0fe51eb466db67222
+  languageName: node
+  linkType: hard
+
+"is-callable@npm:^1.1.4, is-callable@npm:^1.2.4":
+  version: 1.2.6
+  resolution: "is-callable@npm:1.2.6"
+  checksum: 7667d6a6be66df00741cfa18c657877c46a00139ea7ea7765251e9db0182745c9ee173506941a329d6914e34e59e9cc80029fb3f68bbf8c22a6c155ee6ea77b3
+  languageName: node
+  linkType: hard
+
+"is-core-module@npm:^2.8.1, is-core-module@npm:^2.9.0":
+  version: 2.10.0
+  resolution: "is-core-module@npm:2.10.0"
+  dependencies:
+    has: ^1.0.3
+  checksum: 0f3f77811f430af3256fa7bbc806f9639534b140f8ee69476f632c3e1eb4e28a38be0b9d1b8ecf596179c841b53576129279df95e7051d694dac4ceb6f967593
+  languageName: node
+  linkType: hard
+
+"is-date-object@npm:^1.0.1":
+  version: 1.0.5
+  resolution: "is-date-object@npm:1.0.5"
+  dependencies:
+    has-tostringtag: ^1.0.0
+  checksum: baa9077cdf15eb7b58c79398604ca57379b2fc4cf9aa7a9b9e295278648f628c9b201400c01c5e0f7afae56507d741185730307cbe7cad3b9f90a77e5ee342fc
+  languageName: node
+  linkType: hard
+
+"is-extglob@npm:^2.1.1":
+  version: 2.1.1
+  resolution: "is-extglob@npm:2.1.1"
+  checksum: df033653d06d0eb567461e58a7a8c9f940bd8c22274b94bf7671ab36df5719791aae15eef6d83bbb5e23283967f2f984b8914559d4449efda578c775c4be6f85
+  languageName: node
+  linkType: hard
+
+"is-fullwidth-code-point@npm:^3.0.0":
+  version: 3.0.0
+  resolution: "is-fullwidth-code-point@npm:3.0.0"
+  checksum: 44a30c29457c7fb8f00297bce733f0a64cd22eca270f83e58c105e0d015e45c019491a4ab2faef91ab51d4738c670daff901c799f6a700e27f7314029e99e348
+  languageName: node
+  linkType: hard
+
+"is-glob@npm:^4.0.0, is-glob@npm:^4.0.1, is-glob@npm:^4.0.3":
+  version: 4.0.3
+  resolution: "is-glob@npm:4.0.3"
+  dependencies:
+    is-extglob: ^2.1.1
+  checksum: d381c1319fcb69d341cc6e6c7cd588e17cd94722d9a32dbd60660b993c4fb7d0f19438674e68dfec686d09b7c73139c9166b47597f846af387450224a8101ab4
+  languageName: node
+  linkType: hard
+
+"is-negative-zero@npm:^2.0.2":
+  version: 2.0.2
+  resolution: "is-negative-zero@npm:2.0.2"
+  checksum: f3232194c47a549da60c3d509c9a09be442507616b69454716692e37ae9f37c4dea264fb208ad0c9f3efd15a796a46b79df07c7e53c6227c32170608b809149a
+  languageName: node
+  linkType: hard
+
+"is-number-object@npm:^1.0.4":
+  version: 1.0.7
+  resolution: "is-number-object@npm:1.0.7"
+  dependencies:
+    has-tostringtag: ^1.0.0
+  checksum: d1e8d01bb0a7134c74649c4e62da0c6118a0bfc6771ea3c560914d52a627873e6920dd0fd0ebc0e12ad2ff4687eac4c308f7e80320b973b2c8a2c8f97a7524f7
+  languageName: node
+  linkType: hard
+
+"is-number@npm:^7.0.0":
+  version: 7.0.0
+  resolution: "is-number@npm:7.0.0"
+  checksum: 456ac6f8e0f3111ed34668a624e45315201dff921e5ac181f8ec24923b99e9f32ca1a194912dc79d539c97d33dba17dc635202ff0b2cf98326f608323276d27a
+  languageName: node
+  linkType: hard
+
+"is-regex@npm:^1.1.4":
+  version: 1.1.4
+  resolution: "is-regex@npm:1.1.4"
+  dependencies:
+    call-bind: ^1.0.2
+    has-tostringtag: ^1.0.0
+  checksum: 362399b33535bc8f386d96c45c9feb04cf7f8b41c182f54174c1a45c9abbbe5e31290bbad09a458583ff6bf3b2048672cdb1881b13289569a7c548370856a652
+  languageName: node
+  linkType: hard
+
+"is-shared-array-buffer@npm:^1.0.2":
+  version: 1.0.2
+  resolution: "is-shared-array-buffer@npm:1.0.2"
+  dependencies:
+    call-bind: ^1.0.2
+  checksum: 9508929cf14fdc1afc9d61d723c6e8d34f5e117f0bffda4d97e7a5d88c3a8681f633a74f8e3ad1fe92d5113f9b921dc5ca44356492079612f9a247efbce7032a
+  languageName: node
+  linkType: hard
+
+"is-string@npm:^1.0.5, is-string@npm:^1.0.7":
+  version: 1.0.7
+  resolution: "is-string@npm:1.0.7"
+  dependencies:
+    has-tostringtag: ^1.0.0
+  checksum: 323b3d04622f78d45077cf89aab783b2f49d24dc641aa89b5ad1a72114cfeff2585efc8c12ef42466dff32bde93d839ad321b26884cf75e5a7892a938b089989
+  languageName: node
+  linkType: hard
+
+"is-symbol@npm:^1.0.2, is-symbol@npm:^1.0.3":
+  version: 1.0.4
+  resolution: "is-symbol@npm:1.0.4"
+  dependencies:
+    has-symbols: ^1.0.2
+  checksum: 92805812ef590738d9de49d677cd17dfd486794773fb6fa0032d16452af46e9b91bb43ffe82c983570f015b37136f4b53b28b8523bfb10b0ece7a66c31a54510
+  languageName: node
+  linkType: hard
+
+"is-weakref@npm:^1.0.2":
+  version: 1.0.2
+  resolution: "is-weakref@npm:1.0.2"
+  dependencies:
+    call-bind: ^1.0.2
+  checksum: 95bd9a57cdcb58c63b1c401c60a474b0f45b94719c30f548c891860f051bc2231575c290a6b420c6bc6e7ed99459d424c652bd5bf9a1d5259505dc35b4bf83de
+  languageName: node
+  linkType: hard
+
+"isexe@npm:^2.0.0":
+  version: 2.0.0
+  resolution: "isexe@npm:2.0.0"
+  checksum: 26bf6c5480dda5161c820c5b5c751ae1e766c587b1f951ea3fcfc973bafb7831ae5b54a31a69bd670220e42e99ec154475025a468eae58ea262f813fdc8d1c62
+  languageName: node
+  linkType: hard
+
+"js-tokens@npm:^3.0.0 || ^4.0.0, js-tokens@npm:^4.0.0":
+  version: 4.0.0
+  resolution: "js-tokens@npm:4.0.0"
+  checksum: 8a95213a5a77deb6cbe94d86340e8d9ace2b93bc367790b260101d2f36a2eaf4e4e22d9fa9cf459b38af3a32fb4190e638024cf82ec95ef708680e405ea7cc78
+  languageName: node
+  linkType: hard
+
+"js-yaml@npm:^3.13.1":
+  version: 3.14.1
+  resolution: "js-yaml@npm:3.14.1"
+  dependencies:
+    argparse: ^1.0.7
+    esprima: ^4.0.0
+  bin:
+    js-yaml: bin/js-yaml.js
+  checksum: bef146085f472d44dee30ec34e5cf36bf89164f5d585435a3d3da89e52622dff0b188a580e4ad091c3341889e14cb88cac6e4deb16dc5b1e9623bb0601fc255c
+  languageName: node
+  linkType: hard
+
+"jsesc@npm:^2.5.1":
+  version: 2.5.2
+  resolution: "jsesc@npm:2.5.2"
+  bin:
+    jsesc: bin/jsesc
+  checksum: 4dc190771129e12023f729ce20e1e0bfceac84d73a85bc3119f7f938843fe25a4aeccb54b6494dce26fcf263d815f5f31acdefac7cc9329efb8422a4f4d9fa9d
+  languageName: node
+  linkType: hard
+
+"json-schema-traverse@npm:^0.4.1":
+  version: 0.4.1
+  resolution: "json-schema-traverse@npm:0.4.1"
+  checksum: 7486074d3ba247769fda17d5181b345c9fb7d12e0da98b22d1d71a5db9698d8b4bd900a3ec1a4ffdd60846fc2556274a5c894d0c48795f14cb03aeae7b55260b
+  languageName: node
+  linkType: hard
+
+"json-schema-traverse@npm:^1.0.0":
+  version: 1.0.0
+  resolution: "json-schema-traverse@npm:1.0.0"
+  checksum: 02f2f466cdb0362558b2f1fd5e15cce82ef55d60cd7f8fa828cf35ba74330f8d767fcae5c5c2adb7851fa811766c694b9405810879bc4e1ddd78a7c0e03658ad
+  languageName: node
+  linkType: hard
+
+"json-stable-stringify-without-jsonify@npm:^1.0.1":
+  version: 1.0.1
+  resolution: "json-stable-stringify-without-jsonify@npm:1.0.1"
+  checksum: cff44156ddce9c67c44386ad5cddf91925fe06b1d217f2da9c4910d01f358c6e3989c4d5a02683c7a5667f9727ff05831f7aa8ae66c8ff691c556f0884d49215
+  languageName: node
+  linkType: hard
+
+"json5@npm:^1.0.1":
+  version: 1.0.1
+  resolution: "json5@npm:1.0.1"
+  dependencies:
+    minimist: ^1.2.0
+  bin:
+    json5: lib/cli.js
+  checksum: e76ea23dbb8fc1348c143da628134a98adf4c5a4e8ea2adaa74a80c455fc2cdf0e2e13e6398ef819bfe92306b610ebb2002668ed9fc1af386d593691ef346fc3
+  languageName: node
+  linkType: hard
+
+"json5@npm:^2.2.1":
+  version: 2.2.1
+  resolution: "json5@npm:2.2.1"
+  bin:
+    json5: lib/cli.js
+  checksum: 74b8a23b102a6f2bf2d224797ae553a75488b5adbaee9c9b6e5ab8b510a2fc6e38f876d4c77dea672d4014a44b2399e15f2051ac2b37b87f74c0c7602003543b
+  languageName: node
+  linkType: hard
+
+"jsx-ast-utils@npm:^2.4.1 || ^3.0.0, jsx-ast-utils@npm:^3.3.2":
+  version: 3.3.3
+  resolution: "jsx-ast-utils@npm:3.3.3"
+  dependencies:
+    array-includes: ^3.1.5
+    object.assign: ^4.1.3
+  checksum: a2ed78cac49a0f0c4be8b1eafe3c5257a1411341d8e7f1ac740debae003de04e5f6372bfcfbd9d082e954ffd99aac85bcda85b7c6bc11609992483f4cdc0f745
+  languageName: node
+  linkType: hard
+
+"language-subtag-registry@npm:~0.3.2":
+  version: 0.3.22
+  resolution: "language-subtag-registry@npm:0.3.22"
+  checksum: 8ab70a7e0e055fe977ac16ea4c261faec7205ac43db5e806f72e5b59606939a3b972c4bd1e10e323b35d6ffa97c3e1c4c99f6553069dad2dfdd22020fa3eb56a
+  languageName: node
+  linkType: hard
+
+"language-tags@npm:^1.0.5":
+  version: 1.0.5
+  resolution: "language-tags@npm:1.0.5"
+  dependencies:
+    language-subtag-registry: ~0.3.2
+  checksum: c81b5d8b9f5f9cfd06ee71ada6ddfe1cf83044dd5eeefcd1e420ad491944da8957688db4a0a9bc562df4afdc2783425cbbdfd152c01d93179cf86888903123cf
+  languageName: node
+  linkType: hard
+
+"levn@npm:^0.4.1":
+  version: 0.4.1
+  resolution: "levn@npm:0.4.1"
+  dependencies:
+    prelude-ls: ^1.2.1
+    type-check: ~0.4.0
+  checksum: 12c5021c859bd0f5248561bf139121f0358285ec545ebf48bb3d346820d5c61a4309535c7f387ed7d84361cf821e124ce346c6b7cef8ee09a67c1473b46d0fc4
+  languageName: node
+  linkType: hard
+
+"lodash.merge@npm:^4.6.2":
+  version: 4.6.2
+  resolution: "lodash.merge@npm:4.6.2"
+  checksum: ad580b4bdbb7ca1f7abf7e1bce63a9a0b98e370cf40194b03380a46b4ed799c9573029599caebc1b14e3f24b111aef72b96674a56cfa105e0f5ac70546cdc005
+  languageName: node
+  linkType: hard
+
+"lodash.truncate@npm:^4.4.2":
+  version: 4.4.2
+  resolution: "lodash.truncate@npm:4.4.2"
+  checksum: b463d8a382cfb5f0e71c504dcb6f807a7bd379ff1ea216669aa42c52fc28c54e404bfbd96791aa09e6df0de2c1d7b8f1b7f4b1a61f324d38fe98bc535aeee4f5
+  languageName: node
+  linkType: hard
+
+"lodash@npm:4.17.21":
+  version: 4.17.21
+  resolution: "lodash@npm:4.17.21"
+  checksum: eb835a2e51d381e561e508ce932ea50a8e5a68f4ebdd771ea240d3048244a8d13658acbd502cd4829768c56f2e16bdd4340b9ea141297d472517b83868e677f7
+  languageName: node
+  linkType: hard
+
+"lodash@patch:lodash@npm%3A4.17.21#./.yarn/patches/lodash-npm-4.17.21-6382451519.patch::locator=berry-patch%40workspace%3A.":
+  version: 4.17.21
+  resolution: "lodash@patch:lodash@npm%3A4.17.21#./.yarn/patches/lodash-npm-4.17.21-6382451519.patch::version=4.17.21&hash=2c6e9e&locator=berry-patch%40workspace%3A."
+  checksum: 0f54b5291a5cfa3322cc3cb85716df4e23503535b79a341f12a41231513baaa6285fd9808d9894100dcea8b36bf91644360c4f783db1814719a4e103a04f59f3
+  languageName: node
+  linkType: hard
+
+"loose-envify@npm:^1.1.0, loose-envify@npm:^1.4.0":
+  version: 1.4.0
+  resolution: "loose-envify@npm:1.4.0"
+  dependencies:
+    js-tokens: ^3.0.0 || ^4.0.0
+  bin:
+    loose-envify: cli.js
+  checksum: 6517e24e0cad87ec9888f500c5b5947032cdfe6ef65e1c1936a0c48a524b81e65542c9c3edc91c97d5bddc806ee2a985dbc79be89215d613b1de5db6d1cfe6f4
+  languageName: node
+  linkType: hard
+
+"lru-cache@npm:^6.0.0":
+  version: 6.0.0
+  resolution: "lru-cache@npm:6.0.0"
+  dependencies:
+    yallist: ^4.0.0
+  checksum: f97f499f898f23e4585742138a22f22526254fdba6d75d41a1c2526b3b6cc5747ef59c5612ba7375f42aca4f8461950e925ba08c991ead0651b4918b7c978297
+  languageName: node
+  linkType: hard
+
+"merge2@npm:^1.3.0, merge2@npm:^1.4.1":
+  version: 1.4.1
+  resolution: "merge2@npm:1.4.1"
+  checksum: 7268db63ed5169466540b6fb947aec313200bcf6d40c5ab722c22e242f651994619bcd85601602972d3c85bd2cc45a358a4c61937e9f11a061919a1da569b0c2
+  languageName: node
+  linkType: hard
+
+"micromatch@npm:^4.0.4":
+  version: 4.0.5
+  resolution: "micromatch@npm:4.0.5"
+  dependencies:
+    braces: ^3.0.2
+    picomatch: ^2.3.1
+  checksum: 02a17b671c06e8fefeeb6ef996119c1e597c942e632a21ef589154f23898c9c6a9858526246abb14f8bca6e77734aa9dcf65476fca47cedfb80d9577d52843fc
+  languageName: node
+  linkType: hard
+
+"minimatch@npm:^3.0.4, minimatch@npm:^3.1.1, minimatch@npm:^3.1.2":
+  version: 3.1.2
+  resolution: "minimatch@npm:3.1.2"
+  dependencies:
+    brace-expansion: ^1.1.7
+  checksum: c154e566406683e7bcb746e000b84d74465b3a832c45d59912b9b55cd50dee66e5c4b1e5566dba26154040e51672f9aa450a9aef0c97cfc7336b78b7afb9540a
+  languageName: node
+  linkType: hard
+
+"minimist@npm:^1.2.0, minimist@npm:^1.2.6":
+  version: 1.2.6
+  resolution: "minimist@npm:1.2.6"
+  checksum: d15428cd1e11eb14e1233bcfb88ae07ed7a147de251441d61158619dfb32c4d7e9061d09cab4825fdee18ecd6fce323228c8c47b5ba7cd20af378ca4048fb3fb
+  languageName: node
+  linkType: hard
+
+"ms@npm:2.0.0":
+  version: 2.0.0
+  resolution: "ms@npm:2.0.0"
+  checksum: 0e6a22b8b746d2e0b65a430519934fefd41b6db0682e3477c10f60c76e947c4c0ad06f63ffdf1d78d335f83edee8c0aa928aa66a36c7cd95b69b26f468d527f4
+  languageName: node
+  linkType: hard
+
+"ms@npm:2.1.2":
+  version: 2.1.2
+  resolution: "ms@npm:2.1.2"
+  checksum: 673cdb2c3133eb050c745908d8ce632ed2c02d85640e2edb3ace856a2266a813b30c613569bf3354fdf4ea7d1a1494add3bfa95e2713baa27d0c2c71fc44f58f
+  languageName: node
+  linkType: hard
+
+"ms@npm:^2.1.1":
+  version: 2.1.3
+  resolution: "ms@npm:2.1.3"
+  checksum: aa92de608021b242401676e35cfa5aa42dd70cbdc082b916da7fb925c542173e36bce97ea3e804923fe92c0ad991434e4a38327e15a1b5b5f945d66df615ae6d
+  languageName: node
+  linkType: hard
+
+"nanoid@npm:^3.3.4":
+  version: 3.3.4
+  resolution: "nanoid@npm:3.3.4"
+  bin:
+    nanoid: bin/nanoid.cjs
+  checksum: 2fddd6dee994b7676f008d3ffa4ab16035a754f4bb586c61df5a22cf8c8c94017aadd360368f47d653829e0569a92b129979152ff97af23a558331e47e37cd9c
+  languageName: node
+  linkType: hard
+
+"natural-compare@npm:^1.4.0":
+  version: 1.4.0
+  resolution: "natural-compare@npm:1.4.0"
+  checksum: 23ad088b08f898fc9b53011d7bb78ec48e79de7627e01ab5518e806033861bef68d5b0cd0e2205c2f36690ac9571ff6bcb05eb777ced2eeda8d4ac5b44592c3d
+  languageName: node
+  linkType: hard
+
+"next-transpile-modules@npm:9.0.0":
+  version: 9.0.0
+  resolution: "next-transpile-modules@npm:9.0.0"
+  dependencies:
+    enhanced-resolve: ^5.7.0
+    escalade: ^3.1.1
+  checksum: 9a5d86d80cedc2404b2b1d5bd4994f2f7bf60e5e20f24e8cc5cfec34da1418b4a439916f37a95ca336bcf6d81094c3647354ac6a0c6737b3df59e62b6380507d
+  languageName: node
+  linkType: hard
+
+"next@npm:12.2.5":
+  version: 12.2.5
+  resolution: "next@npm:12.2.5"
+  dependencies:
+    "@next/env": 12.2.5
+    "@next/swc-android-arm-eabi": 12.2.5
+    "@next/swc-android-arm64": 12.2.5
+    "@next/swc-darwin-arm64": 12.2.5
+    "@next/swc-darwin-x64": 12.2.5
+    "@next/swc-freebsd-x64": 12.2.5
+    "@next/swc-linux-arm-gnueabihf": 12.2.5
+    "@next/swc-linux-arm64-gnu": 12.2.5
+    "@next/swc-linux-arm64-musl": 12.2.5
+    "@next/swc-linux-x64-gnu": 12.2.5
+    "@next/swc-linux-x64-musl": 12.2.5
+    "@next/swc-win32-arm64-msvc": 12.2.5
+    "@next/swc-win32-ia32-msvc": 12.2.5
+    "@next/swc-win32-x64-msvc": 12.2.5
+    "@swc/helpers": 0.4.3
+    caniuse-lite: ^1.0.30001332
+    postcss: 8.4.14
+    styled-jsx: 5.0.4
+    use-sync-external-store: 1.2.0
+  peerDependencies:
+    fibers: ">= 3.1.0"
+    node-sass: ^6.0.0 || ^7.0.0
+    react: ^17.0.2 || ^18.0.0-0
+    react-dom: ^17.0.2 || ^18.0.0-0
+    sass: ^1.3.0
+  dependenciesMeta:
+    "@next/swc-android-arm-eabi":
+      optional: true
+    "@next/swc-android-arm64":
+      optional: true
+    "@next/swc-darwin-arm64":
+      optional: true
+    "@next/swc-darwin-x64":
+      optional: true
+    "@next/swc-freebsd-x64":
+      optional: true
+    "@next/swc-linux-arm-gnueabihf":
+      optional: true
+    "@next/swc-linux-arm64-gnu":
+      optional: true
+    "@next/swc-linux-arm64-musl":
+      optional: true
+    "@next/swc-linux-x64-gnu":
+      optional: true
+    "@next/swc-linux-x64-musl":
+      optional: true
+    "@next/swc-win32-arm64-msvc":
+      optional: true
+    "@next/swc-win32-ia32-msvc":
+      optional: true
+    "@next/swc-win32-x64-msvc":
+      optional: true
+  peerDependenciesMeta:
+    fibers:
+      optional: true
+    node-sass:
+      optional: true
+    sass:
+      optional: true
+  bin:
+    next: dist/bin/next
+  checksum: e8fcbd93d74fda81640fd174a9d380f22db404d3ce0893730db3db806317ae18c86d1dbb502e63e47c92fb21a93812de62639c2f1204330cb569fdac4d3d0573
+  languageName: node
+  linkType: hard
+
+"node-releases@npm:^2.0.6":
+  version: 2.0.6
+  resolution: "node-releases@npm:2.0.6"
+  checksum: e86a926dc9fbb3b41b4c4a89d998afdf140e20a4e8dbe6c0a807f7b2948b42ea97d7fd3ad4868041487b6e9ee98409829c6e4d84a734a4215dff060a7fbeb4bf
+  languageName: node
+  linkType: hard
+
+"object-assign@npm:^4.1.1":
+  version: 4.1.1
+  resolution: "object-assign@npm:4.1.1"
+  checksum: fcc6e4ea8c7fe48abfbb552578b1c53e0d194086e2e6bbbf59e0a536381a292f39943c6e9628af05b5528aa5e3318bb30d6b2e53cadaf5b8fe9e12c4b69af23f
+  languageName: node
+  linkType: hard
+
+"object-inspect@npm:^1.12.2, object-inspect@npm:^1.9.0":
+  version: 1.12.2
+  resolution: "object-inspect@npm:1.12.2"
+  checksum: a534fc1b8534284ed71f25ce3a496013b7ea030f3d1b77118f6b7b1713829262be9e6243acbcb3ef8c626e2b64186112cb7f6db74e37b2789b9c789ca23048b2
+  languageName: node
+  linkType: hard
+
+"object-keys@npm:^1.1.1":
+  version: 1.1.1
+  resolution: "object-keys@npm:1.1.1"
+  checksum: b363c5e7644b1e1b04aa507e88dcb8e3a2f52b6ffd0ea801e4c7a62d5aa559affe21c55a07fd4b1fd55fc03a33c610d73426664b20032405d7b92a1414c34d6a
+  languageName: node
+  linkType: hard
+
+"object.assign@npm:^4.1.3, object.assign@npm:^4.1.4":
+  version: 4.1.4
+  resolution: "object.assign@npm:4.1.4"
+  dependencies:
+    call-bind: ^1.0.2
+    define-properties: ^1.1.4
+    has-symbols: ^1.0.3
+    object-keys: ^1.1.1
+  checksum: 76cab513a5999acbfe0ff355f15a6a125e71805fcf53de4e9d4e082e1989bdb81d1e329291e1e4e0ae7719f0e4ef80e88fb2d367ae60500d79d25a6224ac8864
+  languageName: node
+  linkType: hard
+
+"object.entries@npm:^1.1.5":
+  version: 1.1.5
+  resolution: "object.entries@npm:1.1.5"
+  dependencies:
+    call-bind: ^1.0.2
+    define-properties: ^1.1.3
+    es-abstract: ^1.19.1
+  checksum: d658696f74fd222060d8428d2a9fda2ce736b700cb06f6bdf4a16a1892d145afb746f453502b2fa55d1dca8ead6f14ddbcf66c545df45adadea757a6c4cd86c7
+  languageName: node
+  linkType: hard
+
+"object.fromentries@npm:^2.0.5":
+  version: 2.0.5
+  resolution: "object.fromentries@npm:2.0.5"
+  dependencies:
+    call-bind: ^1.0.2
+    define-properties: ^1.1.3
+    es-abstract: ^1.19.1
+  checksum: 61a0b565ded97b76df9e30b569729866e1824cce902f98e90bb106e84f378aea20163366f66dc75c9000e2aad2ed0caf65c6f530cb2abc4c0c0f6c982102db4b
+  languageName: node
+  linkType: hard
+
+"object.hasown@npm:^1.1.1":
+  version: 1.1.1
+  resolution: "object.hasown@npm:1.1.1"
+  dependencies:
+    define-properties: ^1.1.4
+    es-abstract: ^1.19.5
+  checksum: d8ed4907ce57f48b93e3b53c418fd6787bf226a51e8d698c91e39b78e80fe5b124cb6282f6a9d5be21cf9e2c7829ab10206dcc6112b7748860eefe641880c793
+  languageName: node
+  linkType: hard
+
+"object.values@npm:^1.1.5":
+  version: 1.1.5
+  resolution: "object.values@npm:1.1.5"
+  dependencies:
+    call-bind: ^1.0.2
+    define-properties: ^1.1.3
+    es-abstract: ^1.19.1
+  checksum: 0f17e99741ebfbd0fa55ce942f6184743d3070c61bd39221afc929c8422c4907618c8da694c6915bc04a83ab3224260c779ba37fc07bb668bdc5f33b66a902a4
+  languageName: node
+  linkType: hard
+
+"once@npm:^1.3.0":
+  version: 1.4.0
+  resolution: "once@npm:1.4.0"
+  dependencies:
+    wrappy: 1
+  checksum: cd0a88501333edd640d95f0d2700fbde6bff20b3d4d9bdc521bdd31af0656b5706570d6c6afe532045a20bb8dc0849f8332d6f2a416e0ba6d3d3b98806c7db68
+  languageName: node
+  linkType: hard
+
+"optionator@npm:^0.9.1":
+  version: 0.9.1
+  resolution: "optionator@npm:0.9.1"
+  dependencies:
+    deep-is: ^0.1.3
+    fast-levenshtein: ^2.0.6
+    levn: ^0.4.1
+    prelude-ls: ^1.2.1
+    type-check: ^0.4.0
+    word-wrap: ^1.2.3
+  checksum: dbc6fa065604b24ea57d734261914e697bd73b69eff7f18e967e8912aa2a40a19a9f599a507fa805be6c13c24c4eae8c71306c239d517d42d4c041c942f508a0
+  languageName: node
+  linkType: hard
+
+"parent-module@npm:^1.0.0":
+  version: 1.0.1
+  resolution: "parent-module@npm:1.0.1"
+  dependencies:
+    callsites: ^3.0.0
+  checksum: 6ba8b255145cae9470cf5551eb74be2d22281587af787a2626683a6c20fbb464978784661478dd2a3f1dad74d1e802d403e1b03c1a31fab310259eec8ac560ff
+  languageName: node
+  linkType: hard
+
+"path-is-absolute@npm:^1.0.0":
+  version: 1.0.1
+  resolution: "path-is-absolute@npm:1.0.1"
+  checksum: 060840f92cf8effa293bcc1bea81281bd7d363731d214cbe5c227df207c34cd727430f70c6037b5159c8a870b9157cba65e775446b0ab06fd5ecc7e54615a3b8
+  languageName: node
+  linkType: hard
+
+"path-key@npm:^3.1.0":
+  version: 3.1.1
+  resolution: "path-key@npm:3.1.1"
+  checksum: 55cd7a9dd4b343412a8386a743f9c746ef196e57c823d90ca3ab917f90ab9f13dd0ded27252ba49dbdfcab2b091d998bc446f6220cd3cea65db407502a740020
+  languageName: node
+  linkType: hard
+
+"path-parse@npm:^1.0.7":
+  version: 1.0.7
+  resolution: "path-parse@npm:1.0.7"
+  checksum: 49abf3d81115642938a8700ec580da6e830dde670be21893c62f4e10bd7dd4c3742ddc603fe24f898cba7eb0c6bc1777f8d9ac14185d34540c6d4d80cd9cae8a
+  languageName: node
+  linkType: hard
+
+"path-type@npm:^4.0.0":
+  version: 4.0.0
+  resolution: "path-type@npm:4.0.0"
+  checksum: 5b1e2daa247062061325b8fdbfd1fb56dde0a448fb1455453276ea18c60685bdad23a445dc148cf87bc216be1573357509b7d4060494a6fd768c7efad833ee45
+  languageName: node
+  linkType: hard
+
+"picocolors@npm:^1.0.0":
+  version: 1.0.0
+  resolution: "picocolors@npm:1.0.0"
+  checksum: a2e8092dd86c8396bdba9f2b5481032848525b3dc295ce9b57896f931e63fc16f79805144321f72976383fc249584672a75cc18d6777c6b757603f372f745981
+  languageName: node
+  linkType: hard
+
+"picomatch@npm:^2.3.1":
+  version: 2.3.1
+  resolution: "picomatch@npm:2.3.1"
+  checksum: 050c865ce81119c4822c45d3c84f1ced46f93a0126febae20737bd05ca20589c564d6e9226977df859ed5e03dc73f02584a2b0faad36e896936238238b0446cf
+  languageName: node
+  linkType: hard
+
+"postcss@npm:8.4.14":
+  version: 8.4.14
+  resolution: "postcss@npm:8.4.14"
+  dependencies:
+    nanoid: ^3.3.4
+    picocolors: ^1.0.0
+    source-map-js: ^1.0.2
+  checksum: fe58766ff32e4becf65a7d57678995cfd239df6deed2fe0557f038b47c94e4132e7e5f68b5aa820c13adfec32e523b693efaeb65798efb995ce49ccd83953816
+  languageName: node
+  linkType: hard
+
+"prelude-ls@npm:^1.2.1":
+  version: 1.2.1
+  resolution: "prelude-ls@npm:1.2.1"
+  checksum: cd192ec0d0a8e4c6da3bb80e4f62afe336df3f76271ac6deb0e6a36187133b6073a19e9727a1ff108cd8b9982e4768850d413baa71214dd80c7979617dca827a
+  languageName: node
+  linkType: hard
+
+prettier@latest:
+  version: 2.7.1
+  resolution: "prettier@npm:2.7.1"
+  bin:
+    prettier: bin-prettier.js
+  checksum: 55a4409182260866ab31284d929b3cb961e5fdb91fe0d2e099dac92eaecec890f36e524b4c19e6ceae839c99c6d7195817579cdffc8e2c80da0cb794463a748b
+  languageName: node
+  linkType: hard
+
+"progress@npm:^2.0.0":
+  version: 2.0.3
+  resolution: "progress@npm:2.0.3"
+  checksum: f67403fe7b34912148d9252cb7481266a354bd99ce82c835f79070643bb3c6583d10dbcfda4d41e04bbc1d8437e9af0fb1e1f2135727878f5308682a579429b7
+  languageName: node
+  linkType: hard
+
+"prop-types@npm:^15.8.1":
+  version: 15.8.1
+  resolution: "prop-types@npm:15.8.1"
+  dependencies:
+    loose-envify: ^1.4.0
+    object-assign: ^4.1.1
+    react-is: ^16.13.1
+  checksum: c056d3f1c057cb7ff8344c645450e14f088a915d078dcda795041765047fa080d38e5d626560ccaac94a4e16e3aa15f3557c1a9a8d1174530955e992c675e459
+  languageName: node
+  linkType: hard
+
+"punycode@npm:^2.1.0":
+  version: 2.1.1
+  resolution: "punycode@npm:2.1.1"
+  checksum: 823bf443c6dd14f669984dea25757b37993f67e8d94698996064035edd43bed8a5a17a9f12e439c2b35df1078c6bec05a6c86e336209eb1061e8025c481168e8
+  languageName: node
+  linkType: hard
+
+"queue-microtask@npm:^1.2.2":
+  version: 1.2.3
+  resolution: "queue-microtask@npm:1.2.3"
+  checksum: b676f8c040cdc5b12723ad2f91414d267605b26419d5c821ff03befa817ddd10e238d22b25d604920340fd73efd8ba795465a0377c4adf45a4a41e4234e42dc4
+  languageName: node
+  linkType: hard
+
+"react-dom@npm:18.2.0":
+  version: 18.2.0
+  resolution: "react-dom@npm:18.2.0"
+  dependencies:
+    loose-envify: ^1.1.0
+    scheduler: ^0.23.0
+  peerDependencies:
+    react: ^18.2.0
+  checksum: 7d323310bea3a91be2965f9468d552f201b1c27891e45ddc2d6b8f717680c95a75ae0bc1e3f5cf41472446a2589a75aed4483aee8169287909fcd59ad149e8cc
+  languageName: node
+  linkType: hard
+
+"react-is@npm:^16.13.1":
+  version: 16.13.1
+  resolution: "react-is@npm:16.13.1"
+  checksum: f7a19ac3496de32ca9ae12aa030f00f14a3d45374f1ceca0af707c831b2a6098ef0d6bdae51bd437b0a306d7f01d4677fcc8de7c0d331eb47ad0f46130e53c5f
+  languageName: node
+  linkType: hard
+
+"react@npm:18.2.0, react@npm:^18.2.0":
+  version: 18.2.0
+  resolution: "react@npm:18.2.0"
+  dependencies:
+    loose-envify: ^1.1.0
+  checksum: 88e38092da8839b830cda6feef2e8505dec8ace60579e46aa5490fc3dc9bba0bd50336507dc166f43e3afc1c42939c09fe33b25fae889d6f402721dcd78fca1b
+  languageName: node
+  linkType: hard
+
+"regenerator-runtime@npm:^0.13.4":
+  version: 0.13.9
+  resolution: "regenerator-runtime@npm:0.13.9"
+  checksum: 65ed455fe5afd799e2897baf691ca21c2772e1a969d19bb0c4695757c2d96249eb74ee3553ea34a91062b2a676beedf630b4c1551cc6299afb937be1426ec55e
+  languageName: node
+  linkType: hard
+
+"regexp.prototype.flags@npm:^1.4.1, regexp.prototype.flags@npm:^1.4.3":
+  version: 1.4.3
+  resolution: "regexp.prototype.flags@npm:1.4.3"
+  dependencies:
+    call-bind: ^1.0.2
+    define-properties: ^1.1.3
+    functions-have-names: ^1.2.2
+  checksum: 51228bae732592adb3ededd5e15426be25f289e9c4ef15212f4da73f4ec3919b6140806374b8894036a86020d054a8d2657d3fee6bb9b4d35d8939c20030b7a6
+  languageName: node
+  linkType: hard
+
+"regexpp@npm:^3.1.0":
+  version: 3.2.0
+  resolution: "regexpp@npm:3.2.0"
+  checksum: a78dc5c7158ad9ddcfe01aa9144f46e192ddbfa7b263895a70a5c6c73edd9ce85faf7c0430e59ac38839e1734e275b9c3de5c57ee3ab6edc0e0b1bdebefccef8
+  languageName: node
+  linkType: hard
+
+"require-from-string@npm:^2.0.2":
+  version: 2.0.2
+  resolution: "require-from-string@npm:2.0.2"
+  checksum: a03ef6895445f33a4015300c426699bc66b2b044ba7b670aa238610381b56d3f07c686251740d575e22f4c87531ba662d06937508f0f3c0f1ddc04db3130560b
+  languageName: node
+  linkType: hard
+
+"resolve-from@npm:^4.0.0":
+  version: 4.0.0
+  resolution: "resolve-from@npm:4.0.0"
+  checksum: f4ba0b8494846a5066328ad33ef8ac173801a51739eb4d63408c847da9a2e1c1de1e6cbbf72699211f3d13f8fc1325648b169bd15eb7da35688e30a5fb0e4a7f
+  languageName: node
+  linkType: hard
+
+"resolve@npm:^1.20.0, resolve@npm:^1.22.0":
+  version: 1.22.1
+  resolution: "resolve@npm:1.22.1"
+  dependencies:
+    is-core-module: ^2.9.0
+    path-parse: ^1.0.7
+    supports-preserve-symlinks-flag: ^1.0.0
+  bin:
+    resolve: bin/resolve
+  checksum: 07af5fc1e81aa1d866cbc9e9460fbb67318a10fa3c4deadc35c3ad8a898ee9a71a86a65e4755ac3195e0ea0cfbe201eb323ebe655ce90526fd61917313a34e4e
+  languageName: node
+  linkType: hard
+
+"resolve@npm:^2.0.0-next.3":
+  version: 2.0.0-next.4
+  resolution: "resolve@npm:2.0.0-next.4"
+  dependencies:
+    is-core-module: ^2.9.0
+    path-parse: ^1.0.7
+    supports-preserve-symlinks-flag: ^1.0.0
+  bin:
+    resolve: bin/resolve
+  checksum: c438ac9a650f2030fd074219d7f12ceb983b475da2d89ad3d6dd05fbf6b7a0a8cd37d4d10b43cb1f632bc19f22246ab7f36ebda54d84a29bfb2910a0680906d3
+  languageName: node
+  linkType: hard
+
+"resolve@patch:resolve@^1.20.0#~builtin, resolve@patch:resolve@^1.22.0#~builtin":
+  version: 1.22.1
+  resolution: "resolve@patch:resolve@npm%3A1.22.1#~builtin::version=1.22.1&hash=07638b"
+  dependencies:
+    is-core-module: ^2.9.0
+    path-parse: ^1.0.7
+    supports-preserve-symlinks-flag: ^1.0.0
+  bin:
+    resolve: bin/resolve
+  checksum: 5656f4d0bedcf8eb52685c1abdf8fbe73a1603bb1160a24d716e27a57f6cecbe2432ff9c89c2bd57542c3a7b9d14b1882b73bfe2e9d7849c9a4c0b8b39f02b8b
+  languageName: node
+  linkType: hard
+
+"resolve@patch:resolve@^2.0.0-next.3#~builtin":
+  version: 2.0.0-next.4
+  resolution: "resolve@patch:resolve@npm%3A2.0.0-next.4#~builtin::version=2.0.0-next.4&hash=07638b"
+  dependencies:
+    is-core-module: ^2.9.0
+    path-parse: ^1.0.7
+    supports-preserve-symlinks-flag: ^1.0.0
+  bin:
+    resolve: bin/resolve
+  checksum: 4bf9f4f8a458607af90518ff73c67a4bc1a38b5a23fef2bb0ccbd45e8be89820a1639b637b0ba377eb2be9eedfb1739a84cde24fe4cd670c8207d8fea922b011
+  languageName: node
+  linkType: hard
+
+"reusify@npm:^1.0.4":
+  version: 1.0.4
+  resolution: "reusify@npm:1.0.4"
+  checksum: c3076ebcc22a6bc252cb0b9c77561795256c22b757f40c0d8110b1300723f15ec0fc8685e8d4ea6d7666f36c79ccc793b1939c748bf36f18f542744a4e379fcc
+  languageName: node
+  linkType: hard
+
+"rimraf@npm:^3.0.2":
+  version: 3.0.2
+  resolution: "rimraf@npm:3.0.2"
+  dependencies:
+    glob:
^7.1.3 + bin: + rimraf: bin.js + checksum: 87f4164e396f0171b0a3386cc1877a817f572148ee13a7e113b238e48e8a9f2f31d009a92ec38a591ff1567d9662c6b67fd8818a2dbbaed74bc26a87a2a4a9a0 + languageName: node + linkType: hard + +"run-parallel@npm:^1.1.9": + version: 1.2.0 + resolution: "run-parallel@npm:1.2.0" + dependencies: + queue-microtask: ^1.2.2 + checksum: cb4f97ad25a75ebc11a8ef4e33bb962f8af8516bb2001082ceabd8902e15b98f4b84b4f8a9b222e5d57fc3bd1379c483886ed4619367a7680dad65316993021d + languageName: node + linkType: hard + +"safe-buffer@npm:~5.1.1": + version: 5.1.2 + resolution: "safe-buffer@npm:5.1.2" + checksum: f2f1f7943ca44a594893a852894055cf619c1fbcb611237fc39e461ae751187e7baf4dc391a72125e0ac4fb2d8c5c0b3c71529622e6a58f46b960211e704903c + languageName: node + linkType: hard + +"scheduler@npm:^0.23.0": + version: 0.23.0 + resolution: "scheduler@npm:0.23.0" + dependencies: + loose-envify: ^1.1.0 + checksum: d79192eeaa12abef860c195ea45d37cbf2bbf5f66e3c4dcd16f54a7da53b17788a70d109ee3d3dde1a0fd50e6a8fc171f4300356c5aee4fc0171de526bf35f8a + languageName: node + linkType: hard + +"semver@npm:^6.3.0": + version: 6.3.0 + resolution: "semver@npm:6.3.0" + bin: + semver: ./bin/semver.js + checksum: 1b26ecf6db9e8292dd90df4e781d91875c0dcc1b1909e70f5d12959a23c7eebb8f01ea581c00783bbee72ceeaad9505797c381756326073850dc36ed284b21b9 + languageName: node + linkType: hard + +"semver@npm:^7.2.1, semver@npm:^7.3.7": + version: 7.3.7 + resolution: "semver@npm:7.3.7" + dependencies: + lru-cache: ^6.0.0 + bin: + semver: bin/semver.js + checksum: 2fa3e877568cd6ce769c75c211beaed1f9fce80b28338cadd9d0b6c40f2e2862bafd62c19a6cff42f3d54292b7c623277bcab8816a2b5521cf15210d43e75232 + languageName: node + linkType: hard + +"shebang-command@npm:^2.0.0": + version: 2.0.0 + resolution: "shebang-command@npm:2.0.0" + dependencies: + shebang-regex: ^3.0.0 + checksum: 6b52fe87271c12968f6a054e60f6bde5f0f3d2db483a1e5c3e12d657c488a15474121a1d55cd958f6df026a54374ec38a4a963988c213b7570e1d51575cea7fa + languageName: node + linkType: hard + +"shebang-regex@npm:^3.0.0": + version: 3.0.0 + resolution: "shebang-regex@npm:3.0.0" + checksum: 1a2bcae50de99034fcd92ad4212d8e01eedf52c7ec7830eedcf886622804fe36884278f2be8be0ea5fde3fd1c23911643a4e0f726c8685b61871c8908af01222 + languageName: node + linkType: hard + +"side-channel@npm:^1.0.4": + version: 1.0.4 + resolution: "side-channel@npm:1.0.4" + dependencies: + call-bind: ^1.0.0 + get-intrinsic: ^1.0.2 + object-inspect: ^1.9.0 + checksum: 351e41b947079c10bd0858364f32bb3a7379514c399edb64ab3dce683933483fc63fb5e4efe0a15a2e8a7e3c436b6a91736ddb8d8c6591b0460a24bb4a1ee245 + languageName: node + linkType: hard + +"slash@npm:^3.0.0": + version: 3.0.0 + resolution: "slash@npm:3.0.0" + checksum: 94a93fff615f25a999ad4b83c9d5e257a7280c90a32a7cb8b4a87996e4babf322e469c42b7f649fd5796edd8687652f3fb452a86dc97a816f01113183393f11c + languageName: node + linkType: hard + +"slice-ansi@npm:^4.0.0": + version: 4.0.0 + resolution: "slice-ansi@npm:4.0.0" + dependencies: + ansi-styles: ^4.0.0 + astral-regex: ^2.0.0 + is-fullwidth-code-point: ^3.0.0 + checksum: 4a82d7f085b0e1b070e004941ada3c40d3818563ac44766cca4ceadd2080427d337554f9f99a13aaeb3b4a94d9964d9466c807b3d7b7541d1ec37ee32d308756 + languageName: node + linkType: hard + +"source-map-js@npm:^1.0.2": + version: 1.0.2 + resolution: "source-map-js@npm:1.0.2" + checksum: c049a7fc4deb9a7e9b481ae3d424cc793cb4845daa690bc5a05d428bf41bf231ced49b4cf0c9e77f9d42fdb3d20d6187619fc586605f5eabe995a316da8d377c + languageName: node + linkType: hard + +"sprintf-js@npm:~1.0.2": + version: 1.0.3 + 
resolution: "sprintf-js@npm:1.0.3" + checksum: 19d79aec211f09b99ec3099b5b2ae2f6e9cdefe50bc91ac4c69144b6d3928a640bb6ae5b3def70c2e85a2c3d9f5ec2719921e3a59d3ca3ef4b2fd1a4656a0df3 + languageName: node + linkType: hard + +"string-width@npm:^4.2.3": + version: 4.2.3 + resolution: "string-width@npm:4.2.3" + dependencies: + emoji-regex: ^8.0.0 + is-fullwidth-code-point: ^3.0.0 + strip-ansi: ^6.0.1 + checksum: e52c10dc3fbfcd6c3a15f159f54a90024241d0f149cf8aed2982a2d801d2e64df0bf1dc351cf8e95c3319323f9f220c16e740b06faecd53e2462df1d2b5443fb + languageName: node + linkType: hard + +"string.prototype.matchall@npm:^4.0.7": + version: 4.0.7 + resolution: "string.prototype.matchall@npm:4.0.7" + dependencies: + call-bind: ^1.0.2 + define-properties: ^1.1.3 + es-abstract: ^1.19.1 + get-intrinsic: ^1.1.1 + has-symbols: ^1.0.3 + internal-slot: ^1.0.3 + regexp.prototype.flags: ^1.4.1 + side-channel: ^1.0.4 + checksum: fc09f3ccbfb325de0472bcc87a6be0598a7499e0b4a31db5789676155b15754a4cc4bb83924f15fc9ed48934dac7366ee52c8b9bd160bed6fd072c93b489e75c + languageName: node + linkType: hard + +"string.prototype.trimend@npm:^1.0.5": + version: 1.0.5 + resolution: "string.prototype.trimend@npm:1.0.5" + dependencies: + call-bind: ^1.0.2 + define-properties: ^1.1.4 + es-abstract: ^1.19.5 + checksum: d44f543833112f57224e79182debadc9f4f3bf9d48a0414d6f0cbd2a86f2b3e8c0ca1f95c3f8e5b32ae83e91554d79d932fc746b411895f03f93d89ed3dfb6bc + languageName: node + linkType: hard + +"string.prototype.trimstart@npm:^1.0.5": + version: 1.0.5 + resolution: "string.prototype.trimstart@npm:1.0.5" + dependencies: + call-bind: ^1.0.2 + define-properties: ^1.1.4 + es-abstract: ^1.19.5 + checksum: a4857c5399ad709d159a77371eeaa8f9cc284469a0b5e1bfe405de16f1fd4166a8ea6f4180e55032f348d1b679b1599fd4301fbc7a8b72bdb3e795e43f7b1048 + languageName: node + linkType: hard + +"strip-ansi@npm:^6.0.0, strip-ansi@npm:^6.0.1": + version: 6.0.1 + resolution: "strip-ansi@npm:6.0.1" + dependencies: + ansi-regex: ^5.0.1 + checksum: f3cd25890aef3ba6e1a74e20896c21a46f482e93df4a06567cebf2b57edabb15133f1f94e57434e0a958d61186087b1008e89c94875d019910a213181a14fc8c + languageName: node + linkType: hard + +"strip-bom@npm:^3.0.0": + version: 3.0.0 + resolution: "strip-bom@npm:3.0.0" + checksum: 8d50ff27b7ebe5ecc78f1fe1e00fcdff7af014e73cf724b46fb81ef889eeb1015fc5184b64e81a2efe002180f3ba431bdd77e300da5c6685d702780fbf0c8d5b + languageName: node + linkType: hard + +"strip-json-comments@npm:^3.1.0, strip-json-comments@npm:^3.1.1": + version: 3.1.1 + resolution: "strip-json-comments@npm:3.1.1" + checksum: 492f73e27268f9b1c122733f28ecb0e7e8d8a531a6662efbd08e22cccb3f9475e90a1b82cab06a392f6afae6d2de636f977e231296400d0ec5304ba70f166443 + languageName: node + linkType: hard + +"styled-jsx@npm:5.0.4": + version: 5.0.4 + resolution: "styled-jsx@npm:5.0.4" + peerDependencies: + react: ">= 16.8.0 || 17.x.x || ^18.0.0-0" + peerDependenciesMeta: + "@babel/core": + optional: true + babel-plugin-macros: + optional: true + checksum: db7530155626e5eebc9d80ca117ea5aed6219b0a65469196b0b5727550fbe743117d7eea1499d80511ccb312d31f4a1027a58d1f94a83f0986c9acfdcce8bdd1 + languageName: node + linkType: hard + +"supports-color@npm:^5.3.0": + version: 5.5.0 + resolution: "supports-color@npm:5.5.0" + dependencies: + has-flag: ^3.0.0 + checksum: 95f6f4ba5afdf92f495b5a912d4abee8dcba766ae719b975c56c084f5004845f6f5a5f7769f52d53f40e21952a6d87411bafe34af4a01e65f9926002e38e1dac + languageName: node + linkType: hard + +"supports-color@npm:^7.1.0": + version: 7.2.0 + resolution: "supports-color@npm:7.2.0" + dependencies: 
+ has-flag: ^4.0.0 + checksum: 3dda818de06ebbe5b9653e07842d9479f3555ebc77e9a0280caf5a14fb877ffee9ed57007c3b78f5a6324b8dbeec648d9e97a24e2ed9fdb81ddc69ea07100f4a + languageName: node + linkType: hard + +"supports-preserve-symlinks-flag@npm:^1.0.0": + version: 1.0.0 + resolution: "supports-preserve-symlinks-flag@npm:1.0.0" + checksum: 53b1e247e68e05db7b3808b99b892bd36fb096e6fba213a06da7fab22045e97597db425c724f2bbd6c99a3c295e1e73f3e4de78592289f38431049e1277ca0ae + languageName: node + linkType: hard + +"table@npm:^6.0.9": + version: 6.8.0 + resolution: "table@npm:6.8.0" + dependencies: + ajv: ^8.0.1 + lodash.truncate: ^4.4.2 + slice-ansi: ^4.0.0 + string-width: ^4.2.3 + strip-ansi: ^6.0.1 + checksum: 5b07fe462ee03d2e1fac02cbb578efd2e0b55ac07e3d3db2e950aa9570ade5a4a2b8d3c15e9f25c89e4e50b646bc4269934601ee1eef4ca7968ad31960977690 + languageName: node + linkType: hard + +"tapable@npm:^2.2.0": + version: 2.2.1 + resolution: "tapable@npm:2.2.1" + checksum: 3b7a1b4d86fa940aad46d9e73d1e8739335efd4c48322cb37d073eb6f80f5281889bf0320c6d8ffcfa1a0dd5bfdbd0f9d037e252ef972aca595330538aac4d51 + languageName: node + linkType: hard + +"text-table@npm:^0.2.0": + version: 0.2.0 + resolution: "text-table@npm:0.2.0" + checksum: b6937a38c80c7f84d9c11dd75e49d5c44f71d95e810a3250bd1f1797fc7117c57698204adf676b71497acc205d769d65c16ae8fa10afad832ae1322630aef10a + languageName: node + linkType: hard + +"to-fast-properties@npm:^2.0.0": + version: 2.0.0 + resolution: "to-fast-properties@npm:2.0.0" + checksum: be2de62fe58ead94e3e592680052683b1ec986c72d589e7b21e5697f8744cdbf48c266fa72f6c15932894c10187b5f54573a3bcf7da0bfd964d5caf23d436168 + languageName: node + linkType: hard + +"to-regex-range@npm:^5.0.1": + version: 5.0.1 + resolution: "to-regex-range@npm:5.0.1" + dependencies: + is-number: ^7.0.0 + checksum: f76fa01b3d5be85db6a2a143e24df9f60dd047d151062d0ba3df62953f2f697b16fe5dad9b0ac6191c7efc7b1d9dcaa4b768174b7b29da89d4428e64bc0a20ed + languageName: node + linkType: hard + +"tsconfig-paths@npm:^3.14.1": + version: 3.14.1 + resolution: "tsconfig-paths@npm:3.14.1" + dependencies: + "@types/json5": ^0.0.29 + json5: ^1.0.1 + minimist: ^1.2.6 + strip-bom: ^3.0.0 + checksum: 8afa01c673ebb4782ba53d3a12df97fa837ce524f8ad38ee4e2b2fd57f5ac79abc21c574e9e9eb014d93efe7fe8214001b96233b5c6ea75bd1ea82afe17a4c6d + languageName: node + linkType: hard + +"tsconfig@*, tsconfig@workspace:packages/tsconfig": + version: 0.0.0-use.local + resolution: "tsconfig@workspace:packages/tsconfig" + languageName: unknown + linkType: soft + +"tslib@npm:^1.8.1": + version: 1.14.1 + resolution: "tslib@npm:1.14.1" + checksum: dbe628ef87f66691d5d2959b3e41b9ca0045c3ee3c7c7b906cc1e328b39f199bb1ad9e671c39025bd56122ac57dfbf7385a94843b1cc07c60a4db74795829acd + languageName: node + linkType: hard + +"tslib@npm:^2.4.0": + version: 2.4.0 + resolution: "tslib@npm:2.4.0" + checksum: 8c4aa6a3c5a754bf76aefc38026134180c053b7bd2f81338cb5e5ebf96fefa0f417bff221592bf801077f5bf990562f6264fecbc42cd3309b33872cb6fc3b113 + languageName: node + linkType: hard + +"tsutils@npm:^3.21.0": + version: 3.21.0 + resolution: "tsutils@npm:3.21.0" + dependencies: + tslib: ^1.8.1 + peerDependencies: + typescript: ">=2.8.0 || >= 3.2.0-dev || >= 3.3.0-dev || >= 3.4.0-dev || >= 3.5.0-dev || >= 3.6.0-dev || >= 3.6.0-beta || >= 3.7.0-dev || >= 3.7.0-beta" + checksum: 1843f4c1b2e0f975e08c4c21caa4af4f7f65a12ac1b81b3b8489366826259323feb3fc7a243123453d2d1a02314205a7634e048d4a8009921da19f99755cdc48 + languageName: node + linkType: hard + +"turbo-android-arm64@npm:1.4.6": + version: 1.4.6 + resolution: 
"turbo-android-arm64@npm:1.4.6" + conditions: os=android & cpu=arm64 + languageName: node + linkType: hard + +"turbo-darwin-64@npm:1.4.6": + version: 1.4.6 + resolution: "turbo-darwin-64@npm:1.4.6" + conditions: os=darwin & cpu=x64 + languageName: node + linkType: hard + +"turbo-darwin-arm64@npm:1.4.6": + version: 1.4.6 + resolution: "turbo-darwin-arm64@npm:1.4.6" + conditions: os=darwin & cpu=arm64 + languageName: node + linkType: hard + +"turbo-freebsd-64@npm:1.4.6": + version: 1.4.6 + resolution: "turbo-freebsd-64@npm:1.4.6" + conditions: os=freebsd & cpu=x64 + languageName: node + linkType: hard + +"turbo-freebsd-arm64@npm:1.4.6": + version: 1.4.6 + resolution: "turbo-freebsd-arm64@npm:1.4.6" + conditions: os=freebsd & cpu=arm64 + languageName: node + linkType: hard + +"turbo-linux-32@npm:1.4.6": + version: 1.4.6 + resolution: "turbo-linux-32@npm:1.4.6" + conditions: os=linux & cpu=ia32 + languageName: node + linkType: hard + +"turbo-linux-64@npm:1.4.6": + version: 1.4.6 + resolution: "turbo-linux-64@npm:1.4.6" + conditions: os=linux & cpu=x64 + languageName: node + linkType: hard + +"turbo-linux-arm64@npm:1.4.6": + version: 1.4.6 + resolution: "turbo-linux-arm64@npm:1.4.6" + conditions: os=linux & cpu=arm64 + languageName: node + linkType: hard + +"turbo-linux-arm@npm:1.4.6": + version: 1.4.6 + resolution: "turbo-linux-arm@npm:1.4.6" + conditions: os=linux & cpu=arm + languageName: node + linkType: hard + +"turbo-linux-mips64le@npm:1.4.6": + version: 1.4.6 + resolution: "turbo-linux-mips64le@npm:1.4.6" + conditions: os=linux & cpu=mipsel + languageName: node + linkType: hard + +"turbo-linux-ppc64le@npm:1.4.6": + version: 1.4.6 + resolution: "turbo-linux-ppc64le@npm:1.4.6" + conditions: os=linux & cpu=ppc64 + languageName: node + linkType: hard + +"turbo-windows-32@npm:1.4.6": + version: 1.4.6 + resolution: "turbo-windows-32@npm:1.4.6" + conditions: os=win32 & cpu=ia32 + languageName: node + linkType: hard + +"turbo-windows-64@npm:1.4.6": + version: 1.4.6 + resolution: "turbo-windows-64@npm:1.4.6" + conditions: os=win32 & cpu=x64 + languageName: node + linkType: hard + +"turbo-windows-arm64@npm:1.4.6": + version: 1.4.6 + resolution: "turbo-windows-arm64@npm:1.4.6" + conditions: os=win32 & cpu=arm64 + languageName: node + linkType: hard + +turbo@latest: + version: 1.4.6 + resolution: "turbo@npm:1.4.6" + dependencies: + turbo-android-arm64: 1.4.6 + turbo-darwin-64: 1.4.6 + turbo-darwin-arm64: 1.4.6 + turbo-freebsd-64: 1.4.6 + turbo-freebsd-arm64: 1.4.6 + turbo-linux-32: 1.4.6 + turbo-linux-64: 1.4.6 + turbo-linux-arm: 1.4.6 + turbo-linux-arm64: 1.4.6 + turbo-linux-mips64le: 1.4.6 + turbo-linux-ppc64le: 1.4.6 + turbo-windows-32: 1.4.6 + turbo-windows-64: 1.4.6 + turbo-windows-arm64: 1.4.6 + dependenciesMeta: + turbo-android-arm64: + optional: true + turbo-darwin-64: + optional: true + turbo-darwin-arm64: + optional: true + turbo-freebsd-64: + optional: true + turbo-freebsd-arm64: + optional: true + turbo-linux-32: + optional: true + turbo-linux-64: + optional: true + turbo-linux-arm: + optional: true + turbo-linux-arm64: + optional: true + turbo-linux-mips64le: + optional: true + turbo-linux-ppc64le: + optional: true + turbo-windows-32: + optional: true + turbo-windows-64: + optional: true + turbo-windows-arm64: + optional: true + bin: + turbo: bin/turbo + checksum: f7191f36e0abddf6dc88eb9a83a007a8616ebed1edd44c37f9b19e0451f3ce90c4406699f6166a99c0a6f8d39cc1f24d96513b7ef16b21747863827538b9c966 + languageName: node + linkType: hard + +"type-check@npm:^0.4.0, type-check@npm:~0.4.0": + 
version: 0.4.0 + resolution: "type-check@npm:0.4.0" + dependencies: + prelude-ls: ^1.2.1 + checksum: ec688ebfc9c45d0c30412e41ca9c0cdbd704580eb3a9ccf07b9b576094d7b86a012baebc95681999dd38f4f444afd28504cb3a89f2ef16b31d4ab61a0739025a + languageName: node + linkType: hard + +"type-fest@npm:^0.20.2": + version: 0.20.2 + resolution: "type-fest@npm:0.20.2" + checksum: 4fb3272df21ad1c552486f8a2f8e115c09a521ad7a8db3d56d53718d0c907b62c6e9141ba5f584af3f6830d0872c521357e512381f24f7c44acae583ad517d73 + languageName: node + linkType: hard + +"typescript@npm:^4.5.2, typescript@npm:^4.5.3, typescript@npm:^4.7.4": + version: 4.8.3 + resolution: "typescript@npm:4.8.3" + bin: + tsc: bin/tsc + tsserver: bin/tsserver + checksum: 8286a5edcaf3d68e65c451aa1e7150ad1cf53ee0813c07ec35b7abdfdb10f355ecaa13c6a226a694ae7a67785fd7eeebf89f845da0b4f7e4a35561ddc459aba0 + languageName: node + linkType: hard + +"typescript@patch:typescript@^4.5.2#~builtin, typescript@patch:typescript@^4.5.3#~builtin, typescript@patch:typescript@^4.7.4#~builtin": + version: 4.8.3 + resolution: "typescript@patch:typescript@npm%3A4.8.3#~builtin::version=4.8.3&hash=a1c5e5" + bin: + tsc: bin/tsc + tsserver: bin/tsserver + checksum: 2222d2382fb3146089b1d27ce2b55e9d1f99cc64118f1aba75809b693b856c5d3c324f052f60c75b577947fc538bc1c27bad0eb76cbdba9a63a253489504ba7e + languageName: node + linkType: hard + +"ui@*, ui@workspace:packages/ui": + version: 0.0.0-use.local + resolution: "ui@workspace:packages/ui" + dependencies: + "@types/react": ^17.0.37 + "@types/react-dom": ^17.0.11 + eslint: ^7.32.0 + eslint-config-custom: "*" + react: ^18.2.0 + tsconfig: "*" + typescript: ^4.5.2 + languageName: unknown + linkType: soft + +"unbox-primitive@npm:^1.0.2": + version: 1.0.2 + resolution: "unbox-primitive@npm:1.0.2" + dependencies: + call-bind: ^1.0.2 + has-bigints: ^1.0.2 + has-symbols: ^1.0.3 + which-boxed-primitive: ^1.0.2 + checksum: b7a1cf5862b5e4b5deb091672ffa579aa274f648410009c81cca63fed3b62b610c4f3b773f912ce545bb4e31edc3138975b5bc777fc6e4817dca51affb6380e9 + languageName: node + linkType: hard + +"update-browserslist-db@npm:^1.0.9": + version: 1.0.9 + resolution: "update-browserslist-db@npm:1.0.9" + dependencies: + escalade: ^3.1.1 + picocolors: ^1.0.0 + peerDependencies: + browserslist: ">= 4.21.0" + bin: + browserslist-lint: cli.js + checksum: f625899b236f6a4d7f62b56be1b8da230c5563d1fef84d3ef148f2e1a3f11a5a4b3be4fd7e3703e51274c116194017775b10afb4de09eb2c0d09d36b90f1f578 + languageName: node + linkType: hard + +"uri-js@npm:^4.2.2": + version: 4.4.1 + resolution: "uri-js@npm:4.4.1" + dependencies: + punycode: ^2.1.0 + checksum: 7167432de6817fe8e9e0c9684f1d2de2bb688c94388f7569f7dbdb1587c9f4ca2a77962f134ec90be0cc4d004c939ff0d05acc9f34a0db39a3c797dada262633 + languageName: node + linkType: hard + +"use-sync-external-store@npm:1.2.0": + version: 1.2.0 + resolution: "use-sync-external-store@npm:1.2.0" + peerDependencies: + react: ^16.8.0 || ^17.0.0 || ^18.0.0 + checksum: 5c639e0f8da3521d605f59ce5be9e094ca772bd44a4ce7322b055a6f58eeed8dda3c94cabd90c7a41fb6fa852210092008afe48f7038792fd47501f33299116a + languageName: node + linkType: hard + +"v8-compile-cache@npm:^2.0.3": + version: 2.3.0 + resolution: "v8-compile-cache@npm:2.3.0" + checksum: adb0a271eaa2297f2f4c536acbfee872d0dd26ec2d76f66921aa7fc437319132773483344207bdbeee169225f4739016d8d2dbf0553913a52bb34da6d0334f8e + languageName: node + linkType: hard + +"web@workspace:apps/web": + version: 0.0.0-use.local + resolution: "web@workspace:apps/web" + dependencies: + "@babel/core": ^7.0.0 + "@types/node": ^17.0.12 + 
"@types/react": 18.0.17 + eslint: 7.32.0 + eslint-config-custom: "*" + next: 12.2.5 + next-transpile-modules: 9.0.0 + react: 18.2.0 + react-dom: 18.2.0 + tsconfig: "*" + typescript: ^4.5.3 + ui: "*" + languageName: unknown + linkType: soft + +"which-boxed-primitive@npm:^1.0.2": + version: 1.0.2 + resolution: "which-boxed-primitive@npm:1.0.2" + dependencies: + is-bigint: ^1.0.1 + is-boolean-object: ^1.1.0 + is-number-object: ^1.0.4 + is-string: ^1.0.5 + is-symbol: ^1.0.3 + checksum: 53ce774c7379071729533922adcca47220228405e1895f26673bbd71bdf7fb09bee38c1d6399395927c6289476b5ae0629863427fd151491b71c4b6cb04f3a5e + languageName: node + linkType: hard + +"which@npm:^2.0.1": + version: 2.0.2 + resolution: "which@npm:2.0.2" + dependencies: + isexe: ^2.0.0 + bin: + node-which: ./bin/node-which + checksum: 1a5c563d3c1b52d5f893c8b61afe11abc3bab4afac492e8da5bde69d550de701cf9806235f20a47b5c8fa8a1d6a9135841de2596535e998027a54589000e66d1 + languageName: node + linkType: hard + +"word-wrap@npm:^1.2.3": + version: 1.2.3 + resolution: "word-wrap@npm:1.2.3" + checksum: 30b48f91fcf12106ed3186ae4fa86a6a1842416df425be7b60485de14bec665a54a68e4b5156647dec3a70f25e84d270ca8bc8cd23182ed095f5c7206a938c1f + languageName: node + linkType: hard + +"wrappy@npm:1": + version: 1.0.2 + resolution: "wrappy@npm:1.0.2" + checksum: 159da4805f7e84a3d003d8841557196034155008f817172d4e986bd591f74aa82aa7db55929a54222309e01079a65a92a9e6414da5a6aa4b01ee44a511ac3ee5 + languageName: node + linkType: hard + +"yallist@npm:^4.0.0": + version: 4.0.0 + resolution: "yallist@npm:4.0.0" + checksum: 343617202af32df2a15a3be36a5a8c0c8545208f3d3dfbc6bb7c3e3b7e8c6f8e7485432e4f3b88da3031a6e20afa7c711eded32ddfb122896ac5d914e75848d5 + languageName: node + linkType: hard diff --git a/cli/internal/lockfile/testdata/minimal-berry.lock b/cli/internal/lockfile/testdata/minimal-berry.lock new file mode 100644 index 0000000..3844ce3 --- /dev/null +++ b/cli/internal/lockfile/testdata/minimal-berry.lock @@ -0,0 +1,45 @@ +# This file is generated by running "yarn install" inside your project. +# Manual changes might be lost - proceed with caution! + +__metadata: + version: 6 + cacheKey: 8c8 + +"a@workspace:packages/a": + version: 0.0.0-use.local + resolution: "a@workspace:packages/a" + dependencies: + c: "*" + lodash: ^4.17.0 + peerDependencies: + lodash: ^3.0.0 || ^4.0.0 + languageName: unknown + linkType: soft + +"b@workspace:packages/b": + version: 0.0.0-use.local + resolution: "b@workspace:packages/b" + dependencies: + c: "*" + lodash: ^3.0.0 || ^4.0.0 + languageName: unknown + linkType: soft + +"c@*, c@workspace:packages/c": + version: 0.0.0-use.local + resolution: "c@workspace:packages/c" + languageName: unknown + linkType: soft + +"lodash@npm:^3.0.0 || ^4.0.0, lodash@npm:^4.17.0": + version: 4.17.21 + resolution: "lodash@npm:4.17.21" + checksum: eb835a2e51d381e561e508ce932ea50a8e5a68f4ebdd771ea240d3048244a8d13658acbd502cd4829768c56f2e16bdd4340b9ea141297d472517b83868e677f7 + languageName: node + linkType: hard + +"minimal-berry@workspace:.": + version: 0.0.0-use.local + resolution: "minimal-berry@workspace:." 
+ languageName: unknown + linkType: soft diff --git a/cli/internal/lockfile/testdata/npm-lock-workspace-variation.json b/cli/internal/lockfile/testdata/npm-lock-workspace-variation.json new file mode 100644 index 0000000..4dcfc2d --- /dev/null +++ b/cli/internal/lockfile/testdata/npm-lock-workspace-variation.json @@ -0,0 +1,186 @@ +{ + "name": "npm-prune-workspace-variation", + "version": "0.0.0", + "lockfileVersion": 2, + "requires": true, + "packages": { + "": { + "name": "npm-prune", + "version": "0.0.0", + "workspaces": { "packages": ["apps/*", "packages/*"] }, + "devDependencies": { + "eslint-config-custom": "*", + "prettier": "latest", + "turbo": "latest" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "apps/docs": { + "version": "0.0.0", + "dependencies": { + "lodash": "^3.0.0", + "next": "12.3.0", + "react": "18.2.0", + "react-dom": "18.2.0", + "ui": "*" + }, + "devDependencies": { + "@babel/core": "^7.0.0", + "@types/node": "^17.0.12", + "@types/react": "18.0.17", + "eslint": "7.32.0", + "eslint-config-custom": "*", + "next-transpile-modules": "9.0.0", + "tsconfig": "*", + "typescript": "^4.5.3" + } + }, + "apps/web": { + "version": "0.0.0", + "dependencies": { + "lodash": "^4.17.21", + "next": "12.3.0", + "react": "18.2.0", + "react-dom": "18.2.0", + "ui": "*" + }, + "devDependencies": { + "@babel/core": "^7.0.0", + "@types/node": "^17.0.12", + "@types/react": "18.0.17", + "eslint": "7.32.0", + "eslint-config-custom": "*", + "next-transpile-modules": "9.0.0", + "tsconfig": "*", + "typescript": "^4.5.3" + } + }, + "apps/web/node_modules/lodash": { + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", + "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", + "engines": ["node >= 0.8.0"] + }, + "node_modules/@ampproject/remapping": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.2.0.tgz", + "integrity": "sha512-qRmjj8nj9qmLTQXXmaR1cck3UXSRMPrbsLJAasZpF+t3riI71BXed5ebIOYwQntykeZuhjsdweEc9BxH5Jc26w==", + "dev": true, + "dependencies": { + "@jridgewell/gen-mapping": "^0.1.0", + "@jridgewell/trace-mapping": "^0.3.9" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@babel/code-frame": { + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.18.6.tgz", + "integrity": "sha512-TDCmlK5eOvH+eH7cdAFlNXeVJqWIQ7gW9tY1GJIpUtFb6CmjVyq2VM3u71bOyR8CRihcCgMUYoDNyLXao3+70Q==", + "dev": true, + "dependencies": { + "@babel/highlight": "^7.18.6" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/compat-data": { + "version": "7.19.3", + "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.19.3.tgz", + "integrity": "sha512-prBHMK4JYYK+wDjJF1q99KK4JLL+egWS4nmNqdlMUgCExMZ+iZW0hGhyC3VEbsPjvaN0TBhW//VIFwBrk8sEiw==", + "dev": true, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/core": { + "version": "7.19.3", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.19.3.tgz", + "integrity": "sha512-WneDJxdsjEvyKtXKsaBGbDeiyOjR5vYq4HcShxnIbG0qixpoHjI3MqeZM9NDvsojNCEBItQE4juOo/bU6e72gQ==", + "dev": true, + "dependencies": { + "@ampproject/remapping": "^2.1.0", + "@babel/code-frame": "^7.18.6", + "@babel/generator": "^7.19.3", + "@babel/helper-compilation-targets": "^7.19.3", + "@babel/helper-module-transforms": "^7.19.0", + "@babel/helpers": "^7.19.0", + "@babel/parser": "^7.19.3", + "@babel/template": "^7.18.10", + 
"@babel/traverse": "^7.19.3", + "@babel/types": "^7.19.3", + "convert-source-map": "^1.7.0", + "debug": "^4.1.0", + "gensync": "^1.0.0-beta.2", + "json5": "^2.2.1", + "semver": "^6.3.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/babel" + } + }, + "node_modules/@babel/generator": { + "version": "7.19.3", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.19.3.tgz", + "integrity": "sha512-fqVZnmp1ncvZU757UzDheKZpfPgatqY59XtW2/j/18H7u76akb8xqvjw82f+i2UKd/ksYsSick/BCLQUUtJ/qQ==", + "dev": true, + "dependencies": { + "@babel/types": "^7.19.3", + "@jridgewell/gen-mapping": "^0.3.2", + "jsesc": "^2.5.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/generator/node_modules/@jridgewell/gen-mapping": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.2.tgz", + "integrity": "sha512-mh65xKQAzI6iBcFzwv28KVWSmCkdRBWoOh+bYQGW3+6OZvbbN3TqMGo5hqYxQniRcH9F2VZIoJCm4pa3BPDK/A==", + "dev": true, + "dependencies": { + "@jridgewell/set-array": "^1.0.1", + "@jridgewell/sourcemap-codec": "^1.4.10", + "@jridgewell/trace-mapping": "^0.3.9" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@babel/helper-compilation-targets": { + "version": "7.19.3", + "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.19.3.tgz", + "integrity": "sha512-65ESqLGyGmLvgR0mst5AdW1FkNlj9rQsCKduzEoEPhBCDFGXvz2jW6bXFG6i0/MrV2s7hhXjjb2yAzcPuQlLwg==", + "dev": true, + "dependencies": { + "@babel/compat-data": "^7.19.3", + "@babel/helper-validator-option": "^7.18.6", + "browserslist": "^4.21.3", + "semver": "^6.3.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/helper-environment-visitor": { + "version": "7.18.9", + "resolved": "https://registry.npmjs.org/@babel/helper-environment-visitor/-/helper-environment-visitor-7.18.9.tgz", + "integrity": "sha512-3r/aACDJ3fhQ/EVgFy0hpj8oHyHpQc+LPtJoY9SzTThAsStm4Ptegq92vqKoE3vD706ZVFWITnMnxucw+S9Ipg==", + "dev": true, + "engines": { + "node": ">=6.9.0" + } + } + } +} diff --git a/cli/internal/lockfile/testdata/npm-lock.json b/cli/internal/lockfile/testdata/npm-lock.json new file mode 100644 index 0000000..c5607f1 --- /dev/null +++ b/cli/internal/lockfile/testdata/npm-lock.json @@ -0,0 +1,6472 @@ +{ + "name": "npm-prune", + "version": "0.0.0", + "lockfileVersion": 2, + "requires": true, + "packages": { + "": { + "name": "npm-prune", + "version": "0.0.0", + "workspaces": ["apps/*", "packages/*"], + "devDependencies": { + "eslint-config-custom": "*", + "prettier": "latest", + "turbo": "latest" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "apps/docs": { + "version": "0.0.0", + "dependencies": { + "lodash": "^3.0.0", + "next": "12.3.0", + "react": "18.2.0", + "react-dom": "18.2.0", + "ui": "*" + }, + "devDependencies": { + "@babel/core": "^7.0.0", + "@types/node": "^17.0.12", + "@types/react": "18.0.17", + "eslint": "7.32.0", + "eslint-config-custom": "*", + "next-transpile-modules": "9.0.0", + "tsconfig": "*", + "typescript": "^4.5.3" + } + }, + "apps/web": { + "version": "0.0.0", + "dependencies": { + "lodash": "^4.17.21", + "next": "12.3.0", + "react": "18.2.0", + "react-dom": "18.2.0", + "ui": "*" + }, + "devDependencies": { + "@babel/core": "^7.0.0", + "@types/node": "^17.0.12", + "@types/react": "18.0.17", + "eslint": "7.32.0", + "eslint-config-custom": "*", + 
"next-transpile-modules": "9.0.0", + "tsconfig": "*", + "typescript": "^4.5.3" + } + }, + "apps/web/node_modules/lodash": { + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", + "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", + "engines": ["node >= 0.8.0"] + }, + "node_modules/@ampproject/remapping": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.2.0.tgz", + "integrity": "sha512-qRmjj8nj9qmLTQXXmaR1cck3UXSRMPrbsLJAasZpF+t3riI71BXed5ebIOYwQntykeZuhjsdweEc9BxH5Jc26w==", + "dev": true, + "dependencies": { + "@jridgewell/gen-mapping": "^0.1.0", + "@jridgewell/trace-mapping": "^0.3.9" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@babel/code-frame": { + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.18.6.tgz", + "integrity": "sha512-TDCmlK5eOvH+eH7cdAFlNXeVJqWIQ7gW9tY1GJIpUtFb6CmjVyq2VM3u71bOyR8CRihcCgMUYoDNyLXao3+70Q==", + "dev": true, + "dependencies": { + "@babel/highlight": "^7.18.6" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/compat-data": { + "version": "7.19.3", + "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.19.3.tgz", + "integrity": "sha512-prBHMK4JYYK+wDjJF1q99KK4JLL+egWS4nmNqdlMUgCExMZ+iZW0hGhyC3VEbsPjvaN0TBhW//VIFwBrk8sEiw==", + "dev": true, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/core": { + "version": "7.19.3", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.19.3.tgz", + "integrity": "sha512-WneDJxdsjEvyKtXKsaBGbDeiyOjR5vYq4HcShxnIbG0qixpoHjI3MqeZM9NDvsojNCEBItQE4juOo/bU6e72gQ==", + "dev": true, + "dependencies": { + "@ampproject/remapping": "^2.1.0", + "@babel/code-frame": "^7.18.6", + "@babel/generator": "^7.19.3", + "@babel/helper-compilation-targets": "^7.19.3", + "@babel/helper-module-transforms": "^7.19.0", + "@babel/helpers": "^7.19.0", + "@babel/parser": "^7.19.3", + "@babel/template": "^7.18.10", + "@babel/traverse": "^7.19.3", + "@babel/types": "^7.19.3", + "convert-source-map": "^1.7.0", + "debug": "^4.1.0", + "gensync": "^1.0.0-beta.2", + "json5": "^2.2.1", + "semver": "^6.3.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/babel" + } + }, + "node_modules/@babel/generator": { + "version": "7.19.3", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.19.3.tgz", + "integrity": "sha512-fqVZnmp1ncvZU757UzDheKZpfPgatqY59XtW2/j/18H7u76akb8xqvjw82f+i2UKd/ksYsSick/BCLQUUtJ/qQ==", + "dev": true, + "dependencies": { + "@babel/types": "^7.19.3", + "@jridgewell/gen-mapping": "^0.3.2", + "jsesc": "^2.5.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/generator/node_modules/@jridgewell/gen-mapping": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.2.tgz", + "integrity": "sha512-mh65xKQAzI6iBcFzwv28KVWSmCkdRBWoOh+bYQGW3+6OZvbbN3TqMGo5hqYxQniRcH9F2VZIoJCm4pa3BPDK/A==", + "dev": true, + "dependencies": { + "@jridgewell/set-array": "^1.0.1", + "@jridgewell/sourcemap-codec": "^1.4.10", + "@jridgewell/trace-mapping": "^0.3.9" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@babel/helper-compilation-targets": { + "version": "7.19.3", + "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.19.3.tgz", + "integrity": 
"sha512-65ESqLGyGmLvgR0mst5AdW1FkNlj9rQsCKduzEoEPhBCDFGXvz2jW6bXFG6i0/MrV2s7hhXjjb2yAzcPuQlLwg==", + "dev": true, + "dependencies": { + "@babel/compat-data": "^7.19.3", + "@babel/helper-validator-option": "^7.18.6", + "browserslist": "^4.21.3", + "semver": "^6.3.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/helper-environment-visitor": { + "version": "7.18.9", + "resolved": "https://registry.npmjs.org/@babel/helper-environment-visitor/-/helper-environment-visitor-7.18.9.tgz", + "integrity": "sha512-3r/aACDJ3fhQ/EVgFy0hpj8oHyHpQc+LPtJoY9SzTThAsStm4Ptegq92vqKoE3vD706ZVFWITnMnxucw+S9Ipg==", + "dev": true, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-function-name": { + "version": "7.19.0", + "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.19.0.tgz", + "integrity": "sha512-WAwHBINyrpqywkUH0nTnNgI5ina5TFn85HKS0pbPDfxFfhyR/aNQEn4hGi1P1JyT//I0t4OgXUlofzWILRvS5w==", + "dev": true, + "dependencies": { + "@babel/template": "^7.18.10", + "@babel/types": "^7.19.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-hoist-variables": { + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/helper-hoist-variables/-/helper-hoist-variables-7.18.6.tgz", + "integrity": "sha512-UlJQPkFqFULIcyW5sbzgbkxn2FKRgwWiRexcuaR8RNJRy8+LLveqPjwZV/bwrLZCN0eUHD/x8D0heK1ozuoo6Q==", + "dev": true, + "dependencies": { + "@babel/types": "^7.18.6" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-module-imports": { + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.18.6.tgz", + "integrity": "sha512-0NFvs3VkuSYbFi1x2Vd6tKrywq+z/cLeYC/RJNFrIX/30Bf5aiGYbtvGXolEktzJH8o5E5KJ3tT+nkxuuZFVlA==", + "dev": true, + "dependencies": { + "@babel/types": "^7.18.6" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-module-transforms": { + "version": "7.19.0", + "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.19.0.tgz", + "integrity": "sha512-3HBZ377Fe14RbLIA+ac3sY4PTgpxHVkFrESaWhoI5PuyXPBBX8+C34qblV9G89ZtycGJCmCI/Ut+VUDK4bltNQ==", + "dev": true, + "dependencies": { + "@babel/helper-environment-visitor": "^7.18.9", + "@babel/helper-module-imports": "^7.18.6", + "@babel/helper-simple-access": "^7.18.6", + "@babel/helper-split-export-declaration": "^7.18.6", + "@babel/helper-validator-identifier": "^7.18.6", + "@babel/template": "^7.18.10", + "@babel/traverse": "^7.19.0", + "@babel/types": "^7.19.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-simple-access": { + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/helper-simple-access/-/helper-simple-access-7.18.6.tgz", + "integrity": "sha512-iNpIgTgyAvDQpDj76POqg+YEt8fPxx3yaNBg3S30dxNKm2SWfYhD0TGrK/Eu9wHpUW63VQU894TsTg+GLbUa1g==", + "dev": true, + "dependencies": { + "@babel/types": "^7.18.6" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-split-export-declaration": { + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.18.6.tgz", + "integrity": "sha512-bde1etTx6ZyTmobl9LLMMQsaizFVZrquTEHOqKeQESMKo4PlObf+8+JA25ZsIpZhT/WEd39+vOdLXAFG/nELpA==", + "dev": true, + "dependencies": { + "@babel/types": "^7.18.6" + }, + "engines": { + "node": ">=6.9.0" + } + }, + 
"node_modules/@babel/helper-string-parser": { + "version": "7.18.10", + "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.18.10.tgz", + "integrity": "sha512-XtIfWmeNY3i4t7t4D2t02q50HvqHybPqW2ki1kosnvWCwuCMeo81Jf0gwr85jy/neUdg5XDdeFE/80DXiO+njw==", + "dev": true, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-identifier": { + "version": "7.19.1", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.19.1.tgz", + "integrity": "sha512-awrNfaMtnHUr653GgGEs++LlAvW6w+DcPrOliSMXWCKo597CwL5Acf/wWdNkf/tfEQE3mjkeD1YOVZOUV/od1w==", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-option": { + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.18.6.tgz", + "integrity": "sha512-XO7gESt5ouv/LRJdrVjkShckw6STTaB7l9BrpBaAHDeF5YZT+01PCwmR0SJHnkW6i8OwW/EVWRShfi4j2x+KQw==", + "dev": true, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helpers": { + "version": "7.19.0", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.19.0.tgz", + "integrity": "sha512-DRBCKGwIEdqY3+rPJgG/dKfQy9+08rHIAJx8q2p+HSWP87s2HCrQmaAMMyMll2kIXKCW0cO1RdQskx15Xakftg==", + "dev": true, + "dependencies": { + "@babel/template": "^7.18.10", + "@babel/traverse": "^7.19.0", + "@babel/types": "^7.19.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/highlight": { + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.18.6.tgz", + "integrity": "sha512-u7stbOuYjaPezCuLj29hNW1v64M2Md2qupEKP1fHc7WdOA3DgLh37suiSrZYY7haUB7iBeQZ9P1uiRF359do3g==", + "dependencies": { + "@babel/helper-validator-identifier": "^7.18.6", + "chalk": "^2.0.0", + "js-tokens": "^4.0.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/parser": { + "version": "7.19.3", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.19.3.tgz", + "integrity": "sha512-pJ9xOlNWHiy9+FuFP09DEAFbAn4JskgRsVcc169w2xRBC3FRGuQEwjeIMMND9L2zc0iEhO/tGv4Zq+km+hxNpQ==", + "dev": true, + "bin": { + "parser": "bin/babel-parser.js" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@babel/runtime": { + "version": "7.19.0", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.19.0.tgz", + "integrity": "sha512-eR8Lo9hnDS7tqkO7NsV+mKvCmv5boaXFSZ70DnfhcgiEne8hv9oCEd36Klw74EtizEqLsy4YnW8UWwpBVolHZA==", + "dependencies": { + "regenerator-runtime": "^0.13.4" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/runtime-corejs3": { + "version": "7.19.1", + "resolved": "https://registry.npmjs.org/@babel/runtime-corejs3/-/runtime-corejs3-7.19.1.tgz", + "integrity": "sha512-j2vJGnkopRzH+ykJ8h68wrHnEUmtK//E723jjixiAl/PPf6FhqY/vYRcMVlNydRKQjQsTsYEjpx+DZMIvnGk/g==", + "dependencies": { + "core-js-pure": "^3.25.1", + "regenerator-runtime": "^0.13.4" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/template": { + "version": "7.18.10", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.18.10.tgz", + "integrity": "sha512-TI+rCtooWHr3QJ27kJxfjutghu44DLnasDMwpDqCXVTal9RLp3RSYNh4NdBrRP2cQAoG9A8juOQl6P6oZG4JxA==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.18.6", + "@babel/parser": "^7.18.10", + "@babel/types": "^7.18.10" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/traverse": { + "version": "7.19.3", + "resolved": 
"https://registry.npmjs.org/@babel/traverse/-/traverse-7.19.3.tgz", + "integrity": "sha512-qh5yf6149zhq2sgIXmwjnsvmnNQC2iw70UFjp4olxucKrWd/dvlUsBI88VSLUsnMNF7/vnOiA+nk1+yLoCqROQ==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.18.6", + "@babel/generator": "^7.19.3", + "@babel/helper-environment-visitor": "^7.18.9", + "@babel/helper-function-name": "^7.19.0", + "@babel/helper-hoist-variables": "^7.18.6", + "@babel/helper-split-export-declaration": "^7.18.6", + "@babel/parser": "^7.19.3", + "@babel/types": "^7.19.3", + "debug": "^4.1.0", + "globals": "^11.1.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/types": { + "version": "7.19.3", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.19.3.tgz", + "integrity": "sha512-hGCaQzIY22DJlDh9CH7NOxgKkFjBk0Cw9xDO1Xmh2151ti7wiGfQ3LauXzL4HP1fmFlTX6XjpRETTpUcv7wQLw==", + "dev": true, + "dependencies": { + "@babel/helper-string-parser": "^7.18.10", + "@babel/helper-validator-identifier": "^7.19.1", + "to-fast-properties": "^2.0.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@eslint/eslintrc": { + "version": "0.4.3", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-0.4.3.tgz", + "integrity": "sha512-J6KFFz5QCYUJq3pf0mjEcCJVERbzv71PUIDczuh9JkwGEzced6CO5ADLHB1rbf/+oPBtoPfMYNOpGDzCANlbXw==", + "dependencies": { + "ajv": "^6.12.4", + "debug": "^4.1.1", + "espree": "^7.3.0", + "globals": "^13.9.0", + "ignore": "^4.0.6", + "import-fresh": "^3.2.1", + "js-yaml": "^3.13.1", + "minimatch": "^3.0.4", + "strip-json-comments": "^3.1.1" + }, + "engines": { + "node": "^10.12.0 || >=12.0.0" + } + }, + "node_modules/@eslint/eslintrc/node_modules/globals": { + "version": "13.17.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-13.17.0.tgz", + "integrity": "sha512-1C+6nQRb1GwGMKm2dH/E7enFAMxGTmGI7/dEdhy/DNelv85w9B72t3uc5frtMNXIbzrarJJ/lTCjcaZwbLJmyw==", + "dependencies": { + "type-fest": "^0.20.2" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@humanwhocodes/config-array": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.5.0.tgz", + "integrity": "sha512-FagtKFz74XrTl7y6HCzQpwDfXP0yhxe9lHLD1UZxjvZIcbyRz8zTFF/yYNfSfzU414eDwZ1SrO0Qvtyf+wFMQg==", + "dependencies": { + "@humanwhocodes/object-schema": "^1.2.0", + "debug": "^4.1.1", + "minimatch": "^3.0.4" + }, + "engines": { + "node": ">=10.10.0" + } + }, + "node_modules/@humanwhocodes/object-schema": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz", + "integrity": "sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA==" + }, + "node_modules/@jridgewell/gen-mapping": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.1.1.tgz", + "integrity": "sha512-sQXCasFk+U8lWYEe66WxRDOE9PjVz4vSM51fTu3Hw+ClTpUSQb718772vH3pyS5pShp6lvQM7SxgIDXXXmOX7w==", + "dev": true, + "dependencies": { + "@jridgewell/set-array": "^1.0.0", + "@jridgewell/sourcemap-codec": "^1.4.10" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/resolve-uri": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.0.tgz", + "integrity": "sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w==", + "dev": true, + "engines": { + 
"node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/set-array": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.1.2.tgz", + "integrity": "sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw==", + "dev": true, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/sourcemap-codec": { + "version": "1.4.14", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.14.tgz", + "integrity": "sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw==", + "dev": true + }, + "node_modules/@jridgewell/trace-mapping": { + "version": "0.3.15", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.15.tgz", + "integrity": "sha512-oWZNOULl+UbhsgB51uuZzglikfIKSUBO/M9W2OfEjn7cmqoAiCgmv9lyACTUacZwBz0ITnJ2NqjU8Tx0DHL88g==", + "dev": true, + "dependencies": { + "@jridgewell/resolve-uri": "^3.0.3", + "@jridgewell/sourcemap-codec": "^1.4.10" + } + }, + "node_modules/@next/env": { + "version": "12.3.0", + "resolved": "https://registry.npmjs.org/@next/env/-/env-12.3.0.tgz", + "integrity": "sha512-PTJpjAFVbzBQ9xXpzMTroShvD5YDIIy46jQ7d4LrWpY+/5a8H90Tm8hE3Hvkc5RBRspVo7kvEOnqQms0A+2Q6w==" + }, + "node_modules/@next/eslint-plugin-next": { + "version": "12.3.1", + "resolved": "https://registry.npmjs.org/@next/eslint-plugin-next/-/eslint-plugin-next-12.3.1.tgz", + "integrity": "sha512-sw+lTf6r6P0j+g/n9y4qdWWI2syPqZx+uc0+B/fRENqfR3KpSid6MIKqc9gNwGhJASazEQ5b3w8h4cAET213jw==", + "dependencies": { + "glob": "7.1.7" + } + }, + "node_modules/@next/swc-android-arm-eabi": { + "version": "12.3.0", + "resolved": "https://registry.npmjs.org/@next/swc-android-arm-eabi/-/swc-android-arm-eabi-12.3.0.tgz", + "integrity": "sha512-/PuirPnAKsYBw93w/7Q9hqy+KGOU9mjYprZ/faxMUJh/dc6v3rYLxkZKNG9nFPIW4QKNTCnhP40xF9hLnxO+xg==", + "cpu": ["arm"], + "optional": true, + "os": ["android"], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@next/swc-android-arm64": { + "version": "12.3.0", + "resolved": "https://registry.npmjs.org/@next/swc-android-arm64/-/swc-android-arm64-12.3.0.tgz", + "integrity": "sha512-OaI+FhAM6P9B6Ybwbn0Zl8YwWido0lLwhDBi9WiYCh4RQmIXAyVIoIJPHo4fP05+mXaJ/k1trvDvuURvHOq2qw==", + "cpu": ["arm64"], + "optional": true, + "os": ["android"], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@next/swc-darwin-arm64": { + "version": "12.3.0", + "resolved": "https://registry.npmjs.org/@next/swc-darwin-arm64/-/swc-darwin-arm64-12.3.0.tgz", + "integrity": "sha512-9s4d3Mhii+WFce8o8Jok7WC3Bawkr9wEUU++SJRptjU1L5tsfYJMrSYCACHLhZujziNDLyExe4Hwwsccps1sfg==", + "cpu": ["arm64"], + "optional": true, + "os": ["darwin"], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@next/swc-darwin-x64": { + "version": "12.3.0", + "resolved": "https://registry.npmjs.org/@next/swc-darwin-x64/-/swc-darwin-x64-12.3.0.tgz", + "integrity": "sha512-2scC4MqUTwGwok+wpVxP+zWp7WcCAVOtutki2E1n99rBOTnUOX6qXkgxSy083yBN6GqwuC/dzHeN7hIKjavfRA==", + "cpu": ["x64"], + "optional": true, + "os": ["darwin"], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@next/swc-freebsd-x64": { + "version": "12.3.0", + "resolved": "https://registry.npmjs.org/@next/swc-freebsd-x64/-/swc-freebsd-x64-12.3.0.tgz", + "integrity": "sha512-xAlruUREij/bFa+qsE1tmsP28t7vz02N4ZDHt2lh3uJUniE0Ne9idyIDLc1Ed0IF2RjfgOp4ZVunuS3OM0sngw==", + "cpu": ["x64"], + "optional": true, + "os": ["freebsd"], + "engines": { + "node": ">= 10" + } + }, + 
"node_modules/@next/swc-linux-arm-gnueabihf": { + "version": "12.3.0", + "resolved": "https://registry.npmjs.org/@next/swc-linux-arm-gnueabihf/-/swc-linux-arm-gnueabihf-12.3.0.tgz", + "integrity": "sha512-jin2S4VT/cugc2dSZEUIabhYDJNgrUh7fufbdsaAezgcQzqfdfJqfxl4E9GuafzB4cbRPTaqA0V5uqbp0IyGkQ==", + "cpu": ["arm"], + "optional": true, + "os": ["linux"], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@next/swc-linux-arm64-gnu": { + "version": "12.3.0", + "resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-gnu/-/swc-linux-arm64-gnu-12.3.0.tgz", + "integrity": "sha512-RqJHDKe0WImeUrdR0kayTkRWgp4vD/MS7g0r6Xuf8+ellOFH7JAAJffDW3ayuVZeMYOa7RvgNFcOoWnrTUl9Nw==", + "cpu": ["arm64"], + "optional": true, + "os": ["linux"], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@next/swc-linux-arm64-musl": { + "version": "12.3.0", + "resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-musl/-/swc-linux-arm64-musl-12.3.0.tgz", + "integrity": "sha512-nvNWoUieMjvDjpYJ/4SQe9lQs2xMj6ZRs8N+bmTrVu9leY2Fg3WD6W9p/1uU9hGO8u+OdF13wc4iRShu/WYIHg==", + "cpu": ["arm64"], + "optional": true, + "os": ["linux"], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@next/swc-linux-x64-gnu": { + "version": "12.3.0", + "resolved": "https://registry.npmjs.org/@next/swc-linux-x64-gnu/-/swc-linux-x64-gnu-12.3.0.tgz", + "integrity": "sha512-4ajhIuVU9PeQCMMhdDgZTLrHmjbOUFuIyg6J19hZqwEwDTSqQyrSLkbJs2Nd7IRiM6Ul/XyrtEFCpk4k+xD2+w==", + "cpu": ["x64"], + "optional": true, + "os": ["linux"], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@next/swc-linux-x64-musl": { + "version": "12.3.0", + "resolved": "https://registry.npmjs.org/@next/swc-linux-x64-musl/-/swc-linux-x64-musl-12.3.0.tgz", + "integrity": "sha512-U092RBYbaGxoMAwpauePJEu2PuZSEoUCGJBvsptQr2/2XIMwAJDYM4c/M5NfYEsBr+yjvsYNsOpYfeQ88D82Yg==", + "cpu": ["x64"], + "optional": true, + "os": ["linux"], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@next/swc-win32-arm64-msvc": { + "version": "12.3.0", + "resolved": "https://registry.npmjs.org/@next/swc-win32-arm64-msvc/-/swc-win32-arm64-msvc-12.3.0.tgz", + "integrity": "sha512-pzSzaxjDEJe67bUok9Nxf9rykbJfHXW0owICFsPBsqHyc+cr8vpF7g9e2APTCddtVhvjkga9ILoZJ9NxWS7Yiw==", + "cpu": ["arm64"], + "optional": true, + "os": ["win32"], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@next/swc-win32-ia32-msvc": { + "version": "12.3.0", + "resolved": "https://registry.npmjs.org/@next/swc-win32-ia32-msvc/-/swc-win32-ia32-msvc-12.3.0.tgz", + "integrity": "sha512-MQGUpMbYhQmTZ06a9e0hPQJnxFMwETo2WtyAotY3GEzbNCQVbCGhsvqEKcl+ZEHgShlHXUWvSffq1ZscY6gK7A==", + "cpu": ["ia32"], + "optional": true, + "os": ["win32"], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@next/swc-win32-x64-msvc": { + "version": "12.3.0", + "resolved": "https://registry.npmjs.org/@next/swc-win32-x64-msvc/-/swc-win32-x64-msvc-12.3.0.tgz", + "integrity": "sha512-C/nw6OgQpEULWqs+wgMHXGvlJLguPRFFGqR2TAqWBerQ8J+Sg3z1ZTqwelkSi4FoqStGuZ2UdFHIDN1ySmR1xA==", + "cpu": ["x64"], + "optional": true, + "os": ["win32"], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@nodelib/fs.scandir": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", + "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", + "dependencies": { + "@nodelib/fs.stat": "2.0.5", + "run-parallel": "^1.1.9" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.stat": { + "version": "2.0.5", + "resolved": 
"https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", + "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.walk": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", + "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", + "dependencies": { + "@nodelib/fs.scandir": "2.1.5", + "fastq": "^1.6.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@rushstack/eslint-patch": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@rushstack/eslint-patch/-/eslint-patch-1.2.0.tgz", + "integrity": "sha512-sXo/qW2/pAcmT43VoRKOJbDOfV3cYpq3szSVfIThQXNt+E4DfKj361vaAt3c88U5tPUxzEswam7GW48PJqtKAg==" + }, + "node_modules/@swc/helpers": { + "version": "0.4.11", + "resolved": "https://registry.npmjs.org/@swc/helpers/-/helpers-0.4.11.tgz", + "integrity": "sha512-rEUrBSGIoSFuYxwBYtlUFMlE2CwGhmW+w9355/5oduSw8e5h2+Tj4UrAGNNgP9915++wj5vkQo0UuOBqOAq4nw==", + "dependencies": { + "tslib": "^2.4.0" + } + }, + "node_modules/@types/json5": { + "version": "0.0.29", + "resolved": "https://registry.npmjs.org/@types/json5/-/json5-0.0.29.tgz", + "integrity": "sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ==" + }, + "node_modules/@types/node": { + "version": "17.0.45", + "resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.45.tgz", + "integrity": "sha512-w+tIMs3rq2afQdsPJlODhoUEKzFP1ayaoyl1CcnwtIlsVe7K7bA1NGm4s3PraqTLlXnbIN84zuBlxBWo1u9BLw==", + "dev": true + }, + "node_modules/@types/prop-types": { + "version": "15.7.5", + "resolved": "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.5.tgz", + "integrity": "sha512-JCB8C6SnDoQf0cNycqd/35A7MjcnK+ZTqE7judS6o7utxUCg6imJg3QK2qzHKszlTjcj2cn+NwMB2i96ubpj7w==", + "dev": true + }, + "node_modules/@types/react": { + "version": "18.0.17", + "resolved": "https://registry.npmjs.org/@types/react/-/react-18.0.17.tgz", + "integrity": "sha512-38ETy4tL+rn4uQQi7mB81G7V1g0u2ryquNmsVIOKUAEIDK+3CUjZ6rSRpdvS99dNBnkLFL83qfmtLacGOTIhwQ==", + "dev": true, + "dependencies": { + "@types/prop-types": "*", + "@types/scheduler": "*", + "csstype": "^3.0.2" + } + }, + "node_modules/@types/react-dom": { + "version": "17.0.17", + "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-17.0.17.tgz", + "integrity": "sha512-VjnqEmqGnasQKV0CWLevqMTXBYG9GbwuE6x3VetERLh0cq2LTptFE73MrQi2S7GkKXCf2GgwItB/melLnxfnsg==", + "dev": true, + "dependencies": { + "@types/react": "^17" + } + }, + "node_modules/@types/react-dom/node_modules/@types/react": { + "version": "17.0.50", + "resolved": "https://registry.npmjs.org/@types/react/-/react-17.0.50.tgz", + "integrity": "sha512-ZCBHzpDb5skMnc1zFXAXnL3l1FAdi+xZvwxK+PkglMmBrwjpp9nKaWuEvrGnSifCJmBFGxZOOFuwC6KH/s0NuA==", + "dev": true, + "dependencies": { + "@types/prop-types": "*", + "@types/scheduler": "*", + "csstype": "^3.0.2" + } + }, + "node_modules/@types/scheduler": { + "version": "0.16.2", + "resolved": "https://registry.npmjs.org/@types/scheduler/-/scheduler-0.16.2.tgz", + "integrity": "sha512-hppQEBDmlwhFAXKJX2KnWLYu5yMfi91yazPb2l+lbJiwW+wdo1gNeRA+3RgNSO39WYX2euey41KEwnqesU2Jew==", + "dev": true + }, + "node_modules/@typescript-eslint/parser": { + "version": "5.39.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-5.39.0.tgz", + "integrity": 
"sha512-PhxLjrZnHShe431sBAGHaNe6BDdxAASDySgsBCGxcBecVCi8NQWxQZMcizNA4g0pN51bBAn/FUfkWG3SDVcGlA==", + "dependencies": { + "@typescript-eslint/scope-manager": "5.39.0", + "@typescript-eslint/types": "5.39.0", + "@typescript-eslint/typescript-estree": "5.39.0", + "debug": "^4.3.4" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^6.0.0 || ^7.0.0 || ^8.0.0" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/scope-manager": { + "version": "5.39.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-5.39.0.tgz", + "integrity": "sha512-/I13vAqmG3dyqMVSZPjsbuNQlYS082Y7OMkwhCfLXYsmlI0ca4nkL7wJ/4gjX70LD4P8Hnw1JywUVVAwepURBw==", + "dependencies": { + "@typescript-eslint/types": "5.39.0", + "@typescript-eslint/visitor-keys": "5.39.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/types": { + "version": "5.39.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-5.39.0.tgz", + "integrity": "sha512-gQMZrnfEBFXK38hYqt8Lkwt8f4U6yq+2H5VDSgP/qiTzC8Nw8JO3OuSUOQ2qW37S/dlwdkHDntkZM6SQhKyPhw==", + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/typescript-estree": { + "version": "5.39.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-5.39.0.tgz", + "integrity": "sha512-qLFQP0f398sdnogJoLtd43pUgB18Q50QSA+BTE5h3sUxySzbWDpTSdgt4UyxNSozY/oDK2ta6HVAzvGgq8JYnA==", + "dependencies": { + "@typescript-eslint/types": "5.39.0", + "@typescript-eslint/visitor-keys": "5.39.0", + "debug": "^4.3.4", + "globby": "^11.1.0", + "is-glob": "^4.0.3", + "semver": "^7.3.7", + "tsutils": "^3.21.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/typescript-estree/node_modules/semver": { + "version": "7.3.7", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.7.tgz", + "integrity": "sha512-QlYTucUYOews+WeEujDoEGziz4K6c47V/Bd+LjSSYcA94p+DmINdf7ncaUinThfvZyu13lN9OY1XDxt8C0Tw0g==", + "dependencies": { + "lru-cache": "^6.0.0" + }, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@typescript-eslint/visitor-keys": { + "version": "5.39.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-5.39.0.tgz", + "integrity": "sha512-yyE3RPwOG+XJBLrhvsxAidUgybJVQ/hG8BhiJo0k8JSAYfk/CshVcxf0HwP4Jt7WZZ6vLmxdo1p6EyN3tzFTkg==", + "dependencies": { + "@typescript-eslint/types": "5.39.0", + "eslint-visitor-keys": "^3.3.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/visitor-keys/node_modules/eslint-visitor-keys": { + "version": "3.3.0", + "resolved": 
"https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.3.0.tgz", + "integrity": "sha512-mQ+suqKJVyeuwGYHAdjMFqjCyfl8+Ldnxuyp3ldiMBFKkvytrXUZWaiPCEav8qDHKty44bD+qV1IP4T+w+xXRA==", + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + } + }, + "node_modules/acorn": { + "version": "7.4.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.4.1.tgz", + "integrity": "sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A==", + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/acorn-jsx": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", + "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", + "peerDependencies": { + "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" + } + }, + "node_modules/ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/ansi-colors": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-4.1.3.tgz", + "integrity": "sha512-/6w/C21Pm1A7aZitlI5Ni/2J6FFQN8i1Cvz3kHABAAbw93v/NlvKdVOqz7CCWz/3iv/JplRSEEZ83XION15ovw==", + "engines": { + "node": ">=6" + } + }, + "node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "engines": { + "node": ">=8" + } + }, + "node_modules/ansi-styles": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", + "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "dependencies": { + "color-convert": "^1.9.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/argparse": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", + "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", + "dependencies": { + "sprintf-js": "~1.0.2" + } + }, + "node_modules/aria-query": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/aria-query/-/aria-query-4.2.2.tgz", + "integrity": "sha512-o/HelwhuKpTj/frsOsbNLNgnNGVIFsVP/SW2BSF14gVl7kAfMOJ6/8wUAUvG1R1NHKrfG+2sHZTu0yauT1qBrA==", + "dependencies": { + "@babel/runtime": "^7.10.2", + "@babel/runtime-corejs3": "^7.10.2" + }, + "engines": { + "node": ">=6.0" + } + }, + "node_modules/array-includes": { + "version": "3.1.5", + "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.1.5.tgz", + "integrity": "sha512-iSDYZMMyTPkiFasVqfuAQnWAYcvO/SeBSCGKePoEthjp4LEMTe4uLc7b025o4jAZpHhihh8xPo99TNWUWWkGDQ==", + "dependencies": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.4", + "es-abstract": "^1.19.5", + "get-intrinsic": "^1.1.1", + "is-string": "^1.0.7" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/array-union": { + "version": "2.1.0", + 
"resolved": "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz", + "integrity": "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==", + "engines": { + "node": ">=8" + } + }, + "node_modules/array.prototype.flat": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/array.prototype.flat/-/array.prototype.flat-1.3.0.tgz", + "integrity": "sha512-12IUEkHsAhA4DY5s0FPgNXIdc8VRSqD9Zp78a5au9abH/SOBrsp082JOWFNTjkMozh8mqcdiKuaLGhPeYztxSw==", + "dependencies": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.3", + "es-abstract": "^1.19.2", + "es-shim-unscopables": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/array.prototype.flatmap": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/array.prototype.flatmap/-/array.prototype.flatmap-1.3.0.tgz", + "integrity": "sha512-PZC9/8TKAIxcWKdyeb77EzULHPrIX/tIZebLJUQOMR1OwYosT8yggdfWScfTBCDj5utONvOuPQQumYsU2ULbkg==", + "dependencies": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.3", + "es-abstract": "^1.19.2", + "es-shim-unscopables": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/ast-types-flow": { + "version": "0.0.7", + "resolved": "https://registry.npmjs.org/ast-types-flow/-/ast-types-flow-0.0.7.tgz", + "integrity": "sha512-eBvWn1lvIApYMhzQMsu9ciLfkBY499mFZlNqG+/9WR7PVlroQw0vG30cOQQbaKz3sCEc44TAOu2ykzqXSNnwag==" + }, + "node_modules/astral-regex": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/astral-regex/-/astral-regex-2.0.0.tgz", + "integrity": "sha512-Z7tMw1ytTXt5jqMcOP+OQteU1VuNK9Y02uuJtKQ1Sv69jXQKKg5cibLwGJow8yzZP+eAc18EmLGPal0bp36rvQ==", + "engines": { + "node": ">=8" + } + }, + "node_modules/axe-core": { + "version": "4.4.3", + "resolved": "https://registry.npmjs.org/axe-core/-/axe-core-4.4.3.tgz", + "integrity": "sha512-32+ub6kkdhhWick/UjvEwRchgoetXqTK14INLqbGm5U2TzBkBNF3nQtLYm8ovxSkQWArjEQvftCKryjZaATu3w==", + "engines": { + "node": ">=4" + } + }, + "node_modules/axobject-query": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/axobject-query/-/axobject-query-2.2.0.tgz", + "integrity": "sha512-Td525n+iPOOyUQIeBfcASuG6uJsDOITl7Mds5gFyerkWiX7qhUTdYUBlSgNMyVqtSJqwpt1kXGLdUt6SykLMRA==" + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" + }, + "node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/braces": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", + "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", + "dependencies": { + "fill-range": "^7.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/browserslist": { + "version": "4.21.4", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.21.4.tgz", + "integrity": 
"sha512-CBHJJdDmgjl3daYjN5Cp5kbTf1mUhZoS+beLklHIvkOWscs83YAhLlF3Wsh/lciQYAcbBJgTOD44VtG31ZM4Hw==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + } + ], + "dependencies": { + "caniuse-lite": "^1.0.30001400", + "electron-to-chromium": "^1.4.251", + "node-releases": "^2.0.6", + "update-browserslist-db": "^1.0.9" + }, + "bin": { + "browserslist": "cli.js" + }, + "engines": { + "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7" + } + }, + "node_modules/call-bind": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz", + "integrity": "sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==", + "dependencies": { + "function-bind": "^1.1.1", + "get-intrinsic": "^1.0.2" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/callsites": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", + "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", + "engines": { + "node": ">=6" + } + }, + "node_modules/caniuse-lite": { + "version": "1.0.30001414", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001414.tgz", + "integrity": "sha512-t55jfSaWjCdocnFdKQoO+d2ct9C59UZg4dY3OnUlSZ447r8pUtIKdp0hpAzrGFultmTC+Us+KpKi4GZl/LXlFg==", + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/caniuse-lite" + } + ] + }, + "node_modules/chalk": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "dependencies": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/color-convert": { + "version": "1.9.3", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", + "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "dependencies": { + "color-name": "1.1.3" + } + }, + "node_modules/color-name": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", + "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==" + }, + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==" + }, + "node_modules/convert-source-map": { + "version": "1.8.0", + "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.8.0.tgz", + "integrity": "sha512-+OQdjP49zViI/6i7nIJpA8rAl4sV/JdPfU9nZs3VqOwGIgizICvuN2ru6fMd+4llL0tar18UYJXfZ/TWtmhUjA==", + "dev": true, + "dependencies": { + "safe-buffer": "~5.1.1" + } + }, + "node_modules/core-js-pure": { + "version": "3.25.5", + "resolved": "https://registry.npmjs.org/core-js-pure/-/core-js-pure-3.25.5.tgz", + "integrity": "sha512-oml3M22pHM+igfWHDfdLVq2ShWmjM2V4L+dQEBs0DWVIqEm9WHCwGAlZ6BmyBQGy5sFrJmcx+856D9lVKyGWYg==", + 
"hasInstallScript": true, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/core-js" + } + }, + "node_modules/cross-spawn": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", + "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/csstype": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.1.1.tgz", + "integrity": "sha512-DJR/VvkAvSZW9bTouZue2sSxDwdTN92uHjqeKVm+0dAqdfNykRzQ95tay8aXMBAAPpUiq4Qcug2L7neoRh2Egw==", + "dev": true + }, + "node_modules/damerau-levenshtein": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/damerau-levenshtein/-/damerau-levenshtein-1.0.8.tgz", + "integrity": "sha512-sdQSFB7+llfUcQHUQO3+B8ERRj0Oa4w9POWMI/puGtuf7gFywGmkaLCElnudfTiKZV+NvHqL0ifzdrI8Ro7ESA==" + }, + "node_modules/debug": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/deep-is": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", + "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==" + }, + "node_modules/define-properties": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.1.4.tgz", + "integrity": "sha512-uckOqKcfaVvtBdsVkdPv3XjveQJsNQqmhXgRi8uhvWWuPYZCNlzT8qAyblUgNoXdHdjMTzAqeGjAoli8f+bzPA==", + "dependencies": { + "has-property-descriptors": "^1.0.0", + "object-keys": "^1.1.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/dir-glob": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz", + "integrity": "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==", + "dependencies": { + "path-type": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/docs": { + "resolved": "apps/docs", + "link": true + }, + "node_modules/doctrine": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz", + "integrity": "sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==", + "dependencies": { + "esutils": "^2.0.2" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/electron-to-chromium": { + "version": "1.4.270", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.270.tgz", + "integrity": "sha512-KNhIzgLiJmDDC444dj9vEOpZEgsV96ult9Iff98Vanumn+ShJHd5se8aX6KeVxdc0YQeqdrezBZv89rleDbvSg==", + "dev": true + }, + "node_modules/emoji-regex": { + "version": "9.2.2", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", + "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==" + }, + "node_modules/enhanced-resolve": { + "version": "5.10.0", + "resolved": 
"https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.10.0.tgz", + "integrity": "sha512-T0yTFjdpldGY8PmuXXR0PyQ1ufZpEGiHVrp7zHKB7jdR4qlmZHhONVM5AQOAWXuF/w3dnHbEQVrNptJgt7F+cQ==", + "dev": true, + "dependencies": { + "graceful-fs": "^4.2.4", + "tapable": "^2.2.0" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/enquirer": { + "version": "2.3.6", + "resolved": "https://registry.npmjs.org/enquirer/-/enquirer-2.3.6.tgz", + "integrity": "sha512-yjNnPr315/FjS4zIsUxYguYUPP2e1NK4d7E7ZOLiyYCcbFBiTMyID+2wvm2w6+pZ/odMA7cRkjhsPbltwBOrLg==", + "dependencies": { + "ansi-colors": "^4.1.1" + }, + "engines": { + "node": ">=8.6" + } + }, + "node_modules/es-abstract": { + "version": "1.20.3", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.20.3.tgz", + "integrity": "sha512-AyrnaKVpMzljIdwjzrj+LxGmj8ik2LckwXacHqrJJ/jxz6dDDBcZ7I7nlHM0FvEW8MfbWJwOd+yT2XzYW49Frw==", + "dependencies": { + "call-bind": "^1.0.2", + "es-to-primitive": "^1.2.1", + "function-bind": "^1.1.1", + "function.prototype.name": "^1.1.5", + "get-intrinsic": "^1.1.3", + "get-symbol-description": "^1.0.0", + "has": "^1.0.3", + "has-property-descriptors": "^1.0.0", + "has-symbols": "^1.0.3", + "internal-slot": "^1.0.3", + "is-callable": "^1.2.6", + "is-negative-zero": "^2.0.2", + "is-regex": "^1.1.4", + "is-shared-array-buffer": "^1.0.2", + "is-string": "^1.0.7", + "is-weakref": "^1.0.2", + "object-inspect": "^1.12.2", + "object-keys": "^1.1.1", + "object.assign": "^4.1.4", + "regexp.prototype.flags": "^1.4.3", + "safe-regex-test": "^1.0.0", + "string.prototype.trimend": "^1.0.5", + "string.prototype.trimstart": "^1.0.5", + "unbox-primitive": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/es-shim-unscopables": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/es-shim-unscopables/-/es-shim-unscopables-1.0.0.tgz", + "integrity": "sha512-Jm6GPcCdC30eMLbZ2x8z2WuRwAws3zTBBKuusffYVUrNj/GVSUAZ+xKMaUpfNDR5IbyNA5LJbaecoUVbmUcB1w==", + "dependencies": { + "has": "^1.0.3" + } + }, + "node_modules/es-to-primitive": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz", + "integrity": "sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==", + "dependencies": { + "is-callable": "^1.1.4", + "is-date-object": "^1.0.1", + "is-symbol": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/escalade": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", + "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/eslint": { + "version": "7.32.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-7.32.0.tgz", + "integrity": "sha512-VHZ8gX+EDfz+97jGcgyGCyRia/dPOd6Xh9yPv8Bl1+SoaIwD+a/vlrOmGRUyOYu7MwUhc7CxqeaDZU13S4+EpA==", + "dependencies": { + "@babel/code-frame": "7.12.11", + "@eslint/eslintrc": "^0.4.3", + "@humanwhocodes/config-array": 
"^0.5.0", + "ajv": "^6.10.0", + "chalk": "^4.0.0", + "cross-spawn": "^7.0.2", + "debug": "^4.0.1", + "doctrine": "^3.0.0", + "enquirer": "^2.3.5", + "escape-string-regexp": "^4.0.0", + "eslint-scope": "^5.1.1", + "eslint-utils": "^2.1.0", + "eslint-visitor-keys": "^2.0.0", + "espree": "^7.3.1", + "esquery": "^1.4.0", + "esutils": "^2.0.2", + "fast-deep-equal": "^3.1.3", + "file-entry-cache": "^6.0.1", + "functional-red-black-tree": "^1.0.1", + "glob-parent": "^5.1.2", + "globals": "^13.6.0", + "ignore": "^4.0.6", + "import-fresh": "^3.0.0", + "imurmurhash": "^0.1.4", + "is-glob": "^4.0.0", + "js-yaml": "^3.13.1", + "json-stable-stringify-without-jsonify": "^1.0.1", + "levn": "^0.4.1", + "lodash.merge": "^4.6.2", + "minimatch": "^3.0.4", + "natural-compare": "^1.4.0", + "optionator": "^0.9.1", + "progress": "^2.0.0", + "regexpp": "^3.1.0", + "semver": "^7.2.1", + "strip-ansi": "^6.0.0", + "strip-json-comments": "^3.1.0", + "table": "^6.0.9", + "text-table": "^0.2.0", + "v8-compile-cache": "^2.0.3" + }, + "bin": { + "eslint": "bin/eslint.js" + }, + "engines": { + "node": "^10.12.0 || >=12.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/eslint-config-custom": { + "resolved": "packages/eslint-config-custom", + "link": true + }, + "node_modules/eslint-config-next": { + "version": "12.3.1", + "resolved": "https://registry.npmjs.org/eslint-config-next/-/eslint-config-next-12.3.1.tgz", + "integrity": "sha512-EN/xwKPU6jz1G0Qi6Bd/BqMnHLyRAL0VsaQaWA7F3KkjAgZHi4f1uL1JKGWNxdQpHTW/sdGONBd0bzxUka/DJg==", + "dependencies": { + "@next/eslint-plugin-next": "12.3.1", + "@rushstack/eslint-patch": "^1.1.3", + "@typescript-eslint/parser": "^5.21.0", + "eslint-import-resolver-node": "^0.3.6", + "eslint-import-resolver-typescript": "^2.7.1", + "eslint-plugin-import": "^2.26.0", + "eslint-plugin-jsx-a11y": "^6.5.1", + "eslint-plugin-react": "^7.31.7", + "eslint-plugin-react-hooks": "^4.5.0" + }, + "peerDependencies": { + "eslint": "^7.23.0 || ^8.0.0", + "typescript": ">=3.3.1" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/eslint-config-prettier": { + "version": "8.5.0", + "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-8.5.0.tgz", + "integrity": "sha512-obmWKLUNCnhtQRKc+tmnYuQl0pFU1ibYJQ5BGhTVB08bHe9wC8qUeG7c08dj9XX+AuPj1YSGSQIHl1pnDHZR0Q==", + "bin": { + "eslint-config-prettier": "bin/cli.js" + }, + "peerDependencies": { + "eslint": ">=7.0.0" + } + }, + "node_modules/eslint-config-turbo": { + "version": "0.0.4", + "resolved": "https://registry.npmjs.org/eslint-config-turbo/-/eslint-config-turbo-0.0.4.tgz", + "integrity": "sha512-HErPS/wfWkSdV9Yd2dDkhZt3W2B78Ih/aWPFfaHmCMjzPalh+5KxRRGTf8MOBQLCebcWJX0lP1Zvc1rZIHlXGg==", + "dependencies": { + "eslint-plugin-turbo": "0.0.4" + }, + "peerDependencies": { + "eslint": "^7.23.0 || ^8.0.0" + } + }, + "node_modules/eslint-import-resolver-node": { + "version": "0.3.6", + "resolved": "https://registry.npmjs.org/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.6.tgz", + "integrity": "sha512-0En0w03NRVMn9Uiyn8YRPDKvWjxCWkslUEhGNTdGx15RvPJYQ+lbOlqrlNI2vEAs4pDYK4f/HN2TbDmk5TP0iw==", + "dependencies": { + "debug": "^3.2.7", + "resolve": "^1.20.0" + } + }, + "node_modules/eslint-import-resolver-node/node_modules/debug": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + 
"dependencies": { + "ms": "^2.1.1" + } + }, + "node_modules/eslint-import-resolver-typescript": { + "version": "2.7.1", + "resolved": "https://registry.npmjs.org/eslint-import-resolver-typescript/-/eslint-import-resolver-typescript-2.7.1.tgz", + "integrity": "sha512-00UbgGwV8bSgUv34igBDbTOtKhqoRMy9bFjNehT40bXg6585PNIct8HhXZ0SybqB9rWtXj9crcku8ndDn/gIqQ==", + "dependencies": { + "debug": "^4.3.4", + "glob": "^7.2.0", + "is-glob": "^4.0.3", + "resolve": "^1.22.0", + "tsconfig-paths": "^3.14.1" + }, + "engines": { + "node": ">=4" + }, + "peerDependencies": { + "eslint": "*", + "eslint-plugin-import": "*" + } + }, + "node_modules/eslint-import-resolver-typescript/node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/eslint-module-utils": { + "version": "2.7.4", + "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.7.4.tgz", + "integrity": "sha512-j4GT+rqzCoRKHwURX7pddtIPGySnX9Si/cgMI5ztrcqOPtk5dDEeZ34CQVPphnqkJytlc97Vuk05Um2mJ3gEQA==", + "dependencies": { + "debug": "^3.2.7" + }, + "engines": { + "node": ">=4" + }, + "peerDependenciesMeta": { + "eslint": { + "optional": true + } + } + }, + "node_modules/eslint-module-utils/node_modules/debug": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "dependencies": { + "ms": "^2.1.1" + } + }, + "node_modules/eslint-plugin-import": { + "version": "2.26.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.26.0.tgz", + "integrity": "sha512-hYfi3FXaM8WPLf4S1cikh/r4IxnO6zrhZbEGz2b660EJRbuxgpDS5gkCuYgGWg2xxh2rBuIr4Pvhve/7c31koA==", + "dependencies": { + "array-includes": "^3.1.4", + "array.prototype.flat": "^1.2.5", + "debug": "^2.6.9", + "doctrine": "^2.1.0", + "eslint-import-resolver-node": "^0.3.6", + "eslint-module-utils": "^2.7.3", + "has": "^1.0.3", + "is-core-module": "^2.8.1", + "is-glob": "^4.0.3", + "minimatch": "^3.1.2", + "object.values": "^1.1.5", + "resolve": "^1.22.0", + "tsconfig-paths": "^3.14.1" + }, + "engines": { + "node": ">=4" + }, + "peerDependencies": { + "eslint": "^2 || ^3 || ^4 || ^5 || ^6 || ^7.2.0 || ^8" + } + }, + "node_modules/eslint-plugin-import/node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/eslint-plugin-import/node_modules/doctrine": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-2.1.0.tgz", + "integrity": "sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==", + "dependencies": { + "esutils": "^2.0.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/eslint-plugin-import/node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": 
"sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" + }, + "node_modules/eslint-plugin-jsx-a11y": { + "version": "6.6.1", + "resolved": "https://registry.npmjs.org/eslint-plugin-jsx-a11y/-/eslint-plugin-jsx-a11y-6.6.1.tgz", + "integrity": "sha512-sXgFVNHiWffBq23uiS/JaP6eVR622DqwB4yTzKvGZGcPq6/yZ3WmOZfuBks/vHWo9GaFOqC2ZK4i6+C35knx7Q==", + "dependencies": { + "@babel/runtime": "^7.18.9", + "aria-query": "^4.2.2", + "array-includes": "^3.1.5", + "ast-types-flow": "^0.0.7", + "axe-core": "^4.4.3", + "axobject-query": "^2.2.0", + "damerau-levenshtein": "^1.0.8", + "emoji-regex": "^9.2.2", + "has": "^1.0.3", + "jsx-ast-utils": "^3.3.2", + "language-tags": "^1.0.5", + "minimatch": "^3.1.2", + "semver": "^6.3.0" + }, + "engines": { + "node": ">=4.0" + }, + "peerDependencies": { + "eslint": "^3 || ^4 || ^5 || ^6 || ^7 || ^8" + } + }, + "node_modules/eslint-plugin-react": { + "version": "7.31.8", + "resolved": "https://registry.npmjs.org/eslint-plugin-react/-/eslint-plugin-react-7.31.8.tgz", + "integrity": "sha512-5lBTZmgQmARLLSYiwI71tiGVTLUuqXantZM6vlSY39OaDSV0M7+32K5DnLkmFrwTe+Ksz0ffuLUC91RUviVZfw==", + "dependencies": { + "array-includes": "^3.1.5", + "array.prototype.flatmap": "^1.3.0", + "doctrine": "^2.1.0", + "estraverse": "^5.3.0", + "jsx-ast-utils": "^2.4.1 || ^3.0.0", + "minimatch": "^3.1.2", + "object.entries": "^1.1.5", + "object.fromentries": "^2.0.5", + "object.hasown": "^1.1.1", + "object.values": "^1.1.5", + "prop-types": "^15.8.1", + "resolve": "^2.0.0-next.3", + "semver": "^6.3.0", + "string.prototype.matchall": "^4.0.7" + }, + "engines": { + "node": ">=4" + }, + "peerDependencies": { + "eslint": "^3 || ^4 || ^5 || ^6 || ^7 || ^8" + } + }, + "node_modules/eslint-plugin-react-hooks": { + "version": "4.6.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-react-hooks/-/eslint-plugin-react-hooks-4.6.0.tgz", + "integrity": "sha512-oFc7Itz9Qxh2x4gNHStv3BqJq54ExXmfC+a1NjAta66IAN87Wu0R/QArgIS9qKzX3dXKPI9H5crl9QchNMY9+g==", + "engines": { + "node": ">=10" + }, + "peerDependencies": { + "eslint": "^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0 || ^8.0.0-0" + } + }, + "node_modules/eslint-plugin-react/node_modules/doctrine": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-2.1.0.tgz", + "integrity": "sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==", + "dependencies": { + "esutils": "^2.0.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/eslint-plugin-react/node_modules/resolve": { + "version": "2.0.0-next.4", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-2.0.0-next.4.tgz", + "integrity": "sha512-iMDbmAWtfU+MHpxt/I5iWI7cY6YVEZUQ3MBgPQ++XD1PELuJHIl82xBmObyP2KyQmkNB2dsqF7seoQQiAn5yDQ==", + "dependencies": { + "is-core-module": "^2.9.0", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/eslint-plugin-turbo": { + "version": "0.0.4", + "resolved": "https://registry.npmjs.org/eslint-plugin-turbo/-/eslint-plugin-turbo-0.0.4.tgz", + "integrity": "sha512-dfmYE/iPvoJInQq+5E/0mj140y/rYwKtzZkn3uVK8+nvwC5zmWKQ6ehMWrL4bYBkGzSgpOndZM+jOXhPQ2m8Cg==", + "peerDependencies": { + "eslint": "^7.23.0 || ^8.0.0" + } + }, + "node_modules/eslint-scope": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.1.1.tgz", + "integrity": 
"sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw==", + "dependencies": { + "esrecurse": "^4.3.0", + "estraverse": "^4.1.1" + }, + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/eslint-scope/node_modules/estraverse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz", + "integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==", + "engines": { + "node": ">=4.0" + } + }, + "node_modules/eslint-utils": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-2.1.0.tgz", + "integrity": "sha512-w94dQYoauyvlDc43XnGB8lU3Zt713vNChgt4EWwhXAP2XkBvndfxF0AgIqKOOasjPIPzj9JqgwkwbCYD0/V3Zg==", + "dependencies": { + "eslint-visitor-keys": "^1.1.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/mysticatea" + } + }, + "node_modules/eslint-utils/node_modules/eslint-visitor-keys": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz", + "integrity": "sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ==", + "engines": { + "node": ">=4" + } + }, + "node_modules/eslint-visitor-keys": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-2.1.0.tgz", + "integrity": "sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw==", + "engines": { + "node": ">=10" + } + }, + "node_modules/eslint/node_modules/@babel/code-frame": { + "version": "7.12.11", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.12.11.tgz", + "integrity": "sha512-Zt1yodBx1UcyiePMSkWnU4hPqhwq7hGi2nFL1LeA3EUl+q2LQx16MISgJ0+z7dnmgvP9QtIleuETGOiOH1RcIw==", + "dependencies": { + "@babel/highlight": "^7.10.4" + } + }, + "node_modules/eslint/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/eslint/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/eslint/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/eslint/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" + }, + "node_modules/eslint/node_modules/escape-string-regexp": { + "version": "4.0.0", + 
"resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/eslint/node_modules/globals": { + "version": "13.17.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-13.17.0.tgz", + "integrity": "sha512-1C+6nQRb1GwGMKm2dH/E7enFAMxGTmGI7/dEdhy/DNelv85w9B72t3uc5frtMNXIbzrarJJ/lTCjcaZwbLJmyw==", + "dependencies": { + "type-fest": "^0.20.2" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/eslint/node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "engines": { + "node": ">=8" + } + }, + "node_modules/eslint/node_modules/semver": { + "version": "7.3.7", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.7.tgz", + "integrity": "sha512-QlYTucUYOews+WeEujDoEGziz4K6c47V/Bd+LjSSYcA94p+DmINdf7ncaUinThfvZyu13lN9OY1XDxt8C0Tw0g==", + "dependencies": { + "lru-cache": "^6.0.0" + }, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/eslint/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/espree": { + "version": "7.3.1", + "resolved": "https://registry.npmjs.org/espree/-/espree-7.3.1.tgz", + "integrity": "sha512-v3JCNCE64umkFpmkFGqzVKsOT0tN1Zr+ueqLZfpV1Ob8e+CEgPWa+OxCoGH3tnhimMKIaBm4m/vaRpJ/krRz2g==", + "dependencies": { + "acorn": "^7.4.0", + "acorn-jsx": "^5.3.1", + "eslint-visitor-keys": "^1.3.0" + }, + "engines": { + "node": "^10.12.0 || >=12.0.0" + } + }, + "node_modules/espree/node_modules/eslint-visitor-keys": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz", + "integrity": "sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ==", + "engines": { + "node": ">=4" + } + }, + "node_modules/esprima": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", + "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", + "bin": { + "esparse": "bin/esparse.js", + "esvalidate": "bin/esvalidate.js" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/esquery": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.4.0.tgz", + "integrity": "sha512-cCDispWt5vHHtwMY2YrAQ4ibFkAL8RbH5YGBnZBc90MolvvfkkQcJro/aZiAQUlQ3qgrYS6D6v8Gc5G5CQsc9w==", + "dependencies": { + "estraverse": "^5.1.0" + }, + "engines": { + "node": ">=0.10" + } + }, + "node_modules/esrecurse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", + "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", + "dependencies": { + "estraverse": "^5.2.0" + }, + "engines": { + "node": ">=4.0" + } + }, + 
"node_modules/estraverse": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", + "engines": { + "node": ">=4.0" + } + }, + "node_modules/esutils": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", + "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/fast-deep-equal": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==" + }, + "node_modules/fast-glob": { + "version": "3.2.12", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.2.12.tgz", + "integrity": "sha512-DVj4CQIYYow0BlaelwK1pHl5n5cRSJfM60UA0zK891sVInoPri2Ekj7+e1CT3/3qxXenpI+nBBmQAcJPJgaj4w==", + "dependencies": { + "@nodelib/fs.stat": "^2.0.2", + "@nodelib/fs.walk": "^1.2.3", + "glob-parent": "^5.1.2", + "merge2": "^1.3.0", + "micromatch": "^4.0.4" + }, + "engines": { + "node": ">=8.6.0" + } + }, + "node_modules/fast-json-stable-stringify": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==" + }, + "node_modules/fast-levenshtein": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", + "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==" + }, + "node_modules/fastq": { + "version": "1.13.0", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.13.0.tgz", + "integrity": "sha512-YpkpUnK8od0o1hmeSc7UUs/eB/vIPWJYjKck2QKIzAf71Vm1AAQ3EbuZB3g2JIy+pg+ERD0vqI79KyZiB2e2Nw==", + "dependencies": { + "reusify": "^1.0.4" + } + }, + "node_modules/file-entry-cache": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz", + "integrity": "sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==", + "dependencies": { + "flat-cache": "^3.0.4" + }, + "engines": { + "node": "^10.12.0 || >=12.0.0" + } + }, + "node_modules/fill-range": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", + "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/flat-cache": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-3.0.4.tgz", + "integrity": "sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg==", + "dependencies": { + "flatted": "^3.1.0", + "rimraf": "^3.0.2" + }, + "engines": { + "node": "^10.12.0 || >=12.0.0" + } + }, + "node_modules/flatted": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.2.7.tgz", + "integrity": "sha512-5nqDSxl8nn5BSNxyR3n4I6eDmbolI6WT+QqR547RwxQapgjQBmtktdP+HTBb/a/zLsbzERTONyUB5pefh5TtjQ==" + }, + "node_modules/fs.realpath": { + "version": "1.0.0", + "resolved": 
"https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==" + }, + "node_modules/function-bind": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", + "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==" + }, + "node_modules/function.prototype.name": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/function.prototype.name/-/function.prototype.name-1.1.5.tgz", + "integrity": "sha512-uN7m/BzVKQnCUF/iW8jYea67v++2u7m5UgENbHRtdDVclOUP+FMPlCNdmk0h/ysGyo2tavMJEDqJAkJdRa1vMA==", + "dependencies": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.3", + "es-abstract": "^1.19.0", + "functions-have-names": "^1.2.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/functional-red-black-tree": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz", + "integrity": "sha512-dsKNQNdj6xA3T+QlADDA7mOSlX0qiMINjn0cgr+eGHGsbSHzTabcIogz2+p/iqP1Xs6EP/sS2SbqH+brGTbq0g==" + }, + "node_modules/functions-have-names": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/functions-have-names/-/functions-have-names-1.2.3.tgz", + "integrity": "sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ==", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/gensync": { + "version": "1.0.0-beta.2", + "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz", + "integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==", + "dev": true, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/get-intrinsic": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.1.3.tgz", + "integrity": "sha512-QJVz1Tj7MS099PevUG5jvnt9tSkXN8K14dxQlikJuPt4uD9hHAHjLyLBiLR5zELelBdD9QNRAXZzsJx0WaDL9A==", + "dependencies": { + "function-bind": "^1.1.1", + "has": "^1.0.3", + "has-symbols": "^1.0.3" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-symbol-description": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.0.0.tgz", + "integrity": "sha512-2EmdH1YvIQiZpltCNgkuiUnyukzxM/R6NDJX31Ke3BG1Nq5b0S2PhX59UKi9vZpPDQVdqn+1IcaAwnzTT5vCjw==", + "dependencies": { + "call-bind": "^1.0.2", + "get-intrinsic": "^1.1.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/glob": { + "version": "7.1.7", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.7.tgz", + "integrity": "sha512-OvD9ENzPLbegENnYP5UUfJIirTg4+XwMWGaQfQTY0JenxNvvIKP3U3/tAQSPIu/lHxXYSZmpXlUHeqAIdKzBLQ==", + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": 
"sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/globals": { + "version": "11.12.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", + "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/globby": { + "version": "11.1.0", + "resolved": "https://registry.npmjs.org/globby/-/globby-11.1.0.tgz", + "integrity": "sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==", + "dependencies": { + "array-union": "^2.1.0", + "dir-glob": "^3.0.1", + "fast-glob": "^3.2.9", + "ignore": "^5.2.0", + "merge2": "^1.4.1", + "slash": "^3.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/globby/node_modules/ignore": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.2.0.tgz", + "integrity": "sha512-CmxgYGiEPCLhfLnpPp1MoRmifwEIOgjcHXxOBjv7mY96c+eWScsOP9c112ZyLdWHi0FxHjI+4uVhKYp/gcdRmQ==", + "engines": { + "node": ">= 4" + } + }, + "node_modules/graceful-fs": { + "version": "4.2.10", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.10.tgz", + "integrity": "sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA==", + "dev": true + }, + "node_modules/has": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", + "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", + "dependencies": { + "function-bind": "^1.1.1" + }, + "engines": { + "node": ">= 0.4.0" + } + }, + "node_modules/has-bigints": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-bigints/-/has-bigints-1.0.2.tgz", + "integrity": "sha512-tSvCKtBr9lkF0Ex0aQiP9N+OpV4zi2r/Nee5VkRDbaqv35RLYMzbwQfFSZZH0kR+Rd6302UJZ2p/bJCEoR3VoQ==", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-flag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", + "engines": { + "node": ">=4" + } + }, + "node_modules/has-property-descriptors": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.0.tgz", + "integrity": "sha512-62DVLZGoiEBDHQyqG4w9xCuZ7eJEwNmJRWw2VY84Oedb7WFcA27fiEVe8oUQx9hAUJ4ekurquucTGwsyO1XGdQ==", + "dependencies": { + "get-intrinsic": "^1.1.1" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-symbols": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz", + "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-tostringtag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.0.tgz", + "integrity": "sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ==", + "dependencies": { + "has-symbols": "^1.0.2" + }, 
+ "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/ignore": { + "version": "4.0.6", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-4.0.6.tgz", + "integrity": "sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg==", + "engines": { + "node": ">= 4" + } + }, + "node_modules/import-fresh": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz", + "integrity": "sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==", + "dependencies": { + "parent-module": "^1.0.0", + "resolve-from": "^4.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/imurmurhash": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", + "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", + "engines": { + "node": ">=0.8.19" + } + }, + "node_modules/inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" + }, + "node_modules/internal-slot": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.3.tgz", + "integrity": "sha512-O0DB1JC/sPyZl7cIo78n5dR7eUSwwpYPiXRhTzNxZVAMUuB8vlnRFyLxdrVToks6XPLVnFfbzaVd5WLjhgg+vA==", + "dependencies": { + "get-intrinsic": "^1.1.0", + "has": "^1.0.3", + "side-channel": "^1.0.4" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/is-bigint": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/is-bigint/-/is-bigint-1.0.4.tgz", + "integrity": "sha512-zB9CruMamjym81i2JZ3UMn54PKGsQzsJeo6xvN3HJJ4CAsQNB6iRutp2To77OfCNuoxspsIhzaPoO1zyCEhFOg==", + "dependencies": { + "has-bigints": "^1.0.1" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-boolean-object": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.1.2.tgz", + "integrity": "sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA==", + "dependencies": { + "call-bind": "^1.0.2", + "has-tostringtag": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-callable": { + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.7.tgz", + "integrity": "sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA==", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-core-module": { + "version": "2.10.0", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.10.0.tgz", + "integrity": "sha512-Erxj2n/LDAZ7H8WNJXd9tw38GYM3dv8rk8Zcs+jJuxYTW7sozH+SS8NtrSjVL1/vpLvWi1hxy96IzjJ3EHTJJg==", + "dependencies": { + "has": "^1.0.3" + }, + "funding": 
{ + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-date-object": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.5.tgz", + "integrity": "sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ==", + "dependencies": { + "has-tostringtag": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "engines": { + "node": ">=8" + } + }, + "node_modules/is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dependencies": { + "is-extglob": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-negative-zero": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.2.tgz", + "integrity": "sha512-dqJvarLawXsFbNDeJW7zAz8ItJ9cd28YufuuFzh0G8pNHjJMnY08Dv7sYX2uF5UpQOwieAeOExEYAWWfu7ZZUA==", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/is-number-object": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.0.7.tgz", + "integrity": "sha512-k1U0IRzLMo7ZlYIfzRu23Oh6MiIFasgpb9X76eqfFZAqwH44UI4KTBvBYIZ1dSL9ZzChTB9ShHfLkR4pdW5krQ==", + "dependencies": { + "has-tostringtag": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-regex": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz", + "integrity": "sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg==", + "dependencies": { + "call-bind": "^1.0.2", + "has-tostringtag": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-shared-array-buffer": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.2.tgz", + "integrity": "sha512-sqN2UDu1/0y6uvXyStCOzyhAjCSlHceFoMKJW8W9EU9cvic/QdsZ0kEU93HEy3IUEFZIiH/3w+AH/UQbPHNdhA==", + "dependencies": { + "call-bind": "^1.0.2" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-string": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.7.tgz", + "integrity": 
"sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg==", + "dependencies": { + "has-tostringtag": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-symbol": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.4.tgz", + "integrity": "sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg==", + "dependencies": { + "has-symbols": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-weakref": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-weakref/-/is-weakref-1.0.2.tgz", + "integrity": "sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ==", + "dependencies": { + "call-bind": "^1.0.2" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==" + }, + "node_modules/js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==" + }, + "node_modules/js-yaml": { + "version": "3.14.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz", + "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==", + "dependencies": { + "argparse": "^1.0.7", + "esprima": "^4.0.0" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/jsesc": { + "version": "2.5.2", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-2.5.2.tgz", + "integrity": "sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==", + "dev": true, + "bin": { + "jsesc": "bin/jsesc" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==" + }, + "node_modules/json-stable-stringify-without-jsonify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", + "integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==" + }, + "node_modules/json5": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.1.tgz", + "integrity": "sha512-1hqLFMSrGHRHxav9q9gNjJ5EXznIxGVO09xQRrwplcS8qs28pZ8s8hupZAmqDwZUmVZ2Qb2jnyPOWcDH8m8dlA==", + "dev": true, + "bin": { + "json5": "lib/cli.js" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/jsx-ast-utils": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/jsx-ast-utils/-/jsx-ast-utils-3.3.3.tgz", + "integrity": "sha512-fYQHZTZ8jSfmWZ0iyzfwiU4WDX4HpHbMCZ3gPlWYiCl3BoeOTsqKBqnTVfH2rYT7eP5c3sVbeSPHnnJOaTrWiw==", + "dependencies": { + "array-includes": "^3.1.5", + "object.assign": "^4.1.3" + }, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/language-subtag-registry": { + 
"version": "0.3.22", + "resolved": "https://registry.npmjs.org/language-subtag-registry/-/language-subtag-registry-0.3.22.tgz", + "integrity": "sha512-tN0MCzyWnoz/4nHS6uxdlFWoUZT7ABptwKPQ52Ea7URk6vll88bWBVhodtnlfEuCcKWNGoc+uGbw1cwa9IKh/w==" + }, + "node_modules/language-tags": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/language-tags/-/language-tags-1.0.5.tgz", + "integrity": "sha512-qJhlO9cGXi6hBGKoxEG/sKZDAHD5Hnu9Hs4WbOY3pCWXDhw0N8x1NenNzm2EnNLkLkk7J2SdxAkDSbb6ftT+UQ==", + "dependencies": { + "language-subtag-registry": "~0.3.2" + } + }, + "node_modules/levn": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", + "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==", + "dependencies": { + "prelude-ls": "^1.2.1", + "type-check": "~0.4.0" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/lodash": { + "version": "3.10.1", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-3.10.1.tgz", + "integrity": "sha512-9mDDwqVIma6OZX79ZlDACZl8sBm0TEnkf99zV3iMA4GzkIT/9hiqP5mY0HoT1iNLCrKc/R1HByV+yJfRWVJryQ==" + }, + "node_modules/lodash.merge": { + "version": "4.6.2", + "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", + "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==" + }, + "node_modules/lodash.truncate": { + "version": "4.4.2", + "resolved": "https://registry.npmjs.org/lodash.truncate/-/lodash.truncate-4.4.2.tgz", + "integrity": "sha512-jttmRe7bRse52OsWIMDLaXxWqRAmtIUccAQ3garviCqJjafXOfNMO0yMfNpdD6zbGaTU0P5Nz7e7gAT6cKmJRw==" + }, + "node_modules/loose-envify": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz", + "integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==", + "dependencies": { + "js-tokens": "^3.0.0 || ^4.0.0" + }, + "bin": { + "loose-envify": "cli.js" + } + }, + "node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/merge2": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", + "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", + "engines": { + "node": ">= 8" + } + }, + "node_modules/micromatch": { + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz", + "integrity": "sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==", + "dependencies": { + "braces": "^3.0.2", + "picomatch": "^2.3.1" + }, + "engines": { + "node": ">=8.6" + } + }, + "node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/minimist": { + "version": "1.2.6", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.6.tgz", + "integrity": 
"sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==" + }, + "node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + }, + "node_modules/nanoid": { + "version": "3.3.4", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.4.tgz", + "integrity": "sha512-MqBkQh/OHTS2egovRtLk45wEyNXwF+cokD+1YPf9u5VfJiRdAiRwB2froX5Co9Rh20xs4siNPm8naNotSD6RBw==", + "bin": { + "nanoid": "bin/nanoid.cjs" + }, + "engines": { + "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" + } + }, + "node_modules/natural-compare": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", + "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==" + }, + "node_modules/next": { + "version": "12.3.0", + "resolved": "https://registry.npmjs.org/next/-/next-12.3.0.tgz", + "integrity": "sha512-GpzI6me9V1+XYtfK0Ae9WD0mKqHyzQlGq1xH1rzNIYMASo4Tkl4rTe9jSqtBpXFhOS33KohXs9ZY38Akkhdciw==", + "dependencies": { + "@next/env": "12.3.0", + "@swc/helpers": "0.4.11", + "caniuse-lite": "^1.0.30001332", + "postcss": "8.4.14", + "styled-jsx": "5.0.6", + "use-sync-external-store": "1.2.0" + }, + "bin": { + "next": "dist/bin/next" + }, + "engines": { + "node": ">=12.22.0" + }, + "optionalDependencies": { + "@next/swc-android-arm-eabi": "12.3.0", + "@next/swc-android-arm64": "12.3.0", + "@next/swc-darwin-arm64": "12.3.0", + "@next/swc-darwin-x64": "12.3.0", + "@next/swc-freebsd-x64": "12.3.0", + "@next/swc-linux-arm-gnueabihf": "12.3.0", + "@next/swc-linux-arm64-gnu": "12.3.0", + "@next/swc-linux-arm64-musl": "12.3.0", + "@next/swc-linux-x64-gnu": "12.3.0", + "@next/swc-linux-x64-musl": "12.3.0", + "@next/swc-win32-arm64-msvc": "12.3.0", + "@next/swc-win32-ia32-msvc": "12.3.0", + "@next/swc-win32-x64-msvc": "12.3.0" + }, + "peerDependencies": { + "fibers": ">= 3.1.0", + "node-sass": "^6.0.0 || ^7.0.0", + "react": "^17.0.2 || ^18.0.0-0", + "react-dom": "^17.0.2 || ^18.0.0-0", + "sass": "^1.3.0" + }, + "peerDependenciesMeta": { + "fibers": { + "optional": true + }, + "node-sass": { + "optional": true + }, + "sass": { + "optional": true + } + } + }, + "node_modules/next-transpile-modules": { + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/next-transpile-modules/-/next-transpile-modules-9.0.0.tgz", + "integrity": "sha512-VCNFOazIAnXn1hvgYYSTYMnoWgKgwlYh4lm1pKbSfiB3kj5ZYLcKVhfh3jkPOg1cnd9DP+pte9yCUocdPEUBTQ==", + "dev": true, + "dependencies": { + "enhanced-resolve": "^5.7.0", + "escalade": "^3.1.1" + } + }, + "node_modules/node-releases": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.6.tgz", + "integrity": "sha512-PiVXnNuFm5+iYkLBNeq5211hvO38y63T0i2KKh2KnUs3RpzJ+JtODFjkD8yjLwnDkTYF1eKXheUwdssR+NRZdg==", + "dev": true + }, + "node_modules/object-assign": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", + "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/object-inspect": { + "version": "1.12.2", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.2.tgz", + "integrity": "sha512-z+cPxW0QGUp0mcqcsgQyLVRDoXFQbXOwBaqyF7VIgI4TWNQsDHrBpUQslRmIfAoYWdYzs6UlKJtB2XJpTaNSpQ==", + 
"funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/object-keys": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz", + "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/object.assign": { + "version": "4.1.4", + "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.4.tgz", + "integrity": "sha512-1mxKf0e58bvyjSCtKYY4sRe9itRk3PJpquJOjeIkz885CczcI4IvJJDLPS72oowuSh+pBxUFROpX+TU++hxhZQ==", + "dependencies": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.4", + "has-symbols": "^1.0.3", + "object-keys": "^1.1.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/object.entries": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/object.entries/-/object.entries-1.1.5.tgz", + "integrity": "sha512-TyxmjUoZggd4OrrU1W66FMDG6CuqJxsFvymeyXI51+vQLN67zYfZseptRge703kKQdo4uccgAKebXFcRCzk4+g==", + "dependencies": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.3", + "es-abstract": "^1.19.1" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/object.fromentries": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/object.fromentries/-/object.fromentries-2.0.5.tgz", + "integrity": "sha512-CAyG5mWQRRiBU57Re4FKoTBjXfDoNwdFVH2Y1tS9PqCsfUTymAohOkEMSG3aRNKmv4lV3O7p1et7c187q6bynw==", + "dependencies": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.3", + "es-abstract": "^1.19.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/object.hasown": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/object.hasown/-/object.hasown-1.1.1.tgz", + "integrity": "sha512-LYLe4tivNQzq4JdaWW6WO3HMZZJWzkkH8fnI6EebWl0VZth2wL2Lovm74ep2/gZzlaTdV62JZHEqHQ2yVn8Q/A==", + "dependencies": { + "define-properties": "^1.1.4", + "es-abstract": "^1.19.5" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/object.values": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.1.5.tgz", + "integrity": "sha512-QUZRW0ilQ3PnPpbNtgdNV1PDbEqLIiSFB3l+EnGtBQ/8SUTLj1PZwtQHABZtLgwpJZTSZhuGLOGk57Drx2IvYg==", + "dependencies": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.3", + "es-abstract": "^1.19.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/optionator": { + "version": "0.9.1", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.1.tgz", + "integrity": "sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw==", + "dependencies": { + "deep-is": "^0.1.3", + "fast-levenshtein": "^2.0.6", + "levn": "^0.4.1", + "prelude-ls": "^1.2.1", + "type-check": "^0.4.0", + "word-wrap": "^1.2.3" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/parent-module": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", + "integrity": 
"sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", + "dependencies": { + "callsites": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "engines": { + "node": ">=8" + } + }, + "node_modules/path-parse": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==" + }, + "node_modules/path-type": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", + "integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==", + "engines": { + "node": ">=8" + } + }, + "node_modules/picocolors": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz", + "integrity": "sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==" + }, + "node_modules/picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/postcss": { + "version": "8.4.14", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.14.tgz", + "integrity": "sha512-E398TUmfAYFPBSdzgeieK2Y1+1cpdxJx8yXbK/m57nRhKSmk1GB2tO4lbLBtlkfPQTDKfe4Xqv1ASWPpayPEig==", + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/postcss" + } + ], + "dependencies": { + "nanoid": "^3.3.4", + "picocolors": "^1.0.0", + "source-map-js": "^1.0.2" + }, + "engines": { + "node": "^10 || ^12 || >=14" + } + }, + "node_modules/prelude-ls": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", + "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==", + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/prettier": { + "version": "2.7.1", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.7.1.tgz", + "integrity": "sha512-ujppO+MkdPqoVINuDFDRLClm7D78qbDt0/NR+wp5FqEZOoTNAjPHWj17QRhu7geIHJfcNhRk1XVQmF8Bp3ye+g==", + "dev": true, + "bin": { + "prettier": "bin-prettier.js" + }, + "engines": { + "node": ">=10.13.0" + }, + "funding": { + "url": "https://github.com/prettier/prettier?sponsor=1" + } + }, + "node_modules/progress": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/progress/-/progress-2.0.3.tgz", + "integrity": "sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA==", + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/prop-types": { + "version": "15.8.1", + 
"resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.8.1.tgz", + "integrity": "sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg==", + "dependencies": { + "loose-envify": "^1.4.0", + "object-assign": "^4.1.1", + "react-is": "^16.13.1" + } + }, + "node_modules/punycode": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz", + "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==", + "engines": { + "node": ">=6" + } + }, + "node_modules/queue-microtask": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", + "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/react": { + "version": "18.2.0", + "resolved": "https://registry.npmjs.org/react/-/react-18.2.0.tgz", + "integrity": "sha512-/3IjMdb2L9QbBdWiW5e3P2/npwMBaU9mHCSCUzNln0ZCYbcfTsGbTJrU/kGemdH2IWmB2ioZ+zkxtmq6g09fGQ==", + "dependencies": { + "loose-envify": "^1.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/react-dom": { + "version": "18.2.0", + "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-18.2.0.tgz", + "integrity": "sha512-6IMTriUmvsjHUjNtEDudZfuDQUoWXVxKHhlEGSk81n4YFS+r/Kl99wXiwlVXtPBtJenozv2P+hxDsw9eA7Xo6g==", + "dependencies": { + "loose-envify": "^1.1.0", + "scheduler": "^0.23.0" + }, + "peerDependencies": { + "react": "^18.2.0" + } + }, + "node_modules/react-is": { + "version": "16.13.1", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-16.13.1.tgz", + "integrity": "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==" + }, + "node_modules/regenerator-runtime": { + "version": "0.13.9", + "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.9.tgz", + "integrity": "sha512-p3VT+cOEgxFsRRA9X4lkI1E+k2/CtnKtU4gcxyaCUreilL/vqI6CdZ3wxVUx3UOUg+gnUOQQcRI7BmSI656MYA==" + }, + "node_modules/regexp.prototype.flags": { + "version": "1.4.3", + "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.4.3.tgz", + "integrity": "sha512-fjggEOO3slI6Wvgjwflkc4NFRCTZAu5CnNfBd5qOMYhWdn67nJBBu34/TkD++eeFmd8C9r9jfXJ27+nSiRkSUA==", + "dependencies": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.3", + "functions-have-names": "^1.2.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/regexpp": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-3.2.0.tgz", + "integrity": "sha512-pq2bWo9mVD43nbts2wGv17XLiNLya+GklZ8kaDLV2Z08gDCsGpnKn9BFMepvWuHCbyVvY7J5o5+BVvoQbmlJLg==", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/mysticatea" + } + }, + "node_modules/require-from-string": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz", + "integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/resolve": { + "version": "1.22.1", + "resolved": 
"https://registry.npmjs.org/resolve/-/resolve-1.22.1.tgz", + "integrity": "sha512-nBpuuYuY5jFsli/JIs1oldw6fOQCBioohqWZg/2hiaOybXOft4lonv85uDOKXdf8rhyK159cxU5cDcK/NKk8zw==", + "dependencies": { + "is-core-module": "^2.9.0", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/resolve-from": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", + "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", + "engines": { + "node": ">=4" + } + }, + "node_modules/reusify": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", + "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==", + "engines": { + "iojs": ">=1.0.0", + "node": ">=0.10.0" + } + }, + "node_modules/rimraf": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", + "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", + "dependencies": { + "glob": "^7.1.3" + }, + "bin": { + "rimraf": "bin.js" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/run-parallel": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", + "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "dependencies": { + "queue-microtask": "^1.2.2" + } + }, + "node_modules/safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", + "dev": true + }, + "node_modules/safe-regex-test": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/safe-regex-test/-/safe-regex-test-1.0.0.tgz", + "integrity": "sha512-JBUUzyOgEwXQY1NuPtvcj/qcBDbDmEvWufhlnXZIm75DEHp+afM1r1ujJpJsV/gSM4t59tpDyPi1sd6ZaPFfsA==", + "dependencies": { + "call-bind": "^1.0.2", + "get-intrinsic": "^1.1.3", + "is-regex": "^1.1.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/scheduler": { + "version": "0.23.0", + "resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.23.0.tgz", + "integrity": "sha512-CtuThmgHNg7zIZWAXi3AsyIzA3n4xx7aNyjwC2VJldO2LMVDhFK+63xGqq6CsJH4rTAt6/M+N4GhZiDYPx9eUw==", + "dependencies": { + "loose-envify": "^1.1.0" + } + }, + "node_modules/semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": 
">=8" + } + }, + "node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "engines": { + "node": ">=8" + } + }, + "node_modules/side-channel": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.4.tgz", + "integrity": "sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==", + "dependencies": { + "call-bind": "^1.0.0", + "get-intrinsic": "^1.0.2", + "object-inspect": "^1.9.0" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "engines": { + "node": ">=8" + } + }, + "node_modules/slice-ansi": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-4.0.0.tgz", + "integrity": "sha512-qMCMfhY040cVHT43K9BFygqYbUPFZKHOg7K73mtTWJRb8pyP3fzf4Ixd5SzdEJQ6MRUg/WBnOLxghZtKKurENQ==", + "dependencies": { + "ansi-styles": "^4.0.0", + "astral-regex": "^2.0.0", + "is-fullwidth-code-point": "^3.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/slice-ansi?sponsor=1" + } + }, + "node_modules/slice-ansi/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/slice-ansi/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/slice-ansi/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" + }, + "node_modules/source-map-js": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.0.2.tgz", + "integrity": "sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/sprintf-js": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", + "integrity": "sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==" + }, + "node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + 
"node_modules/string-width/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" + }, + "node_modules/string.prototype.matchall": { + "version": "4.0.7", + "resolved": "https://registry.npmjs.org/string.prototype.matchall/-/string.prototype.matchall-4.0.7.tgz", + "integrity": "sha512-f48okCX7JiwVi1NXCVWcFnZgADDC/n2vePlQ/KUCNqCikLLilQvwjMO8+BHVKvgzH0JB0J9LEPgxOGT02RoETg==", + "dependencies": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.3", + "es-abstract": "^1.19.1", + "get-intrinsic": "^1.1.1", + "has-symbols": "^1.0.3", + "internal-slot": "^1.0.3", + "regexp.prototype.flags": "^1.4.1", + "side-channel": "^1.0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/string.prototype.trimend": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.5.tgz", + "integrity": "sha512-I7RGvmjV4pJ7O3kdf+LXFpVfdNOxtCW/2C8f6jNiW4+PQchwxkCDzlk1/7p+Wl4bqFIZeF47qAHXLuHHWKAxog==", + "dependencies": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.4", + "es-abstract": "^1.19.5" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/string.prototype.trimstart": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.5.tgz", + "integrity": "sha512-THx16TJCGlsN0o6dl2o6ncWUsdgnLRSA23rRE5pyGBw/mLr3Ej/R2LaqCtgP8VNMGZsvMWnf9ooZPyY2bHvUFg==", + "dependencies": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.4", + "es-abstract": "^1.19.5" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-bom": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz", + "integrity": "sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA==", + "engines": { + "node": ">=4" + } + }, + "node_modules/strip-json-comments": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", + "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/styled-jsx": { + "version": "5.0.6", + "resolved": "https://registry.npmjs.org/styled-jsx/-/styled-jsx-5.0.6.tgz", + "integrity": "sha512-xOeROtkK5MGMDimBQ3J6iPId8q0t/BDoG5XN6oKkZClVz9ISF/hihN8OCn2LggMU6N32aXnrXBdn3auSqNS9fA==", + "engines": { + "node": ">= 12.0.0" + }, + "peerDependencies": { + "react": ">= 16.8.0 || 17.x.x || ^18.0.0-0" + }, + "peerDependenciesMeta": { + "@babel/core": { + "optional": true + }, + "babel-plugin-macros": { + "optional": true + } + } + }, + "node_modules/supports-color": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "integrity": 
"sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "dependencies": { + "has-flag": "^3.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/table": { + "version": "6.8.0", + "resolved": "https://registry.npmjs.org/table/-/table-6.8.0.tgz", + "integrity": "sha512-s/fitrbVeEyHKFa7mFdkuQMWlH1Wgw/yEXMt5xACT4ZpzWFluehAxRtUUQKPuWhaLAWhFcVx6w3oC8VKaUfPGA==", + "dependencies": { + "ajv": "^8.0.1", + "lodash.truncate": "^4.4.2", + "slice-ansi": "^4.0.0", + "string-width": "^4.2.3", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=10.0.0" + } + }, + "node_modules/table/node_modules/ajv": { + "version": "8.11.0", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.11.0.tgz", + "integrity": "sha512-wGgprdCvMalC0BztXvitD2hC04YffAvtsUn93JbGXYLAtCUO4xd17mCCZQxUOItiBwZvJScWo8NIvQMQ71rdpg==", + "dependencies": { + "fast-deep-equal": "^3.1.1", + "json-schema-traverse": "^1.0.0", + "require-from-string": "^2.0.2", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/table/node_modules/json-schema-traverse": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", + "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==" + }, + "node_modules/tapable": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/tapable/-/tapable-2.2.1.tgz", + "integrity": "sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/text-table": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", + "integrity": "sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==" + }, + "node_modules/to-fast-properties": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz", + "integrity": "sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/tsconfig": { + "resolved": "packages/tsconfig", + "link": true + }, + "node_modules/tsconfig-paths": { + "version": "3.14.1", + "resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.14.1.tgz", + "integrity": "sha512-fxDhWnFSLt3VuTwtvJt5fpwxBHg5AdKWMsgcPOOIilyjymcYVZoCQF8fvFRezCNfblEXmi+PcM1eYHeOAgXCOQ==", + "dependencies": { + "@types/json5": "^0.0.29", + "json5": "^1.0.1", + "minimist": "^1.2.6", + "strip-bom": "^3.0.0" + } + }, + "node_modules/tsconfig-paths/node_modules/json5": { 
+ "version": "1.0.1", + "resolved": "https://registry.npmjs.org/json5/-/json5-1.0.1.tgz", + "integrity": "sha512-aKS4WQjPenRxiQsC93MNfjx+nbF4PAdYzmd/1JIj8HYzqfbu86beTuNgXDzPknWk0n0uARlyewZo4s++ES36Ow==", + "dependencies": { + "minimist": "^1.2.0" + }, + "bin": { + "json5": "lib/cli.js" + } + }, + "node_modules/tslib": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz", + "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==" + }, + "node_modules/tsutils": { + "version": "3.21.0", + "resolved": "https://registry.npmjs.org/tsutils/-/tsutils-3.21.0.tgz", + "integrity": "sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA==", + "dependencies": { + "tslib": "^1.8.1" + }, + "engines": { + "node": ">= 6" + }, + "peerDependencies": { + "typescript": ">=2.8.0 || >= 3.2.0-dev || >= 3.3.0-dev || >= 3.4.0-dev || >= 3.5.0-dev || >= 3.6.0-dev || >= 3.6.0-beta || >= 3.7.0-dev || >= 3.7.0-beta" + } + }, + "node_modules/tsutils/node_modules/tslib": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", + "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==" + }, + "node_modules/turbo": { + "version": "1.5.5", + "resolved": "https://registry.npmjs.org/turbo/-/turbo-1.5.5.tgz", + "integrity": "sha512-PVQSDl0STC9WXIyHcYUWs9gXsf8JjQig/FuHfuB8N6+XlgCGB3mPbfMEE6zrChGz2hufH4/guKRX1XJuNL6XTA==", + "dev": true, + "hasInstallScript": true, + "bin": { + "turbo": "bin/turbo" + }, + "optionalDependencies": { + "turbo-darwin-64": "1.5.5", + "turbo-darwin-arm64": "1.5.5", + "turbo-linux-64": "1.5.5", + "turbo-linux-arm64": "1.5.5", + "turbo-windows-64": "1.5.5", + "turbo-windows-arm64": "1.5.5" + } + }, + "node_modules/turbo-darwin-64": { + "version": "1.5.5", + "resolved": "https://registry.npmjs.org/turbo-darwin-64/-/turbo-darwin-64-1.5.5.tgz", + "integrity": "sha512-HvEn6P2B+NXDekq9LRpRgUjcT9/oygLTcK47U0qsAJZXRBSq/2hvD7lx4nAwgY/4W3rhYJeWtHTzbhoN6BXqGQ==", + "cpu": ["x64"], + "dev": true, + "optional": true, + "os": ["darwin"] + }, + "node_modules/turbo-darwin-arm64": { + "version": "1.5.5", + "resolved": "https://registry.npmjs.org/turbo-darwin-arm64/-/turbo-darwin-arm64-1.5.5.tgz", + "integrity": "sha512-Dmxr09IUy6M0nc7/xWod9galIO2DD500B75sJSkHeT+CCdJOWnlinux0ZPF8CSygNqymwYO8AO2l15/6yxcycg==", + "cpu": ["arm64"], + "dev": true, + "optional": true, + "os": ["darwin"] + }, + "node_modules/turbo-linux-64": { + "version": "1.5.5", + "resolved": "https://registry.npmjs.org/turbo-linux-64/-/turbo-linux-64-1.5.5.tgz", + "integrity": "sha512-wd07TZ4zXXWjzZE00FcFMLmkybQQK/NV9ff66vvAV0vdiuacSMBCNLrD6Mm4ncfrUPW/rwFW5kU/7hyuEqqtDw==", + "cpu": ["x64"], + "dev": true, + "optional": true, + "os": ["linux"] + }, + "node_modules/turbo-linux-arm64": { + "version": "1.5.5", + "resolved": "https://registry.npmjs.org/turbo-linux-arm64/-/turbo-linux-arm64-1.5.5.tgz", + "integrity": "sha512-q3q33tuo74R7gicnfvFbnZZvqmlq7Vakcvx0eshifnJw4PR+oMnTCb4w8ElVFx070zsb8DVTibq99y8NJH8T1Q==", + "cpu": ["arm64"], + "dev": true, + "optional": true, + "os": ["linux"] + }, + "node_modules/turbo-windows-64": { + "version": "1.5.5", + "resolved": "https://registry.npmjs.org/turbo-windows-64/-/turbo-windows-64-1.5.5.tgz", + "integrity": "sha512-lPp9kHonNFfqgovbaW+UAPO5cLmoAN+m3G3FzqcrRPnlzt97vXYsDhDd/4Zy3oAKoAcprtP4CGy0ddisqsKTVw==", + "cpu": ["x64"], + "dev": true, + "optional": true, + "os": ["win32"] + }, + 
"node_modules/turbo-windows-arm64": { + "version": "1.5.5", + "resolved": "https://registry.npmjs.org/turbo-windows-arm64/-/turbo-windows-arm64-1.5.5.tgz", + "integrity": "sha512-3AfGULKNZiZVrEzsIE+W79ZRW1+f5r4nM4wLlJ1PTBHyRxBZdD6KTH1tijGfy/uTlcV5acYnKHEkDc6Q9PAXGQ==", + "cpu": ["arm64"], + "dev": true, + "optional": true, + "os": ["win32"] + }, + "node_modules/type-check": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", + "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==", + "dependencies": { + "prelude-ls": "^1.2.1" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/type-fest": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz", + "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/typescript": { + "version": "4.8.4", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.8.4.tgz", + "integrity": "sha512-QCh+85mCy+h0IGff8r5XWzOVSbBO+KfeYrMQh7NJ58QujwcE22u+NUSmUxqF+un70P9GXKxa2HCNiTTMJknyjQ==", + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=4.2.0" + } + }, + "node_modules/ui": { + "resolved": "packages/ui", + "link": true + }, + "node_modules/unbox-primitive": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.0.2.tgz", + "integrity": "sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw==", + "dependencies": { + "call-bind": "^1.0.2", + "has-bigints": "^1.0.2", + "has-symbols": "^1.0.3", + "which-boxed-primitive": "^1.0.2" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/update-browserslist-db": { + "version": "1.0.9", + "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.0.9.tgz", + "integrity": "sha512-/xsqn21EGVdXI3EXSum1Yckj3ZVZugqyOZQ/CxYPBD/R+ko9NSUScf8tFF4dOKY+2pvSSJA/S+5B8s4Zr4kyvg==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + } + ], + "dependencies": { + "escalade": "^3.1.1", + "picocolors": "^1.0.0" + }, + "bin": { + "browserslist-lint": "cli.js" + }, + "peerDependencies": { + "browserslist": ">= 4.21.0" + } + }, + "node_modules/uri-js": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", + "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", + "dependencies": { + "punycode": "^2.1.0" + } + }, + "node_modules/use-sync-external-store": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/use-sync-external-store/-/use-sync-external-store-1.2.0.tgz", + "integrity": "sha512-eEgnFxGQ1Ife9bzYs6VLi8/4X6CObHMw9Qr9tPY43iKwsPw8xE8+EFsf/2cFZ5S3esXgpWgtSCtLNS41F+sKPA==", + "peerDependencies": { + "react": "^16.8.0 || ^17.0.0 || ^18.0.0" + } + }, + "node_modules/v8-compile-cache": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/v8-compile-cache/-/v8-compile-cache-2.3.0.tgz", + "integrity": "sha512-l8lCEmLcLYZh4nbunNZvQCJc5pv7+RCwa8q/LdUx8u7lsWvPDKmpodJAJNwkAhJC//dFY48KuIEmjtd4RViDrA==" + }, + 
"node_modules/web": { + "resolved": "apps/web", + "link": true + }, + "node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/which-boxed-primitive": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz", + "integrity": "sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg==", + "dependencies": { + "is-bigint": "^1.0.1", + "is-boolean-object": "^1.1.0", + "is-number-object": "^1.0.4", + "is-string": "^1.0.5", + "is-symbol": "^1.0.3" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/word-wrap": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz", + "integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" + }, + "node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + }, + "packages/eslint-config-custom": { + "version": "0.0.0", + "license": "MIT", + "dependencies": { + "eslint": "^7.23.0", + "eslint-config-next": "^12.0.8", + "eslint-config-prettier": "^8.3.0", + "eslint-config-turbo": "latest", + "eslint-plugin-react": "7.31.8" + }, + "devDependencies": { + "typescript": "^4.7.4" + } + }, + "packages/tsconfig": { + "version": "0.0.0" + }, + "packages/ui": { + "version": "0.0.0", + "license": "MIT", + "devDependencies": { + "@types/react": "^17.0.37", + "@types/react-dom": "^17.0.11", + "eslint": "^7.32.0", + "eslint-config-custom": "*", + "react": "^18.2.0", + "tsconfig": "*", + "typescript": "^4.5.2" + } + }, + "packages/ui/node_modules/@types/react": { + "version": "17.0.50", + "resolved": "https://registry.npmjs.org/@types/react/-/react-17.0.50.tgz", + "integrity": "sha512-ZCBHzpDb5skMnc1zFXAXnL3l1FAdi+xZvwxK+PkglMmBrwjpp9nKaWuEvrGnSifCJmBFGxZOOFuwC6KH/s0NuA==", + "dev": true, + "dependencies": { + "@types/prop-types": "*", + "@types/scheduler": "*", + "csstype": "^3.0.2" + } + } + }, + "dependencies": { + "@ampproject/remapping": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.2.0.tgz", + "integrity": "sha512-qRmjj8nj9qmLTQXXmaR1cck3UXSRMPrbsLJAasZpF+t3riI71BXed5ebIOYwQntykeZuhjsdweEc9BxH5Jc26w==", + "dev": true, + "requires": { + "@jridgewell/gen-mapping": "^0.1.0", + "@jridgewell/trace-mapping": "^0.3.9" + } + }, + "@babel/code-frame": { + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.18.6.tgz", + "integrity": "sha512-TDCmlK5eOvH+eH7cdAFlNXeVJqWIQ7gW9tY1GJIpUtFb6CmjVyq2VM3u71bOyR8CRihcCgMUYoDNyLXao3+70Q==", + "dev": true, + "requires": { + "@babel/highlight": "^7.18.6" + } + }, + "@babel/compat-data": { + "version": "7.19.3", + "resolved": 
"https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.19.3.tgz", + "integrity": "sha512-prBHMK4JYYK+wDjJF1q99KK4JLL+egWS4nmNqdlMUgCExMZ+iZW0hGhyC3VEbsPjvaN0TBhW//VIFwBrk8sEiw==", + "dev": true + }, + "@babel/core": { + "version": "7.19.3", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.19.3.tgz", + "integrity": "sha512-WneDJxdsjEvyKtXKsaBGbDeiyOjR5vYq4HcShxnIbG0qixpoHjI3MqeZM9NDvsojNCEBItQE4juOo/bU6e72gQ==", + "dev": true, + "requires": { + "@ampproject/remapping": "^2.1.0", + "@babel/code-frame": "^7.18.6", + "@babel/generator": "^7.19.3", + "@babel/helper-compilation-targets": "^7.19.3", + "@babel/helper-module-transforms": "^7.19.0", + "@babel/helpers": "^7.19.0", + "@babel/parser": "^7.19.3", + "@babel/template": "^7.18.10", + "@babel/traverse": "^7.19.3", + "@babel/types": "^7.19.3", + "convert-source-map": "^1.7.0", + "debug": "^4.1.0", + "gensync": "^1.0.0-beta.2", + "json5": "^2.2.1", + "semver": "^6.3.0" + } + }, + "@babel/generator": { + "version": "7.19.3", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.19.3.tgz", + "integrity": "sha512-fqVZnmp1ncvZU757UzDheKZpfPgatqY59XtW2/j/18H7u76akb8xqvjw82f+i2UKd/ksYsSick/BCLQUUtJ/qQ==", + "dev": true, + "requires": { + "@babel/types": "^7.19.3", + "@jridgewell/gen-mapping": "^0.3.2", + "jsesc": "^2.5.1" + }, + "dependencies": { + "@jridgewell/gen-mapping": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.2.tgz", + "integrity": "sha512-mh65xKQAzI6iBcFzwv28KVWSmCkdRBWoOh+bYQGW3+6OZvbbN3TqMGo5hqYxQniRcH9F2VZIoJCm4pa3BPDK/A==", + "dev": true, + "requires": { + "@jridgewell/set-array": "^1.0.1", + "@jridgewell/sourcemap-codec": "^1.4.10", + "@jridgewell/trace-mapping": "^0.3.9" + } + } + } + }, + "@babel/helper-compilation-targets": { + "version": "7.19.3", + "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.19.3.tgz", + "integrity": "sha512-65ESqLGyGmLvgR0mst5AdW1FkNlj9rQsCKduzEoEPhBCDFGXvz2jW6bXFG6i0/MrV2s7hhXjjb2yAzcPuQlLwg==", + "dev": true, + "requires": { + "@babel/compat-data": "^7.19.3", + "@babel/helper-validator-option": "^7.18.6", + "browserslist": "^4.21.3", + "semver": "^6.3.0" + } + }, + "@babel/helper-environment-visitor": { + "version": "7.18.9", + "resolved": "https://registry.npmjs.org/@babel/helper-environment-visitor/-/helper-environment-visitor-7.18.9.tgz", + "integrity": "sha512-3r/aACDJ3fhQ/EVgFy0hpj8oHyHpQc+LPtJoY9SzTThAsStm4Ptegq92vqKoE3vD706ZVFWITnMnxucw+S9Ipg==", + "dev": true + }, + "@babel/helper-function-name": { + "version": "7.19.0", + "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.19.0.tgz", + "integrity": "sha512-WAwHBINyrpqywkUH0nTnNgI5ina5TFn85HKS0pbPDfxFfhyR/aNQEn4hGi1P1JyT//I0t4OgXUlofzWILRvS5w==", + "dev": true, + "requires": { + "@babel/template": "^7.18.10", + "@babel/types": "^7.19.0" + } + }, + "@babel/helper-hoist-variables": { + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/helper-hoist-variables/-/helper-hoist-variables-7.18.6.tgz", + "integrity": "sha512-UlJQPkFqFULIcyW5sbzgbkxn2FKRgwWiRexcuaR8RNJRy8+LLveqPjwZV/bwrLZCN0eUHD/x8D0heK1ozuoo6Q==", + "dev": true, + "requires": { + "@babel/types": "^7.18.6" + } + }, + "@babel/helper-module-imports": { + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.18.6.tgz", + "integrity": 
"sha512-0NFvs3VkuSYbFi1x2Vd6tKrywq+z/cLeYC/RJNFrIX/30Bf5aiGYbtvGXolEktzJH8o5E5KJ3tT+nkxuuZFVlA==", + "dev": true, + "requires": { + "@babel/types": "^7.18.6" + } + }, + "@babel/helper-module-transforms": { + "version": "7.19.0", + "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.19.0.tgz", + "integrity": "sha512-3HBZ377Fe14RbLIA+ac3sY4PTgpxHVkFrESaWhoI5PuyXPBBX8+C34qblV9G89ZtycGJCmCI/Ut+VUDK4bltNQ==", + "dev": true, + "requires": { + "@babel/helper-environment-visitor": "^7.18.9", + "@babel/helper-module-imports": "^7.18.6", + "@babel/helper-simple-access": "^7.18.6", + "@babel/helper-split-export-declaration": "^7.18.6", + "@babel/helper-validator-identifier": "^7.18.6", + "@babel/template": "^7.18.10", + "@babel/traverse": "^7.19.0", + "@babel/types": "^7.19.0" + } + }, + "@babel/helper-simple-access": { + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/helper-simple-access/-/helper-simple-access-7.18.6.tgz", + "integrity": "sha512-iNpIgTgyAvDQpDj76POqg+YEt8fPxx3yaNBg3S30dxNKm2SWfYhD0TGrK/Eu9wHpUW63VQU894TsTg+GLbUa1g==", + "dev": true, + "requires": { + "@babel/types": "^7.18.6" + } + }, + "@babel/helper-split-export-declaration": { + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.18.6.tgz", + "integrity": "sha512-bde1etTx6ZyTmobl9LLMMQsaizFVZrquTEHOqKeQESMKo4PlObf+8+JA25ZsIpZhT/WEd39+vOdLXAFG/nELpA==", + "dev": true, + "requires": { + "@babel/types": "^7.18.6" + } + }, + "@babel/helper-string-parser": { + "version": "7.18.10", + "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.18.10.tgz", + "integrity": "sha512-XtIfWmeNY3i4t7t4D2t02q50HvqHybPqW2ki1kosnvWCwuCMeo81Jf0gwr85jy/neUdg5XDdeFE/80DXiO+njw==", + "dev": true + }, + "@babel/helper-validator-identifier": { + "version": "7.19.1", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.19.1.tgz", + "integrity": "sha512-awrNfaMtnHUr653GgGEs++LlAvW6w+DcPrOliSMXWCKo597CwL5Acf/wWdNkf/tfEQE3mjkeD1YOVZOUV/od1w==" + }, + "@babel/helper-validator-option": { + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.18.6.tgz", + "integrity": "sha512-XO7gESt5ouv/LRJdrVjkShckw6STTaB7l9BrpBaAHDeF5YZT+01PCwmR0SJHnkW6i8OwW/EVWRShfi4j2x+KQw==", + "dev": true + }, + "@babel/helpers": { + "version": "7.19.0", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.19.0.tgz", + "integrity": "sha512-DRBCKGwIEdqY3+rPJgG/dKfQy9+08rHIAJx8q2p+HSWP87s2HCrQmaAMMyMll2kIXKCW0cO1RdQskx15Xakftg==", + "dev": true, + "requires": { + "@babel/template": "^7.18.10", + "@babel/traverse": "^7.19.0", + "@babel/types": "^7.19.0" + } + }, + "@babel/highlight": { + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.18.6.tgz", + "integrity": "sha512-u7stbOuYjaPezCuLj29hNW1v64M2Md2qupEKP1fHc7WdOA3DgLh37suiSrZYY7haUB7iBeQZ9P1uiRF359do3g==", + "requires": { + "@babel/helper-validator-identifier": "^7.18.6", + "chalk": "^2.0.0", + "js-tokens": "^4.0.0" + } + }, + "@babel/parser": { + "version": "7.19.3", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.19.3.tgz", + "integrity": "sha512-pJ9xOlNWHiy9+FuFP09DEAFbAn4JskgRsVcc169w2xRBC3FRGuQEwjeIMMND9L2zc0iEhO/tGv4Zq+km+hxNpQ==", + "dev": true + }, + "@babel/runtime": { + "version": "7.19.0", + "resolved": 
"https://registry.npmjs.org/@babel/runtime/-/runtime-7.19.0.tgz", + "integrity": "sha512-eR8Lo9hnDS7tqkO7NsV+mKvCmv5boaXFSZ70DnfhcgiEne8hv9oCEd36Klw74EtizEqLsy4YnW8UWwpBVolHZA==", + "requires": { + "regenerator-runtime": "^0.13.4" + } + }, + "@babel/runtime-corejs3": { + "version": "7.19.1", + "resolved": "https://registry.npmjs.org/@babel/runtime-corejs3/-/runtime-corejs3-7.19.1.tgz", + "integrity": "sha512-j2vJGnkopRzH+ykJ8h68wrHnEUmtK//E723jjixiAl/PPf6FhqY/vYRcMVlNydRKQjQsTsYEjpx+DZMIvnGk/g==", + "requires": { + "core-js-pure": "^3.25.1", + "regenerator-runtime": "^0.13.4" + } + }, + "@babel/template": { + "version": "7.18.10", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.18.10.tgz", + "integrity": "sha512-TI+rCtooWHr3QJ27kJxfjutghu44DLnasDMwpDqCXVTal9RLp3RSYNh4NdBrRP2cQAoG9A8juOQl6P6oZG4JxA==", + "dev": true, + "requires": { + "@babel/code-frame": "^7.18.6", + "@babel/parser": "^7.18.10", + "@babel/types": "^7.18.10" + } + }, + "@babel/traverse": { + "version": "7.19.3", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.19.3.tgz", + "integrity": "sha512-qh5yf6149zhq2sgIXmwjnsvmnNQC2iw70UFjp4olxucKrWd/dvlUsBI88VSLUsnMNF7/vnOiA+nk1+yLoCqROQ==", + "dev": true, + "requires": { + "@babel/code-frame": "^7.18.6", + "@babel/generator": "^7.19.3", + "@babel/helper-environment-visitor": "^7.18.9", + "@babel/helper-function-name": "^7.19.0", + "@babel/helper-hoist-variables": "^7.18.6", + "@babel/helper-split-export-declaration": "^7.18.6", + "@babel/parser": "^7.19.3", + "@babel/types": "^7.19.3", + "debug": "^4.1.0", + "globals": "^11.1.0" + } + }, + "@babel/types": { + "version": "7.19.3", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.19.3.tgz", + "integrity": "sha512-hGCaQzIY22DJlDh9CH7NOxgKkFjBk0Cw9xDO1Xmh2151ti7wiGfQ3LauXzL4HP1fmFlTX6XjpRETTpUcv7wQLw==", + "dev": true, + "requires": { + "@babel/helper-string-parser": "^7.18.10", + "@babel/helper-validator-identifier": "^7.19.1", + "to-fast-properties": "^2.0.0" + } + }, + "@eslint/eslintrc": { + "version": "0.4.3", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-0.4.3.tgz", + "integrity": "sha512-J6KFFz5QCYUJq3pf0mjEcCJVERbzv71PUIDczuh9JkwGEzced6CO5ADLHB1rbf/+oPBtoPfMYNOpGDzCANlbXw==", + "requires": { + "ajv": "^6.12.4", + "debug": "^4.1.1", + "espree": "^7.3.0", + "globals": "^13.9.0", + "ignore": "^4.0.6", + "import-fresh": "^3.2.1", + "js-yaml": "^3.13.1", + "minimatch": "^3.0.4", + "strip-json-comments": "^3.1.1" + }, + "dependencies": { + "globals": { + "version": "13.17.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-13.17.0.tgz", + "integrity": "sha512-1C+6nQRb1GwGMKm2dH/E7enFAMxGTmGI7/dEdhy/DNelv85w9B72t3uc5frtMNXIbzrarJJ/lTCjcaZwbLJmyw==", + "requires": { + "type-fest": "^0.20.2" + } + } + } + }, + "@humanwhocodes/config-array": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.5.0.tgz", + "integrity": "sha512-FagtKFz74XrTl7y6HCzQpwDfXP0yhxe9lHLD1UZxjvZIcbyRz8zTFF/yYNfSfzU414eDwZ1SrO0Qvtyf+wFMQg==", + "requires": { + "@humanwhocodes/object-schema": "^1.2.0", + "debug": "^4.1.1", + "minimatch": "^3.0.4" + } + }, + "@humanwhocodes/object-schema": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz", + "integrity": "sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA==" + }, + "@jridgewell/gen-mapping": { + "version": "0.1.1", + "resolved": 
"https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.1.1.tgz", + "integrity": "sha512-sQXCasFk+U8lWYEe66WxRDOE9PjVz4vSM51fTu3Hw+ClTpUSQb718772vH3pyS5pShp6lvQM7SxgIDXXXmOX7w==", + "dev": true, + "requires": { + "@jridgewell/set-array": "^1.0.0", + "@jridgewell/sourcemap-codec": "^1.4.10" + } + }, + "@jridgewell/resolve-uri": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.0.tgz", + "integrity": "sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w==", + "dev": true + }, + "@jridgewell/set-array": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.1.2.tgz", + "integrity": "sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw==", + "dev": true + }, + "@jridgewell/sourcemap-codec": { + "version": "1.4.14", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.14.tgz", + "integrity": "sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw==", + "dev": true + }, + "@jridgewell/trace-mapping": { + "version": "0.3.15", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.15.tgz", + "integrity": "sha512-oWZNOULl+UbhsgB51uuZzglikfIKSUBO/M9W2OfEjn7cmqoAiCgmv9lyACTUacZwBz0ITnJ2NqjU8Tx0DHL88g==", + "dev": true, + "requires": { + "@jridgewell/resolve-uri": "^3.0.3", + "@jridgewell/sourcemap-codec": "^1.4.10" + } + }, + "@next/env": { + "version": "12.3.0", + "resolved": "https://registry.npmjs.org/@next/env/-/env-12.3.0.tgz", + "integrity": "sha512-PTJpjAFVbzBQ9xXpzMTroShvD5YDIIy46jQ7d4LrWpY+/5a8H90Tm8hE3Hvkc5RBRspVo7kvEOnqQms0A+2Q6w==" + }, + "@next/eslint-plugin-next": { + "version": "12.3.1", + "resolved": "https://registry.npmjs.org/@next/eslint-plugin-next/-/eslint-plugin-next-12.3.1.tgz", + "integrity": "sha512-sw+lTf6r6P0j+g/n9y4qdWWI2syPqZx+uc0+B/fRENqfR3KpSid6MIKqc9gNwGhJASazEQ5b3w8h4cAET213jw==", + "requires": { + "glob": "7.1.7" + } + }, + "@next/swc-android-arm-eabi": { + "version": "12.3.0", + "resolved": "https://registry.npmjs.org/@next/swc-android-arm-eabi/-/swc-android-arm-eabi-12.3.0.tgz", + "integrity": "sha512-/PuirPnAKsYBw93w/7Q9hqy+KGOU9mjYprZ/faxMUJh/dc6v3rYLxkZKNG9nFPIW4QKNTCnhP40xF9hLnxO+xg==", + "optional": true + }, + "@next/swc-android-arm64": { + "version": "12.3.0", + "resolved": "https://registry.npmjs.org/@next/swc-android-arm64/-/swc-android-arm64-12.3.0.tgz", + "integrity": "sha512-OaI+FhAM6P9B6Ybwbn0Zl8YwWido0lLwhDBi9WiYCh4RQmIXAyVIoIJPHo4fP05+mXaJ/k1trvDvuURvHOq2qw==", + "optional": true + }, + "@next/swc-darwin-arm64": { + "version": "12.3.0", + "resolved": "https://registry.npmjs.org/@next/swc-darwin-arm64/-/swc-darwin-arm64-12.3.0.tgz", + "integrity": "sha512-9s4d3Mhii+WFce8o8Jok7WC3Bawkr9wEUU++SJRptjU1L5tsfYJMrSYCACHLhZujziNDLyExe4Hwwsccps1sfg==", + "optional": true + }, + "@next/swc-darwin-x64": { + "version": "12.3.0", + "resolved": "https://registry.npmjs.org/@next/swc-darwin-x64/-/swc-darwin-x64-12.3.0.tgz", + "integrity": "sha512-2scC4MqUTwGwok+wpVxP+zWp7WcCAVOtutki2E1n99rBOTnUOX6qXkgxSy083yBN6GqwuC/dzHeN7hIKjavfRA==", + "optional": true + }, + "@next/swc-freebsd-x64": { + "version": "12.3.0", + "resolved": "https://registry.npmjs.org/@next/swc-freebsd-x64/-/swc-freebsd-x64-12.3.0.tgz", + "integrity": "sha512-xAlruUREij/bFa+qsE1tmsP28t7vz02N4ZDHt2lh3uJUniE0Ne9idyIDLc1Ed0IF2RjfgOp4ZVunuS3OM0sngw==", + "optional": true + }, + 
"@next/swc-linux-arm-gnueabihf": { + "version": "12.3.0", + "resolved": "https://registry.npmjs.org/@next/swc-linux-arm-gnueabihf/-/swc-linux-arm-gnueabihf-12.3.0.tgz", + "integrity": "sha512-jin2S4VT/cugc2dSZEUIabhYDJNgrUh7fufbdsaAezgcQzqfdfJqfxl4E9GuafzB4cbRPTaqA0V5uqbp0IyGkQ==", + "optional": true + }, + "@next/swc-linux-arm64-gnu": { + "version": "12.3.0", + "resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-gnu/-/swc-linux-arm64-gnu-12.3.0.tgz", + "integrity": "sha512-RqJHDKe0WImeUrdR0kayTkRWgp4vD/MS7g0r6Xuf8+ellOFH7JAAJffDW3ayuVZeMYOa7RvgNFcOoWnrTUl9Nw==", + "optional": true + }, + "@next/swc-linux-arm64-musl": { + "version": "12.3.0", + "resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-musl/-/swc-linux-arm64-musl-12.3.0.tgz", + "integrity": "sha512-nvNWoUieMjvDjpYJ/4SQe9lQs2xMj6ZRs8N+bmTrVu9leY2Fg3WD6W9p/1uU9hGO8u+OdF13wc4iRShu/WYIHg==", + "optional": true + }, + "@next/swc-linux-x64-gnu": { + "version": "12.3.0", + "resolved": "https://registry.npmjs.org/@next/swc-linux-x64-gnu/-/swc-linux-x64-gnu-12.3.0.tgz", + "integrity": "sha512-4ajhIuVU9PeQCMMhdDgZTLrHmjbOUFuIyg6J19hZqwEwDTSqQyrSLkbJs2Nd7IRiM6Ul/XyrtEFCpk4k+xD2+w==", + "optional": true + }, + "@next/swc-linux-x64-musl": { + "version": "12.3.0", + "resolved": "https://registry.npmjs.org/@next/swc-linux-x64-musl/-/swc-linux-x64-musl-12.3.0.tgz", + "integrity": "sha512-U092RBYbaGxoMAwpauePJEu2PuZSEoUCGJBvsptQr2/2XIMwAJDYM4c/M5NfYEsBr+yjvsYNsOpYfeQ88D82Yg==", + "optional": true + }, + "@next/swc-win32-arm64-msvc": { + "version": "12.3.0", + "resolved": "https://registry.npmjs.org/@next/swc-win32-arm64-msvc/-/swc-win32-arm64-msvc-12.3.0.tgz", + "integrity": "sha512-pzSzaxjDEJe67bUok9Nxf9rykbJfHXW0owICFsPBsqHyc+cr8vpF7g9e2APTCddtVhvjkga9ILoZJ9NxWS7Yiw==", + "optional": true + }, + "@next/swc-win32-ia32-msvc": { + "version": "12.3.0", + "resolved": "https://registry.npmjs.org/@next/swc-win32-ia32-msvc/-/swc-win32-ia32-msvc-12.3.0.tgz", + "integrity": "sha512-MQGUpMbYhQmTZ06a9e0hPQJnxFMwETo2WtyAotY3GEzbNCQVbCGhsvqEKcl+ZEHgShlHXUWvSffq1ZscY6gK7A==", + "optional": true + }, + "@next/swc-win32-x64-msvc": { + "version": "12.3.0", + "resolved": "https://registry.npmjs.org/@next/swc-win32-x64-msvc/-/swc-win32-x64-msvc-12.3.0.tgz", + "integrity": "sha512-C/nw6OgQpEULWqs+wgMHXGvlJLguPRFFGqR2TAqWBerQ8J+Sg3z1ZTqwelkSi4FoqStGuZ2UdFHIDN1ySmR1xA==", + "optional": true + }, + "@nodelib/fs.scandir": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", + "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", + "requires": { + "@nodelib/fs.stat": "2.0.5", + "run-parallel": "^1.1.9" + } + }, + "@nodelib/fs.stat": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", + "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==" + }, + "@nodelib/fs.walk": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", + "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", + "requires": { + "@nodelib/fs.scandir": "2.1.5", + "fastq": "^1.6.0" + } + }, + "@rushstack/eslint-patch": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@rushstack/eslint-patch/-/eslint-patch-1.2.0.tgz", + "integrity": "sha512-sXo/qW2/pAcmT43VoRKOJbDOfV3cYpq3szSVfIThQXNt+E4DfKj361vaAt3c88U5tPUxzEswam7GW48PJqtKAg==" + }, + 
"@swc/helpers": { + "version": "0.4.11", + "resolved": "https://registry.npmjs.org/@swc/helpers/-/helpers-0.4.11.tgz", + "integrity": "sha512-rEUrBSGIoSFuYxwBYtlUFMlE2CwGhmW+w9355/5oduSw8e5h2+Tj4UrAGNNgP9915++wj5vkQo0UuOBqOAq4nw==", + "requires": { + "tslib": "^2.4.0" + } + }, + "@types/json5": { + "version": "0.0.29", + "resolved": "https://registry.npmjs.org/@types/json5/-/json5-0.0.29.tgz", + "integrity": "sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ==" + }, + "@types/node": { + "version": "17.0.45", + "resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.45.tgz", + "integrity": "sha512-w+tIMs3rq2afQdsPJlODhoUEKzFP1ayaoyl1CcnwtIlsVe7K7bA1NGm4s3PraqTLlXnbIN84zuBlxBWo1u9BLw==", + "dev": true + }, + "@types/prop-types": { + "version": "15.7.5", + "resolved": "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.5.tgz", + "integrity": "sha512-JCB8C6SnDoQf0cNycqd/35A7MjcnK+ZTqE7judS6o7utxUCg6imJg3QK2qzHKszlTjcj2cn+NwMB2i96ubpj7w==", + "dev": true + }, + "@types/react": { + "version": "18.0.17", + "resolved": "https://registry.npmjs.org/@types/react/-/react-18.0.17.tgz", + "integrity": "sha512-38ETy4tL+rn4uQQi7mB81G7V1g0u2ryquNmsVIOKUAEIDK+3CUjZ6rSRpdvS99dNBnkLFL83qfmtLacGOTIhwQ==", + "dev": true, + "requires": { + "@types/prop-types": "*", + "@types/scheduler": "*", + "csstype": "^3.0.2" + } + }, + "@types/react-dom": { + "version": "17.0.17", + "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-17.0.17.tgz", + "integrity": "sha512-VjnqEmqGnasQKV0CWLevqMTXBYG9GbwuE6x3VetERLh0cq2LTptFE73MrQi2S7GkKXCf2GgwItB/melLnxfnsg==", + "dev": true, + "requires": { + "@types/react": "^17" + }, + "dependencies": { + "@types/react": { + "version": "17.0.50", + "resolved": "https://registry.npmjs.org/@types/react/-/react-17.0.50.tgz", + "integrity": "sha512-ZCBHzpDb5skMnc1zFXAXnL3l1FAdi+xZvwxK+PkglMmBrwjpp9nKaWuEvrGnSifCJmBFGxZOOFuwC6KH/s0NuA==", + "dev": true, + "requires": { + "@types/prop-types": "*", + "@types/scheduler": "*", + "csstype": "^3.0.2" + } + } + } + }, + "@types/scheduler": { + "version": "0.16.2", + "resolved": "https://registry.npmjs.org/@types/scheduler/-/scheduler-0.16.2.tgz", + "integrity": "sha512-hppQEBDmlwhFAXKJX2KnWLYu5yMfi91yazPb2l+lbJiwW+wdo1gNeRA+3RgNSO39WYX2euey41KEwnqesU2Jew==", + "dev": true + }, + "@typescript-eslint/parser": { + "version": "5.39.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-5.39.0.tgz", + "integrity": "sha512-PhxLjrZnHShe431sBAGHaNe6BDdxAASDySgsBCGxcBecVCi8NQWxQZMcizNA4g0pN51bBAn/FUfkWG3SDVcGlA==", + "requires": { + "@typescript-eslint/scope-manager": "5.39.0", + "@typescript-eslint/types": "5.39.0", + "@typescript-eslint/typescript-estree": "5.39.0", + "debug": "^4.3.4" + } + }, + "@typescript-eslint/scope-manager": { + "version": "5.39.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-5.39.0.tgz", + "integrity": "sha512-/I13vAqmG3dyqMVSZPjsbuNQlYS082Y7OMkwhCfLXYsmlI0ca4nkL7wJ/4gjX70LD4P8Hnw1JywUVVAwepURBw==", + "requires": { + "@typescript-eslint/types": "5.39.0", + "@typescript-eslint/visitor-keys": "5.39.0" + } + }, + "@typescript-eslint/types": { + "version": "5.39.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-5.39.0.tgz", + "integrity": "sha512-gQMZrnfEBFXK38hYqt8Lkwt8f4U6yq+2H5VDSgP/qiTzC8Nw8JO3OuSUOQ2qW37S/dlwdkHDntkZM6SQhKyPhw==" + }, + "@typescript-eslint/typescript-estree": { + "version": "5.39.0", + "resolved": 
"https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-5.39.0.tgz", + "integrity": "sha512-qLFQP0f398sdnogJoLtd43pUgB18Q50QSA+BTE5h3sUxySzbWDpTSdgt4UyxNSozY/oDK2ta6HVAzvGgq8JYnA==", + "requires": { + "@typescript-eslint/types": "5.39.0", + "@typescript-eslint/visitor-keys": "5.39.0", + "debug": "^4.3.4", + "globby": "^11.1.0", + "is-glob": "^4.0.3", + "semver": "^7.3.7", + "tsutils": "^3.21.0" + }, + "dependencies": { + "semver": { + "version": "7.3.7", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.7.tgz", + "integrity": "sha512-QlYTucUYOews+WeEujDoEGziz4K6c47V/Bd+LjSSYcA94p+DmINdf7ncaUinThfvZyu13lN9OY1XDxt8C0Tw0g==", + "requires": { + "lru-cache": "^6.0.0" + } + } + } + }, + "@typescript-eslint/visitor-keys": { + "version": "5.39.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-5.39.0.tgz", + "integrity": "sha512-yyE3RPwOG+XJBLrhvsxAidUgybJVQ/hG8BhiJo0k8JSAYfk/CshVcxf0HwP4Jt7WZZ6vLmxdo1p6EyN3tzFTkg==", + "requires": { + "@typescript-eslint/types": "5.39.0", + "eslint-visitor-keys": "^3.3.0" + }, + "dependencies": { + "eslint-visitor-keys": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.3.0.tgz", + "integrity": "sha512-mQ+suqKJVyeuwGYHAdjMFqjCyfl8+Ldnxuyp3ldiMBFKkvytrXUZWaiPCEav8qDHKty44bD+qV1IP4T+w+xXRA==" + } + } + }, + "acorn": { + "version": "7.4.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.4.1.tgz", + "integrity": "sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A==" + }, + "acorn-jsx": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", + "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", + "requires": {} + }, + "ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "requires": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + } + }, + "ansi-colors": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-4.1.3.tgz", + "integrity": "sha512-/6w/C21Pm1A7aZitlI5Ni/2J6FFQN8i1Cvz3kHABAAbw93v/NlvKdVOqz7CCWz/3iv/JplRSEEZ83XION15ovw==" + }, + "ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==" + }, + "ansi-styles": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", + "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "requires": { + "color-convert": "^1.9.0" + } + }, + "argparse": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", + "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", + "requires": { + "sprintf-js": "~1.0.2" + } + }, + "aria-query": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/aria-query/-/aria-query-4.2.2.tgz", + "integrity": "sha512-o/HelwhuKpTj/frsOsbNLNgnNGVIFsVP/SW2BSF14gVl7kAfMOJ6/8wUAUvG1R1NHKrfG+2sHZTu0yauT1qBrA==", + "requires": { + "@babel/runtime": "^7.10.2", + 
"@babel/runtime-corejs3": "^7.10.2" + } + }, + "array-includes": { + "version": "3.1.5", + "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.1.5.tgz", + "integrity": "sha512-iSDYZMMyTPkiFasVqfuAQnWAYcvO/SeBSCGKePoEthjp4LEMTe4uLc7b025o4jAZpHhihh8xPo99TNWUWWkGDQ==", + "requires": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.4", + "es-abstract": "^1.19.5", + "get-intrinsic": "^1.1.1", + "is-string": "^1.0.7" + } + }, + "array-union": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz", + "integrity": "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==" + }, + "array.prototype.flat": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/array.prototype.flat/-/array.prototype.flat-1.3.0.tgz", + "integrity": "sha512-12IUEkHsAhA4DY5s0FPgNXIdc8VRSqD9Zp78a5au9abH/SOBrsp082JOWFNTjkMozh8mqcdiKuaLGhPeYztxSw==", + "requires": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.3", + "es-abstract": "^1.19.2", + "es-shim-unscopables": "^1.0.0" + } + }, + "array.prototype.flatmap": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/array.prototype.flatmap/-/array.prototype.flatmap-1.3.0.tgz", + "integrity": "sha512-PZC9/8TKAIxcWKdyeb77EzULHPrIX/tIZebLJUQOMR1OwYosT8yggdfWScfTBCDj5utONvOuPQQumYsU2ULbkg==", + "requires": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.3", + "es-abstract": "^1.19.2", + "es-shim-unscopables": "^1.0.0" + } + }, + "ast-types-flow": { + "version": "0.0.7", + "resolved": "https://registry.npmjs.org/ast-types-flow/-/ast-types-flow-0.0.7.tgz", + "integrity": "sha512-eBvWn1lvIApYMhzQMsu9ciLfkBY499mFZlNqG+/9WR7PVlroQw0vG30cOQQbaKz3sCEc44TAOu2ykzqXSNnwag==" + }, + "astral-regex": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/astral-regex/-/astral-regex-2.0.0.tgz", + "integrity": "sha512-Z7tMw1ytTXt5jqMcOP+OQteU1VuNK9Y02uuJtKQ1Sv69jXQKKg5cibLwGJow8yzZP+eAc18EmLGPal0bp36rvQ==" + }, + "axe-core": { + "version": "4.4.3", + "resolved": "https://registry.npmjs.org/axe-core/-/axe-core-4.4.3.tgz", + "integrity": "sha512-32+ub6kkdhhWick/UjvEwRchgoetXqTK14INLqbGm5U2TzBkBNF3nQtLYm8ovxSkQWArjEQvftCKryjZaATu3w==" + }, + "axobject-query": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/axobject-query/-/axobject-query-2.2.0.tgz", + "integrity": "sha512-Td525n+iPOOyUQIeBfcASuG6uJsDOITl7Mds5gFyerkWiX7qhUTdYUBlSgNMyVqtSJqwpt1kXGLdUt6SykLMRA==" + }, + "balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" + }, + "brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "requires": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "braces": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", + "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", + "requires": { + "fill-range": "^7.0.1" + } + }, + "browserslist": { + "version": "4.21.4", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.21.4.tgz", + "integrity": 
"sha512-CBHJJdDmgjl3daYjN5Cp5kbTf1mUhZoS+beLklHIvkOWscs83YAhLlF3Wsh/lciQYAcbBJgTOD44VtG31ZM4Hw==", + "dev": true, + "requires": { + "caniuse-lite": "^1.0.30001400", + "electron-to-chromium": "^1.4.251", + "node-releases": "^2.0.6", + "update-browserslist-db": "^1.0.9" + } + }, + "call-bind": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz", + "integrity": "sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==", + "requires": { + "function-bind": "^1.1.1", + "get-intrinsic": "^1.0.2" + } + }, + "callsites": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", + "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==" + }, + "caniuse-lite": { + "version": "1.0.30001414", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001414.tgz", + "integrity": "sha512-t55jfSaWjCdocnFdKQoO+d2ct9C59UZg4dY3OnUlSZ447r8pUtIKdp0hpAzrGFultmTC+Us+KpKi4GZl/LXlFg==" + }, + "chalk": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "requires": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + } + }, + "color-convert": { + "version": "1.9.3", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", + "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "requires": { + "color-name": "1.1.3" + } + }, + "color-name": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", + "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==" + }, + "concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==" + }, + "convert-source-map": { + "version": "1.8.0", + "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.8.0.tgz", + "integrity": "sha512-+OQdjP49zViI/6i7nIJpA8rAl4sV/JdPfU9nZs3VqOwGIgizICvuN2ru6fMd+4llL0tar18UYJXfZ/TWtmhUjA==", + "dev": true, + "requires": { + "safe-buffer": "~5.1.1" + } + }, + "core-js-pure": { + "version": "3.25.5", + "resolved": "https://registry.npmjs.org/core-js-pure/-/core-js-pure-3.25.5.tgz", + "integrity": "sha512-oml3M22pHM+igfWHDfdLVq2ShWmjM2V4L+dQEBs0DWVIqEm9WHCwGAlZ6BmyBQGy5sFrJmcx+856D9lVKyGWYg==" + }, + "cross-spawn": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", + "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", + "requires": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + } + }, + "csstype": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.1.1.tgz", + "integrity": "sha512-DJR/VvkAvSZW9bTouZue2sSxDwdTN92uHjqeKVm+0dAqdfNykRzQ95tay8aXMBAAPpUiq4Qcug2L7neoRh2Egw==", + "dev": true + }, + "damerau-levenshtein": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/damerau-levenshtein/-/damerau-levenshtein-1.0.8.tgz", + "integrity": 
"sha512-sdQSFB7+llfUcQHUQO3+B8ERRj0Oa4w9POWMI/puGtuf7gFywGmkaLCElnudfTiKZV+NvHqL0ifzdrI8Ro7ESA==" + }, + "debug": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "requires": { + "ms": "2.1.2" + } + }, + "deep-is": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", + "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==" + }, + "define-properties": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.1.4.tgz", + "integrity": "sha512-uckOqKcfaVvtBdsVkdPv3XjveQJsNQqmhXgRi8uhvWWuPYZCNlzT8qAyblUgNoXdHdjMTzAqeGjAoli8f+bzPA==", + "requires": { + "has-property-descriptors": "^1.0.0", + "object-keys": "^1.1.1" + } + }, + "dir-glob": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz", + "integrity": "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==", + "requires": { + "path-type": "^4.0.0" + } + }, + "docs": { + "version": "file:apps/docs", + "requires": { + "@babel/core": "^7.0.0", + "@types/node": "^17.0.12", + "@types/react": "18.0.17", + "eslint": "7.32.0", + "eslint-config-custom": "*", + "lodash": "^3.0.0", + "next": "12.3.0", + "next-transpile-modules": "9.0.0", + "react": "18.2.0", + "react-dom": "18.2.0", + "tsconfig": "*", + "typescript": "^4.5.3", + "ui": "*" + } + }, + "doctrine": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz", + "integrity": "sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==", + "requires": { + "esutils": "^2.0.2" + } + }, + "electron-to-chromium": { + "version": "1.4.270", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.270.tgz", + "integrity": "sha512-KNhIzgLiJmDDC444dj9vEOpZEgsV96ult9Iff98Vanumn+ShJHd5se8aX6KeVxdc0YQeqdrezBZv89rleDbvSg==", + "dev": true + }, + "emoji-regex": { + "version": "9.2.2", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", + "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==" + }, + "enhanced-resolve": { + "version": "5.10.0", + "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.10.0.tgz", + "integrity": "sha512-T0yTFjdpldGY8PmuXXR0PyQ1ufZpEGiHVrp7zHKB7jdR4qlmZHhONVM5AQOAWXuF/w3dnHbEQVrNptJgt7F+cQ==", + "dev": true, + "requires": { + "graceful-fs": "^4.2.4", + "tapable": "^2.2.0" + } + }, + "enquirer": { + "version": "2.3.6", + "resolved": "https://registry.npmjs.org/enquirer/-/enquirer-2.3.6.tgz", + "integrity": "sha512-yjNnPr315/FjS4zIsUxYguYUPP2e1NK4d7E7ZOLiyYCcbFBiTMyID+2wvm2w6+pZ/odMA7cRkjhsPbltwBOrLg==", + "requires": { + "ansi-colors": "^4.1.1" + } + }, + "es-abstract": { + "version": "1.20.3", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.20.3.tgz", + "integrity": "sha512-AyrnaKVpMzljIdwjzrj+LxGmj8ik2LckwXacHqrJJ/jxz6dDDBcZ7I7nlHM0FvEW8MfbWJwOd+yT2XzYW49Frw==", + "requires": { + "call-bind": "^1.0.2", + "es-to-primitive": "^1.2.1", + "function-bind": "^1.1.1", + "function.prototype.name": "^1.1.5", + "get-intrinsic": "^1.1.3", + "get-symbol-description": "^1.0.0", + "has": "^1.0.3", + "has-property-descriptors": "^1.0.0", + "has-symbols": "^1.0.3", + 
"internal-slot": "^1.0.3", + "is-callable": "^1.2.6", + "is-negative-zero": "^2.0.2", + "is-regex": "^1.1.4", + "is-shared-array-buffer": "^1.0.2", + "is-string": "^1.0.7", + "is-weakref": "^1.0.2", + "object-inspect": "^1.12.2", + "object-keys": "^1.1.1", + "object.assign": "^4.1.4", + "regexp.prototype.flags": "^1.4.3", + "safe-regex-test": "^1.0.0", + "string.prototype.trimend": "^1.0.5", + "string.prototype.trimstart": "^1.0.5", + "unbox-primitive": "^1.0.2" + } + }, + "es-shim-unscopables": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/es-shim-unscopables/-/es-shim-unscopables-1.0.0.tgz", + "integrity": "sha512-Jm6GPcCdC30eMLbZ2x8z2WuRwAws3zTBBKuusffYVUrNj/GVSUAZ+xKMaUpfNDR5IbyNA5LJbaecoUVbmUcB1w==", + "requires": { + "has": "^1.0.3" + } + }, + "es-to-primitive": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz", + "integrity": "sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==", + "requires": { + "is-callable": "^1.1.4", + "is-date-object": "^1.0.1", + "is-symbol": "^1.0.2" + } + }, + "escalade": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", + "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==", + "dev": true + }, + "escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==" + }, + "eslint": { + "version": "7.32.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-7.32.0.tgz", + "integrity": "sha512-VHZ8gX+EDfz+97jGcgyGCyRia/dPOd6Xh9yPv8Bl1+SoaIwD+a/vlrOmGRUyOYu7MwUhc7CxqeaDZU13S4+EpA==", + "requires": { + "@babel/code-frame": "7.12.11", + "@eslint/eslintrc": "^0.4.3", + "@humanwhocodes/config-array": "^0.5.0", + "ajv": "^6.10.0", + "chalk": "^4.0.0", + "cross-spawn": "^7.0.2", + "debug": "^4.0.1", + "doctrine": "^3.0.0", + "enquirer": "^2.3.5", + "escape-string-regexp": "^4.0.0", + "eslint-scope": "^5.1.1", + "eslint-utils": "^2.1.0", + "eslint-visitor-keys": "^2.0.0", + "espree": "^7.3.1", + "esquery": "^1.4.0", + "esutils": "^2.0.2", + "fast-deep-equal": "^3.1.3", + "file-entry-cache": "^6.0.1", + "functional-red-black-tree": "^1.0.1", + "glob-parent": "^5.1.2", + "globals": "^13.6.0", + "ignore": "^4.0.6", + "import-fresh": "^3.0.0", + "imurmurhash": "^0.1.4", + "is-glob": "^4.0.0", + "js-yaml": "^3.13.1", + "json-stable-stringify-without-jsonify": "^1.0.1", + "levn": "^0.4.1", + "lodash.merge": "^4.6.2", + "minimatch": "^3.0.4", + "natural-compare": "^1.4.0", + "optionator": "^0.9.1", + "progress": "^2.0.0", + "regexpp": "^3.1.0", + "semver": "^7.2.1", + "strip-ansi": "^6.0.0", + "strip-json-comments": "^3.1.0", + "table": "^6.0.9", + "text-table": "^0.2.0", + "v8-compile-cache": "^2.0.3" + }, + "dependencies": { + "@babel/code-frame": { + "version": "7.12.11", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.12.11.tgz", + "integrity": "sha512-Zt1yodBx1UcyiePMSkWnU4hPqhwq7hGi2nFL1LeA3EUl+q2LQx16MISgJ0+z7dnmgvP9QtIleuETGOiOH1RcIw==", + "requires": { + "@babel/highlight": "^7.10.4" + } + }, + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": 
"sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "requires": { + "color-convert": "^2.0.1" + } + }, + "chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "requires": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + } + }, + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" + }, + "escape-string-regexp": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==" + }, + "globals": { + "version": "13.17.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-13.17.0.tgz", + "integrity": "sha512-1C+6nQRb1GwGMKm2dH/E7enFAMxGTmGI7/dEdhy/DNelv85w9B72t3uc5frtMNXIbzrarJJ/lTCjcaZwbLJmyw==", + "requires": { + "type-fest": "^0.20.2" + } + }, + "has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==" + }, + "semver": { + "version": "7.3.7", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.7.tgz", + "integrity": "sha512-QlYTucUYOews+WeEujDoEGziz4K6c47V/Bd+LjSSYcA94p+DmINdf7ncaUinThfvZyu13lN9OY1XDxt8C0Tw0g==", + "requires": { + "lru-cache": "^6.0.0" + } + }, + "supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "requires": { + "has-flag": "^4.0.0" + } + } + } + }, + "eslint-config-custom": { + "version": "file:packages/eslint-config-custom", + "requires": { + "eslint": "^7.23.0", + "eslint-config-next": "^12.0.8", + "eslint-config-prettier": "^8.3.0", + "eslint-config-turbo": "latest", + "eslint-plugin-react": "7.31.8", + "typescript": "^4.7.4" + } + }, + "eslint-config-next": { + "version": "12.3.1", + "resolved": "https://registry.npmjs.org/eslint-config-next/-/eslint-config-next-12.3.1.tgz", + "integrity": "sha512-EN/xwKPU6jz1G0Qi6Bd/BqMnHLyRAL0VsaQaWA7F3KkjAgZHi4f1uL1JKGWNxdQpHTW/sdGONBd0bzxUka/DJg==", + "requires": { + "@next/eslint-plugin-next": "12.3.1", + "@rushstack/eslint-patch": "^1.1.3", + "@typescript-eslint/parser": "^5.21.0", + "eslint-import-resolver-node": "^0.3.6", + "eslint-import-resolver-typescript": "^2.7.1", + "eslint-plugin-import": "^2.26.0", + "eslint-plugin-jsx-a11y": "^6.5.1", + "eslint-plugin-react": "^7.31.7", + "eslint-plugin-react-hooks": "^4.5.0" + } + }, + "eslint-config-prettier": { + "version": "8.5.0", + "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-8.5.0.tgz", + "integrity": "sha512-obmWKLUNCnhtQRKc+tmnYuQl0pFU1ibYJQ5BGhTVB08bHe9wC8qUeG7c08dj9XX+AuPj1YSGSQIHl1pnDHZR0Q==", + "requires": {} + }, + 
"eslint-config-turbo": { + "version": "0.0.4", + "resolved": "https://registry.npmjs.org/eslint-config-turbo/-/eslint-config-turbo-0.0.4.tgz", + "integrity": "sha512-HErPS/wfWkSdV9Yd2dDkhZt3W2B78Ih/aWPFfaHmCMjzPalh+5KxRRGTf8MOBQLCebcWJX0lP1Zvc1rZIHlXGg==", + "requires": { + "eslint-plugin-turbo": "0.0.4" + } + }, + "eslint-import-resolver-node": { + "version": "0.3.6", + "resolved": "https://registry.npmjs.org/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.6.tgz", + "integrity": "sha512-0En0w03NRVMn9Uiyn8YRPDKvWjxCWkslUEhGNTdGx15RvPJYQ+lbOlqrlNI2vEAs4pDYK4f/HN2TbDmk5TP0iw==", + "requires": { + "debug": "^3.2.7", + "resolve": "^1.20.0" + }, + "dependencies": { + "debug": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "requires": { + "ms": "^2.1.1" + } + } + } + }, + "eslint-import-resolver-typescript": { + "version": "2.7.1", + "resolved": "https://registry.npmjs.org/eslint-import-resolver-typescript/-/eslint-import-resolver-typescript-2.7.1.tgz", + "integrity": "sha512-00UbgGwV8bSgUv34igBDbTOtKhqoRMy9bFjNehT40bXg6585PNIct8HhXZ0SybqB9rWtXj9crcku8ndDn/gIqQ==", + "requires": { + "debug": "^4.3.4", + "glob": "^7.2.0", + "is-glob": "^4.0.3", + "resolve": "^1.22.0", + "tsconfig-paths": "^3.14.1" + }, + "dependencies": { + "glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "requires": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + } + } + } + }, + "eslint-module-utils": { + "version": "2.7.4", + "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.7.4.tgz", + "integrity": "sha512-j4GT+rqzCoRKHwURX7pddtIPGySnX9Si/cgMI5ztrcqOPtk5dDEeZ34CQVPphnqkJytlc97Vuk05Um2mJ3gEQA==", + "requires": { + "debug": "^3.2.7" + }, + "dependencies": { + "debug": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "requires": { + "ms": "^2.1.1" + } + } + } + }, + "eslint-plugin-import": { + "version": "2.26.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.26.0.tgz", + "integrity": "sha512-hYfi3FXaM8WPLf4S1cikh/r4IxnO6zrhZbEGz2b660EJRbuxgpDS5gkCuYgGWg2xxh2rBuIr4Pvhve/7c31koA==", + "requires": { + "array-includes": "^3.1.4", + "array.prototype.flat": "^1.2.5", + "debug": "^2.6.9", + "doctrine": "^2.1.0", + "eslint-import-resolver-node": "^0.3.6", + "eslint-module-utils": "^2.7.3", + "has": "^1.0.3", + "is-core-module": "^2.8.1", + "is-glob": "^4.0.3", + "minimatch": "^3.1.2", + "object.values": "^1.1.5", + "resolve": "^1.22.0", + "tsconfig-paths": "^3.14.1" + }, + "dependencies": { + "debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "requires": { + "ms": "2.0.0" + } + }, + "doctrine": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-2.1.0.tgz", + "integrity": "sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==", + 
"requires": { + "esutils": "^2.0.2" + } + }, + "ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" + } + } + }, + "eslint-plugin-jsx-a11y": { + "version": "6.6.1", + "resolved": "https://registry.npmjs.org/eslint-plugin-jsx-a11y/-/eslint-plugin-jsx-a11y-6.6.1.tgz", + "integrity": "sha512-sXgFVNHiWffBq23uiS/JaP6eVR622DqwB4yTzKvGZGcPq6/yZ3WmOZfuBks/vHWo9GaFOqC2ZK4i6+C35knx7Q==", + "requires": { + "@babel/runtime": "^7.18.9", + "aria-query": "^4.2.2", + "array-includes": "^3.1.5", + "ast-types-flow": "^0.0.7", + "axe-core": "^4.4.3", + "axobject-query": "^2.2.0", + "damerau-levenshtein": "^1.0.8", + "emoji-regex": "^9.2.2", + "has": "^1.0.3", + "jsx-ast-utils": "^3.3.2", + "language-tags": "^1.0.5", + "minimatch": "^3.1.2", + "semver": "^6.3.0" + } + }, + "eslint-plugin-react": { + "version": "7.31.8", + "resolved": "https://registry.npmjs.org/eslint-plugin-react/-/eslint-plugin-react-7.31.8.tgz", + "integrity": "sha512-5lBTZmgQmARLLSYiwI71tiGVTLUuqXantZM6vlSY39OaDSV0M7+32K5DnLkmFrwTe+Ksz0ffuLUC91RUviVZfw==", + "requires": { + "array-includes": "^3.1.5", + "array.prototype.flatmap": "^1.3.0", + "doctrine": "^2.1.0", + "estraverse": "^5.3.0", + "jsx-ast-utils": "^2.4.1 || ^3.0.0", + "minimatch": "^3.1.2", + "object.entries": "^1.1.5", + "object.fromentries": "^2.0.5", + "object.hasown": "^1.1.1", + "object.values": "^1.1.5", + "prop-types": "^15.8.1", + "resolve": "^2.0.0-next.3", + "semver": "^6.3.0", + "string.prototype.matchall": "^4.0.7" + }, + "dependencies": { + "doctrine": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-2.1.0.tgz", + "integrity": "sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==", + "requires": { + "esutils": "^2.0.2" + } + }, + "resolve": { + "version": "2.0.0-next.4", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-2.0.0-next.4.tgz", + "integrity": "sha512-iMDbmAWtfU+MHpxt/I5iWI7cY6YVEZUQ3MBgPQ++XD1PELuJHIl82xBmObyP2KyQmkNB2dsqF7seoQQiAn5yDQ==", + "requires": { + "is-core-module": "^2.9.0", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + } + } + } + }, + "eslint-plugin-react-hooks": { + "version": "4.6.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-react-hooks/-/eslint-plugin-react-hooks-4.6.0.tgz", + "integrity": "sha512-oFc7Itz9Qxh2x4gNHStv3BqJq54ExXmfC+a1NjAta66IAN87Wu0R/QArgIS9qKzX3dXKPI9H5crl9QchNMY9+g==", + "requires": {} + }, + "eslint-plugin-turbo": { + "version": "0.0.4", + "resolved": "https://registry.npmjs.org/eslint-plugin-turbo/-/eslint-plugin-turbo-0.0.4.tgz", + "integrity": "sha512-dfmYE/iPvoJInQq+5E/0mj140y/rYwKtzZkn3uVK8+nvwC5zmWKQ6ehMWrL4bYBkGzSgpOndZM+jOXhPQ2m8Cg==", + "requires": {} + }, + "eslint-scope": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.1.1.tgz", + "integrity": "sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw==", + "requires": { + "esrecurse": "^4.3.0", + "estraverse": "^4.1.1" + }, + "dependencies": { + "estraverse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz", + "integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==" + } + } + }, + "eslint-utils": { + "version": "2.1.0", + "resolved": 
"https://registry.npmjs.org/eslint-utils/-/eslint-utils-2.1.0.tgz", + "integrity": "sha512-w94dQYoauyvlDc43XnGB8lU3Zt713vNChgt4EWwhXAP2XkBvndfxF0AgIqKOOasjPIPzj9JqgwkwbCYD0/V3Zg==", + "requires": { + "eslint-visitor-keys": "^1.1.0" + }, + "dependencies": { + "eslint-visitor-keys": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz", + "integrity": "sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ==" + } + } + }, + "eslint-visitor-keys": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-2.1.0.tgz", + "integrity": "sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw==" + }, + "espree": { + "version": "7.3.1", + "resolved": "https://registry.npmjs.org/espree/-/espree-7.3.1.tgz", + "integrity": "sha512-v3JCNCE64umkFpmkFGqzVKsOT0tN1Zr+ueqLZfpV1Ob8e+CEgPWa+OxCoGH3tnhimMKIaBm4m/vaRpJ/krRz2g==", + "requires": { + "acorn": "^7.4.0", + "acorn-jsx": "^5.3.1", + "eslint-visitor-keys": "^1.3.0" + }, + "dependencies": { + "eslint-visitor-keys": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz", + "integrity": "sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ==" + } + } + }, + "esprima": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", + "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==" + }, + "esquery": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.4.0.tgz", + "integrity": "sha512-cCDispWt5vHHtwMY2YrAQ4ibFkAL8RbH5YGBnZBc90MolvvfkkQcJro/aZiAQUlQ3qgrYS6D6v8Gc5G5CQsc9w==", + "requires": { + "estraverse": "^5.1.0" + } + }, + "esrecurse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", + "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", + "requires": { + "estraverse": "^5.2.0" + } + }, + "estraverse": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==" + }, + "esutils": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", + "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==" + }, + "fast-deep-equal": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==" + }, + "fast-glob": { + "version": "3.2.12", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.2.12.tgz", + "integrity": "sha512-DVj4CQIYYow0BlaelwK1pHl5n5cRSJfM60UA0zK891sVInoPri2Ekj7+e1CT3/3qxXenpI+nBBmQAcJPJgaj4w==", + "requires": { + "@nodelib/fs.stat": "^2.0.2", + "@nodelib/fs.walk": "^1.2.3", + "glob-parent": "^5.1.2", + "merge2": "^1.3.0", + "micromatch": "^4.0.4" + } + }, + "fast-json-stable-stringify": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": 
"sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==" + }, + "fast-levenshtein": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", + "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==" + }, + "fastq": { + "version": "1.13.0", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.13.0.tgz", + "integrity": "sha512-YpkpUnK8od0o1hmeSc7UUs/eB/vIPWJYjKck2QKIzAf71Vm1AAQ3EbuZB3g2JIy+pg+ERD0vqI79KyZiB2e2Nw==", + "requires": { + "reusify": "^1.0.4" + } + }, + "file-entry-cache": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz", + "integrity": "sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==", + "requires": { + "flat-cache": "^3.0.4" + } + }, + "fill-range": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", + "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", + "requires": { + "to-regex-range": "^5.0.1" + } + }, + "flat-cache": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-3.0.4.tgz", + "integrity": "sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg==", + "requires": { + "flatted": "^3.1.0", + "rimraf": "^3.0.2" + } + }, + "flatted": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.2.7.tgz", + "integrity": "sha512-5nqDSxl8nn5BSNxyR3n4I6eDmbolI6WT+QqR547RwxQapgjQBmtktdP+HTBb/a/zLsbzERTONyUB5pefh5TtjQ==" + }, + "fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==" + }, + "function-bind": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", + "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==" + }, + "function.prototype.name": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/function.prototype.name/-/function.prototype.name-1.1.5.tgz", + "integrity": "sha512-uN7m/BzVKQnCUF/iW8jYea67v++2u7m5UgENbHRtdDVclOUP+FMPlCNdmk0h/ysGyo2tavMJEDqJAkJdRa1vMA==", + "requires": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.3", + "es-abstract": "^1.19.0", + "functions-have-names": "^1.2.2" + } + }, + "functional-red-black-tree": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz", + "integrity": "sha512-dsKNQNdj6xA3T+QlADDA7mOSlX0qiMINjn0cgr+eGHGsbSHzTabcIogz2+p/iqP1Xs6EP/sS2SbqH+brGTbq0g==" + }, + "functions-have-names": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/functions-have-names/-/functions-have-names-1.2.3.tgz", + "integrity": "sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ==" + }, + "gensync": { + "version": "1.0.0-beta.2", + "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz", + "integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==", + "dev": true + }, + "get-intrinsic": { + "version": "1.1.3", + "resolved": 
"https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.1.3.tgz", + "integrity": "sha512-QJVz1Tj7MS099PevUG5jvnt9tSkXN8K14dxQlikJuPt4uD9hHAHjLyLBiLR5zELelBdD9QNRAXZzsJx0WaDL9A==", + "requires": { + "function-bind": "^1.1.1", + "has": "^1.0.3", + "has-symbols": "^1.0.3" + } + }, + "get-symbol-description": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.0.0.tgz", + "integrity": "sha512-2EmdH1YvIQiZpltCNgkuiUnyukzxM/R6NDJX31Ke3BG1Nq5b0S2PhX59UKi9vZpPDQVdqn+1IcaAwnzTT5vCjw==", + "requires": { + "call-bind": "^1.0.2", + "get-intrinsic": "^1.1.1" + } + }, + "glob": { + "version": "7.1.7", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.7.tgz", + "integrity": "sha512-OvD9ENzPLbegENnYP5UUfJIirTg4+XwMWGaQfQTY0JenxNvvIKP3U3/tAQSPIu/lHxXYSZmpXlUHeqAIdKzBLQ==", + "requires": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + } + }, + "glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "requires": { + "is-glob": "^4.0.1" + } + }, + "globals": { + "version": "11.12.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", + "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", + "dev": true + }, + "globby": { + "version": "11.1.0", + "resolved": "https://registry.npmjs.org/globby/-/globby-11.1.0.tgz", + "integrity": "sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==", + "requires": { + "array-union": "^2.1.0", + "dir-glob": "^3.0.1", + "fast-glob": "^3.2.9", + "ignore": "^5.2.0", + "merge2": "^1.4.1", + "slash": "^3.0.0" + }, + "dependencies": { + "ignore": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.2.0.tgz", + "integrity": "sha512-CmxgYGiEPCLhfLnpPp1MoRmifwEIOgjcHXxOBjv7mY96c+eWScsOP9c112ZyLdWHi0FxHjI+4uVhKYp/gcdRmQ==" + } + } + }, + "graceful-fs": { + "version": "4.2.10", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.10.tgz", + "integrity": "sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA==", + "dev": true + }, + "has": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", + "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", + "requires": { + "function-bind": "^1.1.1" + } + }, + "has-bigints": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-bigints/-/has-bigints-1.0.2.tgz", + "integrity": "sha512-tSvCKtBr9lkF0Ex0aQiP9N+OpV4zi2r/Nee5VkRDbaqv35RLYMzbwQfFSZZH0kR+Rd6302UJZ2p/bJCEoR3VoQ==" + }, + "has-flag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==" + }, + "has-property-descriptors": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.0.tgz", + "integrity": "sha512-62DVLZGoiEBDHQyqG4w9xCuZ7eJEwNmJRWw2VY84Oedb7WFcA27fiEVe8oUQx9hAUJ4ekurquucTGwsyO1XGdQ==", + "requires": { + "get-intrinsic": "^1.1.1" + } + }, + "has-symbols": { + "version": "1.0.3", + "resolved": 
"https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz", + "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==" + }, + "has-tostringtag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.0.tgz", + "integrity": "sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ==", + "requires": { + "has-symbols": "^1.0.2" + } + }, + "ignore": { + "version": "4.0.6", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-4.0.6.tgz", + "integrity": "sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg==" + }, + "import-fresh": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz", + "integrity": "sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==", + "requires": { + "parent-module": "^1.0.0", + "resolve-from": "^4.0.0" + } + }, + "imurmurhash": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", + "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==" + }, + "inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", + "requires": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" + }, + "internal-slot": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.3.tgz", + "integrity": "sha512-O0DB1JC/sPyZl7cIo78n5dR7eUSwwpYPiXRhTzNxZVAMUuB8vlnRFyLxdrVToks6XPLVnFfbzaVd5WLjhgg+vA==", + "requires": { + "get-intrinsic": "^1.1.0", + "has": "^1.0.3", + "side-channel": "^1.0.4" + } + }, + "is-bigint": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/is-bigint/-/is-bigint-1.0.4.tgz", + "integrity": "sha512-zB9CruMamjym81i2JZ3UMn54PKGsQzsJeo6xvN3HJJ4CAsQNB6iRutp2To77OfCNuoxspsIhzaPoO1zyCEhFOg==", + "requires": { + "has-bigints": "^1.0.1" + } + }, + "is-boolean-object": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.1.2.tgz", + "integrity": "sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA==", + "requires": { + "call-bind": "^1.0.2", + "has-tostringtag": "^1.0.0" + } + }, + "is-callable": { + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.7.tgz", + "integrity": "sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA==" + }, + "is-core-module": { + "version": "2.10.0", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.10.0.tgz", + "integrity": "sha512-Erxj2n/LDAZ7H8WNJXd9tw38GYM3dv8rk8Zcs+jJuxYTW7sozH+SS8NtrSjVL1/vpLvWi1hxy96IzjJ3EHTJJg==", + "requires": { + "has": "^1.0.3" + } + }, + "is-date-object": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.5.tgz", + "integrity": "sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ==", + "requires": { + "has-tostringtag": "^1.0.0" + 
} + }, + "is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==" + }, + "is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==" + }, + "is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "requires": { + "is-extglob": "^2.1.1" + } + }, + "is-negative-zero": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.2.tgz", + "integrity": "sha512-dqJvarLawXsFbNDeJW7zAz8ItJ9cd28YufuuFzh0G8pNHjJMnY08Dv7sYX2uF5UpQOwieAeOExEYAWWfu7ZZUA==" + }, + "is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==" + }, + "is-number-object": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.0.7.tgz", + "integrity": "sha512-k1U0IRzLMo7ZlYIfzRu23Oh6MiIFasgpb9X76eqfFZAqwH44UI4KTBvBYIZ1dSL9ZzChTB9ShHfLkR4pdW5krQ==", + "requires": { + "has-tostringtag": "^1.0.0" + } + }, + "is-regex": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz", + "integrity": "sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg==", + "requires": { + "call-bind": "^1.0.2", + "has-tostringtag": "^1.0.0" + } + }, + "is-shared-array-buffer": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.2.tgz", + "integrity": "sha512-sqN2UDu1/0y6uvXyStCOzyhAjCSlHceFoMKJW8W9EU9cvic/QdsZ0kEU93HEy3IUEFZIiH/3w+AH/UQbPHNdhA==", + "requires": { + "call-bind": "^1.0.2" + } + }, + "is-string": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.7.tgz", + "integrity": "sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg==", + "requires": { + "has-tostringtag": "^1.0.0" + } + }, + "is-symbol": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.4.tgz", + "integrity": "sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg==", + "requires": { + "has-symbols": "^1.0.2" + } + }, + "is-weakref": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-weakref/-/is-weakref-1.0.2.tgz", + "integrity": "sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ==", + "requires": { + "call-bind": "^1.0.2" + } + }, + "isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==" + }, + "js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==" + }, + "js-yaml": { + "version": "3.14.1", + 
"resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz", + "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==", + "requires": { + "argparse": "^1.0.7", + "esprima": "^4.0.0" + } + }, + "jsesc": { + "version": "2.5.2", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-2.5.2.tgz", + "integrity": "sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==", + "dev": true + }, + "json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==" + }, + "json-stable-stringify-without-jsonify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", + "integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==" + }, + "json5": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.1.tgz", + "integrity": "sha512-1hqLFMSrGHRHxav9q9gNjJ5EXznIxGVO09xQRrwplcS8qs28pZ8s8hupZAmqDwZUmVZ2Qb2jnyPOWcDH8m8dlA==", + "dev": true + }, + "jsx-ast-utils": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/jsx-ast-utils/-/jsx-ast-utils-3.3.3.tgz", + "integrity": "sha512-fYQHZTZ8jSfmWZ0iyzfwiU4WDX4HpHbMCZ3gPlWYiCl3BoeOTsqKBqnTVfH2rYT7eP5c3sVbeSPHnnJOaTrWiw==", + "requires": { + "array-includes": "^3.1.5", + "object.assign": "^4.1.3" + } + }, + "language-subtag-registry": { + "version": "0.3.22", + "resolved": "https://registry.npmjs.org/language-subtag-registry/-/language-subtag-registry-0.3.22.tgz", + "integrity": "sha512-tN0MCzyWnoz/4nHS6uxdlFWoUZT7ABptwKPQ52Ea7URk6vll88bWBVhodtnlfEuCcKWNGoc+uGbw1cwa9IKh/w==" + }, + "language-tags": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/language-tags/-/language-tags-1.0.5.tgz", + "integrity": "sha512-qJhlO9cGXi6hBGKoxEG/sKZDAHD5Hnu9Hs4WbOY3pCWXDhw0N8x1NenNzm2EnNLkLkk7J2SdxAkDSbb6ftT+UQ==", + "requires": { + "language-subtag-registry": "~0.3.2" + } + }, + "levn": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", + "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==", + "requires": { + "prelude-ls": "^1.2.1", + "type-check": "~0.4.0" + } + }, + "lodash": { + "version": "3.10.1", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-3.10.1.tgz", + "integrity": "sha512-9mDDwqVIma6OZX79ZlDACZl8sBm0TEnkf99zV3iMA4GzkIT/9hiqP5mY0HoT1iNLCrKc/R1HByV+yJfRWVJryQ==" + }, + "lodash.merge": { + "version": "4.6.2", + "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", + "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==" + }, + "lodash.truncate": { + "version": "4.4.2", + "resolved": "https://registry.npmjs.org/lodash.truncate/-/lodash.truncate-4.4.2.tgz", + "integrity": "sha512-jttmRe7bRse52OsWIMDLaXxWqRAmtIUccAQ3garviCqJjafXOfNMO0yMfNpdD6zbGaTU0P5Nz7e7gAT6cKmJRw==" + }, + "loose-envify": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz", + "integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==", + "requires": { + "js-tokens": "^3.0.0 || ^4.0.0" + } + }, + "lru-cache": { + 
"version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "requires": { + "yallist": "^4.0.0" + } + }, + "merge2": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", + "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==" + }, + "micromatch": { + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz", + "integrity": "sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==", + "requires": { + "braces": "^3.0.2", + "picomatch": "^2.3.1" + } + }, + "minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "requires": { + "brace-expansion": "^1.1.7" + } + }, + "minimist": { + "version": "1.2.6", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.6.tgz", + "integrity": "sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==" + }, + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + }, + "nanoid": { + "version": "3.3.4", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.4.tgz", + "integrity": "sha512-MqBkQh/OHTS2egovRtLk45wEyNXwF+cokD+1YPf9u5VfJiRdAiRwB2froX5Co9Rh20xs4siNPm8naNotSD6RBw==" + }, + "natural-compare": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", + "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==" + }, + "next": { + "version": "12.3.0", + "resolved": "https://registry.npmjs.org/next/-/next-12.3.0.tgz", + "integrity": "sha512-GpzI6me9V1+XYtfK0Ae9WD0mKqHyzQlGq1xH1rzNIYMASo4Tkl4rTe9jSqtBpXFhOS33KohXs9ZY38Akkhdciw==", + "requires": { + "@next/env": "12.3.0", + "@next/swc-android-arm-eabi": "12.3.0", + "@next/swc-android-arm64": "12.3.0", + "@next/swc-darwin-arm64": "12.3.0", + "@next/swc-darwin-x64": "12.3.0", + "@next/swc-freebsd-x64": "12.3.0", + "@next/swc-linux-arm-gnueabihf": "12.3.0", + "@next/swc-linux-arm64-gnu": "12.3.0", + "@next/swc-linux-arm64-musl": "12.3.0", + "@next/swc-linux-x64-gnu": "12.3.0", + "@next/swc-linux-x64-musl": "12.3.0", + "@next/swc-win32-arm64-msvc": "12.3.0", + "@next/swc-win32-ia32-msvc": "12.3.0", + "@next/swc-win32-x64-msvc": "12.3.0", + "@swc/helpers": "0.4.11", + "caniuse-lite": "^1.0.30001332", + "postcss": "8.4.14", + "styled-jsx": "5.0.6", + "use-sync-external-store": "1.2.0" + } + }, + "next-transpile-modules": { + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/next-transpile-modules/-/next-transpile-modules-9.0.0.tgz", + "integrity": "sha512-VCNFOazIAnXn1hvgYYSTYMnoWgKgwlYh4lm1pKbSfiB3kj5ZYLcKVhfh3jkPOg1cnd9DP+pte9yCUocdPEUBTQ==", + "dev": true, + "requires": { + "enhanced-resolve": "^5.7.0", + "escalade": "^3.1.1" + } + }, + "node-releases": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.6.tgz", + "integrity": "sha512-PiVXnNuFm5+iYkLBNeq5211hvO38y63T0i2KKh2KnUs3RpzJ+JtODFjkD8yjLwnDkTYF1eKXheUwdssR+NRZdg==", + "dev": true + }, + 
"object-assign": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", + "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==" + }, + "object-inspect": { + "version": "1.12.2", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.2.tgz", + "integrity": "sha512-z+cPxW0QGUp0mcqcsgQyLVRDoXFQbXOwBaqyF7VIgI4TWNQsDHrBpUQslRmIfAoYWdYzs6UlKJtB2XJpTaNSpQ==" + }, + "object-keys": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz", + "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==" + }, + "object.assign": { + "version": "4.1.4", + "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.4.tgz", + "integrity": "sha512-1mxKf0e58bvyjSCtKYY4sRe9itRk3PJpquJOjeIkz885CczcI4IvJJDLPS72oowuSh+pBxUFROpX+TU++hxhZQ==", + "requires": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.4", + "has-symbols": "^1.0.3", + "object-keys": "^1.1.1" + } + }, + "object.entries": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/object.entries/-/object.entries-1.1.5.tgz", + "integrity": "sha512-TyxmjUoZggd4OrrU1W66FMDG6CuqJxsFvymeyXI51+vQLN67zYfZseptRge703kKQdo4uccgAKebXFcRCzk4+g==", + "requires": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.3", + "es-abstract": "^1.19.1" + } + }, + "object.fromentries": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/object.fromentries/-/object.fromentries-2.0.5.tgz", + "integrity": "sha512-CAyG5mWQRRiBU57Re4FKoTBjXfDoNwdFVH2Y1tS9PqCsfUTymAohOkEMSG3aRNKmv4lV3O7p1et7c187q6bynw==", + "requires": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.3", + "es-abstract": "^1.19.1" + } + }, + "object.hasown": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/object.hasown/-/object.hasown-1.1.1.tgz", + "integrity": "sha512-LYLe4tivNQzq4JdaWW6WO3HMZZJWzkkH8fnI6EebWl0VZth2wL2Lovm74ep2/gZzlaTdV62JZHEqHQ2yVn8Q/A==", + "requires": { + "define-properties": "^1.1.4", + "es-abstract": "^1.19.5" + } + }, + "object.values": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.1.5.tgz", + "integrity": "sha512-QUZRW0ilQ3PnPpbNtgdNV1PDbEqLIiSFB3l+EnGtBQ/8SUTLj1PZwtQHABZtLgwpJZTSZhuGLOGk57Drx2IvYg==", + "requires": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.3", + "es-abstract": "^1.19.1" + } + }, + "once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "requires": { + "wrappy": "1" + } + }, + "optionator": { + "version": "0.9.1", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.1.tgz", + "integrity": "sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw==", + "requires": { + "deep-is": "^0.1.3", + "fast-levenshtein": "^2.0.6", + "levn": "^0.4.1", + "prelude-ls": "^1.2.1", + "type-check": "^0.4.0", + "word-wrap": "^1.2.3" + } + }, + "parent-module": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", + "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", + "requires": { + "callsites": "^3.0.0" + } + }, + "path-is-absolute": { + "version": "1.0.1", + "resolved": 
"https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==" + }, + "path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==" + }, + "path-parse": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==" + }, + "path-type": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", + "integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==" + }, + "picocolors": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz", + "integrity": "sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==" + }, + "picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==" + }, + "postcss": { + "version": "8.4.14", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.14.tgz", + "integrity": "sha512-E398TUmfAYFPBSdzgeieK2Y1+1cpdxJx8yXbK/m57nRhKSmk1GB2tO4lbLBtlkfPQTDKfe4Xqv1ASWPpayPEig==", + "requires": { + "nanoid": "^3.3.4", + "picocolors": "^1.0.0", + "source-map-js": "^1.0.2" + } + }, + "prelude-ls": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", + "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==" + }, + "prettier": { + "version": "2.7.1", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.7.1.tgz", + "integrity": "sha512-ujppO+MkdPqoVINuDFDRLClm7D78qbDt0/NR+wp5FqEZOoTNAjPHWj17QRhu7geIHJfcNhRk1XVQmF8Bp3ye+g==", + "dev": true + }, + "progress": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/progress/-/progress-2.0.3.tgz", + "integrity": "sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA==" + }, + "prop-types": { + "version": "15.8.1", + "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.8.1.tgz", + "integrity": "sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg==", + "requires": { + "loose-envify": "^1.4.0", + "object-assign": "^4.1.1", + "react-is": "^16.13.1" + } + }, + "punycode": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz", + "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==" + }, + "queue-microtask": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", + "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==" + }, + "react": { + "version": "18.2.0", + "resolved": "https://registry.npmjs.org/react/-/react-18.2.0.tgz", + "integrity": "sha512-/3IjMdb2L9QbBdWiW5e3P2/npwMBaU9mHCSCUzNln0ZCYbcfTsGbTJrU/kGemdH2IWmB2ioZ+zkxtmq6g09fGQ==", + "requires": { + "loose-envify": "^1.1.0" + } + }, + "react-dom": { + "version": "18.2.0", + "resolved": 
"https://registry.npmjs.org/react-dom/-/react-dom-18.2.0.tgz", + "integrity": "sha512-6IMTriUmvsjHUjNtEDudZfuDQUoWXVxKHhlEGSk81n4YFS+r/Kl99wXiwlVXtPBtJenozv2P+hxDsw9eA7Xo6g==", + "requires": { + "loose-envify": "^1.1.0", + "scheduler": "^0.23.0" + } + }, + "react-is": { + "version": "16.13.1", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-16.13.1.tgz", + "integrity": "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==" + }, + "regenerator-runtime": { + "version": "0.13.9", + "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.9.tgz", + "integrity": "sha512-p3VT+cOEgxFsRRA9X4lkI1E+k2/CtnKtU4gcxyaCUreilL/vqI6CdZ3wxVUx3UOUg+gnUOQQcRI7BmSI656MYA==" + }, + "regexp.prototype.flags": { + "version": "1.4.3", + "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.4.3.tgz", + "integrity": "sha512-fjggEOO3slI6Wvgjwflkc4NFRCTZAu5CnNfBd5qOMYhWdn67nJBBu34/TkD++eeFmd8C9r9jfXJ27+nSiRkSUA==", + "requires": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.3", + "functions-have-names": "^1.2.2" + } + }, + "regexpp": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-3.2.0.tgz", + "integrity": "sha512-pq2bWo9mVD43nbts2wGv17XLiNLya+GklZ8kaDLV2Z08gDCsGpnKn9BFMepvWuHCbyVvY7J5o5+BVvoQbmlJLg==" + }, + "require-from-string": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz", + "integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==" + }, + "resolve": { + "version": "1.22.1", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.1.tgz", + "integrity": "sha512-nBpuuYuY5jFsli/JIs1oldw6fOQCBioohqWZg/2hiaOybXOft4lonv85uDOKXdf8rhyK159cxU5cDcK/NKk8zw==", + "requires": { + "is-core-module": "^2.9.0", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + } + }, + "resolve-from": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", + "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==" + }, + "reusify": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", + "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==" + }, + "rimraf": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", + "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", + "requires": { + "glob": "^7.1.3" + } + }, + "run-parallel": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", + "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", + "requires": { + "queue-microtask": "^1.2.2" + } + }, + "safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", + "dev": true + }, + "safe-regex-test": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/safe-regex-test/-/safe-regex-test-1.0.0.tgz", + "integrity": "sha512-JBUUzyOgEwXQY1NuPtvcj/qcBDbDmEvWufhlnXZIm75DEHp+afM1r1ujJpJsV/gSM4t59tpDyPi1sd6ZaPFfsA==", + "requires": { + "call-bind": "^1.0.2", + 
"get-intrinsic": "^1.1.3", + "is-regex": "^1.1.4" + } + }, + "scheduler": { + "version": "0.23.0", + "resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.23.0.tgz", + "integrity": "sha512-CtuThmgHNg7zIZWAXi3AsyIzA3n4xx7aNyjwC2VJldO2LMVDhFK+63xGqq6CsJH4rTAt6/M+N4GhZiDYPx9eUw==", + "requires": { + "loose-envify": "^1.1.0" + } + }, + "semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" + }, + "shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "requires": { + "shebang-regex": "^3.0.0" + } + }, + "shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==" + }, + "side-channel": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.4.tgz", + "integrity": "sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==", + "requires": { + "call-bind": "^1.0.0", + "get-intrinsic": "^1.0.2", + "object-inspect": "^1.9.0" + } + }, + "slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==" + }, + "slice-ansi": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-4.0.0.tgz", + "integrity": "sha512-qMCMfhY040cVHT43K9BFygqYbUPFZKHOg7K73mtTWJRb8pyP3fzf4Ixd5SzdEJQ6MRUg/WBnOLxghZtKKurENQ==", + "requires": { + "ansi-styles": "^4.0.0", + "astral-regex": "^2.0.0", + "is-fullwidth-code-point": "^3.0.0" + }, + "dependencies": { + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "requires": { + "color-convert": "^2.0.1" + } + }, + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" + } + } + }, + "source-map-js": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.0.2.tgz", + "integrity": "sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw==" + }, + "sprintf-js": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", + "integrity": "sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==" + }, + "string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": 
"sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "requires": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "dependencies": { + "emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" + } + } + }, + "string.prototype.matchall": { + "version": "4.0.7", + "resolved": "https://registry.npmjs.org/string.prototype.matchall/-/string.prototype.matchall-4.0.7.tgz", + "integrity": "sha512-f48okCX7JiwVi1NXCVWcFnZgADDC/n2vePlQ/KUCNqCikLLilQvwjMO8+BHVKvgzH0JB0J9LEPgxOGT02RoETg==", + "requires": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.3", + "es-abstract": "^1.19.1", + "get-intrinsic": "^1.1.1", + "has-symbols": "^1.0.3", + "internal-slot": "^1.0.3", + "regexp.prototype.flags": "^1.4.1", + "side-channel": "^1.0.4" + } + }, + "string.prototype.trimend": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.5.tgz", + "integrity": "sha512-I7RGvmjV4pJ7O3kdf+LXFpVfdNOxtCW/2C8f6jNiW4+PQchwxkCDzlk1/7p+Wl4bqFIZeF47qAHXLuHHWKAxog==", + "requires": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.4", + "es-abstract": "^1.19.5" + } + }, + "string.prototype.trimstart": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.5.tgz", + "integrity": "sha512-THx16TJCGlsN0o6dl2o6ncWUsdgnLRSA23rRE5pyGBw/mLr3Ej/R2LaqCtgP8VNMGZsvMWnf9ooZPyY2bHvUFg==", + "requires": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.4", + "es-abstract": "^1.19.5" + } + }, + "strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "requires": { + "ansi-regex": "^5.0.1" + } + }, + "strip-bom": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz", + "integrity": "sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA==" + }, + "strip-json-comments": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", + "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==" + }, + "styled-jsx": { + "version": "5.0.6", + "resolved": "https://registry.npmjs.org/styled-jsx/-/styled-jsx-5.0.6.tgz", + "integrity": "sha512-xOeROtkK5MGMDimBQ3J6iPId8q0t/BDoG5XN6oKkZClVz9ISF/hihN8OCn2LggMU6N32aXnrXBdn3auSqNS9fA==", + "requires": {} + }, + "supports-color": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "requires": { + "has-flag": "^3.0.0" + } + }, + "supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==" + }, + "table": { + "version": "6.8.0", + "resolved": "https://registry.npmjs.org/table/-/table-6.8.0.tgz", + "integrity": 
"sha512-s/fitrbVeEyHKFa7mFdkuQMWlH1Wgw/yEXMt5xACT4ZpzWFluehAxRtUUQKPuWhaLAWhFcVx6w3oC8VKaUfPGA==", + "requires": { + "ajv": "^8.0.1", + "lodash.truncate": "^4.4.2", + "slice-ansi": "^4.0.0", + "string-width": "^4.2.3", + "strip-ansi": "^6.0.1" + }, + "dependencies": { + "ajv": { + "version": "8.11.0", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.11.0.tgz", + "integrity": "sha512-wGgprdCvMalC0BztXvitD2hC04YffAvtsUn93JbGXYLAtCUO4xd17mCCZQxUOItiBwZvJScWo8NIvQMQ71rdpg==", + "requires": { + "fast-deep-equal": "^3.1.1", + "json-schema-traverse": "^1.0.0", + "require-from-string": "^2.0.2", + "uri-js": "^4.2.2" + } + }, + "json-schema-traverse": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", + "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==" + } + } + }, + "tapable": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/tapable/-/tapable-2.2.1.tgz", + "integrity": "sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ==", + "dev": true + }, + "text-table": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", + "integrity": "sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==" + }, + "to-fast-properties": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz", + "integrity": "sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog==", + "dev": true + }, + "to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "requires": { + "is-number": "^7.0.0" + } + }, + "tsconfig": { + "version": "file:packages/tsconfig" + }, + "tsconfig-paths": { + "version": "3.14.1", + "resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.14.1.tgz", + "integrity": "sha512-fxDhWnFSLt3VuTwtvJt5fpwxBHg5AdKWMsgcPOOIilyjymcYVZoCQF8fvFRezCNfblEXmi+PcM1eYHeOAgXCOQ==", + "requires": { + "@types/json5": "^0.0.29", + "json5": "^1.0.1", + "minimist": "^1.2.6", + "strip-bom": "^3.0.0" + }, + "dependencies": { + "json5": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/json5/-/json5-1.0.1.tgz", + "integrity": "sha512-aKS4WQjPenRxiQsC93MNfjx+nbF4PAdYzmd/1JIj8HYzqfbu86beTuNgXDzPknWk0n0uARlyewZo4s++ES36Ow==", + "requires": { + "minimist": "^1.2.0" + } + } + } + }, + "tslib": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz", + "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==" + }, + "tsutils": { + "version": "3.21.0", + "resolved": "https://registry.npmjs.org/tsutils/-/tsutils-3.21.0.tgz", + "integrity": "sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA==", + "requires": { + "tslib": "^1.8.1" + }, + "dependencies": { + "tslib": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", + "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==" + } + } + }, + "turbo": { + "version": "1.5.5", + "resolved": "https://registry.npmjs.org/turbo/-/turbo-1.5.5.tgz", + "integrity": 
"sha512-PVQSDl0STC9WXIyHcYUWs9gXsf8JjQig/FuHfuB8N6+XlgCGB3mPbfMEE6zrChGz2hufH4/guKRX1XJuNL6XTA==", + "dev": true, + "requires": { + "turbo-darwin-64": "1.5.5", + "turbo-darwin-arm64": "1.5.5", + "turbo-linux-64": "1.5.5", + "turbo-linux-arm64": "1.5.5", + "turbo-windows-64": "1.5.5", + "turbo-windows-arm64": "1.5.5" + } + }, + "turbo-darwin-64": { + "version": "1.5.5", + "resolved": "https://registry.npmjs.org/turbo-darwin-64/-/turbo-darwin-64-1.5.5.tgz", + "integrity": "sha512-HvEn6P2B+NXDekq9LRpRgUjcT9/oygLTcK47U0qsAJZXRBSq/2hvD7lx4nAwgY/4W3rhYJeWtHTzbhoN6BXqGQ==", + "dev": true, + "optional": true + }, + "turbo-darwin-arm64": { + "version": "1.5.5", + "resolved": "https://registry.npmjs.org/turbo-darwin-arm64/-/turbo-darwin-arm64-1.5.5.tgz", + "integrity": "sha512-Dmxr09IUy6M0nc7/xWod9galIO2DD500B75sJSkHeT+CCdJOWnlinux0ZPF8CSygNqymwYO8AO2l15/6yxcycg==", + "dev": true, + "optional": true + }, + "turbo-linux-64": { + "version": "1.5.5", + "resolved": "https://registry.npmjs.org/turbo-linux-64/-/turbo-linux-64-1.5.5.tgz", + "integrity": "sha512-wd07TZ4zXXWjzZE00FcFMLmkybQQK/NV9ff66vvAV0vdiuacSMBCNLrD6Mm4ncfrUPW/rwFW5kU/7hyuEqqtDw==", + "dev": true, + "optional": true + }, + "turbo-linux-arm64": { + "version": "1.5.5", + "resolved": "https://registry.npmjs.org/turbo-linux-arm64/-/turbo-linux-arm64-1.5.5.tgz", + "integrity": "sha512-q3q33tuo74R7gicnfvFbnZZvqmlq7Vakcvx0eshifnJw4PR+oMnTCb4w8ElVFx070zsb8DVTibq99y8NJH8T1Q==", + "dev": true, + "optional": true + }, + "turbo-windows-64": { + "version": "1.5.5", + "resolved": "https://registry.npmjs.org/turbo-windows-64/-/turbo-windows-64-1.5.5.tgz", + "integrity": "sha512-lPp9kHonNFfqgovbaW+UAPO5cLmoAN+m3G3FzqcrRPnlzt97vXYsDhDd/4Zy3oAKoAcprtP4CGy0ddisqsKTVw==", + "dev": true, + "optional": true + }, + "turbo-windows-arm64": { + "version": "1.5.5", + "resolved": "https://registry.npmjs.org/turbo-windows-arm64/-/turbo-windows-arm64-1.5.5.tgz", + "integrity": "sha512-3AfGULKNZiZVrEzsIE+W79ZRW1+f5r4nM4wLlJ1PTBHyRxBZdD6KTH1tijGfy/uTlcV5acYnKHEkDc6Q9PAXGQ==", + "dev": true, + "optional": true + }, + "type-check": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", + "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==", + "requires": { + "prelude-ls": "^1.2.1" + } + }, + "type-fest": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz", + "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==" + }, + "typescript": { + "version": "4.8.4", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.8.4.tgz", + "integrity": "sha512-QCh+85mCy+h0IGff8r5XWzOVSbBO+KfeYrMQh7NJ58QujwcE22u+NUSmUxqF+un70P9GXKxa2HCNiTTMJknyjQ==" + }, + "ui": { + "version": "file:packages/ui", + "requires": { + "@types/react": "^17.0.37", + "@types/react-dom": "^17.0.11", + "eslint": "^7.32.0", + "eslint-config-custom": "*", + "react": "^18.2.0", + "tsconfig": "*", + "typescript": "^4.5.2" + }, + "dependencies": { + "@types/react": { + "version": "17.0.50", + "resolved": "https://registry.npmjs.org/@types/react/-/react-17.0.50.tgz", + "integrity": "sha512-ZCBHzpDb5skMnc1zFXAXnL3l1FAdi+xZvwxK+PkglMmBrwjpp9nKaWuEvrGnSifCJmBFGxZOOFuwC6KH/s0NuA==", + "dev": true, + "requires": { + "@types/prop-types": "*", + "@types/scheduler": "*", + "csstype": "^3.0.2" + } + } + } + }, + "unbox-primitive": { + "version": "1.0.2", + "resolved": 
"https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.0.2.tgz", + "integrity": "sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw==", + "requires": { + "call-bind": "^1.0.2", + "has-bigints": "^1.0.2", + "has-symbols": "^1.0.3", + "which-boxed-primitive": "^1.0.2" + } + }, + "update-browserslist-db": { + "version": "1.0.9", + "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.0.9.tgz", + "integrity": "sha512-/xsqn21EGVdXI3EXSum1Yckj3ZVZugqyOZQ/CxYPBD/R+ko9NSUScf8tFF4dOKY+2pvSSJA/S+5B8s4Zr4kyvg==", + "dev": true, + "requires": { + "escalade": "^3.1.1", + "picocolors": "^1.0.0" + } + }, + "uri-js": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", + "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", + "requires": { + "punycode": "^2.1.0" + } + }, + "use-sync-external-store": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/use-sync-external-store/-/use-sync-external-store-1.2.0.tgz", + "integrity": "sha512-eEgnFxGQ1Ife9bzYs6VLi8/4X6CObHMw9Qr9tPY43iKwsPw8xE8+EFsf/2cFZ5S3esXgpWgtSCtLNS41F+sKPA==", + "requires": {} + }, + "v8-compile-cache": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/v8-compile-cache/-/v8-compile-cache-2.3.0.tgz", + "integrity": "sha512-l8lCEmLcLYZh4nbunNZvQCJc5pv7+RCwa8q/LdUx8u7lsWvPDKmpodJAJNwkAhJC//dFY48KuIEmjtd4RViDrA==" + }, + "web": { + "version": "file:apps/web", + "requires": { + "@babel/core": "^7.0.0", + "@types/node": "^17.0.12", + "@types/react": "18.0.17", + "eslint": "7.32.0", + "eslint-config-custom": "*", + "lodash": "^4.17.21", + "next": "12.3.0", + "next-transpile-modules": "9.0.0", + "react": "18.2.0", + "react-dom": "18.2.0", + "tsconfig": "*", + "typescript": "^4.5.3", + "ui": "*" + }, + "dependencies": { + "lodash": { + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", + "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==" + } + } + }, + "which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "requires": { + "isexe": "^2.0.0" + } + }, + "which-boxed-primitive": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz", + "integrity": "sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg==", + "requires": { + "is-bigint": "^1.0.1", + "is-boolean-object": "^1.1.0", + "is-number-object": "^1.0.4", + "is-string": "^1.0.5", + "is-symbol": "^1.0.3" + } + }, + "word-wrap": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz", + "integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==" + }, + "wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" + }, + "yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + } + } +} diff --git 
a/cli/internal/lockfile/testdata/pnpm-absolute-v6.yaml b/cli/internal/lockfile/testdata/pnpm-absolute-v6.yaml new file mode 100644 index 0000000..dc5d0e6 --- /dev/null +++ b/cli/internal/lockfile/testdata/pnpm-absolute-v6.yaml @@ -0,0 +1,18 @@ +lockfileVersion: "6.0" +importers: + packages/a: + dependencies: + "@scope/parent": + specifier: ^1.0.0 + version: 1.0.0 + +packages: + /@scope/parent@1.0.0: + resolution: { integrity: junk } + dependencies: + child: /@scope/child@1.0.0 + dev: false + + /@scope/child@1.0.0: + resolution: { integrity: junk } + dev: false diff --git a/cli/internal/lockfile/testdata/pnpm-absolute.yaml b/cli/internal/lockfile/testdata/pnpm-absolute.yaml new file mode 100644 index 0000000..d39f802 --- /dev/null +++ b/cli/internal/lockfile/testdata/pnpm-absolute.yaml @@ -0,0 +1,38 @@ +lockfileVersion: 5.4 +importers: + packages/a: + specifiers: + another: ^1.0.0 + "@scope/parent": ^1.0.0 + special: npm:Special@1.2.3 + dependencies: + another: 1.0.0 + "@scope/parent": 1.0.0 + special: /Special/1.2.3 + +packages: + /@scope/parent/1.0.0: + resolution: { integrity: junk } + dependencies: + child: /@scope/child/1.0.0 + dev: false + + /@scope/child/1.0.0: + resolution: { integrity: junk } + dev: false + + /another/1.0.0: + resolution: { integrity: junk } + dev: false + dependencies: + foo: 1.0.0 + + /foo/1.0.0: + resolution: { integrity: junk } + dev: false + dependencies: + Special: 1.2.3 + + /Special/1.2.3: + resolution: { integrity: junk } + dev: false diff --git a/cli/internal/lockfile/testdata/pnpm-patch-v6.yaml b/cli/internal/lockfile/testdata/pnpm-patch-v6.yaml new file mode 100644 index 0000000..b620472 --- /dev/null +++ b/cli/internal/lockfile/testdata/pnpm-patch-v6.yaml @@ -0,0 +1,40 @@ +lockfileVersion: "6.0" + +patchedDependencies: + lodash@4.17.21: + hash: lgum37zgng4nfkynzh3cs7wdeq + path: patches/lodash@4.17.21.patch + "@babel/helper-string-parser@7.19.4": + hash: wjhgmpzh47qmycrzgpeyoyh3ce + path: patches/@babel__helper-string-parser@7.19.4.patch + +importers: + .: {} + + packages/a: + dependencies: + lodash: + specifier: ^4.17.21 + version: 4.17.21(patch_hash=lgum37zgng4nfkynzh3cs7wdeq) + + packages/b: + dependencies: + "@babel/helper-string-parser": + specifier: ^7.19.4 + version: 7.19.4(patch_hash=wjhgmpzh47qmycrzgpeyoyh3ce)(@babel/core@7.21.0) + +packages: + /@babel/helper-string-parser@7.19.4(patch_hash=wjhgmpzh47qmycrzgpeyoyh3ce)(@babel/core@7.21.0): + resolution: + { + integrity: sha512-nHtDoQcuqFmwYNYPz3Rah5ph2p8PFeFCsZk9A/48dPc/rGocJ5J3hAAZ7pb76VWX3fZKu+uEr/FhH5jLx7umrw==, + } + engines: { node: ">=6.9.0" } + dev: false + + /lodash@4.17.21(patch_hash=lgum37zgng4nfkynzh3cs7wdeq): + resolution: + { + integrity: sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==, + } + dev: false diff --git a/cli/internal/lockfile/testdata/pnpm-patch.yaml b/cli/internal/lockfile/testdata/pnpm-patch.yaml new file mode 100644 index 0000000..ea84d72 --- /dev/null +++ b/cli/internal/lockfile/testdata/pnpm-patch.yaml @@ -0,0 +1,63 @@ +lockfileVersion: 5.4 + +patchedDependencies: + is-odd@3.0.1: + hash: nrrwwz7lemethtlvvm75r5bmhq + path: patches/is-odd@3.0.1.patch + "@babel/core@7.20.12": + hash: 3hyn7hbvzkemudbydlwjmrb65y + path: patches/@babel__core@7.20.12.patch + moleculer@0.14.28: + hash: 5pk7ojv7qbqha75ozglk4y4f74 + path: patches/moleculer@0.14.28.patch + +importers: + .: + specifiers: {} + + packages/dependency: + specifiers: + is-odd: ^3.0.1 + "@babel/core": ^7.20.12 + dependencies: + is-odd: 3.0.1_nrrwwz7lemethtlvvm75r5bmhq + 
"@babel/core": 7.20.12_3hyn7hbvzkemudbydlwjmrb65y + +packages: + /@babel/core/7.20.12_3hyn7hbvzkemudbydlwjmrb65y: + resolution: + { + integrity: sha512-XsMfHovsUYHFMdrIHkZphTN/2Hzzi78R08NuHfDBehym2VsPDL6Zn/JAD/JQdnRvbSsbQc4mVaU1m6JgtTEElg==, + } + engines: { node: ">=6.9.0" } + dev: false + + /is-number/6.0.0: + resolution: + { + integrity: sha512-Wu1VHeILBK8KAWJUAiSZQX94GmOE45Rg6/538fKwiloUu21KncEkYGPqob2oSZ5mUT73vLGrHQjKw3KMPwfDzg==, + } + engines: { node: ">=0.10.0" } + dev: false + + /is-odd/3.0.1_nrrwwz7lemethtlvvm75r5bmhq: + resolution: + { + integrity: sha512-CQpnWPrDwmP1+SMHXZhtLtJv90yiyVfluGsX5iNCVkrhQtU3TQHsUWPG9wkdk9Lgd5yNpAg9jQEo90CBaXgWMA==, + } + engines: { node: ">=4" } + dependencies: + is-number: 6.0.0 + dev: false + patched: true + + /moleculer/0.14.28_5pk7ojv7qbqha75ozglk4y4f74_kumip57h7zlinbhp4gz3jrbqry: + resolution: + { + integrity: sha512-CQpnWPrDwmP1+SMHXZhtLtJv90yiyVfluGsX5iNCVkrhQtU3TQHsUWPG9wkdk9Lgd5yNpAg9jQEo90CBaXgWMA==, + } + engines: { node: ">=4" } + dependencies: + is-number: 6.0.0 + dev: false + patched: true diff --git a/cli/internal/lockfile/testdata/pnpm-peer-v6.yaml b/cli/internal/lockfile/testdata/pnpm-peer-v6.yaml new file mode 100644 index 0000000..feddd07 --- /dev/null +++ b/cli/internal/lockfile/testdata/pnpm-peer-v6.yaml @@ -0,0 +1,67 @@ +lockfileVersion: "6.0" + +importers: + .: {} + + apps/web: + dependencies: + next: + specifier: 13.0.4 + version: 13.0.4(react-dom@18.2.0)(react@18.2.0) + react: + specifier: 18.2.0 + version: 18.2.0 + react-dom: + specifier: 18.2.0 + version: 18.2.0(react@18.2.0) + + packages/next-config: {} + + packages/package-for-ci: {} + + packages/tsconfig: {} + +packages: + /next@13.0.4: + resolution: + { + integrity: sha512-4P0MvbjPCI1E/UPL1GrTXtYlgFnbBbY3JQ+AMY8jYE2SwyvCWctEJySoRjveznAHjrl6TIjuAJeB8u1c2StYUQ==, + } + engines: { node: ">=14.6.0" } + hasBin: true + peerDependencies: + fibers: ">= 3.1.0" + node-sass: ^6.0.0 || ^7.0.0 + react: ^18.2.0 + react-dom: ^18.2.0 + sass: ^1.3.0 + peerDependenciesMeta: + fibers: + optional: true + node-sass: + optional: true + sass: + optional: true + dev: true + + /next@13.0.4(react-dom@18.2.0)(react@18.2.0): + resolution: + { + integrity: sha512-4P0MvbjPCI1E/UPL1GrTXtYlgFnbBbY3JQ+AMY8jYE2SwyvCWctEJySoRjveznAHjrl6TIjuAJeB8u1c2StYUQ==, + } + engines: { node: ">=14.6.0" } + hasBin: true + peerDependencies: + fibers: ">= 3.1.0" + node-sass: ^6.0.0 || ^7.0.0 + react: ^18.2.0 + react-dom: ^18.2.0 + sass: ^1.3.0 + peerDependenciesMeta: + fibers: + optional: true + node-sass: + optional: true + sass: + optional: true + dev: false diff --git a/cli/internal/lockfile/testdata/pnpm-top-level-dupe.yaml b/cli/internal/lockfile/testdata/pnpm-top-level-dupe.yaml new file mode 100644 index 0000000..6837f22 --- /dev/null +++ b/cli/internal/lockfile/testdata/pnpm-top-level-dupe.yaml @@ -0,0 +1,36 @@ +lockfileVersion: 5.4 + +importers: + packages/a: + specifiers: + ci-info: ^2.0.0 + is-ci: ^3.0.1 + dependencies: + ci-info: 2.0.0 + is-ci: 3.0.1 + +packages: + /ci-info/2.0.0: + resolution: + { + integrity: sha512-5tK7EtrZ0N+OLFMthtqOj4fI2Jeb88C4CAZPu25LDVUgXJ0A3Js4PMGqrn0JU1W0Mh1/Z8wZzYPxqUrXeBboCQ==, + } + dev: false + + /ci-info/3.7.1: + resolution: + { + integrity: sha512-4jYS4MOAaCIStSRwiuxc4B8MYhIe676yO1sYGzARnjXkWpmzZMMYxY6zu8WYWDhSuth5zhrQ1rhNSibyyvv4/w==, + } + engines: { node: ">=8" } + dev: false + + /is-ci/3.0.1: + resolution: + { + integrity: sha512-ZYvCgrefwqoQ6yTyYUbQu64HsITZ3NfKX1lzaEYdkTDcfKzzCI/wthRRYKkdjHKFVgNiXKAKm65Zo1pk2as/QQ==, + } + hasBin: true + dependencies: + ci-info: 3.7.1 + 
dev: false diff --git a/cli/internal/lockfile/testdata/pnpm6-workspace.yaml b/cli/internal/lockfile/testdata/pnpm6-workspace.yaml new file mode 100644 index 0000000..daf92b7 --- /dev/null +++ b/cli/internal/lockfile/testdata/pnpm6-workspace.yaml @@ -0,0 +1,1704 @@ +lockfileVersion: 5.3 + +importers: + .: + specifiers: + "@pnpm/make-dedicated-lockfile": ^0.3.19 + devDependencies: + "@pnpm/make-dedicated-lockfile": 0.3.19 + + packages/a: + specifiers: + b: workspace:* + express: ^4.18.1 + dependencies: + b: link:../b + express: 4.18.1 + + packages/b: + specifiers: + c: workspace:* + lodash: ^4.17.21 + dependencies: + c: link:../c + lodash: 4.17.21 + + packages/c: + specifiers: + chalk: ^5.0.1 + dependencies: + chalk: 5.0.1 + +packages: + /@babel/code-frame/7.18.6: + resolution: + { + integrity: sha512-TDCmlK5eOvH+eH7cdAFlNXeVJqWIQ7gW9tY1GJIpUtFb6CmjVyq2VM3u71bOyR8CRihcCgMUYoDNyLXao3+70Q==, + } + engines: { node: ">=6.9.0" } + dependencies: + "@babel/highlight": 7.18.6 + dev: true + + /@babel/helper-validator-identifier/7.18.6: + resolution: + { + integrity: sha512-MmetCkz9ej86nJQV+sFCxoGGrUbU3q02kgLciwkrt9QqEB7cP39oKEY0PakknEO0Gu20SskMRi+AYZ3b1TpN9g==, + } + engines: { node: ">=6.9.0" } + dev: true + + /@babel/highlight/7.18.6: + resolution: + { + integrity: sha512-u7stbOuYjaPezCuLj29hNW1v64M2Md2qupEKP1fHc7WdOA3DgLh37suiSrZYY7haUB7iBeQZ9P1uiRF359do3g==, + } + engines: { node: ">=6.9.0" } + dependencies: + "@babel/helper-validator-identifier": 7.18.6 + chalk: 2.4.2 + js-tokens: 4.0.0 + dev: true + + /@pnpm/constants/6.1.0: + resolution: + { + integrity: sha512-L6AiU3OXv9kjKGTJN9j8n1TeJGDcLX9atQlZvAkthlvbXjvKc5SKNWESc/eXhr5nEfuMWhQhiKHDJCpYejmeCQ==, + } + engines: { node: ">=14.19" } + dev: true + + /@pnpm/crypto.base32-hash/1.0.1: + resolution: + { + integrity: sha512-pzAXNn6KxTA3kbcI3iEnYs4vtH51XEVqmK/1EiD18MaPKylhqy8UvMJK3zKG+jeP82cqQbozcTGm4yOQ8i3vNw==, + } + engines: { node: ">=14.6" } + dependencies: + rfc4648: 1.5.2 + dev: true + + /@pnpm/error/3.0.1: + resolution: + { + integrity: sha512-hMlbWbFcfcfolNfSjKjpeaZFow71kNg438LZ8rAd01swiVIYRUf/sRv8gGySru6AijYfz5UqslpIJRDbYBkgQA==, + } + engines: { node: ">=14.19" } + dependencies: + "@pnpm/constants": 6.1.0 + dev: true + + /@pnpm/exec/2.0.0: + resolution: + { + integrity: sha512-b5ALfWEOFQprWKntN7MF8XWCyslBk2c8u20GEDcDDQOs6c0HyHlWxX5lig8riQKdS000U6YyS4L4b32NOleXAQ==, + } + engines: { node: ">=10" } + dependencies: + "@pnpm/self-installer": 2.2.1 + command-exists: 1.2.9 + cross-spawn: 7.0.3 + dev: true + + /@pnpm/exportable-manifest/3.1.2: + resolution: + { + integrity: sha512-IvTBwt3n73pXsU6iS1Y4OipBg3GBN37I/mUR8t3q5N0c5TkVxj9xAsra5/m7mX4dsYCv9BPL6Rw+MuKSV5P1hA==, + } + engines: { node: ">=14.6" } + dependencies: + "@pnpm/error": 3.0.1 + "@pnpm/read-project-manifest": 3.0.9 + "@pnpm/types": 8.5.0 + ramda: /@pnpm/ramda/0.28.1 + dev: true + + /@pnpm/find-workspace-dir/4.0.2: + resolution: + { + integrity: sha512-gU7ycFSWuEGJh7RE/STa33Ch27geODTXIfc+ntiE1BietxfpJIAk34zz51kTUuCFthBkpHlO6yV7jgHD2Tuc3g==, + } + engines: { node: ">=14.6" } + dependencies: + "@pnpm/error": 3.0.1 + find-up: 5.0.0 + dev: true + + /@pnpm/git-utils/0.1.0: + resolution: + { + integrity: sha512-W3zsG9585cKL+FqgcT+IfTgZX5C+CbNkFjOnJN+qbysT1N30+BbvEByCcDMsTy7QDrAk6oS7WU1Rym3U2xlh2Q==, + } + engines: { node: ">=14.6" } + dependencies: + execa: /safe-execa/0.1.2 + dev: true + + /@pnpm/graceful-fs/2.0.0: + resolution: + { + integrity: sha512-ogUZCGf0/UILZt6d8PsO4gA4pXh7f0BumXeFkcCe4AQ65PXPKfAkHC0C30Lheh2EgFOpLZm3twDP1Eiww18gew==, + } + engines: { node: ">=14.19" } + 
dependencies: + graceful-fs: 4.2.10 + dev: true + + /@pnpm/lockfile-file/5.3.3_@pnpm+logger@4.0.0: + resolution: + { + integrity: sha512-IOvjeMRX+++osG9VsfSd7+hVa/sIzhqdrm/nFcL7AexFhC7wjXbWW3YMlN5Cw4v0fwm93fgRZlikIKJ7BmkBBA==, + } + engines: { node: ">=14.6" } + peerDependencies: + "@pnpm/logger": ^4.0.0 + dependencies: + "@pnpm/constants": 6.1.0 + "@pnpm/error": 3.0.1 + "@pnpm/git-utils": 0.1.0 + "@pnpm/lockfile-types": 4.3.1 + "@pnpm/logger": 4.0.0 + "@pnpm/merge-lockfile-changes": 3.0.9 + "@pnpm/types": 8.5.0 + "@zkochan/rimraf": 2.1.2 + comver-to-semver: 1.0.0 + js-yaml: /@zkochan/js-yaml/0.0.6 + normalize-path: 3.0.0 + ramda: /@pnpm/ramda/0.28.1 + semver: 7.3.7 + sort-keys: 4.2.0 + strip-bom: 4.0.0 + write-file-atomic: 3.0.3 + dev: true + + /@pnpm/lockfile-types/4.3.1: + resolution: + { + integrity: sha512-xoorF+CuuUvpjfi8Uw/xkf8LI9VDzs9W1gjSxkKS8UwK60zU5fu4agILJfVVGlHO1tnjJeGRuspBjp7UZ8ufMA==, + } + engines: { node: ">=14.6" } + dependencies: + "@pnpm/types": 8.5.0 + dev: true + + /@pnpm/logger/4.0.0: + resolution: + { + integrity: sha512-SIShw+k556e7S7tLZFVSIHjCdiVog1qWzcKW2RbLEHPItdisAFVNIe34kYd9fMSswTlSRLS/qRjw3ZblzWmJ9Q==, + } + engines: { node: ">=12.17" } + dependencies: + bole: 4.0.1 + ndjson: 2.0.0 + dev: true + + /@pnpm/make-dedicated-lockfile/0.3.19: + resolution: + { + integrity: sha512-VHllqMh5zviSHds2kOlWSiwmxos3LLGWCVIHpo+HX45D3TXx+oMOgE8k6WB0dSOTVIuGKduoCNTGeSW4p2bD2w==, + } + engines: { node: ">=14.6" } + hasBin: true + dependencies: + "@pnpm/error": 3.0.1 + "@pnpm/exec": 2.0.0 + "@pnpm/exportable-manifest": 3.1.2 + "@pnpm/find-workspace-dir": 4.0.2 + "@pnpm/lockfile-file": 5.3.3_@pnpm+logger@4.0.0 + "@pnpm/logger": 4.0.0 + "@pnpm/prune-lockfile": 4.0.14 + "@pnpm/read-project-manifest": 3.0.9 + "@pnpm/types": 8.5.0 + ramda: /@pnpm/ramda/0.28.1 + rename-overwrite: 4.0.2 + dev: true + + /@pnpm/merge-lockfile-changes/3.0.9: + resolution: + { + integrity: sha512-UOl3AYsi13R8bvQNJPNUml8sZYKBRns0xjAcPQomoX3WTU0dv+KzVyv86Iv86YlApP0aJj9MS8Vq++JOC10RKg==, + } + engines: { node: ">=14.6" } + dependencies: + "@pnpm/lockfile-types": 4.3.1 + comver-to-semver: 1.0.0 + ramda: /@pnpm/ramda/0.28.1 + semver: 7.3.7 + dev: true + + /@pnpm/prune-lockfile/4.0.14: + resolution: + { + integrity: sha512-lICCgm9j3e2Bu75zK4PA1FKjpu9pCcagRbZWruONBf44byyEkHcnTf8b8a9M1MvtoiArhmKOmyOVJ2OFyBBRyA==, + } + engines: { node: ">=14.6" } + dependencies: + "@pnpm/constants": 6.1.0 + "@pnpm/lockfile-types": 4.3.1 + "@pnpm/types": 8.5.0 + dependency-path: 9.2.4 + ramda: /@pnpm/ramda/0.28.1 + dev: true + + /@pnpm/ramda/0.28.1: + resolution: + { + integrity: sha512-zcAG+lvU0fMziNeGXpPyCyCJYp5ZVrPElEE4t14jAmViaihohocZ+dDkcRIyAomox8pQsuZnv1EyHR+pOhmUWw==, + } + dev: true + + /@pnpm/read-project-manifest/3.0.9: + resolution: + { + integrity: sha512-27j40C48hA/tqsCiqk9ApJxp2g6WGrrj2RSs0NKhsSHynxAuA1tIvwatNISQbAiMjZiu1lfhzhq8m1QdblyNmA==, + } + engines: { node: ">=14.6" } + dependencies: + "@pnpm/error": 3.0.1 + "@pnpm/graceful-fs": 2.0.0 + "@pnpm/types": 8.5.0 + "@pnpm/write-project-manifest": 3.0.7 + detect-indent: 6.1.0 + fast-deep-equal: 3.1.3 + is-windows: 1.0.2 + json5: 2.2.1 + parse-json: 5.2.0 + read-yaml-file: 2.1.0 + sort-keys: 4.2.0 + strip-bom: 4.0.0 + dev: true + + /@pnpm/self-installer/2.2.1: + resolution: + { + integrity: sha512-aefLe96wAWghkx6q1PwbVS1Iz1iGE+HKwkTmtzWLFXeGhbknaIdG2voMwaBGIYGCSxm8sDKR1uLO4aRRAYuc+Q==, + } + engines: { node: ">=4" } + hasBin: true + dev: true + + /@pnpm/types/8.5.0: + resolution: + { + integrity: 
sha512-PSKnhkwgiZtp9dcWZR9mPz2W9UopmADr9o8FTqazo5kjUSh2xQmDUSJOJ/ZWcfNziO64Ix/VbcxKIZeplhog1Q==, + } + engines: { node: ">=14.6" } + dev: true + + /@pnpm/write-project-manifest/3.0.7: + resolution: + { + integrity: sha512-rMgIWR52asESg1D7Cp/vBi3dBsv18iUWPvvtYNynrcOjRdE3NsH5CAdfZP/XN6HJF6CSY8rS9W4YC5Q3JGtxiw==, + } + engines: { node: ">=14.6" } + dependencies: + "@pnpm/types": 8.5.0 + json5: 2.2.1 + write-file-atomic: 3.0.3 + write-yaml-file: 4.2.0 + dev: true + + /@zkochan/js-yaml/0.0.6: + resolution: + { + integrity: sha512-nzvgl3VfhcELQ8LyVrYOru+UtAy1nrygk2+AGbTm8a5YcO6o8lSjAT+pfg3vJWxIoZKOUhrK6UU7xW/+00kQrg==, + } + hasBin: true + dependencies: + argparse: 2.0.1 + dev: true + + /@zkochan/rimraf/2.1.2: + resolution: + { + integrity: sha512-Lc2oK51J6aQWcLWTloobJun5ZF41BbTDdLvE+aMcexoVWFoFqvZmnZoyXR2IZk6NJEVoZW8tjgtvQLfTsmRs2Q==, + } + engines: { node: ">=12.10" } + dependencies: + rimraf: 3.0.2 + dev: true + + /@zkochan/which/2.0.3: + resolution: + { + integrity: sha512-C1ReN7vt2/2O0fyTsx5xnbQuxBrmG5NMSbcIkPKCCfCTJgpZBsuRYzFXHj3nVq8vTfK7vxHUmzfCpSHgO7j4rg==, + } + engines: { node: ">= 8" } + hasBin: true + dependencies: + isexe: 2.0.0 + dev: true + + /accepts/1.3.8: + resolution: + { + integrity: sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw==, + } + engines: { node: ">= 0.6" } + dependencies: + mime-types: 2.1.35 + negotiator: 0.6.3 + dev: false + + /ansi-styles/3.2.1: + resolution: + { + integrity: sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==, + } + engines: { node: ">=4" } + dependencies: + color-convert: 1.9.3 + dev: true + + /argparse/2.0.1: + resolution: + { + integrity: sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==, + } + dev: true + + /array-flatten/1.1.1: + resolution: + { + integrity: sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg==, + } + dev: false + + /balanced-match/1.0.2: + resolution: + { + integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==, + } + dev: true + + /body-parser/1.20.0: + resolution: + { + integrity: sha512-DfJ+q6EPcGKZD1QWUjSpqp+Q7bDQTsQIF4zfUAtZ6qk+H/3/QRhg9CEp39ss+/T2vw0+HaidC0ecJj/DRLIaKg==, + } + engines: { node: ">= 0.8", npm: 1.2.8000 || >= 1.4.16 } + dependencies: + bytes: 3.1.2 + content-type: 1.0.4 + debug: 2.6.9 + depd: 2.0.0 + destroy: 1.2.0 + http-errors: 2.0.0 + iconv-lite: 0.4.24 + on-finished: 2.4.1 + qs: 6.10.3 + raw-body: 2.5.1 + type-is: 1.6.18 + unpipe: 1.0.0 + dev: false + + /bole/4.0.1: + resolution: + { + integrity: sha512-42r0aSOJFJti2l6LasBHq2BuWJzohGs349olQnH/ETlJo87XnoWw7UT8pGE6UstjxzOKkwz7tjoFcmSr6L16vg==, + } + dependencies: + fast-safe-stringify: 2.1.1 + individual: 3.0.0 + dev: true + + /brace-expansion/1.1.11: + resolution: + { + integrity: sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==, + } + dependencies: + balanced-match: 1.0.2 + concat-map: 0.0.1 + dev: true + + /bytes/3.1.2: + resolution: + { + integrity: sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==, + } + engines: { node: ">= 0.8" } + dev: false + + /call-bind/1.0.2: + resolution: + { + integrity: sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==, + } + dependencies: + function-bind: 1.1.1 + get-intrinsic: 1.1.2 + dev: false + + /chalk/2.4.2: + resolution: + { + integrity: 
sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==, + } + engines: { node: ">=4" } + dependencies: + ansi-styles: 3.2.1 + escape-string-regexp: 1.0.5 + supports-color: 5.5.0 + dev: true + + /chalk/5.0.1: + resolution: + { + integrity: sha512-Fo07WOYGqMfCWHOzSXOt2CxDbC6skS/jO9ynEcmpANMoPrD+W1r1K6Vx7iNm+AQmETU1Xr2t+n8nzkV9t6xh3w==, + } + engines: { node: ^12.17.0 || ^14.13 || >=16.0.0 } + dev: false + + /color-convert/1.9.3: + resolution: + { + integrity: sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==, + } + dependencies: + color-name: 1.1.3 + dev: true + + /color-name/1.1.3: + resolution: + { + integrity: sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==, + } + dev: true + + /command-exists/1.2.9: + resolution: + { + integrity: sha512-LTQ/SGc+s0Xc0Fu5WaKnR0YiygZkm9eKFvyS+fRsU7/ZWFF8ykFM6Pc9aCVf1+xasOOZpO3BAVgVrKvsqKHV7w==, + } + dev: true + + /comver-to-semver/1.0.0: + resolution: + { + integrity: sha512-gcGtbRxjwROQOdXLUWH1fQAXqThUVRZ219aAwgtX3KfYw429/Zv6EIJRf5TBSzWdAGwePmqH7w70WTaX4MDqag==, + } + engines: { node: ">=12.17" } + dev: true + + /concat-map/0.0.1: + resolution: + { + integrity: sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==, + } + dev: true + + /content-disposition/0.5.4: + resolution: + { + integrity: sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ==, + } + engines: { node: ">= 0.6" } + dependencies: + safe-buffer: 5.2.1 + dev: false + + /content-type/1.0.4: + resolution: + { + integrity: sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA==, + } + engines: { node: ">= 0.6" } + dev: false + + /cookie-signature/1.0.6: + resolution: + { + integrity: sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ==, + } + dev: false + + /cookie/0.5.0: + resolution: + { + integrity: sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw==, + } + engines: { node: ">= 0.6" } + dev: false + + /cross-spawn/7.0.3: + resolution: + { + integrity: sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==, + } + engines: { node: ">= 8" } + dependencies: + path-key: 3.1.1 + shebang-command: 2.0.0 + which: 2.0.2 + dev: true + + /debug/2.6.9: + resolution: + { + integrity: sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==, + } + dependencies: + ms: 2.0.0 + dev: false + + /depd/2.0.0: + resolution: + { + integrity: sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==, + } + engines: { node: ">= 0.8" } + dev: false + + /dependency-path/9.2.4: + resolution: + { + integrity: sha512-bH29ZcKyo/i5nr4SgnVZGksuoZzroOWpHtKbq8fKdKgJDr0SdUIPu2EwjJkjzbw9SqRzWd912e0opHYJTkFf6w==, + } + engines: { node: ">=14.6" } + dependencies: + "@pnpm/crypto.base32-hash": 1.0.1 + "@pnpm/types": 8.5.0 + encode-registry: 3.0.0 + semver: 7.3.7 + dev: true + + /destroy/1.2.0: + resolution: + { + integrity: sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==, + } + engines: { node: ">= 0.8", npm: 1.2.8000 || >= 1.4.16 } + dev: false + + /detect-indent/6.1.0: + resolution: + { + integrity: sha512-reYkTUJAZb9gUuZ2RvVCNhVHdg62RHnJ7WJl8ftMi4diZ6NWlciOzQN88pUhSELEwflJht4oQDv0F0BMlwaYtA==, + } + engines: { node: ">=8" } + 
dev: true + + /ee-first/1.1.1: + resolution: + { + integrity: sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==, + } + dev: false + + /encode-registry/3.0.0: + resolution: + { + integrity: sha512-2fRYji8K6FwYuQ6EPBKR/J9mcqb7kIoNqt1vGvJr3NrvKfncRiNm00Oxo6gi/YJF8R5Sp2bNFSFdGKTG0rje1Q==, + } + engines: { node: ">=10" } + dependencies: + mem: 8.1.1 + dev: true + + /encodeurl/1.0.2: + resolution: + { + integrity: sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==, + } + engines: { node: ">= 0.8" } + dev: false + + /error-ex/1.3.2: + resolution: + { + integrity: sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==, + } + dependencies: + is-arrayish: 0.2.1 + dev: true + + /escape-html/1.0.3: + resolution: + { + integrity: sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==, + } + dev: false + + /escape-string-regexp/1.0.5: + resolution: + { + integrity: sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==, + } + engines: { node: ">=0.8.0" } + dev: true + + /etag/1.8.1: + resolution: + { + integrity: sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==, + } + engines: { node: ">= 0.6" } + dev: false + + /execa/5.1.1: + resolution: + { + integrity: sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==, + } + engines: { node: ">=10" } + dependencies: + cross-spawn: 7.0.3 + get-stream: 6.0.1 + human-signals: 2.1.0 + is-stream: 2.0.1 + merge-stream: 2.0.0 + npm-run-path: 4.0.1 + onetime: 5.1.2 + signal-exit: 3.0.7 + strip-final-newline: 2.0.0 + dev: true + + /express/4.18.1: + resolution: + { + integrity: sha512-zZBcOX9TfehHQhtupq57OF8lFZ3UZi08Y97dwFCkD8p9d/d2Y3M+ykKcwaMDEL+4qyUolgBDX6AblpR3fL212Q==, + } + engines: { node: ">= 0.10.0" } + dependencies: + accepts: 1.3.8 + array-flatten: 1.1.1 + body-parser: 1.20.0 + content-disposition: 0.5.4 + content-type: 1.0.4 + cookie: 0.5.0 + cookie-signature: 1.0.6 + debug: 2.6.9 + depd: 2.0.0 + encodeurl: 1.0.2 + escape-html: 1.0.3 + etag: 1.8.1 + finalhandler: 1.2.0 + fresh: 0.5.2 + http-errors: 2.0.0 + merge-descriptors: 1.0.1 + methods: 1.1.2 + on-finished: 2.4.1 + parseurl: 1.3.3 + path-to-regexp: 0.1.7 + proxy-addr: 2.0.7 + qs: 6.10.3 + range-parser: 1.2.1 + safe-buffer: 5.2.1 + send: 0.18.0 + serve-static: 1.15.0 + setprototypeof: 1.2.0 + statuses: 2.0.1 + type-is: 1.6.18 + utils-merge: 1.0.1 + vary: 1.1.2 + dev: false + + /fast-deep-equal/3.1.3: + resolution: + { + integrity: sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==, + } + dev: true + + /fast-safe-stringify/2.1.1: + resolution: + { + integrity: sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==, + } + dev: true + + /finalhandler/1.2.0: + resolution: + { + integrity: sha512-5uXcUVftlQMFnWC9qu/svkWv3GTd2PfUhK/3PLkYNAe7FbqJMt3515HaxE6eRL74GdsriiwujiawdaB1BpEISg==, + } + engines: { node: ">= 0.8" } + dependencies: + debug: 2.6.9 + encodeurl: 1.0.2 + escape-html: 1.0.3 + on-finished: 2.4.1 + parseurl: 1.3.3 + statuses: 2.0.1 + unpipe: 1.0.0 + dev: false + + /find-up/5.0.0: + resolution: + { + integrity: sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==, + } + engines: { node: ">=10" } + dependencies: + locate-path: 6.0.0 + path-exists: 4.0.0 + dev: true + + 
/forwarded/0.2.0: + resolution: + { + integrity: sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==, + } + engines: { node: ">= 0.6" } + dev: false + + /fresh/0.5.2: + resolution: + { + integrity: sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==, + } + engines: { node: ">= 0.6" } + dev: false + + /fs.realpath/1.0.0: + resolution: + { + integrity: sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==, + } + dev: true + + /function-bind/1.1.1: + resolution: + { + integrity: sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==, + } + dev: false + + /get-intrinsic/1.1.2: + resolution: + { + integrity: sha512-Jfm3OyCxHh9DJyc28qGk+JmfkpO41A4XkneDSujN9MDXrm4oDKdHvndhZ2dN94+ERNfkYJWDclW6k2L/ZGHjXA==, + } + dependencies: + function-bind: 1.1.1 + has: 1.0.3 + has-symbols: 1.0.3 + dev: false + + /get-stream/6.0.1: + resolution: + { + integrity: sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==, + } + engines: { node: ">=10" } + dev: true + + /glob/7.2.3: + resolution: + { + integrity: sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==, + } + dependencies: + fs.realpath: 1.0.0 + inflight: 1.0.6 + inherits: 2.0.4 + minimatch: 3.1.2 + once: 1.4.0 + path-is-absolute: 1.0.1 + dev: true + + /graceful-fs/4.2.10: + resolution: + { + integrity: sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA==, + } + dev: true + + /has-flag/3.0.0: + resolution: + { + integrity: sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==, + } + engines: { node: ">=4" } + dev: true + + /has-symbols/1.0.3: + resolution: + { + integrity: sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==, + } + engines: { node: ">= 0.4" } + dev: false + + /has/1.0.3: + resolution: + { + integrity: sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==, + } + engines: { node: ">= 0.4.0" } + dependencies: + function-bind: 1.1.1 + dev: false + + /http-errors/2.0.0: + resolution: + { + integrity: sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==, + } + engines: { node: ">= 0.8" } + dependencies: + depd: 2.0.0 + inherits: 2.0.4 + setprototypeof: 1.2.0 + statuses: 2.0.1 + toidentifier: 1.0.1 + dev: false + + /human-signals/2.1.0: + resolution: + { + integrity: sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==, + } + engines: { node: ">=10.17.0" } + dev: true + + /iconv-lite/0.4.24: + resolution: + { + integrity: sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==, + } + engines: { node: ">=0.10.0" } + dependencies: + safer-buffer: 2.1.2 + dev: false + + /imurmurhash/0.1.4: + resolution: + { + integrity: sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==, + } + engines: { node: ">=0.8.19" } + dev: true + + /individual/3.0.0: + resolution: + { + integrity: sha512-rUY5vtT748NMRbEMrTNiFfy29BgGZwGXUi2NFUVMWQrogSLzlJvQV9eeMWi+g1aVaQ53tpyLAQtd5x/JH0Nh1g==, + } + dev: true + + /inflight/1.0.6: + resolution: + { + integrity: sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==, + } + dependencies: + once: 1.4.0 + 
wrappy: 1.0.2 + dev: true + + /inherits/2.0.4: + resolution: + { + integrity: sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==, + } + dev: false + + /ipaddr.js/1.9.1: + resolution: + { + integrity: sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==, + } + engines: { node: ">= 0.10" } + dev: false + + /is-arrayish/0.2.1: + resolution: + { + integrity: sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==, + } + dev: true + + /is-plain-obj/2.1.0: + resolution: + { + integrity: sha512-YWnfyRwxL/+SsrWYfOpUtz5b3YD+nyfkHvjbcanzk8zgyO4ASD67uVMRt8k5bM4lLMDnXfriRhOpemw+NfT1eA==, + } + engines: { node: ">=8" } + dev: true + + /is-stream/2.0.1: + resolution: + { + integrity: sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==, + } + engines: { node: ">=8" } + dev: true + + /is-typedarray/1.0.0: + resolution: + { + integrity: sha512-cyA56iCMHAh5CdzjJIa4aohJyeO1YbwLi3Jc35MmRU6poroFjIGZzUzupGiRPOjgHg9TLu43xbpwXk523fMxKA==, + } + dev: true + + /is-windows/1.0.2: + resolution: + { + integrity: sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA==, + } + engines: { node: ">=0.10.0" } + dev: true + + /isexe/2.0.0: + resolution: + { + integrity: sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==, + } + dev: true + + /js-tokens/4.0.0: + resolution: + { + integrity: sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==, + } + dev: true + + /js-yaml/4.1.0: + resolution: + { + integrity: sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==, + } + hasBin: true + dependencies: + argparse: 2.0.1 + dev: true + + /json-parse-even-better-errors/2.3.1: + resolution: + { + integrity: sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==, + } + dev: true + + /json-stringify-safe/5.0.1: + resolution: + { + integrity: sha512-ZClg6AaYvamvYEE82d3Iyd3vSSIjQ+odgjaTzRuO3s7toCdFKczob2i0zCh7JE8kWn17yvAWhUVxvqGwUalsRA==, + } + dev: true + + /json5/2.2.1: + resolution: + { + integrity: sha512-1hqLFMSrGHRHxav9q9gNjJ5EXznIxGVO09xQRrwplcS8qs28pZ8s8hupZAmqDwZUmVZ2Qb2jnyPOWcDH8m8dlA==, + } + engines: { node: ">=6" } + hasBin: true + dev: true + + /lines-and-columns/1.2.4: + resolution: + { + integrity: sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==, + } + dev: true + + /locate-path/6.0.0: + resolution: + { + integrity: sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==, + } + engines: { node: ">=10" } + dependencies: + p-locate: 5.0.0 + dev: true + + /lodash/4.17.21: + resolution: + { + integrity: sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==, + } + dev: false + + /lru-cache/6.0.0: + resolution: + { + integrity: sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==, + } + engines: { node: ">=10" } + dependencies: + yallist: 4.0.0 + dev: true + + /map-age-cleaner/0.1.3: + resolution: + { + integrity: sha512-bJzx6nMoP6PDLPBFmg7+xRKeFZvFboMrGlxmNj9ClvX53KrmvM5bXFXEWjbz4cz1AFn+jWJ9z/DJSz7hrs0w3w==, + } + engines: { node: ">=6" } + dependencies: + p-defer: 1.0.0 + dev: true + + /media-typer/0.3.0: + resolution: + { + integrity: 
sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==, + } + engines: { node: ">= 0.6" } + dev: false + + /mem/8.1.1: + resolution: + { + integrity: sha512-qFCFUDs7U3b8mBDPyz5EToEKoAkgCzqquIgi9nkkR9bixxOVOre+09lbuH7+9Kn2NFpm56M3GUWVbU2hQgdACA==, + } + engines: { node: ">=10" } + dependencies: + map-age-cleaner: 0.1.3 + mimic-fn: 3.1.0 + dev: true + + /merge-descriptors/1.0.1: + resolution: + { + integrity: sha512-cCi6g3/Zr1iqQi6ySbseM1Xvooa98N0w31jzUYrXPX2xqObmFGHJ0tQ5u74H3mVh7wLouTseZyYIq39g8cNp1w==, + } + dev: false + + /merge-stream/2.0.0: + resolution: + { + integrity: sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==, + } + dev: true + + /methods/1.1.2: + resolution: + { + integrity: sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==, + } + engines: { node: ">= 0.6" } + dev: false + + /mime-db/1.52.0: + resolution: + { + integrity: sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==, + } + engines: { node: ">= 0.6" } + dev: false + + /mime-types/2.1.35: + resolution: + { + integrity: sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==, + } + engines: { node: ">= 0.6" } + dependencies: + mime-db: 1.52.0 + dev: false + + /mime/1.6.0: + resolution: + { + integrity: sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==, + } + engines: { node: ">=4" } + hasBin: true + dev: false + + /mimic-fn/2.1.0: + resolution: + { + integrity: sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==, + } + engines: { node: ">=6" } + dev: true + + /mimic-fn/3.1.0: + resolution: + { + integrity: sha512-Ysbi9uYW9hFyfrThdDEQuykN4Ey6BuwPD2kpI5ES/nFTDn/98yxYNLZJcgUAKPT/mcrLLKaGzJR9YVxJrIdASQ==, + } + engines: { node: ">=8" } + dev: true + + /minimatch/3.1.2: + resolution: + { + integrity: sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==, + } + dependencies: + brace-expansion: 1.1.11 + dev: true + + /minimist/1.2.6: + resolution: + { + integrity: sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==, + } + dev: true + + /ms/2.0.0: + resolution: + { + integrity: sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==, + } + dev: false + + /ms/2.1.3: + resolution: + { + integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==, + } + dev: false + + /ndjson/2.0.0: + resolution: + { + integrity: sha512-nGl7LRGrzugTtaFcJMhLbpzJM6XdivmbkdlaGcrk/LXg2KL/YBC6z1g70xh0/al+oFuVFP8N8kiWRucmeEH/qQ==, + } + engines: { node: ">=10" } + hasBin: true + dependencies: + json-stringify-safe: 5.0.1 + minimist: 1.2.6 + readable-stream: 3.6.0 + split2: 3.2.2 + through2: 4.0.2 + dev: true + + /negotiator/0.6.3: + resolution: + { + integrity: sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==, + } + engines: { node: ">= 0.6" } + dev: false + + /normalize-path/3.0.0: + resolution: + { + integrity: sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==, + } + engines: { node: ">=0.10.0" } + dev: true + + /npm-run-path/4.0.1: + resolution: + { + integrity: sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==, + } + engines: { node: ">=8" } + 
dependencies: + path-key: 3.1.1 + dev: true + + /object-inspect/1.12.2: + resolution: + { + integrity: sha512-z+cPxW0QGUp0mcqcsgQyLVRDoXFQbXOwBaqyF7VIgI4TWNQsDHrBpUQslRmIfAoYWdYzs6UlKJtB2XJpTaNSpQ==, + } + dev: false + + /on-finished/2.4.1: + resolution: + { + integrity: sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==, + } + engines: { node: ">= 0.8" } + dependencies: + ee-first: 1.1.1 + dev: false + + /once/1.4.0: + resolution: + { + integrity: sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==, + } + dependencies: + wrappy: 1.0.2 + dev: true + + /onetime/5.1.2: + resolution: + { + integrity: sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==, + } + engines: { node: ">=6" } + dependencies: + mimic-fn: 2.1.0 + dev: true + + /p-defer/1.0.0: + resolution: + { + integrity: sha512-wB3wfAxZpk2AzOfUMJNL+d36xothRSyj8EXOa4f6GMqYDN9BJaaSISbsk+wS9abmnebVw95C2Kb5t85UmpCxuw==, + } + engines: { node: ">=4" } + dev: true + + /p-limit/3.1.0: + resolution: + { + integrity: sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==, + } + engines: { node: ">=10" } + dependencies: + yocto-queue: 0.1.0 + dev: true + + /p-locate/5.0.0: + resolution: + { + integrity: sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==, + } + engines: { node: ">=10" } + dependencies: + p-limit: 3.1.0 + dev: true + + /parse-json/5.2.0: + resolution: + { + integrity: sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==, + } + engines: { node: ">=8" } + dependencies: + "@babel/code-frame": 7.18.6 + error-ex: 1.3.2 + json-parse-even-better-errors: 2.3.1 + lines-and-columns: 1.2.4 + dev: true + + /parseurl/1.3.3: + resolution: + { + integrity: sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==, + } + engines: { node: ">= 0.8" } + dev: false + + /path-exists/4.0.0: + resolution: + { + integrity: sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==, + } + engines: { node: ">=8" } + dev: true + + /path-is-absolute/1.0.1: + resolution: + { + integrity: sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==, + } + engines: { node: ">=0.10.0" } + dev: true + + /path-key/3.1.1: + resolution: + { + integrity: sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==, + } + engines: { node: ">=8" } + dev: true + + /path-name/1.0.0: + resolution: + { + integrity: sha512-/dcAb5vMXH0f51yvMuSUqFpxUcA8JelbRmE5mW/p4CUJxrNgK24IkstnV7ENtg2IDGBOu6izKTG6eilbnbNKWQ==, + } + dev: true + + /path-to-regexp/0.1.7: + resolution: + { + integrity: sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ==, + } + dev: false + + /proxy-addr/2.0.7: + resolution: + { + integrity: sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==, + } + engines: { node: ">= 0.10" } + dependencies: + forwarded: 0.2.0 + ipaddr.js: 1.9.1 + dev: false + + /qs/6.10.3: + resolution: + { + integrity: sha512-wr7M2E0OFRfIfJZjKGieI8lBKb7fRCH4Fv5KNPEs7gJ8jadvotdsS08PzOKR7opXhZ/Xkjtt3WF9g38drmyRqQ==, + } + engines: { node: ">=0.6" } + dependencies: + side-channel: 1.0.4 + dev: false + + /range-parser/1.2.1: + resolution: + { + integrity: 
sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==, + } + engines: { node: ">= 0.6" } + dev: false + + /raw-body/2.5.1: + resolution: + { + integrity: sha512-qqJBtEyVgS0ZmPGdCFPWJ3FreoqvG4MVQln/kCgF7Olq95IbOp0/BWyMwbdtn4VTvkM8Y7khCQ2Xgk/tcrCXig==, + } + engines: { node: ">= 0.8" } + dependencies: + bytes: 3.1.2 + http-errors: 2.0.0 + iconv-lite: 0.4.24 + unpipe: 1.0.0 + dev: false + + /read-yaml-file/2.1.0: + resolution: + { + integrity: sha512-UkRNRIwnhG+y7hpqnycCL/xbTk7+ia9VuVTC0S+zVbwd65DI9eUpRMfsWIGrCWxTU/mi+JW8cHQCrv+zfCbEPQ==, + } + engines: { node: ">=10.13" } + dependencies: + js-yaml: 4.1.0 + strip-bom: 4.0.0 + dev: true + + /readable-stream/3.6.0: + resolution: + { + integrity: sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==, + } + engines: { node: ">= 6" } + dependencies: + inherits: 2.0.4 + string_decoder: 1.3.0 + util-deprecate: 1.0.2 + dev: true + + /rename-overwrite/4.0.2: + resolution: + { + integrity: sha512-L1sgBgagVgOgb1Z6QZr1yJgSMHI4SXQqAH0l/UbeyHnLKxECvKIlyVEmBo4BqsCAZGg0SBSyjCh68lis5PgC7g==, + } + engines: { node: ">=12.10" } + dependencies: + "@zkochan/rimraf": 2.1.2 + dev: true + + /rfc4648/1.5.2: + resolution: + { + integrity: sha512-tLOizhR6YGovrEBLatX1sdcuhoSCXddw3mqNVAcKxGJ+J0hFeJ+SjeWCv5UPA/WU3YzWPPuCVYgXBKZUPGpKtg==, + } + dev: true + + /rimraf/3.0.2: + resolution: + { + integrity: sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==, + } + hasBin: true + dependencies: + glob: 7.2.3 + dev: true + + /safe-buffer/5.2.1: + resolution: + { + integrity: sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==, + } + dev: false + + /safe-execa/0.1.2: + resolution: + { + integrity: sha512-vdTshSQ2JsRCgT8eKZWNJIL26C6bVqy1SOmuCMlKHegVeo8KYRobRrefOdUq9OozSPUUiSxrylteeRmLOMFfWg==, + } + engines: { node: ">=12" } + dependencies: + "@zkochan/which": 2.0.3 + execa: 5.1.1 + path-name: 1.0.0 + dev: true + + /safer-buffer/2.1.2: + resolution: + { + integrity: sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==, + } + dev: false + + /semver/7.3.7: + resolution: + { + integrity: sha512-QlYTucUYOews+WeEujDoEGziz4K6c47V/Bd+LjSSYcA94p+DmINdf7ncaUinThfvZyu13lN9OY1XDxt8C0Tw0g==, + } + engines: { node: ">=10" } + hasBin: true + dependencies: + lru-cache: 6.0.0 + dev: true + + /send/0.18.0: + resolution: + { + integrity: sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg==, + } + engines: { node: ">= 0.8.0" } + dependencies: + debug: 2.6.9 + depd: 2.0.0 + destroy: 1.2.0 + encodeurl: 1.0.2 + escape-html: 1.0.3 + etag: 1.8.1 + fresh: 0.5.2 + http-errors: 2.0.0 + mime: 1.6.0 + ms: 2.1.3 + on-finished: 2.4.1 + range-parser: 1.2.1 + statuses: 2.0.1 + dev: false + + /serve-static/1.15.0: + resolution: + { + integrity: sha512-XGuRDNjXUijsUL0vl6nSD7cwURuzEgglbOaFuZM9g3kwDXOWVTck0jLzjPzGD+TazWbboZYu52/9/XPdUgne9g==, + } + engines: { node: ">= 0.8.0" } + dependencies: + encodeurl: 1.0.2 + escape-html: 1.0.3 + parseurl: 1.3.3 + send: 0.18.0 + dev: false + + /setprototypeof/1.2.0: + resolution: + { + integrity: sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==, + } + dev: false + + /shebang-command/2.0.0: + resolution: + { + integrity: sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==, + } + engines: { node: ">=8" } + dependencies: + shebang-regex: 
3.0.0 + dev: true + + /shebang-regex/3.0.0: + resolution: + { + integrity: sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==, + } + engines: { node: ">=8" } + dev: true + + /side-channel/1.0.4: + resolution: + { + integrity: sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==, + } + dependencies: + call-bind: 1.0.2 + get-intrinsic: 1.1.2 + object-inspect: 1.12.2 + dev: false + + /signal-exit/3.0.7: + resolution: + { + integrity: sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==, + } + dev: true + + /sort-keys/4.2.0: + resolution: + { + integrity: sha512-aUYIEU/UviqPgc8mHR6IW1EGxkAXpeRETYcrzg8cLAvUPZcpAlleSXHV2mY7G12GphSH6Gzv+4MMVSSkbdteHg==, + } + engines: { node: ">=8" } + dependencies: + is-plain-obj: 2.1.0 + dev: true + + /split2/3.2.2: + resolution: + { + integrity: sha512-9NThjpgZnifTkJpzTZ7Eue85S49QwpNhZTq6GRJwObb6jnLFNGB7Qm73V5HewTROPyxD0C29xqmaI68bQtV+hg==, + } + dependencies: + readable-stream: 3.6.0 + dev: true + + /statuses/2.0.1: + resolution: + { + integrity: sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==, + } + engines: { node: ">= 0.8" } + dev: false + + /string_decoder/1.3.0: + resolution: + { + integrity: sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==, + } + dependencies: + safe-buffer: 5.2.1 + dev: true + + /strip-bom/4.0.0: + resolution: + { + integrity: sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w==, + } + engines: { node: ">=8" } + dev: true + + /strip-final-newline/2.0.0: + resolution: + { + integrity: sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==, + } + engines: { node: ">=6" } + dev: true + + /supports-color/5.5.0: + resolution: + { + integrity: sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==, + } + engines: { node: ">=4" } + dependencies: + has-flag: 3.0.0 + dev: true + + /through2/4.0.2: + resolution: + { + integrity: sha512-iOqSav00cVxEEICeD7TjLB1sueEL+81Wpzp2bY17uZjZN0pWZPuo4suZ/61VujxmqSGFfgOcNuTZ85QJwNZQpw==, + } + dependencies: + readable-stream: 3.6.0 + dev: true + + /toidentifier/1.0.1: + resolution: + { + integrity: sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==, + } + engines: { node: ">=0.6" } + dev: false + + /type-is/1.6.18: + resolution: + { + integrity: sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==, + } + engines: { node: ">= 0.6" } + dependencies: + media-typer: 0.3.0 + mime-types: 2.1.35 + dev: false + + /typedarray-to-buffer/3.1.5: + resolution: + { + integrity: sha512-zdu8XMNEDepKKR+XYOXAVPtWui0ly0NtohUscw+UmaHiAWT8hrV1rr//H6V+0DvJ3OQ19S979M0laLfX8rm82Q==, + } + dependencies: + is-typedarray: 1.0.0 + dev: true + + /unpipe/1.0.0: + resolution: + { + integrity: sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==, + } + engines: { node: ">= 0.8" } + dev: false + + /util-deprecate/1.0.2: + resolution: + { + integrity: sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==, + } + dev: true + + /utils-merge/1.0.1: + resolution: + { + integrity: sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA==, + } + engines: { node: ">= 0.4.0" } + dev: false + + 
/vary/1.1.2: + resolution: + { + integrity: sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==, + } + engines: { node: ">= 0.8" } + dev: false + + /which/2.0.2: + resolution: + { + integrity: sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==, + } + engines: { node: ">= 8" } + hasBin: true + dependencies: + isexe: 2.0.0 + dev: true + + /wrappy/1.0.2: + resolution: + { + integrity: sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==, + } + dev: true + + /write-file-atomic/3.0.3: + resolution: + { + integrity: sha512-AvHcyZ5JnSfq3ioSyjrBkH9yW4m7Ayk8/9My/DD9onKeu/94fwrMocemO2QAJFAlnnDN+ZDS+ZjAR5ua1/PV/Q==, + } + dependencies: + imurmurhash: 0.1.4 + is-typedarray: 1.0.0 + signal-exit: 3.0.7 + typedarray-to-buffer: 3.1.5 + dev: true + + /write-yaml-file/4.2.0: + resolution: + { + integrity: sha512-LwyucHy0uhWqbrOkh9cBluZBeNVxzHjDaE9mwepZG3n3ZlbM4v3ndrFw51zW/NXYFFqP+QWZ72ihtLWTh05e4Q==, + } + engines: { node: ">=10.13" } + dependencies: + js-yaml: 4.1.0 + write-file-atomic: 3.0.3 + dev: true + + /yallist/4.0.0: + resolution: + { + integrity: sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==, + } + dev: true + + /yocto-queue/0.1.0: + resolution: + { + integrity: sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==, + } + engines: { node: ">=10" } + dev: true diff --git a/cli/internal/lockfile/testdata/pnpm7-workspace.yaml b/cli/internal/lockfile/testdata/pnpm7-workspace.yaml new file mode 100644 index 0000000..2f7b663 --- /dev/null +++ b/cli/internal/lockfile/testdata/pnpm7-workspace.yaml @@ -0,0 +1,3445 @@ +lockfileVersion: 5.4 + +patchedDependencies: + lodash@4.17.21: + hash: ehchni3mpmovsvjxesffg2i5a4 + path: patches/lodash@4.17.21.patch + underscore@1.13.4: + hash: 3pbfs36izefyn2uycmknwkvuuy + path: patches/underscore@1.13.4.patch + +importers: + .: + specifiers: + eslint-config-custom: workspace:* + prettier: latest + turbo: latest + devDependencies: + eslint-config-custom: link:packages/eslint-config-custom + prettier: 2.7.1 + turbo: 1.4.6 + + apps/docs: + specifiers: + "@babel/core": ^7.0.0 + "@types/node": ^17.0.12 + "@types/react": 18.0.17 + dashboard-icons: github:peerigon/dashboard-icons + eslint: 7.32.0 + eslint-config-custom: workspace:* + next: 12.2.5 + next-transpile-modules: 9.0.0 + react: 18.2.0 + react-dom: 18.2.0 + tsconfig: workspace:* + typescript: ^4.5.3 + ui: workspace:* + underscore: ^1.13.4 + dependencies: + dashboard-icons: github.com/peerigon/dashboard-icons/ce27ef933144e09cef3911025f3649040a8571b6 + next: 12.2.5_ir3quccc6i62x6qn6jjhyjjiey + react: 18.2.0 + react-dom: 18.2.0_react@18.2.0 + ui: file:packages/ui + underscore: 1.13.4_3pbfs36izefyn2uycmknwkvuuy + devDependencies: + "@babel/core": 7.19.1 + "@types/node": 17.0.45 + "@types/react": 18.0.17 + eslint: 7.32.0 + eslint-config-custom: link:../../packages/eslint-config-custom + next-transpile-modules: 9.0.0 + tsconfig: link:../../packages/tsconfig + typescript: 4.8.3 + dependenciesMeta: + ui: + injected: true + + apps/web: + specifiers: + "@babel/core": ^7.0.0 + "@types/node": ^17.0.12 + "@types/react": 18.0.17 + eslint: 7.32.0 + eslint-config-custom: workspace:* + lodash: ^4.17.21 + next: 12.2.5 + next-transpile-modules: 9.0.0 + react: 18.2.0 + react-dom: 18.2.0 + tsconfig: workspace:* + typescript: ^4.5.3 + ui: workspace:* + dependencies: + lodash: 4.17.21_ehchni3mpmovsvjxesffg2i5a4 + next: 
12.2.5_ir3quccc6i62x6qn6jjhyjjiey + react: 18.2.0 + react-dom: 18.2.0_react@18.2.0 + ui: link:../../packages/ui + devDependencies: + "@babel/core": 7.19.1 + "@types/node": 17.0.45 + "@types/react": 18.0.17 + eslint: 7.32.0 + eslint-config-custom: link:../../packages/eslint-config-custom + next-transpile-modules: 9.0.0 + tsconfig: link:../../packages/tsconfig + typescript: 4.8.3 + + packages/eslint-config-custom: + specifiers: + eslint: ^7.23.0 + eslint-config-next: ^12.0.8 + eslint-config-prettier: ^8.3.0 + eslint-config-turbo: latest + eslint-plugin-react: 7.31.7 + typescript: ^4.7.4 + dependencies: + eslint: 7.32.0 + eslint-config-next: 12.3.0_dyxdave6dwjbccc5dgiifcmuza + eslint-config-prettier: 8.5.0_eslint@7.32.0 + eslint-config-turbo: 0.0.3_eslint@7.32.0 + eslint-plugin-react: 7.31.7_eslint@7.32.0 + devDependencies: + typescript: 4.8.3 + + packages/tsconfig: + specifiers: {} + + packages/ui: + specifiers: + "@types/react": ^18.0.17 + "@types/react-dom": ^18.0.6 + eslint: ^7.32.0 + eslint-config-custom: workspace:* + react: ^18.2.0 + tsconfig: workspace:* + typescript: ^4.5.2 + devDependencies: + "@types/react": 18.0.20 + "@types/react-dom": 18.0.6 + eslint: 7.32.0 + eslint-config-custom: link:../eslint-config-custom + react: 18.2.0 + tsconfig: link:../tsconfig + typescript: 4.8.3 + +packages: + /@ampproject/remapping/2.2.0: + resolution: + { + integrity: sha512-qRmjj8nj9qmLTQXXmaR1cck3UXSRMPrbsLJAasZpF+t3riI71BXed5ebIOYwQntykeZuhjsdweEc9BxH5Jc26w==, + } + engines: { node: ">=6.0.0" } + dependencies: + "@jridgewell/gen-mapping": 0.1.1 + "@jridgewell/trace-mapping": 0.3.15 + + /@babel/code-frame/7.12.11: + resolution: + { + integrity: sha512-Zt1yodBx1UcyiePMSkWnU4hPqhwq7hGi2nFL1LeA3EUl+q2LQx16MISgJ0+z7dnmgvP9QtIleuETGOiOH1RcIw==, + } + dependencies: + "@babel/highlight": 7.18.6 + + /@babel/code-frame/7.18.6: + resolution: + { + integrity: sha512-TDCmlK5eOvH+eH7cdAFlNXeVJqWIQ7gW9tY1GJIpUtFb6CmjVyq2VM3u71bOyR8CRihcCgMUYoDNyLXao3+70Q==, + } + engines: { node: ">=6.9.0" } + dependencies: + "@babel/highlight": 7.18.6 + + /@babel/compat-data/7.19.1: + resolution: + { + integrity: sha512-72a9ghR0gnESIa7jBN53U32FOVCEoztyIlKaNoU05zRhEecduGK9L9c3ww7Mp06JiR+0ls0GBPFJQwwtjn9ksg==, + } + engines: { node: ">=6.9.0" } + + /@babel/core/7.19.1: + resolution: + { + integrity: sha512-1H8VgqXme4UXCRv7/Wa1bq7RVymKOzC7znjyFM8KiEzwFqcKUKYNoQef4GhdklgNvoBXyW4gYhuBNCM5o1zImw==, + } + engines: { node: ">=6.9.0" } + dependencies: + "@ampproject/remapping": 2.2.0 + "@babel/code-frame": 7.18.6 + "@babel/generator": 7.19.0 + "@babel/helper-compilation-targets": 7.19.1_@babel+core@7.19.1 + "@babel/helper-module-transforms": 7.19.0 + "@babel/helpers": 7.19.0 + "@babel/parser": 7.19.1 + "@babel/template": 7.18.10 + "@babel/traverse": 7.19.1 + "@babel/types": 7.19.0 + convert-source-map: 1.8.0 + debug: 4.3.4 + gensync: 1.0.0-beta.2 + json5: 2.2.1 + semver: 6.3.0 + transitivePeerDependencies: + - supports-color + + /@babel/generator/7.19.0: + resolution: + { + integrity: sha512-S1ahxf1gZ2dpoiFgA+ohK9DIpz50bJ0CWs7Zlzb54Z4sG8qmdIrGrVqmy1sAtTVRb+9CU6U8VqT9L0Zj7hxHVg==, + } + engines: { node: ">=6.9.0" } + dependencies: + "@babel/types": 7.19.0 + "@jridgewell/gen-mapping": 0.3.2 + jsesc: 2.5.2 + + /@babel/helper-compilation-targets/7.19.1_@babel+core@7.19.1: + resolution: + { + integrity: sha512-LlLkkqhCMyz2lkQPvJNdIYU7O5YjWRgC2R4omjCTpZd8u8KMQzZvX4qce+/BluN1rcQiV7BoGUpmQ0LeHerbhg==, + } + engines: { node: ">=6.9.0" } + peerDependencies: + "@babel/core": ^7.0.0 + dependencies: + "@babel/compat-data": 7.19.1 + "@babel/core": 
7.19.1 + "@babel/helper-validator-option": 7.18.6 + browserslist: 4.21.3 + semver: 6.3.0 + + /@babel/helper-environment-visitor/7.18.9: + resolution: + { + integrity: sha512-3r/aACDJ3fhQ/EVgFy0hpj8oHyHpQc+LPtJoY9SzTThAsStm4Ptegq92vqKoE3vD706ZVFWITnMnxucw+S9Ipg==, + } + engines: { node: ">=6.9.0" } + + /@babel/helper-function-name/7.19.0: + resolution: + { + integrity: sha512-WAwHBINyrpqywkUH0nTnNgI5ina5TFn85HKS0pbPDfxFfhyR/aNQEn4hGi1P1JyT//I0t4OgXUlofzWILRvS5w==, + } + engines: { node: ">=6.9.0" } + dependencies: + "@babel/template": 7.18.10 + "@babel/types": 7.19.0 + + /@babel/helper-hoist-variables/7.18.6: + resolution: + { + integrity: sha512-UlJQPkFqFULIcyW5sbzgbkxn2FKRgwWiRexcuaR8RNJRy8+LLveqPjwZV/bwrLZCN0eUHD/x8D0heK1ozuoo6Q==, + } + engines: { node: ">=6.9.0" } + dependencies: + "@babel/types": 7.19.0 + + /@babel/helper-module-imports/7.18.6: + resolution: + { + integrity: sha512-0NFvs3VkuSYbFi1x2Vd6tKrywq+z/cLeYC/RJNFrIX/30Bf5aiGYbtvGXolEktzJH8o5E5KJ3tT+nkxuuZFVlA==, + } + engines: { node: ">=6.9.0" } + dependencies: + "@babel/types": 7.19.0 + + /@babel/helper-module-transforms/7.19.0: + resolution: + { + integrity: sha512-3HBZ377Fe14RbLIA+ac3sY4PTgpxHVkFrESaWhoI5PuyXPBBX8+C34qblV9G89ZtycGJCmCI/Ut+VUDK4bltNQ==, + } + engines: { node: ">=6.9.0" } + dependencies: + "@babel/helper-environment-visitor": 7.18.9 + "@babel/helper-module-imports": 7.18.6 + "@babel/helper-simple-access": 7.18.6 + "@babel/helper-split-export-declaration": 7.18.6 + "@babel/helper-validator-identifier": 7.19.1 + "@babel/template": 7.18.10 + "@babel/traverse": 7.19.1 + "@babel/types": 7.19.0 + transitivePeerDependencies: + - supports-color + + /@babel/helper-simple-access/7.18.6: + resolution: + { + integrity: sha512-iNpIgTgyAvDQpDj76POqg+YEt8fPxx3yaNBg3S30dxNKm2SWfYhD0TGrK/Eu9wHpUW63VQU894TsTg+GLbUa1g==, + } + engines: { node: ">=6.9.0" } + dependencies: + "@babel/types": 7.19.0 + + /@babel/helper-split-export-declaration/7.18.6: + resolution: + { + integrity: sha512-bde1etTx6ZyTmobl9LLMMQsaizFVZrquTEHOqKeQESMKo4PlObf+8+JA25ZsIpZhT/WEd39+vOdLXAFG/nELpA==, + } + engines: { node: ">=6.9.0" } + dependencies: + "@babel/types": 7.19.0 + + /@babel/helper-string-parser/7.18.10: + resolution: + { + integrity: sha512-XtIfWmeNY3i4t7t4D2t02q50HvqHybPqW2ki1kosnvWCwuCMeo81Jf0gwr85jy/neUdg5XDdeFE/80DXiO+njw==, + } + engines: { node: ">=6.9.0" } + + /@babel/helper-validator-identifier/7.19.1: + resolution: + { + integrity: sha512-awrNfaMtnHUr653GgGEs++LlAvW6w+DcPrOliSMXWCKo597CwL5Acf/wWdNkf/tfEQE3mjkeD1YOVZOUV/od1w==, + } + engines: { node: ">=6.9.0" } + + /@babel/helper-validator-option/7.18.6: + resolution: + { + integrity: sha512-XO7gESt5ouv/LRJdrVjkShckw6STTaB7l9BrpBaAHDeF5YZT+01PCwmR0SJHnkW6i8OwW/EVWRShfi4j2x+KQw==, + } + engines: { node: ">=6.9.0" } + + /@babel/helpers/7.19.0: + resolution: + { + integrity: sha512-DRBCKGwIEdqY3+rPJgG/dKfQy9+08rHIAJx8q2p+HSWP87s2HCrQmaAMMyMll2kIXKCW0cO1RdQskx15Xakftg==, + } + engines: { node: ">=6.9.0" } + dependencies: + "@babel/template": 7.18.10 + "@babel/traverse": 7.19.1 + "@babel/types": 7.19.0 + transitivePeerDependencies: + - supports-color + + /@babel/highlight/7.18.6: + resolution: + { + integrity: sha512-u7stbOuYjaPezCuLj29hNW1v64M2Md2qupEKP1fHc7WdOA3DgLh37suiSrZYY7haUB7iBeQZ9P1uiRF359do3g==, + } + engines: { node: ">=6.9.0" } + dependencies: + "@babel/helper-validator-identifier": 7.19.1 + chalk: 2.4.2 + js-tokens: 4.0.0 + + /@babel/parser/7.19.1: + resolution: + { + integrity: sha512-h7RCSorm1DdTVGJf3P2Mhj3kdnkmF/EiysUkzS2TdgAYqyjFdMQJbVuXOBej2SBJaXan/lIVtT6KkGbyyq753A==, + 
} + engines: { node: ">=6.0.0" } + hasBin: true + dependencies: + "@babel/types": 7.19.0 + + /@babel/runtime-corejs3/7.19.1: + resolution: + { + integrity: sha512-j2vJGnkopRzH+ykJ8h68wrHnEUmtK//E723jjixiAl/PPf6FhqY/vYRcMVlNydRKQjQsTsYEjpx+DZMIvnGk/g==, + } + engines: { node: ">=6.9.0" } + dependencies: + core-js-pure: 3.25.1 + regenerator-runtime: 0.13.9 + dev: false + + /@babel/runtime/7.19.0: + resolution: + { + integrity: sha512-eR8Lo9hnDS7tqkO7NsV+mKvCmv5boaXFSZ70DnfhcgiEne8hv9oCEd36Klw74EtizEqLsy4YnW8UWwpBVolHZA==, + } + engines: { node: ">=6.9.0" } + dependencies: + regenerator-runtime: 0.13.9 + dev: false + + /@babel/template/7.18.10: + resolution: + { + integrity: sha512-TI+rCtooWHr3QJ27kJxfjutghu44DLnasDMwpDqCXVTal9RLp3RSYNh4NdBrRP2cQAoG9A8juOQl6P6oZG4JxA==, + } + engines: { node: ">=6.9.0" } + dependencies: + "@babel/code-frame": 7.18.6 + "@babel/parser": 7.19.1 + "@babel/types": 7.19.0 + + /@babel/traverse/7.19.1: + resolution: + { + integrity: sha512-0j/ZfZMxKukDaag2PtOPDbwuELqIar6lLskVPPJDjXMXjfLb1Obo/1yjxIGqqAJrmfaTIY3z2wFLAQ7qSkLsuA==, + } + engines: { node: ">=6.9.0" } + dependencies: + "@babel/code-frame": 7.18.6 + "@babel/generator": 7.19.0 + "@babel/helper-environment-visitor": 7.18.9 + "@babel/helper-function-name": 7.19.0 + "@babel/helper-hoist-variables": 7.18.6 + "@babel/helper-split-export-declaration": 7.18.6 + "@babel/parser": 7.19.1 + "@babel/types": 7.19.0 + debug: 4.3.4 + globals: 11.12.0 + transitivePeerDependencies: + - supports-color + + /@babel/types/7.19.0: + resolution: + { + integrity: sha512-YuGopBq3ke25BVSiS6fgF49Ul9gH1x70Bcr6bqRLjWCkcX8Hre1/5+z+IiWOIerRMSSEfGZVB9z9kyq7wVs9YA==, + } + engines: { node: ">=6.9.0" } + dependencies: + "@babel/helper-string-parser": 7.18.10 + "@babel/helper-validator-identifier": 7.19.1 + to-fast-properties: 2.0.0 + + /@eslint/eslintrc/0.4.3: + resolution: + { + integrity: sha512-J6KFFz5QCYUJq3pf0mjEcCJVERbzv71PUIDczuh9JkwGEzced6CO5ADLHB1rbf/+oPBtoPfMYNOpGDzCANlbXw==, + } + engines: { node: ^10.12.0 || >=12.0.0 } + dependencies: + ajv: 6.12.6 + debug: 4.3.4 + espree: 7.3.1 + globals: 13.17.0 + ignore: 4.0.6 + import-fresh: 3.3.0 + js-yaml: 3.14.1 + minimatch: 3.1.2 + strip-json-comments: 3.1.1 + transitivePeerDependencies: + - supports-color + + /@humanwhocodes/config-array/0.5.0: + resolution: + { + integrity: sha512-FagtKFz74XrTl7y6HCzQpwDfXP0yhxe9lHLD1UZxjvZIcbyRz8zTFF/yYNfSfzU414eDwZ1SrO0Qvtyf+wFMQg==, + } + engines: { node: ">=10.10.0" } + dependencies: + "@humanwhocodes/object-schema": 1.2.1 + debug: 4.3.4 + minimatch: 3.1.2 + transitivePeerDependencies: + - supports-color + + /@humanwhocodes/object-schema/1.2.1: + resolution: + { + integrity: sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA==, + } + + /@jridgewell/gen-mapping/0.1.1: + resolution: + { + integrity: sha512-sQXCasFk+U8lWYEe66WxRDOE9PjVz4vSM51fTu3Hw+ClTpUSQb718772vH3pyS5pShp6lvQM7SxgIDXXXmOX7w==, + } + engines: { node: ">=6.0.0" } + dependencies: + "@jridgewell/set-array": 1.1.2 + "@jridgewell/sourcemap-codec": 1.4.14 + + /@jridgewell/gen-mapping/0.3.2: + resolution: + { + integrity: sha512-mh65xKQAzI6iBcFzwv28KVWSmCkdRBWoOh+bYQGW3+6OZvbbN3TqMGo5hqYxQniRcH9F2VZIoJCm4pa3BPDK/A==, + } + engines: { node: ">=6.0.0" } + dependencies: + "@jridgewell/set-array": 1.1.2 + "@jridgewell/sourcemap-codec": 1.4.14 + "@jridgewell/trace-mapping": 0.3.15 + + /@jridgewell/resolve-uri/3.1.0: + resolution: + { + integrity: sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w==, + } + 
engines: { node: ">=6.0.0" } + + /@jridgewell/set-array/1.1.2: + resolution: + { + integrity: sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw==, + } + engines: { node: ">=6.0.0" } + + /@jridgewell/sourcemap-codec/1.4.14: + resolution: + { + integrity: sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw==, + } + + /@jridgewell/trace-mapping/0.3.15: + resolution: + { + integrity: sha512-oWZNOULl+UbhsgB51uuZzglikfIKSUBO/M9W2OfEjn7cmqoAiCgmv9lyACTUacZwBz0ITnJ2NqjU8Tx0DHL88g==, + } + dependencies: + "@jridgewell/resolve-uri": 3.1.0 + "@jridgewell/sourcemap-codec": 1.4.14 + + /@next/env/12.2.5: + resolution: + { + integrity: sha512-vLPLV3cpPGjUPT3PjgRj7e3nio9t6USkuew3JE/jMeon/9Mvp1WyR18v3iwnCuX7eUAm1HmAbJHHLAbcu/EJcw==, + } + dev: false + + /@next/eslint-plugin-next/12.3.0: + resolution: + { + integrity: sha512-jVdq1qYTNDjUtulnE8/hkPv0pHILV4jMg5La99iaY/FFm20WxVnsAZtbNnMvlPbf8dc010oO304SX9yXbg5PAw==, + } + dependencies: + glob: 7.1.7 + dev: false + + /@next/swc-android-arm-eabi/12.2.5: + resolution: + { + integrity: sha512-cPWClKxGhgn2dLWnspW+7psl3MoLQUcNqJqOHk2BhNcou9ARDtC0IjQkKe5qcn9qg7I7U83Gp1yh2aesZfZJMA==, + } + engines: { node: ">= 10" } + cpu: [arm] + os: [android] + requiresBuild: true + dev: false + optional: true + + /@next/swc-android-arm64/12.2.5: + resolution: + { + integrity: sha512-vMj0efliXmC5b7p+wfcQCX0AfU8IypjkzT64GiKJD9PgiA3IILNiGJr1fw2lyUDHkjeWx/5HMlMEpLnTsQslwg==, + } + engines: { node: ">= 10" } + cpu: [arm64] + os: [android] + requiresBuild: true + dev: false + optional: true + + /@next/swc-darwin-arm64/12.2.5: + resolution: + { + integrity: sha512-VOPWbO5EFr6snla/WcxUKtvzGVShfs302TEMOtzYyWni6f9zuOetijJvVh9CCTzInnXAZMtHyNhefijA4HMYLg==, + } + engines: { node: ">= 10" } + cpu: [arm64] + os: [darwin] + requiresBuild: true + dev: false + optional: true + + /@next/swc-darwin-x64/12.2.5: + resolution: + { + integrity: sha512-5o8bTCgAmtYOgauO/Xd27vW52G2/m3i5PX7MUYePquxXAnX73AAtqA3WgPXBRitEB60plSKZgOTkcpqrsh546A==, + } + engines: { node: ">= 10" } + cpu: [x64] + os: [darwin] + requiresBuild: true + dev: false + optional: true + + /@next/swc-freebsd-x64/12.2.5: + resolution: + { + integrity: sha512-yYUbyup1JnznMtEBRkK4LT56N0lfK5qNTzr6/DEyDw5TbFVwnuy2hhLBzwCBkScFVjpFdfiC6SQAX3FrAZzuuw==, + } + engines: { node: ">= 10" } + cpu: [x64] + os: [freebsd] + requiresBuild: true + dev: false + optional: true + + /@next/swc-linux-arm-gnueabihf/12.2.5: + resolution: + { + integrity: sha512-2ZE2/G921Acks7UopJZVMgKLdm4vN4U0yuzvAMJ6KBavPzqESA2yHJlm85TV/K9gIjKhSk5BVtauIUntFRP8cg==, + } + engines: { node: ">= 10" } + cpu: [arm] + os: [linux] + requiresBuild: true + dev: false + optional: true + + /@next/swc-linux-arm64-gnu/12.2.5: + resolution: + { + integrity: sha512-/I6+PWVlz2wkTdWqhlSYYJ1pWWgUVva6SgX353oqTh8njNQp1SdFQuWDqk8LnM6ulheVfSsgkDzxrDaAQZnzjQ==, + } + engines: { node: ">= 10" } + cpu: [arm64] + os: [linux] + requiresBuild: true + dev: false + optional: true + + /@next/swc-linux-arm64-musl/12.2.5: + resolution: + { + integrity: sha512-LPQRelfX6asXyVr59p5sTpx5l+0yh2Vjp/R8Wi4X9pnqcayqT4CUJLiHqCvZuLin3IsFdisJL0rKHMoaZLRfmg==, + } + engines: { node: ">= 10" } + cpu: [arm64] + os: [linux] + requiresBuild: true + dev: false + optional: true + + /@next/swc-linux-x64-gnu/12.2.5: + resolution: + { + integrity: sha512-0szyAo8jMCClkjNK0hknjhmAngUppoRekW6OAezbEYwHXN/VNtsXbfzgYOqjKWxEx3OoAzrT3jLwAF0HdX2MEw==, + } + engines: { node: ">= 10" } + cpu: [x64] + os: [linux] + requiresBuild: true + dev: false + optional: 
true + + /@next/swc-linux-x64-musl/12.2.5: + resolution: + { + integrity: sha512-zg/Y6oBar1yVnW6Il1I/08/2ukWtOG6s3acdJdEyIdsCzyQi4RLxbbhkD/EGQyhqBvd3QrC6ZXQEXighQUAZ0g==, + } + engines: { node: ">= 10" } + cpu: [x64] + os: [linux] + requiresBuild: true + dev: false + optional: true + + /@next/swc-win32-arm64-msvc/12.2.5: + resolution: + { + integrity: sha512-3/90DRNSqeeSRMMEhj4gHHQlLhhKg5SCCoYfE3kBjGpE63EfnblYUqsszGGZ9ekpKL/R4/SGB40iCQr8tR5Jiw==, + } + engines: { node: ">= 10" } + cpu: [arm64] + os: [win32] + requiresBuild: true + dev: false + optional: true + + /@next/swc-win32-ia32-msvc/12.2.5: + resolution: + { + integrity: sha512-hGLc0ZRAwnaPL4ulwpp4D2RxmkHQLuI8CFOEEHdzZpS63/hMVzv81g8jzYA0UXbb9pus/iTc3VRbVbAM03SRrw==, + } + engines: { node: ">= 10" } + cpu: [ia32] + os: [win32] + requiresBuild: true + dev: false + optional: true + + /@next/swc-win32-x64-msvc/12.2.5: + resolution: + { + integrity: sha512-7h5/ahY7NeaO2xygqVrSG/Y8Vs4cdjxIjowTZ5W6CKoTKn7tmnuxlUc2h74x06FKmbhAd9agOjr/AOKyxYYm9Q==, + } + engines: { node: ">= 10" } + cpu: [x64] + os: [win32] + requiresBuild: true + dev: false + optional: true + + /@nodelib/fs.scandir/2.1.5: + resolution: + { + integrity: sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==, + } + engines: { node: ">= 8" } + dependencies: + "@nodelib/fs.stat": 2.0.5 + run-parallel: 1.2.0 + dev: false + + /@nodelib/fs.stat/2.0.5: + resolution: + { + integrity: sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==, + } + engines: { node: ">= 8" } + dev: false + + /@nodelib/fs.walk/1.2.8: + resolution: + { + integrity: sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==, + } + engines: { node: ">= 8" } + dependencies: + "@nodelib/fs.scandir": 2.1.5 + fastq: 1.13.0 + dev: false + + /@rushstack/eslint-patch/1.1.4: + resolution: + { + integrity: sha512-LwzQKA4vzIct1zNZzBmRKI9QuNpLgTQMEjsQLf3BXuGYb3QPTP4Yjf6mkdX+X1mYttZ808QpOwAzZjv28kq7DA==, + } + dev: false + + /@swc/helpers/0.4.3: + resolution: + { + integrity: sha512-6JrF+fdUK2zbGpJIlN7G3v966PQjyx/dPt1T9km2wj+EUBqgrxCk3uX4Kct16MIm9gGxfKRcfax2hVf5jvlTzA==, + } + dependencies: + tslib: 2.4.0 + dev: false + + /@types/json5/0.0.29: + resolution: + { + integrity: sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ==, + } + dev: false + + /@types/node/17.0.45: + resolution: + { + integrity: sha512-w+tIMs3rq2afQdsPJlODhoUEKzFP1ayaoyl1CcnwtIlsVe7K7bA1NGm4s3PraqTLlXnbIN84zuBlxBWo1u9BLw==, + } + dev: true + + /@types/prop-types/15.7.5: + resolution: + { + integrity: sha512-JCB8C6SnDoQf0cNycqd/35A7MjcnK+ZTqE7judS6o7utxUCg6imJg3QK2qzHKszlTjcj2cn+NwMB2i96ubpj7w==, + } + dev: true + + /@types/react-dom/18.0.6: + resolution: + { + integrity: sha512-/5OFZgfIPSwy+YuIBP/FgJnQnsxhZhjjrnxudMddeblOouIodEQ75X14Rr4wGSG/bknL+Omy9iWlLo1u/9GzAA==, + } + dependencies: + "@types/react": 18.0.20 + dev: true + + /@types/react/18.0.17: + resolution: + { + integrity: sha512-38ETy4tL+rn4uQQi7mB81G7V1g0u2ryquNmsVIOKUAEIDK+3CUjZ6rSRpdvS99dNBnkLFL83qfmtLacGOTIhwQ==, + } + dependencies: + "@types/prop-types": 15.7.5 + "@types/scheduler": 0.16.2 + csstype: 3.1.1 + dev: true + + /@types/react/18.0.20: + resolution: + { + integrity: sha512-MWul1teSPxujEHVwZl4a5HxQ9vVNsjTchVA+xRqv/VYGCuKGAU6UhfrTdF5aBefwD1BHUD8i/zq+O/vyCm/FrA==, + } + dependencies: + "@types/prop-types": 15.7.5 + "@types/scheduler": 0.16.2 + csstype: 3.1.1 + dev: true + + /@types/scheduler/0.16.2: + resolution: + { + 
integrity: sha512-hppQEBDmlwhFAXKJX2KnWLYu5yMfi91yazPb2l+lbJiwW+wdo1gNeRA+3RgNSO39WYX2euey41KEwnqesU2Jew==, + } + dev: true + + /@typescript-eslint/parser/5.37.0_dyxdave6dwjbccc5dgiifcmuza: + resolution: + { + integrity: sha512-01VzI/ipYKuaG5PkE5+qyJ6m02fVALmMPY3Qq5BHflDx3y4VobbLdHQkSMg9VPRS4KdNt4oYTMaomFoHonBGAw==, + } + engines: { node: ^12.22.0 || ^14.17.0 || >=16.0.0 } + peerDependencies: + eslint: ^6.0.0 || ^7.0.0 || ^8.0.0 + typescript: "*" + peerDependenciesMeta: + typescript: + optional: true + dependencies: + "@typescript-eslint/scope-manager": 5.37.0 + "@typescript-eslint/types": 5.37.0 + "@typescript-eslint/typescript-estree": 5.37.0_typescript@4.8.3 + debug: 4.3.4 + eslint: 7.32.0 + typescript: 4.8.3 + transitivePeerDependencies: + - supports-color + dev: false + + /@typescript-eslint/scope-manager/5.37.0: + resolution: + { + integrity: sha512-F67MqrmSXGd/eZnujjtkPgBQzgespu/iCZ+54Ok9X5tALb9L2v3G+QBSoWkXG0p3lcTJsL+iXz5eLUEdSiJU9Q==, + } + engines: { node: ^12.22.0 || ^14.17.0 || >=16.0.0 } + dependencies: + "@typescript-eslint/types": 5.37.0 + "@typescript-eslint/visitor-keys": 5.37.0 + dev: false + + /@typescript-eslint/types/5.37.0: + resolution: + { + integrity: sha512-3frIJiTa5+tCb2iqR/bf7XwU20lnU05r/sgPJnRpwvfZaqCJBrl8Q/mw9vr3NrNdB/XtVyMA0eppRMMBqdJ1bA==, + } + engines: { node: ^12.22.0 || ^14.17.0 || >=16.0.0 } + dev: false + + /@typescript-eslint/typescript-estree/5.37.0_typescript@4.8.3: + resolution: + { + integrity: sha512-JkFoFIt/cx59iqEDSgIGnQpCTRv96MQnXCYvJi7QhBC24uyuzbD8wVbajMB1b9x4I0octYFJ3OwjAwNqk1AjDA==, + } + engines: { node: ^12.22.0 || ^14.17.0 || >=16.0.0 } + peerDependencies: + typescript: "*" + peerDependenciesMeta: + typescript: + optional: true + dependencies: + "@typescript-eslint/types": 5.37.0 + "@typescript-eslint/visitor-keys": 5.37.0 + debug: 4.3.4 + globby: 11.1.0 + is-glob: 4.0.3 + semver: 7.3.7 + tsutils: 3.21.0_typescript@4.8.3 + typescript: 4.8.3 + transitivePeerDependencies: + - supports-color + dev: false + + /@typescript-eslint/visitor-keys/5.37.0: + resolution: + { + integrity: sha512-Hp7rT4cENBPIzMwrlehLW/28EVCOcE9U1Z1BQTc8EA8v5qpr7GRGuG+U58V5tTY48zvUOA3KHvw3rA8tY9fbdA==, + } + engines: { node: ^12.22.0 || ^14.17.0 || >=16.0.0 } + dependencies: + "@typescript-eslint/types": 5.37.0 + eslint-visitor-keys: 3.3.0 + dev: false + + /acorn-jsx/5.3.2_acorn@7.4.1: + resolution: + { + integrity: sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==, + } + peerDependencies: + acorn: ^6.0.0 || ^7.0.0 || ^8.0.0 + dependencies: + acorn: 7.4.1 + + /acorn/7.4.1: + resolution: + { + integrity: sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A==, + } + engines: { node: ">=0.4.0" } + hasBin: true + + /ajv/6.12.6: + resolution: + { + integrity: sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==, + } + dependencies: + fast-deep-equal: 3.1.3 + fast-json-stable-stringify: 2.1.0 + json-schema-traverse: 0.4.1 + uri-js: 4.4.1 + + /ajv/8.11.0: + resolution: + { + integrity: sha512-wGgprdCvMalC0BztXvitD2hC04YffAvtsUn93JbGXYLAtCUO4xd17mCCZQxUOItiBwZvJScWo8NIvQMQ71rdpg==, + } + dependencies: + fast-deep-equal: 3.1.3 + json-schema-traverse: 1.0.0 + require-from-string: 2.0.2 + uri-js: 4.4.1 + + /ansi-colors/4.1.3: + resolution: + { + integrity: sha512-/6w/C21Pm1A7aZitlI5Ni/2J6FFQN8i1Cvz3kHABAAbw93v/NlvKdVOqz7CCWz/3iv/JplRSEEZ83XION15ovw==, + } + engines: { node: ">=6" } + + /ansi-regex/5.0.1: + resolution: + { + integrity: 
sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==, + } + engines: { node: ">=8" } + + /ansi-styles/3.2.1: + resolution: + { + integrity: sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==, + } + engines: { node: ">=4" } + dependencies: + color-convert: 1.9.3 + + /ansi-styles/4.3.0: + resolution: + { + integrity: sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==, + } + engines: { node: ">=8" } + dependencies: + color-convert: 2.0.1 + + /argparse/1.0.10: + resolution: + { + integrity: sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==, + } + dependencies: + sprintf-js: 1.0.3 + + /aria-query/4.2.2: + resolution: + { + integrity: sha512-o/HelwhuKpTj/frsOsbNLNgnNGVIFsVP/SW2BSF14gVl7kAfMOJ6/8wUAUvG1R1NHKrfG+2sHZTu0yauT1qBrA==, + } + engines: { node: ">=6.0" } + dependencies: + "@babel/runtime": 7.19.0 + "@babel/runtime-corejs3": 7.19.1 + dev: false + + /array-includes/3.1.5: + resolution: + { + integrity: sha512-iSDYZMMyTPkiFasVqfuAQnWAYcvO/SeBSCGKePoEthjp4LEMTe4uLc7b025o4jAZpHhihh8xPo99TNWUWWkGDQ==, + } + engines: { node: ">= 0.4" } + dependencies: + call-bind: 1.0.2 + define-properties: 1.1.4 + es-abstract: 1.20.2 + get-intrinsic: 1.1.3 + is-string: 1.0.7 + dev: false + + /array-union/2.1.0: + resolution: + { + integrity: sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==, + } + engines: { node: ">=8" } + dev: false + + /array.prototype.flat/1.3.0: + resolution: + { + integrity: sha512-12IUEkHsAhA4DY5s0FPgNXIdc8VRSqD9Zp78a5au9abH/SOBrsp082JOWFNTjkMozh8mqcdiKuaLGhPeYztxSw==, + } + engines: { node: ">= 0.4" } + dependencies: + call-bind: 1.0.2 + define-properties: 1.1.4 + es-abstract: 1.20.2 + es-shim-unscopables: 1.0.0 + dev: false + + /array.prototype.flatmap/1.3.0: + resolution: + { + integrity: sha512-PZC9/8TKAIxcWKdyeb77EzULHPrIX/tIZebLJUQOMR1OwYosT8yggdfWScfTBCDj5utONvOuPQQumYsU2ULbkg==, + } + engines: { node: ">= 0.4" } + dependencies: + call-bind: 1.0.2 + define-properties: 1.1.4 + es-abstract: 1.20.2 + es-shim-unscopables: 1.0.0 + dev: false + + /ast-types-flow/0.0.7: + resolution: + { + integrity: sha512-eBvWn1lvIApYMhzQMsu9ciLfkBY499mFZlNqG+/9WR7PVlroQw0vG30cOQQbaKz3sCEc44TAOu2ykzqXSNnwag==, + } + dev: false + + /astral-regex/2.0.0: + resolution: + { + integrity: sha512-Z7tMw1ytTXt5jqMcOP+OQteU1VuNK9Y02uuJtKQ1Sv69jXQKKg5cibLwGJow8yzZP+eAc18EmLGPal0bp36rvQ==, + } + engines: { node: ">=8" } + + /axe-core/4.4.3: + resolution: + { + integrity: sha512-32+ub6kkdhhWick/UjvEwRchgoetXqTK14INLqbGm5U2TzBkBNF3nQtLYm8ovxSkQWArjEQvftCKryjZaATu3w==, + } + engines: { node: ">=4" } + dev: false + + /axobject-query/2.2.0: + resolution: + { + integrity: sha512-Td525n+iPOOyUQIeBfcASuG6uJsDOITl7Mds5gFyerkWiX7qhUTdYUBlSgNMyVqtSJqwpt1kXGLdUt6SykLMRA==, + } + dev: false + + /balanced-match/1.0.2: + resolution: + { + integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==, + } + + /brace-expansion/1.1.11: + resolution: + { + integrity: sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==, + } + dependencies: + balanced-match: 1.0.2 + concat-map: 0.0.1 + + /braces/3.0.2: + resolution: + { + integrity: sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==, + } + engines: { node: ">=8" } + dependencies: + fill-range: 7.0.1 + dev: false + + 
/browserslist/4.21.3: + resolution: + { + integrity: sha512-898rgRXLAyRkM1GryrrBHGkqA5hlpkV5MhtZwg9QXeiyLUYs2k00Un05aX5l2/yJIOObYKOpS2JNo8nJDE7fWQ==, + } + engines: { node: ^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7 } + hasBin: true + dependencies: + caniuse-lite: 1.0.30001399 + electron-to-chromium: 1.4.249 + node-releases: 2.0.6 + update-browserslist-db: 1.0.9_browserslist@4.21.3 + + /call-bind/1.0.2: + resolution: + { + integrity: sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==, + } + dependencies: + function-bind: 1.1.1 + get-intrinsic: 1.1.3 + dev: false + + /callsites/3.1.0: + resolution: + { + integrity: sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==, + } + engines: { node: ">=6" } + + /caniuse-lite/1.0.30001399: + resolution: + { + integrity: sha512-4vQ90tMKS+FkvuVWS5/QY1+d805ODxZiKFzsU8o/RsVJz49ZSRR8EjykLJbqhzdPgadbX6wB538wOzle3JniRA==, + } + + /chalk/2.4.2: + resolution: + { + integrity: sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==, + } + engines: { node: ">=4" } + dependencies: + ansi-styles: 3.2.1 + escape-string-regexp: 1.0.5 + supports-color: 5.5.0 + + /chalk/4.1.2: + resolution: + { + integrity: sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==, + } + engines: { node: ">=10" } + dependencies: + ansi-styles: 4.3.0 + supports-color: 7.2.0 + + /color-convert/1.9.3: + resolution: + { + integrity: sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==, + } + dependencies: + color-name: 1.1.3 + + /color-convert/2.0.1: + resolution: + { + integrity: sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==, + } + engines: { node: ">=7.0.0" } + dependencies: + color-name: 1.1.4 + + /color-name/1.1.3: + resolution: + { + integrity: sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==, + } + + /color-name/1.1.4: + resolution: + { + integrity: sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==, + } + + /concat-map/0.0.1: + resolution: + { + integrity: sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==, + } + + /convert-source-map/1.8.0: + resolution: + { + integrity: sha512-+OQdjP49zViI/6i7nIJpA8rAl4sV/JdPfU9nZs3VqOwGIgizICvuN2ru6fMd+4llL0tar18UYJXfZ/TWtmhUjA==, + } + dependencies: + safe-buffer: 5.1.2 + + /core-js-pure/3.25.1: + resolution: + { + integrity: sha512-7Fr74bliUDdeJCBMxkkIuQ4xfxn/SwrVg+HkJUAoNEXVqYLv55l6Af0dJ5Lq2YBUW9yKqSkLXaS5SYPK6MGa/A==, + } + requiresBuild: true + dev: false + + /cross-spawn/7.0.3: + resolution: + { + integrity: sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==, + } + engines: { node: ">= 8" } + dependencies: + path-key: 3.1.1 + shebang-command: 2.0.0 + which: 2.0.2 + + /csstype/3.1.1: + resolution: + { + integrity: sha512-DJR/VvkAvSZW9bTouZue2sSxDwdTN92uHjqeKVm+0dAqdfNykRzQ95tay8aXMBAAPpUiq4Qcug2L7neoRh2Egw==, + } + dev: true + + /damerau-levenshtein/1.0.8: + resolution: + { + integrity: sha512-sdQSFB7+llfUcQHUQO3+B8ERRj0Oa4w9POWMI/puGtuf7gFywGmkaLCElnudfTiKZV+NvHqL0ifzdrI8Ro7ESA==, + } + dev: false + + /debug/2.6.9: + resolution: + { + integrity: sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==, + } + peerDependencies: + supports-color: "*" + 
peerDependenciesMeta: + supports-color: + optional: true + dependencies: + ms: 2.0.0 + dev: false + + /debug/3.2.7: + resolution: + { + integrity: sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==, + } + peerDependencies: + supports-color: "*" + peerDependenciesMeta: + supports-color: + optional: true + dependencies: + ms: 2.1.3 + dev: false + + /debug/4.3.4: + resolution: + { + integrity: sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==, + } + engines: { node: ">=6.0" } + peerDependencies: + supports-color: "*" + peerDependenciesMeta: + supports-color: + optional: true + dependencies: + ms: 2.1.2 + + /deep-is/0.1.4: + resolution: + { + integrity: sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==, + } + + /define-properties/1.1.4: + resolution: + { + integrity: sha512-uckOqKcfaVvtBdsVkdPv3XjveQJsNQqmhXgRi8uhvWWuPYZCNlzT8qAyblUgNoXdHdjMTzAqeGjAoli8f+bzPA==, + } + engines: { node: ">= 0.4" } + dependencies: + has-property-descriptors: 1.0.0 + object-keys: 1.1.1 + dev: false + + /dir-glob/3.0.1: + resolution: + { + integrity: sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==, + } + engines: { node: ">=8" } + dependencies: + path-type: 4.0.0 + dev: false + + /doctrine/2.1.0: + resolution: + { + integrity: sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==, + } + engines: { node: ">=0.10.0" } + dependencies: + esutils: 2.0.3 + dev: false + + /doctrine/3.0.0: + resolution: + { + integrity: sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==, + } + engines: { node: ">=6.0.0" } + dependencies: + esutils: 2.0.3 + + /electron-to-chromium/1.4.249: + resolution: + { + integrity: sha512-GMCxR3p2HQvIw47A599crTKYZprqihoBL4lDSAUmr7IYekXFK5t/WgEBrGJDCa2HWIZFQEkGuMqPCi05ceYqPQ==, + } + + /emoji-regex/8.0.0: + resolution: + { + integrity: sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==, + } + + /emoji-regex/9.2.2: + resolution: + { + integrity: sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==, + } + dev: false + + /enhanced-resolve/5.10.0: + resolution: + { + integrity: sha512-T0yTFjdpldGY8PmuXXR0PyQ1ufZpEGiHVrp7zHKB7jdR4qlmZHhONVM5AQOAWXuF/w3dnHbEQVrNptJgt7F+cQ==, + } + engines: { node: ">=10.13.0" } + dependencies: + graceful-fs: 4.2.10 + tapable: 2.2.1 + dev: true + + /enquirer/2.3.6: + resolution: + { + integrity: sha512-yjNnPr315/FjS4zIsUxYguYUPP2e1NK4d7E7ZOLiyYCcbFBiTMyID+2wvm2w6+pZ/odMA7cRkjhsPbltwBOrLg==, + } + engines: { node: ">=8.6" } + dependencies: + ansi-colors: 4.1.3 + + /es-abstract/1.20.2: + resolution: + { + integrity: sha512-XxXQuVNrySBNlEkTYJoDNFe5+s2yIOpzq80sUHEdPdQr0S5nTLz4ZPPPswNIpKseDDUS5yghX1gfLIHQZ1iNuQ==, + } + engines: { node: ">= 0.4" } + dependencies: + call-bind: 1.0.2 + es-to-primitive: 1.2.1 + function-bind: 1.1.1 + function.prototype.name: 1.1.5 + get-intrinsic: 1.1.3 + get-symbol-description: 1.0.0 + has: 1.0.3 + has-property-descriptors: 1.0.0 + has-symbols: 1.0.3 + internal-slot: 1.0.3 + is-callable: 1.2.5 + is-negative-zero: 2.0.2 + is-regex: 1.1.4 + is-shared-array-buffer: 1.0.2 + is-string: 1.0.7 + is-weakref: 1.0.2 + object-inspect: 1.12.2 + object-keys: 1.1.1 + object.assign: 4.1.4 + regexp.prototype.flags: 1.4.3 + string.prototype.trimend: 1.0.5 + string.prototype.trimstart: 1.0.5 + unbox-primitive: 
1.0.2 + dev: false + + /es-shim-unscopables/1.0.0: + resolution: + { + integrity: sha512-Jm6GPcCdC30eMLbZ2x8z2WuRwAws3zTBBKuusffYVUrNj/GVSUAZ+xKMaUpfNDR5IbyNA5LJbaecoUVbmUcB1w==, + } + dependencies: + has: 1.0.3 + dev: false + + /es-to-primitive/1.2.1: + resolution: + { + integrity: sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==, + } + engines: { node: ">= 0.4" } + dependencies: + is-callable: 1.2.5 + is-date-object: 1.0.5 + is-symbol: 1.0.4 + dev: false + + /escalade/3.1.1: + resolution: + { + integrity: sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==, + } + engines: { node: ">=6" } + + /escape-string-regexp/1.0.5: + resolution: + { + integrity: sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==, + } + engines: { node: ">=0.8.0" } + + /escape-string-regexp/4.0.0: + resolution: + { + integrity: sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==, + } + engines: { node: ">=10" } + + /eslint-config-next/12.3.0_dyxdave6dwjbccc5dgiifcmuza: + resolution: + { + integrity: sha512-guHSkNyKnTBB8HU35COgAMeMV0E026BiYRYvyEVVaTOeFcnU3i1EI8/Da0Rl7H3Sgua5FEvoA0vYd2s8kdIUXg==, + } + peerDependencies: + eslint: ^7.23.0 || ^8.0.0 + typescript: ">=3.3.1" + peerDependenciesMeta: + typescript: + optional: true + dependencies: + "@next/eslint-plugin-next": 12.3.0 + "@rushstack/eslint-patch": 1.1.4 + "@typescript-eslint/parser": 5.37.0_dyxdave6dwjbccc5dgiifcmuza + eslint: 7.32.0 + eslint-import-resolver-node: 0.3.6 + eslint-import-resolver-typescript: 2.7.1_hpmu7kn6tcn2vnxpfzvv33bxmy + eslint-plugin-import: 2.26.0_xag76ci373f5hzfwsxolrbhy4a + eslint-plugin-jsx-a11y: 6.6.1_eslint@7.32.0 + eslint-plugin-react: 7.31.7_eslint@7.32.0 + eslint-plugin-react-hooks: 4.6.0_eslint@7.32.0 + typescript: 4.8.3 + transitivePeerDependencies: + - eslint-import-resolver-webpack + - supports-color + dev: false + + /eslint-config-prettier/8.5.0_eslint@7.32.0: + resolution: + { + integrity: sha512-obmWKLUNCnhtQRKc+tmnYuQl0pFU1ibYJQ5BGhTVB08bHe9wC8qUeG7c08dj9XX+AuPj1YSGSQIHl1pnDHZR0Q==, + } + hasBin: true + peerDependencies: + eslint: ">=7.0.0" + dependencies: + eslint: 7.32.0 + dev: false + + /eslint-config-turbo/0.0.3_eslint@7.32.0: + resolution: + { + integrity: sha512-hK5MlxDugUWZV9ZKcyfNwLXrlMuM2wPgAUk51cUFBC3nXRCVmCA9uSRFBZsyAIurN1wH7mS7G1NBo5F8VkF7lQ==, + } + peerDependencies: + eslint: ^7.23.0 || ^8.0.0 + dependencies: + eslint: 7.32.0 + eslint-plugin-turbo: 0.0.3_eslint@7.32.0 + dev: false + + /eslint-import-resolver-node/0.3.6: + resolution: + { + integrity: sha512-0En0w03NRVMn9Uiyn8YRPDKvWjxCWkslUEhGNTdGx15RvPJYQ+lbOlqrlNI2vEAs4pDYK4f/HN2TbDmk5TP0iw==, + } + dependencies: + debug: 3.2.7 + resolve: 1.22.1 + transitivePeerDependencies: + - supports-color + dev: false + + /eslint-import-resolver-typescript/2.7.1_hpmu7kn6tcn2vnxpfzvv33bxmy: + resolution: + { + integrity: sha512-00UbgGwV8bSgUv34igBDbTOtKhqoRMy9bFjNehT40bXg6585PNIct8HhXZ0SybqB9rWtXj9crcku8ndDn/gIqQ==, + } + engines: { node: ">=4" } + peerDependencies: + eslint: "*" + eslint-plugin-import: "*" + dependencies: + debug: 4.3.4 + eslint: 7.32.0 + eslint-plugin-import: 2.26.0_xag76ci373f5hzfwsxolrbhy4a + glob: 7.2.3 + is-glob: 4.0.3 + resolve: 1.22.1 + tsconfig-paths: 3.14.1 + transitivePeerDependencies: + - supports-color + dev: false + + /eslint-module-utils/2.7.4_qk4u2ghovatg5ueomqmuln4u2e: + resolution: + { + integrity: 
sha512-j4GT+rqzCoRKHwURX7pddtIPGySnX9Si/cgMI5ztrcqOPtk5dDEeZ34CQVPphnqkJytlc97Vuk05Um2mJ3gEQA==, + } + engines: { node: ">=4" } + peerDependencies: + "@typescript-eslint/parser": "*" + eslint: "*" + eslint-import-resolver-node: "*" + eslint-import-resolver-typescript: "*" + eslint-import-resolver-webpack: "*" + peerDependenciesMeta: + "@typescript-eslint/parser": + optional: true + eslint: + optional: true + eslint-import-resolver-node: + optional: true + eslint-import-resolver-typescript: + optional: true + eslint-import-resolver-webpack: + optional: true + dependencies: + "@typescript-eslint/parser": 5.37.0_dyxdave6dwjbccc5dgiifcmuza + debug: 3.2.7 + eslint: 7.32.0 + eslint-import-resolver-node: 0.3.6 + eslint-import-resolver-typescript: 2.7.1_hpmu7kn6tcn2vnxpfzvv33bxmy + transitivePeerDependencies: + - supports-color + dev: false + + /eslint-plugin-import/2.26.0_xag76ci373f5hzfwsxolrbhy4a: + resolution: + { + integrity: sha512-hYfi3FXaM8WPLf4S1cikh/r4IxnO6zrhZbEGz2b660EJRbuxgpDS5gkCuYgGWg2xxh2rBuIr4Pvhve/7c31koA==, + } + engines: { node: ">=4" } + peerDependencies: + "@typescript-eslint/parser": "*" + eslint: ^2 || ^3 || ^4 || ^5 || ^6 || ^7.2.0 || ^8 + peerDependenciesMeta: + "@typescript-eslint/parser": + optional: true + dependencies: + "@typescript-eslint/parser": 5.37.0_dyxdave6dwjbccc5dgiifcmuza + array-includes: 3.1.5 + array.prototype.flat: 1.3.0 + debug: 2.6.9 + doctrine: 2.1.0 + eslint: 7.32.0 + eslint-import-resolver-node: 0.3.6 + eslint-module-utils: 2.7.4_qk4u2ghovatg5ueomqmuln4u2e + has: 1.0.3 + is-core-module: 2.10.0 + is-glob: 4.0.3 + minimatch: 3.1.2 + object.values: 1.1.5 + resolve: 1.22.1 + tsconfig-paths: 3.14.1 + transitivePeerDependencies: + - eslint-import-resolver-typescript + - eslint-import-resolver-webpack + - supports-color + dev: false + + /eslint-plugin-jsx-a11y/6.6.1_eslint@7.32.0: + resolution: + { + integrity: sha512-sXgFVNHiWffBq23uiS/JaP6eVR622DqwB4yTzKvGZGcPq6/yZ3WmOZfuBks/vHWo9GaFOqC2ZK4i6+C35knx7Q==, + } + engines: { node: ">=4.0" } + peerDependencies: + eslint: ^3 || ^4 || ^5 || ^6 || ^7 || ^8 + dependencies: + "@babel/runtime": 7.19.0 + aria-query: 4.2.2 + array-includes: 3.1.5 + ast-types-flow: 0.0.7 + axe-core: 4.4.3 + axobject-query: 2.2.0 + damerau-levenshtein: 1.0.8 + emoji-regex: 9.2.2 + eslint: 7.32.0 + has: 1.0.3 + jsx-ast-utils: 3.3.3 + language-tags: 1.0.5 + minimatch: 3.1.2 + semver: 6.3.0 + dev: false + + /eslint-plugin-react-hooks/4.6.0_eslint@7.32.0: + resolution: + { + integrity: sha512-oFc7Itz9Qxh2x4gNHStv3BqJq54ExXmfC+a1NjAta66IAN87Wu0R/QArgIS9qKzX3dXKPI9H5crl9QchNMY9+g==, + } + engines: { node: ">=10" } + peerDependencies: + eslint: ^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0 || ^8.0.0-0 + dependencies: + eslint: 7.32.0 + dev: false + + /eslint-plugin-react/7.31.7_eslint@7.32.0: + resolution: + { + integrity: sha512-8NldBTeYp/kQoTV1uT0XF6HcmDqbgZ0lNPkN0wlRw8DJKXEnaWu+oh/6gt3xIhzvQ35wB2Y545fJhIbJSZ2NNw==, + } + engines: { node: ">=4" } + peerDependencies: + eslint: ^3 || ^4 || ^5 || ^6 || ^7 || ^8 + dependencies: + array-includes: 3.1.5 + array.prototype.flatmap: 1.3.0 + doctrine: 2.1.0 + eslint: 7.32.0 + estraverse: 5.3.0 + jsx-ast-utils: 3.3.3 + minimatch: 3.1.2 + object.entries: 1.1.5 + object.fromentries: 2.0.5 + object.hasown: 1.1.1 + object.values: 1.1.5 + prop-types: 15.8.1 + resolve: 2.0.0-next.4 + semver: 6.3.0 + string.prototype.matchall: 4.0.7 + dev: false + + /eslint-plugin-turbo/0.0.3_eslint@7.32.0: + resolution: + { + integrity: 
sha512-QjidATGxWtaB9QUrD3NocUySmsgWKZlBMFlw4kX2IIjRLAxMPwukk90h3ZTaNXyRHuaQsrEgh7hhlCZoxP0TTw==, + } + peerDependencies: + eslint: ^7.23.0 || ^8.0.0 + dependencies: + eslint: 7.32.0 + dev: false + + /eslint-scope/5.1.1: + resolution: + { + integrity: sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw==, + } + engines: { node: ">=8.0.0" } + dependencies: + esrecurse: 4.3.0 + estraverse: 4.3.0 + + /eslint-utils/2.1.0: + resolution: + { + integrity: sha512-w94dQYoauyvlDc43XnGB8lU3Zt713vNChgt4EWwhXAP2XkBvndfxF0AgIqKOOasjPIPzj9JqgwkwbCYD0/V3Zg==, + } + engines: { node: ">=6" } + dependencies: + eslint-visitor-keys: 1.3.0 + + /eslint-visitor-keys/1.3.0: + resolution: + { + integrity: sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ==, + } + engines: { node: ">=4" } + + /eslint-visitor-keys/2.1.0: + resolution: + { + integrity: sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw==, + } + engines: { node: ">=10" } + + /eslint-visitor-keys/3.3.0: + resolution: + { + integrity: sha512-mQ+suqKJVyeuwGYHAdjMFqjCyfl8+Ldnxuyp3ldiMBFKkvytrXUZWaiPCEav8qDHKty44bD+qV1IP4T+w+xXRA==, + } + engines: { node: ^12.22.0 || ^14.17.0 || >=16.0.0 } + dev: false + + /eslint/7.32.0: + resolution: + { + integrity: sha512-VHZ8gX+EDfz+97jGcgyGCyRia/dPOd6Xh9yPv8Bl1+SoaIwD+a/vlrOmGRUyOYu7MwUhc7CxqeaDZU13S4+EpA==, + } + engines: { node: ^10.12.0 || >=12.0.0 } + hasBin: true + dependencies: + "@babel/code-frame": 7.12.11 + "@eslint/eslintrc": 0.4.3 + "@humanwhocodes/config-array": 0.5.0 + ajv: 6.12.6 + chalk: 4.1.2 + cross-spawn: 7.0.3 + debug: 4.3.4 + doctrine: 3.0.0 + enquirer: 2.3.6 + escape-string-regexp: 4.0.0 + eslint-scope: 5.1.1 + eslint-utils: 2.1.0 + eslint-visitor-keys: 2.1.0 + espree: 7.3.1 + esquery: 1.4.0 + esutils: 2.0.3 + fast-deep-equal: 3.1.3 + file-entry-cache: 6.0.1 + functional-red-black-tree: 1.0.1 + glob-parent: 5.1.2 + globals: 13.17.0 + ignore: 4.0.6 + import-fresh: 3.3.0 + imurmurhash: 0.1.4 + is-glob: 4.0.3 + js-yaml: 3.14.1 + json-stable-stringify-without-jsonify: 1.0.1 + levn: 0.4.1 + lodash.merge: 4.6.2 + minimatch: 3.1.2 + natural-compare: 1.4.0 + optionator: 0.9.1 + progress: 2.0.3 + regexpp: 3.2.0 + semver: 7.3.7 + strip-ansi: 6.0.1 + strip-json-comments: 3.1.1 + table: 6.8.0 + text-table: 0.2.0 + v8-compile-cache: 2.3.0 + transitivePeerDependencies: + - supports-color + + /espree/7.3.1: + resolution: + { + integrity: sha512-v3JCNCE64umkFpmkFGqzVKsOT0tN1Zr+ueqLZfpV1Ob8e+CEgPWa+OxCoGH3tnhimMKIaBm4m/vaRpJ/krRz2g==, + } + engines: { node: ^10.12.0 || >=12.0.0 } + dependencies: + acorn: 7.4.1 + acorn-jsx: 5.3.2_acorn@7.4.1 + eslint-visitor-keys: 1.3.0 + + /esprima/4.0.1: + resolution: + { + integrity: sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==, + } + engines: { node: ">=4" } + hasBin: true + + /esquery/1.4.0: + resolution: + { + integrity: sha512-cCDispWt5vHHtwMY2YrAQ4ibFkAL8RbH5YGBnZBc90MolvvfkkQcJro/aZiAQUlQ3qgrYS6D6v8Gc5G5CQsc9w==, + } + engines: { node: ">=0.10" } + dependencies: + estraverse: 5.3.0 + + /esrecurse/4.3.0: + resolution: + { + integrity: sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==, + } + engines: { node: ">=4.0" } + dependencies: + estraverse: 5.3.0 + + /estraverse/4.3.0: + resolution: + { + integrity: sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==, + } + engines: { node: ">=4.0" } + + 
/estraverse/5.3.0: + resolution: + { + integrity: sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==, + } + engines: { node: ">=4.0" } + + /esutils/2.0.3: + resolution: + { + integrity: sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==, + } + engines: { node: ">=0.10.0" } + + /fast-deep-equal/3.1.3: + resolution: + { + integrity: sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==, + } + + /fast-glob/3.2.12: + resolution: + { + integrity: sha512-DVj4CQIYYow0BlaelwK1pHl5n5cRSJfM60UA0zK891sVInoPri2Ekj7+e1CT3/3qxXenpI+nBBmQAcJPJgaj4w==, + } + engines: { node: ">=8.6.0" } + dependencies: + "@nodelib/fs.stat": 2.0.5 + "@nodelib/fs.walk": 1.2.8 + glob-parent: 5.1.2 + merge2: 1.4.1 + micromatch: 4.0.5 + dev: false + + /fast-json-stable-stringify/2.1.0: + resolution: + { + integrity: sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==, + } + + /fast-levenshtein/2.0.6: + resolution: + { + integrity: sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==, + } + + /fastq/1.13.0: + resolution: + { + integrity: sha512-YpkpUnK8od0o1hmeSc7UUs/eB/vIPWJYjKck2QKIzAf71Vm1AAQ3EbuZB3g2JIy+pg+ERD0vqI79KyZiB2e2Nw==, + } + dependencies: + reusify: 1.0.4 + dev: false + + /file-entry-cache/6.0.1: + resolution: + { + integrity: sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==, + } + engines: { node: ^10.12.0 || >=12.0.0 } + dependencies: + flat-cache: 3.0.4 + + /fill-range/7.0.1: + resolution: + { + integrity: sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==, + } + engines: { node: ">=8" } + dependencies: + to-regex-range: 5.0.1 + dev: false + + /flat-cache/3.0.4: + resolution: + { + integrity: sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg==, + } + engines: { node: ^10.12.0 || >=12.0.0 } + dependencies: + flatted: 3.2.7 + rimraf: 3.0.2 + + /flatted/3.2.7: + resolution: + { + integrity: sha512-5nqDSxl8nn5BSNxyR3n4I6eDmbolI6WT+QqR547RwxQapgjQBmtktdP+HTBb/a/zLsbzERTONyUB5pefh5TtjQ==, + } + + /fs.realpath/1.0.0: + resolution: + { + integrity: sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==, + } + + /function-bind/1.1.1: + resolution: + { + integrity: sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==, + } + dev: false + + /function.prototype.name/1.1.5: + resolution: + { + integrity: sha512-uN7m/BzVKQnCUF/iW8jYea67v++2u7m5UgENbHRtdDVclOUP+FMPlCNdmk0h/ysGyo2tavMJEDqJAkJdRa1vMA==, + } + engines: { node: ">= 0.4" } + dependencies: + call-bind: 1.0.2 + define-properties: 1.1.4 + es-abstract: 1.20.2 + functions-have-names: 1.2.3 + dev: false + + /functional-red-black-tree/1.0.1: + resolution: + { + integrity: sha512-dsKNQNdj6xA3T+QlADDA7mOSlX0qiMINjn0cgr+eGHGsbSHzTabcIogz2+p/iqP1Xs6EP/sS2SbqH+brGTbq0g==, + } + + /functions-have-names/1.2.3: + resolution: + { + integrity: sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ==, + } + dev: false + + /gensync/1.0.0-beta.2: + resolution: + { + integrity: sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==, + } + engines: { node: ">=6.9.0" } + + /get-intrinsic/1.1.3: + resolution: + { + integrity: 
sha512-QJVz1Tj7MS099PevUG5jvnt9tSkXN8K14dxQlikJuPt4uD9hHAHjLyLBiLR5zELelBdD9QNRAXZzsJx0WaDL9A==, + } + dependencies: + function-bind: 1.1.1 + has: 1.0.3 + has-symbols: 1.0.3 + dev: false + + /get-symbol-description/1.0.0: + resolution: + { + integrity: sha512-2EmdH1YvIQiZpltCNgkuiUnyukzxM/R6NDJX31Ke3BG1Nq5b0S2PhX59UKi9vZpPDQVdqn+1IcaAwnzTT5vCjw==, + } + engines: { node: ">= 0.4" } + dependencies: + call-bind: 1.0.2 + get-intrinsic: 1.1.3 + dev: false + + /glob-parent/5.1.2: + resolution: + { + integrity: sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==, + } + engines: { node: ">= 6" } + dependencies: + is-glob: 4.0.3 + + /glob/7.1.7: + resolution: + { + integrity: sha512-OvD9ENzPLbegENnYP5UUfJIirTg4+XwMWGaQfQTY0JenxNvvIKP3U3/tAQSPIu/lHxXYSZmpXlUHeqAIdKzBLQ==, + } + dependencies: + fs.realpath: 1.0.0 + inflight: 1.0.6 + inherits: 2.0.4 + minimatch: 3.1.2 + once: 1.4.0 + path-is-absolute: 1.0.1 + dev: false + + /glob/7.2.3: + resolution: + { + integrity: sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==, + } + dependencies: + fs.realpath: 1.0.0 + inflight: 1.0.6 + inherits: 2.0.4 + minimatch: 3.1.2 + once: 1.4.0 + path-is-absolute: 1.0.1 + + /globals/11.12.0: + resolution: + { + integrity: sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==, + } + engines: { node: ">=4" } + + /globals/13.17.0: + resolution: + { + integrity: sha512-1C+6nQRb1GwGMKm2dH/E7enFAMxGTmGI7/dEdhy/DNelv85w9B72t3uc5frtMNXIbzrarJJ/lTCjcaZwbLJmyw==, + } + engines: { node: ">=8" } + dependencies: + type-fest: 0.20.2 + + /globby/11.1.0: + resolution: + { + integrity: sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==, + } + engines: { node: ">=10" } + dependencies: + array-union: 2.1.0 + dir-glob: 3.0.1 + fast-glob: 3.2.12 + ignore: 5.2.0 + merge2: 1.4.1 + slash: 3.0.0 + dev: false + + /graceful-fs/4.2.10: + resolution: + { + integrity: sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA==, + } + dev: true + + /has-bigints/1.0.2: + resolution: + { + integrity: sha512-tSvCKtBr9lkF0Ex0aQiP9N+OpV4zi2r/Nee5VkRDbaqv35RLYMzbwQfFSZZH0kR+Rd6302UJZ2p/bJCEoR3VoQ==, + } + dev: false + + /has-flag/3.0.0: + resolution: + { + integrity: sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==, + } + engines: { node: ">=4" } + + /has-flag/4.0.0: + resolution: + { + integrity: sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==, + } + engines: { node: ">=8" } + + /has-property-descriptors/1.0.0: + resolution: + { + integrity: sha512-62DVLZGoiEBDHQyqG4w9xCuZ7eJEwNmJRWw2VY84Oedb7WFcA27fiEVe8oUQx9hAUJ4ekurquucTGwsyO1XGdQ==, + } + dependencies: + get-intrinsic: 1.1.3 + dev: false + + /has-symbols/1.0.3: + resolution: + { + integrity: sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==, + } + engines: { node: ">= 0.4" } + dev: false + + /has-tostringtag/1.0.0: + resolution: + { + integrity: sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ==, + } + engines: { node: ">= 0.4" } + dependencies: + has-symbols: 1.0.3 + dev: false + + /has/1.0.3: + resolution: + { + integrity: sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==, + } + engines: { node: ">= 0.4.0" } + dependencies: + function-bind: 1.1.1 + dev: false + + 
/ignore/4.0.6: + resolution: + { + integrity: sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg==, + } + engines: { node: ">= 4" } + + /ignore/5.2.0: + resolution: + { + integrity: sha512-CmxgYGiEPCLhfLnpPp1MoRmifwEIOgjcHXxOBjv7mY96c+eWScsOP9c112ZyLdWHi0FxHjI+4uVhKYp/gcdRmQ==, + } + engines: { node: ">= 4" } + dev: false + + /import-fresh/3.3.0: + resolution: + { + integrity: sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==, + } + engines: { node: ">=6" } + dependencies: + parent-module: 1.0.1 + resolve-from: 4.0.0 + + /imurmurhash/0.1.4: + resolution: + { + integrity: sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==, + } + engines: { node: ">=0.8.19" } + + /inflight/1.0.6: + resolution: + { + integrity: sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==, + } + dependencies: + once: 1.4.0 + wrappy: 1.0.2 + + /inherits/2.0.4: + resolution: + { + integrity: sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==, + } + + /internal-slot/1.0.3: + resolution: + { + integrity: sha512-O0DB1JC/sPyZl7cIo78n5dR7eUSwwpYPiXRhTzNxZVAMUuB8vlnRFyLxdrVToks6XPLVnFfbzaVd5WLjhgg+vA==, + } + engines: { node: ">= 0.4" } + dependencies: + get-intrinsic: 1.1.3 + has: 1.0.3 + side-channel: 1.0.4 + dev: false + + /is-bigint/1.0.4: + resolution: + { + integrity: sha512-zB9CruMamjym81i2JZ3UMn54PKGsQzsJeo6xvN3HJJ4CAsQNB6iRutp2To77OfCNuoxspsIhzaPoO1zyCEhFOg==, + } + dependencies: + has-bigints: 1.0.2 + dev: false + + /is-boolean-object/1.1.2: + resolution: + { + integrity: sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA==, + } + engines: { node: ">= 0.4" } + dependencies: + call-bind: 1.0.2 + has-tostringtag: 1.0.0 + dev: false + + /is-callable/1.2.5: + resolution: + { + integrity: sha512-ZIWRujF6MvYGkEuHMYtFRkL2wAtFw89EHfKlXrkPkjQZZRWeh9L1q3SV13NIfHnqxugjLvAOkEHx9mb1zcMnEw==, + } + engines: { node: ">= 0.4" } + dev: false + + /is-core-module/2.10.0: + resolution: + { + integrity: sha512-Erxj2n/LDAZ7H8WNJXd9tw38GYM3dv8rk8Zcs+jJuxYTW7sozH+SS8NtrSjVL1/vpLvWi1hxy96IzjJ3EHTJJg==, + } + dependencies: + has: 1.0.3 + dev: false + + /is-date-object/1.0.5: + resolution: + { + integrity: sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ==, + } + engines: { node: ">= 0.4" } + dependencies: + has-tostringtag: 1.0.0 + dev: false + + /is-extglob/2.1.1: + resolution: + { + integrity: sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==, + } + engines: { node: ">=0.10.0" } + + /is-fullwidth-code-point/3.0.0: + resolution: + { + integrity: sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==, + } + engines: { node: ">=8" } + + /is-glob/4.0.3: + resolution: + { + integrity: sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==, + } + engines: { node: ">=0.10.0" } + dependencies: + is-extglob: 2.1.1 + + /is-negative-zero/2.0.2: + resolution: + { + integrity: sha512-dqJvarLawXsFbNDeJW7zAz8ItJ9cd28YufuuFzh0G8pNHjJMnY08Dv7sYX2uF5UpQOwieAeOExEYAWWfu7ZZUA==, + } + engines: { node: ">= 0.4" } + dev: false + + /is-number-object/1.0.7: + resolution: + { + integrity: sha512-k1U0IRzLMo7ZlYIfzRu23Oh6MiIFasgpb9X76eqfFZAqwH44UI4KTBvBYIZ1dSL9ZzChTB9ShHfLkR4pdW5krQ==, + } + engines: { node: ">= 0.4" } + dependencies: + 
has-tostringtag: 1.0.0 + dev: false + + /is-number/7.0.0: + resolution: + { + integrity: sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==, + } + engines: { node: ">=0.12.0" } + dev: false + + /is-regex/1.1.4: + resolution: + { + integrity: sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg==, + } + engines: { node: ">= 0.4" } + dependencies: + call-bind: 1.0.2 + has-tostringtag: 1.0.0 + dev: false + + /is-shared-array-buffer/1.0.2: + resolution: + { + integrity: sha512-sqN2UDu1/0y6uvXyStCOzyhAjCSlHceFoMKJW8W9EU9cvic/QdsZ0kEU93HEy3IUEFZIiH/3w+AH/UQbPHNdhA==, + } + dependencies: + call-bind: 1.0.2 + dev: false + + /is-string/1.0.7: + resolution: + { + integrity: sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg==, + } + engines: { node: ">= 0.4" } + dependencies: + has-tostringtag: 1.0.0 + dev: false + + /is-symbol/1.0.4: + resolution: + { + integrity: sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg==, + } + engines: { node: ">= 0.4" } + dependencies: + has-symbols: 1.0.3 + dev: false + + /is-weakref/1.0.2: + resolution: + { + integrity: sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ==, + } + dependencies: + call-bind: 1.0.2 + dev: false + + /isexe/2.0.0: + resolution: + { + integrity: sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==, + } + + /js-tokens/4.0.0: + resolution: + { + integrity: sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==, + } + + /js-yaml/3.14.1: + resolution: + { + integrity: sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==, + } + hasBin: true + dependencies: + argparse: 1.0.10 + esprima: 4.0.1 + + /jsesc/2.5.2: + resolution: + { + integrity: sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==, + } + engines: { node: ">=4" } + hasBin: true + + /json-schema-traverse/0.4.1: + resolution: + { + integrity: sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==, + } + + /json-schema-traverse/1.0.0: + resolution: + { + integrity: sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==, + } + + /json-stable-stringify-without-jsonify/1.0.1: + resolution: + { + integrity: sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==, + } + + /json5/1.0.1: + resolution: + { + integrity: sha512-aKS4WQjPenRxiQsC93MNfjx+nbF4PAdYzmd/1JIj8HYzqfbu86beTuNgXDzPknWk0n0uARlyewZo4s++ES36Ow==, + } + hasBin: true + dependencies: + minimist: 1.2.6 + dev: false + + /json5/2.2.1: + resolution: + { + integrity: sha512-1hqLFMSrGHRHxav9q9gNjJ5EXznIxGVO09xQRrwplcS8qs28pZ8s8hupZAmqDwZUmVZ2Qb2jnyPOWcDH8m8dlA==, + } + engines: { node: ">=6" } + hasBin: true + + /jsx-ast-utils/3.3.3: + resolution: + { + integrity: sha512-fYQHZTZ8jSfmWZ0iyzfwiU4WDX4HpHbMCZ3gPlWYiCl3BoeOTsqKBqnTVfH2rYT7eP5c3sVbeSPHnnJOaTrWiw==, + } + engines: { node: ">=4.0" } + dependencies: + array-includes: 3.1.5 + object.assign: 4.1.4 + dev: false + + /language-subtag-registry/0.3.22: + resolution: + { + integrity: sha512-tN0MCzyWnoz/4nHS6uxdlFWoUZT7ABptwKPQ52Ea7URk6vll88bWBVhodtnlfEuCcKWNGoc+uGbw1cwa9IKh/w==, + } + dev: false + + /language-tags/1.0.5: + resolution: + { + integrity: 
sha512-qJhlO9cGXi6hBGKoxEG/sKZDAHD5Hnu9Hs4WbOY3pCWXDhw0N8x1NenNzm2EnNLkLkk7J2SdxAkDSbb6ftT+UQ==, + } + dependencies: + language-subtag-registry: 0.3.22 + dev: false + + /levn/0.4.1: + resolution: + { + integrity: sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==, + } + engines: { node: ">= 0.8.0" } + dependencies: + prelude-ls: 1.2.1 + type-check: 0.4.0 + + /lodash.merge/4.6.2: + resolution: + { + integrity: sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==, + } + + /lodash.truncate/4.4.2: + resolution: + { + integrity: sha512-jttmRe7bRse52OsWIMDLaXxWqRAmtIUccAQ3garviCqJjafXOfNMO0yMfNpdD6zbGaTU0P5Nz7e7gAT6cKmJRw==, + } + + /lodash/4.17.21_ehchni3mpmovsvjxesffg2i5a4: + resolution: + { + integrity: sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==, + } + dev: false + patched: true + + /loose-envify/1.4.0: + resolution: + { + integrity: sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==, + } + hasBin: true + dependencies: + js-tokens: 4.0.0 + + /lru-cache/6.0.0: + resolution: + { + integrity: sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==, + } + engines: { node: ">=10" } + dependencies: + yallist: 4.0.0 + + /merge2/1.4.1: + resolution: + { + integrity: sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==, + } + engines: { node: ">= 8" } + dev: false + + /micromatch/4.0.5: + resolution: + { + integrity: sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==, + } + engines: { node: ">=8.6" } + dependencies: + braces: 3.0.2 + picomatch: 2.3.1 + dev: false + + /minimatch/3.1.2: + resolution: + { + integrity: sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==, + } + dependencies: + brace-expansion: 1.1.11 + + /minimist/1.2.6: + resolution: + { + integrity: sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==, + } + dev: false + + /ms/2.0.0: + resolution: + { + integrity: sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==, + } + dev: false + + /ms/2.1.2: + resolution: + { + integrity: sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==, + } + + /ms/2.1.3: + resolution: + { + integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==, + } + dev: false + + /nanoid/3.3.4: + resolution: + { + integrity: sha512-MqBkQh/OHTS2egovRtLk45wEyNXwF+cokD+1YPf9u5VfJiRdAiRwB2froX5Co9Rh20xs4siNPm8naNotSD6RBw==, + } + engines: { node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1 } + hasBin: true + dev: false + + /natural-compare/1.4.0: + resolution: + { + integrity: sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==, + } + + /next-transpile-modules/9.0.0: + resolution: + { + integrity: sha512-VCNFOazIAnXn1hvgYYSTYMnoWgKgwlYh4lm1pKbSfiB3kj5ZYLcKVhfh3jkPOg1cnd9DP+pte9yCUocdPEUBTQ==, + } + dependencies: + enhanced-resolve: 5.10.0 + escalade: 3.1.1 + dev: true + + /next/12.2.5_ir3quccc6i62x6qn6jjhyjjiey: + resolution: + { + integrity: sha512-tBdjqX5XC/oFs/6gxrZhjmiq90YWizUYU6qOWAfat7zJwrwapJ+BYgX2PmiacunXMaRpeVT4vz5MSPSLgNkrpA==, + } + engines: { node: ">=12.22.0" } + hasBin: true + peerDependencies: + fibers: ">= 3.1.0" + node-sass: ^6.0.0 || ^7.0.0 
+ react: ^17.0.2 || ^18.0.0-0 + react-dom: ^17.0.2 || ^18.0.0-0 + sass: ^1.3.0 + peerDependenciesMeta: + fibers: + optional: true + node-sass: + optional: true + sass: + optional: true + dependencies: + "@next/env": 12.2.5 + "@swc/helpers": 0.4.3 + caniuse-lite: 1.0.30001399 + postcss: 8.4.14 + react: 18.2.0 + react-dom: 18.2.0_react@18.2.0 + styled-jsx: 5.0.4_3toe27fv7etiytxb5kxc7fxaw4 + use-sync-external-store: 1.2.0_react@18.2.0 + optionalDependencies: + "@next/swc-android-arm-eabi": 12.2.5 + "@next/swc-android-arm64": 12.2.5 + "@next/swc-darwin-arm64": 12.2.5 + "@next/swc-darwin-x64": 12.2.5 + "@next/swc-freebsd-x64": 12.2.5 + "@next/swc-linux-arm-gnueabihf": 12.2.5 + "@next/swc-linux-arm64-gnu": 12.2.5 + "@next/swc-linux-arm64-musl": 12.2.5 + "@next/swc-linux-x64-gnu": 12.2.5 + "@next/swc-linux-x64-musl": 12.2.5 + "@next/swc-win32-arm64-msvc": 12.2.5 + "@next/swc-win32-ia32-msvc": 12.2.5 + "@next/swc-win32-x64-msvc": 12.2.5 + transitivePeerDependencies: + - "@babel/core" + - babel-plugin-macros + dev: false + + /node-releases/2.0.6: + resolution: + { + integrity: sha512-PiVXnNuFm5+iYkLBNeq5211hvO38y63T0i2KKh2KnUs3RpzJ+JtODFjkD8yjLwnDkTYF1eKXheUwdssR+NRZdg==, + } + + /object-assign/4.1.1: + resolution: + { + integrity: sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==, + } + engines: { node: ">=0.10.0" } + dev: false + + /object-inspect/1.12.2: + resolution: + { + integrity: sha512-z+cPxW0QGUp0mcqcsgQyLVRDoXFQbXOwBaqyF7VIgI4TWNQsDHrBpUQslRmIfAoYWdYzs6UlKJtB2XJpTaNSpQ==, + } + dev: false + + /object-keys/1.1.1: + resolution: + { + integrity: sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==, + } + engines: { node: ">= 0.4" } + dev: false + + /object.assign/4.1.4: + resolution: + { + integrity: sha512-1mxKf0e58bvyjSCtKYY4sRe9itRk3PJpquJOjeIkz885CczcI4IvJJDLPS72oowuSh+pBxUFROpX+TU++hxhZQ==, + } + engines: { node: ">= 0.4" } + dependencies: + call-bind: 1.0.2 + define-properties: 1.1.4 + has-symbols: 1.0.3 + object-keys: 1.1.1 + dev: false + + /object.entries/1.1.5: + resolution: + { + integrity: sha512-TyxmjUoZggd4OrrU1W66FMDG6CuqJxsFvymeyXI51+vQLN67zYfZseptRge703kKQdo4uccgAKebXFcRCzk4+g==, + } + engines: { node: ">= 0.4" } + dependencies: + call-bind: 1.0.2 + define-properties: 1.1.4 + es-abstract: 1.20.2 + dev: false + + /object.fromentries/2.0.5: + resolution: + { + integrity: sha512-CAyG5mWQRRiBU57Re4FKoTBjXfDoNwdFVH2Y1tS9PqCsfUTymAohOkEMSG3aRNKmv4lV3O7p1et7c187q6bynw==, + } + engines: { node: ">= 0.4" } + dependencies: + call-bind: 1.0.2 + define-properties: 1.1.4 + es-abstract: 1.20.2 + dev: false + + /object.hasown/1.1.1: + resolution: + { + integrity: sha512-LYLe4tivNQzq4JdaWW6WO3HMZZJWzkkH8fnI6EebWl0VZth2wL2Lovm74ep2/gZzlaTdV62JZHEqHQ2yVn8Q/A==, + } + dependencies: + define-properties: 1.1.4 + es-abstract: 1.20.2 + dev: false + + /object.values/1.1.5: + resolution: + { + integrity: sha512-QUZRW0ilQ3PnPpbNtgdNV1PDbEqLIiSFB3l+EnGtBQ/8SUTLj1PZwtQHABZtLgwpJZTSZhuGLOGk57Drx2IvYg==, + } + engines: { node: ">= 0.4" } + dependencies: + call-bind: 1.0.2 + define-properties: 1.1.4 + es-abstract: 1.20.2 + dev: false + + /once/1.4.0: + resolution: + { + integrity: sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==, + } + dependencies: + wrappy: 1.0.2 + + /optionator/0.9.1: + resolution: + { + integrity: sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw==, + } + engines: { node: ">= 0.8.0" } + dependencies: + 
deep-is: 0.1.4 + fast-levenshtein: 2.0.6 + levn: 0.4.1 + prelude-ls: 1.2.1 + type-check: 0.4.0 + word-wrap: 1.2.3 + + /parent-module/1.0.1: + resolution: + { + integrity: sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==, + } + engines: { node: ">=6" } + dependencies: + callsites: 3.1.0 + + /path-is-absolute/1.0.1: + resolution: + { + integrity: sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==, + } + engines: { node: ">=0.10.0" } + + /path-key/3.1.1: + resolution: + { + integrity: sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==, + } + engines: { node: ">=8" } + + /path-parse/1.0.7: + resolution: + { + integrity: sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==, + } + dev: false + + /path-type/4.0.0: + resolution: + { + integrity: sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==, + } + engines: { node: ">=8" } + dev: false + + /picocolors/1.0.0: + resolution: + { + integrity: sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==, + } + + /picomatch/2.3.1: + resolution: + { + integrity: sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==, + } + engines: { node: ">=8.6" } + dev: false + + /postcss/8.4.14: + resolution: + { + integrity: sha512-E398TUmfAYFPBSdzgeieK2Y1+1cpdxJx8yXbK/m57nRhKSmk1GB2tO4lbLBtlkfPQTDKfe4Xqv1ASWPpayPEig==, + } + engines: { node: ^10 || ^12 || >=14 } + dependencies: + nanoid: 3.3.4 + picocolors: 1.0.0 + source-map-js: 1.0.2 + dev: false + + /prelude-ls/1.2.1: + resolution: + { + integrity: sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==, + } + engines: { node: ">= 0.8.0" } + + /prettier/2.7.1: + resolution: + { + integrity: sha512-ujppO+MkdPqoVINuDFDRLClm7D78qbDt0/NR+wp5FqEZOoTNAjPHWj17QRhu7geIHJfcNhRk1XVQmF8Bp3ye+g==, + } + engines: { node: ">=10.13.0" } + hasBin: true + dev: true + + /progress/2.0.3: + resolution: + { + integrity: sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA==, + } + engines: { node: ">=0.4.0" } + + /prop-types/15.8.1: + resolution: + { + integrity: sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg==, + } + dependencies: + loose-envify: 1.4.0 + object-assign: 4.1.1 + react-is: 16.13.1 + dev: false + + /punycode/2.1.1: + resolution: + { + integrity: sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==, + } + engines: { node: ">=6" } + + /queue-microtask/1.2.3: + resolution: + { + integrity: sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==, + } + dev: false + + /react-dom/18.2.0_react@18.2.0: + resolution: + { + integrity: sha512-6IMTriUmvsjHUjNtEDudZfuDQUoWXVxKHhlEGSk81n4YFS+r/Kl99wXiwlVXtPBtJenozv2P+hxDsw9eA7Xo6g==, + } + peerDependencies: + react: ^18.2.0 + dependencies: + loose-envify: 1.4.0 + react: 18.2.0 + scheduler: 0.23.0 + dev: false + + /react-is/16.13.1: + resolution: + { + integrity: sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==, + } + dev: false + + /react/18.2.0: + resolution: + { + integrity: sha512-/3IjMdb2L9QbBdWiW5e3P2/npwMBaU9mHCSCUzNln0ZCYbcfTsGbTJrU/kGemdH2IWmB2ioZ+zkxtmq6g09fGQ==, + } + engines: { node: ">=0.10.0" } + dependencies: + 
loose-envify: 1.4.0 + + /regenerator-runtime/0.13.9: + resolution: + { + integrity: sha512-p3VT+cOEgxFsRRA9X4lkI1E+k2/CtnKtU4gcxyaCUreilL/vqI6CdZ3wxVUx3UOUg+gnUOQQcRI7BmSI656MYA==, + } + dev: false + + /regexp.prototype.flags/1.4.3: + resolution: + { + integrity: sha512-fjggEOO3slI6Wvgjwflkc4NFRCTZAu5CnNfBd5qOMYhWdn67nJBBu34/TkD++eeFmd8C9r9jfXJ27+nSiRkSUA==, + } + engines: { node: ">= 0.4" } + dependencies: + call-bind: 1.0.2 + define-properties: 1.1.4 + functions-have-names: 1.2.3 + dev: false + + /regexpp/3.2.0: + resolution: + { + integrity: sha512-pq2bWo9mVD43nbts2wGv17XLiNLya+GklZ8kaDLV2Z08gDCsGpnKn9BFMepvWuHCbyVvY7J5o5+BVvoQbmlJLg==, + } + engines: { node: ">=8" } + + /require-from-string/2.0.2: + resolution: + { + integrity: sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==, + } + engines: { node: ">=0.10.0" } + + /resolve-from/4.0.0: + resolution: + { + integrity: sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==, + } + engines: { node: ">=4" } + + /resolve/1.22.1: + resolution: + { + integrity: sha512-nBpuuYuY5jFsli/JIs1oldw6fOQCBioohqWZg/2hiaOybXOft4lonv85uDOKXdf8rhyK159cxU5cDcK/NKk8zw==, + } + hasBin: true + dependencies: + is-core-module: 2.10.0 + path-parse: 1.0.7 + supports-preserve-symlinks-flag: 1.0.0 + dev: false + + /resolve/2.0.0-next.4: + resolution: + { + integrity: sha512-iMDbmAWtfU+MHpxt/I5iWI7cY6YVEZUQ3MBgPQ++XD1PELuJHIl82xBmObyP2KyQmkNB2dsqF7seoQQiAn5yDQ==, + } + hasBin: true + dependencies: + is-core-module: 2.10.0 + path-parse: 1.0.7 + supports-preserve-symlinks-flag: 1.0.0 + dev: false + + /reusify/1.0.4: + resolution: + { + integrity: sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==, + } + engines: { iojs: ">=1.0.0", node: ">=0.10.0" } + dev: false + + /rimraf/3.0.2: + resolution: + { + integrity: sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==, + } + hasBin: true + dependencies: + glob: 7.2.3 + + /run-parallel/1.2.0: + resolution: + { + integrity: sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==, + } + dependencies: + queue-microtask: 1.2.3 + dev: false + + /safe-buffer/5.1.2: + resolution: + { + integrity: sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==, + } + + /scheduler/0.23.0: + resolution: + { + integrity: sha512-CtuThmgHNg7zIZWAXi3AsyIzA3n4xx7aNyjwC2VJldO2LMVDhFK+63xGqq6CsJH4rTAt6/M+N4GhZiDYPx9eUw==, + } + dependencies: + loose-envify: 1.4.0 + dev: false + + /semver/6.3.0: + resolution: + { + integrity: sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==, + } + hasBin: true + + /semver/7.3.7: + resolution: + { + integrity: sha512-QlYTucUYOews+WeEujDoEGziz4K6c47V/Bd+LjSSYcA94p+DmINdf7ncaUinThfvZyu13lN9OY1XDxt8C0Tw0g==, + } + engines: { node: ">=10" } + hasBin: true + dependencies: + lru-cache: 6.0.0 + + /shebang-command/2.0.0: + resolution: + { + integrity: sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==, + } + engines: { node: ">=8" } + dependencies: + shebang-regex: 3.0.0 + + /shebang-regex/3.0.0: + resolution: + { + integrity: sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==, + } + engines: { node: ">=8" } + + /side-channel/1.0.4: + resolution: + { + integrity: 
sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==, + } + dependencies: + call-bind: 1.0.2 + get-intrinsic: 1.1.3 + object-inspect: 1.12.2 + dev: false + + /slash/3.0.0: + resolution: + { + integrity: sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==, + } + engines: { node: ">=8" } + dev: false + + /slice-ansi/4.0.0: + resolution: + { + integrity: sha512-qMCMfhY040cVHT43K9BFygqYbUPFZKHOg7K73mtTWJRb8pyP3fzf4Ixd5SzdEJQ6MRUg/WBnOLxghZtKKurENQ==, + } + engines: { node: ">=10" } + dependencies: + ansi-styles: 4.3.0 + astral-regex: 2.0.0 + is-fullwidth-code-point: 3.0.0 + + /source-map-js/1.0.2: + resolution: + { + integrity: sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw==, + } + engines: { node: ">=0.10.0" } + dev: false + + /sprintf-js/1.0.3: + resolution: + { + integrity: sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==, + } + + /string-width/4.2.3: + resolution: + { + integrity: sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==, + } + engines: { node: ">=8" } + dependencies: + emoji-regex: 8.0.0 + is-fullwidth-code-point: 3.0.0 + strip-ansi: 6.0.1 + + /string.prototype.matchall/4.0.7: + resolution: + { + integrity: sha512-f48okCX7JiwVi1NXCVWcFnZgADDC/n2vePlQ/KUCNqCikLLilQvwjMO8+BHVKvgzH0JB0J9LEPgxOGT02RoETg==, + } + dependencies: + call-bind: 1.0.2 + define-properties: 1.1.4 + es-abstract: 1.20.2 + get-intrinsic: 1.1.3 + has-symbols: 1.0.3 + internal-slot: 1.0.3 + regexp.prototype.flags: 1.4.3 + side-channel: 1.0.4 + dev: false + + /string.prototype.trimend/1.0.5: + resolution: + { + integrity: sha512-I7RGvmjV4pJ7O3kdf+LXFpVfdNOxtCW/2C8f6jNiW4+PQchwxkCDzlk1/7p+Wl4bqFIZeF47qAHXLuHHWKAxog==, + } + dependencies: + call-bind: 1.0.2 + define-properties: 1.1.4 + es-abstract: 1.20.2 + dev: false + + /string.prototype.trimstart/1.0.5: + resolution: + { + integrity: sha512-THx16TJCGlsN0o6dl2o6ncWUsdgnLRSA23rRE5pyGBw/mLr3Ej/R2LaqCtgP8VNMGZsvMWnf9ooZPyY2bHvUFg==, + } + dependencies: + call-bind: 1.0.2 + define-properties: 1.1.4 + es-abstract: 1.20.2 + dev: false + + /strip-ansi/6.0.1: + resolution: + { + integrity: sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==, + } + engines: { node: ">=8" } + dependencies: + ansi-regex: 5.0.1 + + /strip-bom/3.0.0: + resolution: + { + integrity: sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA==, + } + engines: { node: ">=4" } + dev: false + + /strip-json-comments/3.1.1: + resolution: + { + integrity: sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==, + } + engines: { node: ">=8" } + + /styled-jsx/5.0.4_3toe27fv7etiytxb5kxc7fxaw4: + resolution: + { + integrity: sha512-sDFWLbg4zR+UkNzfk5lPilyIgtpddfxXEULxhujorr5jtePTUqiPDc5BC0v1NRqTr/WaFBGQQUoYToGlF4B2KQ==, + } + engines: { node: ">= 12.0.0" } + peerDependencies: + "@babel/core": "*" + babel-plugin-macros: "*" + react: ">= 16.8.0 || 17.x.x || ^18.0.0-0" + peerDependenciesMeta: + "@babel/core": + optional: true + babel-plugin-macros: + optional: true + dependencies: + "@babel/core": 7.19.1 + react: 18.2.0 + dev: false + + /supports-color/5.5.0: + resolution: + { + integrity: sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==, + } + engines: { node: ">=4" } + dependencies: + has-flag: 3.0.0 + + 
/supports-color/7.2.0: + resolution: + { + integrity: sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==, + } + engines: { node: ">=8" } + dependencies: + has-flag: 4.0.0 + + /supports-preserve-symlinks-flag/1.0.0: + resolution: + { + integrity: sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==, + } + engines: { node: ">= 0.4" } + dev: false + + /table/6.8.0: + resolution: + { + integrity: sha512-s/fitrbVeEyHKFa7mFdkuQMWlH1Wgw/yEXMt5xACT4ZpzWFluehAxRtUUQKPuWhaLAWhFcVx6w3oC8VKaUfPGA==, + } + engines: { node: ">=10.0.0" } + dependencies: + ajv: 8.11.0 + lodash.truncate: 4.4.2 + slice-ansi: 4.0.0 + string-width: 4.2.3 + strip-ansi: 6.0.1 + + /tapable/2.2.1: + resolution: + { + integrity: sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ==, + } + engines: { node: ">=6" } + dev: true + + /text-table/0.2.0: + resolution: + { + integrity: sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==, + } + + /to-fast-properties/2.0.0: + resolution: + { + integrity: sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog==, + } + engines: { node: ">=4" } + + /to-regex-range/5.0.1: + resolution: + { + integrity: sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==, + } + engines: { node: ">=8.0" } + dependencies: + is-number: 7.0.0 + dev: false + + /tsconfig-paths/3.14.1: + resolution: + { + integrity: sha512-fxDhWnFSLt3VuTwtvJt5fpwxBHg5AdKWMsgcPOOIilyjymcYVZoCQF8fvFRezCNfblEXmi+PcM1eYHeOAgXCOQ==, + } + dependencies: + "@types/json5": 0.0.29 + json5: 1.0.1 + minimist: 1.2.6 + strip-bom: 3.0.0 + dev: false + + /tslib/1.14.1: + resolution: + { + integrity: sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==, + } + dev: false + + /tslib/2.4.0: + resolution: + { + integrity: sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==, + } + dev: false + + /tsutils/3.21.0_typescript@4.8.3: + resolution: + { + integrity: sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA==, + } + engines: { node: ">= 6" } + peerDependencies: + typescript: ">=2.8.0 || >= 3.2.0-dev || >= 3.3.0-dev || >= 3.4.0-dev || >= 3.5.0-dev || >= 3.6.0-dev || >= 3.6.0-beta || >= 3.7.0-dev || >= 3.7.0-beta" + dependencies: + tslib: 1.14.1 + typescript: 4.8.3 + dev: false + + /turbo-android-arm64/1.4.6: + resolution: + { + integrity: sha512-YxSlHc64CF5J7yNUMiLBHkeLyzrpe75Oy7tivWb3z7ySG44BXPikk4HDJZPh0T1ELvukDwuPKkvDukJ2oCLJpA==, + } + cpu: [arm64] + os: [android] + requiresBuild: true + dev: true + optional: true + + /turbo-darwin-64/1.4.6: + resolution: + { + integrity: sha512-f6uto7LLpjwZ6iZSF+8uaDpuiTji6xmnWDxNuW23DBE8iv5mxehHd+6Ys851uKDRrPb3QdCu9ctyigKTAla5Vg==, + } + cpu: [x64] + os: [darwin] + requiresBuild: true + dev: true + optional: true + + /turbo-darwin-arm64/1.4.6: + resolution: + { + integrity: sha512-o9C6e5XyuMHQwE0fEhUxfpXxvNr2QXXWX8nxIjygxeF19AqKbk/s08vZBOEmXV6/gx/pRhZ1S2nf0PIUjKBD/Q==, + } + cpu: [arm64] + os: [darwin] + requiresBuild: true + dev: true + optional: true + + /turbo-freebsd-64/1.4.6: + resolution: + { + integrity: sha512-Gg9VOUo6McXYKGevcYjGUSmMryZyZggvpdPh7Dw3QTcT8Tsy6OBtq6WnJ2O4kFDsMigyKtEOJPceD9vDMZt3yQ==, + } + cpu: [x64] + os: [freebsd] + requiresBuild: true + dev: true + optional: true + + /turbo-freebsd-arm64/1.4.6: + resolution: + 
{ + integrity: sha512-W7VrcneWFN1QENKt5cpAPSsf9ArYBBAm3VtPBZEO5tX8kuahGlah1SKdKJXrRxYOY82wyNxDagS/rHpBlrAAzw==, + } + cpu: [arm64] + os: [freebsd] + requiresBuild: true + dev: true + optional: true + + /turbo-linux-32/1.4.6: + resolution: + { + integrity: sha512-76j/zsui6mWPX8pZVMGgF8eiKHPmKuGa2lo0A/Ja0HUvdYCOGUfHsWJGVVIeYbuEp3jsKyVt7OnMDeH9CqO6bg==, + } + cpu: [ia32] + os: [linux] + requiresBuild: true + dev: true + optional: true + + /turbo-linux-64/1.4.6: + resolution: + { + integrity: sha512-z4A37Xm7lZyO9ddtGnvQHWMrsAKX6vFBxdbtb9MY76VRblo7lWSuk4LwCeM+T+ZDJ9LBFiF7aD/diRShlLx9jA==, + } + cpu: [x64] + os: [linux] + requiresBuild: true + dev: true + optional: true + + /turbo-linux-arm/1.4.6: + resolution: + { + integrity: sha512-Uh/V3oaAdhyZW6FKPpKihAxQo3EbvLaVNnzzkBmBnvHRkqoDJHhpuG72V7nn8pzxVbJ1++NEVjvbc2kmKFvGjg==, + } + cpu: [arm] + os: [linux] + requiresBuild: true + dev: true + optional: true + + /turbo-linux-arm64/1.4.6: + resolution: + { + integrity: sha512-FW1jmOpZfOoVVvml338N0MPnYjiMyYWTaMb4T+IosgGYymcUE3xJjfXJcqfU/9/uKTyY8zG0qr9/5rw2kpMS2Q==, + } + cpu: [arm64] + os: [linux] + requiresBuild: true + dev: true + optional: true + + /turbo-linux-mips64le/1.4.6: + resolution: + { + integrity: sha512-iWaL3Pwj52BH3T2M8nXScmbSnq4+x47MYK7lJMG7FsZGAIoT5ToO1Wt1iX3GRHTcnIZYm/kCfJ1ptK/NCossLA==, + } + cpu: [mipsel] + os: [linux] + requiresBuild: true + dev: true + optional: true + + /turbo-linux-ppc64le/1.4.6: + resolution: + { + integrity: sha512-Af/KlUmpiORDyELxT7byXNWl3fefErGQMJfeqXEtAdhs8OCKQWuU+lchcZbiBZYNpL+lZoa3PAmP9Fpx7R4plA==, + } + cpu: [ppc64] + os: [linux] + requiresBuild: true + dev: true + optional: true + + /turbo-windows-32/1.4.6: + resolution: + { + integrity: sha512-NBd+XPlRSaR//lVN13Q9DOqK3CbowSvafIyGsO4jfvMsGTdyNDL6AYtFsvTKW91/G7ZhATmSEkPn2pZRuhP/DA==, + } + cpu: [ia32] + os: [win32] + requiresBuild: true + dev: true + optional: true + + /turbo-windows-64/1.4.6: + resolution: + { + integrity: sha512-86AbmG+CjzVTpn4RGtwU2CYy4zSyAc9bIQ4pDGLIpCJg6JlD11duaiMJh0SCU/HCqWLJjWDI4qD+f9WNbgPsyQ==, + } + cpu: [x64] + os: [win32] + requiresBuild: true + dev: true + optional: true + + /turbo-windows-arm64/1.4.6: + resolution: + { + integrity: sha512-V+pWcqhTtmQQ3ew8qEjYtUwzyW6tO1RgvP+6OKzItYzTnMTr1Fe42Q21V+tqRNxuNfFDKsgVJdk2p5wB87bvyQ==, + } + cpu: [arm64] + os: [win32] + requiresBuild: true + dev: true + optional: true + + /turbo/1.4.6: + resolution: + { + integrity: sha512-FKtBXlOJ7YjSK22yj4sJLCtDcHFElypt7xw9cZN7Wyv9x4XBrTmh5KP6RmcGnRR1/GJlTNwD2AY2T9QTPnHh+g==, + } + hasBin: true + requiresBuild: true + optionalDependencies: + turbo-android-arm64: 1.4.6 + turbo-darwin-64: 1.4.6 + turbo-darwin-arm64: 1.4.6 + turbo-freebsd-64: 1.4.6 + turbo-freebsd-arm64: 1.4.6 + turbo-linux-32: 1.4.6 + turbo-linux-64: 1.4.6 + turbo-linux-arm: 1.4.6 + turbo-linux-arm64: 1.4.6 + turbo-linux-mips64le: 1.4.6 + turbo-linux-ppc64le: 1.4.6 + turbo-windows-32: 1.4.6 + turbo-windows-64: 1.4.6 + turbo-windows-arm64: 1.4.6 + dev: true + + /type-check/0.4.0: + resolution: + { + integrity: sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==, + } + engines: { node: ">= 0.8.0" } + dependencies: + prelude-ls: 1.2.1 + + /type-fest/0.20.2: + resolution: + { + integrity: sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==, + } + engines: { node: ">=10" } + + /typescript/4.8.3: + resolution: + { + integrity: sha512-goMHfm00nWPa8UvR/CPSvykqf6dVV8x/dp0c5mFTMTIu0u0FlGWRioyy7Nn0PGAdHxpJZnuO/ut+PpQ8UiHAig==, + } + engines: { node: 
">=4.2.0" } + hasBin: true + + /unbox-primitive/1.0.2: + resolution: + { + integrity: sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw==, + } + dependencies: + call-bind: 1.0.2 + has-bigints: 1.0.2 + has-symbols: 1.0.3 + which-boxed-primitive: 1.0.2 + dev: false + + /underscore/1.13.4_3pbfs36izefyn2uycmknwkvuuy: + resolution: + { + integrity: sha512-BQFnUDuAQ4Yf/cYY5LNrK9NCJFKriaRbD9uR1fTeXnBeoa97W0i41qkZfGO9pSo8I5KzjAcSY2XYtdf0oKd7KQ==, + } + dev: false + patched: true + + /update-browserslist-db/1.0.9_browserslist@4.21.3: + resolution: + { + integrity: sha512-/xsqn21EGVdXI3EXSum1Yckj3ZVZugqyOZQ/CxYPBD/R+ko9NSUScf8tFF4dOKY+2pvSSJA/S+5B8s4Zr4kyvg==, + } + hasBin: true + peerDependencies: + browserslist: ">= 4.21.0" + dependencies: + browserslist: 4.21.3 + escalade: 3.1.1 + picocolors: 1.0.0 + + /uri-js/4.4.1: + resolution: + { + integrity: sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==, + } + dependencies: + punycode: 2.1.1 + + /use-sync-external-store/1.2.0_react@18.2.0: + resolution: + { + integrity: sha512-eEgnFxGQ1Ife9bzYs6VLi8/4X6CObHMw9Qr9tPY43iKwsPw8xE8+EFsf/2cFZ5S3esXgpWgtSCtLNS41F+sKPA==, + } + peerDependencies: + react: ^16.8.0 || ^17.0.0 || ^18.0.0 + dependencies: + react: 18.2.0 + dev: false + + /v8-compile-cache/2.3.0: + resolution: + { + integrity: sha512-l8lCEmLcLYZh4nbunNZvQCJc5pv7+RCwa8q/LdUx8u7lsWvPDKmpodJAJNwkAhJC//dFY48KuIEmjtd4RViDrA==, + } + + /which-boxed-primitive/1.0.2: + resolution: + { + integrity: sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg==, + } + dependencies: + is-bigint: 1.0.4 + is-boolean-object: 1.1.2 + is-number-object: 1.0.7 + is-string: 1.0.7 + is-symbol: 1.0.4 + dev: false + + /which/2.0.2: + resolution: + { + integrity: sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==, + } + engines: { node: ">= 8" } + hasBin: true + dependencies: + isexe: 2.0.0 + + /word-wrap/1.2.3: + resolution: + { + integrity: sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==, + } + engines: { node: ">=0.10.0" } + + /wrappy/1.0.2: + resolution: + { + integrity: sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==, + } + + /yallist/4.0.0: + resolution: + { + integrity: sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==, + } + + file:packages/ui: + resolution: { directory: packages/ui, type: directory } + name: ui + version: 0.0.0 + dev: false + + github.com/peerigon/dashboard-icons/ce27ef933144e09cef3911025f3649040a8571b6: + resolution: + { + tarball: https://codeload.github.com/peerigon/dashboard-icons/tar.gz/ce27ef933144e09cef3911025f3649040a8571b, + } + name: dashboard-icons + version: 1.0.0 + dev: false diff --git a/cli/internal/lockfile/testdata/pnpm8.yaml b/cli/internal/lockfile/testdata/pnpm8.yaml new file mode 100644 index 0000000..d7d9e27 --- /dev/null +++ b/cli/internal/lockfile/testdata/pnpm8.yaml @@ -0,0 +1,107 @@ +lockfileVersion: "6.0" + +patchedDependencies: + is-even@1.0.0: + hash: trwuddosrpxsvtoqztvint6pca + path: patches/is-even@1.0.0.patch + +importers: + .: {} + + packages/a: + dependencies: + c: + specifier: workspace:* + version: link:../c + is-odd: + specifier: ^3.0.1 + version: 3.0.1 + + packages/b: + dependencies: + c: + specifier: workspace:* + version: link:../c + is-even: + specifier: ^1.0.0 + version: 
1.0.0_trwuddosrpxsvtoqztvint6pca + + packages/c: + dependencies: + lodash: + specifier: ^4.17.21 + version: 4.17.21 + +packages: + /is-buffer@1.1.6: + resolution: + { + integrity: sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w==, + } + dev: false + + /is-even@1.0.0_trwuddosrpxsvtoqztvint6pca: + resolution: + { + integrity: sha512-LEhnkAdJqic4Dbqn58A0y52IXoHWlsueqQkKfMfdEnIYG8A1sm/GHidKkS6yvXlMoRrkM34csHnXQtOqcb+Jzg==, + } + engines: { node: ">=0.10.0" } + dependencies: + is-odd: 0.1.2 + dev: false + patched: true + + /is-number@3.0.0: + resolution: + { + integrity: sha512-4cboCqIpliH+mAvFNegjZQ4kgKc3ZUhQVr3HvWbSh5q3WH2v82ct+T2Y1hdU5Gdtorx/cLifQjqCbL7bpznLTg==, + } + engines: { node: ">=0.10.0" } + dependencies: + kind-of: 3.2.2 + dev: false + + /is-number@6.0.0: + resolution: + { + integrity: sha512-Wu1VHeILBK8KAWJUAiSZQX94GmOE45Rg6/538fKwiloUu21KncEkYGPqob2oSZ5mUT73vLGrHQjKw3KMPwfDzg==, + } + engines: { node: ">=0.10.0" } + dev: false + + /is-odd@0.1.2: + resolution: + { + integrity: sha512-Ri7C2K7o5IrUU9UEI8losXJCCD/UtsaIrkR5sxIcFg4xQ9cRJXlWA5DQvTE0yDc0krvSNLsRGXN11UPS6KyfBw==, + } + engines: { node: ">=0.10.0" } + dependencies: + is-number: 3.0.0 + dev: false + + /is-odd@3.0.1: + resolution: + { + integrity: sha512-CQpnWPrDwmP1+SMHXZhtLtJv90yiyVfluGsX5iNCVkrhQtU3TQHsUWPG9wkdk9Lgd5yNpAg9jQEo90CBaXgWMA==, + } + engines: { node: ">=4" } + dependencies: + is-number: 6.0.0 + dev: false + + /kind-of@3.2.2: + resolution: + { + integrity: sha512-NOW9QQXMoZGg/oqnVNoNTTIFEIid1627WCffUBJEdMxYApq7mNE7CpzucIPc+ZQg25Phej7IJSmX3hO+oblOtQ==, + } + engines: { node: ">=0.10.0" } + dependencies: + is-buffer: 1.1.6 + dev: false + + /lodash@4.17.21: + resolution: + { + integrity: sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==, + } + dev: false diff --git a/cli/internal/lockfile/testdata/pnpm_override.yaml b/cli/internal/lockfile/testdata/pnpm_override.yaml new file mode 100644 index 0000000..2102192 --- /dev/null +++ b/cli/internal/lockfile/testdata/pnpm_override.yaml @@ -0,0 +1,24 @@ +lockfileVersion: 5.4 + +overrides: + "@nomiclabs/hardhat-ethers": npm:hardhat-deploy-ethers@^0.3.0-beta.13 + +importers: + config/hardhat: + specifiers: + "@nomiclabs/hardhat-ethers": npm:hardhat-deploy-ethers@^0.3.0-beta.13 + dependencies: + "@nomiclabs/hardhat-ethers": /hardhat-deploy-ethers/0.3.0-beta.13_yab2ug5tvye2kp6e24l5x3z7uy + +packages: + /hardhat-deploy-ethers/0.3.0-beta.13_yab2ug5tvye2kp6e24l5x3z7uy: + resolution: + { + integrity: sha512-PdWVcKB9coqWV1L7JTpfXRCI91Cgwsm7KLmBcwZ8f0COSm1xtABHZTyz3fvF6p42cTnz1VM0QnfDvMFlIRkSNw==, + } + peerDependencies: + ethers: ^5.0.0 + hardhat: ^2.0.0 + dependencies: + ethers: 5.7.2 + hardhat: 2.12.4_typescript@4.9.4 diff --git a/cli/internal/lockfile/testdata/yarn.lock b/cli/internal/lockfile/testdata/yarn.lock new file mode 100644 index 0000000..f4272d1 --- /dev/null +++ b/cli/internal/lockfile/testdata/yarn.lock @@ -0,0 +1,2304 @@ +# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. 
+# yarn lockfile v1 + + +"@ampproject/remapping@^2.1.0": + version "2.2.0" + resolved "https://registry.yarnpkg.com/@ampproject/remapping/-/remapping-2.2.0.tgz#56c133824780de3174aed5ab6834f3026790154d" + integrity sha512-qRmjj8nj9qmLTQXXmaR1cck3UXSRMPrbsLJAasZpF+t3riI71BXed5ebIOYwQntykeZuhjsdweEc9BxH5Jc26w== + dependencies: + "@jridgewell/gen-mapping" "^0.1.0" + "@jridgewell/trace-mapping" "^0.3.9" + +"@babel/code-frame@7.12.11": + version "7.12.11" + resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.12.11.tgz#f4ad435aa263db935b8f10f2c552d23fb716a63f" + integrity sha512-Zt1yodBx1UcyiePMSkWnU4hPqhwq7hGi2nFL1LeA3EUl+q2LQx16MISgJ0+z7dnmgvP9QtIleuETGOiOH1RcIw== + dependencies: + "@babel/highlight" "^7.10.4" + +"@babel/code-frame@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.18.6.tgz#3b25d38c89600baa2dcc219edfa88a74eb2c427a" + integrity sha512-TDCmlK5eOvH+eH7cdAFlNXeVJqWIQ7gW9tY1GJIpUtFb6CmjVyq2VM3u71bOyR8CRihcCgMUYoDNyLXao3+70Q== + dependencies: + "@babel/highlight" "^7.18.6" + +"@babel/compat-data@^7.19.0": + version "7.19.0" + resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.19.0.tgz#2a592fd89bacb1fcde68de31bee4f2f2dacb0e86" + integrity sha512-y5rqgTTPTmaF5e2nVhOxw+Ur9HDJLsWb6U/KpgUzRZEdPfE6VOubXBKLdbcUTijzRptednSBDQbYZBOSqJxpJw== + +"@babel/core@^7.0.0": + version "7.19.0" + resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.19.0.tgz#d2f5f4f2033c00de8096be3c9f45772563e150c3" + integrity sha512-reM4+U7B9ss148rh2n1Qs9ASS+w94irYXga7c2jaQv9RVzpS7Mv1a9rnYYwuDa45G+DkORt9g6An2k/V4d9LbQ== + dependencies: + "@ampproject/remapping" "^2.1.0" + "@babel/code-frame" "^7.18.6" + "@babel/generator" "^7.19.0" + "@babel/helper-compilation-targets" "^7.19.0" + "@babel/helper-module-transforms" "^7.19.0" + "@babel/helpers" "^7.19.0" + "@babel/parser" "^7.19.0" + "@babel/template" "^7.18.10" + "@babel/traverse" "^7.19.0" + "@babel/types" "^7.19.0" + convert-source-map "^1.7.0" + debug "^4.1.0" + gensync "^1.0.0-beta.2" + json5 "^2.2.1" + semver "^6.3.0" + +"@babel/generator@^7.19.0": + version "7.19.0" + resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.19.0.tgz#785596c06425e59334df2ccee63ab166b738419a" + integrity sha512-S1ahxf1gZ2dpoiFgA+ohK9DIpz50bJ0CWs7Zlzb54Z4sG8qmdIrGrVqmy1sAtTVRb+9CU6U8VqT9L0Zj7hxHVg== + dependencies: + "@babel/types" "^7.19.0" + "@jridgewell/gen-mapping" "^0.3.2" + jsesc "^2.5.1" + +"@babel/helper-compilation-targets@^7.19.0": + version "7.19.0" + resolved "https://registry.yarnpkg.com/@babel/helper-compilation-targets/-/helper-compilation-targets-7.19.0.tgz#537ec8339d53e806ed422f1e06c8f17d55b96bb0" + integrity sha512-Ai5bNWXIvwDvWM7njqsG3feMlL9hCVQsPYXodsZyLwshYkZVJt59Gftau4VrE8S9IT9asd2uSP1hG6wCNw+sXA== + dependencies: + "@babel/compat-data" "^7.19.0" + "@babel/helper-validator-option" "^7.18.6" + browserslist "^4.20.2" + semver "^6.3.0" + +"@babel/helper-environment-visitor@^7.18.9": + version "7.18.9" + resolved "https://registry.yarnpkg.com/@babel/helper-environment-visitor/-/helper-environment-visitor-7.18.9.tgz#0c0cee9b35d2ca190478756865bb3528422f51be" + integrity sha512-3r/aACDJ3fhQ/EVgFy0hpj8oHyHpQc+LPtJoY9SzTThAsStm4Ptegq92vqKoE3vD706ZVFWITnMnxucw+S9Ipg== + +"@babel/helper-function-name@^7.19.0": + version "7.19.0" + resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.19.0.tgz#941574ed5390682e872e52d3f38ce9d1bef4648c" + integrity 
sha512-WAwHBINyrpqywkUH0nTnNgI5ina5TFn85HKS0pbPDfxFfhyR/aNQEn4hGi1P1JyT//I0t4OgXUlofzWILRvS5w== + dependencies: + "@babel/template" "^7.18.10" + "@babel/types" "^7.19.0" + +"@babel/helper-hoist-variables@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/helper-hoist-variables/-/helper-hoist-variables-7.18.6.tgz#d4d2c8fb4baeaa5c68b99cc8245c56554f926678" + integrity sha512-UlJQPkFqFULIcyW5sbzgbkxn2FKRgwWiRexcuaR8RNJRy8+LLveqPjwZV/bwrLZCN0eUHD/x8D0heK1ozuoo6Q== + dependencies: + "@babel/types" "^7.18.6" + +"@babel/helper-module-imports@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.18.6.tgz#1e3ebdbbd08aad1437b428c50204db13c5a3ca6e" + integrity sha512-0NFvs3VkuSYbFi1x2Vd6tKrywq+z/cLeYC/RJNFrIX/30Bf5aiGYbtvGXolEktzJH8o5E5KJ3tT+nkxuuZFVlA== + dependencies: + "@babel/types" "^7.18.6" + +"@babel/helper-module-transforms@^7.19.0": + version "7.19.0" + resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.19.0.tgz#309b230f04e22c58c6a2c0c0c7e50b216d350c30" + integrity sha512-3HBZ377Fe14RbLIA+ac3sY4PTgpxHVkFrESaWhoI5PuyXPBBX8+C34qblV9G89ZtycGJCmCI/Ut+VUDK4bltNQ== + dependencies: + "@babel/helper-environment-visitor" "^7.18.9" + "@babel/helper-module-imports" "^7.18.6" + "@babel/helper-simple-access" "^7.18.6" + "@babel/helper-split-export-declaration" "^7.18.6" + "@babel/helper-validator-identifier" "^7.18.6" + "@babel/template" "^7.18.10" + "@babel/traverse" "^7.19.0" + "@babel/types" "^7.19.0" + +"@babel/helper-simple-access@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/helper-simple-access/-/helper-simple-access-7.18.6.tgz#d6d8f51f4ac2978068df934b569f08f29788c7ea" + integrity sha512-iNpIgTgyAvDQpDj76POqg+YEt8fPxx3yaNBg3S30dxNKm2SWfYhD0TGrK/Eu9wHpUW63VQU894TsTg+GLbUa1g== + dependencies: + "@babel/types" "^7.18.6" + +"@babel/helper-split-export-declaration@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.18.6.tgz#7367949bc75b20c6d5a5d4a97bba2824ae8ef075" + integrity sha512-bde1etTx6ZyTmobl9LLMMQsaizFVZrquTEHOqKeQESMKo4PlObf+8+JA25ZsIpZhT/WEd39+vOdLXAFG/nELpA== + dependencies: + "@babel/types" "^7.18.6" + +"@babel/helper-string-parser@^7.18.10": + version "7.18.10" + resolved "https://registry.yarnpkg.com/@babel/helper-string-parser/-/helper-string-parser-7.18.10.tgz#181f22d28ebe1b3857fa575f5c290b1aaf659b56" + integrity sha512-XtIfWmeNY3i4t7t4D2t02q50HvqHybPqW2ki1kosnvWCwuCMeo81Jf0gwr85jy/neUdg5XDdeFE/80DXiO+njw== + +"@babel/helper-validator-identifier@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.18.6.tgz#9c97e30d31b2b8c72a1d08984f2ca9b574d7a076" + integrity sha512-MmetCkz9ej86nJQV+sFCxoGGrUbU3q02kgLciwkrt9QqEB7cP39oKEY0PakknEO0Gu20SskMRi+AYZ3b1TpN9g== + +"@babel/helper-validator-option@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/helper-validator-option/-/helper-validator-option-7.18.6.tgz#bf0d2b5a509b1f336099e4ff36e1a63aa5db4db8" + integrity sha512-XO7gESt5ouv/LRJdrVjkShckw6STTaB7l9BrpBaAHDeF5YZT+01PCwmR0SJHnkW6i8OwW/EVWRShfi4j2x+KQw== + +"@babel/helpers@^7.19.0": + version "7.19.0" + resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.19.0.tgz#f30534657faf246ae96551d88dd31e9d1fa1fc18" + integrity sha512-DRBCKGwIEdqY3+rPJgG/dKfQy9+08rHIAJx8q2p+HSWP87s2HCrQmaAMMyMll2kIXKCW0cO1RdQskx15Xakftg== 
+ dependencies: + "@babel/template" "^7.18.10" + "@babel/traverse" "^7.19.0" + "@babel/types" "^7.19.0" + +"@babel/highlight@^7.10.4", "@babel/highlight@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.18.6.tgz#81158601e93e2563795adcbfbdf5d64be3f2ecdf" + integrity sha512-u7stbOuYjaPezCuLj29hNW1v64M2Md2qupEKP1fHc7WdOA3DgLh37suiSrZYY7haUB7iBeQZ9P1uiRF359do3g== + dependencies: + "@babel/helper-validator-identifier" "^7.18.6" + chalk "^2.0.0" + js-tokens "^4.0.0" + +"@babel/parser@^7.18.10", "@babel/parser@^7.19.0": + version "7.19.0" + resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.19.0.tgz#497fcafb1d5b61376959c1c338745ef0577aa02c" + integrity sha512-74bEXKX2h+8rrfQUfsBfuZZHzsEs6Eql4pqy/T4Nn6Y9wNPggQOqD6z6pn5Bl8ZfysKouFZT/UXEH94ummEeQw== + +"@babel/runtime-corejs3@^7.10.2": + version "7.19.0" + resolved "https://registry.yarnpkg.com/@babel/runtime-corejs3/-/runtime-corejs3-7.19.0.tgz#0df75cb8e5ecba3ca9e658898694e5326d52397f" + integrity sha512-JyXXoCu1N8GLuKc2ii8y5RGma5FMpFeO2nAQIe0Yzrbq+rQnN+sFj47auLblR5ka6aHNGPDgv8G/iI2Grb0ldQ== + dependencies: + core-js-pure "^3.20.2" + regenerator-runtime "^0.13.4" + +"@babel/runtime@^7.10.2", "@babel/runtime@^7.18.9": + version "7.19.0" + resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.19.0.tgz#22b11c037b094d27a8a2504ea4dcff00f50e2259" + integrity sha512-eR8Lo9hnDS7tqkO7NsV+mKvCmv5boaXFSZ70DnfhcgiEne8hv9oCEd36Klw74EtizEqLsy4YnW8UWwpBVolHZA== + dependencies: + regenerator-runtime "^0.13.4" + +"@babel/template@^7.18.10": + version "7.18.10" + resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.18.10.tgz#6f9134835970d1dbf0835c0d100c9f38de0c5e71" + integrity sha512-TI+rCtooWHr3QJ27kJxfjutghu44DLnasDMwpDqCXVTal9RLp3RSYNh4NdBrRP2cQAoG9A8juOQl6P6oZG4JxA== + dependencies: + "@babel/code-frame" "^7.18.6" + "@babel/parser" "^7.18.10" + "@babel/types" "^7.18.10" + +"@babel/traverse@^7.19.0": + version "7.19.0" + resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.19.0.tgz#eb9c561c7360005c592cc645abafe0c3c4548eed" + integrity sha512-4pKpFRDh+utd2mbRC8JLnlsMUii3PMHjpL6a0SZ4NMZy7YFP9aXORxEhdMVOc9CpWtDF09IkciQLEhK7Ml7gRA== + dependencies: + "@babel/code-frame" "^7.18.6" + "@babel/generator" "^7.19.0" + "@babel/helper-environment-visitor" "^7.18.9" + "@babel/helper-function-name" "^7.19.0" + "@babel/helper-hoist-variables" "^7.18.6" + "@babel/helper-split-export-declaration" "^7.18.6" + "@babel/parser" "^7.19.0" + "@babel/types" "^7.19.0" + debug "^4.1.0" + globals "^11.1.0" + +"@babel/types@^7.18.10", "@babel/types@^7.18.6", "@babel/types@^7.19.0": + version "7.19.0" + resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.19.0.tgz#75f21d73d73dc0351f3368d28db73465f4814600" + integrity sha512-YuGopBq3ke25BVSiS6fgF49Ul9gH1x70Bcr6bqRLjWCkcX8Hre1/5+z+IiWOIerRMSSEfGZVB9z9kyq7wVs9YA== + dependencies: + "@babel/helper-string-parser" "^7.18.10" + "@babel/helper-validator-identifier" "^7.18.6" + to-fast-properties "^2.0.0" + +"@eslint/eslintrc@^0.4.3": + version "0.4.3" + resolved "https://registry.yarnpkg.com/@eslint/eslintrc/-/eslintrc-0.4.3.tgz#9e42981ef035beb3dd49add17acb96e8ff6f394c" + integrity sha512-J6KFFz5QCYUJq3pf0mjEcCJVERbzv71PUIDczuh9JkwGEzced6CO5ADLHB1rbf/+oPBtoPfMYNOpGDzCANlbXw== + dependencies: + ajv "^6.12.4" + debug "^4.1.1" + espree "^7.3.0" + globals "^13.9.0" + ignore "^4.0.6" + import-fresh "^3.2.1" + js-yaml "^3.13.1" + minimatch "^3.0.4" + strip-json-comments "^3.1.1" + +"@humanwhocodes/config-array@^0.5.0": + version "0.5.0" + 
resolved "https://registry.yarnpkg.com/@humanwhocodes/config-array/-/config-array-0.5.0.tgz#1407967d4c6eecd7388f83acf1eaf4d0c6e58ef9" + integrity sha512-FagtKFz74XrTl7y6HCzQpwDfXP0yhxe9lHLD1UZxjvZIcbyRz8zTFF/yYNfSfzU414eDwZ1SrO0Qvtyf+wFMQg== + dependencies: + "@humanwhocodes/object-schema" "^1.2.0" + debug "^4.1.1" + minimatch "^3.0.4" + +"@humanwhocodes/object-schema@^1.2.0": + version "1.2.1" + resolved "https://registry.yarnpkg.com/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz#b520529ec21d8e5945a1851dfd1c32e94e39ff45" + integrity sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA== + +"@jridgewell/gen-mapping@^0.1.0": + version "0.1.1" + resolved "https://registry.yarnpkg.com/@jridgewell/gen-mapping/-/gen-mapping-0.1.1.tgz#e5d2e450306a9491e3bd77e323e38d7aff315996" + integrity sha512-sQXCasFk+U8lWYEe66WxRDOE9PjVz4vSM51fTu3Hw+ClTpUSQb718772vH3pyS5pShp6lvQM7SxgIDXXXmOX7w== + dependencies: + "@jridgewell/set-array" "^1.0.0" + "@jridgewell/sourcemap-codec" "^1.4.10" + +"@jridgewell/gen-mapping@^0.3.2": + version "0.3.2" + resolved "https://registry.yarnpkg.com/@jridgewell/gen-mapping/-/gen-mapping-0.3.2.tgz#c1aedc61e853f2bb9f5dfe6d4442d3b565b253b9" + integrity sha512-mh65xKQAzI6iBcFzwv28KVWSmCkdRBWoOh+bYQGW3+6OZvbbN3TqMGo5hqYxQniRcH9F2VZIoJCm4pa3BPDK/A== + dependencies: + "@jridgewell/set-array" "^1.0.1" + "@jridgewell/sourcemap-codec" "^1.4.10" + "@jridgewell/trace-mapping" "^0.3.9" + +"@jridgewell/resolve-uri@^3.0.3": + version "3.1.0" + resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.1.0.tgz#2203b118c157721addfe69d47b70465463066d78" + integrity sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w== + +"@jridgewell/set-array@^1.0.0", "@jridgewell/set-array@^1.0.1": + version "1.1.2" + resolved "https://registry.yarnpkg.com/@jridgewell/set-array/-/set-array-1.1.2.tgz#7c6cf998d6d20b914c0a55a91ae928ff25965e72" + integrity sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw== + +"@jridgewell/sourcemap-codec@^1.4.10": + version "1.4.14" + resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.14.tgz#add4c98d341472a289190b424efbdb096991bb24" + integrity sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw== + +"@jridgewell/trace-mapping@^0.3.9": + version "0.3.15" + resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.15.tgz#aba35c48a38d3fd84b37e66c9c0423f9744f9774" + integrity sha512-oWZNOULl+UbhsgB51uuZzglikfIKSUBO/M9W2OfEjn7cmqoAiCgmv9lyACTUacZwBz0ITnJ2NqjU8Tx0DHL88g== + dependencies: + "@jridgewell/resolve-uri" "^3.0.3" + "@jridgewell/sourcemap-codec" "^1.4.10" + +"@next/env@12.2.5": + version "12.2.5" + resolved "https://registry.yarnpkg.com/@next/env/-/env-12.2.5.tgz#d908c57b35262b94db3e431e869b72ac3e1ad3e3" + integrity sha512-vLPLV3cpPGjUPT3PjgRj7e3nio9t6USkuew3JE/jMeon/9Mvp1WyR18v3iwnCuX7eUAm1HmAbJHHLAbcu/EJcw== + +"@next/eslint-plugin-next@12.3.0": + version "12.3.0" + resolved "https://registry.yarnpkg.com/@next/eslint-plugin-next/-/eslint-plugin-next-12.3.0.tgz#302c1f03618d5001ce92ea6826c329268759128e" + integrity sha512-jVdq1qYTNDjUtulnE8/hkPv0pHILV4jMg5La99iaY/FFm20WxVnsAZtbNnMvlPbf8dc010oO304SX9yXbg5PAw== + dependencies: + glob "7.1.7" + +"@next/swc-android-arm-eabi@12.2.5": + version "12.2.5" + resolved 
"https://registry.yarnpkg.com/@next/swc-android-arm-eabi/-/swc-android-arm-eabi-12.2.5.tgz#903a5479ab4c2705d9c08d080907475f7bacf94d" + integrity sha512-cPWClKxGhgn2dLWnspW+7psl3MoLQUcNqJqOHk2BhNcou9ARDtC0IjQkKe5qcn9qg7I7U83Gp1yh2aesZfZJMA== + +"@next/swc-android-arm64@12.2.5": + version "12.2.5" + resolved "https://registry.yarnpkg.com/@next/swc-android-arm64/-/swc-android-arm64-12.2.5.tgz#2f9a98ec4166c7860510963b31bda1f57a77c792" + integrity sha512-vMj0efliXmC5b7p+wfcQCX0AfU8IypjkzT64GiKJD9PgiA3IILNiGJr1fw2lyUDHkjeWx/5HMlMEpLnTsQslwg== + +"@next/swc-darwin-arm64@12.2.5": + version "12.2.5" + resolved "https://registry.yarnpkg.com/@next/swc-darwin-arm64/-/swc-darwin-arm64-12.2.5.tgz#31b1c3c659d54be546120c488a1e1bad21c24a1d" + integrity sha512-VOPWbO5EFr6snla/WcxUKtvzGVShfs302TEMOtzYyWni6f9zuOetijJvVh9CCTzInnXAZMtHyNhefijA4HMYLg== + +"@next/swc-darwin-x64@12.2.5": + version "12.2.5" + resolved "https://registry.yarnpkg.com/@next/swc-darwin-x64/-/swc-darwin-x64-12.2.5.tgz#2e44dd82b2b7fef88238d1bc4d3bead5884cedfd" + integrity sha512-5o8bTCgAmtYOgauO/Xd27vW52G2/m3i5PX7MUYePquxXAnX73AAtqA3WgPXBRitEB60plSKZgOTkcpqrsh546A== + +"@next/swc-freebsd-x64@12.2.5": + version "12.2.5" + resolved "https://registry.yarnpkg.com/@next/swc-freebsd-x64/-/swc-freebsd-x64-12.2.5.tgz#e24e75d8c2581bfebc75e4f08f6ddbd116ce9dbd" + integrity sha512-yYUbyup1JnznMtEBRkK4LT56N0lfK5qNTzr6/DEyDw5TbFVwnuy2hhLBzwCBkScFVjpFdfiC6SQAX3FrAZzuuw== + +"@next/swc-linux-arm-gnueabihf@12.2.5": + version "12.2.5" + resolved "https://registry.yarnpkg.com/@next/swc-linux-arm-gnueabihf/-/swc-linux-arm-gnueabihf-12.2.5.tgz#46d8c514d834d2b5f67086013f0bd5e3081e10b9" + integrity sha512-2ZE2/G921Acks7UopJZVMgKLdm4vN4U0yuzvAMJ6KBavPzqESA2yHJlm85TV/K9gIjKhSk5BVtauIUntFRP8cg== + +"@next/swc-linux-arm64-gnu@12.2.5": + version "12.2.5" + resolved "https://registry.yarnpkg.com/@next/swc-linux-arm64-gnu/-/swc-linux-arm64-gnu-12.2.5.tgz#91f725ac217d3a1f4f9f53b553615ba582fd3d9f" + integrity sha512-/I6+PWVlz2wkTdWqhlSYYJ1pWWgUVva6SgX353oqTh8njNQp1SdFQuWDqk8LnM6ulheVfSsgkDzxrDaAQZnzjQ== + +"@next/swc-linux-arm64-musl@12.2.5": + version "12.2.5" + resolved "https://registry.yarnpkg.com/@next/swc-linux-arm64-musl/-/swc-linux-arm64-musl-12.2.5.tgz#e627e8c867920995810250303cd9b8e963598383" + integrity sha512-LPQRelfX6asXyVr59p5sTpx5l+0yh2Vjp/R8Wi4X9pnqcayqT4CUJLiHqCvZuLin3IsFdisJL0rKHMoaZLRfmg== + +"@next/swc-linux-x64-gnu@12.2.5": + version "12.2.5" + resolved "https://registry.yarnpkg.com/@next/swc-linux-x64-gnu/-/swc-linux-x64-gnu-12.2.5.tgz#83a5e224fbc4d119ef2e0f29d0d79c40cc43887e" + integrity sha512-0szyAo8jMCClkjNK0hknjhmAngUppoRekW6OAezbEYwHXN/VNtsXbfzgYOqjKWxEx3OoAzrT3jLwAF0HdX2MEw== + +"@next/swc-linux-x64-musl@12.2.5": + version "12.2.5" + resolved "https://registry.yarnpkg.com/@next/swc-linux-x64-musl/-/swc-linux-x64-musl-12.2.5.tgz#be700d48471baac1ec2e9539396625584a317e95" + integrity sha512-zg/Y6oBar1yVnW6Il1I/08/2ukWtOG6s3acdJdEyIdsCzyQi4RLxbbhkD/EGQyhqBvd3QrC6ZXQEXighQUAZ0g== + +"@next/swc-win32-arm64-msvc@12.2.5": + version "12.2.5" + resolved "https://registry.yarnpkg.com/@next/swc-win32-arm64-msvc/-/swc-win32-arm64-msvc-12.2.5.tgz#a93e958133ad3310373fda33a79aa10af2a0aa97" + integrity sha512-3/90DRNSqeeSRMMEhj4gHHQlLhhKg5SCCoYfE3kBjGpE63EfnblYUqsszGGZ9ekpKL/R4/SGB40iCQr8tR5Jiw== + +"@next/swc-win32-ia32-msvc@12.2.5": + version "12.2.5" + resolved "https://registry.yarnpkg.com/@next/swc-win32-ia32-msvc/-/swc-win32-ia32-msvc-12.2.5.tgz#4f5f7ba0a98ff89a883625d4af0125baed8b2e19" + integrity 
sha512-hGLc0ZRAwnaPL4ulwpp4D2RxmkHQLuI8CFOEEHdzZpS63/hMVzv81g8jzYA0UXbb9pus/iTc3VRbVbAM03SRrw== + +"@next/swc-win32-x64-msvc@12.2.5": + version "12.2.5" + resolved "https://registry.yarnpkg.com/@next/swc-win32-x64-msvc/-/swc-win32-x64-msvc-12.2.5.tgz#20fed129b04a0d3f632c6d0de135345bb623b1e4" + integrity sha512-7h5/ahY7NeaO2xygqVrSG/Y8Vs4cdjxIjowTZ5W6CKoTKn7tmnuxlUc2h74x06FKmbhAd9agOjr/AOKyxYYm9Q== + +"@nodelib/fs.scandir@2.1.5": + version "2.1.5" + resolved "https://registry.yarnpkg.com/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz#7619c2eb21b25483f6d167548b4cfd5a7488c3d5" + integrity sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g== + dependencies: + "@nodelib/fs.stat" "2.0.5" + run-parallel "^1.1.9" + +"@nodelib/fs.stat@2.0.5", "@nodelib/fs.stat@^2.0.2": + version "2.0.5" + resolved "https://registry.yarnpkg.com/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz#5bd262af94e9d25bd1e71b05deed44876a222e8b" + integrity sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A== + +"@nodelib/fs.walk@^1.2.3": + version "1.2.8" + resolved "https://registry.yarnpkg.com/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz#e95737e8bb6746ddedf69c556953494f196fe69a" + integrity sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg== + dependencies: + "@nodelib/fs.scandir" "2.1.5" + fastq "^1.6.0" + +"@rushstack/eslint-patch@^1.1.3": + version "1.1.4" + resolved "https://registry.yarnpkg.com/@rushstack/eslint-patch/-/eslint-patch-1.1.4.tgz#0c8b74c50f29ee44f423f7416829c0bf8bb5eb27" + integrity sha512-LwzQKA4vzIct1zNZzBmRKI9QuNpLgTQMEjsQLf3BXuGYb3QPTP4Yjf6mkdX+X1mYttZ808QpOwAzZjv28kq7DA== + +"@swc/helpers@0.4.3": + version "0.4.3" + resolved "https://registry.yarnpkg.com/@swc/helpers/-/helpers-0.4.3.tgz#16593dfc248c53b699d4b5026040f88ddb497012" + integrity sha512-6JrF+fdUK2zbGpJIlN7G3v966PQjyx/dPt1T9km2wj+EUBqgrxCk3uX4Kct16MIm9gGxfKRcfax2hVf5jvlTzA== + dependencies: + tslib "^2.4.0" + +"@types/json5@^0.0.29": + version "0.0.29" + resolved "https://registry.yarnpkg.com/@types/json5/-/json5-0.0.29.tgz#ee28707ae94e11d2b827bcbe5270bcea7f3e71ee" + integrity sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ== + +"@types/node@^17.0.12": + version "17.0.45" + resolved "https://registry.yarnpkg.com/@types/node/-/node-17.0.45.tgz#2c0fafd78705e7a18b7906b5201a522719dc5190" + integrity sha512-w+tIMs3rq2afQdsPJlODhoUEKzFP1ayaoyl1CcnwtIlsVe7K7bA1NGm4s3PraqTLlXnbIN84zuBlxBWo1u9BLw== + +"@types/prop-types@*": + version "15.7.5" + resolved "https://registry.yarnpkg.com/@types/prop-types/-/prop-types-15.7.5.tgz#5f19d2b85a98e9558036f6a3cacc8819420f05cf" + integrity sha512-JCB8C6SnDoQf0cNycqd/35A7MjcnK+ZTqE7judS6o7utxUCg6imJg3QK2qzHKszlTjcj2cn+NwMB2i96ubpj7w== + +"@types/react-dom@^17.0.11": + version "17.0.17" + resolved "https://registry.yarnpkg.com/@types/react-dom/-/react-dom-17.0.17.tgz#2e3743277a793a96a99f1bf87614598289da68a1" + integrity sha512-VjnqEmqGnasQKV0CWLevqMTXBYG9GbwuE6x3VetERLh0cq2LTptFE73MrQi2S7GkKXCf2GgwItB/melLnxfnsg== + dependencies: + "@types/react" "^17" + +"@types/react@18.0.17": + version "18.0.17" + resolved "https://registry.yarnpkg.com/@types/react/-/react-18.0.17.tgz#4583d9c322d67efe4b39a935d223edcc7050ccf4" + integrity sha512-38ETy4tL+rn4uQQi7mB81G7V1g0u2ryquNmsVIOKUAEIDK+3CUjZ6rSRpdvS99dNBnkLFL83qfmtLacGOTIhwQ== + dependencies: + "@types/prop-types" "*" + "@types/scheduler" "*" + csstype "^3.0.2" + +"@types/react@^17", "@types/react@^17.0.37": + version 
"17.0.49" + resolved "https://registry.yarnpkg.com/@types/react/-/react-17.0.49.tgz#df87ba4ca8b7942209c3dc655846724539dc1049" + integrity sha512-CCBPMZaPhcKkYUTqFs/hOWqKjPxhTEmnZWjlHHgIMop67DsXywf9B5Os9Hz8KSacjNOgIdnZVJamwl232uxoPg== + dependencies: + "@types/prop-types" "*" + "@types/scheduler" "*" + csstype "^3.0.2" + +"@types/scheduler@*": + version "0.16.2" + resolved "https://registry.yarnpkg.com/@types/scheduler/-/scheduler-0.16.2.tgz#1a62f89525723dde24ba1b01b092bf5df8ad4d39" + integrity sha512-hppQEBDmlwhFAXKJX2KnWLYu5yMfi91yazPb2l+lbJiwW+wdo1gNeRA+3RgNSO39WYX2euey41KEwnqesU2Jew== + +"@typescript-eslint/parser@^5.21.0": + version "5.36.2" + resolved "https://registry.yarnpkg.com/@typescript-eslint/parser/-/parser-5.36.2.tgz#3ddf323d3ac85a25295a55fcb9c7a49ab4680ddd" + integrity sha512-qS/Kb0yzy8sR0idFspI9Z6+t7mqk/oRjnAYfewG+VN73opAUvmYL3oPIMmgOX6CnQS6gmVIXGshlb5RY/R22pA== + dependencies: + "@typescript-eslint/scope-manager" "5.36.2" + "@typescript-eslint/types" "5.36.2" + "@typescript-eslint/typescript-estree" "5.36.2" + debug "^4.3.4" + +"@typescript-eslint/scope-manager@5.36.2": + version "5.36.2" + resolved "https://registry.yarnpkg.com/@typescript-eslint/scope-manager/-/scope-manager-5.36.2.tgz#a75eb588a3879ae659514780831370642505d1cd" + integrity sha512-cNNP51L8SkIFSfce8B1NSUBTJTu2Ts4nWeWbFrdaqjmn9yKrAaJUBHkyTZc0cL06OFHpb+JZq5AUHROS398Orw== + dependencies: + "@typescript-eslint/types" "5.36.2" + "@typescript-eslint/visitor-keys" "5.36.2" + +"@typescript-eslint/types@5.36.2": + version "5.36.2" + resolved "https://registry.yarnpkg.com/@typescript-eslint/types/-/types-5.36.2.tgz#a5066e500ebcfcee36694186ccc57b955c05faf9" + integrity sha512-9OJSvvwuF1L5eS2EQgFUbECb99F0mwq501w0H0EkYULkhFa19Qq7WFbycdw1PexAc929asupbZcgjVIe6OK/XQ== + +"@typescript-eslint/typescript-estree@5.36.2": + version "5.36.2" + resolved "https://registry.yarnpkg.com/@typescript-eslint/typescript-estree/-/typescript-estree-5.36.2.tgz#0c93418b36c53ba0bc34c61fe9405c4d1d8fe560" + integrity sha512-8fyH+RfbKc0mTspfuEjlfqA4YywcwQK2Amcf6TDOwaRLg7Vwdu4bZzyvBZp4bjt1RRjQ5MDnOZahxMrt2l5v9w== + dependencies: + "@typescript-eslint/types" "5.36.2" + "@typescript-eslint/visitor-keys" "5.36.2" + debug "^4.3.4" + globby "^11.1.0" + is-glob "^4.0.3" + semver "^7.3.7" + tsutils "^3.21.0" + +"@typescript-eslint/visitor-keys@5.36.2": + version "5.36.2" + resolved "https://registry.yarnpkg.com/@typescript-eslint/visitor-keys/-/visitor-keys-5.36.2.tgz#2f8f78da0a3bad3320d2ac24965791ac39dace5a" + integrity sha512-BtRvSR6dEdrNt7Net2/XDjbYKU5Ml6GqJgVfXT0CxTCJlnIqK7rAGreuWKMT2t8cFUT2Msv5oxw0GMRD7T5J7A== + dependencies: + "@typescript-eslint/types" "5.36.2" + eslint-visitor-keys "^3.3.0" + +acorn-jsx@^5.3.1: + version "5.3.2" + resolved "https://registry.yarnpkg.com/acorn-jsx/-/acorn-jsx-5.3.2.tgz#7ed5bb55908b3b2f1bc55c6af1653bada7f07937" + integrity sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ== + +acorn@^7.4.0: + version "7.4.1" + resolved "https://registry.yarnpkg.com/acorn/-/acorn-7.4.1.tgz#feaed255973d2e77555b83dbc08851a6c63520fa" + integrity sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A== + +ajv@^6.10.0, ajv@^6.12.4: + version "6.12.6" + resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.12.6.tgz#baf5a62e802b07d977034586f8c3baf5adf26df4" + integrity sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g== + dependencies: + fast-deep-equal "^3.1.1" + fast-json-stable-stringify "^2.0.0" + 
json-schema-traverse "^0.4.1" + uri-js "^4.2.2" + +ajv@^8.0.1: + version "8.11.0" + resolved "https://registry.yarnpkg.com/ajv/-/ajv-8.11.0.tgz#977e91dd96ca669f54a11e23e378e33b884a565f" + integrity sha512-wGgprdCvMalC0BztXvitD2hC04YffAvtsUn93JbGXYLAtCUO4xd17mCCZQxUOItiBwZvJScWo8NIvQMQ71rdpg== + dependencies: + fast-deep-equal "^3.1.1" + json-schema-traverse "^1.0.0" + require-from-string "^2.0.2" + uri-js "^4.2.2" + +ansi-colors@^4.1.1: + version "4.1.3" + resolved "https://registry.yarnpkg.com/ansi-colors/-/ansi-colors-4.1.3.tgz#37611340eb2243e70cc604cad35d63270d48781b" + integrity sha512-/6w/C21Pm1A7aZitlI5Ni/2J6FFQN8i1Cvz3kHABAAbw93v/NlvKdVOqz7CCWz/3iv/JplRSEEZ83XION15ovw== + +ansi-regex@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-5.0.1.tgz#082cb2c89c9fe8659a311a53bd6a4dc5301db304" + integrity sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ== + +ansi-styles@^3.2.1: + version "3.2.1" + resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d" + integrity sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA== + dependencies: + color-convert "^1.9.0" + +ansi-styles@^4.0.0, ansi-styles@^4.1.0: + version "4.3.0" + resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-4.3.0.tgz#edd803628ae71c04c85ae7a0906edad34b648937" + integrity sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg== + dependencies: + color-convert "^2.0.1" + +argparse@^1.0.7: + version "1.0.10" + resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911" + integrity sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg== + dependencies: + sprintf-js "~1.0.2" + +aria-query@^4.2.2: + version "4.2.2" + resolved "https://registry.yarnpkg.com/aria-query/-/aria-query-4.2.2.tgz#0d2ca6c9aceb56b8977e9fed6aed7e15bbd2f83b" + integrity sha512-o/HelwhuKpTj/frsOsbNLNgnNGVIFsVP/SW2BSF14gVl7kAfMOJ6/8wUAUvG1R1NHKrfG+2sHZTu0yauT1qBrA== + dependencies: + "@babel/runtime" "^7.10.2" + "@babel/runtime-corejs3" "^7.10.2" + +array-includes@^3.1.4, array-includes@^3.1.5: + version "3.1.5" + resolved "https://registry.yarnpkg.com/array-includes/-/array-includes-3.1.5.tgz#2c320010db8d31031fd2a5f6b3bbd4b1aad31bdb" + integrity sha512-iSDYZMMyTPkiFasVqfuAQnWAYcvO/SeBSCGKePoEthjp4LEMTe4uLc7b025o4jAZpHhihh8xPo99TNWUWWkGDQ== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.4" + es-abstract "^1.19.5" + get-intrinsic "^1.1.1" + is-string "^1.0.7" + +array-union@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/array-union/-/array-union-2.1.0.tgz#b798420adbeb1de828d84acd8a2e23d3efe85e8d" + integrity sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw== + +array.prototype.flat@^1.2.5: + version "1.3.0" + resolved "https://registry.yarnpkg.com/array.prototype.flat/-/array.prototype.flat-1.3.0.tgz#0b0c1567bf57b38b56b4c97b8aa72ab45e4adc7b" + integrity sha512-12IUEkHsAhA4DY5s0FPgNXIdc8VRSqD9Zp78a5au9abH/SOBrsp082JOWFNTjkMozh8mqcdiKuaLGhPeYztxSw== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.19.2" + es-shim-unscopables "^1.0.0" + +array.prototype.flatmap@^1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/array.prototype.flatmap/-/array.prototype.flatmap-1.3.0.tgz#a7e8ed4225f4788a70cd910abcf0791e76a5534f" + 
integrity sha512-PZC9/8TKAIxcWKdyeb77EzULHPrIX/tIZebLJUQOMR1OwYosT8yggdfWScfTBCDj5utONvOuPQQumYsU2ULbkg== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.19.2" + es-shim-unscopables "^1.0.0" + +ast-types-flow@^0.0.7: + version "0.0.7" + resolved "https://registry.yarnpkg.com/ast-types-flow/-/ast-types-flow-0.0.7.tgz#f70b735c6bca1a5c9c22d982c3e39e7feba3bdad" + integrity sha512-eBvWn1lvIApYMhzQMsu9ciLfkBY499mFZlNqG+/9WR7PVlroQw0vG30cOQQbaKz3sCEc44TAOu2ykzqXSNnwag== + +astral-regex@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/astral-regex/-/astral-regex-2.0.0.tgz#483143c567aeed4785759c0865786dc77d7d2e31" + integrity sha512-Z7tMw1ytTXt5jqMcOP+OQteU1VuNK9Y02uuJtKQ1Sv69jXQKKg5cibLwGJow8yzZP+eAc18EmLGPal0bp36rvQ== + +axe-core@^4.4.3: + version "4.4.3" + resolved "https://registry.yarnpkg.com/axe-core/-/axe-core-4.4.3.tgz#11c74d23d5013c0fa5d183796729bc3482bd2f6f" + integrity sha512-32+ub6kkdhhWick/UjvEwRchgoetXqTK14INLqbGm5U2TzBkBNF3nQtLYm8ovxSkQWArjEQvftCKryjZaATu3w== + +axobject-query@^2.2.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/axobject-query/-/axobject-query-2.2.0.tgz#943d47e10c0b704aa42275e20edf3722648989be" + integrity sha512-Td525n+iPOOyUQIeBfcASuG6uJsDOITl7Mds5gFyerkWiX7qhUTdYUBlSgNMyVqtSJqwpt1kXGLdUt6SykLMRA== + +balanced-match@^1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee" + integrity sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw== + +brace-expansion@^1.1.7: + version "1.1.11" + resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd" + integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA== + dependencies: + balanced-match "^1.0.0" + concat-map "0.0.1" + +braces@^3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.2.tgz#3454e1a462ee8d599e236df336cd9ea4f8afe107" + integrity sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A== + dependencies: + fill-range "^7.0.1" + +browserslist@^4.20.2: + version "4.21.3" + resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.21.3.tgz#5df277694eb3c48bc5c4b05af3e8b7e09c5a6d1a" + integrity sha512-898rgRXLAyRkM1GryrrBHGkqA5hlpkV5MhtZwg9QXeiyLUYs2k00Un05aX5l2/yJIOObYKOpS2JNo8nJDE7fWQ== + dependencies: + caniuse-lite "^1.0.30001370" + electron-to-chromium "^1.4.202" + node-releases "^2.0.6" + update-browserslist-db "^1.0.5" + +call-bind@^1.0.0, call-bind@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/call-bind/-/call-bind-1.0.2.tgz#b1d4e89e688119c3c9a903ad30abb2f6a919be3c" + integrity sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA== + dependencies: + function-bind "^1.1.1" + get-intrinsic "^1.0.2" + +callsites@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/callsites/-/callsites-3.1.0.tgz#b3630abd8943432f54b3f0519238e33cd7df2f73" + integrity sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ== + +caniuse-lite@^1.0.30001332, caniuse-lite@^1.0.30001370: + version "1.0.30001393" + resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001393.tgz#1aa161e24fe6af2e2ccda000fc2b94be0b0db356" + integrity 
sha512-N/od11RX+Gsk+1qY/jbPa0R6zJupEa0lxeBG598EbrtblxVCTJsQwbRBm6+V+rxpc5lHKdsXb9RY83cZIPLseA== + +chalk@^2.0.0: + version "2.4.2" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" + integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== + dependencies: + ansi-styles "^3.2.1" + escape-string-regexp "^1.0.5" + supports-color "^5.3.0" + +chalk@^4.0.0: + version "4.1.2" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.2.tgz#aac4e2b7734a740867aeb16bf02aad556a1e7a01" + integrity sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA== + dependencies: + ansi-styles "^4.1.0" + supports-color "^7.1.0" + +color-convert@^1.9.0: + version "1.9.3" + resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8" + integrity sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg== + dependencies: + color-name "1.1.3" + +color-convert@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-2.0.1.tgz#72d3a68d598c9bdb3af2ad1e84f21d896abd4de3" + integrity sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ== + dependencies: + color-name "~1.1.4" + +color-name@1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25" + integrity sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw== + +color-name@~1.1.4: + version "1.1.4" + resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2" + integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== + +concat-map@0.0.1: + version "0.0.1" + resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" + integrity sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg== + +convert-source-map@^1.7.0: + version "1.8.0" + resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.8.0.tgz#f3373c32d21b4d780dd8004514684fb791ca4369" + integrity sha512-+OQdjP49zViI/6i7nIJpA8rAl4sV/JdPfU9nZs3VqOwGIgizICvuN2ru6fMd+4llL0tar18UYJXfZ/TWtmhUjA== + dependencies: + safe-buffer "~5.1.1" + +core-js-pure@^3.20.2: + version "3.25.1" + resolved "https://registry.yarnpkg.com/core-js-pure/-/core-js-pure-3.25.1.tgz#79546518ae87cc362c991d9c2d211f45107991ee" + integrity sha512-7Fr74bliUDdeJCBMxkkIuQ4xfxn/SwrVg+HkJUAoNEXVqYLv55l6Af0dJ5Lq2YBUW9yKqSkLXaS5SYPK6MGa/A== + +cross-spawn@^7.0.2: + version "7.0.3" + resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6" + integrity sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w== + dependencies: + path-key "^3.1.0" + shebang-command "^2.0.0" + which "^2.0.1" + +csstype@^3.0.2: + version "3.1.0" + resolved "https://registry.yarnpkg.com/csstype/-/csstype-3.1.0.tgz#4ddcac3718d787cf9df0d1b7d15033925c8f29f2" + integrity sha512-uX1KG+x9h5hIJsaKR9xHUeUraxf8IODOwq9JLNPq6BwB04a/xgpq3rcx47l5BZu5zBPlgD342tdke3Hom/nJRA== + +damerau-levenshtein@^1.0.8: + version "1.0.8" + resolved 
"https://registry.yarnpkg.com/damerau-levenshtein/-/damerau-levenshtein-1.0.8.tgz#b43d286ccbd36bc5b2f7ed41caf2d0aba1f8a6e7" + integrity sha512-sdQSFB7+llfUcQHUQO3+B8ERRj0Oa4w9POWMI/puGtuf7gFywGmkaLCElnudfTiKZV+NvHqL0ifzdrI8Ro7ESA== + +debug@^2.6.9: + version "2.6.9" + resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" + integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA== + dependencies: + ms "2.0.0" + +debug@^3.2.7: + version "3.2.7" + resolved "https://registry.yarnpkg.com/debug/-/debug-3.2.7.tgz#72580b7e9145fb39b6676f9c5e5fb100b934179a" + integrity sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ== + dependencies: + ms "^2.1.1" + +debug@^4.0.1, debug@^4.1.0, debug@^4.1.1, debug@^4.3.4: + version "4.3.4" + resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865" + integrity sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ== + dependencies: + ms "2.1.2" + +deep-is@^0.1.3: + version "0.1.4" + resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.4.tgz#a6f2dce612fadd2ef1f519b73551f17e85199831" + integrity sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ== + +define-properties@^1.1.3, define-properties@^1.1.4: + version "1.1.4" + resolved "https://registry.yarnpkg.com/define-properties/-/define-properties-1.1.4.tgz#0b14d7bd7fbeb2f3572c3a7eda80ea5d57fb05b1" + integrity sha512-uckOqKcfaVvtBdsVkdPv3XjveQJsNQqmhXgRi8uhvWWuPYZCNlzT8qAyblUgNoXdHdjMTzAqeGjAoli8f+bzPA== + dependencies: + has-property-descriptors "^1.0.0" + object-keys "^1.1.1" + +dir-glob@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/dir-glob/-/dir-glob-3.0.1.tgz#56dbf73d992a4a93ba1584f4534063fd2e41717f" + integrity sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA== + dependencies: + path-type "^4.0.0" + +doctrine@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-2.1.0.tgz#5cd01fc101621b42c4cd7f5d1a66243716d3f39d" + integrity sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw== + dependencies: + esutils "^2.0.2" + +doctrine@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-3.0.0.tgz#addebead72a6574db783639dc87a121773973961" + integrity sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w== + dependencies: + esutils "^2.0.2" + +electron-to-chromium@^1.4.202: + version "1.4.244" + resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.4.244.tgz#ae9b56ed4ae2107e3a860dad80ed662c936e369e" + integrity sha512-E21saXLt2eTDaTxgUtiJtBUqanF9A32wZasAwDZ8gvrqXoxrBrbwtDCx7c/PQTLp81wj4X0OLDeoGQg7eMo3+w== + +emoji-regex@^8.0.0: + version "8.0.0" + resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-8.0.0.tgz#e818fd69ce5ccfcb404594f842963bf53164cc37" + integrity sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A== + +emoji-regex@^9.2.2: + version "9.2.2" + resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-9.2.2.tgz#840c8803b0d8047f4ff0cf963176b32d4ef3ed72" + integrity sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg== + +enhanced-resolve@^5.7.0: + version "5.10.0" + resolved 
"https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-5.10.0.tgz#0dc579c3bb2a1032e357ac45b8f3a6f3ad4fb1e6" + integrity sha512-T0yTFjdpldGY8PmuXXR0PyQ1ufZpEGiHVrp7zHKB7jdR4qlmZHhONVM5AQOAWXuF/w3dnHbEQVrNptJgt7F+cQ== + dependencies: + graceful-fs "^4.2.4" + tapable "^2.2.0" + +enquirer@^2.3.5: + version "2.3.6" + resolved "https://registry.yarnpkg.com/enquirer/-/enquirer-2.3.6.tgz#2a7fe5dd634a1e4125a975ec994ff5456dc3734d" + integrity sha512-yjNnPr315/FjS4zIsUxYguYUPP2e1NK4d7E7ZOLiyYCcbFBiTMyID+2wvm2w6+pZ/odMA7cRkjhsPbltwBOrLg== + dependencies: + ansi-colors "^4.1.1" + +es-abstract@^1.19.0, es-abstract@^1.19.1, es-abstract@^1.19.2, es-abstract@^1.19.5: + version "1.20.2" + resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.20.2.tgz#8495a07bc56d342a3b8ea3ab01bd986700c2ccb3" + integrity sha512-XxXQuVNrySBNlEkTYJoDNFe5+s2yIOpzq80sUHEdPdQr0S5nTLz4ZPPPswNIpKseDDUS5yghX1gfLIHQZ1iNuQ== + dependencies: + call-bind "^1.0.2" + es-to-primitive "^1.2.1" + function-bind "^1.1.1" + function.prototype.name "^1.1.5" + get-intrinsic "^1.1.2" + get-symbol-description "^1.0.0" + has "^1.0.3" + has-property-descriptors "^1.0.0" + has-symbols "^1.0.3" + internal-slot "^1.0.3" + is-callable "^1.2.4" + is-negative-zero "^2.0.2" + is-regex "^1.1.4" + is-shared-array-buffer "^1.0.2" + is-string "^1.0.7" + is-weakref "^1.0.2" + object-inspect "^1.12.2" + object-keys "^1.1.1" + object.assign "^4.1.4" + regexp.prototype.flags "^1.4.3" + string.prototype.trimend "^1.0.5" + string.prototype.trimstart "^1.0.5" + unbox-primitive "^1.0.2" + +es-shim-unscopables@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/es-shim-unscopables/-/es-shim-unscopables-1.0.0.tgz#702e632193201e3edf8713635d083d378e510241" + integrity sha512-Jm6GPcCdC30eMLbZ2x8z2WuRwAws3zTBBKuusffYVUrNj/GVSUAZ+xKMaUpfNDR5IbyNA5LJbaecoUVbmUcB1w== + dependencies: + has "^1.0.3" + +es-to-primitive@^1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/es-to-primitive/-/es-to-primitive-1.2.1.tgz#e55cd4c9cdc188bcefb03b366c736323fc5c898a" + integrity sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA== + dependencies: + is-callable "^1.1.4" + is-date-object "^1.0.1" + is-symbol "^1.0.2" + +escalade@^3.1.1: + version "3.1.1" + resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.1.1.tgz#d8cfdc7000965c5a0174b4a82eaa5c0552742e40" + integrity sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw== + +escape-string-regexp@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" + integrity sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg== + +escape-string-regexp@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz#14ba83a5d373e3d311e5afca29cf5bfad965bf34" + integrity sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA== + +eslint-config-next@^12.0.8: + version "12.3.0" + resolved "https://registry.yarnpkg.com/eslint-config-next/-/eslint-config-next-12.3.0.tgz#d887ab2d143fe1a2b308e9321e932a613e610800" + integrity sha512-guHSkNyKnTBB8HU35COgAMeMV0E026BiYRYvyEVVaTOeFcnU3i1EI8/Da0Rl7H3Sgua5FEvoA0vYd2s8kdIUXg== + dependencies: + "@next/eslint-plugin-next" "12.3.0" + "@rushstack/eslint-patch" "^1.1.3" + "@typescript-eslint/parser" "^5.21.0" + eslint-import-resolver-node 
"^0.3.6" + eslint-import-resolver-typescript "^2.7.1" + eslint-plugin-import "^2.26.0" + eslint-plugin-jsx-a11y "^6.5.1" + eslint-plugin-react "^7.29.4" + eslint-plugin-react-hooks "^4.5.0" + +eslint-config-prettier@^8.3.0: + version "8.5.0" + resolved "https://registry.yarnpkg.com/eslint-config-prettier/-/eslint-config-prettier-8.5.0.tgz#5a81680ec934beca02c7b1a61cf8ca34b66feab1" + integrity sha512-obmWKLUNCnhtQRKc+tmnYuQl0pFU1ibYJQ5BGhTVB08bHe9wC8qUeG7c08dj9XX+AuPj1YSGSQIHl1pnDHZR0Q== + +eslint-config-turbo@latest: + version "0.0.3" + resolved "https://registry.yarnpkg.com/eslint-config-turbo/-/eslint-config-turbo-0.0.3.tgz#61a3b6fdc4186bb6832ab4b48bb6ed2d3bad57a8" + integrity sha512-hK5MlxDugUWZV9ZKcyfNwLXrlMuM2wPgAUk51cUFBC3nXRCVmCA9uSRFBZsyAIurN1wH7mS7G1NBo5F8VkF7lQ== + dependencies: + eslint-plugin-turbo "0.0.3" + +eslint-import-resolver-node@^0.3.6: + version "0.3.6" + resolved "https://registry.yarnpkg.com/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.6.tgz#4048b958395da89668252001dbd9eca6b83bacbd" + integrity sha512-0En0w03NRVMn9Uiyn8YRPDKvWjxCWkslUEhGNTdGx15RvPJYQ+lbOlqrlNI2vEAs4pDYK4f/HN2TbDmk5TP0iw== + dependencies: + debug "^3.2.7" + resolve "^1.20.0" + +eslint-import-resolver-typescript@^2.7.1: + version "2.7.1" + resolved "https://registry.yarnpkg.com/eslint-import-resolver-typescript/-/eslint-import-resolver-typescript-2.7.1.tgz#a90a4a1c80da8d632df25994c4c5fdcdd02b8751" + integrity sha512-00UbgGwV8bSgUv34igBDbTOtKhqoRMy9bFjNehT40bXg6585PNIct8HhXZ0SybqB9rWtXj9crcku8ndDn/gIqQ== + dependencies: + debug "^4.3.4" + glob "^7.2.0" + is-glob "^4.0.3" + resolve "^1.22.0" + tsconfig-paths "^3.14.1" + +eslint-module-utils@^2.7.3: + version "2.7.4" + resolved "https://registry.yarnpkg.com/eslint-module-utils/-/eslint-module-utils-2.7.4.tgz#4f3e41116aaf13a20792261e61d3a2e7e0583974" + integrity sha512-j4GT+rqzCoRKHwURX7pddtIPGySnX9Si/cgMI5ztrcqOPtk5dDEeZ34CQVPphnqkJytlc97Vuk05Um2mJ3gEQA== + dependencies: + debug "^3.2.7" + +eslint-plugin-import@^2.26.0: + version "2.26.0" + resolved "https://registry.yarnpkg.com/eslint-plugin-import/-/eslint-plugin-import-2.26.0.tgz#f812dc47be4f2b72b478a021605a59fc6fe8b88b" + integrity sha512-hYfi3FXaM8WPLf4S1cikh/r4IxnO6zrhZbEGz2b660EJRbuxgpDS5gkCuYgGWg2xxh2rBuIr4Pvhve/7c31koA== + dependencies: + array-includes "^3.1.4" + array.prototype.flat "^1.2.5" + debug "^2.6.9" + doctrine "^2.1.0" + eslint-import-resolver-node "^0.3.6" + eslint-module-utils "^2.7.3" + has "^1.0.3" + is-core-module "^2.8.1" + is-glob "^4.0.3" + minimatch "^3.1.2" + object.values "^1.1.5" + resolve "^1.22.0" + tsconfig-paths "^3.14.1" + +eslint-plugin-jsx-a11y@^6.5.1: + version "6.6.1" + resolved "https://registry.yarnpkg.com/eslint-plugin-jsx-a11y/-/eslint-plugin-jsx-a11y-6.6.1.tgz#93736fc91b83fdc38cc8d115deedfc3091aef1ff" + integrity sha512-sXgFVNHiWffBq23uiS/JaP6eVR622DqwB4yTzKvGZGcPq6/yZ3WmOZfuBks/vHWo9GaFOqC2ZK4i6+C35knx7Q== + dependencies: + "@babel/runtime" "^7.18.9" + aria-query "^4.2.2" + array-includes "^3.1.5" + ast-types-flow "^0.0.7" + axe-core "^4.4.3" + axobject-query "^2.2.0" + damerau-levenshtein "^1.0.8" + emoji-regex "^9.2.2" + has "^1.0.3" + jsx-ast-utils "^3.3.2" + language-tags "^1.0.5" + minimatch "^3.1.2" + semver "^6.3.0" + +eslint-plugin-react-hooks@^4.5.0: + version "4.6.0" + resolved "https://registry.yarnpkg.com/eslint-plugin-react-hooks/-/eslint-plugin-react-hooks-4.6.0.tgz#4c3e697ad95b77e93f8646aaa1630c1ba607edd3" + integrity sha512-oFc7Itz9Qxh2x4gNHStv3BqJq54ExXmfC+a1NjAta66IAN87Wu0R/QArgIS9qKzX3dXKPI9H5crl9QchNMY9+g== + 
+eslint-plugin-react@7.31.7, eslint-plugin-react@^7.29.4: + version "7.31.7" + resolved "https://registry.yarnpkg.com/eslint-plugin-react/-/eslint-plugin-react-7.31.7.tgz#36fb1c611a7db5f757fce09cbbcc01682f8b0fbb" + integrity sha512-8NldBTeYp/kQoTV1uT0XF6HcmDqbgZ0lNPkN0wlRw8DJKXEnaWu+oh/6gt3xIhzvQ35wB2Y545fJhIbJSZ2NNw== + dependencies: + array-includes "^3.1.5" + array.prototype.flatmap "^1.3.0" + doctrine "^2.1.0" + estraverse "^5.3.0" + jsx-ast-utils "^2.4.1 || ^3.0.0" + minimatch "^3.1.2" + object.entries "^1.1.5" + object.fromentries "^2.0.5" + object.hasown "^1.1.1" + object.values "^1.1.5" + prop-types "^15.8.1" + resolve "^2.0.0-next.3" + semver "^6.3.0" + string.prototype.matchall "^4.0.7" + +eslint-plugin-turbo@0.0.3: + version "0.0.3" + resolved "https://registry.yarnpkg.com/eslint-plugin-turbo/-/eslint-plugin-turbo-0.0.3.tgz#9d86895732f95b0c236d6363177a52368fffdc71" + integrity sha512-QjidATGxWtaB9QUrD3NocUySmsgWKZlBMFlw4kX2IIjRLAxMPwukk90h3ZTaNXyRHuaQsrEgh7hhlCZoxP0TTw== + +eslint-scope@^5.1.1: + version "5.1.1" + resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-5.1.1.tgz#e786e59a66cb92b3f6c1fb0d508aab174848f48c" + integrity sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw== + dependencies: + esrecurse "^4.3.0" + estraverse "^4.1.1" + +eslint-utils@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/eslint-utils/-/eslint-utils-2.1.0.tgz#d2de5e03424e707dc10c74068ddedae708741b27" + integrity sha512-w94dQYoauyvlDc43XnGB8lU3Zt713vNChgt4EWwhXAP2XkBvndfxF0AgIqKOOasjPIPzj9JqgwkwbCYD0/V3Zg== + dependencies: + eslint-visitor-keys "^1.1.0" + +eslint-visitor-keys@^1.1.0, eslint-visitor-keys@^1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz#30ebd1ef7c2fdff01c3a4f151044af25fab0523e" + integrity sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ== + +eslint-visitor-keys@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-2.1.0.tgz#f65328259305927392c938ed44eb0a5c9b2bd303" + integrity sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw== + +eslint-visitor-keys@^3.3.0: + version "3.3.0" + resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-3.3.0.tgz#f6480fa6b1f30efe2d1968aa8ac745b862469826" + integrity sha512-mQ+suqKJVyeuwGYHAdjMFqjCyfl8+Ldnxuyp3ldiMBFKkvytrXUZWaiPCEav8qDHKty44bD+qV1IP4T+w+xXRA== + +eslint@7.32.0, eslint@^7.23.0, eslint@^7.32.0: + version "7.32.0" + resolved "https://registry.yarnpkg.com/eslint/-/eslint-7.32.0.tgz#c6d328a14be3fb08c8d1d21e12c02fdb7a2a812d" + integrity sha512-VHZ8gX+EDfz+97jGcgyGCyRia/dPOd6Xh9yPv8Bl1+SoaIwD+a/vlrOmGRUyOYu7MwUhc7CxqeaDZU13S4+EpA== + dependencies: + "@babel/code-frame" "7.12.11" + "@eslint/eslintrc" "^0.4.3" + "@humanwhocodes/config-array" "^0.5.0" + ajv "^6.10.0" + chalk "^4.0.0" + cross-spawn "^7.0.2" + debug "^4.0.1" + doctrine "^3.0.0" + enquirer "^2.3.5" + escape-string-regexp "^4.0.0" + eslint-scope "^5.1.1" + eslint-utils "^2.1.0" + eslint-visitor-keys "^2.0.0" + espree "^7.3.1" + esquery "^1.4.0" + esutils "^2.0.2" + fast-deep-equal "^3.1.3" + file-entry-cache "^6.0.1" + functional-red-black-tree "^1.0.1" + glob-parent "^5.1.2" + globals "^13.6.0" + ignore "^4.0.6" + import-fresh "^3.0.0" + imurmurhash "^0.1.4" + is-glob "^4.0.0" + js-yaml "^3.13.1" + json-stable-stringify-without-jsonify "^1.0.1" + levn "^0.4.1" + lodash.merge 
"^4.6.2" + minimatch "^3.0.4" + natural-compare "^1.4.0" + optionator "^0.9.1" + progress "^2.0.0" + regexpp "^3.1.0" + semver "^7.2.1" + strip-ansi "^6.0.0" + strip-json-comments "^3.1.0" + table "^6.0.9" + text-table "^0.2.0" + v8-compile-cache "^2.0.3" + +espree@^7.3.0, espree@^7.3.1: + version "7.3.1" + resolved "https://registry.yarnpkg.com/espree/-/espree-7.3.1.tgz#f2df330b752c6f55019f8bd89b7660039c1bbbb6" + integrity sha512-v3JCNCE64umkFpmkFGqzVKsOT0tN1Zr+ueqLZfpV1Ob8e+CEgPWa+OxCoGH3tnhimMKIaBm4m/vaRpJ/krRz2g== + dependencies: + acorn "^7.4.0" + acorn-jsx "^5.3.1" + eslint-visitor-keys "^1.3.0" + +esprima@^4.0.0: + version "4.0.1" + resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71" + integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A== + +esquery@^1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/esquery/-/esquery-1.4.0.tgz#2148ffc38b82e8c7057dfed48425b3e61f0f24a5" + integrity sha512-cCDispWt5vHHtwMY2YrAQ4ibFkAL8RbH5YGBnZBc90MolvvfkkQcJro/aZiAQUlQ3qgrYS6D6v8Gc5G5CQsc9w== + dependencies: + estraverse "^5.1.0" + +esrecurse@^4.3.0: + version "4.3.0" + resolved "https://registry.yarnpkg.com/esrecurse/-/esrecurse-4.3.0.tgz#7ad7964d679abb28bee72cec63758b1c5d2c9921" + integrity sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag== + dependencies: + estraverse "^5.2.0" + +estraverse@^4.1.1: + version "4.3.0" + resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-4.3.0.tgz#398ad3f3c5a24948be7725e83d11a7de28cdbd1d" + integrity sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw== + +estraverse@^5.1.0, estraverse@^5.2.0, estraverse@^5.3.0: + version "5.3.0" + resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-5.3.0.tgz#2eea5290702f26ab8fe5370370ff86c965d21123" + integrity sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA== + +esutils@^2.0.2: + version "2.0.3" + resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64" + integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g== + +fast-deep-equal@^3.1.1, fast-deep-equal@^3.1.3: + version "3.1.3" + resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525" + integrity sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q== + +fast-glob@^3.2.9: + version "3.2.11" + resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.2.11.tgz#a1172ad95ceb8a16e20caa5c5e56480e5129c1d9" + integrity sha512-xrO3+1bxSo3ZVHAnqzyuewYT6aMFHRAd4Kcs92MAonjwQZLsK9d0SF1IyQ3k5PoirxTW0Oe/RqFgMQ6TcNE5Ew== + dependencies: + "@nodelib/fs.stat" "^2.0.2" + "@nodelib/fs.walk" "^1.2.3" + glob-parent "^5.1.2" + merge2 "^1.3.0" + micromatch "^4.0.4" + +fast-json-stable-stringify@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz#874bf69c6f404c2b5d99c481341399fd55892633" + integrity sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw== + +fast-levenshtein@^2.0.6: + version "2.0.6" + resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917" + integrity 
sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw== + +fastq@^1.6.0: + version "1.13.0" + resolved "https://registry.yarnpkg.com/fastq/-/fastq-1.13.0.tgz#616760f88a7526bdfc596b7cab8c18938c36b98c" + integrity sha512-YpkpUnK8od0o1hmeSc7UUs/eB/vIPWJYjKck2QKIzAf71Vm1AAQ3EbuZB3g2JIy+pg+ERD0vqI79KyZiB2e2Nw== + dependencies: + reusify "^1.0.4" + +file-entry-cache@^6.0.1: + version "6.0.1" + resolved "https://registry.yarnpkg.com/file-entry-cache/-/file-entry-cache-6.0.1.tgz#211b2dd9659cb0394b073e7323ac3c933d522027" + integrity sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg== + dependencies: + flat-cache "^3.0.4" + +fill-range@^7.0.1: + version "7.0.1" + resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-7.0.1.tgz#1919a6a7c75fe38b2c7c77e5198535da9acdda40" + integrity sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ== + dependencies: + to-regex-range "^5.0.1" + +flat-cache@^3.0.4: + version "3.0.4" + resolved "https://registry.yarnpkg.com/flat-cache/-/flat-cache-3.0.4.tgz#61b0338302b2fe9f957dcc32fc2a87f1c3048b11" + integrity sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg== + dependencies: + flatted "^3.1.0" + rimraf "^3.0.2" + +flatted@^3.1.0: + version "3.2.7" + resolved "https://registry.yarnpkg.com/flatted/-/flatted-3.2.7.tgz#609f39207cb614b89d0765b477cb2d437fbf9787" + integrity sha512-5nqDSxl8nn5BSNxyR3n4I6eDmbolI6WT+QqR547RwxQapgjQBmtktdP+HTBb/a/zLsbzERTONyUB5pefh5TtjQ== + +fs.realpath@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" + integrity sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw== + +function-bind@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d" + integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A== + +function.prototype.name@^1.1.5: + version "1.1.5" + resolved "https://registry.yarnpkg.com/function.prototype.name/-/function.prototype.name-1.1.5.tgz#cce0505fe1ffb80503e6f9e46cc64e46a12a9621" + integrity sha512-uN7m/BzVKQnCUF/iW8jYea67v++2u7m5UgENbHRtdDVclOUP+FMPlCNdmk0h/ysGyo2tavMJEDqJAkJdRa1vMA== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.19.0" + functions-have-names "^1.2.2" + +functional-red-black-tree@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz#1b0ab3bd553b2a0d6399d29c0e3ea0b252078327" + integrity sha512-dsKNQNdj6xA3T+QlADDA7mOSlX0qiMINjn0cgr+eGHGsbSHzTabcIogz2+p/iqP1Xs6EP/sS2SbqH+brGTbq0g== + +functions-have-names@^1.2.2: + version "1.2.3" + resolved "https://registry.yarnpkg.com/functions-have-names/-/functions-have-names-1.2.3.tgz#0404fe4ee2ba2f607f0e0ec3c80bae994133b834" + integrity sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ== + +gensync@^1.0.0-beta.2: + version "1.0.0-beta.2" + resolved "https://registry.yarnpkg.com/gensync/-/gensync-1.0.0-beta.2.tgz#32a6ee76c3d7f52d46b2b1ae5d93fea8580a25e0" + integrity sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg== + +get-intrinsic@^1.0.2, get-intrinsic@^1.1.0, get-intrinsic@^1.1.1, get-intrinsic@^1.1.2: + version "1.1.2" + resolved 
"https://registry.yarnpkg.com/get-intrinsic/-/get-intrinsic-1.1.2.tgz#336975123e05ad0b7ba41f152ee4aadbea6cf598" + integrity sha512-Jfm3OyCxHh9DJyc28qGk+JmfkpO41A4XkneDSujN9MDXrm4oDKdHvndhZ2dN94+ERNfkYJWDclW6k2L/ZGHjXA== + dependencies: + function-bind "^1.1.1" + has "^1.0.3" + has-symbols "^1.0.3" + +get-symbol-description@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/get-symbol-description/-/get-symbol-description-1.0.0.tgz#7fdb81c900101fbd564dd5f1a30af5aadc1e58d6" + integrity sha512-2EmdH1YvIQiZpltCNgkuiUnyukzxM/R6NDJX31Ke3BG1Nq5b0S2PhX59UKi9vZpPDQVdqn+1IcaAwnzTT5vCjw== + dependencies: + call-bind "^1.0.2" + get-intrinsic "^1.1.1" + +glob-parent@^5.1.2: + version "5.1.2" + resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.2.tgz#869832c58034fe68a4093c17dc15e8340d8401c4" + integrity sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow== + dependencies: + is-glob "^4.0.1" + +glob@7.1.7: + version "7.1.7" + resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.7.tgz#3b193e9233f01d42d0b3f78294bbeeb418f94a90" + integrity sha512-OvD9ENzPLbegENnYP5UUfJIirTg4+XwMWGaQfQTY0JenxNvvIKP3U3/tAQSPIu/lHxXYSZmpXlUHeqAIdKzBLQ== + dependencies: + fs.realpath "^1.0.0" + inflight "^1.0.4" + inherits "2" + minimatch "^3.0.4" + once "^1.3.0" + path-is-absolute "^1.0.0" + +glob@^7.1.3, glob@^7.2.0: + version "7.2.3" + resolved "https://registry.yarnpkg.com/glob/-/glob-7.2.3.tgz#b8df0fb802bbfa8e89bd1d938b4e16578ed44f2b" + integrity sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q== + dependencies: + fs.realpath "^1.0.0" + inflight "^1.0.4" + inherits "2" + minimatch "^3.1.1" + once "^1.3.0" + path-is-absolute "^1.0.0" + +globals@^11.1.0: + version "11.12.0" + resolved "https://registry.yarnpkg.com/globals/-/globals-11.12.0.tgz#ab8795338868a0babd8525758018c2a7eb95c42e" + integrity sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA== + +globals@^13.6.0, globals@^13.9.0: + version "13.17.0" + resolved "https://registry.yarnpkg.com/globals/-/globals-13.17.0.tgz#902eb1e680a41da93945adbdcb5a9f361ba69bd4" + integrity sha512-1C+6nQRb1GwGMKm2dH/E7enFAMxGTmGI7/dEdhy/DNelv85w9B72t3uc5frtMNXIbzrarJJ/lTCjcaZwbLJmyw== + dependencies: + type-fest "^0.20.2" + +globby@^11.1.0: + version "11.1.0" + resolved "https://registry.yarnpkg.com/globby/-/globby-11.1.0.tgz#bd4be98bb042f83d796f7e3811991fbe82a0d34b" + integrity sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g== + dependencies: + array-union "^2.1.0" + dir-glob "^3.0.1" + fast-glob "^3.2.9" + ignore "^5.2.0" + merge2 "^1.4.1" + slash "^3.0.0" + +graceful-fs@^4.2.4: + version "4.2.10" + resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.10.tgz#147d3a006da4ca3ce14728c7aefc287c367d7a6c" + integrity sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA== + +has-bigints@^1.0.1, has-bigints@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/has-bigints/-/has-bigints-1.0.2.tgz#0871bd3e3d51626f6ca0966668ba35d5602d6eaa" + integrity sha512-tSvCKtBr9lkF0Ex0aQiP9N+OpV4zi2r/Nee5VkRDbaqv35RLYMzbwQfFSZZH0kR+Rd6302UJZ2p/bJCEoR3VoQ== + +has-flag@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd" + integrity sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw== + +has-flag@^4.0.0: + 
version "4.0.0" + resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-4.0.0.tgz#944771fd9c81c81265c4d6941860da06bb59479b" + integrity sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ== + +has-property-descriptors@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/has-property-descriptors/-/has-property-descriptors-1.0.0.tgz#610708600606d36961ed04c196193b6a607fa861" + integrity sha512-62DVLZGoiEBDHQyqG4w9xCuZ7eJEwNmJRWw2VY84Oedb7WFcA27fiEVe8oUQx9hAUJ4ekurquucTGwsyO1XGdQ== + dependencies: + get-intrinsic "^1.1.1" + +has-symbols@^1.0.2, has-symbols@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.3.tgz#bb7b2c4349251dce87b125f7bdf874aa7c8b39f8" + integrity sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A== + +has-tostringtag@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/has-tostringtag/-/has-tostringtag-1.0.0.tgz#7e133818a7d394734f941e73c3d3f9291e658b25" + integrity sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ== + dependencies: + has-symbols "^1.0.2" + +has@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/has/-/has-1.0.3.tgz#722d7cbfc1f6aa8241f16dd814e011e1f41e8796" + integrity sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw== + dependencies: + function-bind "^1.1.1" + +ignore@^4.0.6: + version "4.0.6" + resolved "https://registry.yarnpkg.com/ignore/-/ignore-4.0.6.tgz#750e3db5862087b4737ebac8207ffd1ef27b25fc" + integrity sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg== + +ignore@^5.2.0: + version "5.2.0" + resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.2.0.tgz#6d3bac8fa7fe0d45d9f9be7bac2fc279577e345a" + integrity sha512-CmxgYGiEPCLhfLnpPp1MoRmifwEIOgjcHXxOBjv7mY96c+eWScsOP9c112ZyLdWHi0FxHjI+4uVhKYp/gcdRmQ== + +import-fresh@^3.0.0, import-fresh@^3.2.1: + version "3.3.0" + resolved "https://registry.yarnpkg.com/import-fresh/-/import-fresh-3.3.0.tgz#37162c25fcb9ebaa2e6e53d5b4d88ce17d9e0c2b" + integrity sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw== + dependencies: + parent-module "^1.0.0" + resolve-from "^4.0.0" + +imurmurhash@^0.1.4: + version "0.1.4" + resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea" + integrity sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA== + +inflight@^1.0.4: + version "1.0.6" + resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" + integrity sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA== + dependencies: + once "^1.3.0" + wrappy "1" + +inherits@2: + version "2.0.4" + resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" + integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== + +internal-slot@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/internal-slot/-/internal-slot-1.0.3.tgz#7347e307deeea2faac2ac6205d4bc7d34967f59c" + integrity sha512-O0DB1JC/sPyZl7cIo78n5dR7eUSwwpYPiXRhTzNxZVAMUuB8vlnRFyLxdrVToks6XPLVnFfbzaVd5WLjhgg+vA== + dependencies: + get-intrinsic "^1.1.0" + has "^1.0.3" + side-channel "^1.0.4" + +is-bigint@^1.0.1: + version 
"1.0.4" + resolved "https://registry.yarnpkg.com/is-bigint/-/is-bigint-1.0.4.tgz#08147a1875bc2b32005d41ccd8291dffc6691df3" + integrity sha512-zB9CruMamjym81i2JZ3UMn54PKGsQzsJeo6xvN3HJJ4CAsQNB6iRutp2To77OfCNuoxspsIhzaPoO1zyCEhFOg== + dependencies: + has-bigints "^1.0.1" + +is-boolean-object@^1.1.0: + version "1.1.2" + resolved "https://registry.yarnpkg.com/is-boolean-object/-/is-boolean-object-1.1.2.tgz#5c6dc200246dd9321ae4b885a114bb1f75f63719" + integrity sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA== + dependencies: + call-bind "^1.0.2" + has-tostringtag "^1.0.0" + +is-callable@^1.1.4, is-callable@^1.2.4: + version "1.2.4" + resolved "https://registry.yarnpkg.com/is-callable/-/is-callable-1.2.4.tgz#47301d58dd0259407865547853df6d61fe471945" + integrity sha512-nsuwtxZfMX67Oryl9LCQ+upnC0Z0BgpwntpS89m1H/TLF0zNfzfLMV/9Wa/6MZsj0acpEjAO0KF1xT6ZdLl95w== + +is-core-module@^2.8.1, is-core-module@^2.9.0: + version "2.10.0" + resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.10.0.tgz#9012ede0a91c69587e647514e1d5277019e728ed" + integrity sha512-Erxj2n/LDAZ7H8WNJXd9tw38GYM3dv8rk8Zcs+jJuxYTW7sozH+SS8NtrSjVL1/vpLvWi1hxy96IzjJ3EHTJJg== + dependencies: + has "^1.0.3" + +is-date-object@^1.0.1: + version "1.0.5" + resolved "https://registry.yarnpkg.com/is-date-object/-/is-date-object-1.0.5.tgz#0841d5536e724c25597bf6ea62e1bd38298df31f" + integrity sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ== + dependencies: + has-tostringtag "^1.0.0" + +is-extglob@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2" + integrity sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ== + +is-fullwidth-code-point@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz#f116f8064fe90b3f7844a38997c0b75051269f1d" + integrity sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg== + +is-glob@^4.0.0, is-glob@^4.0.1, is-glob@^4.0.3: + version "4.0.3" + resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.3.tgz#64f61e42cbbb2eec2071a9dac0b28ba1e65d5084" + integrity sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg== + dependencies: + is-extglob "^2.1.1" + +is-negative-zero@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/is-negative-zero/-/is-negative-zero-2.0.2.tgz#7bf6f03a28003b8b3965de3ac26f664d765f3150" + integrity sha512-dqJvarLawXsFbNDeJW7zAz8ItJ9cd28YufuuFzh0G8pNHjJMnY08Dv7sYX2uF5UpQOwieAeOExEYAWWfu7ZZUA== + +is-number-object@^1.0.4: + version "1.0.7" + resolved "https://registry.yarnpkg.com/is-number-object/-/is-number-object-1.0.7.tgz#59d50ada4c45251784e9904f5246c742f07a42fc" + integrity sha512-k1U0IRzLMo7ZlYIfzRu23Oh6MiIFasgpb9X76eqfFZAqwH44UI4KTBvBYIZ1dSL9ZzChTB9ShHfLkR4pdW5krQ== + dependencies: + has-tostringtag "^1.0.0" + +is-number@^7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b" + integrity sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng== + +is-regex@^1.1.4: + version "1.1.4" + resolved "https://registry.yarnpkg.com/is-regex/-/is-regex-1.1.4.tgz#eef5663cd59fa4c0ae339505323df6854bb15958" + integrity 
sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg== + dependencies: + call-bind "^1.0.2" + has-tostringtag "^1.0.0" + +is-shared-array-buffer@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/is-shared-array-buffer/-/is-shared-array-buffer-1.0.2.tgz#8f259c573b60b6a32d4058a1a07430c0a7344c79" + integrity sha512-sqN2UDu1/0y6uvXyStCOzyhAjCSlHceFoMKJW8W9EU9cvic/QdsZ0kEU93HEy3IUEFZIiH/3w+AH/UQbPHNdhA== + dependencies: + call-bind "^1.0.2" + +is-string@^1.0.5, is-string@^1.0.7: + version "1.0.7" + resolved "https://registry.yarnpkg.com/is-string/-/is-string-1.0.7.tgz#0dd12bf2006f255bb58f695110eff7491eebc0fd" + integrity sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg== + dependencies: + has-tostringtag "^1.0.0" + +is-symbol@^1.0.2, is-symbol@^1.0.3: + version "1.0.4" + resolved "https://registry.yarnpkg.com/is-symbol/-/is-symbol-1.0.4.tgz#a6dac93b635b063ca6872236de88910a57af139c" + integrity sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg== + dependencies: + has-symbols "^1.0.2" + +is-weakref@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/is-weakref/-/is-weakref-1.0.2.tgz#9529f383a9338205e89765e0392efc2f100f06f2" + integrity sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ== + dependencies: + call-bind "^1.0.2" + +isexe@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" + integrity sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw== + +"js-tokens@^3.0.0 || ^4.0.0", js-tokens@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499" + integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ== + +js-yaml@^3.13.1: + version "3.14.1" + resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.14.1.tgz#dae812fdb3825fa306609a8717383c50c36a0537" + integrity sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g== + dependencies: + argparse "^1.0.7" + esprima "^4.0.0" + +jsesc@^2.5.1: + version "2.5.2" + resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-2.5.2.tgz#80564d2e483dacf6e8ef209650a67df3f0c283a4" + integrity sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA== + +json-schema-traverse@^0.4.1: + version "0.4.1" + resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660" + integrity sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg== + +json-schema-traverse@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz#ae7bcb3656ab77a73ba5c49bf654f38e6b6860e2" + integrity sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug== + +json-stable-stringify-without-jsonify@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz#9db7b59496ad3f3cfef30a75142d2d930ad72651" + integrity sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw== + +json5@^1.0.1: + version "1.0.1" + resolved 
"https://registry.yarnpkg.com/json5/-/json5-1.0.1.tgz#779fb0018604fa854eacbf6252180d83543e3dbe" + integrity sha512-aKS4WQjPenRxiQsC93MNfjx+nbF4PAdYzmd/1JIj8HYzqfbu86beTuNgXDzPknWk0n0uARlyewZo4s++ES36Ow== + dependencies: + minimist "^1.2.0" + +json5@^2.2.1: + version "2.2.1" + resolved "https://registry.yarnpkg.com/json5/-/json5-2.2.1.tgz#655d50ed1e6f95ad1a3caababd2b0efda10b395c" + integrity sha512-1hqLFMSrGHRHxav9q9gNjJ5EXznIxGVO09xQRrwplcS8qs28pZ8s8hupZAmqDwZUmVZ2Qb2jnyPOWcDH8m8dlA== + +"jsx-ast-utils@^2.4.1 || ^3.0.0", jsx-ast-utils@^3.3.2: + version "3.3.3" + resolved "https://registry.yarnpkg.com/jsx-ast-utils/-/jsx-ast-utils-3.3.3.tgz#76b3e6e6cece5c69d49a5792c3d01bd1a0cdc7ea" + integrity sha512-fYQHZTZ8jSfmWZ0iyzfwiU4WDX4HpHbMCZ3gPlWYiCl3BoeOTsqKBqnTVfH2rYT7eP5c3sVbeSPHnnJOaTrWiw== + dependencies: + array-includes "^3.1.5" + object.assign "^4.1.3" + +language-subtag-registry@~0.3.2: + version "0.3.22" + resolved "https://registry.yarnpkg.com/language-subtag-registry/-/language-subtag-registry-0.3.22.tgz#2e1500861b2e457eba7e7ae86877cbd08fa1fd1d" + integrity sha512-tN0MCzyWnoz/4nHS6uxdlFWoUZT7ABptwKPQ52Ea7URk6vll88bWBVhodtnlfEuCcKWNGoc+uGbw1cwa9IKh/w== + +language-tags@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/language-tags/-/language-tags-1.0.5.tgz#d321dbc4da30ba8bf3024e040fa5c14661f9193a" + integrity sha512-qJhlO9cGXi6hBGKoxEG/sKZDAHD5Hnu9Hs4WbOY3pCWXDhw0N8x1NenNzm2EnNLkLkk7J2SdxAkDSbb6ftT+UQ== + dependencies: + language-subtag-registry "~0.3.2" + +levn@^0.4.1: + version "0.4.1" + resolved "https://registry.yarnpkg.com/levn/-/levn-0.4.1.tgz#ae4562c007473b932a6200d403268dd2fffc6ade" + integrity sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ== + dependencies: + prelude-ls "^1.2.1" + type-check "~0.4.0" + +lodash.merge@^4.6.2: + version "4.6.2" + resolved "https://registry.yarnpkg.com/lodash.merge/-/lodash.merge-4.6.2.tgz#558aa53b43b661e1925a0afdfa36a9a1085fe57a" + integrity sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ== + +lodash.truncate@^4.4.2: + version "4.4.2" + resolved "https://registry.yarnpkg.com/lodash.truncate/-/lodash.truncate-4.4.2.tgz#5a350da0b1113b837ecfffd5812cbe58d6eae193" + integrity sha512-jttmRe7bRse52OsWIMDLaXxWqRAmtIUccAQ3garviCqJjafXOfNMO0yMfNpdD6zbGaTU0P5Nz7e7gAT6cKmJRw== + +lodash@^4.17.21: + version "4.17.21" + resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c" + integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== + +loose-envify@^1.1.0, loose-envify@^1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/loose-envify/-/loose-envify-1.4.0.tgz#71ee51fa7be4caec1a63839f7e682d8132d30caf" + integrity sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q== + dependencies: + js-tokens "^3.0.0 || ^4.0.0" + +lru-cache@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-6.0.0.tgz#6d6fe6570ebd96aaf90fcad1dafa3b2566db3a94" + integrity sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA== + dependencies: + yallist "^4.0.0" + +merge2@^1.3.0, merge2@^1.4.1: + version "1.4.1" + resolved "https://registry.yarnpkg.com/merge2/-/merge2-1.4.1.tgz#4368892f885e907455a6fd7dc55c0c9d404990ae" + integrity sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg== + +micromatch@^4.0.4: + version 
"4.0.5" + resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-4.0.5.tgz#bc8999a7cbbf77cdc89f132f6e467051b49090c6" + integrity sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA== + dependencies: + braces "^3.0.2" + picomatch "^2.3.1" + +minimatch@^3.0.4, minimatch@^3.1.1, minimatch@^3.1.2: + version "3.1.2" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.1.2.tgz#19cd194bfd3e428f049a70817c038d89ab4be35b" + integrity sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw== + dependencies: + brace-expansion "^1.1.7" + +minimist@^1.2.0, minimist@^1.2.6: + version "1.2.6" + resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.6.tgz#8637a5b759ea0d6e98702cfb3a9283323c93af44" + integrity sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q== + +ms@2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8" + integrity sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A== + +ms@2.1.2: + version "2.1.2" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" + integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== + +ms@^2.1.1: + version "2.1.3" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2" + integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA== + +nanoid@^3.3.4: + version "3.3.4" + resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.3.4.tgz#730b67e3cd09e2deacf03c027c81c9d9dbc5e8ab" + integrity sha512-MqBkQh/OHTS2egovRtLk45wEyNXwF+cokD+1YPf9u5VfJiRdAiRwB2froX5Co9Rh20xs4siNPm8naNotSD6RBw== + +natural-compare@^1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7" + integrity sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw== + +next-transpile-modules@9.0.0: + version "9.0.0" + resolved "https://registry.yarnpkg.com/next-transpile-modules/-/next-transpile-modules-9.0.0.tgz#133b1742af082e61cc76b02a0f12ffd40ce2bf90" + integrity sha512-VCNFOazIAnXn1hvgYYSTYMnoWgKgwlYh4lm1pKbSfiB3kj5ZYLcKVhfh3jkPOg1cnd9DP+pte9yCUocdPEUBTQ== + dependencies: + enhanced-resolve "^5.7.0" + escalade "^3.1.1" + +next@12.2.5: + version "12.2.5" + resolved "https://registry.yarnpkg.com/next/-/next-12.2.5.tgz#14fb5975e8841fad09553b8ef41fe1393602b717" + integrity sha512-tBdjqX5XC/oFs/6gxrZhjmiq90YWizUYU6qOWAfat7zJwrwapJ+BYgX2PmiacunXMaRpeVT4vz5MSPSLgNkrpA== + dependencies: + "@next/env" "12.2.5" + "@swc/helpers" "0.4.3" + caniuse-lite "^1.0.30001332" + postcss "8.4.14" + styled-jsx "5.0.4" + use-sync-external-store "1.2.0" + optionalDependencies: + "@next/swc-android-arm-eabi" "12.2.5" + "@next/swc-android-arm64" "12.2.5" + "@next/swc-darwin-arm64" "12.2.5" + "@next/swc-darwin-x64" "12.2.5" + "@next/swc-freebsd-x64" "12.2.5" + "@next/swc-linux-arm-gnueabihf" "12.2.5" + "@next/swc-linux-arm64-gnu" "12.2.5" + "@next/swc-linux-arm64-musl" "12.2.5" + "@next/swc-linux-x64-gnu" "12.2.5" + "@next/swc-linux-x64-musl" "12.2.5" + "@next/swc-win32-arm64-msvc" "12.2.5" + "@next/swc-win32-ia32-msvc" "12.2.5" + "@next/swc-win32-x64-msvc" "12.2.5" + +node-releases@^2.0.6: + version "2.0.6" + resolved 
"https://registry.yarnpkg.com/node-releases/-/node-releases-2.0.6.tgz#8a7088c63a55e493845683ebf3c828d8c51c5503" + integrity sha512-PiVXnNuFm5+iYkLBNeq5211hvO38y63T0i2KKh2KnUs3RpzJ+JtODFjkD8yjLwnDkTYF1eKXheUwdssR+NRZdg== + +object-assign@^4.1.1: + version "4.1.1" + resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" + integrity sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg== + +object-inspect@^1.12.2, object-inspect@^1.9.0: + version "1.12.2" + resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.12.2.tgz#c0641f26394532f28ab8d796ab954e43c009a8ea" + integrity sha512-z+cPxW0QGUp0mcqcsgQyLVRDoXFQbXOwBaqyF7VIgI4TWNQsDHrBpUQslRmIfAoYWdYzs6UlKJtB2XJpTaNSpQ== + +object-keys@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-1.1.1.tgz#1c47f272df277f3b1daf061677d9c82e2322c60e" + integrity sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA== + +object.assign@^4.1.3, object.assign@^4.1.4: + version "4.1.4" + resolved "https://registry.yarnpkg.com/object.assign/-/object.assign-4.1.4.tgz#9673c7c7c351ab8c4d0b516f4343ebf4dfb7799f" + integrity sha512-1mxKf0e58bvyjSCtKYY4sRe9itRk3PJpquJOjeIkz885CczcI4IvJJDLPS72oowuSh+pBxUFROpX+TU++hxhZQ== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.4" + has-symbols "^1.0.3" + object-keys "^1.1.1" + +object.entries@^1.1.5: + version "1.1.5" + resolved "https://registry.yarnpkg.com/object.entries/-/object.entries-1.1.5.tgz#e1acdd17c4de2cd96d5a08487cfb9db84d881861" + integrity sha512-TyxmjUoZggd4OrrU1W66FMDG6CuqJxsFvymeyXI51+vQLN67zYfZseptRge703kKQdo4uccgAKebXFcRCzk4+g== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.19.1" + +object.fromentries@^2.0.5: + version "2.0.5" + resolved "https://registry.yarnpkg.com/object.fromentries/-/object.fromentries-2.0.5.tgz#7b37b205109c21e741e605727fe8b0ad5fa08251" + integrity sha512-CAyG5mWQRRiBU57Re4FKoTBjXfDoNwdFVH2Y1tS9PqCsfUTymAohOkEMSG3aRNKmv4lV3O7p1et7c187q6bynw== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.19.1" + +object.hasown@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/object.hasown/-/object.hasown-1.1.1.tgz#ad1eecc60d03f49460600430d97f23882cf592a3" + integrity sha512-LYLe4tivNQzq4JdaWW6WO3HMZZJWzkkH8fnI6EebWl0VZth2wL2Lovm74ep2/gZzlaTdV62JZHEqHQ2yVn8Q/A== + dependencies: + define-properties "^1.1.4" + es-abstract "^1.19.5" + +object.values@^1.1.5: + version "1.1.5" + resolved "https://registry.yarnpkg.com/object.values/-/object.values-1.1.5.tgz#959f63e3ce9ef108720333082131e4a459b716ac" + integrity sha512-QUZRW0ilQ3PnPpbNtgdNV1PDbEqLIiSFB3l+EnGtBQ/8SUTLj1PZwtQHABZtLgwpJZTSZhuGLOGk57Drx2IvYg== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.19.1" + +once@^1.3.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" + integrity sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w== + dependencies: + wrappy "1" + +optionator@^0.9.1: + version "0.9.1" + resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.9.1.tgz#4f236a6373dae0566a6d43e1326674f50c291499" + integrity sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw== + dependencies: + deep-is "^0.1.3" + fast-levenshtein "^2.0.6" + levn "^0.4.1" + 
prelude-ls "^1.2.1" + type-check "^0.4.0" + word-wrap "^1.2.3" + +parent-module@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/parent-module/-/parent-module-1.0.1.tgz#691d2709e78c79fae3a156622452d00762caaaa2" + integrity sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g== + dependencies: + callsites "^3.0.0" + +path-is-absolute@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" + integrity sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg== + +path-key@^3.1.0: + version "3.1.1" + resolved "https://registry.yarnpkg.com/path-key/-/path-key-3.1.1.tgz#581f6ade658cbba65a0d3380de7753295054f375" + integrity sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q== + +path-parse@^1.0.7: + version "1.0.7" + resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" + integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== + +path-type@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/path-type/-/path-type-4.0.0.tgz#84ed01c0a7ba380afe09d90a8c180dcd9d03043b" + integrity sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw== + +picocolors@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-1.0.0.tgz#cb5bdc74ff3f51892236eaf79d68bc44564ab81c" + integrity sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ== + +picomatch@^2.3.1: + version "2.3.1" + resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.3.1.tgz#3ba3833733646d9d3e4995946c1365a67fb07a42" + integrity sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA== + +postcss@8.4.14: + version "8.4.14" + resolved "https://registry.yarnpkg.com/postcss/-/postcss-8.4.14.tgz#ee9274d5622b4858c1007a74d76e42e56fd21caf" + integrity sha512-E398TUmfAYFPBSdzgeieK2Y1+1cpdxJx8yXbK/m57nRhKSmk1GB2tO4lbLBtlkfPQTDKfe4Xqv1ASWPpayPEig== + dependencies: + nanoid "^3.3.4" + picocolors "^1.0.0" + source-map-js "^1.0.2" + +prelude-ls@^1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.2.1.tgz#debc6489d7a6e6b0e7611888cec880337d316396" + integrity sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g== + +prettier@latest: + version "2.7.1" + resolved "https://registry.yarnpkg.com/prettier/-/prettier-2.7.1.tgz#e235806850d057f97bb08368a4f7d899f7760c64" + integrity sha512-ujppO+MkdPqoVINuDFDRLClm7D78qbDt0/NR+wp5FqEZOoTNAjPHWj17QRhu7geIHJfcNhRk1XVQmF8Bp3ye+g== + +progress@^2.0.0: + version "2.0.3" + resolved "https://registry.yarnpkg.com/progress/-/progress-2.0.3.tgz#7e8cf8d8f5b8f239c1bc68beb4eb78567d572ef8" + integrity sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA== + +prop-types@^15.8.1: + version "15.8.1" + resolved "https://registry.yarnpkg.com/prop-types/-/prop-types-15.8.1.tgz#67d87bf1a694f48435cf332c24af10214a3140b5" + integrity sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg== + dependencies: + loose-envify "^1.4.0" + object-assign "^4.1.1" + react-is "^16.13.1" + +punycode@^2.1.0: + version "2.1.1" + resolved 
"https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec" + integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A== + +queue-microtask@^1.2.2: + version "1.2.3" + resolved "https://registry.yarnpkg.com/queue-microtask/-/queue-microtask-1.2.3.tgz#4929228bbc724dfac43e0efb058caf7b6cfb6243" + integrity sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A== + +react-dom@18.2.0: + version "18.2.0" + resolved "https://registry.yarnpkg.com/react-dom/-/react-dom-18.2.0.tgz#22aaf38708db2674ed9ada224ca4aa708d821e3d" + integrity sha512-6IMTriUmvsjHUjNtEDudZfuDQUoWXVxKHhlEGSk81n4YFS+r/Kl99wXiwlVXtPBtJenozv2P+hxDsw9eA7Xo6g== + dependencies: + loose-envify "^1.1.0" + scheduler "^0.23.0" + +react-is@^16.13.1: + version "16.13.1" + resolved "https://registry.yarnpkg.com/react-is/-/react-is-16.13.1.tgz#789729a4dc36de2999dc156dd6c1d9c18cea56a4" + integrity sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ== + +react@18.2.0, react@^18.2.0: + version "18.2.0" + resolved "https://registry.yarnpkg.com/react/-/react-18.2.0.tgz#555bd98592883255fa00de14f1151a917b5d77d5" + integrity sha512-/3IjMdb2L9QbBdWiW5e3P2/npwMBaU9mHCSCUzNln0ZCYbcfTsGbTJrU/kGemdH2IWmB2ioZ+zkxtmq6g09fGQ== + dependencies: + loose-envify "^1.1.0" + +regenerator-runtime@^0.13.4: + version "0.13.9" + resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.13.9.tgz#8925742a98ffd90814988d7566ad30ca3b263b52" + integrity sha512-p3VT+cOEgxFsRRA9X4lkI1E+k2/CtnKtU4gcxyaCUreilL/vqI6CdZ3wxVUx3UOUg+gnUOQQcRI7BmSI656MYA== + +regexp.prototype.flags@^1.4.1, regexp.prototype.flags@^1.4.3: + version "1.4.3" + resolved "https://registry.yarnpkg.com/regexp.prototype.flags/-/regexp.prototype.flags-1.4.3.tgz#87cab30f80f66660181a3bb7bf5981a872b367ac" + integrity sha512-fjggEOO3slI6Wvgjwflkc4NFRCTZAu5CnNfBd5qOMYhWdn67nJBBu34/TkD++eeFmd8C9r9jfXJ27+nSiRkSUA== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + functions-have-names "^1.2.2" + +regexpp@^3.1.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/regexpp/-/regexpp-3.2.0.tgz#0425a2768d8f23bad70ca4b90461fa2f1213e1b2" + integrity sha512-pq2bWo9mVD43nbts2wGv17XLiNLya+GklZ8kaDLV2Z08gDCsGpnKn9BFMepvWuHCbyVvY7J5o5+BVvoQbmlJLg== + +require-from-string@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/require-from-string/-/require-from-string-2.0.2.tgz#89a7fdd938261267318eafe14f9c32e598c36909" + integrity sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw== + +resolve-from@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-4.0.0.tgz#4abcd852ad32dd7baabfe9b40e00a36db5f392e6" + integrity sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g== + +resolve@^1.20.0, resolve@^1.22.0: + version "1.22.1" + resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.1.tgz#27cb2ebb53f91abb49470a928bba7558066ac177" + integrity sha512-nBpuuYuY5jFsli/JIs1oldw6fOQCBioohqWZg/2hiaOybXOft4lonv85uDOKXdf8rhyK159cxU5cDcK/NKk8zw== + dependencies: + is-core-module "^2.9.0" + path-parse "^1.0.7" + supports-preserve-symlinks-flag "^1.0.0" + +resolve@^2.0.0-next.3: + version "2.0.0-next.4" + resolved "https://registry.yarnpkg.com/resolve/-/resolve-2.0.0-next.4.tgz#3d37a113d6429f496ec4752d2a2e58efb1fd4660" + integrity 
sha512-iMDbmAWtfU+MHpxt/I5iWI7cY6YVEZUQ3MBgPQ++XD1PELuJHIl82xBmObyP2KyQmkNB2dsqF7seoQQiAn5yDQ== + dependencies: + is-core-module "^2.9.0" + path-parse "^1.0.7" + supports-preserve-symlinks-flag "^1.0.0" + +reusify@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/reusify/-/reusify-1.0.4.tgz#90da382b1e126efc02146e90845a88db12925d76" + integrity sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw== + +rimraf@^3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-3.0.2.tgz#f1a5402ba6220ad52cc1282bac1ae3aa49fd061a" + integrity sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA== + dependencies: + glob "^7.1.3" + +run-parallel@^1.1.9: + version "1.2.0" + resolved "https://registry.yarnpkg.com/run-parallel/-/run-parallel-1.2.0.tgz#66d1368da7bdf921eb9d95bd1a9229e7f21a43ee" + integrity sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA== + dependencies: + queue-microtask "^1.2.2" + +safe-buffer@~5.1.1: + version "5.1.2" + resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" + integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== + +scheduler@^0.23.0: + version "0.23.0" + resolved "https://registry.yarnpkg.com/scheduler/-/scheduler-0.23.0.tgz#ba8041afc3d30eb206a487b6b384002e4e61fdfe" + integrity sha512-CtuThmgHNg7zIZWAXi3AsyIzA3n4xx7aNyjwC2VJldO2LMVDhFK+63xGqq6CsJH4rTAt6/M+N4GhZiDYPx9eUw== + dependencies: + loose-envify "^1.1.0" + +semver@^6.3.0: + version "6.3.0" + resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.0.tgz#ee0a64c8af5e8ceea67687b133761e1becbd1d3d" + integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw== + +semver@^7.2.1, semver@^7.3.7: + version "7.3.7" + resolved "https://registry.yarnpkg.com/semver/-/semver-7.3.7.tgz#12c5b649afdbf9049707796e22a4028814ce523f" + integrity sha512-QlYTucUYOews+WeEujDoEGziz4K6c47V/Bd+LjSSYcA94p+DmINdf7ncaUinThfvZyu13lN9OY1XDxt8C0Tw0g== + dependencies: + lru-cache "^6.0.0" + +shebang-command@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-2.0.0.tgz#ccd0af4f8835fbdc265b82461aaf0c36663f34ea" + integrity sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA== + dependencies: + shebang-regex "^3.0.0" + +shebang-regex@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-3.0.0.tgz#ae16f1644d873ecad843b0307b143362d4c42172" + integrity sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A== + +side-channel@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/side-channel/-/side-channel-1.0.4.tgz#efce5c8fdc104ee751b25c58d4290011fa5ea2cf" + integrity sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw== + dependencies: + call-bind "^1.0.0" + get-intrinsic "^1.0.2" + object-inspect "^1.9.0" + +slash@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/slash/-/slash-3.0.0.tgz#6539be870c165adbd5240220dbe361f1bc4d4634" + integrity sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q== + +slice-ansi@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/slice-ansi/-/slice-ansi-4.0.0.tgz#500e8dd0fd55b05815086255b3195adf2a45fe6b" + 
integrity sha512-qMCMfhY040cVHT43K9BFygqYbUPFZKHOg7K73mtTWJRb8pyP3fzf4Ixd5SzdEJQ6MRUg/WBnOLxghZtKKurENQ== + dependencies: + ansi-styles "^4.0.0" + astral-regex "^2.0.0" + is-fullwidth-code-point "^3.0.0" + +source-map-js@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/source-map-js/-/source-map-js-1.0.2.tgz#adbc361d9c62df380125e7f161f71c826f1e490c" + integrity sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw== + +sprintf-js@~1.0.2: + version "1.0.3" + resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" + integrity sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g== + +string-width@^4.2.3: + version "4.2.3" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" + integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== + dependencies: + emoji-regex "^8.0.0" + is-fullwidth-code-point "^3.0.0" + strip-ansi "^6.0.1" + +string.prototype.matchall@^4.0.7: + version "4.0.7" + resolved "https://registry.yarnpkg.com/string.prototype.matchall/-/string.prototype.matchall-4.0.7.tgz#8e6ecb0d8a1fb1fda470d81acecb2dba057a481d" + integrity sha512-f48okCX7JiwVi1NXCVWcFnZgADDC/n2vePlQ/KUCNqCikLLilQvwjMO8+BHVKvgzH0JB0J9LEPgxOGT02RoETg== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.19.1" + get-intrinsic "^1.1.1" + has-symbols "^1.0.3" + internal-slot "^1.0.3" + regexp.prototype.flags "^1.4.1" + side-channel "^1.0.4" + +string.prototype.trimend@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/string.prototype.trimend/-/string.prototype.trimend-1.0.5.tgz#914a65baaab25fbdd4ee291ca7dde57e869cb8d0" + integrity sha512-I7RGvmjV4pJ7O3kdf+LXFpVfdNOxtCW/2C8f6jNiW4+PQchwxkCDzlk1/7p+Wl4bqFIZeF47qAHXLuHHWKAxog== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.4" + es-abstract "^1.19.5" + +string.prototype.trimstart@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/string.prototype.trimstart/-/string.prototype.trimstart-1.0.5.tgz#5466d93ba58cfa2134839f81d7f42437e8c01fef" + integrity sha512-THx16TJCGlsN0o6dl2o6ncWUsdgnLRSA23rRE5pyGBw/mLr3Ej/R2LaqCtgP8VNMGZsvMWnf9ooZPyY2bHvUFg== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.4" + es-abstract "^1.19.5" + +strip-ansi@^6.0.0, strip-ansi@^6.0.1: + version "6.0.1" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" + integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== + dependencies: + ansi-regex "^5.0.1" + +strip-bom@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-3.0.0.tgz#2334c18e9c759f7bdd56fdef7e9ae3d588e68ed3" + integrity sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA== + +strip-json-comments@^3.1.0, strip-json-comments@^3.1.1: + version "3.1.1" + resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-3.1.1.tgz#31f1281b3832630434831c310c01cccda8cbe006" + integrity sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig== + +styled-jsx@5.0.4: + version "5.0.4" + resolved "https://registry.yarnpkg.com/styled-jsx/-/styled-jsx-5.0.4.tgz#5b1bd0b9ab44caae3dd1361295559706e044aa53" + integrity 
sha512-sDFWLbg4zR+UkNzfk5lPilyIgtpddfxXEULxhujorr5jtePTUqiPDc5BC0v1NRqTr/WaFBGQQUoYToGlF4B2KQ== + +supports-color@^5.3.0: + version "5.5.0" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f" + integrity sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow== + dependencies: + has-flag "^3.0.0" + +supports-color@^7.1.0: + version "7.2.0" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-7.2.0.tgz#1b7dcdcb32b8138801b3e478ba6a51caa89648da" + integrity sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw== + dependencies: + has-flag "^4.0.0" + +supports-preserve-symlinks-flag@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz#6eda4bd344a3c94aea376d4cc31bc77311039e09" + integrity sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w== + +table@^6.0.9: + version "6.8.0" + resolved "https://registry.yarnpkg.com/table/-/table-6.8.0.tgz#87e28f14fa4321c3377ba286f07b79b281a3b3ca" + integrity sha512-s/fitrbVeEyHKFa7mFdkuQMWlH1Wgw/yEXMt5xACT4ZpzWFluehAxRtUUQKPuWhaLAWhFcVx6w3oC8VKaUfPGA== + dependencies: + ajv "^8.0.1" + lodash.truncate "^4.4.2" + slice-ansi "^4.0.0" + string-width "^4.2.3" + strip-ansi "^6.0.1" + +tapable@^2.2.0: + version "2.2.1" + resolved "https://registry.yarnpkg.com/tapable/-/tapable-2.2.1.tgz#1967a73ef4060a82f12ab96af86d52fdb76eeca0" + integrity sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ== + +text-table@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/text-table/-/text-table-0.2.0.tgz#7f5ee823ae805207c00af2df4a84ec3fcfa570b4" + integrity sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw== + +to-fast-properties@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-2.0.0.tgz#dc5e698cbd079265bc73e0377681a4e4e83f616e" + integrity sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog== + +to-regex-range@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-5.0.1.tgz#1648c44aae7c8d988a326018ed72f5b4dd0392e4" + integrity sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ== + dependencies: + is-number "^7.0.0" + +tsconfig-paths@^3.14.1: + version "3.14.1" + resolved "https://registry.yarnpkg.com/tsconfig-paths/-/tsconfig-paths-3.14.1.tgz#ba0734599e8ea36c862798e920bcf163277b137a" + integrity sha512-fxDhWnFSLt3VuTwtvJt5fpwxBHg5AdKWMsgcPOOIilyjymcYVZoCQF8fvFRezCNfblEXmi+PcM1eYHeOAgXCOQ== + dependencies: + "@types/json5" "^0.0.29" + json5 "^1.0.1" + minimist "^1.2.6" + strip-bom "^3.0.0" + +tslib@^1.8.1: + version "1.14.1" + resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.14.1.tgz#cf2d38bdc34a134bcaf1091c41f6619e2f672d00" + integrity sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg== + +tslib@^2.4.0: + version "2.4.0" + resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.4.0.tgz#7cecaa7f073ce680a05847aa77be941098f36dc3" + integrity sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ== + +tsutils@^3.21.0: + version "3.21.0" + resolved 
"https://registry.yarnpkg.com/tsutils/-/tsutils-3.21.0.tgz#b48717d394cea6c1e096983eed58e9d61715b623" + integrity sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA== + dependencies: + tslib "^1.8.1" + +turbo-android-arm64@1.4.6: + version "1.4.6" + resolved "https://registry.yarnpkg.com/turbo-android-arm64/-/turbo-android-arm64-1.4.6.tgz#2127110335a86a50856852c2728eb75f7f61b77b" + integrity sha512-YxSlHc64CF5J7yNUMiLBHkeLyzrpe75Oy7tivWb3z7ySG44BXPikk4HDJZPh0T1ELvukDwuPKkvDukJ2oCLJpA== + +turbo-darwin-64@1.4.6: + version "1.4.6" + resolved "https://registry.yarnpkg.com/turbo-darwin-64/-/turbo-darwin-64-1.4.6.tgz#8b3d930ed0d0b8c358d87ed2347381496f4283dd" + integrity sha512-f6uto7LLpjwZ6iZSF+8uaDpuiTji6xmnWDxNuW23DBE8iv5mxehHd+6Ys851uKDRrPb3QdCu9ctyigKTAla5Vg== + +turbo-darwin-arm64@1.4.6: + version "1.4.6" + resolved "https://registry.yarnpkg.com/turbo-darwin-arm64/-/turbo-darwin-arm64-1.4.6.tgz#7f045cbfbb1d6ac18ea28122b9a6a5fdc629488a" + integrity sha512-o9C6e5XyuMHQwE0fEhUxfpXxvNr2QXXWX8nxIjygxeF19AqKbk/s08vZBOEmXV6/gx/pRhZ1S2nf0PIUjKBD/Q== + +turbo-freebsd-64@1.4.6: + version "1.4.6" + resolved "https://registry.yarnpkg.com/turbo-freebsd-64/-/turbo-freebsd-64-1.4.6.tgz#b16c5617f2e818a709627351f1e14d1fd8dcf0e7" + integrity sha512-Gg9VOUo6McXYKGevcYjGUSmMryZyZggvpdPh7Dw3QTcT8Tsy6OBtq6WnJ2O4kFDsMigyKtEOJPceD9vDMZt3yQ== + +turbo-freebsd-arm64@1.4.6: + version "1.4.6" + resolved "https://registry.yarnpkg.com/turbo-freebsd-arm64/-/turbo-freebsd-arm64-1.4.6.tgz#462b8ba68cccac93d169c80cf458d221c662a770" + integrity sha512-W7VrcneWFN1QENKt5cpAPSsf9ArYBBAm3VtPBZEO5tX8kuahGlah1SKdKJXrRxYOY82wyNxDagS/rHpBlrAAzw== + +turbo-linux-32@1.4.6: + version "1.4.6" + resolved "https://registry.yarnpkg.com/turbo-linux-32/-/turbo-linux-32-1.4.6.tgz#0a0859be0941e4bcdc4bff81b97ee36f02cc1ffd" + integrity sha512-76j/zsui6mWPX8pZVMGgF8eiKHPmKuGa2lo0A/Ja0HUvdYCOGUfHsWJGVVIeYbuEp3jsKyVt7OnMDeH9CqO6bg== + +turbo-linux-64@1.4.6: + version "1.4.6" + resolved "https://registry.yarnpkg.com/turbo-linux-64/-/turbo-linux-64-1.4.6.tgz#0a7d76fab78d7850c26d9d6b372c40ffca9835f8" + integrity sha512-z4A37Xm7lZyO9ddtGnvQHWMrsAKX6vFBxdbtb9MY76VRblo7lWSuk4LwCeM+T+ZDJ9LBFiF7aD/diRShlLx9jA== + +turbo-linux-arm64@1.4.6: + version "1.4.6" + resolved "https://registry.yarnpkg.com/turbo-linux-arm64/-/turbo-linux-arm64-1.4.6.tgz#c66d3c6917ccbdb34cd7ce37c900613f4d690ebc" + integrity sha512-FW1jmOpZfOoVVvml338N0MPnYjiMyYWTaMb4T+IosgGYymcUE3xJjfXJcqfU/9/uKTyY8zG0qr9/5rw2kpMS2Q== + +turbo-linux-arm@1.4.6: + version "1.4.6" + resolved "https://registry.yarnpkg.com/turbo-linux-arm/-/turbo-linux-arm-1.4.6.tgz#9a9d73a722bdd6acb40276d0616c155168a32172" + integrity sha512-Uh/V3oaAdhyZW6FKPpKihAxQo3EbvLaVNnzzkBmBnvHRkqoDJHhpuG72V7nn8pzxVbJ1++NEVjvbc2kmKFvGjg== + +turbo-linux-mips64le@1.4.6: + version "1.4.6" + resolved "https://registry.yarnpkg.com/turbo-linux-mips64le/-/turbo-linux-mips64le-1.4.6.tgz#eb74c333c16ef03aa30dcb83fcc29d58218656e4" + integrity sha512-iWaL3Pwj52BH3T2M8nXScmbSnq4+x47MYK7lJMG7FsZGAIoT5ToO1Wt1iX3GRHTcnIZYm/kCfJ1ptK/NCossLA== + +turbo-linux-ppc64le@1.4.6: + version "1.4.6" + resolved "https://registry.yarnpkg.com/turbo-linux-ppc64le/-/turbo-linux-ppc64le-1.4.6.tgz#74597f4c30fe73c92ef8912e4bf25ccbe7c7ec7e" + integrity sha512-Af/KlUmpiORDyELxT7byXNWl3fefErGQMJfeqXEtAdhs8OCKQWuU+lchcZbiBZYNpL+lZoa3PAmP9Fpx7R4plA== + +turbo-windows-32@1.4.6: + version "1.4.6" + resolved "https://registry.yarnpkg.com/turbo-windows-32/-/turbo-windows-32-1.4.6.tgz#df1f3c25fea0bbccf7c5b44111ddbcd57f6fe547" + 
integrity sha512-NBd+XPlRSaR//lVN13Q9DOqK3CbowSvafIyGsO4jfvMsGTdyNDL6AYtFsvTKW91/G7ZhATmSEkPn2pZRuhP/DA== + +turbo-windows-64@1.4.6: + version "1.4.6" + resolved "https://registry.yarnpkg.com/turbo-windows-64/-/turbo-windows-64-1.4.6.tgz#6a7d8897bb60234b6bc4b5d013adb00fac6f2beb" + integrity sha512-86AbmG+CjzVTpn4RGtwU2CYy4zSyAc9bIQ4pDGLIpCJg6JlD11duaiMJh0SCU/HCqWLJjWDI4qD+f9WNbgPsyQ== + +turbo-windows-arm64@1.4.6: + version "1.4.6" + resolved "https://registry.yarnpkg.com/turbo-windows-arm64/-/turbo-windows-arm64-1.4.6.tgz#4c80528c6670ef50129053ad8279c832190b7234" + integrity sha512-V+pWcqhTtmQQ3ew8qEjYtUwzyW6tO1RgvP+6OKzItYzTnMTr1Fe42Q21V+tqRNxuNfFDKsgVJdk2p5wB87bvyQ== + +turbo@latest: + version "1.4.6" + resolved "https://registry.yarnpkg.com/turbo/-/turbo-1.4.6.tgz#c97c23cf898380bedabd04c5a91ab4eb9829bcdc" + integrity sha512-FKtBXlOJ7YjSK22yj4sJLCtDcHFElypt7xw9cZN7Wyv9x4XBrTmh5KP6RmcGnRR1/GJlTNwD2AY2T9QTPnHh+g== + optionalDependencies: + turbo-android-arm64 "1.4.6" + turbo-darwin-64 "1.4.6" + turbo-darwin-arm64 "1.4.6" + turbo-freebsd-64 "1.4.6" + turbo-freebsd-arm64 "1.4.6" + turbo-linux-32 "1.4.6" + turbo-linux-64 "1.4.6" + turbo-linux-arm "1.4.6" + turbo-linux-arm64 "1.4.6" + turbo-linux-mips64le "1.4.6" + turbo-linux-ppc64le "1.4.6" + turbo-windows-32 "1.4.6" + turbo-windows-64 "1.4.6" + turbo-windows-arm64 "1.4.6" + +type-check@^0.4.0, type-check@~0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.4.0.tgz#07b8203bfa7056c0657050e3ccd2c37730bab8f1" + integrity sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew== + dependencies: + prelude-ls "^1.2.1" + +type-fest@^0.20.2: + version "0.20.2" + resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.20.2.tgz#1bf207f4b28f91583666cb5fbd327887301cd5f4" + integrity sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ== + +typescript@^4.5.2, typescript@^4.5.3, typescript@^4.7.4: + version "4.8.3" + resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.8.3.tgz#d59344522c4bc464a65a730ac695007fdb66dd88" + integrity sha512-goMHfm00nWPa8UvR/CPSvykqf6dVV8x/dp0c5mFTMTIu0u0FlGWRioyy7Nn0PGAdHxpJZnuO/ut+PpQ8UiHAig== + +unbox-primitive@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/unbox-primitive/-/unbox-primitive-1.0.2.tgz#29032021057d5e6cdbd08c5129c226dff8ed6f9e" + integrity sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw== + dependencies: + call-bind "^1.0.2" + has-bigints "^1.0.2" + has-symbols "^1.0.3" + which-boxed-primitive "^1.0.2" + +update-browserslist-db@^1.0.5: + version "1.0.7" + resolved "https://registry.yarnpkg.com/update-browserslist-db/-/update-browserslist-db-1.0.7.tgz#16279639cff1d0f800b14792de43d97df2d11b7d" + integrity sha512-iN/XYesmZ2RmmWAiI4Z5rq0YqSiv0brj9Ce9CfhNE4xIW2h+MFxcgkxIzZ+ShkFPUkjU3gQ+3oypadD3RAMtrg== + dependencies: + escalade "^3.1.1" + picocolors "^1.0.0" + +uri-js@^4.2.2: + version "4.4.1" + resolved "https://registry.yarnpkg.com/uri-js/-/uri-js-4.4.1.tgz#9b1a52595225859e55f669d928f88c6c57f2a77e" + integrity sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg== + dependencies: + punycode "^2.1.0" + +use-sync-external-store@1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/use-sync-external-store/-/use-sync-external-store-1.2.0.tgz#7dbefd6ef3fe4e767a0cf5d7287aacfb5846928a" + integrity 
sha512-eEgnFxGQ1Ife9bzYs6VLi8/4X6CObHMw9Qr9tPY43iKwsPw8xE8+EFsf/2cFZ5S3esXgpWgtSCtLNS41F+sKPA== + +v8-compile-cache@^2.0.3: + version "2.3.0" + resolved "https://registry.yarnpkg.com/v8-compile-cache/-/v8-compile-cache-2.3.0.tgz#2de19618c66dc247dcfb6f99338035d8245a2cee" + integrity sha512-l8lCEmLcLYZh4nbunNZvQCJc5pv7+RCwa8q/LdUx8u7lsWvPDKmpodJAJNwkAhJC//dFY48KuIEmjtd4RViDrA== + +which-boxed-primitive@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz#13757bc89b209b049fe5d86430e21cf40a89a8e6" + integrity sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg== + dependencies: + is-bigint "^1.0.1" + is-boolean-object "^1.1.0" + is-number-object "^1.0.4" + is-string "^1.0.5" + is-symbol "^1.0.3" + +which@^2.0.1: + version "2.0.2" + resolved "https://registry.yarnpkg.com/which/-/which-2.0.2.tgz#7c6a8dd0a636a0327e10b59c9286eee93f3f51b1" + integrity sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA== + dependencies: + isexe "^2.0.0" + +word-wrap@^1.2.3: + version "1.2.3" + resolved "https://registry.yarnpkg.com/word-wrap/-/word-wrap-1.2.3.tgz#610636f6b1f703891bd34771ccb17fb93b47079c" + integrity sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ== + +wrappy@1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" + integrity sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ== + +yallist@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72" + integrity sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A== diff --git a/cli/internal/lockfile/yarn_lockfile.go b/cli/internal/lockfile/yarn_lockfile.go new file mode 100644 index 0000000..99d7764 --- /dev/null +++ b/cli/internal/lockfile/yarn_lockfile.go @@ -0,0 +1,124 @@ +package lockfile + +import ( + "bytes" + "fmt" + "io" + + "github.com/andybalholm/crlf" + "github.com/iseki0/go-yarnlock" + "github.com/pkg/errors" + "github.com/vercel/turbo/cli/internal/turbopath" +) + +var _crlfLiteral = []byte("\r\n") + +// YarnLockfile representation of yarn lockfile +type YarnLockfile struct { + inner yarnlock.LockFile + hasCRLF bool +} + +var _ Lockfile = (*YarnLockfile)(nil) + +// ResolvePackage Given a package and version returns the key, resolved version, and if it was found +func (l *YarnLockfile) ResolvePackage(_workspacePath turbopath.AnchoredUnixPath, name string, version string) (Package, error) { + for _, key := range yarnPossibleKeys(name, version) { + if entry, ok := (l.inner)[key]; ok { + return Package{ + Found: true, + Key: key, + Version: entry.Version, + }, nil + } + } + + return Package{}, nil +} + +// AllDependencies Given a lockfile key return all (dev/optional/peer) dependencies of that package +func (l *YarnLockfile) AllDependencies(key string) (map[string]string, bool) { + deps := map[string]string{} + entry, ok := (l.inner)[key] + if !ok { + return deps, false + } + + for name, version := range entry.Dependencies { + deps[name] = version + } + for name, version := range entry.OptionalDependencies { + deps[name] = version + } + + return deps, true +} + +// Subgraph Given a list of lockfile keys returns a Lockfile based off the original one that only contains the packages given +func (l *YarnLockfile) 
Subgraph(_ []turbopath.AnchoredSystemPath, packages []string) (Lockfile, error) { + lockfile := make(map[string]yarnlock.LockFileEntry, len(packages)) + for _, key := range packages { + entry, ok := (l.inner)[key] + if ok { + lockfile[key] = entry + } + } + + return &YarnLockfile{lockfile, l.hasCRLF}, nil +} + +// Encode encode the lockfile representation and write it to the given writer +func (l *YarnLockfile) Encode(w io.Writer) error { + writer := w + if l.hasCRLF { + writer = crlf.NewWriter(w) + } + if err := l.inner.Encode(writer); err != nil { + return errors.Wrap(err, "Unable to encode yarn.lock") + } + return nil +} + +// Patches return a list of patches used in the lockfile +func (l *YarnLockfile) Patches() []turbopath.AnchoredUnixPath { + return nil +} + +// DecodeYarnLockfile Takes the contents of a yarn lockfile and returns a struct representation +func DecodeYarnLockfile(contents []byte) (*YarnLockfile, error) { + lockfile, err := yarnlock.ParseLockFileData(contents) + hasCRLF := bytes.HasSuffix(contents, _crlfLiteral) + newline := []byte("\n") + + // there's no trailing newline for this file, need to inspect more to see newline style + if !hasCRLF && !bytes.HasSuffix(contents, newline) { + firstNewline := bytes.IndexByte(contents, newline[0]) + if firstNewline != -1 && firstNewline != 0 { + byteBeforeNewline := contents[firstNewline-1] + hasCRLF = byteBeforeNewline == '\r' + } + } + + if err != nil { + return nil, errors.Wrap(err, "Unable to decode yarn.lock") + } + + return &YarnLockfile{lockfile, hasCRLF}, nil +} + +// GlobalChange checks if there are any differences between lockfiles that would completely invalidate +// the cache. +func (l *YarnLockfile) GlobalChange(other Lockfile) bool { + _, ok := other.(*YarnLockfile) + return !ok +} + +func yarnPossibleKeys(name string, version string) []string { + return []string{ + fmt.Sprintf("%v@%v", name, version), + fmt.Sprintf("%v@npm:%v", name, version), + fmt.Sprintf("%v@file:%v", name, version), + fmt.Sprintf("%v@workspace:%v", name, version), + fmt.Sprintf("%v@yarn:%v", name, version), + } +} diff --git a/cli/internal/lockfile/yarn_lockfile_test.go b/cli/internal/lockfile/yarn_lockfile_test.go new file mode 100644 index 0000000..ef4fcb0 --- /dev/null +++ b/cli/internal/lockfile/yarn_lockfile_test.go @@ -0,0 +1,51 @@ +package lockfile + +import ( + "bytes" + "testing" + + "gotest.tools/v3/assert" +) + +func TestRoundtrip(t *testing.T) { + content, err := getFixture(t, "yarn.lock") + if err != nil { + t.Error(err) + } + + lockfile, err := DecodeYarnLockfile(content) + if err != nil { + t.Error(err) + } + + var b bytes.Buffer + if err := lockfile.Encode(&b); err != nil { + t.Error(err) + } + + assert.DeepEqual(t, string(content), b.String()) +} + +func TestKeySplitting(t *testing.T) { + content, err := getFixture(t, "yarn.lock") + if err != nil { + t.Error(err) + } + + lockfile, err := DecodeYarnLockfile(content) + if err != nil { + t.Error(err) + } + + // @babel/types has multiple entries, these should all appear in the lockfile struct + keys := []string{ + "@babel/types@^7.18.10", + "@babel/types@^7.18.6", + "@babel/types@^7.19.0", + } + + for _, key := range keys { + _, ok := lockfile.inner[key] + assert.Assert(t, ok, "Unable to find entry for %s in parsed lockfile", key) + } +} diff --git a/cli/internal/logstreamer/logstreamer.go b/cli/internal/logstreamer/logstreamer.go new file mode 100644 index 0000000..4379c25 --- /dev/null +++ b/cli/internal/logstreamer/logstreamer.go @@ -0,0 +1,159 @@ +// Copyright (c) 2013 Kevin van 
Zonneveld . All rights reserved. +// Source: https://github.com/kvz/logstreamer +// SPDX-License-Identifier: MIT +package logstreamer + +import ( + "bytes" + "io" + "log" + "os" + "strings" +) + +type Logstreamer struct { + Logger *log.Logger + buf *bytes.Buffer + // If prefix == stdout, colors green + // If prefix == stderr, colors red + // Else, prefix is taken as-is, and prepended to anything + // you throw at Write() + prefix string + // if true, saves output in memory + record bool + persist string + + // Adds color to stdout & stderr if terminal supports it + colorOkay string + colorFail string + colorReset string +} + +func NewLogstreamer(logger *log.Logger, prefix string, record bool) *Logstreamer { + streamer := &Logstreamer{ + Logger: logger, + buf: bytes.NewBuffer([]byte("")), + prefix: prefix, + record: record, + persist: "", + colorOkay: "", + colorFail: "", + colorReset: "", + } + + if strings.HasPrefix(os.Getenv("TERM"), "xterm") { + streamer.colorOkay = "\x1b[32m" + streamer.colorFail = "\x1b[31m" + streamer.colorReset = "\x1b[0m" + } + + return streamer +} + +func (l *Logstreamer) Write(p []byte) (n int, err error) { + if n, err = l.buf.Write(p); err != nil { + return + } + + err = l.OutputLines() + return +} + +func (l *Logstreamer) Close() error { + if err := l.Flush(); err != nil { + return err + } + l.buf = bytes.NewBuffer([]byte("")) + return nil +} + +func (l *Logstreamer) Flush() error { + p := make([]byte, l.buf.Len()) + if _, err := l.buf.Read(p); err != nil { + return err + } + + l.out(string(p)) + return nil +} + +func (l *Logstreamer) OutputLines() error { + for { + line, err := l.buf.ReadString('\n') + + if len(line) > 0 { + if strings.HasSuffix(line, "\n") { + l.out(line) + } else { + // put back into buffer, it's not a complete line yet + // Close() or Flush() have to be used to flush out + // the last remaining line if it does not end with a newline + if _, err := l.buf.WriteString(line); err != nil { + return err + } + } + } + + if err == io.EOF { + break + } + + if err != nil { + return err + } + } + + return nil +} + +func (l *Logstreamer) FlushRecord() string { + buffer := l.persist + l.persist = "" + return buffer +} + +func (l *Logstreamer) out(str string) { + if len(str) < 1 { + return + } + + if l.record { + l.persist = l.persist + str + } + + if l.prefix == "stdout" { + str = l.colorOkay + l.prefix + l.colorReset + " " + str + } else if l.prefix == "stderr" { + str = l.colorFail + l.prefix + l.colorReset + " " + str + } + + l.Logger.Print(str) +} + +// PrettyStdoutWriter wraps an ioWriter so it can add string +// prefixes to every message it writes to stdout. +type PrettyStdoutWriter struct { + w io.Writer + Prefix string +} + +var _ io.Writer = (*PrettyStdoutWriter)(nil) + +// NewPrettyStdoutWriter returns an instance of PrettyStdoutWriter +func NewPrettyStdoutWriter(prefix string) *PrettyStdoutWriter { + return &PrettyStdoutWriter{ + w: os.Stdout, + Prefix: prefix, + } +} + +func (psw *PrettyStdoutWriter) Write(p []byte) (int, error) { + str := psw.Prefix + string(p) + n, err := psw.w.Write([]byte(str)) + + if err != nil { + return n, err + } + + return len(p), nil +} diff --git a/cli/internal/logstreamer/logstreamer_test.go b/cli/internal/logstreamer/logstreamer_test.go new file mode 100644 index 0000000..94d8a82 --- /dev/null +++ b/cli/internal/logstreamer/logstreamer_test.go @@ -0,0 +1,114 @@ +// Copyright (c) 2013 Kevin van Zonneveld . All rights reserved. 
+// Source: https://github.com/kvz/logstreamer +// SPDX-License-Identifier: MIT +package logstreamer + +import ( + "bufio" + "bytes" + "fmt" + "log" + "os" + "os/exec" + "strings" + "testing" +) + +func TestLogstreamerOk(t *testing.T) { + // Create a logger (your app probably already has one) + logger := log.New(os.Stdout, "--> ", log.Ldate|log.Ltime) + + // Setup a streamer that we'll pipe cmd.Stdout to + logStreamerOut := NewLogstreamer(logger, "stdout", false) + defer logStreamerOut.Close() + // Setup a streamer that we'll pipe cmd.Stderr to. + // We want to record/buffer anything that's written to this (3rd argument true) + logStreamerErr := NewLogstreamer(logger, "stderr", true) + defer logStreamerErr.Close() + + // Execute something that succeeds + cmd := exec.Command( + "ls", + "-al", + ) + cmd.Stderr = logStreamerErr + cmd.Stdout = logStreamerOut + + // Reset any error we recorded + logStreamerErr.FlushRecord() + + // Execute command + err := cmd.Start() + + // Failed to spawn? + if err != nil { + t.Fatal("ERROR could not spawn command.", err.Error()) + } + + // Failed to execute? + err = cmd.Wait() + if err != nil { + t.Fatal("ERROR command finished with error. ", err.Error(), logStreamerErr.FlushRecord()) + } +} + +func TestLogstreamerErr(t *testing.T) { + // Create a logger (your app probably already has one) + logger := log.New(os.Stdout, "--> ", log.Ldate|log.Ltime) + + // Setup a streamer that we'll pipe cmd.Stdout to + logStreamerOut := NewLogstreamer(logger, "stdout", false) + defer logStreamerOut.Close() + // Setup a streamer that we'll pipe cmd.Stderr to. + // We want to record/buffer anything that's written to this (3rd argument true) + logStreamerErr := NewLogstreamer(logger, "stderr", true) + defer logStreamerErr.Close() + + // Execute something that fails + cmd := exec.Command( + "ls", + "nonexisting", + ) + cmd.Stderr = logStreamerErr + cmd.Stdout = logStreamerOut + + // Reset any error we recorded + logStreamerErr.FlushRecord() + + // Execute command + err := cmd.Start() + + // Failed to spawn? + if err != nil { + logger.Print("ERROR could not spawn command. ") + } + + // Failed to execute? + err = cmd.Wait() + if err != nil { + fmt.Printf("Good. command finished with %s. %s. \n", err.Error(), logStreamerErr.FlushRecord()) + } else { + t.Fatal("This command should have failed") + } +} + +func TestLogstreamerFlush(t *testing.T) { + const text = "Text without newline" + + var buffer bytes.Buffer + byteWriter := bufio.NewWriter(&buffer) + + logger := log.New(byteWriter, "", 0) + logStreamerOut := NewLogstreamer(logger, "", false) + defer logStreamerOut.Close() + + logStreamerOut.Write([]byte(text)) + logStreamerOut.Flush() + byteWriter.Flush() + + s := strings.TrimSpace(buffer.String()) + + if s != text { + t.Fatalf("Expected '%s', got '%s'.", text, s) + } +} diff --git a/cli/internal/nodes/packagetask.go b/cli/internal/nodes/packagetask.go new file mode 100644 index 0000000..e2dcb27 --- /dev/null +++ b/cli/internal/nodes/packagetask.go @@ -0,0 +1,45 @@ +// Package nodes defines the nodes that are present in the execution graph used by turbo. 
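+// For example, the "build" task in a "web" package and the "build" task in a
+// "docs" package are two distinct nodes in that graph, each carrying its own
+// Hash and LogFile.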
+package nodes + +import ( + "fmt" + + "github.com/vercel/turbo/cli/internal/fs" + "github.com/vercel/turbo/cli/internal/util" +) + +// PackageTask represents running a particular task in a particular package +type PackageTask struct { + TaskID string + Task string + PackageName string + Pkg *fs.PackageJSON + EnvMode util.EnvMode + TaskDefinition *fs.TaskDefinition + Dir string + Command string + Outputs []string + ExcludedOutputs []string + LogFile string + Hash string +} + +// OutputPrefix returns the prefix to be used for logging and ui for this task +func (pt *PackageTask) OutputPrefix(isSinglePackage bool) string { + if isSinglePackage { + return pt.Task + } + return fmt.Sprintf("%v:%v", pt.PackageName, pt.Task) +} + +// HashableOutputs returns the package-relative globs for files to be considered outputs +// of this task +func (pt *PackageTask) HashableOutputs() fs.TaskOutputs { + inclusionOutputs := []string{fmt.Sprintf(".turbo/turbo-%v.log", pt.Task)} + inclusionOutputs = append(inclusionOutputs, pt.TaskDefinition.Outputs.Inclusions...) + + return fs.TaskOutputs{ + Inclusions: inclusionOutputs, + Exclusions: pt.TaskDefinition.Outputs.Exclusions, + } +} diff --git a/cli/internal/packagemanager/berry.go b/cli/internal/packagemanager/berry.go new file mode 100644 index 0000000..d6264b1 --- /dev/null +++ b/cli/internal/packagemanager/berry.go @@ -0,0 +1,156 @@ +package packagemanager + +import ( + "fmt" + "os/exec" + "strings" + + "github.com/Masterminds/semver" + "github.com/pkg/errors" + "github.com/vercel/turbo/cli/internal/fs" + "github.com/vercel/turbo/cli/internal/lockfile" + "github.com/vercel/turbo/cli/internal/turbopath" + "github.com/vercel/turbo/cli/internal/util" +) + +var nodejsBerry = PackageManager{ + Name: "nodejs-berry", + Slug: "yarn", + Command: "yarn", + Specfile: "package.json", + Lockfile: "yarn.lock", + PackageDir: "node_modules", + + getWorkspaceGlobs: func(rootpath turbopath.AbsoluteSystemPath) ([]string, error) { + pkg, err := fs.ReadPackageJSON(rootpath.UntypedJoin("package.json")) + if err != nil { + return nil, fmt.Errorf("package.json: %w", err) + } + if len(pkg.Workspaces) == 0 { + return nil, fmt.Errorf("package.json: no workspaces found. Turborepo requires Yarn workspaces to be defined in the root package.json") + } + return pkg.Workspaces, nil + }, + + getWorkspaceIgnores: func(pm PackageManager, rootpath turbopath.AbsoluteSystemPath) ([]string, error) { + // Matches upstream values: + // Key code: https://github.com/yarnpkg/berry/blob/8e0c4b897b0881878a1f901230ea49b7c8113fbe/packages/yarnpkg-core/sources/Workspace.ts#L64-L70 + return []string{ + "**/node_modules", + "**/.git", + "**/.yarn", + }, nil + }, + + canPrune: func(cwd turbopath.AbsoluteSystemPath) (bool, error) { + if isNMLinker, err := util.IsNMLinker(cwd.ToStringDuringMigration()); err != nil { + return false, errors.Wrap(err, "could not determine if yarn is using `nodeLinker: node-modules`") + } else if !isNMLinker { + return false, errors.New("only yarn v2/v3 with `nodeLinker: node-modules` is supported at this time") + } + return true, nil + }, + + // Versions newer than 2.0 are berry, and before that we simply call them yarn. 
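+ // For example, "2.0.0-rc.1", "2.4.3", and "3.2.1" all satisfy the constraint
+ // below, while "1.22.19" does not and is handled as classic yarn.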
+ Matches: func(manager string, version string) (bool, error) { + if manager != "yarn" { + return false, nil + } + + v, err := semver.NewVersion(version) + if err != nil { + return false, fmt.Errorf("could not parse yarn version: %w", err) + } + // -0 allows pre-release versions to be considered valid + c, err := semver.NewConstraint(">=2.0.0-0") + if err != nil { + return false, fmt.Errorf("could not create constraint: %w", err) + } + + return c.Check(v), nil + }, + + // Detect for berry needs to identify which version of yarn is running on the system. + // Further, berry can be configured in an incompatible way, so we check for compatibility here as well. + detect: func(projectDirectory turbopath.AbsoluteSystemPath, packageManager *PackageManager) (bool, error) { + specfileExists := projectDirectory.UntypedJoin(packageManager.Specfile).FileExists() + lockfileExists := projectDirectory.UntypedJoin(packageManager.Lockfile).FileExists() + + // Short-circuit, definitely not Yarn. + if !specfileExists || !lockfileExists { + return false, nil + } + + cmd := exec.Command("yarn", "--version") + cmd.Dir = projectDirectory.ToString() + out, err := cmd.Output() + if err != nil { + return false, fmt.Errorf("could not detect yarn version: %w", err) + } + + // See if we're a match when we compare these two things. + matches, _ := packageManager.Matches(packageManager.Slug, string(out)) + + // Short-circuit, definitely not Berry because version number says we're Yarn. + if !matches { + return false, nil + } + + // We're Berry! + + // Check for supported configuration. + isNMLinker, err := util.IsNMLinker(projectDirectory.ToStringDuringMigration()) + + if err != nil { + // Failed to read the linker state, so we treat an unknown configuration as a failure. + return false, fmt.Errorf("could not check if yarn is using nm-linker: %w", err) + } else if !isNMLinker { + // Not using nm-linker, so unsupported configuration. + return false, fmt.Errorf("only yarn nm-linker is supported") + } + + // Berry, supported configuration. 
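+ // (For context: Berry's default Plug'n'Play linker stores packages in zip
+ // archives instead of a node_modules directory on disk, which is the
+ // configuration the nodeLinker check above rejects.)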
+ return true, nil + }, + + UnmarshalLockfile: func(_rootPackageJSON *fs.PackageJSON, contents []byte) (lockfile.Lockfile, error) { + return lockfile.DecodeBerryLockfile(contents) + }, + + prunePatches: func(pkgJSON *fs.PackageJSON, patches []turbopath.AnchoredUnixPath) error { + pkgJSON.Mu.Lock() + defer pkgJSON.Mu.Unlock() + + keysToDelete := []string{} + resolutions, ok := pkgJSON.RawJSON["resolutions"].(map[string]interface{}) + if !ok { + return fmt.Errorf("Invalid structure for resolutions field in package.json") + } + + for dependency, untypedPatch := range resolutions { + inPatches := false + patch, ok := untypedPatch.(string) + if !ok { + return fmt.Errorf("Expected value of %s in package.json to be a string, got %v", dependency, untypedPatch) + } + + for _, wantedPatch := range patches { + if strings.HasSuffix(patch, wantedPatch.ToString()) { + inPatches = true + break + } + } + + // We only want to delete unused patches as they are the only ones that throw if unused + if !inPatches && strings.HasSuffix(patch, ".patch") { + keysToDelete = append(keysToDelete, dependency) + } + } + + for _, key := range keysToDelete { + delete(resolutions, key) + } + + return nil + }, +} diff --git a/cli/internal/packagemanager/fixtures/package.json b/cli/internal/packagemanager/fixtures/package.json new file mode 100644 index 0000000..6b27f7c --- /dev/null +++ b/cli/internal/packagemanager/fixtures/package.json @@ -0,0 +1,7 @@ +{ + "name": "fixture", + "workspaces": [ + "apps/*", + "packages/*" + ] +} diff --git a/cli/internal/packagemanager/fixtures/pnpm-patches.json b/cli/internal/packagemanager/fixtures/pnpm-patches.json new file mode 100644 index 0000000..f772bc3 --- /dev/null +++ b/cli/internal/packagemanager/fixtures/pnpm-patches.json @@ -0,0 +1,11 @@ +{ + "name": "turborepo-prune-removes-patched", + "version": "1.0.0", + "packageManager": "pnpm@7.15.0", + "workspaces": ["packages/*"], + "pnpm": { + "patchedDependencies": { + "is-odd@3.0.1": "patches/is-odd@3.0.1.patch" + } + } +} diff --git a/cli/internal/packagemanager/fixtures/pnpm-workspace.yaml b/cli/internal/packagemanager/fixtures/pnpm-workspace.yaml new file mode 100644 index 0000000..7fbb770 --- /dev/null +++ b/cli/internal/packagemanager/fixtures/pnpm-workspace.yaml @@ -0,0 +1,3 @@ +packages: + - "packages/*" + - "!packages/skip" diff --git a/cli/internal/packagemanager/infer_root.go b/cli/internal/packagemanager/infer_root.go new file mode 100644 index 0000000..7920f12 --- /dev/null +++ b/cli/internal/packagemanager/infer_root.go @@ -0,0 +1,146 @@ +package packagemanager + +import ( + "path/filepath" + + "github.com/vercel/turbo/cli/internal/doublestar" + "github.com/vercel/turbo/cli/internal/turbopath" +) + +// PackageType represents the mode in which turbo is running. +type PackageType string + +const ( + // Single is for single-package mode. + Single PackageType = "single" + // Multi is for monorepo mode. + Multi PackageType = "multi" +) + +func candidateDirectoryWorkspaceGlobs(directory turbopath.AbsoluteSystemPath) []string { + packageManagers := []PackageManager{ + nodejsNpm, + nodejsPnpm, + } + + for _, pm := range packageManagers { + globs, err := pm.getWorkspaceGlobs(directory) + if err != nil { + // Try the other package manager workspace formats. 
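+ // (npm-style workspaces are declared via the "workspaces" array in
+ // package.json, while pnpm declares them in pnpm-workspace.yaml, so probing
+ // both managers covers both declaration formats.)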
+ continue + } + + return globs + } + + return nil +} + +func isOneOfTheWorkspaces(globs []string, nearestPackageJSONDir turbopath.AbsoluteSystemPath, currentPackageJSONDir turbopath.AbsoluteSystemPath) bool { + for _, glob := range globs { + globpattern := currentPackageJSONDir.UntypedJoin(filepath.FromSlash(glob)).ToString() + match, _ := doublestar.PathMatch(globpattern, nearestPackageJSONDir.ToString()) + if match { + return true + } + } + + return false +} + +// InferRoot identifies which directory we should treat as the root, and which mode +// turbo should be in when operating at that directory. +func InferRoot(directory turbopath.AbsoluteSystemPath) (turbopath.AbsoluteSystemPath, PackageType) { + // Go doesn't have iterators, so this is very not-elegant. + + // Scenarios: + // 0. Has a turbo.json but doesn't have a peer package.json. directory + multi + // 1. Nearest turbo.json, check peer package.json/pnpm-workspace.yaml. + // A. Has workspaces, multi package mode. + // B. No workspaces, single package mode. + // 2. If no turbo.json find the closest package.json parent. + // A. No parent package.json, default to current behavior. + // B. Nearest package.json defines workspaces. Can't be in single-package mode, so we bail. (This could be changed in the future.) + // 3. Closest package.json does not define workspaces. Traverse toward the root looking for package.jsons. + // A. No parent package.json with workspaces. nearestPackageJson + single + // B. Stop at the first one that has workspaces. + // i. If we are one of the workspaces, directory + multi. (This could be changed in the future.) + // ii. If we're not one of the workspaces, nearestPackageJson + single. + + nearestTurboJSON, findTurboJSONErr := directory.Findup("turbo.json") + if nearestTurboJSON == "" || findTurboJSONErr != nil { + // We didn't find a turbo.json. We're in situation 2 or 3. + + // Unroll the first loop for Scenario 2 + nearestPackageJSON, nearestPackageJSONErr := directory.Findup("package.json") + + // If we fail to find any package.json files we aren't in single package mode. + // We let things go through our existing failure paths. + // Scenario 2A. + if nearestPackageJSON == "" || nearestPackageJSONErr != nil { + return directory, Multi + } + + // If we find a package.json which has workspaces we aren't in single package mode. + // We let things go through our existing failure paths. + // Scenario 2B. + if candidateDirectoryWorkspaceGlobs(nearestPackageJSON.Dir()) != nil { + // In a future world we could maybe change this behavior. + // return nearestPackageJson.Dir(), Multi + return directory, Multi + } + + // Scenario 3. + // Find the nearest package.json that has workspaces. + // If found _and_ the nearestPackageJson is one of the workspaces, thatPackageJson + multi. + // Else, nearestPackageJson + single + cursor := nearestPackageJSON.Dir().UntypedJoin("..") + for { + nextPackageJSON, nextPackageJSONErr := cursor.Findup("package.json") + if nextPackageJSON == "" || nextPackageJSONErr != nil { + // We haven't found a parent defining workspaces. + // So we're single package mode at nearestPackageJson. + // Scenario 3A. + return nearestPackageJSON.Dir(), Single + } + + // Found a package.json file, see if it has workspaces. + // Workspaces are not allowed to be recursive, so we know what to + // return the moment we find something with workspaces. 
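+ // For example, with repo/package.json declaring "workspaces": ["packages/*"]
+ // and turbo invoked from repo/packages/app (which has its own package.json
+ // but no turbo.json), the walk stops as soon as it reaches repo.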
+ globs := candidateDirectoryWorkspaceGlobs(nextPackageJSON.Dir()) + if globs != nil { + if isOneOfTheWorkspaces(globs, nearestPackageJSON.Dir(), nextPackageJSON.Dir()) { + // If it has workspaces, and nearestPackageJson is one of them, we're multi. + // We don't infer in this scenario. + // Scenario 3BI. + // TODO: return nextPackageJson.Dir(), Multi + return directory, Multi + } + + // We found a parent with workspaces, but we're not one of them. + // We choose to operate in single package mode. + // Scenario 3BII + return nearestPackageJSON.Dir(), Single + } + + // Loop around and see if we have another parent. + cursor = nextPackageJSON.Dir().UntypedJoin("..") + } + } else { + // If there is no sibling package.json we do no inference. + siblingPackageJSONPath := nearestTurboJSON.Dir().UntypedJoin("package.json") + if !siblingPackageJSONPath.Exists() { + // We do no inference. + // Scenario 0 + return directory, Multi + } + + if candidateDirectoryWorkspaceGlobs(nearestTurboJSON.Dir()) != nil { + // Scenario 1A. + return nearestTurboJSON.Dir(), Multi + } + + // Scenario 1B. + return nearestTurboJSON.Dir(), Single + } +} diff --git a/cli/internal/packagemanager/infer_root_test.go b/cli/internal/packagemanager/infer_root_test.go new file mode 100644 index 0000000..2e37a80 --- /dev/null +++ b/cli/internal/packagemanager/infer_root_test.go @@ -0,0 +1,347 @@ +package packagemanager + +import ( + "reflect" + "testing" + + "github.com/vercel/turbo/cli/internal/turbopath" + "gotest.tools/v3/assert" +) + +func TestInferRoot(t *testing.T) { + type file struct { + path turbopath.AnchoredSystemPath + content []byte + } + + tests := []struct { + name string + fs []file + executionDirectory turbopath.AnchoredSystemPath + rootPath turbopath.AnchoredSystemPath + packageMode PackageType + }{ + // Scenario 0 + { + name: "turbo.json at current dir, no package.json", + fs: []file{ + {path: turbopath.AnchoredUnixPath("turbo.json").ToSystemPath()}, + }, + executionDirectory: turbopath.AnchoredUnixPath("").ToSystemPath(), + rootPath: turbopath.AnchoredUnixPath("").ToSystemPath(), + packageMode: Multi, + }, + { + name: "turbo.json at parent dir, no package.json", + fs: []file{ + {path: turbopath.AnchoredUnixPath("execution/path/subdir/.file").ToSystemPath()}, + {path: turbopath.AnchoredUnixPath("turbo.json").ToSystemPath()}, + }, + executionDirectory: turbopath.AnchoredUnixPath("execution/path/subdir").ToSystemPath(), + // This is "no inference" + rootPath: turbopath.AnchoredUnixPath("execution/path/subdir").ToSystemPath(), + packageMode: Multi, + }, + // Scenario 1A + { + name: "turbo.json at current dir, has package.json, has workspaces key", + fs: []file{ + {path: turbopath.AnchoredUnixPath("turbo.json").ToSystemPath()}, + { + path: turbopath.AnchoredUnixPath("package.json").ToSystemPath(), + content: []byte("{ \"workspaces\": [ \"exists\" ] }"), + }, + }, + executionDirectory: turbopath.AnchoredUnixPath("").ToSystemPath(), + rootPath: turbopath.AnchoredUnixPath("").ToSystemPath(), + packageMode: Multi, + }, + { + name: "turbo.json at parent dir, has package.json, has workspaces key", + fs: []file{ + {path: turbopath.AnchoredUnixPath("execution/path/subdir/.file").ToSystemPath()}, + {path: turbopath.AnchoredUnixPath("turbo.json").ToSystemPath()}, + { + path: turbopath.AnchoredUnixPath("package.json").ToSystemPath(), + content: []byte("{ \"workspaces\": [ \"exists\" ] }"), + }, + }, + executionDirectory: turbopath.AnchoredUnixPath("execution/path/subdir").ToSystemPath(), + rootPath: 
turbopath.AnchoredUnixPath("").ToSystemPath(),
+			packageMode:        Multi,
+		},
+		{
+			name: "turbo.json at parent dir, has package.json, has pnpm workspaces",
+			fs: []file{
+				{path: turbopath.AnchoredUnixPath("execution/path/subdir/.file").ToSystemPath()},
+				{path: turbopath.AnchoredUnixPath("turbo.json").ToSystemPath()},
+				{
+					path:    turbopath.AnchoredUnixPath("package.json").ToSystemPath(),
+					content: []byte("{}"),
+				},
+				{
+					path:    turbopath.AnchoredUnixPath("pnpm-workspace.yaml").ToSystemPath(),
+					content: []byte("packages:\n - docs"),
+				},
+			},
+			executionDirectory: turbopath.AnchoredUnixPath("execution/path/subdir").ToSystemPath(),
+			rootPath:           turbopath.AnchoredUnixPath("").ToSystemPath(),
+			packageMode:        Multi,
+		},
+		// Scenario 1A aware of the weird thing we do for packages.
+		{
+			name: "turbo.json at current dir, has package.json, has packages key",
+			fs: []file{
+				{path: turbopath.AnchoredUnixPath("turbo.json").ToSystemPath()},
+				{
+					path:    turbopath.AnchoredUnixPath("package.json").ToSystemPath(),
+					content: []byte("{ \"packages\": [ \"exists\" ] }"),
+				},
+			},
+			executionDirectory: turbopath.AnchoredUnixPath("").ToSystemPath(),
+			rootPath:           turbopath.AnchoredUnixPath("").ToSystemPath(),
+			packageMode:        Single,
+		},
+		{
+			name: "turbo.json at parent dir, has package.json, has packages key",
+			fs: []file{
+				{path: turbopath.AnchoredUnixPath("execution/path/subdir/.file").ToSystemPath()},
+				{path: turbopath.AnchoredUnixPath("turbo.json").ToSystemPath()},
+				{
+					path:    turbopath.AnchoredUnixPath("package.json").ToSystemPath(),
+					content: []byte("{ \"packages\": [ \"exists\" ] }"),
+				},
+			},
+			executionDirectory: turbopath.AnchoredUnixPath("execution/path/subdir").ToSystemPath(),
+			rootPath:           turbopath.AnchoredUnixPath("").ToSystemPath(),
+			packageMode:        Single,
+		},
+		// Scenario 1A aware of the weird thing we do for packages when both methods of specification exist.
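+		// The two "workspace and packages key" cases below pin down the
+		// tie-break: when package.json carries both keys, workspace globs
+		// are still detected and we infer multi-package mode, whereas the
+		// "packages"-only cases above infer single-package mode.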
+ { + name: "turbo.json at current dir, has package.json, has workspace and packages key", + fs: []file{ + {path: turbopath.AnchoredUnixPath("turbo.json").ToSystemPath()}, + { + path: turbopath.AnchoredUnixPath("package.json").ToSystemPath(), + content: []byte("{ \"workspaces\": [ \"clobbered\" ], \"packages\": [ \"exists\" ] }"), + }, + }, + executionDirectory: turbopath.AnchoredUnixPath("").ToSystemPath(), + rootPath: turbopath.AnchoredUnixPath("").ToSystemPath(), + packageMode: Multi, + }, + { + name: "turbo.json at parent dir, has package.json, has workspace and packages key", + fs: []file{ + {path: turbopath.AnchoredUnixPath("execution/path/subdir/.file").ToSystemPath()}, + {path: turbopath.AnchoredUnixPath("turbo.json").ToSystemPath()}, + { + path: turbopath.AnchoredUnixPath("package.json").ToSystemPath(), + content: []byte("{ \"workspaces\": [ \"clobbered\" ], \"packages\": [ \"exists\" ] }"), + }, + }, + executionDirectory: turbopath.AnchoredUnixPath("execution/path/subdir").ToSystemPath(), + rootPath: turbopath.AnchoredUnixPath("").ToSystemPath(), + packageMode: Multi, + }, + // Scenario 1B + { + name: "turbo.json at current dir, has package.json, no workspaces", + fs: []file{ + {path: turbopath.AnchoredUnixPath("turbo.json").ToSystemPath()}, + { + path: turbopath.AnchoredUnixPath("package.json").ToSystemPath(), + content: []byte("{}"), + }, + }, + executionDirectory: turbopath.AnchoredUnixPath("").ToSystemPath(), + rootPath: turbopath.AnchoredUnixPath("").ToSystemPath(), + packageMode: Single, + }, + { + name: "turbo.json at parent dir, has package.json, no workspaces", + fs: []file{ + {path: turbopath.AnchoredUnixPath("execution/path/subdir/.file").ToSystemPath()}, + {path: turbopath.AnchoredUnixPath("turbo.json").ToSystemPath()}, + { + path: turbopath.AnchoredUnixPath("package.json").ToSystemPath(), + content: []byte("{}"), + }, + }, + executionDirectory: turbopath.AnchoredUnixPath("execution/path/subdir").ToSystemPath(), + rootPath: turbopath.AnchoredUnixPath("").ToSystemPath(), + packageMode: Single, + }, + { + name: "turbo.json at parent dir, has package.json, no workspaces, includes pnpm", + fs: []file{ + {path: turbopath.AnchoredUnixPath("execution/path/subdir/.file").ToSystemPath()}, + {path: turbopath.AnchoredUnixPath("turbo.json").ToSystemPath()}, + { + path: turbopath.AnchoredUnixPath("package.json").ToSystemPath(), + content: []byte("{}"), + }, + { + path: turbopath.AnchoredUnixPath("pnpm-workspace.yaml").ToSystemPath(), + content: []byte(""), + }, + }, + executionDirectory: turbopath.AnchoredUnixPath("execution/path/subdir").ToSystemPath(), + rootPath: turbopath.AnchoredUnixPath("").ToSystemPath(), + packageMode: Single, + }, + // Scenario 2A + { + name: "no turbo.json, no package.json at current", + fs: []file{}, + executionDirectory: turbopath.AnchoredUnixPath("").ToSystemPath(), + rootPath: turbopath.AnchoredUnixPath("").ToSystemPath(), + packageMode: Multi, + }, + { + name: "no turbo.json, no package.json at parent", + fs: []file{ + {path: turbopath.AnchoredUnixPath("execution/path/subdir/.file").ToSystemPath()}, + }, + executionDirectory: turbopath.AnchoredUnixPath("execution/path/subdir").ToSystemPath(), + rootPath: turbopath.AnchoredUnixPath("execution/path/subdir").ToSystemPath(), + packageMode: Multi, + }, + // Scenario 2B + { + name: "no turbo.json, has package.json with workspaces at current", + fs: []file{ + { + path: turbopath.AnchoredUnixPath("package.json").ToSystemPath(), + content: []byte("{ \"workspaces\": [ \"exists\" ] }"), + }, + }, + 
executionDirectory: turbopath.AnchoredUnixPath("").ToSystemPath(),
+			rootPath:           turbopath.AnchoredUnixPath("").ToSystemPath(),
+			packageMode:        Multi,
+		},
+		{
+			name: "no turbo.json, has package.json with workspaces at parent",
+			fs: []file{
+				{path: turbopath.AnchoredUnixPath("execution/path/subdir/.file").ToSystemPath()},
+				{
+					path:    turbopath.AnchoredUnixPath("package.json").ToSystemPath(),
+					content: []byte("{ \"workspaces\": [ \"exists\" ] }"),
+				},
+			},
+			executionDirectory: turbopath.AnchoredUnixPath("execution/path/subdir").ToSystemPath(),
+			rootPath:           turbopath.AnchoredUnixPath("execution/path/subdir").ToSystemPath(),
+			packageMode:        Multi,
+		},
+		{
+			name: "no turbo.json, has package.json with pnpm workspaces at parent",
+			fs: []file{
+				{path: turbopath.AnchoredUnixPath("execution/path/subdir/.file").ToSystemPath()},
+				{
+					path:    turbopath.AnchoredUnixPath("package.json").ToSystemPath(),
+					content: []byte("{ \"workspaces\": [ \"exists\" ] }"),
+				},
+				{
+					path:    turbopath.AnchoredUnixPath("pnpm-workspace.yaml").ToSystemPath(),
+					content: []byte("packages:\n - docs"),
+				},
+			},
+			executionDirectory: turbopath.AnchoredUnixPath("execution/path/subdir").ToSystemPath(),
+			rootPath:           turbopath.AnchoredUnixPath("execution/path/subdir").ToSystemPath(),
+			packageMode:        Multi,
+		},
+		// Scenario 3A
+		{
+			name: "no turbo.json, lots of package.json files but no workspaces",
+			fs: []file{
+				{
+					path:    turbopath.AnchoredUnixPath("package.json").ToSystemPath(),
+					content: []byte("{}"),
+				},
+				{
+					path:    turbopath.AnchoredUnixPath("one/package.json").ToSystemPath(),
+					content: []byte("{}"),
+				},
+				{
+					path:    turbopath.AnchoredUnixPath("one/two/package.json").ToSystemPath(),
+					content: []byte("{}"),
+				},
+				{
+					path:    turbopath.AnchoredUnixPath("one/two/three/package.json").ToSystemPath(),
+					content: []byte("{}"),
+				},
+			},
+			executionDirectory: turbopath.AnchoredUnixPath("one/two/three").ToSystemPath(),
+			rootPath:           turbopath.AnchoredUnixPath("one/two/three").ToSystemPath(),
+			packageMode:        Single,
+		},
+		// Scenario 3BI
+		{
+			name: "no turbo.json, lots of package.json files, and a workspace at the root that matches execution directory",
+			fs: []file{
+				{
+					path:    turbopath.AnchoredUnixPath("package.json").ToSystemPath(),
+					content: []byte("{ \"workspaces\": [ \"one/two/three\" ] }"),
+				},
+				{
+					path:    turbopath.AnchoredUnixPath("one/package.json").ToSystemPath(),
+					content: []byte("{}"),
+				},
+				{
+					path:    turbopath.AnchoredUnixPath("one/two/package.json").ToSystemPath(),
+					content: []byte("{}"),
+				},
+				{
+					path:    turbopath.AnchoredUnixPath("one/two/three/package.json").ToSystemPath(),
+					content: []byte("{}"),
+				},
+			},
+			executionDirectory: turbopath.AnchoredUnixPath("one/two/three").ToSystemPath(),
+			rootPath:           turbopath.AnchoredUnixPath("one/two/three").ToSystemPath(),
+			packageMode:        Multi,
+		},
+		// Scenario 3BII
+		{
+			name: "no turbo.json, lots of package.json files, and a workspace at the root that does not match execution directory",
+			fs: []file{
+				{
+					path:    turbopath.AnchoredUnixPath("package.json").ToSystemPath(),
+					content: []byte("{ \"workspaces\": [ \"does-not-exist\" ] }"),
+				},
+				{
+					path:    turbopath.AnchoredUnixPath("one/package.json").ToSystemPath(),
+					content: []byte("{}"),
+				},
+				{
+					path:    turbopath.AnchoredUnixPath("one/two/package.json").ToSystemPath(),
+					content: []byte("{}"),
+				},
+				{
+					path:    turbopath.AnchoredUnixPath("one/two/three/package.json").ToSystemPath(),
+					content: []byte("{}"),
+				},
+			},
+			executionDirectory: turbopath.AnchoredUnixPath("one/two/three").ToSystemPath(),
+			rootPath: 
turbopath.AnchoredUnixPath("one/two/three").ToSystemPath(), + packageMode: Single, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + fsRoot := turbopath.AbsoluteSystemPath(t.TempDir()) + for _, file := range tt.fs { + path := file.path.RestoreAnchor(fsRoot) + assert.NilError(t, path.Dir().MkdirAll(0777)) + assert.NilError(t, path.WriteFile(file.content, 0777)) + } + + turboRoot, packageMode := InferRoot(tt.executionDirectory.RestoreAnchor(fsRoot)) + if !reflect.DeepEqual(turboRoot, tt.rootPath.RestoreAnchor(fsRoot)) { + t.Errorf("InferRoot() turboRoot = %v, want %v", turboRoot, tt.rootPath.RestoreAnchor(fsRoot)) + } + if packageMode != tt.packageMode { + t.Errorf("InferRoot() packageMode = %v, want %v", packageMode, tt.packageMode) + } + }) + } +} diff --git a/cli/internal/packagemanager/npm.go b/cli/internal/packagemanager/npm.go new file mode 100644 index 0000000..ce2eb8c --- /dev/null +++ b/cli/internal/packagemanager/npm.go @@ -0,0 +1,59 @@ +package packagemanager + +import ( + "fmt" + + "github.com/vercel/turbo/cli/internal/fs" + "github.com/vercel/turbo/cli/internal/lockfile" + "github.com/vercel/turbo/cli/internal/turbopath" +) + +var nodejsNpm = PackageManager{ + Name: "nodejs-npm", + Slug: "npm", + Command: "npm", + Specfile: "package.json", + Lockfile: "package-lock.json", + PackageDir: "node_modules", + ArgSeparator: []string{"--"}, + + getWorkspaceGlobs: func(rootpath turbopath.AbsoluteSystemPath) ([]string, error) { + pkg, err := fs.ReadPackageJSON(rootpath.UntypedJoin("package.json")) + if err != nil { + return nil, fmt.Errorf("package.json: %w", err) + } + if len(pkg.Workspaces) == 0 { + return nil, fmt.Errorf("package.json: no workspaces found. Turborepo requires npm workspaces to be defined in the root package.json") + } + return pkg.Workspaces, nil + }, + + getWorkspaceIgnores: func(pm PackageManager, rootpath turbopath.AbsoluteSystemPath) ([]string, error) { + // Matches upstream values: + // function: https://github.com/npm/map-workspaces/blob/a46503543982cb35f51cc2d6253d4dcc6bca9b32/lib/index.js#L73 + // key code: https://github.com/npm/map-workspaces/blob/a46503543982cb35f51cc2d6253d4dcc6bca9b32/lib/index.js#L90-L96 + // call site: https://github.com/npm/cli/blob/7a858277171813b37d46a032e49db44c8624f78f/lib/workspaces/get-workspaces.js#L14 + return []string{ + "**/node_modules/**", + }, nil + }, + + Matches: func(manager string, version string) (bool, error) { + return manager == "npm", nil + }, + + detect: func(projectDirectory turbopath.AbsoluteSystemPath, packageManager *PackageManager) (bool, error) { + specfileExists := projectDirectory.UntypedJoin(packageManager.Specfile).FileExists() + lockfileExists := projectDirectory.UntypedJoin(packageManager.Lockfile).FileExists() + + return (specfileExists && lockfileExists), nil + }, + + canPrune: func(cwd turbopath.AbsoluteSystemPath) (bool, error) { + return true, nil + }, + + UnmarshalLockfile: func(_rootPackageJSON *fs.PackageJSON, contents []byte) (lockfile.Lockfile, error) { + return lockfile.DecodeNpmLockfile(contents) + }, +} diff --git a/cli/internal/packagemanager/packagemanager.go b/cli/internal/packagemanager/packagemanager.go new file mode 100644 index 0000000..dc5b966 --- /dev/null +++ b/cli/internal/packagemanager/packagemanager.go @@ -0,0 +1,197 @@ +// Adapted from https://github.com/replit/upm +// Copyright (c) 2019 Neoreason d/b/a Repl.it. All rights reserved. 
+// SPDX-License-Identifier: MIT
+
+package packagemanager
+
+import (
+	"fmt"
+	"path/filepath"
+	"regexp"
+	"strings"
+
+	"github.com/pkg/errors"
+	"github.com/vercel/turbo/cli/internal/fs"
+	"github.com/vercel/turbo/cli/internal/globby"
+	"github.com/vercel/turbo/cli/internal/lockfile"
+	"github.com/vercel/turbo/cli/internal/turbopath"
+	"github.com/vercel/turbo/cli/internal/util"
+)
+
+// PackageManager is an abstraction across package managers
+type PackageManager struct {
+	// The descriptive name of the Package Manager.
+	Name string
+
+	// The unique identifier of the Package Manager.
+	Slug string
+
+	// The command used to invoke the Package Manager.
+	Command string
+
+	// The location of the package spec file used by the Package Manager.
+	Specfile string
+
+	// The location of the package lock file used by the Package Manager.
+	Lockfile string
+
+	// The directory in which package assets are stored by the Package Manager.
+	PackageDir string
+
+	// The location of the file that defines the workspace. Empty if workspaces are defined in package.json.
+	WorkspaceConfigurationPath string
+
+	// The separator that the Package Manager uses to identify arguments that
+	// should be passed through to the underlying script.
+	ArgSeparator []string
+
+	// Return the list of workspace globs
+	getWorkspaceGlobs func(rootpath turbopath.AbsoluteSystemPath) ([]string, error)
+
+	// Return the list of workspace ignore globs
+	getWorkspaceIgnores func(pm PackageManager, rootpath turbopath.AbsoluteSystemPath) ([]string, error)
+
+	// Detect if Turbo knows how to produce a pruned workspace for the project
+	canPrune func(cwd turbopath.AbsoluteSystemPath) (bool, error)
+
+	// Test a manager and version tuple to see if it is the Package Manager.
+	Matches func(manager string, version string) (bool, error)
+
+	// Detect if the project is using the Package Manager by inspecting the system.
+	detect func(projectDirectory turbopath.AbsoluteSystemPath, packageManager *PackageManager) (bool, error)
+
+	// Read a lockfile for a given package manager
+	UnmarshalLockfile func(rootPackageJSON *fs.PackageJSON, contents []byte) (lockfile.Lockfile, error)
+
+	// Prune the given pkgJSON to only include references to the given patches
+	prunePatches func(pkgJSON *fs.PackageJSON, patches []turbopath.AnchoredUnixPath) error
+}
+
+var packageManagers = []PackageManager{
+	nodejsYarn,
+	nodejsBerry,
+	nodejsNpm,
+	nodejsPnpm,
+	nodejsPnpm6,
+}
+
+var (
+	packageManagerPattern = `(npm|pnpm|yarn)@(\d+)\.\d+\.\d+(-.+)?`
+	packageManagerRegex   = regexp.MustCompile(packageManagerPattern)
+)
+
+// ParsePackageManagerString takes a package manager version string and parses it into its constituent components
+func ParsePackageManagerString(packageManager string) (manager string, version string, err error) {
+	match := packageManagerRegex.FindString(packageManager)
+	if len(match) == 0 {
+		return "", "", fmt.Errorf("We could not parse packageManager field in package.json, expected: %s, received: %s", packageManagerPattern, packageManager)
+	}
+
+	return strings.Split(match, "@")[0], strings.Split(match, "@")[1], nil
+}
+
+// GetPackageManager attempts all methods for identifying the package manager in use.
+func GetPackageManager(projectDirectory turbopath.AbsoluteSystemPath, pkg *fs.PackageJSON) (packageManager *PackageManager, err error) { + result, _ := readPackageManager(pkg) + if result != nil { + return result, nil + } + + return detectPackageManager(projectDirectory) +} + +// readPackageManager attempts to read the package manager from the package.json. +func readPackageManager(pkg *fs.PackageJSON) (packageManager *PackageManager, err error) { + if pkg.PackageManager != "" { + manager, version, err := ParsePackageManagerString(pkg.PackageManager) + if err != nil { + return nil, err + } + + for _, packageManager := range packageManagers { + isResponsible, err := packageManager.Matches(manager, version) + if isResponsible && (err == nil) { + return &packageManager, nil + } + } + } + + return nil, errors.New(util.Sprintf("We did not find a package manager specified in your root package.json. Please set the \"packageManager\" property in your root package.json (${UNDERLINE}https://nodejs.org/api/packages.html#packagemanager)${RESET} or run `npx @turbo/codemod add-package-manager` in the root of your monorepo.")) +} + +// detectPackageManager attempts to detect the package manager by inspecting the project directory state. +func detectPackageManager(projectDirectory turbopath.AbsoluteSystemPath) (packageManager *PackageManager, err error) { + for _, packageManager := range packageManagers { + isResponsible, err := packageManager.detect(projectDirectory, &packageManager) + if err != nil { + return nil, err + } + if isResponsible { + return &packageManager, nil + } + } + + return nil, errors.New(util.Sprintf("We did not detect an in-use package manager for your project. Please set the \"packageManager\" property in your root package.json (${UNDERLINE}https://nodejs.org/api/packages.html#packagemanager)${RESET} or run `npx @turbo/codemod add-package-manager` in the root of your monorepo.")) +} + +// GetWorkspaces returns the list of package.json files for the current repository. +func (pm PackageManager) GetWorkspaces(rootpath turbopath.AbsoluteSystemPath) ([]string, error) { + globs, err := pm.getWorkspaceGlobs(rootpath) + if err != nil { + return nil, err + } + + justJsons := make([]string, len(globs)) + for i, space := range globs { + justJsons[i] = filepath.Join(space, "package.json") + } + + ignores, err := pm.getWorkspaceIgnores(pm, rootpath) + if err != nil { + return nil, err + } + + f, err := globby.GlobFiles(rootpath.ToStringDuringMigration(), justJsons, ignores) + if err != nil { + return nil, err + } + + return f, nil +} + +// GetWorkspaceIgnores returns an array of globs not to search for workspaces. +func (pm PackageManager) GetWorkspaceIgnores(rootpath turbopath.AbsoluteSystemPath) ([]string, error) { + return pm.getWorkspaceIgnores(pm, rootpath) +} + +// CanPrune returns if turbo can produce a pruned workspace. 
Can error if fs issues occur +func (pm PackageManager) CanPrune(projectDirectory turbopath.AbsoluteSystemPath) (bool, error) { + if pm.canPrune != nil { + return pm.canPrune(projectDirectory) + } + return false, nil +} + +// ReadLockfile will read the applicable lockfile into memory +func (pm PackageManager) ReadLockfile(projectDirectory turbopath.AbsoluteSystemPath, rootPackageJSON *fs.PackageJSON) (lockfile.Lockfile, error) { + if pm.UnmarshalLockfile == nil { + return nil, nil + } + contents, err := projectDirectory.UntypedJoin(pm.Lockfile).ReadFile() + if err != nil { + return nil, fmt.Errorf("reading %s: %w", pm.Lockfile, err) + } + lf, err := pm.UnmarshalLockfile(rootPackageJSON, contents) + if err != nil { + return nil, errors.Wrapf(err, "error in %v", pm.Lockfile) + } + return lf, nil +} + +// PrunePatchedPackages will alter the provided pkgJSON to only reference the provided patches +func (pm PackageManager) PrunePatchedPackages(pkgJSON *fs.PackageJSON, patches []turbopath.AnchoredUnixPath) error { + if pm.prunePatches != nil { + return pm.prunePatches(pkgJSON, patches) + } + return nil +} diff --git a/cli/internal/packagemanager/packagemanager_test.go b/cli/internal/packagemanager/packagemanager_test.go new file mode 100644 index 0000000..a5dc472 --- /dev/null +++ b/cli/internal/packagemanager/packagemanager_test.go @@ -0,0 +1,411 @@ +package packagemanager + +import ( + "os" + "path/filepath" + "reflect" + "sort" + "testing" + + "github.com/vercel/turbo/cli/internal/fs" + "github.com/vercel/turbo/cli/internal/turbopath" + "gotest.tools/v3/assert" +) + +func TestParsePackageManagerString(t *testing.T) { + tests := []struct { + name string + packageManager string + wantManager string + wantVersion string + wantErr bool + }{ + { + name: "errors with a tag version", + packageManager: "npm@latest", + wantManager: "", + wantVersion: "", + wantErr: true, + }, + { + name: "errors with no version", + packageManager: "npm", + wantManager: "", + wantVersion: "", + wantErr: true, + }, + { + name: "requires fully-qualified semver versions (one digit)", + packageManager: "npm@1", + wantManager: "", + wantVersion: "", + wantErr: true, + }, + { + name: "requires fully-qualified semver versions (two digits)", + packageManager: "npm@1.2", + wantManager: "", + wantVersion: "", + wantErr: true, + }, + { + name: "supports custom labels", + packageManager: "npm@1.2.3-alpha.1", + wantManager: "npm", + wantVersion: "1.2.3-alpha.1", + wantErr: false, + }, + { + name: "only supports specified package managers", + packageManager: "pip@1.2.3", + wantManager: "", + wantVersion: "", + wantErr: true, + }, + { + name: "supports npm", + packageManager: "npm@0.0.1", + wantManager: "npm", + wantVersion: "0.0.1", + wantErr: false, + }, + { + name: "supports pnpm", + packageManager: "pnpm@0.0.1", + wantManager: "pnpm", + wantVersion: "0.0.1", + wantErr: false, + }, + { + name: "supports yarn", + packageManager: "yarn@111.0.1", + wantManager: "yarn", + wantVersion: "111.0.1", + wantErr: false, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + gotManager, gotVersion, err := ParsePackageManagerString(tt.packageManager) + if (err != nil) != tt.wantErr { + t.Errorf("ParsePackageManagerString() error = %v, wantErr %v", err, tt.wantErr) + return + } + if gotManager != tt.wantManager { + t.Errorf("ParsePackageManagerString() got manager = %v, want manager %v", gotManager, tt.wantManager) + } + if gotVersion != tt.wantVersion { + t.Errorf("ParsePackageManagerString() got version = %v, want 
version %v", gotVersion, tt.wantVersion) + } + }) + } +} + +func TestGetPackageManager(t *testing.T) { + cwdRaw, err := os.Getwd() + assert.NilError(t, err, "os.Getwd") + cwd, err := fs.GetCwd(cwdRaw) + assert.NilError(t, err, "GetCwd") + tests := []struct { + name string + projectDirectory turbopath.AbsoluteSystemPath + pkg *fs.PackageJSON + want string + wantErr bool + }{ + { + name: "finds npm from a package manager string", + projectDirectory: cwd, + pkg: &fs.PackageJSON{PackageManager: "npm@1.2.3"}, + want: "nodejs-npm", + wantErr: false, + }, + { + name: "finds pnpm6 from a package manager string", + projectDirectory: cwd, + pkg: &fs.PackageJSON{PackageManager: "pnpm@1.2.3"}, + want: "nodejs-pnpm6", + wantErr: false, + }, + { + name: "finds pnpm from a package manager string", + projectDirectory: cwd, + pkg: &fs.PackageJSON{PackageManager: "pnpm@7.8.9"}, + want: "nodejs-pnpm", + wantErr: false, + }, + { + name: "finds yarn from a package manager string", + projectDirectory: cwd, + pkg: &fs.PackageJSON{PackageManager: "yarn@1.2.3"}, + want: "nodejs-yarn", + wantErr: false, + }, + { + name: "finds berry from a package manager string", + projectDirectory: cwd, + pkg: &fs.PackageJSON{PackageManager: "yarn@2.3.4"}, + want: "nodejs-berry", + wantErr: false, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + gotPackageManager, err := GetPackageManager(tt.projectDirectory, tt.pkg) + if (err != nil) != tt.wantErr { + t.Errorf("GetPackageManager() error = %v, wantErr %v", err, tt.wantErr) + return + } + if gotPackageManager.Name != tt.want { + t.Errorf("GetPackageManager() = %v, want %v", gotPackageManager.Name, tt.want) + } + }) + } +} + +func Test_readPackageManager(t *testing.T) { + tests := []struct { + name string + pkg *fs.PackageJSON + want string + wantErr bool + }{ + { + name: "finds npm from a package manager string", + pkg: &fs.PackageJSON{PackageManager: "npm@1.2.3"}, + want: "nodejs-npm", + wantErr: false, + }, + { + name: "finds pnpm6 from a package manager string", + pkg: &fs.PackageJSON{PackageManager: "pnpm@1.2.3"}, + want: "nodejs-pnpm6", + wantErr: false, + }, + { + name: "finds pnpm from a package manager string", + pkg: &fs.PackageJSON{PackageManager: "pnpm@7.8.9"}, + want: "nodejs-pnpm", + wantErr: false, + }, + { + name: "finds yarn from a package manager string", + pkg: &fs.PackageJSON{PackageManager: "yarn@1.2.3"}, + want: "nodejs-yarn", + wantErr: false, + }, + { + name: "finds berry from a package manager string", + pkg: &fs.PackageJSON{PackageManager: "yarn@2.3.4"}, + want: "nodejs-berry", + wantErr: false, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + gotPackageManager, err := readPackageManager(tt.pkg) + if (err != nil) != tt.wantErr { + t.Errorf("readPackageManager() error = %v, wantErr %v", err, tt.wantErr) + return + } + if gotPackageManager.Name != tt.want { + t.Errorf("readPackageManager() = %v, want %v", gotPackageManager.Name, tt.want) + } + }) + } +} + +func Test_GetWorkspaces(t *testing.T) { + type test struct { + name string + pm PackageManager + rootPath turbopath.AbsoluteSystemPath + want []string + wantErr bool + } + + cwd, _ := os.Getwd() + + repoRoot, err := fs.GetCwd(cwd) + assert.NilError(t, err, "GetCwd") + rootPath := map[string]turbopath.AbsoluteSystemPath{ + "nodejs-npm": repoRoot.UntypedJoin("../../../examples/with-yarn"), + "nodejs-berry": repoRoot.UntypedJoin("../../../examples/with-yarn"), + "nodejs-yarn": repoRoot.UntypedJoin("../../../examples/with-yarn"), + "nodejs-pnpm": 
repoRoot.UntypedJoin("../../../examples/basic"), + "nodejs-pnpm6": repoRoot.UntypedJoin("../../../examples/basic"), + } + + want := map[string][]string{ + "nodejs-npm": { + filepath.ToSlash(filepath.Join(cwd, "../../../examples/with-yarn/apps/docs/package.json")), + filepath.ToSlash(filepath.Join(cwd, "../../../examples/with-yarn/apps/web/package.json")), + filepath.ToSlash(filepath.Join(cwd, "../../../examples/with-yarn/packages/eslint-config-custom/package.json")), + filepath.ToSlash(filepath.Join(cwd, "../../../examples/with-yarn/packages/tsconfig/package.json")), + filepath.ToSlash(filepath.Join(cwd, "../../../examples/with-yarn/packages/ui/package.json")), + }, + "nodejs-berry": { + filepath.ToSlash(filepath.Join(cwd, "../../../examples/with-yarn/apps/docs/package.json")), + filepath.ToSlash(filepath.Join(cwd, "../../../examples/with-yarn/apps/web/package.json")), + filepath.ToSlash(filepath.Join(cwd, "../../../examples/with-yarn/packages/eslint-config-custom/package.json")), + filepath.ToSlash(filepath.Join(cwd, "../../../examples/with-yarn/packages/tsconfig/package.json")), + filepath.ToSlash(filepath.Join(cwd, "../../../examples/with-yarn/packages/ui/package.json")), + }, + "nodejs-yarn": { + filepath.ToSlash(filepath.Join(cwd, "../../../examples/with-yarn/apps/docs/package.json")), + filepath.ToSlash(filepath.Join(cwd, "../../../examples/with-yarn/apps/web/package.json")), + filepath.ToSlash(filepath.Join(cwd, "../../../examples/with-yarn/packages/eslint-config-custom/package.json")), + filepath.ToSlash(filepath.Join(cwd, "../../../examples/with-yarn/packages/tsconfig/package.json")), + filepath.ToSlash(filepath.Join(cwd, "../../../examples/with-yarn/packages/ui/package.json")), + }, + "nodejs-pnpm": { + filepath.ToSlash(filepath.Join(cwd, "../../../examples/basic/apps/docs/package.json")), + filepath.ToSlash(filepath.Join(cwd, "../../../examples/basic/apps/web/package.json")), + filepath.ToSlash(filepath.Join(cwd, "../../../examples/basic/packages/eslint-config-custom/package.json")), + filepath.ToSlash(filepath.Join(cwd, "../../../examples/basic/packages/tsconfig/package.json")), + filepath.ToSlash(filepath.Join(cwd, "../../../examples/basic/packages/ui/package.json")), + }, + "nodejs-pnpm6": { + filepath.ToSlash(filepath.Join(cwd, "../../../examples/basic/apps/docs/package.json")), + filepath.ToSlash(filepath.Join(cwd, "../../../examples/basic/apps/web/package.json")), + filepath.ToSlash(filepath.Join(cwd, "../../../examples/basic/packages/eslint-config-custom/package.json")), + filepath.ToSlash(filepath.Join(cwd, "../../../examples/basic/packages/tsconfig/package.json")), + filepath.ToSlash(filepath.Join(cwd, "../../../examples/basic/packages/ui/package.json")), + }, + } + + tests := make([]test, len(packageManagers)) + for i, packageManager := range packageManagers { + tests[i] = test{ + name: packageManager.Name, + pm: packageManager, + rootPath: rootPath[packageManager.Name], + want: want[packageManager.Name], + wantErr: false, + } + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + gotWorkspaces, err := tt.pm.GetWorkspaces(tt.rootPath) + + gotToSlash := make([]string, len(gotWorkspaces)) + for index, workspace := range gotWorkspaces { + gotToSlash[index] = filepath.ToSlash(workspace) + } + + if (err != nil) != tt.wantErr { + t.Errorf("GetWorkspaces() error = %v, wantErr %v", err, tt.wantErr) + return + } + sort.Strings(gotToSlash) + if !reflect.DeepEqual(gotToSlash, tt.want) { + t.Errorf("GetWorkspaces() = %v, want %v", gotToSlash, tt.want) + } + }) 
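+	// Note: GetWorkspaces joins each workspace glob with "package.json"
+	// before globbing, so a glob like "apps/*" is matched as
+	// "apps/*/package.json" against the example repos above.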
+ } +} + +func Test_GetWorkspaceIgnores(t *testing.T) { + type test struct { + name string + pm PackageManager + rootPath turbopath.AbsoluteSystemPath + want []string + wantErr bool + } + + cwdRaw, err := os.Getwd() + assert.NilError(t, err, "os.Getwd") + cwd, err := fs.GetCwd(cwdRaw) + assert.NilError(t, err, "GetCwd") + want := map[string][]string{ + "nodejs-npm": {"**/node_modules/**"}, + "nodejs-berry": {"**/node_modules", "**/.git", "**/.yarn"}, + "nodejs-yarn": {"apps/*/node_modules/**", "packages/*/node_modules/**"}, + "nodejs-pnpm": {"**/node_modules/**", "**/bower_components/**", "packages/skip"}, + "nodejs-pnpm6": {"**/node_modules/**", "**/bower_components/**", "packages/skip"}, + } + + tests := make([]test, len(packageManagers)) + for i, packageManager := range packageManagers { + tests[i] = test{ + name: packageManager.Name, + pm: packageManager, + rootPath: cwd.UntypedJoin("fixtures"), + want: want[packageManager.Name], + wantErr: false, + } + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + gotWorkspaceIgnores, err := tt.pm.GetWorkspaceIgnores(tt.rootPath) + + gotToSlash := make([]string, len(gotWorkspaceIgnores)) + for index, ignore := range gotWorkspaceIgnores { + gotToSlash[index] = filepath.ToSlash(ignore) + } + + if (err != nil) != tt.wantErr { + t.Errorf("GetWorkspaceIgnores() error = %v, wantErr %v", err, tt.wantErr) + return + } + if !reflect.DeepEqual(gotToSlash, tt.want) { + t.Errorf("GetWorkspaceIgnores() = %v, want %v", gotToSlash, tt.want) + } + }) + } +} + +func Test_CanPrune(t *testing.T) { + type test struct { + name string + pm PackageManager + rootPath turbopath.AbsoluteSystemPath + want bool + wantErr bool + } + + type want struct { + want bool + wantErr bool + } + + cwdRaw, err := os.Getwd() + assert.NilError(t, err, "os.Getwd") + cwd, err := fs.GetCwd(cwdRaw) + assert.NilError(t, err, "GetCwd") + wants := map[string]want{ + "nodejs-npm": {true, false}, + "nodejs-berry": {false, true}, + "nodejs-yarn": {true, false}, + "nodejs-pnpm": {true, false}, + "nodejs-pnpm6": {true, false}, + } + + tests := make([]test, len(packageManagers)) + for i, packageManager := range packageManagers { + tests[i] = test{ + name: packageManager.Name, + pm: packageManager, + rootPath: cwd.UntypedJoin("../../../examples/with-yarn"), + want: wants[packageManager.Name].want, + wantErr: wants[packageManager.Name].wantErr, + } + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + canPrune, err := tt.pm.CanPrune(tt.rootPath) + + if (err != nil) != tt.wantErr { + t.Errorf("CanPrune() error = %v, wantErr %v", err, tt.wantErr) + return + } + if canPrune != tt.want { + t.Errorf("CanPrune() = %v, want %v", canPrune, tt.want) + } + }) + } +} diff --git a/cli/internal/packagemanager/pnpm.go b/cli/internal/packagemanager/pnpm.go new file mode 100644 index 0000000..e65a4dc --- /dev/null +++ b/cli/internal/packagemanager/pnpm.go @@ -0,0 +1,168 @@ +package packagemanager + +import ( + "fmt" + "strings" + + "github.com/Masterminds/semver" + "github.com/vercel/turbo/cli/internal/fs" + "github.com/vercel/turbo/cli/internal/lockfile" + "github.com/vercel/turbo/cli/internal/turbopath" + "github.com/vercel/turbo/cli/internal/yaml" +) + +// PnpmWorkspaces is a representation of workspace package globs found +// in pnpm-workspace.yaml +type PnpmWorkspaces struct { + Packages []string `yaml:"packages,omitempty"` +} + +func readPnpmWorkspacePackages(workspaceFile turbopath.AbsoluteSystemPath) ([]string, error) { + bytes, err := workspaceFile.ReadFile() + if 
err != nil { + return nil, fmt.Errorf("%v: %w", workspaceFile, err) + } + var pnpmWorkspaces PnpmWorkspaces + if err := yaml.Unmarshal(bytes, &pnpmWorkspaces); err != nil { + return nil, fmt.Errorf("%v: %w", workspaceFile, err) + } + return pnpmWorkspaces.Packages, nil +} + +func getPnpmWorkspaceGlobs(rootpath turbopath.AbsoluteSystemPath) ([]string, error) { + pkgGlobs, err := readPnpmWorkspacePackages(rootpath.UntypedJoin("pnpm-workspace.yaml")) + if err != nil { + return nil, err + } + + if len(pkgGlobs) == 0 { + return nil, fmt.Errorf("pnpm-workspace.yaml: no packages found. Turborepo requires pnpm workspaces and thus packages to be defined in the root pnpm-workspace.yaml") + } + + filteredPkgGlobs := []string{} + for _, pkgGlob := range pkgGlobs { + if !strings.HasPrefix(pkgGlob, "!") { + filteredPkgGlobs = append(filteredPkgGlobs, pkgGlob) + } + } + return filteredPkgGlobs, nil +} + +func getPnpmWorkspaceIgnores(pm PackageManager, rootpath turbopath.AbsoluteSystemPath) ([]string, error) { + // Matches upstream values: + // function: https://github.com/pnpm/pnpm/blob/d99daa902442e0c8ab945143ebaf5cdc691a91eb/packages/find-packages/src/index.ts#L27 + // key code: https://github.com/pnpm/pnpm/blob/d99daa902442e0c8ab945143ebaf5cdc691a91eb/packages/find-packages/src/index.ts#L30 + // call site: https://github.com/pnpm/pnpm/blob/d99daa902442e0c8ab945143ebaf5cdc691a91eb/packages/find-workspace-packages/src/index.ts#L32-L39 + ignores := []string{ + "**/node_modules/**", + "**/bower_components/**", + } + pkgGlobs, err := readPnpmWorkspacePackages(rootpath.UntypedJoin("pnpm-workspace.yaml")) + if err != nil { + return nil, err + } + for _, pkgGlob := range pkgGlobs { + if strings.HasPrefix(pkgGlob, "!") { + ignores = append(ignores, pkgGlob[1:]) + } + } + return ignores, nil +} + +var nodejsPnpm = PackageManager{ + Name: "nodejs-pnpm", + Slug: "pnpm", + Command: "pnpm", + Specfile: "package.json", + Lockfile: "pnpm-lock.yaml", + PackageDir: "node_modules", + // pnpm v7+ changed their handling of '--'. We no longer need to pass it to pass args to + // the script being run, and in fact doing so will cause the '--' to be passed through verbatim, + // potentially breaking scripts that aren't expecting it. + // We are allowed to use nil here because ArgSeparator already has a type, so it's a typed nil, + // This could just as easily be []string{}, but the style guide says to prefer + // nil for empty slices. 
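+	// As an illustration (hypothetical invocation): with pnpm@7,
+	// `turbo run build -- --serve` already forwards `--serve` to the
+	// script, and a turbo-inserted "--" would itself be forwarded to the
+	// script verbatim.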
+ ArgSeparator: nil, + WorkspaceConfigurationPath: "pnpm-workspace.yaml", + + getWorkspaceGlobs: getPnpmWorkspaceGlobs, + + getWorkspaceIgnores: getPnpmWorkspaceIgnores, + + Matches: func(manager string, version string) (bool, error) { + if manager != "pnpm" { + return false, nil + } + + v, err := semver.NewVersion(version) + if err != nil { + return false, fmt.Errorf("could not parse pnpm version: %w", err) + } + c, err := semver.NewConstraint(">=7.0.0") + if err != nil { + return false, fmt.Errorf("could not create constraint: %w", err) + } + + return c.Check(v), nil + }, + + detect: func(projectDirectory turbopath.AbsoluteSystemPath, packageManager *PackageManager) (bool, error) { + specfileExists := projectDirectory.UntypedJoin(packageManager.Specfile).FileExists() + lockfileExists := projectDirectory.UntypedJoin(packageManager.Lockfile).FileExists() + + return (specfileExists && lockfileExists), nil + }, + + canPrune: func(cwd turbopath.AbsoluteSystemPath) (bool, error) { + return true, nil + }, + + UnmarshalLockfile: func(_rootPackageJSON *fs.PackageJSON, contents []byte) (lockfile.Lockfile, error) { + return lockfile.DecodePnpmLockfile(contents) + }, + + prunePatches: func(pkgJSON *fs.PackageJSON, patches []turbopath.AnchoredUnixPath) error { + return pnpmPrunePatches(pkgJSON, patches) + }, +} + +func pnpmPrunePatches(pkgJSON *fs.PackageJSON, patches []turbopath.AnchoredUnixPath) error { + pkgJSON.Mu.Lock() + defer pkgJSON.Mu.Unlock() + + keysToDelete := []string{} + pnpmConfig, ok := pkgJSON.RawJSON["pnpm"].(map[string]interface{}) + if !ok { + return fmt.Errorf("Invalid structure for pnpm field in package.json") + } + patchedDependencies, ok := pnpmConfig["patchedDependencies"].(map[string]interface{}) + if !ok { + return fmt.Errorf("Invalid structure for patchedDependencies field in package.json") + } + + for dependency, untypedPatch := range patchedDependencies { + patch, ok := untypedPatch.(string) + if !ok { + return fmt.Errorf("Expected only strings in patchedDependencies. 
Got %v", untypedPatch) + } + + inPatches := false + + for _, wantedPatch := range patches { + if wantedPatch.ToString() == patch { + inPatches = true + break + } + } + + if !inPatches { + keysToDelete = append(keysToDelete, dependency) + } + } + + for _, key := range keysToDelete { + delete(patchedDependencies, key) + } + + return nil +} diff --git a/cli/internal/packagemanager/pnpm6.go b/cli/internal/packagemanager/pnpm6.go new file mode 100644 index 0000000..6039966 --- /dev/null +++ b/cli/internal/packagemanager/pnpm6.go @@ -0,0 +1,63 @@ +package packagemanager + +import ( + "fmt" + + "github.com/Masterminds/semver" + "github.com/vercel/turbo/cli/internal/fs" + "github.com/vercel/turbo/cli/internal/lockfile" + "github.com/vercel/turbo/cli/internal/turbopath" +) + +// Pnpm6Workspaces is a representation of workspace package globs found +// in pnpm-workspace.yaml +type Pnpm6Workspaces struct { + Packages []string `yaml:"packages,omitempty"` +} + +var nodejsPnpm6 = PackageManager{ + Name: "nodejs-pnpm6", + Slug: "pnpm", + Command: "pnpm", + Specfile: "package.json", + Lockfile: "pnpm-lock.yaml", + PackageDir: "node_modules", + ArgSeparator: []string{"--"}, + WorkspaceConfigurationPath: "pnpm-workspace.yaml", + + getWorkspaceGlobs: getPnpmWorkspaceGlobs, + + getWorkspaceIgnores: getPnpmWorkspaceIgnores, + + Matches: func(manager string, version string) (bool, error) { + if manager != "pnpm" { + return false, nil + } + + v, err := semver.NewVersion(version) + if err != nil { + return false, fmt.Errorf("could not parse pnpm version: %w", err) + } + c, err := semver.NewConstraint("<7.0.0") + if err != nil { + return false, fmt.Errorf("could not create constraint: %w", err) + } + + return c.Check(v), nil + }, + + detect: func(projectDirectory turbopath.AbsoluteSystemPath, packageManager *PackageManager) (bool, error) { + specfileExists := projectDirectory.UntypedJoin(packageManager.Specfile).FileExists() + lockfileExists := projectDirectory.UntypedJoin(packageManager.Lockfile).FileExists() + + return (specfileExists && lockfileExists), nil + }, + + canPrune: func(cwd turbopath.AbsoluteSystemPath) (bool, error) { + return true, nil + }, + + UnmarshalLockfile: func(_rootPackageJSON *fs.PackageJSON, contents []byte) (lockfile.Lockfile, error) { + return lockfile.DecodePnpmLockfile(contents) + }, +} diff --git a/cli/internal/packagemanager/pnpm_test.go b/cli/internal/packagemanager/pnpm_test.go new file mode 100644 index 0000000..c05bc43 --- /dev/null +++ b/cli/internal/packagemanager/pnpm_test.go @@ -0,0 +1,57 @@ +package packagemanager + +import ( + "os" + "testing" + + "github.com/vercel/turbo/cli/internal/fs" + "github.com/vercel/turbo/cli/internal/turbopath" + "gotest.tools/v3/assert" +) + +func pnpmPatchesSection(t *testing.T, pkgJSON *fs.PackageJSON) map[string]interface{} { + t.Helper() + pnpmSection, ok := pkgJSON.RawJSON["pnpm"].(map[string]interface{}) + assert.Assert(t, ok) + patchesSection, ok := pnpmSection["patchedDependencies"].(map[string]interface{}) + assert.Assert(t, ok) + return patchesSection +} + +func getPnpmPackageJSON(t *testing.T) *fs.PackageJSON { + t.Helper() + rawCwd, err := os.Getwd() + assert.NilError(t, err) + cwd, err := fs.CheckedToAbsoluteSystemPath(rawCwd) + assert.NilError(t, err) + pkgJSONPath := cwd.Join("fixtures", "pnpm-patches.json") + pkgJSON, err := fs.ReadPackageJSON(pkgJSONPath) + assert.NilError(t, err) + return pkgJSON +} + +func Test_PnpmPrunePatches_KeepsNecessary(t *testing.T) { + pkgJSON := getPnpmPackageJSON(t) + initialPatches := 
pnpmPatchesSection(t, pkgJSON) + + assert.DeepEqual(t, initialPatches, map[string]interface{}{"is-odd@3.0.1": "patches/is-odd@3.0.1.patch"}) + + err := pnpmPrunePatches(pkgJSON, []turbopath.AnchoredUnixPath{turbopath.AnchoredUnixPath("patches/is-odd@3.0.1.patch")}) + assert.NilError(t, err) + + newPatches := pnpmPatchesSection(t, pkgJSON) + assert.DeepEqual(t, newPatches, map[string]interface{}{"is-odd@3.0.1": "patches/is-odd@3.0.1.patch"}) +} + +func Test_PnpmPrunePatches_RemovesExtra(t *testing.T) { + pkgJSON := getPnpmPackageJSON(t) + initialPatches := pnpmPatchesSection(t, pkgJSON) + + assert.DeepEqual(t, initialPatches, map[string]interface{}{"is-odd@3.0.1": "patches/is-odd@3.0.1.patch"}) + + err := pnpmPrunePatches(pkgJSON, nil) + assert.NilError(t, err) + + newPatches := pnpmPatchesSection(t, pkgJSON) + assert.DeepEqual(t, newPatches, map[string]interface{}{}) +} diff --git a/cli/internal/packagemanager/yarn.go b/cli/internal/packagemanager/yarn.go new file mode 100644 index 0000000..8779c5f --- /dev/null +++ b/cli/internal/packagemanager/yarn.go @@ -0,0 +1,116 @@ +package packagemanager + +import ( + "errors" + "fmt" + "os/exec" + "path/filepath" + "strings" + + "github.com/Masterminds/semver" + "github.com/vercel/turbo/cli/internal/fs" + "github.com/vercel/turbo/cli/internal/lockfile" + "github.com/vercel/turbo/cli/internal/turbopath" +) + +// NoWorkspacesFoundError is a custom error used so that upstream implementations can switch on it +type NoWorkspacesFoundError struct{} + +func (e *NoWorkspacesFoundError) Error() string { + return "package.json: no workspaces found. Turborepo requires Yarn workspaces to be defined in the root package.json" +} + +var nodejsYarn = PackageManager{ + Name: "nodejs-yarn", + Slug: "yarn", + Command: "yarn", + Specfile: "package.json", + Lockfile: "yarn.lock", + PackageDir: "node_modules", + ArgSeparator: []string{"--"}, + + getWorkspaceGlobs: func(rootpath turbopath.AbsoluteSystemPath) ([]string, error) { + pkg, err := fs.ReadPackageJSON(rootpath.UntypedJoin("package.json")) + if err != nil { + return nil, fmt.Errorf("package.json: %w", err) + } + if len(pkg.Workspaces) == 0 { + return nil, &NoWorkspacesFoundError{} + } + return pkg.Workspaces, nil + }, + + getWorkspaceIgnores: func(pm PackageManager, rootpath turbopath.AbsoluteSystemPath) ([]string, error) { + // function: https://github.com/yarnpkg/yarn/blob/3119382885ea373d3c13d6a846de743eca8c914b/src/config.js#L799 + + // Yarn is unique in ignore patterns handling. + // The only time it does globbing is for package.json or yarn.json and it scopes the search to each workspace. + // For example: `apps/*/node_modules/**/+(package.json|yarn.json)` + // The `extglob` `+(package.json|yarn.json)` (from micromatch) after node_modules/** is redundant. 
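+		// For example, workspace globs ["apps/*", "packages/*"] yield the
+		// ignore globs ["apps/*/node_modules/**", "packages/*/node_modules/**"]
+		// via the join below.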
+ + globs, err := pm.getWorkspaceGlobs(rootpath) + if err != nil { + // In case of a non-monorepo, the workspaces field is empty and only node_modules in the root should be ignored + var e *NoWorkspacesFoundError + if errors.As(err, &e) { + return []string{"node_modules/**"}, nil + } + + return nil, err + } + + ignores := make([]string, len(globs)) + + for i, glob := range globs { + ignores[i] = filepath.Join(glob, "/node_modules/**") + } + + return ignores, nil + }, + + canPrune: func(cwd turbopath.AbsoluteSystemPath) (bool, error) { + return true, nil + }, + + // Versions older than 2.0 are yarn, after that they become berry + Matches: func(manager string, version string) (bool, error) { + if manager != "yarn" { + return false, nil + } + + v, err := semver.NewVersion(version) + if err != nil { + return false, fmt.Errorf("could not parse yarn version: %w", err) + } + c, err := semver.NewConstraint("<2.0.0-0") + if err != nil { + return false, fmt.Errorf("could not create constraint: %w", err) + } + + return c.Check(v), nil + }, + + // Detect for yarn needs to identify which version of yarn is running on the system. + detect: func(projectDirectory turbopath.AbsoluteSystemPath, packageManager *PackageManager) (bool, error) { + specfileExists := projectDirectory.UntypedJoin(packageManager.Specfile).FileExists() + lockfileExists := projectDirectory.UntypedJoin(packageManager.Lockfile).FileExists() + + // Short-circuit, definitely not Yarn. + if !specfileExists || !lockfileExists { + return false, nil + } + + cmd := exec.Command("yarn", "--version") + cmd.Dir = projectDirectory.ToString() + out, err := cmd.Output() + if err != nil { + return false, fmt.Errorf("could not detect yarn version: %w", err) + } + + return packageManager.Matches(packageManager.Slug, strings.TrimSpace(string(out))) + }, + + UnmarshalLockfile: func(_rootPackageJSON *fs.PackageJSON, contents []byte) (lockfile.Lockfile, error) { + return lockfile.DecodeYarnLockfile(contents) + }, +} diff --git a/cli/internal/process/child.go b/cli/internal/process/child.go new file mode 100644 index 0000000..1c3e6e7 --- /dev/null +++ b/cli/internal/process/child.go @@ -0,0 +1,406 @@ +package process + +/** + * Code in this file is based on the source code at + * https://github.com/hashicorp/consul-template/tree/3ea7d99ad8eff17897e0d63dac86d74770170bb8/child/child.go + * + * Major changes include removing the ability to restart a child process, + * requiring a fully-formed exec.Cmd to be passed in, and including cmd.Dir + * in the description of a child process. + */ + +import ( + "errors" + "fmt" + "math/rand" + "os" + "os/exec" + "strings" + "sync" + "syscall" + "time" + + "github.com/hashicorp/go-hclog" +) + +func init() { + // Seed the default rand Source with current time to produce better random + // numbers used with splay + rand.Seed(time.Now().UnixNano()) +} + +var ( + // ErrMissingCommand is the error returned when no command is specified + // to run. + ErrMissingCommand = errors.New("missing command") + + // ExitCodeOK is the default OK exit code. + ExitCodeOK = 0 + + // ExitCodeError is the default error code returned when the child exits with + // an error without a more specific code. + ExitCodeError = 127 +) + +// Child is a wrapper around a child process which can be used to send signals +// and manage the processes' lifecycle. +type Child struct { + sync.RWMutex + + timeout time.Duration + + killSignal os.Signal + killTimeout time.Duration + + splay time.Duration + + // cmd is the actual child process under management. 
+ cmd *exec.Cmd + + // exitCh is the channel where the processes exit will be returned. + exitCh chan int + + // stopLock is the mutex to lock when stopping. stopCh is the circuit breaker + // to force-terminate any waiting splays to kill the process now. stopped is + // a boolean that tells us if we have previously been stopped. + stopLock sync.RWMutex + stopCh chan struct{} + stopped bool + + // whether to set process group id or not (default on) + setpgid bool + + Label string + + logger hclog.Logger +} + +// NewInput is input to the NewChild function. +type NewInput struct { + // Cmd is the unstarted, preconfigured command to run + Cmd *exec.Cmd + + // Timeout is the maximum amount of time to allow the command to execute. If + // set to 0, the command is permitted to run infinitely. + Timeout time.Duration + + // KillSignal is the signal to send to gracefully kill this process. This + // value may be nil. + KillSignal os.Signal + + // KillTimeout is the amount of time to wait for the process to gracefully + // terminate before force-killing. + KillTimeout time.Duration + + // Splay is the maximum random amount of time to wait before sending signals. + // This option helps reduce the thundering herd problem by effectively + // sleeping for a random amount of time before sending the signal. This + // prevents multiple processes from all signaling at the same time. This value + // may be zero (which disables the splay entirely). + Splay time.Duration + + // Logger receives debug log lines about the process state and transitions + Logger hclog.Logger +} + +// New creates a new child process for management with high-level APIs for +// sending signals to the child process, restarting the child process, and +// gracefully terminating the child process. +func newChild(i NewInput) (*Child, error) { + // exec.Command prepends the command to be run to the arguments list, so + // we only need the arguments here, it will include the command itself. + label := fmt.Sprintf("(%v) %v", i.Cmd.Dir, strings.Join(i.Cmd.Args, " ")) + child := &Child{ + cmd: i.Cmd, + timeout: i.Timeout, + killSignal: i.KillSignal, + killTimeout: i.KillTimeout, + splay: i.Splay, + stopCh: make(chan struct{}, 1), + setpgid: true, + Label: label, + logger: i.Logger.Named(label), + } + + return child, nil +} + +// ExitCh returns the current exit channel for this child process. This channel +// may change if the process is restarted, so implementers must not cache this +// value. +func (c *Child) ExitCh() <-chan int { + c.RLock() + defer c.RUnlock() + return c.exitCh +} + +// Pid returns the pid of the child process. If no child process exists, 0 is +// returned. +func (c *Child) Pid() int { + c.RLock() + defer c.RUnlock() + return c.pid() +} + +// Command returns the human-formatted command with arguments. +func (c *Child) Command() string { + return c.Label +} + +// Start starts and begins execution of the child process. A buffered channel +// is returned which is where the command's exit code will be returned upon +// exit. Any errors that occur prior to starting the command will be returned +// as the second error argument, but any errors returned by the command after +// execution will be returned as a non-zero value over the exit code channel. +func (c *Child) Start() error { + // log.Printf("[INFO] (child) spawning: %s", c.Command()) + c.Lock() + defer c.Unlock() + return c.start() +} + +// Signal sends the signal to the child process, returning any errors that +// occur. 
+func (c *Child) Signal(s os.Signal) error { + c.logger.Debug("receiving signal %q", s.String()) + c.RLock() + defer c.RUnlock() + return c.signal(s) +} + +// Kill sends the kill signal to the child process and waits for successful +// termination. If no kill signal is defined, the process is killed with the +// most aggressive kill signal. If the process does not gracefully stop within +// the provided KillTimeout, the process is force-killed. If a splay was +// provided, this function will sleep for a random period of time between 0 and +// the provided splay value to reduce the thundering herd problem. This function +// does not return any errors because it guarantees the process will be dead by +// the return of the function call. +func (c *Child) Kill() { + c.logger.Debug("killing process") + c.Lock() + defer c.Unlock() + c.kill(false) +} + +// Stop behaves almost identical to Kill except it suppresses future processes +// from being started by this child and it prevents the killing of the child +// process from sending its value back up the exit channel. This is useful +// when doing a graceful shutdown of an application. +func (c *Child) Stop() { + c.internalStop(false) +} + +// StopImmediately behaves almost identical to Stop except it does not wait +// for any random splay if configured. This is used for performing a fast +// shutdown of consul-template and its children when a kill signal is received. +func (c *Child) StopImmediately() { + c.internalStop(true) +} + +func (c *Child) internalStop(immediately bool) { + c.Lock() + defer c.Unlock() + + c.stopLock.Lock() + defer c.stopLock.Unlock() + if c.stopped { + return + } + c.kill(immediately) + close(c.stopCh) + c.stopped = true +} + +func (c *Child) start() error { + setSetpgid(c.cmd, c.setpgid) + if err := c.cmd.Start(); err != nil { + return err + } + + // Create a new exitCh so that previously invoked commands (if any) don't + // cause us to exit, and start a goroutine to wait for that process to end. + exitCh := make(chan int, 1) + go func() { + var code int + // It's possible that kill is called before we even + // manage to get here. Make sure we still have a valid + // cmd before waiting on it. + c.RLock() + var cmd = c.cmd + c.RUnlock() + var err error + if cmd != nil { + err = cmd.Wait() + } + if err == nil { + code = ExitCodeOK + } else { + code = ExitCodeError + if exiterr, ok := err.(*exec.ExitError); ok { + if status, ok := exiterr.Sys().(syscall.WaitStatus); ok { + code = status.ExitStatus() + } + } + } + + // If the child is in the process of killing, do not send a response back + // down the exit channel. + c.stopLock.RLock() + defer c.stopLock.RUnlock() + if !c.stopped { + select { + case <-c.stopCh: + case exitCh <- code: + } + } + + close(exitCh) + }() + + c.exitCh = exitCh + + // If a timeout was given, start the timer to wait for the child to exit + if c.timeout != 0 { + select { + case code := <-exitCh: + if code != 0 { + return fmt.Errorf( + "command exited with a non-zero exit status:\n"+ + "\n"+ + " %s\n"+ + "\n"+ + "This is assumed to be a failure. Please ensure the command\n"+ + "exits with a zero exit status.", + c.Command(), + ) + } + case <-time.After(c.timeout): + // Force-kill the process + c.stopLock.Lock() + defer c.stopLock.Unlock() + if c.cmd != nil && c.cmd.Process != nil { + c.cmd.Process.Kill() + } + + return fmt.Errorf( + "command did not exit within %q:\n"+ + "\n"+ + " %s\n"+ + "\n"+ + "Commands must exit in a timely manner in order for processing to\n"+ + "continue. 
Consider using a process supervisor or utilizing the\n"+ + "built-in exec mode instead.", + c.timeout, + c.Command(), + ) + } + } + + return nil +} + +func (c *Child) pid() int { + if !c.running() { + return 0 + } + return c.cmd.Process.Pid +} + +func (c *Child) signal(s os.Signal) error { + if !c.running() { + return nil + } + + sig, ok := s.(syscall.Signal) + if !ok { + return fmt.Errorf("bad signal: %s", s) + } + pid := c.cmd.Process.Pid + if c.setpgid { + // kill takes negative pid to indicate that you want to use gpid + pid = -(pid) + } + // cross platform way to signal process/process group + p, err := os.FindProcess(pid) + if err != nil { + return err + } + return p.Signal(sig) +} + +// kill sends the signal to kill the process using the configured signal +// if set, else the default system signal +func (c *Child) kill(immediately bool) { + + if !c.running() { + c.logger.Debug("Kill() called but process dead; not waiting for splay.") + return + } else if immediately { + c.logger.Debug("Kill() called but performing immediate shutdown; not waiting for splay.") + } else { + c.logger.Debug("Kill(%v) called", immediately) + select { + case <-c.stopCh: + case <-c.randomSplay(): + } + } + + var exited bool + defer func() { + if !exited { + c.logger.Debug("PKill") + c.cmd.Process.Kill() + } + c.cmd = nil + }() + + if c.killSignal == nil { + return + } + + if err := c.signal(c.killSignal); err != nil { + c.logger.Debug("Kill failed: %s", err) + if processNotFoundErr(err) { + exited = true // checked in defer + } + return + } + + killCh := make(chan struct{}, 1) + go func() { + defer close(killCh) + c.cmd.Process.Wait() + }() + + select { + case <-c.stopCh: + case <-killCh: + exited = true + case <-time.After(c.killTimeout): + c.logger.Debug("timeout") + } +} + +func (c *Child) running() bool { + select { + case <-c.exitCh: + return false + default: + } + return c.cmd != nil && c.cmd.Process != nil +} + +func (c *Child) randomSplay() <-chan time.Time { + if c.splay == 0 { + return time.After(0) + } + + ns := c.splay.Nanoseconds() + offset := rand.Int63n(ns) + t := time.Duration(offset) + + c.logger.Debug("waiting %.2fs for random splay", t.Seconds()) + + return time.After(t) +} diff --git a/cli/internal/process/child_nix_test.go b/cli/internal/process/child_nix_test.go new file mode 100644 index 0000000..7311d18 --- /dev/null +++ b/cli/internal/process/child_nix_test.go @@ -0,0 +1,190 @@ +//go:build !windows +// +build !windows + +package process + +/** + * Code in this file is based on the source code at + * https://github.com/hashicorp/consul-template/tree/3ea7d99ad8eff17897e0d63dac86d74770170bb8/child/child_test.go + * + * Tests in this file use signals or pgid features not available on windows + */ + +import ( + "os/exec" + "syscall" + "testing" + "time" + + "github.com/hashicorp/go-gatedio" +) + +func TestSignal(t *testing.T) { + + c := testChild(t) + cmd := exec.Command("sh", "-c", "trap 'echo one; exit' USR1; while true; do sleep 0.2; done") + c.cmd = cmd + + out := gatedio.NewByteBuffer() + c.cmd.Stdout = out + + if err := c.Start(); err != nil { + t.Fatal(err) + } + defer c.Stop() + + // For some reason bash doesn't start immediately + time.Sleep(fileWaitSleepDelay) + + if err := c.Signal(syscall.SIGUSR1); err != nil { + t.Fatal(err) + } + + // Give time for the file to flush + time.Sleep(fileWaitSleepDelay) + + expected := "one\n" + if out.String() != expected { + t.Errorf("expected %q to be %q", out.String(), expected) + } +} + +func TestStop_childAlreadyDead(t *testing.T) { + c := 
+	c.cmd = exec.Command("sh", "-c", "exit 1")
+	c.splay = 100 * time.Second
+	c.killSignal = syscall.SIGTERM
+
+	if err := c.Start(); err != nil {
+		t.Fatal(err)
+	}
+
+	// For some reason bash doesn't start immediately
+	time.Sleep(fileWaitSleepDelay)
+
+	killStartTime := time.Now()
+	c.Stop()
+	killEndTime := time.Now()
+
+	if killEndTime.Sub(killStartTime) > fileWaitSleepDelay {
+		t.Error("expected not to wait for splay")
+	}
+}
+
+func TestSignal_noProcess(t *testing.T) {
+
+	c := testChild(t)
+	// Signaling a child that was never started should be a no-op, not an error.
+	if err := c.Signal(syscall.SIGUSR1); err != nil {
+		t.Fatal(err)
+	}
+}
+
+func TestKill_signal(t *testing.T) {
+
+	c := testChild(t)
+	cmd := exec.Command("sh", "-c", "trap 'echo one; exit' USR1; while true; do sleep 0.2; done")
+	c.killSignal = syscall.SIGUSR1
+
+	out := gatedio.NewByteBuffer()
+	cmd.Stdout = out
+	c.cmd = cmd
+
+	if err := c.Start(); err != nil {
+		t.Fatal(err)
+	}
+	defer c.Stop()
+
+	// For some reason bash doesn't start immediately
+	time.Sleep(fileWaitSleepDelay)
+
+	c.Kill()
+
+	// Give time for the file to flush
+	time.Sleep(fileWaitSleepDelay)
+
+	expected := "one\n"
+	if out.String() != expected {
+		t.Errorf("expected %q to be %q", out.String(), expected)
+	}
+}
+
+func TestKill_noProcess(t *testing.T) {
+	c := testChild(t)
+	c.killSignal = syscall.SIGUSR1
+	c.Kill()
+}
+
+func TestStop_noWaitForSplay(t *testing.T) {
+	c := testChild(t)
+	c.cmd = exec.Command("sh", "-c", "trap 'echo one; exit' USR1; while true; do sleep 0.2; done")
+	c.splay = 100 * time.Second
+	c.killSignal = syscall.SIGUSR1
+
+	out := gatedio.NewByteBuffer()
+	c.cmd.Stdout = out
+
+	if err := c.Start(); err != nil {
+		t.Fatal(err)
+	}
+
+	// For some reason bash doesn't start immediately
+	time.Sleep(fileWaitSleepDelay)
+
+	killStartTime := time.Now()
+	c.StopImmediately()
+	killEndTime := time.Now()
+
+	expected := "one\n"
+	if out.String() != expected {
+		t.Errorf("expected %q to be %q", out.String(), expected)
+	}
+
+	if killEndTime.Sub(killStartTime) > fileWaitSleepDelay {
+		t.Error("expected not to wait for splay")
+	}
+}
+
+func TestSetpgid(t *testing.T) {
+	t.Run("true", func(t *testing.T) {
+		c := testChild(t)
+		c.cmd = exec.Command("sh", "-c", "while true; do sleep 0.2; done")
+		// default, but to be explicit for the test
+		c.setpgid = true
+
+		if err := c.Start(); err != nil {
+			t.Fatal(err)
+		}
+		defer c.Stop()
+
+		// when setpgid is true, the pid and pgid should be the same
+		pgid, err := syscall.Getpgid(c.Pid())
+		if err != nil {
+			t.Fatal("Getpgid error:", err)
+		}
+
+		if c.Pid() != pgid {
+			t.Fatal("pid and pgid should match")
+		}
+	})
+	t.Run("false", func(t *testing.T) {
+		c := testChild(t)
+		c.cmd = exec.Command("sh", "-c", "while true; do sleep 0.2; done")
+		c.setpgid = false
+
+		if err := c.Start(); err != nil {
+			t.Fatal(err)
+		}
+		defer c.Stop()
+
+		// when setpgid is false, the child stays in the parent's process
+		// group, so its pid and pgid should NOT match
+		pgid, err := syscall.Getpgid(c.Pid())
+		if err != nil {
+			t.Fatal("Getpgid error:", err)
+		}
+
+		if c.Pid() == pgid {
+			t.Fatal("pid and pgid should NOT match")
+		}
+	})
+}
diff --git a/cli/internal/process/child_test.go b/cli/internal/process/child_test.go
new file mode 100644
index 0000000..63dee22
--- /dev/null
+++ b/cli/internal/process/child_test.go
@@ -0,0 +1,193 @@
+package process
+
+/**
+ * Code in this file is based on the source code at
+ * https://github.com/hashicorp/consul-template/tree/3ea7d99ad8eff17897e0d63dac86d74770170bb8/child/child_test.go
+ *
+ * Major changes include supporting API changes in child.go and removing
+ * tests for reloading, which was removed in child.go
+ */
+
+import (
+	"io/ioutil"
+	"os"
+	"os/exec"
+	"strings"
+	"testing"
+	"time"
+
+	"github.com/hashicorp/go-gatedio"
+	"github.com/hashicorp/go-hclog"
+)
+
+const fileWaitSleepDelay = 150 * time.Millisecond
+
+func testChild(t *testing.T) *Child {
+	cmd := exec.Command("echo", "hello", "world")
+	cmd.Stdout = ioutil.Discard
+	cmd.Stderr = ioutil.Discard
+	c, err := newChild(NewInput{
+		Cmd:         cmd,
+		KillSignal:  os.Kill,
+		KillTimeout: 2 * time.Second,
+		Splay:       0 * time.Second,
+		Logger:      hclog.Default(),
+	})
+	if err != nil {
+		t.Fatal(err)
+	}
+	return c
+}
+
+func TestNew(t *testing.T) {
+
+	stdin := gatedio.NewByteBuffer()
+	stdout := gatedio.NewByteBuffer()
+	stderr := gatedio.NewByteBuffer()
+	command := "echo"
+	args := []string{"hello", "world"}
+	env := []string{"a=b", "c=d"}
+	killSignal := os.Kill
+	killTimeout := fileWaitSleepDelay
+	splay := fileWaitSleepDelay
+
+	cmd := exec.Command(command, args...)
+	cmd.Stdin = stdin
+	cmd.Stderr = stderr
+	cmd.Stdout = stdout
+	cmd.Env = env
+	c, err := newChild(NewInput{
+		Cmd:         cmd,
+		KillSignal:  killSignal,
+		KillTimeout: killTimeout,
+		Splay:       splay,
+		Logger:      hclog.Default(),
+	})
+	if err != nil {
+		t.Fatal(err)
+	}
+
+	if c.killSignal != killSignal {
+		t.Errorf("expected %q to be %q", c.killSignal, killSignal)
+	}
+
+	if c.killTimeout != killTimeout {
+		t.Errorf("expected %q to be %q", c.killTimeout, killTimeout)
+	}
+
+	if c.splay != splay {
+		t.Errorf("expected %q to be %q", c.splay, splay)
+	}
+
+	if c.stopCh == nil {
+		t.Errorf("expected stopCh to be initialized, got %#v", c.stopCh)
+	}
+}
+
+func TestExitCh_noProcess(t *testing.T) {
+
+	c := testChild(t)
+	ch := c.ExitCh()
+	if ch != nil {
+		t.Errorf("expected %#v to be nil", ch)
+	}
+}
+
+func TestExitCh(t *testing.T) {
+
+	c := testChild(t)
+	if err := c.Start(); err != nil {
+		t.Fatal(err)
+	}
+	defer c.Stop()
+
+	ch := c.ExitCh()
+	if ch == nil {
+		t.Error("expected ch to exist")
+	}
+}
+
+func TestPid_noProcess(t *testing.T) {
+
+	c := testChild(t)
+	pid := c.Pid()
+	if pid != 0 {
+		t.Errorf("expected %q to be 0", pid)
+	}
+}
+
+func TestPid(t *testing.T) {
+
+	c := testChild(t)
+	if err := c.Start(); err != nil {
+		t.Fatal(err)
+	}
+	defer c.Stop()
+
+	pid := c.Pid()
+	if pid == 0 {
+		t.Error("expected pid to not be 0")
+	}
+}
+
+func TestStart(t *testing.T) {
+
+	c := testChild(t)
+
+	// Set our own reader and writer so we can verify they are wired to the child.
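+	// The `env` command prints one KEY=value pair per line, so checking below
+	// that stdout contains "a=b" and "c=d" verifies that both cmd.Env and
+	// cmd.Stdout were wired through to the spawned process.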
+ stdin := gatedio.NewByteBuffer() + stdout := gatedio.NewByteBuffer() + stderr := gatedio.NewByteBuffer() + // Custom env and command + env := []string{"a=b", "c=d"} + cmd := exec.Command("env") + cmd.Stdin = stdin + cmd.Stdout = stdout + cmd.Stderr = stderr + cmd.Env = env + c.cmd = cmd + + if err := c.Start(); err != nil { + t.Fatal(err) + } + defer c.Stop() + + select { + case <-c.ExitCh(): + case <-time.After(fileWaitSleepDelay): + t.Fatal("process should have exited") + } + + output := stdout.String() + for _, envVar := range env { + if !strings.Contains(output, envVar) { + t.Errorf("expected to find %q in %q", envVar, output) + } + } +} + +func TestKill_noSignal(t *testing.T) { + + c := testChild(t) + c.cmd = exec.Command("sh", "-c", "while true; do sleep 0.2; done") + c.killTimeout = 20 * time.Millisecond + c.killSignal = nil + + if err := c.Start(); err != nil { + t.Fatal(err) + } + defer c.Stop() + + // For some reason bash doesn't start immediately + time.Sleep(fileWaitSleepDelay) + + c.Kill() + + // Give time for the file to flush + time.Sleep(fileWaitSleepDelay) + + if c.cmd != nil { + t.Errorf("expected cmd to be nil") + } +} diff --git a/cli/internal/process/manager.go b/cli/internal/process/manager.go new file mode 100644 index 0000000..0488a29 --- /dev/null +++ b/cli/internal/process/manager.go @@ -0,0 +1,120 @@ +package process + +import ( + "errors" + "fmt" + "os" + "os/exec" + "sync" + "time" + + "github.com/hashicorp/go-hclog" +) + +// ErrClosing is returned when the process manager is in the process of closing, +// meaning that no more child processes can be Exec'd, and existing, non-failed +// child processes will be stopped with this error. +var ErrClosing = errors.New("process manager is already closing") + +// ChildExit is returned when a child process exits with a non-zero exit code +type ChildExit struct { + ExitCode int + Command string +} + +func (ce *ChildExit) Error() string { + return fmt.Sprintf("command %s exited (%d)", ce.Command, ce.ExitCode) +} + +// Manager tracks all of the child processes that have been spawned +type Manager struct { + done bool + children map[*Child]struct{} + mu sync.Mutex + doneCh chan struct{} + logger hclog.Logger +} + +// NewManager creates a new properly-initialized Manager instance +func NewManager(logger hclog.Logger) *Manager { + return &Manager{ + children: make(map[*Child]struct{}), + doneCh: make(chan struct{}), + logger: logger, + } +} + +// Exec spawns a child process to run the given command, then blocks +// until it completes. Returns a nil error if the child process finished +// successfully, ErrClosing if the manager closed during execution, and +// a ChildExit error if the child process exited with a non-zero exit code. 
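+//
+// A minimal usage sketch (hypothetical caller code, not part of this patch):
+//
+//	mgr := process.NewManager(hclog.Default())
+//	err := mgr.Exec(exec.Command("npm", "run", "build"))
+//	var exitErr *process.ChildExit
+//	if errors.As(err, &exitErr) {
+//		fmt.Printf("command failed with exit code %d\n", exitErr.ExitCode)
+//	}
+//	mgr.Close() // stop any remaining children and wait for them to exit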
+func (m *Manager) Exec(cmd *exec.Cmd) error {
+	m.mu.Lock()
+	if m.done {
+		m.mu.Unlock()
+		return ErrClosing
+	}
+
+	child, err := newChild(NewInput{
+		Cmd: cmd,
+		// Run forever by default
+		Timeout: 0,
+		// When it's time to exit, give a 10 second timeout
+		KillTimeout: 10 * time.Second,
+		// Send SIGINT to stop children
+		KillSignal: os.Interrupt,
+		Logger:     m.logger,
+	})
+	if err != nil {
+		// Don't leak the lock if we failed to construct the child
+		m.mu.Unlock()
+		return err
+	}
+
+	m.children[child] = struct{}{}
+	m.mu.Unlock()
+	err = child.Start()
+	if err != nil {
+		m.mu.Lock()
+		delete(m.children, child)
+		m.mu.Unlock()
+		return err
+	}
+	err = nil
+	exitCode, ok := <-child.ExitCh()
+	if !ok {
+		err = ErrClosing
+	} else if exitCode != ExitCodeOK {
+		err = &ChildExit{
+			ExitCode: exitCode,
+			Command:  child.Command(),
+		}
+	}
+
+	m.mu.Lock()
+	delete(m.children, child)
+	m.mu.Unlock()
+	return err
+}
+
+// Close sends SIGINT to all child processes if it hasn't been done yet,
+// and in either case blocks until they all exit or time out
+func (m *Manager) Close() {
+	m.mu.Lock()
+	if m.done {
+		m.mu.Unlock()
+		<-m.doneCh
+		return
+	}
+	wg := sync.WaitGroup{}
+	m.done = true
+	for child := range m.children {
+		child := child
+		wg.Add(1)
+		go func() {
+			child.Stop()
+			wg.Done()
+		}()
+	}
+	m.mu.Unlock()
+	wg.Wait()
+	close(m.doneCh)
+}
diff --git a/cli/internal/process/manager_test.go b/cli/internal/process/manager_test.go
new file mode 100644
index 0000000..fb40ffa
--- /dev/null
+++ b/cli/internal/process/manager_test.go
@@ -0,0 +1,94 @@
+package process
+
+import (
+	"errors"
+	"os/exec"
+	"sync"
+	"testing"
+	"time"
+
+	"github.com/hashicorp/go-gatedio"
+	"github.com/hashicorp/go-hclog"
+)
+
+func newManager() *Manager {
+	return NewManager(hclog.Default())
+}
+
+func TestExec_simple(t *testing.T) {
+	mgr := newManager()
+
+	out := gatedio.NewByteBuffer()
+	cmd := exec.Command("env")
+	cmd.Stdout = out
+
+	err := mgr.Exec(cmd)
+	if err != nil {
+		t.Errorf("expected %q to be nil", err)
+	}
+
+	output := out.String()
+	if output == "" {
+		t.Error("expected output from running 'env', got empty string")
+	}
+}
+
+func TestClose(t *testing.T) {
+	mgr := newManager()
+
+	wg := sync.WaitGroup{}
+	tasks := 4
+	errs := make([]error, tasks)
+	start := time.Now()
+	for i := 0; i < tasks; i++ {
+		wg.Add(1)
+		go func(index int) {
+			cmd := exec.Command("sleep", "0.5")
+			err := mgr.Exec(cmd)
+			if err != nil {
+				errs[index] = err
+			}
+			wg.Done()
+		}(i)
+	}
+	// let processes kick off
+	time.Sleep(50 * time.Millisecond)
+	mgr.Close()
+	end := time.Now()
+	wg.Wait()
+	duration := end.Sub(start)
+	if duration >= 500*time.Millisecond {
+		t.Errorf("expected to close early, total time was %q", duration)
+	}
+	for _, err := range errs {
+		if err != ErrClosing {
+			t.Errorf("expected manager closing error, found %q", err)
+		}
+	}
+}
+
+func TestClose_alreadyClosed(t *testing.T) {
+	mgr := newManager()
+	mgr.Close()
+
+	// repeated closing does not error
+	mgr.Close()
+
+	err := mgr.Exec(exec.Command("sleep", "1"))
+	if err != ErrClosing {
+		t.Errorf("expected manager closing error, found %q", err)
+	}
+}
+
+func TestExitCode(t *testing.T) {
+	mgr := newManager()
+
+	err := mgr.Exec(exec.Command("ls", "doesnotexist"))
+	exitErr := &ChildExit{}
+	if !errors.As(err, &exitErr) {
+		t.Errorf("expected a ChildExit err, got %q", err)
+	}
+	if exitErr.ExitCode == 0 {
+		t.Error("expected non-zero exit code, got 0")
+	}
+}
diff --git a/cli/internal/process/sys_nix.go b/cli/internal/process/sys_nix.go
new file mode 100644
index 0000000..0e6c003
--- /dev/null
+++ 
b/cli/internal/process/sys_nix.go @@ -0,0 +1,23 @@ +//go:build !windows +// +build !windows + +package process + +/** + * Code in this file is based on the source code at + * https://github.com/hashicorp/consul-template/tree/3ea7d99ad8eff17897e0d63dac86d74770170bb8/child/sys_nix.go + */ + +import ( + "os/exec" + "syscall" +) + +func setSetpgid(cmd *exec.Cmd, value bool) { + cmd.SysProcAttr = &syscall.SysProcAttr{Setpgid: value} +} + +func processNotFoundErr(err error) bool { + // ESRCH == no such process, ie. already exited + return err == syscall.ESRCH +} diff --git a/cli/internal/process/sys_windows.go b/cli/internal/process/sys_windows.go new file mode 100644 index 0000000..c626c22 --- /dev/null +++ b/cli/internal/process/sys_windows.go @@ -0,0 +1,17 @@ +//go:build windows +// +build windows + +package process + +/** + * Code in this file is based on the source code at + * https://github.com/hashicorp/consul-template/tree/3ea7d99ad8eff17897e0d63dac86d74770170bb8/child/sys_windows.go + */ + +import "os/exec" + +func setSetpgid(cmd *exec.Cmd, value bool) {} + +func processNotFoundErr(err error) bool { + return false +} diff --git a/cli/internal/prune/prune.go b/cli/internal/prune/prune.go new file mode 100644 index 0000000..a82023f --- /dev/null +++ b/cli/internal/prune/prune.go @@ -0,0 +1,314 @@ +package prune + +import ( + "bufio" + "fmt" + "os" + "strings" + + "github.com/vercel/turbo/cli/internal/cmdutil" + "github.com/vercel/turbo/cli/internal/context" + "github.com/vercel/turbo/cli/internal/fs" + "github.com/vercel/turbo/cli/internal/lockfile" + "github.com/vercel/turbo/cli/internal/turbopath" + "github.com/vercel/turbo/cli/internal/turbostate" + "github.com/vercel/turbo/cli/internal/ui" + "github.com/vercel/turbo/cli/internal/util" + + "github.com/fatih/color" + "github.com/hashicorp/go-hclog" + "github.com/mitchellh/cli" + "github.com/pkg/errors" +) + +type opts struct { + scope []string + docker bool + outputDir string +} + +// ExecutePrune executes the `prune` command. 
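+//
+// In broad strokes (based on the implementation below): the selected
+// workspaces, their internal dependencies, and a pruned lockfile are copied
+// into the output directory. With --docker, the output is additionally split
+// into a "json" directory (package.json files only, useful as a stable Docker
+// layer for dependency installation) and a "full" directory containing the
+// full source of the kept workspaces.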
+func ExecutePrune(helper *cmdutil.Helper, args *turbostate.ParsedArgsFromRust) error { + base, err := helper.GetCmdBase(args) + if err != nil { + return err + } + if len(args.Command.Prune.Scope) == 0 { + err := errors.New("at least one target must be specified") + base.LogError(err.Error()) + return err + } + p := &prune{ + base, + } + if err := p.prune(args.Command.Prune); err != nil { + logError(p.base.Logger, p.base.UI, err) + return err + } + return nil +} + +func logError(logger hclog.Logger, ui cli.Ui, err error) { + logger.Error(fmt.Sprintf("error: %v", err)) + pref := color.New(color.Bold, color.FgRed, color.ReverseVideo).Sprint(" ERROR ") + ui.Error(fmt.Sprintf("%s%s", pref, color.RedString(" %v", err))) +} + +type prune struct { + base *cmdutil.CmdBase +} + +// Prune creates a smaller monorepo with only the required workspaces +func (p *prune) prune(opts *turbostate.PrunePayload) error { + rootPackageJSONPath := p.base.RepoRoot.UntypedJoin("package.json") + rootPackageJSON, err := fs.ReadPackageJSON(rootPackageJSONPath) + if err != nil { + return fmt.Errorf("failed to read package.json: %w", err) + } + ctx, err := context.BuildPackageGraph(p.base.RepoRoot, rootPackageJSON) + if err != nil { + return errors.Wrap(err, "could not construct graph") + } + outDir := p.base.RepoRoot.UntypedJoin(opts.OutputDir) + fullDir := outDir + if opts.Docker { + fullDir = fullDir.UntypedJoin("full") + } + + p.base.Logger.Trace("scope", "value", strings.Join(opts.Scope, ", ")) + p.base.Logger.Trace("docker", "value", opts.Docker) + p.base.Logger.Trace("out dir", "value", outDir.ToString()) + + for _, scope := range opts.Scope { + p.base.Logger.Trace("scope", "value", scope) + target, scopeIsValid := ctx.WorkspaceInfos.PackageJSONs[scope] + if !scopeIsValid { + return errors.Errorf("invalid scope: package %v not found", scope) + } + p.base.Logger.Trace("target", "value", target.Name) + p.base.Logger.Trace("directory", "value", target.Dir) + p.base.Logger.Trace("external deps", "value", target.UnresolvedExternalDeps) + p.base.Logger.Trace("internal deps", "value", target.InternalDeps) + } + + canPrune, err := ctx.PackageManager.CanPrune(p.base.RepoRoot) + if err != nil { + return err + } + if !canPrune { + return errors.Errorf("this command is not yet implemented for %s", ctx.PackageManager.Name) + } + if lockfile.IsNil(ctx.Lockfile) { + return errors.New("Cannot prune without parsed lockfile") + } + + p.base.UI.Output(fmt.Sprintf("Generating pruned monorepo for %v in %v", ui.Bold(strings.Join(opts.Scope, ", ")), ui.Bold(outDir.ToString()))) + + packageJSONPath := outDir.UntypedJoin("package.json") + if err := packageJSONPath.EnsureDir(); err != nil { + return errors.Wrap(err, "could not create output directory") + } + if workspacePath := ctx.PackageManager.WorkspaceConfigurationPath; workspacePath != "" && p.base.RepoRoot.UntypedJoin(workspacePath).FileExists() { + workspaceFile := fs.LstatCachedFile{Path: p.base.RepoRoot.UntypedJoin(workspacePath)} + if err := fs.CopyFile(&workspaceFile, outDir.UntypedJoin(ctx.PackageManager.WorkspaceConfigurationPath).ToStringDuringMigration()); err != nil { + return errors.Wrapf(err, "could not copy %s", ctx.PackageManager.WorkspaceConfigurationPath) + } + if err := fs.CopyFile(&workspaceFile, fullDir.UntypedJoin(ctx.PackageManager.WorkspaceConfigurationPath).ToStringDuringMigration()); err != nil { + return errors.Wrapf(err, "could not copy %s", ctx.PackageManager.WorkspaceConfigurationPath) + } + if opts.Docker { + if err := fs.CopyFile(&workspaceFile, 
outDir.UntypedJoin("json", ctx.PackageManager.WorkspaceConfigurationPath).ToStringDuringMigration()); err != nil { + return errors.Wrapf(err, "could not copy %s", ctx.PackageManager.WorkspaceConfigurationPath) + } + } + } + workspaces := []turbopath.AnchoredSystemPath{} + targets, err := ctx.InternalDependencies(append(opts.Scope, util.RootPkgName)) + if err != nil { + return errors.Wrap(err, "could not traverse the dependency graph to find topological dependencies") + } + p.base.Logger.Trace("targets", "value", targets) + + lockfileKeys := make([]string, 0, len(rootPackageJSON.TransitiveDeps)) + for _, pkg := range rootPackageJSON.TransitiveDeps { + lockfileKeys = append(lockfileKeys, pkg.Key) + } + + for _, internalDep := range targets { + // We skip over the pseudo root node and the root package + if internalDep == ctx.RootNode || internalDep == util.RootPkgName { + continue + } + + workspaces = append(workspaces, ctx.WorkspaceInfos.PackageJSONs[internalDep].Dir) + originalDir := ctx.WorkspaceInfos.PackageJSONs[internalDep].Dir.RestoreAnchor(p.base.RepoRoot) + info, err := originalDir.Lstat() + if err != nil { + return errors.Wrapf(err, "failed to lstat %s", originalDir) + } + targetDir := ctx.WorkspaceInfos.PackageJSONs[internalDep].Dir.RestoreAnchor(fullDir) + if err := targetDir.MkdirAllMode(info.Mode()); err != nil { + return errors.Wrapf(err, "failed to create folder %s for %v", targetDir, internalDep) + } + + if err := fs.RecursiveCopy(ctx.WorkspaceInfos.PackageJSONs[internalDep].Dir.ToStringDuringMigration(), targetDir.ToStringDuringMigration()); err != nil { + return errors.Wrapf(err, "failed to copy %v into %v", internalDep, targetDir) + } + if opts.Docker { + jsonDir := outDir.UntypedJoin("json", ctx.WorkspaceInfos.PackageJSONs[internalDep].PackageJSONPath.ToStringDuringMigration()) + if err := jsonDir.EnsureDir(); err != nil { + return errors.Wrapf(err, "failed to create folder %v for %v", jsonDir, internalDep) + } + if err := fs.RecursiveCopy(ctx.WorkspaceInfos.PackageJSONs[internalDep].PackageJSONPath.ToStringDuringMigration(), jsonDir.ToStringDuringMigration()); err != nil { + return errors.Wrapf(err, "failed to copy %v into %v", internalDep, jsonDir) + } + } + + for _, pkg := range ctx.WorkspaceInfos.PackageJSONs[internalDep].TransitiveDeps { + lockfileKeys = append(lockfileKeys, pkg.Key) + } + + p.base.UI.Output(fmt.Sprintf(" - Added %v", ctx.WorkspaceInfos.PackageJSONs[internalDep].Name)) + } + p.base.Logger.Trace("new workspaces", "value", workspaces) + + lockfile, err := ctx.Lockfile.Subgraph(workspaces, lockfileKeys) + if err != nil { + return errors.Wrap(err, "Failed creating pruned lockfile") + } + + lockfilePath := outDir.UntypedJoin(ctx.PackageManager.Lockfile) + lockfileFile, err := lockfilePath.Create() + if err != nil { + return errors.Wrap(err, "Failed to create lockfile") + } + + lockfileWriter := bufio.NewWriter(lockfileFile) + if err := lockfile.Encode(lockfileWriter); err != nil { + return errors.Wrap(err, "Failed to encode pruned lockfile") + } + + if err := lockfileWriter.Flush(); err != nil { + return errors.Wrap(err, "Failed to flush pruned lockfile") + } + + if fs.FileExists(".gitignore") { + if err := fs.CopyFile(&fs.LstatCachedFile{Path: p.base.RepoRoot.UntypedJoin(".gitignore")}, fullDir.UntypedJoin(".gitignore").ToStringDuringMigration()); err != nil { + return errors.Wrap(err, "failed to copy root .gitignore") + } + } + + if fs.FileExists(".npmrc") { + if err := fs.CopyFile(&fs.LstatCachedFile{Path: p.base.RepoRoot.UntypedJoin(".npmrc")}, 
fullDir.UntypedJoin(".npmrc").ToStringDuringMigration()); err != nil { + return errors.Wrap(err, "failed to copy root .npmrc") + } + if opts.Docker { + if err := fs.CopyFile(&fs.LstatCachedFile{Path: p.base.RepoRoot.UntypedJoin(".npmrc")}, outDir.UntypedJoin("json/.npmrc").ToStringDuringMigration()); err != nil { + return errors.Wrap(err, "failed to copy root .npmrc") + } + } + } + + turboJSON, err := fs.LoadTurboConfig(p.base.RepoRoot, rootPackageJSON, false) + if err != nil && !errors.Is(err, os.ErrNotExist) { + return errors.Wrap(err, "failed to read turbo.json") + } + if turboJSON != nil { + // when executing a prune, it is not enough to simply copy the file, as + // tasks may refer to scopes that no longer exist. to remedy this, we need + // to remove from the Pipeline the TaskDefinitions that no longer apply + for pipelineTask := range turboJSON.Pipeline { + includeTask := false + for _, includedPackage := range targets { + if util.IsTaskInPackage(pipelineTask, includedPackage) { + includeTask = true + break + } + } + + if !includeTask { + delete(turboJSON.Pipeline, pipelineTask) + } + } + + bytes, err := turboJSON.MarshalJSON() + + if err != nil { + return errors.Wrap(err, "failed to write turbo.json") + } + + if err := fullDir.UntypedJoin("turbo.json").WriteFile(bytes, 0644); err != nil { + return errors.Wrap(err, "failed to prune workspace tasks from turbo.json") + } + } + + originalPackageJSON := fs.LstatCachedFile{Path: p.base.RepoRoot.UntypedJoin("package.json")} + newPackageJSONPath := fullDir.UntypedJoin("package.json") + // If the original lockfile uses any patches we rewrite the package.json to make sure it doesn't + // include any patches that might have been pruned. + if originalPatches := ctx.Lockfile.Patches(); originalPatches != nil { + patches := lockfile.Patches() + if err := ctx.PackageManager.PrunePatchedPackages(rootPackageJSON, patches); err != nil { + return errors.Wrapf(err, "Unable to prune patches section of %s", rootPackageJSONPath) + } + packageJSONContent, err := fs.MarshalPackageJSON(rootPackageJSON) + if err != nil { + return err + } + + info, err := originalPackageJSON.GetInfo() + if err != nil { + return err + } + newPackageJSON, err := newPackageJSONPath.Create() + if err != nil { + return err + } + if _, err := newPackageJSON.Write(packageJSONContent); err != nil { + return err + } + if err := newPackageJSON.Chmod(info.Mode()); err != nil { + return err + } + if err := newPackageJSON.Close(); err != nil { + return err + } + + for _, patch := range patches { + if err := fs.CopyFile( + &fs.LstatCachedFile{Path: p.base.RepoRoot.UntypedJoin(patch.ToString())}, + fullDir.UntypedJoin(patch.ToString()).ToStringDuringMigration(), + ); err != nil { + return errors.Wrap(err, "Failed copying patch file") + } + if opts.Docker { + jsonDir := outDir.Join(turbopath.RelativeSystemPath("json")) + if err := fs.CopyFile( + &fs.LstatCachedFile{Path: p.base.RepoRoot.UntypedJoin(patch.ToString())}, + patch.ToSystemPath().RestoreAnchor(jsonDir).ToStringDuringMigration(), + ); err != nil { + return errors.Wrap(err, "Failed copying patch file") + } + } + } + } else { + if err := fs.CopyFile( + &originalPackageJSON, + fullDir.UntypedJoin("package.json").ToStringDuringMigration(), + ); err != nil { + return errors.Wrap(err, "failed to copy root package.json") + } + } + + if opts.Docker { + // Copy from the package.json in the full directory so we get the pruned version if needed + if err := fs.CopyFile( + &fs.LstatCachedFile{Path: newPackageJSONPath}, + 
outDir.Join(turbopath.RelativeUnixPath("json/package.json").ToSystemPath()).ToString(), + ); err != nil { + return errors.Wrap(err, "failed to copy root package.json") + } + } + + return nil +} diff --git a/cli/internal/run/dry_run.go b/cli/internal/run/dry_run.go new file mode 100644 index 0000000..eeee431 --- /dev/null +++ b/cli/internal/run/dry_run.go @@ -0,0 +1,122 @@ +// Package run implements `turbo run` +// This file implements the logic for `turbo run --dry` +package run + +import ( + gocontext "context" + "sync" + + "github.com/pkg/errors" + "github.com/vercel/turbo/cli/internal/cache" + "github.com/vercel/turbo/cli/internal/cmdutil" + "github.com/vercel/turbo/cli/internal/core" + "github.com/vercel/turbo/cli/internal/fs" + "github.com/vercel/turbo/cli/internal/graph" + "github.com/vercel/turbo/cli/internal/nodes" + "github.com/vercel/turbo/cli/internal/runsummary" + "github.com/vercel/turbo/cli/internal/taskhash" + "github.com/vercel/turbo/cli/internal/util" +) + +// DryRun gets all the info needed from tasks and prints out a summary, but doesn't actually +// execute the task. +func DryRun( + ctx gocontext.Context, + g *graph.CompleteGraph, + rs *runSpec, + engine *core.Engine, + _ *taskhash.Tracker, // unused, but keep here for parity with RealRun method signature + turboCache cache.Cache, + _ *fs.TurboJSON, // unused, but keep here for parity with RealRun method signature + globalEnvMode util.EnvMode, + base *cmdutil.CmdBase, + summary runsummary.Meta, +) error { + defer turboCache.Shutdown() + + taskSummaries := []*runsummary.TaskSummary{} + + mu := sync.Mutex{} + execFunc := func(ctx gocontext.Context, packageTask *nodes.PackageTask, taskSummary *runsummary.TaskSummary) error { + // Assign some fallbacks if they were missing + if taskSummary.Command == "" { + taskSummary.Command = runsummary.MissingTaskLabel + } + + if taskSummary.Framework == "" { + taskSummary.Framework = runsummary.MissingFrameworkLabel + } + + // This mutex is not _really_ required, since we are using Concurrency: 1 as an execution + // option, but we add it here to match the shape of RealRuns execFunc. + mu.Lock() + defer mu.Unlock() + taskSummaries = append(taskSummaries, taskSummary) + return nil + } + + // This setup mirrors a real run. We call engine.execute() with + // a visitor function and some hardcoded execOpts. + // Note: we do not currently attempt to parallelize the graph walking + // (as we do in real execution) + getArgs := func(taskID string) []string { + return rs.ArgsForTask(taskID) + } + + visitorFn := g.GetPackageTaskVisitor(ctx, engine.TaskGraph, globalEnvMode, getArgs, base.Logger, execFunc) + execOpts := core.EngineExecutionOptions{ + Concurrency: 1, + Parallel: false, + } + + if errs := engine.Execute(visitorFn, execOpts); len(errs) > 0 { + for _, err := range errs { + base.UI.Error(err.Error()) + } + return errors.New("errors occurred during dry-run graph traversal") + } + + // We walk the graph with no concurrency. + // Populating the cache state is parallelizable. + // Do this _after_ walking the graph. + populateCacheState(turboCache, taskSummaries) + + // Assign the Task Summaries to the main summary + summary.RunSummary.Tasks = taskSummaries + + // The exitCode isn't really used by the Run Summary Close() method for dry runs + // but we pass in a successful value to match Real Runs. 
+ return summary.Close(ctx, 0, g.WorkspaceInfos) +} + +func populateCacheState(turboCache cache.Cache, taskSummaries []*runsummary.TaskSummary) { + // We make at most 8 requests at a time for cache state. + maxParallelRequests := 8 + taskCount := len(taskSummaries) + + parallelRequestCount := maxParallelRequests + if taskCount < maxParallelRequests { + parallelRequestCount = taskCount + } + + queue := make(chan int, taskCount) + + wg := &sync.WaitGroup{} + for i := 0; i < parallelRequestCount; i++ { + wg.Add(1) + go func() { + defer wg.Done() + for index := range queue { + task := taskSummaries[index] + itemStatus := turboCache.Exists(task.Hash) + task.CacheSummary = runsummary.NewTaskCacheSummary(itemStatus, nil) + } + }() + } + + for index := range taskSummaries { + queue <- index + } + close(queue) + wg.Wait() +} diff --git a/cli/internal/run/global_hash.go b/cli/internal/run/global_hash.go new file mode 100644 index 0000000..2ebf642 --- /dev/null +++ b/cli/internal/run/global_hash.go @@ -0,0 +1,164 @@ +package run + +import ( + "fmt" + "path/filepath" + "strings" + + "github.com/hashicorp/go-hclog" + "github.com/mitchellh/cli" + "github.com/vercel/turbo/cli/internal/env" + "github.com/vercel/turbo/cli/internal/fs" + "github.com/vercel/turbo/cli/internal/globby" + "github.com/vercel/turbo/cli/internal/hashing" + "github.com/vercel/turbo/cli/internal/lockfile" + "github.com/vercel/turbo/cli/internal/packagemanager" + "github.com/vercel/turbo/cli/internal/turbopath" + "github.com/vercel/turbo/cli/internal/util" +) + +const _globalCacheKey = "Buffalo buffalo Buffalo buffalo buffalo buffalo Buffalo buffalo" + +// Variables that we always include +var _defaultEnvVars = []string{ + "VERCEL_ANALYTICS_ID", +} + +// GlobalHashable represents all the things that we use to create the global hash +type GlobalHashable struct { + globalFileHashMap map[turbopath.AnchoredUnixPath]string + rootExternalDepsHash string + envVars env.DetailedMap + globalCacheKey string + pipeline fs.PristinePipeline + envVarPassthroughs []string + envMode util.EnvMode +} + +// This exists because the global hash used to have different fields. Changing +// to a new struct layout changes the global hash. We can remove this converter +// when we are going to have to update the global hash for something else. +type oldGlobalHashable struct { + globalFileHashMap map[turbopath.AnchoredUnixPath]string + rootExternalDepsHash string + envVars env.EnvironmentVariablePairs + globalCacheKey string + pipeline fs.PristinePipeline +} + +// calculateGlobalHashFromHashable returns a hash string from the globalHashable +func calculateGlobalHashFromHashable(full GlobalHashable) (string, error) { + switch full.envMode { + case util.Infer: + if full.envVarPassthroughs != nil { + // In infer mode, if there is any passThru config (even if it is an empty array) + // we'll hash the whole object, so we can detect changes to that config + // Further, resolve the envMode to the concrete value. + full.envMode = util.Strict + return fs.HashObject(full) + } + + // If we're in infer mode, and there is no global pass through config, + // we use the old struct layout. this will be true for everyone not using the strict env + // feature, and we don't want to break their cache. 
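+		// (Presumably fs.HashObject hashes a serialized form of the struct, so
+		// a different field set or layout yields a different global hash;
+		// keeping the old layout here preserves existing cache keys.)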
+ return fs.HashObject(oldGlobalHashable{ + globalFileHashMap: full.globalFileHashMap, + rootExternalDepsHash: full.rootExternalDepsHash, + envVars: full.envVars.All.ToHashable(), + globalCacheKey: full.globalCacheKey, + pipeline: full.pipeline, + }) + case util.Loose: + // Remove the passthroughs from hash consideration if we're explicitly loose. + full.envVarPassthroughs = nil + return fs.HashObject(full) + case util.Strict: + // Collapse `nil` and `[]` in strict mode. + if full.envVarPassthroughs == nil { + full.envVarPassthroughs = make([]string, 0) + } + return fs.HashObject(full) + default: + panic("unimplemented environment mode") + } +} + +func calculateGlobalHash( + rootpath turbopath.AbsoluteSystemPath, + rootPackageJSON *fs.PackageJSON, + pipeline fs.Pipeline, + envVarDependencies []string, + globalFileDependencies []string, + packageManager *packagemanager.PackageManager, + lockFile lockfile.Lockfile, + envVarPassthroughs []string, + envMode util.EnvMode, + logger hclog.Logger, + ui cli.Ui, + isStructuredOutput bool, +) (GlobalHashable, error) { + // Calculate env var dependencies + envVars := []string{} + envVars = append(envVars, envVarDependencies...) + envVars = append(envVars, _defaultEnvVars...) + globalHashableEnvVars, err := env.GetHashableEnvVars(envVars, []string{".*THASH.*"}, "") + if err != nil { + return GlobalHashable{}, err + } + + // The only way we can add env vars into the hash via matching is via THASH, + // so we only do a simple check here for entries in `BySource.Matching`. + // If we enable globalEnv to accept wildcard characters, we'll need to update this + // check. + if !isStructuredOutput && len(globalHashableEnvVars.BySource.Matching) > 0 { + ui.Warn(fmt.Sprintf("[DEPRECATED] Using .*THASH.* to specify an environment variable for inclusion into the hash is deprecated. 
You specified: %s.", strings.Join(globalHashableEnvVars.BySource.Matching.Names(), ", "))) + } + + logger.Debug("global hash env vars", "vars", globalHashableEnvVars.All.Names()) + + // Calculate global file dependencies + globalDeps := make(util.Set) + if len(globalFileDependencies) > 0 { + ignores, err := packageManager.GetWorkspaceIgnores(rootpath) + if err != nil { + return GlobalHashable{}, err + } + + f, err := globby.GlobFiles(rootpath.ToStringDuringMigration(), globalFileDependencies, ignores) + if err != nil { + return GlobalHashable{}, err + } + + for _, val := range f { + globalDeps.Add(val) + } + } + + if lockFile == nil { + // If we don't have lockfile information available, add the specfile and lockfile to global deps + globalDeps.Add(filepath.Join(rootpath.ToStringDuringMigration(), packageManager.Specfile)) + globalDeps.Add(filepath.Join(rootpath.ToStringDuringMigration(), packageManager.Lockfile)) + } + + // No prefix, global deps already have full paths + globalDepsArray := globalDeps.UnsafeListOfStrings() + globalDepsPaths := make([]turbopath.AbsoluteSystemPath, len(globalDepsArray)) + for i, path := range globalDepsArray { + globalDepsPaths[i] = turbopath.AbsoluteSystemPathFromUpstream(path) + } + + globalFileHashMap, err := hashing.GetHashableDeps(rootpath, globalDepsPaths) + if err != nil { + return GlobalHashable{}, fmt.Errorf("error hashing files: %w", err) + } + + return GlobalHashable{ + globalFileHashMap: globalFileHashMap, + rootExternalDepsHash: rootPackageJSON.ExternalDepsHash, + envVars: globalHashableEnvVars, + globalCacheKey: _globalCacheKey, + pipeline: pipeline.Pristine(), + envVarPassthroughs: envVarPassthroughs, + envMode: envMode, + }, nil +} diff --git a/cli/internal/run/graph_run.go b/cli/internal/run/graph_run.go new file mode 100644 index 0000000..8531718 --- /dev/null +++ b/cli/internal/run/graph_run.go @@ -0,0 +1,46 @@ +package run + +import ( + gocontext "context" + + "github.com/pyr-sh/dag" + "github.com/vercel/turbo/cli/internal/cmdutil" + "github.com/vercel/turbo/cli/internal/core" + "github.com/vercel/turbo/cli/internal/graphvisualizer" + "github.com/vercel/turbo/cli/internal/util" +) + +// GraphRun generates a visualization of the task graph rather than executing it. +func GraphRun(ctx gocontext.Context, rs *runSpec, engine *core.Engine, base *cmdutil.CmdBase) error { + graph := engine.TaskGraph + if rs.Opts.runOpts.SinglePackage { + graph = filterSinglePackageGraphForDisplay(engine.TaskGraph) + } + visualizer := graphvisualizer.New(base.RepoRoot, base.UI, graph) + + if rs.Opts.runOpts.GraphDot { + visualizer.RenderDotGraph() + } else { + err := visualizer.GenerateGraphFile(rs.Opts.runOpts.GraphFile) + if err != nil { + return err + } + } + return nil +} + +// filterSinglePackageGraphForDisplay builds an equivalent graph with package names stripped from tasks. +// Given that this should only be used in a single-package context, all of the package names are expected +// to be //. Also, all nodes are always connected to the root node, so we are not concerned with leaving +// behind any unconnected nodes. 
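+//
+// Illustrative example (assuming task IDs of the form "<package>#<task>"):
+// an edge "//#build" -> "//#lint" in the original graph is rendered as
+// "build" -> "lint" once the "//" package prefix is stripped.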
+func filterSinglePackageGraphForDisplay(originalGraph *dag.AcyclicGraph) *dag.AcyclicGraph { + graph := &dag.AcyclicGraph{} + for _, edge := range originalGraph.Edges() { + src := util.StripPackageName(edge.Source().(string)) + tgt := util.StripPackageName(edge.Target().(string)) + graph.Add(src) + graph.Add(tgt) + graph.Connect(dag.BasicEdge(src, tgt)) + } + return graph +} diff --git a/cli/internal/run/log_tag_go.go b/cli/internal/run/log_tag_go.go new file mode 100644 index 0000000..a3e825f --- /dev/null +++ b/cli/internal/run/log_tag_go.go @@ -0,0 +1,11 @@ +//go:build go || !rust +// +build go !rust + +package run + +import "github.com/hashicorp/go-hclog" + +// LogTag logs out the build tag (in this case "go") for the current build. +func LogTag(logger hclog.Logger) { + logger.Debug("build tag: go") +} diff --git a/cli/internal/run/log_tag_rust.go b/cli/internal/run/log_tag_rust.go new file mode 100644 index 0000000..065f438 --- /dev/null +++ b/cli/internal/run/log_tag_rust.go @@ -0,0 +1,11 @@ +//go:build rust +// +build rust + +package run + +import "github.com/hashicorp/go-hclog" + +// LogTag logs out the build tag (in this case "rust") for the current build. +func LogTag(logger hclog.Logger) { + logger.Debug("build tag: rust") +} diff --git a/cli/internal/run/real_run.go b/cli/internal/run/real_run.go new file mode 100644 index 0000000..32c7965 --- /dev/null +++ b/cli/internal/run/real_run.go @@ -0,0 +1,420 @@ +package run + +import ( + gocontext "context" + "fmt" + "log" + "os/exec" + "strings" + "sync" + "time" + + "github.com/fatih/color" + "github.com/hashicorp/go-hclog" + "github.com/mitchellh/cli" + "github.com/pkg/errors" + "github.com/vercel/turbo/cli/internal/cache" + "github.com/vercel/turbo/cli/internal/cmdutil" + "github.com/vercel/turbo/cli/internal/colorcache" + "github.com/vercel/turbo/cli/internal/core" + "github.com/vercel/turbo/cli/internal/env" + "github.com/vercel/turbo/cli/internal/fs" + "github.com/vercel/turbo/cli/internal/graph" + "github.com/vercel/turbo/cli/internal/logstreamer" + "github.com/vercel/turbo/cli/internal/nodes" + "github.com/vercel/turbo/cli/internal/packagemanager" + "github.com/vercel/turbo/cli/internal/process" + "github.com/vercel/turbo/cli/internal/runcache" + "github.com/vercel/turbo/cli/internal/runsummary" + "github.com/vercel/turbo/cli/internal/spinner" + "github.com/vercel/turbo/cli/internal/taskhash" + "github.com/vercel/turbo/cli/internal/turbopath" + "github.com/vercel/turbo/cli/internal/ui" + "github.com/vercel/turbo/cli/internal/util" +) + +// RealRun executes a set of tasks +func RealRun( + ctx gocontext.Context, + g *graph.CompleteGraph, + rs *runSpec, + engine *core.Engine, + taskHashTracker *taskhash.Tracker, + turboCache cache.Cache, + turboJSON *fs.TurboJSON, + globalEnvMode util.EnvMode, + packagesInScope []string, + base *cmdutil.CmdBase, + runSummary runsummary.Meta, + packageManager *packagemanager.PackageManager, + processes *process.Manager, +) error { + singlePackage := rs.Opts.runOpts.SinglePackage + + if singlePackage { + base.UI.Output(fmt.Sprintf("%s %s", ui.Dim("• Running"), ui.Dim(ui.Bold(strings.Join(rs.Targets, ", "))))) + } else { + base.UI.Output(fmt.Sprintf(ui.Dim("• Packages in scope: %v"), strings.Join(packagesInScope, ", "))) + base.UI.Output(fmt.Sprintf("%s %s %s", ui.Dim("• Running"), ui.Dim(ui.Bold(strings.Join(rs.Targets, ", "))), ui.Dim(fmt.Sprintf("in %v packages", rs.FilteredPkgs.Len())))) + } + + // Log whether remote cache is enabled + useHTTPCache := !rs.Opts.cacheOpts.SkipRemote + if 
useHTTPCache {
+		base.UI.Info(ui.Dim("• Remote caching enabled"))
+	} else {
+		base.UI.Info(ui.Dim("• Remote caching disabled"))
+	}
+
+	defer func() {
+		_ = spinner.WaitFor(ctx, turboCache.Shutdown, base.UI, "...writing to cache...", 1500*time.Millisecond)
+	}()
+	colorCache := colorcache.New()
+
+	runCache := runcache.New(turboCache, base.RepoRoot, rs.Opts.runcacheOpts, colorCache)
+
+	ec := &execContext{
+		colorCache:      colorCache,
+		runSummary:      runSummary,
+		rs:              rs,
+		ui:              &cli.ConcurrentUi{Ui: base.UI},
+		runCache:        runCache,
+		env:             turboJSON.GlobalEnv,
+		passthroughEnv:  turboJSON.GlobalPassthroughEnv,
+		logger:          base.Logger,
+		packageManager:  packageManager,
+		processes:       processes,
+		taskHashTracker: taskHashTracker,
+		repoRoot:        base.RepoRoot,
+		isSinglePackage: singlePackage,
+	}
+
+	// Run the task graph with the configured execution options.
+	execOpts := core.EngineExecutionOptions{
+		Parallel:    rs.Opts.runOpts.Parallel,
+		Concurrency: rs.Opts.runOpts.Concurrency,
+	}
+
+	mu := sync.Mutex{}
+	taskSummaries := []*runsummary.TaskSummary{}
+	execFunc := func(ctx gocontext.Context, packageTask *nodes.PackageTask, taskSummary *runsummary.TaskSummary) error {
+		taskExecutionSummary, err := ec.exec(ctx, packageTask)
+
+		// taskExecutionSummary will be nil if the task never executed
+		// (i.e. if the workspace didn't implement the script corresponding to the task)
+		// We don't need to collect any of the outputs or execution if the task didn't execute.
+		if taskExecutionSummary != nil {
+			taskSummary.ExpandedOutputs = taskHashTracker.GetExpandedOutputs(taskSummary.TaskID)
+			taskSummary.Execution = taskExecutionSummary
+			taskSummary.CacheSummary = taskHashTracker.GetCacheStatus(taskSummary.TaskID)
+
+			// Lock, since multiple goroutines may be appending to this slice at the same time
+			mu.Lock()
+			taskSummaries = append(taskSummaries, taskSummary)
+			// not using defer, just release the lock
+			mu.Unlock()
+		}
+
+		// Return the error when there is one
+		if err != nil {
+			return err
+		}
+
+		return nil
+	}
+
+	getArgs := func(taskID string) []string {
+		return rs.ArgsForTask(taskID)
+	}
+
+	visitorFn := g.GetPackageTaskVisitor(ctx, engine.TaskGraph, globalEnvMode, getArgs, base.Logger, execFunc)
+	errs := engine.Execute(visitorFn, execOpts)
+
+	// Track if we saw any child with a non-zero exit code
+	exitCode := 0
+	exitCodeErr := &process.ChildExit{}
+
+	// Assign tasks after execution
+	runSummary.RunSummary.Tasks = taskSummaries
+
+	for _, err := range errs {
+		if errors.As(err, &exitCodeErr) {
+			// If a process gets killed via a signal, Go reports its exit code as -1.
+			// We take the absolute value of the exit code so we don't select '0' as
+			// the greatest exit code.
+			childExit := exitCodeErr.ExitCode
+			if childExit < 0 {
+				childExit = -childExit
+			}
+			if childExit > exitCode {
+				exitCode = childExit
+			}
+		} else if exitCode == 0 {
+			// We hit some error, it shouldn't be exit code 0
+			exitCode = 1
+		}
+		base.UI.Error(err.Error())
+	}
+
+	// When continue-on-error is enabled, failed tasks are not registered as
+	// errors, so we must instead inspect the task summaries.
+	if ec.rs.Opts.runOpts.ContinueOnError {
+		for _, summary := range runSummary.RunSummary.Tasks {
+			if childExit := summary.Execution.ExitCode(); childExit != nil {
+				childExit := *childExit
+				if childExit < 0 {
+					childExit = -childExit
+				}
+				if childExit > exitCode {
+					exitCode = childExit
+				}
+			}
+		}
+	}
+
+	if err := runSummary.Close(ctx, exitCode, g.WorkspaceInfos); err != nil {
+		// We don't need to throw an error, but we can warn on this.
+ // Note: this method doesn't actually return an error for Real Runs at the time of writing. + base.UI.Info(fmt.Sprintf("Failed to close Run Summary %v", err)) + } + + if exitCode != 0 { + return &process.ChildExit{ + ExitCode: exitCode, + } + } + return nil +} + +type execContext struct { + colorCache *colorcache.ColorCache + runSummary runsummary.Meta + rs *runSpec + ui cli.Ui + runCache *runcache.RunCache + env []string + passthroughEnv []string + logger hclog.Logger + packageManager *packagemanager.PackageManager + processes *process.Manager + taskHashTracker *taskhash.Tracker + repoRoot turbopath.AbsoluteSystemPath + isSinglePackage bool +} + +func (ec *execContext) logError(prefix string, err error) { + ec.logger.Error(prefix, "error", err) + + if prefix != "" { + prefix += ": " + } + + ec.ui.Error(fmt.Sprintf("%s%s%s", ui.ERROR_PREFIX, prefix, color.RedString(" %v", err))) +} + +func (ec *execContext) exec(ctx gocontext.Context, packageTask *nodes.PackageTask) (*runsummary.TaskExecutionSummary, error) { + // Setup tracer. Every time tracer() is called the taskExecutionSummary's duration is updated + // So make sure to call it before returning. + tracer, taskExecutionSummary := ec.runSummary.RunSummary.TrackTask(packageTask.TaskID) + + progressLogger := ec.logger.Named("") + progressLogger.Debug("start") + + passThroughArgs := ec.rs.ArgsForTask(packageTask.Task) + hash := packageTask.Hash + ec.logger.Debug("task hash", "value", hash) + // TODO(gsoltis): if/when we fix https://github.com/vercel/turbo/issues/937 + // the following block should never get hit. In the meantime, keep it after hashing + // so that downstream tasks can count on the hash existing + // + // bail if the script doesn't exist + if packageTask.Command == "" { + progressLogger.Debug("no task in package, skipping") + progressLogger.Debug("done", "status", "skipped", "duration", taskExecutionSummary.Duration) + // Return nil here because there was no execution, so there is no task execution summary + return nil, nil + } + + // Set building status now that we know it's going to run. + tracer(runsummary.TargetBuilding, nil, &successCode) + + var prefix string + var prettyPrefix string + if ec.rs.Opts.runOpts.LogPrefix == "none" { + prefix = "" + } else { + prefix = packageTask.OutputPrefix(ec.isSinglePackage) + } + + prettyPrefix = ec.colorCache.PrefixWithColor(packageTask.PackageName, prefix) + + // Cache --------------------------------------------- + taskCache := ec.runCache.TaskCache(packageTask, hash) + // Create a logger for replaying + prefixedUI := &cli.PrefixedUi{ + Ui: ec.ui, + OutputPrefix: prettyPrefix, + InfoPrefix: prettyPrefix, + ErrorPrefix: prettyPrefix, + WarnPrefix: prettyPrefix, + } + + cacheStatus, timeSaved, err := taskCache.RestoreOutputs(ctx, prefixedUI, progressLogger) + + // It's safe to set the CacheStatus even if there's an error, because if there's + // an error, the 0 values are actually what we want. We save cacheStatus and timeSaved + // for the task, so that even if there's an error, we have those values for the taskSummary. + ec.taskHashTracker.SetCacheStatus( + packageTask.TaskID, + runsummary.NewTaskCacheSummary(cacheStatus, &timeSaved), + ) + + if err != nil { + prefixedUI.Error(fmt.Sprintf("error fetching from cache: %s", err)) + } else if cacheStatus.Local || cacheStatus.Remote { // If there was a cache hit + ec.taskHashTracker.SetExpandedOutputs(packageTask.TaskID, taskCache.ExpandedOutputs) + // We only cache successful executions, so we can assume this is a successCode exit. 
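+		// (On a cache hit we return the summary without spawning the process;
+		// RestoreOutputs above has already handled replaying any saved logs.)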
+ tracer(runsummary.TargetCached, nil, &successCode) + return taskExecutionSummary, nil + } + + // Setup command execution + argsactual := append([]string{"run"}, packageTask.Task) + if len(passThroughArgs) > 0 { + // This will be either '--' or a typed nil + argsactual = append(argsactual, ec.packageManager.ArgSeparator...) + argsactual = append(argsactual, passThroughArgs...) + } + + cmd := exec.Command(ec.packageManager.Command, argsactual...) + cmd.Dir = packageTask.Pkg.Dir.ToSystemPath().RestoreAnchor(ec.repoRoot).ToString() + + currentState := env.GetEnvMap() + passthroughEnv := env.EnvironmentVariableMap{} + + if packageTask.EnvMode == util.Strict { + defaultPassthrough := []string{ + "PATH", + "SHELL", + "SYSTEMROOT", // Go will always include this on Windows, but we're being explicit here + } + + passthroughEnv.Merge(env.FromKeys(currentState, defaultPassthrough)) + passthroughEnv.Merge(env.FromKeys(currentState, ec.env)) + passthroughEnv.Merge(env.FromKeys(currentState, ec.passthroughEnv)) + passthroughEnv.Merge(env.FromKeys(currentState, packageTask.TaskDefinition.EnvVarDependencies)) + passthroughEnv.Merge(env.FromKeys(currentState, packageTask.TaskDefinition.PassthroughEnv)) + } else { + passthroughEnv.Merge(currentState) + } + + // Always last to make sure it clobbers. + passthroughEnv.Add("TURBO_HASH", hash) + + cmd.Env = passthroughEnv.ToHashable() + + // Setup stdout/stderr + // If we are not caching anything, then we don't need to write logs to disk + // be careful about this conditional given the default of cache = true + writer, err := taskCache.OutputWriter(prettyPrefix) + if err != nil { + tracer(runsummary.TargetBuildFailed, err, nil) + + ec.logError(prettyPrefix, err) + if !ec.rs.Opts.runOpts.ContinueOnError { + return nil, errors.Wrapf(err, "failed to capture outputs for \"%v\"", packageTask.TaskID) + } + } + + // Create a logger + logger := log.New(writer, "", 0) + // Setup a streamer that we'll pipe cmd.Stdout to + logStreamerOut := logstreamer.NewLogstreamer(logger, prettyPrefix, false) + // Setup a streamer that we'll pipe cmd.Stderr to. + logStreamerErr := logstreamer.NewLogstreamer(logger, prettyPrefix, false) + cmd.Stderr = logStreamerErr + cmd.Stdout = logStreamerOut + // Flush/Reset any error we recorded + logStreamerErr.FlushRecord() + logStreamerOut.FlushRecord() + + closeOutputs := func() error { + var closeErrors []error + + if err := logStreamerOut.Close(); err != nil { + closeErrors = append(closeErrors, errors.Wrap(err, "log stdout")) + } + if err := logStreamerErr.Close(); err != nil { + closeErrors = append(closeErrors, errors.Wrap(err, "log stderr")) + } + + if err := writer.Close(); err != nil { + closeErrors = append(closeErrors, errors.Wrap(err, "log file")) + } + if len(closeErrors) > 0 { + msgs := make([]string, len(closeErrors)) + for i, err := range closeErrors { + msgs[i] = err.Error() + } + return fmt.Errorf("could not flush log output: %v", strings.Join(msgs, ", ")) + } + return nil + } + + // Run the command + if err := ec.processes.Exec(cmd); err != nil { + // close off our outputs. We errored, so we mostly don't care if we fail to close + _ = closeOutputs() + // if we already know we're in the process of exiting, + // we don't need to record an error to that effect. + if errors.Is(err, process.ErrClosing) { + return taskExecutionSummary, nil + } + + // If the error we got is a ChildExit, it will have an ExitCode field + // Pass that along into the tracer. 
+		var e *process.ChildExit
+		if errors.As(err, &e) {
+			tracer(runsummary.TargetBuildFailed, err, &e.ExitCode)
+		} else {
+			// If it wasn't a ChildExit, and something else went wrong, we don't have an exitCode
+			tracer(runsummary.TargetBuildFailed, err, nil)
+		}
+
+		progressLogger.Error(fmt.Sprintf("Error: command finished with error: %v", err))
+		if !ec.rs.Opts.runOpts.ContinueOnError {
+			prefixedUI.Error(fmt.Sprintf("ERROR: command finished with error: %s", err))
+			ec.processes.Close()
+		} else {
+			prefixedUI.Warn("command finished with error, but continuing...")
+			// Set to nil so we don't short-circuit any other execution
+			err = nil
+		}
+
+		// If there was an error, flush the buffered output
+		taskCache.OnError(prefixedUI, progressLogger)
+
+		return taskExecutionSummary, err
+	}
+
+	// Add another timestamp into the tracer, so we have an accurate timestamp for how long the task took.
+	tracer(runsummary.TargetExecuted, nil, nil)
+
+	// Close off our outputs and cache them
+	if err := closeOutputs(); err != nil {
+		ec.logError("", err)
+	} else {
+		if err = taskCache.SaveOutputs(ctx, progressLogger, prefixedUI, int(taskExecutionSummary.Duration.Milliseconds())); err != nil {
+			ec.logError("", fmt.Errorf("error caching output: %w", err))
+		} else {
+			ec.taskHashTracker.SetExpandedOutputs(packageTask.TaskID, taskCache.ExpandedOutputs)
+		}
+	}
+
+	// Clean up tracing
+	tracer(runsummary.TargetBuilt, nil, &successCode)
+	progressLogger.Debug("done", "status", "complete", "duration", taskExecutionSummary.Duration)
+	return taskExecutionSummary, nil
+}
+
+var successCode = 0
diff --git a/cli/internal/run/run.go b/cli/internal/run/run.go
new file mode 100644
index 0000000..2ac1141
--- /dev/null
+++ b/cli/internal/run/run.go
@@ -0,0 +1,487 @@
+package run
+
+import (
+	gocontext "context"
+	"fmt"
+	"os"
+	"sort"
+	"sync"
+	"time"
+
+	"github.com/vercel/turbo/cli/internal/analytics"
+	"github.com/vercel/turbo/cli/internal/cache"
+	"github.com/vercel/turbo/cli/internal/cmdutil"
+	"github.com/vercel/turbo/cli/internal/context"
+	"github.com/vercel/turbo/cli/internal/core"
+	"github.com/vercel/turbo/cli/internal/daemon"
+	"github.com/vercel/turbo/cli/internal/daemonclient"
+	"github.com/vercel/turbo/cli/internal/env"
+	"github.com/vercel/turbo/cli/internal/fs"
+	"github.com/vercel/turbo/cli/internal/graph"
+	"github.com/vercel/turbo/cli/internal/process"
+	"github.com/vercel/turbo/cli/internal/runsummary"
+	"github.com/vercel/turbo/cli/internal/scm"
+	"github.com/vercel/turbo/cli/internal/scope"
+	"github.com/vercel/turbo/cli/internal/signals"
+	"github.com/vercel/turbo/cli/internal/taskhash"
+	"github.com/vercel/turbo/cli/internal/turbostate"
+	"github.com/vercel/turbo/cli/internal/ui"
+	"github.com/vercel/turbo/cli/internal/util"
+
+	"github.com/pkg/errors"
+)
+
+// ExecuteRun executes the run command
+func ExecuteRun(ctx gocontext.Context, helper *cmdutil.Helper, signalWatcher *signals.Watcher, args *turbostate.ParsedArgsFromRust) error {
+	base, err := helper.GetCmdBase(args)
+	if err != nil {
+		return err
+	}
+	// Only log the build tag once we know base is valid.
+	LogTag(base.Logger)
+	tasks := args.Command.Run.Tasks
+	passThroughArgs := args.Command.Run.PassThroughArgs
+	if len(tasks) == 0 {
+		return errors.New("at least one task must be specified")
+	}
+	opts, err := optsFromArgs(args)
+	if err != nil {
+		return err
+	}
+
+	opts.runOpts.PassThroughArgs = passThroughArgs
+	run := configureRun(base, opts, signalWatcher)
+	if err := run.run(ctx, tasks); err != nil {
+		base.LogError("run failed: %v", err)
+		return err
+	}
+	return nil
+}
+
+func 
optsFromArgs(args *turbostate.ParsedArgsFromRust) (*Opts, error) { + runPayload := args.Command.Run + + opts := getDefaultOptions() + // aliases := make(map[string]string) + if err := scope.OptsFromArgs(&opts.scopeOpts, args); err != nil { + return nil, err + } + + // Cache flags + opts.clientOpts.Timeout = args.RemoteCacheTimeout + opts.cacheOpts.SkipFilesystem = runPayload.RemoteOnly + opts.cacheOpts.OverrideDir = runPayload.CacheDir + opts.cacheOpts.Workers = runPayload.CacheWorkers + + // Run flags + opts.runOpts.LogPrefix = runPayload.LogPrefix + opts.runOpts.Summarize = runPayload.Summarize + opts.runOpts.ExperimentalSpaceID = runPayload.ExperimentalSpaceID + opts.runOpts.EnvMode = runPayload.EnvMode + + // Runcache flags + opts.runcacheOpts.SkipReads = runPayload.Force + opts.runcacheOpts.SkipWrites = runPayload.NoCache + + if runPayload.OutputLogs != "" { + err := opts.runcacheOpts.SetTaskOutputMode(runPayload.OutputLogs) + if err != nil { + return nil, err + } + } + + // Run flags + if runPayload.Concurrency != "" { + concurrency, err := util.ParseConcurrency(runPayload.Concurrency) + if err != nil { + return nil, err + } + opts.runOpts.Concurrency = concurrency + } + opts.runOpts.Parallel = runPayload.Parallel + opts.runOpts.Profile = runPayload.Profile + opts.runOpts.ContinueOnError = runPayload.ContinueExecution + opts.runOpts.Only = runPayload.Only + opts.runOpts.NoDaemon = runPayload.NoDaemon + opts.runOpts.SinglePackage = args.Command.Run.SinglePackage + + // See comment on Graph in turbostate.go for an explanation on Graph's representation. + // If flag is passed... + if runPayload.Graph != nil { + // If no value is attached, we print to stdout + if *runPayload.Graph == "" { + opts.runOpts.GraphDot = true + } else { + // Otherwise, we emit to the file name attached as value + opts.runOpts.GraphDot = false + opts.runOpts.GraphFile = *runPayload.Graph + } + } + + if runPayload.DryRun != "" { + opts.runOpts.DryRunJSON = runPayload.DryRun == _dryRunJSONValue + + if runPayload.DryRun == _dryRunTextValue || runPayload.DryRun == _dryRunJSONValue { + opts.runOpts.DryRun = true + } else { + return nil, fmt.Errorf("invalid dry-run mode: %v", runPayload.DryRun) + } + } + + return opts, nil +} + +func configureRun(base *cmdutil.CmdBase, opts *Opts, signalWatcher *signals.Watcher) *run { + if os.Getenv("TURBO_FORCE") == "true" { + opts.runcacheOpts.SkipReads = true + } + + if os.Getenv("TURBO_REMOTE_ONLY") == "true" { + opts.cacheOpts.SkipFilesystem = true + } + + processes := process.NewManager(base.Logger.Named("processes")) + signalWatcher.AddOnClose(processes.Close) + return &run{ + base: base, + opts: opts, + processes: processes, + } +} + +type run struct { + base *cmdutil.CmdBase + opts *Opts + processes *process.Manager +} + +func (r *run) run(ctx gocontext.Context, targets []string) error { + startAt := time.Now() + packageJSONPath := r.base.RepoRoot.UntypedJoin("package.json") + rootPackageJSON, err := fs.ReadPackageJSON(packageJSONPath) + if err != nil { + return fmt.Errorf("failed to read package.json: %w", err) + } + + isStructuredOutput := r.opts.runOpts.GraphDot || r.opts.runOpts.DryRunJSON + + var pkgDepGraph *context.Context + if r.opts.runOpts.SinglePackage { + pkgDepGraph, err = context.SinglePackageGraph(r.base.RepoRoot, rootPackageJSON) + } else { + pkgDepGraph, err = context.BuildPackageGraph(r.base.RepoRoot, rootPackageJSON) + } + if err != nil { + var warnings *context.Warnings + if errors.As(err, &warnings) { + r.base.LogWarning("Issues occurred when 
constructing package graph. Turbo will function, but some features may not be available", err) + } else { + return err + } + } + + if ui.IsCI && !r.opts.runOpts.NoDaemon { + r.base.Logger.Info("skipping turbod since we appear to be in a non-interactive context") + } else if !r.opts.runOpts.NoDaemon { + turbodClient, err := daemon.GetClient(ctx, r.base.RepoRoot, r.base.Logger, r.base.TurboVersion, daemon.ClientOpts{}) + if err != nil { + r.base.LogWarning("", errors.Wrap(err, "failed to contact turbod. Continuing in standalone mode")) + } else { + defer func() { _ = turbodClient.Close() }() + r.base.Logger.Debug("running in daemon mode") + daemonClient := daemonclient.New(turbodClient) + r.opts.runcacheOpts.OutputWatcher = daemonClient + } + } + + if err := util.ValidateGraph(&pkgDepGraph.WorkspaceGraph); err != nil { + return errors.Wrap(err, "Invalid package dependency graph") + } + + // TODO: consolidate some of these arguments + // Note: not all properties are set here. GlobalHash and Pipeline keys are set later + g := &graph.CompleteGraph{ + WorkspaceGraph: pkgDepGraph.WorkspaceGraph, + WorkspaceInfos: pkgDepGraph.WorkspaceInfos, + RootNode: pkgDepGraph.RootNode, + TaskDefinitions: map[string]*fs.TaskDefinition{}, + RepoRoot: r.base.RepoRoot, + } + + turboJSON, err := g.GetTurboConfigFromWorkspace(util.RootPkgName, r.opts.runOpts.SinglePackage) + if err != nil { + return err + } + + // TODO: these values come from a config file, hopefully viper can help us merge these + r.opts.cacheOpts.RemoteCacheOpts = turboJSON.RemoteCacheOptions + + pipeline := turboJSON.Pipeline + g.Pipeline = pipeline + scmInstance, err := scm.FromInRepo(r.base.RepoRoot) + if err != nil { + if errors.Is(err, scm.ErrFallback) { + r.base.Logger.Debug("", err) + } else { + return errors.Wrap(err, "failed to create SCM") + } + } + filteredPkgs, isAllPackages, err := scope.ResolvePackages(&r.opts.scopeOpts, r.base.RepoRoot, scmInstance, pkgDepGraph, r.base.UI, r.base.Logger) + if err != nil { + return errors.Wrap(err, "failed to resolve packages to run") + } + if isAllPackages { + // if there is a root task for any of our targets, we need to add it + for _, target := range targets { + key := util.RootTaskID(target) + if _, ok := pipeline[key]; ok { + filteredPkgs.Add(util.RootPkgName) + // we only need to know we're running a root task once to add it for consideration + break + } + } + } + + globalHashable, err := calculateGlobalHash( + r.base.RepoRoot, + rootPackageJSON, + pipeline, + turboJSON.GlobalEnv, + turboJSON.GlobalDeps, + pkgDepGraph.PackageManager, + pkgDepGraph.Lockfile, + turboJSON.GlobalPassthroughEnv, + r.opts.runOpts.EnvMode, + r.base.Logger, + r.base.UI, + isStructuredOutput, + ) + + if err != nil { + return fmt.Errorf("failed to collect global hash inputs: %v", err) + } + + if globalHash, err := calculateGlobalHashFromHashable(globalHashable); err == nil { + r.base.Logger.Debug("global hash", "value", globalHash) + g.GlobalHash = globalHash + } else { + return fmt.Errorf("failed to calculate global hash: %v", err) + } + + r.base.Logger.Debug("local cache folder", "path", r.opts.cacheOpts.OverrideDir) + + rs := &runSpec{ + Targets: targets, + FilteredPkgs: filteredPkgs, + Opts: r.opts, + } + packageManager := pkgDepGraph.PackageManager + + engine, err := buildTaskGraphEngine( + g, + rs, + r.opts.runOpts.SinglePackage, + ) + + if err != nil { + return errors.Wrap(err, "error preparing engine") + } + + taskHashTracker := taskhash.NewTracker( + g.RootNode, + g.GlobalHash, + // TODO(mehulkar): remove 
g.Pipeline, because we need to get task definitions from CompleteGraph instead + g.Pipeline, + ) + + g.TaskHashTracker = taskHashTracker + + // CalculateFileHashes assigns PackageInputsExpandedHashes as a side-effect + err = taskHashTracker.CalculateFileHashes( + engine.TaskGraph.Vertices(), + rs.Opts.runOpts.Concurrency, + g.WorkspaceInfos, + g.TaskDefinitions, + r.base.RepoRoot, + ) + + if err != nil { + return errors.Wrap(err, "error hashing package files") + } + + // If we are running in parallel, then we remove all the edges in the graph + // except for the root. Rebuild the task graph for backwards compatibility. + // We still use dependencies specified by the pipeline configuration. + if rs.Opts.runOpts.Parallel { + for _, edge := range g.WorkspaceGraph.Edges() { + if edge.Target() != g.RootNode { + g.WorkspaceGraph.RemoveEdge(edge) + } + } + engine, err = buildTaskGraphEngine( + g, + rs, + r.opts.runOpts.SinglePackage, + ) + if err != nil { + return errors.Wrap(err, "error preparing engine") + } + } + + // Graph Run + if rs.Opts.runOpts.GraphFile != "" || rs.Opts.runOpts.GraphDot { + return GraphRun(ctx, rs, engine, r.base) + } + + packagesInScope := rs.FilteredPkgs.UnsafeListOfStrings() + sort.Strings(packagesInScope) + // Initiate analytics and cache + analyticsClient := r.initAnalyticsClient(ctx) + defer analyticsClient.CloseWithTimeout(50 * time.Millisecond) + turboCache, err := r.initCache(ctx, rs, analyticsClient) + + if err != nil { + if errors.Is(err, cache.ErrNoCachesEnabled) { + r.base.UI.Warn("No caches are enabled. You can try \"turbo login\", \"turbo link\", or ensuring you are not passing --remote-only to enable caching") + } else { + return errors.Wrap(err, "failed to set up caching") + } + } + + var envVarPassthroughMap env.EnvironmentVariableMap + if globalHashable.envVarPassthroughs != nil { + if envVarPassthroughDetailedMap, err := env.GetHashableEnvVars(globalHashable.envVarPassthroughs, nil, ""); err == nil { + envVarPassthroughMap = envVarPassthroughDetailedMap.BySource.Explicit + } + } + + globalEnvMode := rs.Opts.runOpts.EnvMode + if globalEnvMode == util.Infer && turboJSON.GlobalPassthroughEnv != nil { + globalEnvMode = util.Strict + } + + // RunSummary contains information that is statically analyzable about + // the tasks that we expect to run based on the user command.
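+ // For illustration (filter value hypothetical): a dry run and a real run of the
+ // same invocation share this static skeleton; only execution data differs:
+ //
+ //	turbo run build --filter=web            // real run: executes tasks, records timings
+ //	turbo run build --filter=web --dry=json // same summary skeleton, no execution data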
+ summary := runsummary.NewRunSummary( + startAt, + r.base.UI, + r.base.RepoRoot, + rs.Opts.scopeOpts.PackageInferenceRoot, + r.base.TurboVersion, + r.base.APIClient, + rs.Opts.runOpts, + packagesInScope, + globalEnvMode, + runsummary.NewGlobalHashSummary( + globalHashable.globalFileHashMap, + globalHashable.rootExternalDepsHash, + globalHashable.envVars, + envVarPassthroughMap, + globalHashable.globalCacheKey, + globalHashable.pipeline, + ), + rs.Opts.SynthesizeCommand(rs.Targets), + ) + + // Dry Run + if rs.Opts.runOpts.DryRun { + return DryRun( + ctx, + g, + rs, + engine, + taskHashTracker, + turboCache, + turboJSON, + globalEnvMode, + r.base, + summary, + ) + } + + // Regular run + return RealRun( + ctx, + g, + rs, + engine, + taskHashTracker, + turboCache, + turboJSON, + globalEnvMode, + packagesInScope, + r.base, + summary, + // Extra arg only for regular runs, dry-run doesn't get this + packageManager, + r.processes, + ) +} + +func (r *run) initAnalyticsClient(ctx gocontext.Context) analytics.Client { + apiClient := r.base.APIClient + var analyticsSink analytics.Sink + if apiClient.IsLinked() { + analyticsSink = apiClient + } else { + r.opts.cacheOpts.SkipRemote = true + analyticsSink = analytics.NullSink + } + analyticsClient := analytics.NewClient(ctx, analyticsSink, r.base.Logger.Named("analytics")) + return analyticsClient +} + +func (r *run) initCache(ctx gocontext.Context, rs *runSpec, analyticsClient analytics.Client) (cache.Cache, error) { + apiClient := r.base.APIClient + // Theoretically this is overkill, but bias towards not spamming the console + once := &sync.Once{} + + return cache.New(rs.Opts.cacheOpts, r.base.RepoRoot, apiClient, analyticsClient, func(_cache cache.Cache, err error) { + // Currently the HTTP Cache is the only one that can be disabled. + // With a cache system refactor, we might consider giving names to the caches so + // we can accurately report them here. + once.Do(func() { + r.base.LogWarning("Remote Caching is unavailable", err) + }) + }) +} + +func buildTaskGraphEngine( + g *graph.CompleteGraph, + rs *runSpec, + isSinglePackage bool, +) (*core.Engine, error) { + engine := core.NewEngine(g, isSinglePackage) + + // Note: g.Pipeline is a map, but this for loop only cares about the keys + for taskName := range g.Pipeline { + engine.AddTask(taskName) + } + + if err := engine.Prepare(&core.EngineBuildingOptions{ + Packages: rs.FilteredPkgs.UnsafeListOfStrings(), + TaskNames: rs.Targets, + TasksOnly: rs.Opts.runOpts.Only, + }); err != nil { + return nil, err + } + + // Check for cycles in the DAG. 
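+ // A hedged example of what this rejects (task names hypothetical): a turbo.json
+ // pipeline in which `build` and `test` each list the other in dependsOn forms a
+ // cycle in the task graph and fails the validation below:
+ //
+ //	"pipeline": {
+ //	  "build": { "dependsOn": ["test"] },
+ //	  "test": { "dependsOn": ["build"] }
+ //	}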
+ if err := util.ValidateGraph(engine.TaskGraph); err != nil { + return nil, fmt.Errorf("Invalid task dependency graph:\n%v", err) + } + + // Check that no tasks would be blocked by a persistent task + if err := engine.ValidatePersistentDependencies(g, rs.Opts.runOpts.Concurrency); err != nil { + return nil, fmt.Errorf("Invalid persistent task configuration:\n%v", err) + } + + return engine, nil +} + +// dry run custom flag +// NOTE: These *must* be kept in sync with the corresponding Rust +// enum definitions in shim/src/commands/mod.rs +const ( + _dryRunJSONValue = "Json" + _dryRunTextValue = "Text" +) diff --git a/cli/internal/run/run_spec.go b/cli/internal/run/run_spec.go new file mode 100644 index 0000000..14402d3 --- /dev/null +++ b/cli/internal/run/run_spec.go @@ -0,0 +1,90 @@ +// Package run implements `turbo run` +// This file implements some structs for options +package run + +import ( + "strings" + + "github.com/vercel/turbo/cli/internal/cache" + "github.com/vercel/turbo/cli/internal/client" + "github.com/vercel/turbo/cli/internal/runcache" + "github.com/vercel/turbo/cli/internal/scope" + "github.com/vercel/turbo/cli/internal/util" +) + +// runSpec contains the run-specific configuration elements that come from a particular +// invocation of turbo. +type runSpec struct { + // Targets is the list of tasks that are going to run this time + // E.g. in `turbo run build lint` Targets will be ["build", "lint"] + Targets []string + + // FilteredPkgs is the list of packages that are relevant for this run. + FilteredPkgs util.Set + + // Opts contains various opts, gathered from CLI flags, + // but bucketed in smaller structs based on what they mean. + Opts *Opts +} + +// ArgsForTask returns the set of args that need to be passed through to the task +func (rs *runSpec) ArgsForTask(task string) []string { + passThroughArgs := make([]string, 0, len(rs.Opts.runOpts.PassThroughArgs)) + for _, target := range rs.Targets { + if target == task { + passThroughArgs = append(passThroughArgs, rs.Opts.runOpts.PassThroughArgs...) + } + } + return passThroughArgs +} + +// Opts holds the current run operations configuration +type Opts struct { + runOpts util.RunOpts + cacheOpts cache.Opts + clientOpts client.Opts + runcacheOpts runcache.Opts + scopeOpts scope.Opts +} + +// SynthesizeCommand produces a command line equivalent to the current set of opts: +// the same packages, tasks, and task arguments.
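+ //
+ // A minimal sketch of the round trip (flag values hypothetical):
+ //
+ //	opts := getDefaultOptions()
+ //	opts.scopeOpts.FilterPatterns = []string{"web"}
+ //	opts.runOpts.Parallel = true
+ //	fmt.Println(opts.SynthesizeCommand([]string{"build"}))
+ //	// Output: turbo run build --filter=web --parallel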
+func (o *Opts) SynthesizeCommand(tasks []string) string { + cmd := "turbo run" + cmd += " " + strings.Join(tasks, " ") + for _, filterPattern := range o.scopeOpts.FilterPatterns { + cmd += " --filter=" + filterPattern + } + for _, filterPattern := range o.scopeOpts.LegacyFilter.AsFilterPatterns() { + cmd += " --filter=" + filterPattern + } + if o.runOpts.Parallel { + cmd += " --parallel" + } + if o.runOpts.ContinueOnError { + cmd += " --continue" + } + if o.runOpts.DryRun { + if o.runOpts.DryRunJSON { + cmd += " --dry=json" + } else { + cmd += " --dry" + } + } + if len(o.runOpts.PassThroughArgs) > 0 { + cmd += " -- " + strings.Join(o.runOpts.PassThroughArgs, " ") + } + return cmd +} + +// getDefaultOptions returns the default set of Opts for every run +func getDefaultOptions() *Opts { + return &Opts{ + runOpts: util.RunOpts{ + Concurrency: 10, + }, + clientOpts: client.Opts{ + Timeout: client.ClientTimeout, + }, + } +} diff --git a/cli/internal/run/run_spec_test.go b/cli/internal/run/run_spec_test.go new file mode 100644 index 0000000..2bcfe2b --- /dev/null +++ b/cli/internal/run/run_spec_test.go @@ -0,0 +1,107 @@ +package run + +import ( + "testing" + + "github.com/vercel/turbo/cli/internal/scope" + "github.com/vercel/turbo/cli/internal/util" +) + +func TestSynthesizeCommand(t *testing.T) { + testCases := []struct { + filterPatterns []string + legacyFilter scope.LegacyFilter + passThroughArgs []string + parallel bool + continueOnError bool + dryRun bool + dryRunJSON bool + tasks []string + expected string + }{ + { + filterPatterns: []string{"my-app"}, + tasks: []string{"build"}, + expected: "turbo run build --filter=my-app", + }, + { + filterPatterns: []string{"my-app"}, + tasks: []string{"build"}, + passThroughArgs: []string{"-v", "--foo=bar"}, + expected: "turbo run build --filter=my-app -- -v --foo=bar", + }, + { + legacyFilter: scope.LegacyFilter{ + Entrypoints: []string{"my-app"}, + SkipDependents: true, + }, + tasks: []string{"build"}, + passThroughArgs: []string{"-v", "--foo=bar"}, + expected: "turbo run build --filter=my-app -- -v --foo=bar", + }, + { + legacyFilter: scope.LegacyFilter{ + Entrypoints: []string{"my-app"}, + SkipDependents: true, + }, + filterPatterns: []string{"other-app"}, + tasks: []string{"build"}, + passThroughArgs: []string{"-v", "--foo=bar"}, + expected: "turbo run build --filter=other-app --filter=my-app -- -v --foo=bar", + }, + { + legacyFilter: scope.LegacyFilter{ + Entrypoints: []string{"my-app"}, + IncludeDependencies: true, + Since: "some-ref", + }, + filterPatterns: []string{"other-app"}, + tasks: []string{"build"}, + expected: "turbo run build --filter=other-app --filter=...my-app...[some-ref]...", + }, + { + filterPatterns: []string{"my-app"}, + tasks: []string{"build"}, + parallel: true, + continueOnError: true, + expected: "turbo run build --filter=my-app --parallel --continue", + }, + { + filterPatterns: []string{"my-app"}, + tasks: []string{"build"}, + dryRun: true, + expected: "turbo run build --filter=my-app --dry", + }, + { + filterPatterns: []string{"my-app"}, + tasks: []string{"build"}, + dryRun: true, + dryRunJSON: true, + expected: "turbo run build --filter=my-app --dry=json", + }, + } + + for _, testCase := range testCases { + testCase := testCase + t.Run(testCase.expected, func(t *testing.T) { + o := Opts{ + scopeOpts: scope.Opts{ + FilterPatterns: testCase.filterPatterns, + LegacyFilter: testCase.legacyFilter, + }, + runOpts: util.RunOpts{ + PassThroughArgs: testCase.passThroughArgs, + Parallel: testCase.parallel, + ContinueOnError: 
testCase.continueOnError, + DryRun: testCase.dryRun, + DryRunJSON: testCase.dryRunJSON, + }, + } + cmd := o.SynthesizeCommand(testCase.tasks) + if cmd != testCase.expected { + t.Errorf("SynthesizeCommand() got %v, want %v", cmd, testCase.expected) + } + }) + } + +} diff --git a/cli/internal/runcache/output_watcher.go b/cli/internal/runcache/output_watcher.go new file mode 100644 index 0000000..5f90f0e --- /dev/null +++ b/cli/internal/runcache/output_watcher.go @@ -0,0 +1,32 @@ +package runcache + +import ( + "context" + + "github.com/vercel/turbo/cli/internal/fs" +) + +// OutputWatcher instances are responsible for tracking changes to task outputs +type OutputWatcher interface { + // GetChangedOutputs returns which of the given globs have changed since the specified hash was last run + GetChangedOutputs(ctx context.Context, hash string, repoRelativeOutputGlobs []string) ([]string, error) + // NotifyOutputsWritten tells the watcher that the given globs have been cached with the specified hash + NotifyOutputsWritten(ctx context.Context, hash string, repoRelativeOutputGlobs fs.TaskOutputs) error +} + +// NoOpOutputWatcher implements OutputWatcher, but always considers every glob to have changed +type NoOpOutputWatcher struct{} + +var _ OutputWatcher = (*NoOpOutputWatcher)(nil) + +// GetChangedOutputs implements OutputWatcher.GetChangedOutputs. +// Since this is a no-op watcher, no tracking is done. +func (NoOpOutputWatcher) GetChangedOutputs(ctx context.Context, hash string, repoRelativeOutputGlobs []string) ([]string, error) { + return repoRelativeOutputGlobs, nil +} + +// NotifyOutputsWritten implements OutputWatcher.NotifyOutputsWritten. +// Since this is a no-op watcher, consider all globs to have changed +func (NoOpOutputWatcher) NotifyOutputsWritten(ctx context.Context, hash string, repoRelativeOutputGlobs fs.TaskOutputs) error { + return nil +} diff --git a/cli/internal/runcache/runcache.go b/cli/internal/runcache/runcache.go new file mode 100644 index 0000000..ba6145b --- /dev/null +++ b/cli/internal/runcache/runcache.go @@ -0,0 +1,354 @@ +package runcache + +import ( + "bufio" + "context" + "fmt" + "io" + "os" + "path/filepath" + "strings" + + "github.com/fatih/color" + "github.com/hashicorp/go-hclog" + "github.com/mitchellh/cli" + "github.com/vercel/turbo/cli/internal/cache" + "github.com/vercel/turbo/cli/internal/colorcache" + "github.com/vercel/turbo/cli/internal/fs" + "github.com/vercel/turbo/cli/internal/globby" + "github.com/vercel/turbo/cli/internal/logstreamer" + "github.com/vercel/turbo/cli/internal/nodes" + "github.com/vercel/turbo/cli/internal/turbopath" + "github.com/vercel/turbo/cli/internal/ui" + "github.com/vercel/turbo/cli/internal/util" +) + +// LogReplayer is a function that is responsible for replaying the contents of a given log file +type LogReplayer = func(logger hclog.Logger, output *cli.PrefixedUi, logFile turbopath.AbsoluteSystemPath) + +// Opts holds the configurable options for a RunCache instance +type Opts struct { + SkipReads bool + SkipWrites bool + TaskOutputModeOverride *util.TaskOutputMode + LogReplayer LogReplayer + OutputWatcher OutputWatcher +} + +// SetTaskOutputMode parses the task output mode from string and then sets it in opts +func (opts *Opts) SetTaskOutputMode(value string) error { + outputMode, err := util.FromTaskOutputModeString(value) + if err != nil { + return fmt.Errorf("must be one of \"%v\"", TaskOutputModes()) + } + opts.TaskOutputModeOverride = &outputMode + return nil +} + +// TaskOutputModes creates the description string for 
task outputs +func TaskOutputModes() string { + var builder strings.Builder + + first := true + for _, mode := range util.TaskOutputModeStrings { + if !first { + builder.WriteString("|") + } + first = false + builder.WriteString(string(mode)) + } + return builder.String() +} + +// RunCache represents the interface to the cache for a single `turbo run` +type RunCache struct { + taskOutputModeOverride *util.TaskOutputMode + cache cache.Cache + readsDisabled bool + writesDisabled bool + repoRoot turbopath.AbsoluteSystemPath + logReplayer LogReplayer + outputWatcher OutputWatcher + colorCache *colorcache.ColorCache +} + +// New returns a new instance of RunCache, wrapping the given cache +func New(cache cache.Cache, repoRoot turbopath.AbsoluteSystemPath, opts Opts, colorCache *colorcache.ColorCache) *RunCache { + rc := &RunCache{ + taskOutputModeOverride: opts.TaskOutputModeOverride, + cache: cache, + readsDisabled: opts.SkipReads, + writesDisabled: opts.SkipWrites, + repoRoot: repoRoot, + logReplayer: opts.LogReplayer, + outputWatcher: opts.OutputWatcher, + colorCache: colorCache, + } + + if rc.logReplayer == nil { + rc.logReplayer = defaultLogReplayer + } + if rc.outputWatcher == nil { + rc.outputWatcher = &NoOpOutputWatcher{} + } + return rc +} + +// TaskCache represents a single task's (package-task?) interface to the RunCache +// and controls access to the task's outputs +type TaskCache struct { + ExpandedOutputs []turbopath.AnchoredSystemPath + rc *RunCache + repoRelativeGlobs fs.TaskOutputs + hash string + pt *nodes.PackageTask + taskOutputMode util.TaskOutputMode + cachingDisabled bool + LogFileName turbopath.AbsoluteSystemPath +} + +// RestoreOutputs attempts to restore output for the corresponding task from the cache. +// Returns the cacheStatus, the timeSaved, and error values, so the consumer can understand +// what happened in here. +func (tc *TaskCache) RestoreOutputs(ctx context.Context, prefixedUI *cli.PrefixedUi, progressLogger hclog.Logger) (cache.ItemStatus, int, error) { + if tc.cachingDisabled || tc.rc.readsDisabled { + if tc.taskOutputMode != util.NoTaskOutput && tc.taskOutputMode != util.ErrorTaskOutput { + prefixedUI.Output(fmt.Sprintf("cache bypass, force executing %s", ui.Dim(tc.hash))) + } + return cache.ItemStatus{Local: false, Remote: false}, 0, nil + } + + changedOutputGlobs, err := tc.rc.outputWatcher.GetChangedOutputs(ctx, tc.hash, tc.repoRelativeGlobs.Inclusions) + if err != nil { + progressLogger.Warn(fmt.Sprintf("Failed to check if we can skip restoring outputs for %v: %v. Proceeding to check cache", tc.pt.TaskID, err)) + prefixedUI.Warn(ui.Dim(fmt.Sprintf("Failed to check if we can skip restoring outputs for %v: %v. Proceeding to check cache", tc.pt.TaskID, err))) + changedOutputGlobs = tc.repoRelativeGlobs.Inclusions + } + + hasChangedOutputs := len(changedOutputGlobs) > 0 + var cacheStatus cache.ItemStatus + var timeSaved int + if hasChangedOutputs { + // Note that we currently don't use the output globs when restoring, but we could in the + // future to avoid doing unnecessary file I/O. We also need to pass along the exclusion + // globs as well. 
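+ // For reference, these globs are repo-relative, pre-joined with the package
+ // directory when the TaskCache is built (paths hypothetical): a task in
+ // apps/web with outputs ["dist/**", "!dist/cache/**"] yields
+ //
+ //	Inclusions: []string{"apps/web/dist/**"}
+ //	Exclusions: []string{"apps/web/dist/cache/**"}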
+ itemStatus, restoredFiles, duration, err := tc.rc.cache.Fetch(tc.rc.repoRoot, tc.hash, nil) + hit := itemStatus.Local || itemStatus.Remote + timeSaved = duration + tc.ExpandedOutputs = restoredFiles + // Assign to this variable outside this closure so we can return at the end of the function + cacheStatus = itemStatus + if err != nil { + // If there was an error fetching from cache, we'll say there was no cache hit + return cache.ItemStatus{Local: false, Remote: false}, 0, err + } else if !hit { + if tc.taskOutputMode != util.NoTaskOutput && tc.taskOutputMode != util.ErrorTaskOutput { + prefixedUI.Output(fmt.Sprintf("cache miss, executing %s", ui.Dim(tc.hash))) + } + // If nothing was restored, likewise report that there was no hit + return cache.ItemStatus{Local: false, Remote: false}, 0, nil + } + + if err := tc.rc.outputWatcher.NotifyOutputsWritten(ctx, tc.hash, tc.repoRelativeGlobs); err != nil { + // Don't fail the whole operation just because we failed to watch the outputs + prefixedUI.Warn(ui.Dim(fmt.Sprintf("Failed to mark outputs as cached for %v: %v", tc.pt.TaskID, err))) + } + } else { + // If no outputs have changed, that means we have a local cache hit. + cacheStatus.Local = true + prefixedUI.Warn(fmt.Sprintf("Skipping cache check for %v, outputs have not changed since previous run.", tc.pt.TaskID)) + } + + switch tc.taskOutputMode { + // When only showing new task output, cached output should only show the computed hash + case util.NewTaskOutput: + fallthrough + case util.HashTaskOutput: + prefixedUI.Info(fmt.Sprintf("cache hit, suppressing output %s", ui.Dim(tc.hash))) + case util.FullTaskOutput: + progressLogger.Debug("log file", "path", tc.LogFileName) + prefixedUI.Info(fmt.Sprintf("cache hit, replaying output %s", ui.Dim(tc.hash))) + tc.ReplayLogFile(prefixedUI, progressLogger) + case util.ErrorTaskOutput: + // The task succeeded, so we don't output anything in this case + default: + // NoLogs, do not output anything + } + // TODO: timeSaved could be part of cacheStatus, so we don't have to make a new struct + // downstream, but this would be a more invasive change right now. + return cacheStatus, timeSaved, nil +} + +// ReplayLogFile writes out the stored logfile to the terminal +func (tc TaskCache) ReplayLogFile(prefixedUI *cli.PrefixedUi, progressLogger hclog.Logger) { + if tc.LogFileName.FileExists() { + tc.rc.logReplayer(progressLogger, prefixedUI, tc.LogFileName) + } +} + +// OnError replays the logfile if --output-mode=errors-only. +// This is called if the task exited with a non-zero exit code. +func (tc TaskCache) OnError(terminal *cli.PrefixedUi, logger hclog.Logger) { + if tc.taskOutputMode == util.ErrorTaskOutput { + tc.ReplayLogFile(terminal, logger) + } +} + +// nopWriteCloser is modeled after io.NopCloser, which is for Readers +type nopWriteCloser struct { + io.Writer +} + +func (nopWriteCloser) Close() error { return nil } + +type fileWriterCloser struct { + io.Writer + file *os.File + bufio *bufio.Writer +} + +func (fwc *fileWriterCloser) Close() error { + if err := fwc.bufio.Flush(); err != nil { + return err + } + return fwc.file.Close() +} + +// OutputWriter creates a sink suitable for handling the output of the command associated +// with this task.
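+ //
+ // Routing, per the output modes handled below:
+ //
+ //	caching disabled / writes disabled -> prefixed stdout only (nop closer)
+ //	none, hash-only, errors-only       -> log file only
+ //	full, new                          -> prefixed stdout + log file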
+func (tc TaskCache) OutputWriter(prefix string) (io.WriteCloser, error) { + // an os.Stdout wrapper that will add prefixes before printing to stdout + stdoutWriter := logstreamer.NewPrettyStdoutWriter(prefix) + + if tc.cachingDisabled || tc.rc.writesDisabled { + return nopWriteCloser{stdoutWriter}, nil + } + // Setup log file + if err := tc.LogFileName.EnsureDir(); err != nil { + return nil, err + } + + output, err := tc.LogFileName.Create() + if err != nil { + return nil, err + } + + bufWriter := bufio.NewWriter(output) + fwc := &fileWriterCloser{ + file: output, + bufio: bufWriter, + } + if tc.taskOutputMode == util.NoTaskOutput || tc.taskOutputMode == util.HashTaskOutput || tc.taskOutputMode == util.ErrorTaskOutput { + // only write to log file, not to stdout + fwc.Writer = bufWriter + } else { + fwc.Writer = io.MultiWriter(stdoutWriter, bufWriter) + } + + return fwc, nil +} + +var _emptyIgnore []string + +// SaveOutputs is responsible for saving the outputs of task to the cache, after the task has completed +func (tc *TaskCache) SaveOutputs(ctx context.Context, logger hclog.Logger, terminal cli.Ui, duration int) error { + if tc.cachingDisabled || tc.rc.writesDisabled { + return nil + } + + logger.Debug("caching output", "outputs", tc.repoRelativeGlobs) + + filesToBeCached, err := globby.GlobAll(tc.rc.repoRoot.ToStringDuringMigration(), tc.repoRelativeGlobs.Inclusions, tc.repoRelativeGlobs.Exclusions) + if err != nil { + return err + } + + relativePaths := make([]turbopath.AnchoredSystemPath, len(filesToBeCached)) + + for index, value := range filesToBeCached { + relativePath, err := tc.rc.repoRoot.RelativePathString(value) + if err != nil { + logger.Error(fmt.Sprintf("error: %v", err)) + terminal.Error(fmt.Sprintf("%s%s", ui.ERROR_PREFIX, color.RedString(" %v", fmt.Errorf("File path cannot be made relative: %w", err)))) + continue + } + relativePaths[index] = fs.UnsafeToAnchoredSystemPath(relativePath) + } + + if err = tc.rc.cache.Put(tc.rc.repoRoot, tc.hash, duration, relativePaths); err != nil { + return err + } + err = tc.rc.outputWatcher.NotifyOutputsWritten(ctx, tc.hash, tc.repoRelativeGlobs) + if err != nil { + // Don't fail the cache write because we also failed to record it, we will just do + // extra I/O in the future restoring files that haven't changed from cache + logger.Warn(fmt.Sprintf("Failed to mark outputs as cached for %v: %v", tc.pt.TaskID, err)) + terminal.Warn(ui.Dim(fmt.Sprintf("Failed to mark outputs as cached for %v: %v", tc.pt.TaskID, err))) + } + + tc.ExpandedOutputs = relativePaths + + return nil +} + +// TaskCache returns a TaskCache instance, providing an interface to the underlying cache specific +// to this run and the given PackageTask +func (rc *RunCache) TaskCache(pt *nodes.PackageTask, hash string) TaskCache { + logFileName := rc.repoRoot.UntypedJoin(pt.LogFile) + hashableOutputs := pt.HashableOutputs() + repoRelativeGlobs := fs.TaskOutputs{ + Inclusions: make([]string, len(hashableOutputs.Inclusions)), + Exclusions: make([]string, len(hashableOutputs.Exclusions)), + } + + for index, output := range hashableOutputs.Inclusions { + repoRelativeGlobs.Inclusions[index] = filepath.Join(pt.Pkg.Dir.ToStringDuringMigration(), output) + } + for index, output := range hashableOutputs.Exclusions { + repoRelativeGlobs.Exclusions[index] = filepath.Join(pt.Pkg.Dir.ToStringDuringMigration(), output) + } + + taskOutputMode := pt.TaskDefinition.OutputMode + if rc.taskOutputModeOverride != nil { + taskOutputMode = *rc.taskOutputModeOverride + } + + return TaskCache{ + 
ExpandedOutputs: []turbopath.AnchoredSystemPath{}, + rc: rc, + repoRelativeGlobs: repoRelativeGlobs, + hash: hash, + pt: pt, + taskOutputMode: taskOutputMode, + cachingDisabled: !pt.TaskDefinition.ShouldCache, + LogFileName: logFileName, + } +} + +// defaultLogReplayer will try to replay logs back to the given Ui instance +func defaultLogReplayer(logger hclog.Logger, output *cli.PrefixedUi, logFileName turbopath.AbsoluteSystemPath) { + logger.Debug("start replaying logs") + f, err := logFileName.Open() + if err != nil { + output.Warn(fmt.Sprintf("error reading logs: %v", err)) + logger.Error(fmt.Sprintf("error reading logs: %v", err.Error())) + // Nothing to replay if the log file couldn't be opened + return + } + defer func() { _ = f.Close() }() + scan := bufio.NewScanner(f) + for scan.Scan() { + str := string(scan.Bytes()) + // cli.PrefixedUi won't prefix empty strings (it'll just print them as empty strings). + // So if we have a blank string, we'll just output the string here, instead of passing + // it onto the PrefixedUi. + if str == "" { + // Just output the prefix if the current line is a blank string + // Note: output.OutputPrefix is also a colored prefix already + output.Ui.Output(output.OutputPrefix) + } else { + // Writing to Stdout + output.Output(str) + } + + } + logger.Debug("finish replaying logs") +} diff --git a/cli/internal/runsummary/execution_summary.go b/cli/internal/runsummary/execution_summary.go new file mode 100644 index 0000000..fabb690 --- /dev/null +++ b/cli/internal/runsummary/execution_summary.go @@ -0,0 +1,282 @@ +package runsummary + +import ( + "encoding/json" + "fmt" + "os" + "sync" + "time" + + "github.com/vercel/turbo/cli/internal/chrometracing" + "github.com/vercel/turbo/cli/internal/fs" + "github.com/vercel/turbo/cli/internal/turbopath" + + "github.com/mitchellh/cli" +) + +// executionEvent represents a single event in the build process, i.e. a target starting or finishing +// building, or reaching some milestone within those steps. +type executionEvent struct { + // Timestamp of this event + Time time.Time + // Duration of this event + Duration time.Duration + // Target which has just changed + Label string + // Its current status + Status executionEventName + // Error, only populated for failure statuses + Err string + + exitCode *int +} + +// executionEventName represents the status of a target when we log a build result. +type executionEventName int + +// The collection of expected build result statuses. +const ( + targetInitialized executionEventName = iota + TargetBuilding + TargetBuildStopped + TargetExecuted + TargetBuilt + TargetCached + TargetBuildFailed +) + +func (en executionEventName) toString() string { + switch en { + case targetInitialized: + return "initialized" + case TargetBuilding: + return "building" + case TargetBuildStopped: + return "buildStopped" + case TargetExecuted: + return "executed" + case TargetBuilt: + return "built" + case TargetCached: + return "cached" + case TargetBuildFailed: + return "buildFailed" + } + + return "" +} + +// TaskExecutionSummary contains data about the state of a single task in a turbo run. +// Some fields are updated over time as the task prepares to execute and finishes execution. +type TaskExecutionSummary struct { + startAt time.Time // set once + status executionEventName // current status, updated during execution + err string // only populated for failure statuses + Duration time.Duration // updated during the task execution + exitCode *int // pointer so we can distinguish between 0 and unknown.
+} + +func (ts *TaskExecutionSummary) endTime() time.Time { + return ts.startAt.Add(ts.Duration) +} + +// MarshalJSON munges the TaskExecutionSummary into a format we want +// We'll use an anonymous, private struct for this, so it's not confusingly duplicated +func (ts *TaskExecutionSummary) MarshalJSON() ([]byte, error) { + serializable := struct { + Start int64 `json:"startTime"` + End int64 `json:"endTime"` + Err string `json:"error,omitempty"` + ExitCode *int `json:"exitCode"` + }{ + Start: ts.startAt.UnixMilli(), + End: ts.endTime().UnixMilli(), + Err: ts.err, + ExitCode: ts.exitCode, + } + + return json.Marshal(&serializable) +} + +// ExitCode returns the exit code; nil means no exit code was received +func (ts *TaskExecutionSummary) ExitCode() *int { + var exitCode int + if ts.exitCode == nil { + return nil + } + exitCode = *ts.exitCode + return &exitCode +} + +// executionSummary is the state of the entire `turbo run`. Individual task state is in the `tasks` field +type executionSummary struct { + // mu guards reads/writes to the `tasks` field + mu sync.Mutex + tasks map[string]*TaskExecutionSummary // key is a taskID + profileFilename string + + // These get serialized to JSON + command string // a synthesized turbo command to produce this invocation + repoPath turbopath.RelativeSystemPath // the (possibly empty) path from the turborepo root to where the command was run + success int // number of tasks that exited successfully (does not include cache hits) + failure int // number of tasks that exited with failure + cached int // number of tasks that had a cache hit + attempted int // number of tasks that started + startedAt time.Time + endedAt time.Time + exitCode int +} + +// MarshalJSON munges the executionSummary into a format we want +// We'll use an anonymous, private struct for this, so it's not confusingly duplicated. +func (es *executionSummary) MarshalJSON() ([]byte, error) { + serializable := struct { + Command string `json:"command"` + RepoPath string `json:"repoPath"` + Success int `json:"success"` + Failure int `json:"failed"` + Cached int `json:"cached"` + Attempted int `json:"attempted"` + StartTime int64 `json:"startTime"` + EndTime int64 `json:"endTime"` + ExitCode int `json:"exitCode"` + }{ + Command: es.command, + RepoPath: es.repoPath.ToString(), + StartTime: es.startedAt.UnixMilli(), + EndTime: es.endedAt.UnixMilli(), + Success: es.success, + Failure: es.failure, + Cached: es.cached, + Attempted: es.attempted, + ExitCode: es.exitCode, + } + + return json.Marshal(&serializable) +} + +// newExecutionSummary creates an executionSummary instance to track events in a `turbo run`. +func newExecutionSummary(command string, repoPath turbopath.RelativeSystemPath, start time.Time, tracingProfile string) *executionSummary { + if tracingProfile != "" { + chrometracing.EnableTracing() + } + + return &executionSummary{ + command: command, + repoPath: repoPath, + success: 0, + failure: 0, + cached: 0, + attempted: 0, + tasks: make(map[string]*TaskExecutionSummary), + startedAt: start, + profileFilename: tracingProfile, + } +} + +// Run starts the execution of a single task.
It returns a function that can +// be used to update the state of a given taskID with the executionEventName enum +func (es *executionSummary) run(taskID string) (func(outcome executionEventName, err error, exitCode *int), *TaskExecutionSummary) { + start := time.Now() + taskExecutionSummary := es.add(&executionEvent{ + Time: start, + Label: taskID, + Status: targetInitialized, + }) + + tracer := chrometracing.Event(taskID) + + // This function can be called with an enum and an optional error to update + // the state of a given taskID. + tracerFn := func(outcome executionEventName, err error, exitCode *int) { + defer tracer.Done() + now := time.Now() + result := &executionEvent{ + Time: now, + Duration: now.Sub(start), + Label: taskID, + Status: outcome, + // We'll assign this here regardless of whether it is nil, but we'll check for nil + // when we assign it to the taskExecutionSummary. + exitCode: exitCode, + } + + if err != nil { + result.Err = err.Error() + } + + // Ignore the return value here + es.add(result) + } + + return tracerFn, taskExecutionSummary +} + +func (es *executionSummary) add(event *executionEvent) *TaskExecutionSummary { + es.mu.Lock() + defer es.mu.Unlock() + + var taskExecSummary *TaskExecutionSummary + if ts, ok := es.tasks[event.Label]; ok { + // If we already know about this task, we'll update it with the new event + taskExecSummary = ts + } else { + // If we don't know about it yet, init and add it into the parent struct + // (event.Status should always be `targetInitialized` here, since run() adds the entry first.) + taskExecSummary = &TaskExecutionSummary{startAt: event.Time} + es.tasks[event.Label] = taskExecSummary + } + + // Update the Status, Duration, and Err fields + taskExecSummary.status = event.Status + taskExecSummary.err = event.Err + taskExecSummary.Duration = event.Duration + + if event.exitCode != nil { + taskExecSummary.exitCode = event.exitCode + } + + switch event.Status { + case TargetBuilding: + es.attempted++ + case TargetBuildFailed: + es.failure++ + case TargetCached: + es.cached++ + case TargetBuilt: + es.success++ + } + + return es.tasks[event.Label] +} + +// writeChrometracing writes to a profile name if the `--profile` flag was passed to turbo run +func writeChrometracing(filename string, terminal cli.Ui) error { + outputPath := chrometracing.Path() + if outputPath == "" { + // tracing wasn't enabled + return nil + } + + name := fmt.Sprintf("turbo-%s.trace", time.Now().Format(time.RFC3339)) + if filename != "" { + name = filename + } + if err := chrometracing.Close(); err != nil { + terminal.Warn(fmt.Sprintf("Failed to flush tracing data: %v", err)) + } + cwdRaw, err := os.Getwd() + if err != nil { + return err + } + root, err := fs.GetCwd(cwdRaw) + if err != nil { + return err + } + // chrometracing.Path() is absolute by default, but can still be relative if overridden via $CHROMETRACING_DIR + // so we have to account for that before converting to turbopath.AbsoluteSystemPath + if err := fs.CopyFile(&fs.LstatCachedFile{Path: fs.ResolveUnknownPath(root, outputPath)}, name); err != nil { + return err + } + return nil +} diff --git a/cli/internal/runsummary/format_execution_summary.go b/cli/internal/runsummary/format_execution_summary.go new file mode 100644 index 0000000..37092be --- /dev/null +++ b/cli/internal/runsummary/format_execution_summary.go @@ -0,0 +1,70 @@ +package runsummary + +import ( + "os" + "time" + + "github.com/fatih/color" + internalUI "github.com/vercel/turbo/cli/internal/ui" +
"github.com/vercel/turbo/cli/internal/util" +) + +func (rsm *Meta) printExecutionSummary() { + maybeFullTurbo := "" + summary := rsm.RunSummary + ui := rsm.ui + + attempted := summary.ExecutionSummary.attempted + successful := summary.ExecutionSummary.cached + summary.ExecutionSummary.success + cached := summary.ExecutionSummary.cached + // TODO: can we use a method on ExecutionSummary here? + duration := time.Since(summary.ExecutionSummary.startedAt).Truncate(time.Millisecond) + + if cached == attempted && attempted > 0 { + terminalProgram := os.Getenv("TERM_PROGRAM") + // On the macOS Terminal, the rainbow colors show up as a magenta background + // with a gray background on a single letter. Instead, we print in bold magenta + if terminalProgram == "Apple_Terminal" { + fallbackTurboColor := color.New(color.FgHiMagenta, color.Bold).SprintFunc() + maybeFullTurbo = fallbackTurboColor(">>> FULL TURBO") + } else { + maybeFullTurbo = internalUI.Rainbow(">>> FULL TURBO") + } + } + + if attempted == 0 { + ui.Output("") // Clear the line + ui.Warn("No tasks were executed as part of this run.") + } + + ui.Output("") // Clear the line + spacer := " " // 4 chars + + var lines []string + + // The only difference between these two branches is that when there is a run summary + // we print the path to that file and we adjust the whitespace in the printed text so it aligns. + // We could just always align to account for the summary line, but that would require a whole + // bunch of test output assertions to change. + if rsm.getPath().FileExists() { + lines = []string{ + util.Sprintf("${BOLD} Tasks:${BOLD_GREEN}%s%v successful${RESET}${GRAY}, %v total${RESET}", spacer, successful, attempted), + util.Sprintf("${BOLD} Cached:%s%v cached${RESET}${GRAY}, %v total${RESET}", spacer, cached, attempted), + util.Sprintf("${BOLD} Time:%s%v${RESET} %v${RESET}", spacer, duration, maybeFullTurbo), + util.Sprintf("${BOLD}Summary:%s%s${RESET}", spacer, rsm.getPath()), + } + } else { + lines = []string{ + util.Sprintf("${BOLD} Tasks:${BOLD_GREEN}%s%v successful${RESET}${GRAY}, %v total${RESET}", spacer, successful, attempted), + util.Sprintf("${BOLD}Cached:%s%v cached${RESET}${GRAY}, %v total${RESET}", spacer, cached, attempted), + util.Sprintf("${BOLD} Time:%s%v${RESET} %v${RESET}", spacer, duration, maybeFullTurbo), + } + } + + // Print the real thing + for _, line := range lines { + ui.Output(line) + } + + ui.Output("") +} diff --git a/cli/internal/runsummary/format_json.go b/cli/internal/runsummary/format_json.go new file mode 100644 index 0000000..76a0a40 --- /dev/null +++ b/cli/internal/runsummary/format_json.go @@ -0,0 +1,66 @@ +package runsummary + +import ( + "encoding/json" + + "github.com/pkg/errors" + "github.com/segmentio/ksuid" + "github.com/vercel/turbo/cli/internal/util" +) + +// FormatJSON returns a json string representing a RunSummary +func (rsm *Meta) FormatJSON() ([]byte, error) { + rsm.normalize() // normalize data + + var bytes []byte + var err error + + if rsm.singlePackage { + bytes, err = json.MarshalIndent(nonMonorepoRunSummary(*rsm.RunSummary), "", " ") + } else { + bytes, err = json.MarshalIndent(rsm.RunSummary, "", " ") + } + + if err != nil { + return nil, errors.Wrap(err, "failed to render JSON") + } + return bytes, nil +} + +func (rsm *Meta) normalize() { + for _, t := range rsm.RunSummary.Tasks { + t.EnvVars.Global = rsm.RunSummary.GlobalHashSummary.envVars + t.EnvVars.GlobalPassthrough = rsm.RunSummary.GlobalHashSummary.passthroughEnvVars + } + + // Remove execution summary for dry 
runs + if rsm.runType == runTypeDryJSON { + rsm.RunSummary.ExecutionSummary = nil + } + + // For single packages, we don't need the Packages list, + // and each task summary needs some cleaning. + if rsm.singlePackage { + rsm.RunSummary.Packages = []string{} + + for _, task := range rsm.RunSummary.Tasks { + task.cleanForSinglePackage() + } + } +} + +// nonMonorepoRunSummary is an exact copy of RunSummary, but the JSON tags are structured +// for rendering a single-package run of turbo. Notably, we want to always omit packages +// since there is no concept of packages in a single-workspace repo. +// This struct exists solely for the purpose of serializing to JSON and should not be +// used anywhere else. +type nonMonorepoRunSummary struct { + ID ksuid.KSUID `json:"id"` + Version string `json:"version"` + TurboVersion string `json:"turboVersion"` + GlobalHashSummary *GlobalHashSummary `json:"globalCacheInputs"` + Packages []string `json:"-"` + EnvMode util.EnvMode `json:"envMode"` + ExecutionSummary *executionSummary `json:"execution,omitempty"` + Tasks []*TaskSummary `json:"tasks"` +} diff --git a/cli/internal/runsummary/format_text.go b/cli/internal/runsummary/format_text.go new file mode 100644 index 0000000..28b1638 --- /dev/null +++ b/cli/internal/runsummary/format_text.go @@ -0,0 +1,100 @@ +package runsummary + +import ( + "encoding/json" + "fmt" + "os" + "strconv" + "strings" + "text/tabwriter" + + "github.com/vercel/turbo/cli/internal/util" + "github.com/vercel/turbo/cli/internal/workspace" +) + +// FormatAndPrintText prints a Run Summary to the Terminal UI +func (rsm Meta) FormatAndPrintText(workspaceInfos workspace.Catalog) error { + ui := rsm.ui + summary := rsm.RunSummary + + rsm.normalize() // normalize data + + if !rsm.singlePackage { + ui.Output("") + ui.Info(util.Sprintf("${CYAN}${BOLD}Packages in Scope${RESET}")) + p := tabwriter.NewWriter(os.Stdout, 0, 0, 1, ' ', 0) + fmt.Fprintln(p, "Name\tPath\t") + for _, pkg := range summary.Packages { + fmt.Fprintf(p, "%s\t%s\t\n", pkg, workspaceInfos.PackageJSONs[pkg].Dir) + } + if err := p.Flush(); err != nil { + return err + } + } + + fileCount := len(summary.GlobalHashSummary.GlobalFileHashMap) + w1 := tabwriter.NewWriter(os.Stdout, 0, 0, 1, ' ', 0) + ui.Output("") + ui.Info(util.Sprintf("${CYAN}${BOLD}Global Hash Inputs${RESET}")) + fmt.Fprintln(w1, util.Sprintf(" ${GREY}Global Files\t=\t%d${RESET}", fileCount)) + fmt.Fprintln(w1, util.Sprintf(" ${GREY}External Dependencies Hash\t=\t%s${RESET}", summary.GlobalHashSummary.RootExternalDepsHash)) + fmt.Fprintln(w1, util.Sprintf(" ${GREY}Global Cache Key\t=\t%s${RESET}", summary.GlobalHashSummary.GlobalCacheKey)) + if bytes, err := json.Marshal(summary.GlobalHashSummary.Pipeline); err == nil { + fmt.Fprintln(w1, util.Sprintf(" ${GREY}Root pipeline\t=\t%s${RESET}", bytes)) + } + if err := w1.Flush(); err != nil { + return err + } + + ui.Output("") + ui.Info(util.Sprintf("${CYAN}${BOLD}Tasks to Run${RESET}")) + + for _, task := range summary.Tasks { + taskName := task.TaskID + + if rsm.singlePackage { + taskName = task.Task + } + + ui.Info(util.Sprintf("${BOLD}%s${RESET}", taskName)) + w := tabwriter.NewWriter(os.Stdout, 0, 0, 1, ' ', 0) + fmt.Fprintln(w, util.Sprintf(" ${GREY}Task\t=\t%s\t${RESET}", task.Task)) + + if !rsm.singlePackage { + fmt.Fprintln(w, util.Sprintf(" ${GREY}Package\t=\t%s\t${RESET}", task.Package)) + } + fmt.Fprintln(w, util.Sprintf(" ${GREY}Hash\t=\t%s\t${RESET}", task.Hash)) + fmt.Fprintln(w, util.Sprintf(" ${GREY}Cached
(Local)\t=\t%s\t${RESET}", strconv.FormatBool(task.CacheSummary.Local))) + fmt.Fprintln(w, util.Sprintf(" ${GREY}Cached (Remote)\t=\t%s\t${RESET}", strconv.FormatBool(task.CacheSummary.Remote))) + + if !rsm.singlePackage { + fmt.Fprintln(w, util.Sprintf(" ${GREY}Directory\t=\t%s\t${RESET}", task.Dir)) + } + + fmt.Fprintln(w, util.Sprintf(" ${GREY}Command\t=\t%s\t${RESET}", task.Command)) + fmt.Fprintln(w, util.Sprintf(" ${GREY}Outputs\t=\t%s\t${RESET}", strings.Join(task.Outputs, ", "))) + fmt.Fprintln(w, util.Sprintf(" ${GREY}Log File\t=\t%s\t${RESET}", task.LogFile)) + fmt.Fprintln(w, util.Sprintf(" ${GREY}Dependencies\t=\t%s\t${RESET}", strings.Join(task.Dependencies, ", "))) + fmt.Fprintln(w, util.Sprintf(" ${GREY}Dependents\t=\t%s\t${RESET}", strings.Join(task.Dependents, ", "))) + fmt.Fprintln(w, util.Sprintf(" ${GREY}Input Files Considered\t=\t%d\t${RESET}", len(task.ExpandedInputs))) + + fmt.Fprintln(w, util.Sprintf(" ${GREY}Configured Environment Variables\t=\t%s\t${RESET}", strings.Join(task.EnvVars.Configured, ", "))) + fmt.Fprintln(w, util.Sprintf(" ${GREY}Inferred Environment Variables\t=\t%s\t${RESET}", strings.Join(task.EnvVars.Inferred, ", "))) + fmt.Fprintln(w, util.Sprintf(" ${GREY}Global Environment Variables\t=\t%s\t${RESET}", strings.Join(task.EnvVars.Global, ", "))) + + bytes, err := json.Marshal(task.ResolvedTaskDefinition) + // If there's an error, we can silently ignore it; we don't need to block the entire print. + if err == nil { + fmt.Fprintln(w, util.Sprintf(" ${GREY}ResolvedTaskDefinition\t=\t%s\t${RESET}", string(bytes))) + } + + fmt.Fprintln(w, util.Sprintf(" ${GREY}Framework\t=\t%s\t${RESET}", task.Framework)) + if err := w.Flush(); err != nil { + return err + } + } + return nil +} diff --git a/cli/internal/runsummary/globalhash_summary.go b/cli/internal/runsummary/globalhash_summary.go new file mode 100644 index 0000000..e24976d5 --- /dev/null +++ b/cli/internal/runsummary/globalhash_summary.go @@ -0,0 +1,38 @@ +package runsummary + +import ( + "github.com/vercel/turbo/cli/internal/env" + "github.com/vercel/turbo/cli/internal/fs" + "github.com/vercel/turbo/cli/internal/turbopath" +) + +// GlobalHashSummary contains the pieces of data that impacted the global hash (which then impacted the task hash) +type GlobalHashSummary struct { + GlobalCacheKey string `json:"rootKey"` + GlobalFileHashMap map[turbopath.AnchoredUnixPath]string `json:"files"` + RootExternalDepsHash string `json:"hashOfExternalDependencies"` + Pipeline fs.PristinePipeline `json:"rootPipeline"` + + // These are private fields, not serialized to JSON, because we'll add them to each task + envVars env.EnvironmentVariablePairs + passthroughEnvVars env.EnvironmentVariablePairs +} + +// NewGlobalHashSummary creates a GlobalHashSummary struct from a set of fields.
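+ //
+ // Serialized under the "globalCacheInputs" key, this renders roughly as
+ // (values abbreviated and hypothetical):
+ //
+ //	{
+ //	  "rootKey": "...",
+ //	  "files": { "package.json": "185771929d..." },
+ //	  "hashOfExternalDependencies": "349dd44e8b...",
+ //	  "rootPipeline": { ... }
+ //	}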
+func NewGlobalHashSummary( + fileHashMap map[turbopath.AnchoredUnixPath]string, + rootExternalDepsHash string, + envVars env.DetailedMap, + passthroughEnvVars env.EnvironmentVariableMap, + globalCacheKey string, + pipeline fs.PristinePipeline, +) *GlobalHashSummary { + return &GlobalHashSummary{ + envVars: envVars.All.ToSecretHashable(), + passthroughEnvVars: passthroughEnvVars.ToSecretHashable(), + GlobalFileHashMap: fileHashMap, + RootExternalDepsHash: rootExternalDepsHash, + GlobalCacheKey: globalCacheKey, + Pipeline: pipeline, + } +} diff --git a/cli/internal/runsummary/run_summary.go b/cli/internal/runsummary/run_summary.go new file mode 100644 index 0000000..a297114 --- /dev/null +++ b/cli/internal/runsummary/run_summary.go @@ -0,0 +1,320 @@ +// Package runsummary implements structs that report on a `turbo run` and `turbo run --dry` +package runsummary + +import ( + "context" + "encoding/json" + "fmt" + "path/filepath" + "sync" + "time" + + "github.com/mitchellh/cli" + "github.com/segmentio/ksuid" + "github.com/vercel/turbo/cli/internal/client" + "github.com/vercel/turbo/cli/internal/spinner" + "github.com/vercel/turbo/cli/internal/turbopath" + "github.com/vercel/turbo/cli/internal/util" + "github.com/vercel/turbo/cli/internal/workspace" +) + +// MissingTaskLabel is printed when a package is missing a definition for a task that is supposed to run +// E.g. if `turbo run build --dry` is run, and package-a doesn't define a `build` script in package.json, +// the RunSummary will print this, instead of the script (e.g. `next build`). +const MissingTaskLabel = "<NONEXISTENT>" + +// MissingFrameworkLabel is a string to identify when a workspace doesn't detect a framework +const MissingFrameworkLabel = "<NO FRAMEWORK DETECTED>" + +const runSummarySchemaVersion = "0" +const runsEndpoint = "/v0/spaces/%s/runs" +const runsPatchEndpoint = "/v0/spaces/%s/runs/%s" +const tasksEndpoint = "/v0/spaces/%s/runs/%s/tasks" + +type runType int + +const ( + runTypeReal runType = iota + runTypeDryText + runTypeDryJSON +) + +// Meta is a wrapper around the serializable RunSummary, with some extra information +// about the Run and references to other things that we need. +type Meta struct { + RunSummary *RunSummary + ui cli.Ui + repoRoot turbopath.AbsoluteSystemPath // used to write run summary + repoPath turbopath.RelativeSystemPath + singlePackage bool + shouldSave bool + apiClient *client.APIClient + spaceID string + runType runType + synthesizedCommand string +} + +// RunSummary contains a summary of what happens in the `turbo run` command and why.
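+ //
+ // The serialized shape, per the JSON tags below (values hypothetical; the
+ // execution block is dropped for --dry=json in normalize()):
+ //
+ //	{
+ //	  "id": "2PZ...",
+ //	  "version": "0",
+ //	  "turboVersion": "1.9.0",
+ //	  "globalCacheInputs": { ... },
+ //	  "packages": ["docs", "web"],
+ //	  "envMode": "strict",
+ //	  "execution": { ... },
+ //	  "tasks": [ ... ]
+ //	}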
+type RunSummary struct { + ID ksuid.KSUID `json:"id"` + Version string `json:"version"` + TurboVersion string `json:"turboVersion"` + GlobalHashSummary *GlobalHashSummary `json:"globalCacheInputs"` + Packages []string `json:"packages"` + EnvMode util.EnvMode `json:"envMode"` + ExecutionSummary *executionSummary `json:"execution,omitempty"` + Tasks []*TaskSummary `json:"tasks"` +} + +// NewRunSummary returns a RunSummary instance +func NewRunSummary( + startAt time.Time, + ui cli.Ui, + repoRoot turbopath.AbsoluteSystemPath, + repoPath turbopath.RelativeSystemPath, + turboVersion string, + apiClient *client.APIClient, + runOpts util.RunOpts, + packages []string, + globalEnvMode util.EnvMode, + globalHashSummary *GlobalHashSummary, + synthesizedCommand string, +) Meta { + singlePackage := runOpts.SinglePackage + profile := runOpts.Profile + shouldSave := runOpts.Summarize + spaceID := runOpts.ExperimentalSpaceID + + runType := runTypeReal + if runOpts.DryRun { + runType = runTypeDryText + if runOpts.DryRunJSON { + runType = runTypeDryJSON + } + } + + executionSummary := newExecutionSummary(synthesizedCommand, repoPath, startAt, profile) + + return Meta{ + RunSummary: &RunSummary{ + ID: ksuid.New(), + Version: runSummarySchemaVersion, + ExecutionSummary: executionSummary, + TurboVersion: turboVersion, + Packages: packages, + EnvMode: globalEnvMode, + Tasks: []*TaskSummary{}, + GlobalHashSummary: globalHashSummary, + }, + ui: ui, + runType: runType, + repoRoot: repoRoot, + singlePackage: singlePackage, + shouldSave: shouldSave, + apiClient: apiClient, + spaceID: spaceID, + synthesizedCommand: synthesizedCommand, + } +} + +// getPath returns a path to where the runSummary is written. +// The returned path will always be relative to the dir passed in. +// We don't do a lot of validation, so `../../` paths are allowed. +func (rsm *Meta) getPath() turbopath.AbsoluteSystemPath { + filename := fmt.Sprintf("%s.json", rsm.RunSummary.ID) + return rsm.repoRoot.UntypedJoin(filepath.Join(".turbo", "runs"), filename) +} + +// Close wraps up the RunSummary at the end of a `turbo run`. +func (rsm *Meta) Close(ctx context.Context, exitCode int, workspaceInfos workspace.Catalog) error { + if rsm.runType == runTypeDryJSON || rsm.runType == runTypeDryText { + return rsm.closeDryRun(workspaceInfos) + } + + rsm.RunSummary.ExecutionSummary.exitCode = exitCode + rsm.RunSummary.ExecutionSummary.endedAt = time.Now() + + summary := rsm.RunSummary + if err := writeChrometracing(summary.ExecutionSummary.profileFilename, rsm.ui); err != nil { + rsm.ui.Error(fmt.Sprintf("Error writing tracing data: %v", err)) + } + + // TODO: printing summary to local, writing to disk, and sending to API + // are all the same thing, we should use a strategy similar to cache save/upload to + // do this in parallel. + + // Otherwise, attempt to save the summary + // Warn on the error, but we don't need to throw an error + if rsm.shouldSave { + if err := rsm.save(); err != nil { + rsm.ui.Warn(fmt.Sprintf("Error writing run summary: %v", err)) + } + } + + rsm.printExecutionSummary() + + // If we're not supposed to save or if there's no spaceID + if !rsm.shouldSave || rsm.spaceID == "" { + return nil + } + + if !rsm.apiClient.IsLinked() { + rsm.ui.Warn("Failed to post to space because repo is not linked to a Space. Run `turbo link` first.") + return nil + } + + // Wrap the record function so we can hoist out url/errors but keep + // the function signature/type the spinner.WaitFor expects.
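+ // The general shape of this closure-hoisting pattern, as a hedged sketch
+ // (compute and timeout are placeholders):
+ //
+ //	var result string
+ //	work := func() { result = compute() }
+ //	_ = spinner.WaitFor(ctx, work, ui, "working...", timeout)
+ //	// result is safe to read here: WaitFor only returns after work completes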
+ var url string + var errs []error + record := func() { + url, errs = rsm.record() + } + + func() { + _ = spinner.WaitFor(ctx, record, rsm.ui, "...sending run summary...", 1000*time.Millisecond) + }() + + // After the spinner is done, print any errors and the url + if len(errs) > 0 { + rsm.ui.Warn("Errors recording run to Spaces") + for _, err := range errs { + rsm.ui.Warn(fmt.Sprintf("%v", err)) + } + } + + if url != "" { + rsm.ui.Output(fmt.Sprintf("Run: %s", url)) + rsm.ui.Output("") + } + + return nil +} + +// closeDryRun wraps up the Run Summary at the end of `turbo run --dry`. +// Ideally this should be inlined into Close(), but RunSummary doesn't currently +// have context about whether a run was real or dry. +func (rsm *Meta) closeDryRun(workspaceInfos workspace.Catalog) error { + // Render the dry run as json + if rsm.runType == runTypeDryJSON { + rendered, err := rsm.FormatJSON() + if err != nil { + return err + } + + rsm.ui.Output(string(rendered)) + return nil + } + + return rsm.FormatAndPrintText(workspaceInfos) +} + +// TrackTask makes it possible for the consumer to send information about the execution of a task. +func (summary *RunSummary) TrackTask(taskID string) (func(outcome executionEventName, err error, exitCode *int), *TaskExecutionSummary) { + return summary.ExecutionSummary.run(taskID) +} + +// Save saves the run summary to a file +func (rsm *Meta) save() error { + json, err := rsm.FormatJSON() + if err != nil { + return err + } + + // summaryPath will always be relative to the dir passed in. + // We don't do a lot of validation, so `../../` paths are allowed + summaryPath := rsm.getPath() + + if err := summaryPath.EnsureDir(); err != nil { + return err + } + + return summaryPath.WriteFile(json, 0644) +} + +// record sends the summary to the API +func (rsm *Meta) record() (string, []error) { + errs := []error{} + + // Right now we'll send the POST to create the Run and the subsequent task payloads + // after all execution is done, but in the future, this first POST request + // can happen when the Run actually starts, so we can send updates to the associated Space + // as tasks complete. + createRunEndpoint := fmt.Sprintf(runsEndpoint, rsm.spaceID) + response := &spacesRunResponse{} + + payload := rsm.newSpacesRunCreatePayload() + if startPayload, err := json.Marshal(payload); err == nil { + if resp, err := rsm.apiClient.JSONPost(createRunEndpoint, startPayload); err != nil { + errs = append(errs, fmt.Errorf("POST %s: %w", createRunEndpoint, err)) + } else { + if err := json.Unmarshal(resp, response); err != nil { + errs = append(errs, fmt.Errorf("Error unmarshaling response: %w", err)) + } + } + } + + if response.ID != "" { + if taskErrs := rsm.postTaskSummaries(response.ID); len(taskErrs) > 0 { + errs = append(errs, taskErrs...) + } + + if donePayload, err := json.Marshal(newSpacesDonePayload(rsm.RunSummary)); err == nil { + patchURL := fmt.Sprintf(runsPatchEndpoint, rsm.spaceID, response.ID) + if _, err := rsm.apiClient.JSONPatch(patchURL, donePayload); err != nil { + errs = append(errs, fmt.Errorf("PATCH %s: %w", patchURL, err)) + } + } + } + + if len(errs) > 0 { + return response.URL, errs + } + + return response.URL, nil +} + +func (rsm *Meta) postTaskSummaries(runID string) []error { + errs := []error{} + // We make at most 8 requests at a time.
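+ // This is a standard bounded worker pool: a buffered channel is the queue and
+ // each worker drains it until the channel is closed. A minimal generic sketch
+ // (handle is a placeholder for the per-job work):
+ //
+ //	queue := make(chan int, jobCount)
+ //	var wg sync.WaitGroup
+ //	for i := 0; i < workers; i++ {
+ //		wg.Add(1)
+ //		go func() {
+ //			defer wg.Done()
+ //			for job := range queue {
+ //				handle(job)
+ //			}
+ //		}()
+ //	}
+ //	for j := 0; j < jobCount; j++ {
+ //		queue <- j
+ //	}
+ //	close(queue)
+ //	wg.Wait()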
+	maxParallelRequests := 8
+	taskSummaries := rsm.RunSummary.Tasks
+	taskCount := len(taskSummaries)
+	taskURL := fmt.Sprintf(tasksEndpoint, rsm.spaceID, runID)
+
+	parallelRequestCount := maxParallelRequests
+	if taskCount < maxParallelRequests {
+		parallelRequestCount = taskCount
+	}
+
+	queue := make(chan int, taskCount)
+
+	// errs is appended to from multiple worker goroutines below, so guard it
+	// with a mutex to avoid a data race.
+	var errMu sync.Mutex
+	wg := &sync.WaitGroup{}
+	for i := 0; i < parallelRequestCount; i++ {
+		wg.Add(1)
+		go func() {
+			defer wg.Done()
+			for index := range queue {
+				task := taskSummaries[index]
+				payload := newSpacesTaskPayload(task)
+				if taskPayload, err := json.Marshal(payload); err == nil {
+					if _, err := rsm.apiClient.JSONPost(taskURL, taskPayload); err != nil {
+						errMu.Lock()
+						errs = append(errs, fmt.Errorf("error sending %s summary to space: %w", task.TaskID, err))
+						errMu.Unlock()
+					}
+				}
+			}
+		}()
+	}
+
+	for index := range taskSummaries {
+		queue <- index
+	}
+	close(queue)
+	wg.Wait()
+
+	if len(errs) > 0 {
+		return errs
+	}
+
+	return nil
+}
diff --git a/cli/internal/runsummary/spaces.go b/cli/internal/runsummary/spaces.go
new file mode 100644
index 0000000..bf19941
--- /dev/null
+++ b/cli/internal/runsummary/spaces.go
@@ -0,0 +1,96 @@
+package runsummary
+
+import (
+	"github.com/vercel/turbo/cli/internal/ci"
+)
+
+// spacesRunResponse deserializes the response from the POST Run endpoint
+type spacesRunResponse struct {
+	ID  string
+	URL string
+}
+
+type spacesRunPayload struct {
+	StartTime      int64  `json:"startTime,omitempty"`      // when the run was started
+	EndTime        int64  `json:"endTime,omitempty"`        // when the run ended. we should never submit start and end at the same time.
+	Status         string `json:"status,omitempty"`         // Status is "running" or "completed"
+	Type           string `json:"type,omitempty"`           // hardcoded to "TURBO"
+	ExitCode       int    `json:"exitCode,omitempty"`       // exit code for the full run
+	Command        string `json:"command,omitempty"`        // the thing that kicked off the turbo run
+	RepositoryPath string `json:"repositoryPath,omitempty"` // where the command was invoked from
+	Context        string `json:"context,omitempty"`        // the host on which this Run was executed (e.g. GitHub Actions, Vercel, etc.)
+
+	// TODO: we need to add these in
+	// originationUser string
+	// gitBranch string
+	// gitSha string
+}
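+
+// For illustration, a freshly created run serializes to something like the
+// following (values here are invented, not from a real run):
+//
+//	{"startTime":1682600000000,"status":"running","type":"TURBO",
+//	 "command":"turbo run build","repositoryPath":"","context":"LOCAL"}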
+
+// spacesCacheStatus mirrors TaskCacheSummary field-for-field so that the direct
+// conversion spacesCacheStatus(cacheSummary) works, but changes the json tags
+// to omit the local and remote fields
+type spacesCacheStatus struct {
+	// omitted fields, but here so we can convert from TaskCacheSummary easily
+	Local     bool   `json:"-"`
+	Remote    bool   `json:"-"`
+	Status    string `json:"status"` // should always be there
+	Source    string `json:"source,omitempty"`
+	TimeSaved int    `json:"timeSaved"`
+}
+
+type spacesTask struct {
+	Key          string            `json:"key,omitempty"`
+	Name         string            `json:"name,omitempty"`
+	Workspace    string            `json:"workspace,omitempty"`
+	Hash         string            `json:"hash,omitempty"`
+	StartTime    int64             `json:"startTime,omitempty"`
+	EndTime      int64             `json:"endTime,omitempty"`
+	Cache        spacesCacheStatus `json:"cache,omitempty"`
+	ExitCode     int               `json:"exitCode,omitempty"`
+	Dependencies []string          `json:"dependencies,omitempty"`
+	Dependents   []string          `json:"dependents,omitempty"`
+	Logs         string            `json:"log"`
+}
+
+func (rsm *Meta) newSpacesRunCreatePayload() *spacesRunPayload {
+	startTime := rsm.RunSummary.ExecutionSummary.startedAt.UnixMilli()
+	context := "LOCAL"
+	if name := ci.Constant(); name != "" {
+		context = name
+	}
+	return &spacesRunPayload{
+		StartTime:      startTime,
+		Status:         "running",
+		Command:        rsm.synthesizedCommand,
+		RepositoryPath: rsm.repoPath.ToString(),
+		Type:           "TURBO",
+		Context:        context,
+	}
+}
+
+func newSpacesDonePayload(runsummary *RunSummary) *spacesRunPayload {
+	endTime := runsummary.ExecutionSummary.endedAt.UnixMilli()
+	return &spacesRunPayload{
+		Status:   "completed",
+		EndTime:  endTime,
+		ExitCode: runsummary.ExecutionSummary.exitCode,
+	}
+}
+
+func newSpacesTaskPayload(taskSummary *TaskSummary) *spacesTask {
+	startTime := taskSummary.Execution.startAt.UnixMilli()
+	endTime := taskSummary.Execution.endTime().UnixMilli()
+
+	// Execution.exitCode can be nil (e.g. for a task that never actually ran);
+	// default to 0 instead of dereferencing a nil pointer.
+	exitCode := 0
+	if taskSummary.Execution.exitCode != nil {
+		exitCode = *taskSummary.Execution.exitCode
+	}
+
+	return &spacesTask{
+		Key:          taskSummary.TaskID,
+		Name:         taskSummary.Task,
+		Workspace:    taskSummary.Package,
+		Hash:         taskSummary.Hash,
+		StartTime:    startTime,
+		EndTime:      endTime,
+		Cache:        spacesCacheStatus(taskSummary.CacheSummary), // wrapped so we can remove fields
+		ExitCode:     exitCode,
+		Dependencies: taskSummary.Dependencies,
+		Dependents:   taskSummary.Dependents,
+		Logs:         string(taskSummary.GetLogs()),
+	}
+}
diff --git a/cli/internal/runsummary/task_summary.go b/cli/internal/runsummary/task_summary.go
new file mode 100644
index 0000000..fb0cb30
--- /dev/null
+++ b/cli/internal/runsummary/task_summary.go
@@ -0,0 +1,117 @@
+package runsummary
+
+import (
+	"os"
+
+	"github.com/vercel/turbo/cli/internal/cache"
+	"github.com/vercel/turbo/cli/internal/fs"
+	"github.com/vercel/turbo/cli/internal/turbopath"
+	"github.com/vercel/turbo/cli/internal/util"
+)
+
+// TaskCacheSummary is an extended version of cache.ItemStatus
+// that includes TimeSaved and some better data.
+type TaskCacheSummary struct {
+	Local     bool   `json:"local"`            // Deprecated, but keeping around for --dry=json
+	Remote    bool   `json:"remote"`           // Deprecated, but keeping around for --dry=json
+	Status    string `json:"status"`           // should always be there
+	Source    string `json:"source,omitempty"` // can be empty on status:miss
+	TimeSaved int    `json:"timeSaved"`        // always include, but can be 0
+}
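+
+// For illustration, assuming the cache constants serialize as they appear in
+// dry-run output ("HIT"/"MISS", "LOCAL"/"REMOTE"), NewTaskCacheSummary below
+// would turn ItemStatus{Local: true} with a timeSaved of 1200 into:
+//
+//	{"local":true,"remote":false,"status":"HIT","source":"LOCAL","timeSaved":1200}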
+
+// NewTaskCacheSummary decorates a cache.ItemStatus into a TaskCacheSummary.
+// Importantly, it adds the derived keys of `source` and `status` based on
+// the local/remote booleans. It would be nice if these were just included
+// from upstream, but that is a more invasive change.
+func NewTaskCacheSummary(itemStatus cache.ItemStatus, timeSaved *int) TaskCacheSummary {
+	status := cache.CacheEventMiss
+	if itemStatus.Local || itemStatus.Remote {
+		status = cache.CacheEventHit
+	}
+
+	var source string
+	if itemStatus.Local {
+		source = cache.CacheSourceFS
+	} else if itemStatus.Remote {
+		source = cache.CacheSourceRemote
+	}
+
+	cs := TaskCacheSummary{
+		// copy these over
+		Local:  itemStatus.Local,
+		Remote: itemStatus.Remote,
+		Status: status,
+		Source: source,
+	}
+	// add in a dereferenced timeSaved; it should be 0 if the pointer is nil
+	if timeSaved != nil {
+		cs.TimeSaved = *timeSaved
+	}
+	return cs
+}
+
+// TaskSummary contains information about a task that was run.
+// TODO(mehulkar): `Outputs` and `ExcludedOutputs` are slightly redundant
+// as the information is also available in ResolvedTaskDefinition. We could remove them
+// and favor a version of Outputs that is the fully expanded list of files.
+type TaskSummary struct {
+	TaskID                 string                                `json:"taskId,omitempty"`
+	Task                   string                                `json:"task"`
+	Package                string                                `json:"package,omitempty"`
+	Hash                   string                                `json:"hash"`
+	ExpandedInputs         map[turbopath.AnchoredUnixPath]string `json:"inputs"`
+	ExternalDepsHash       string                                `json:"hashOfExternalDependencies"`
+	CacheSummary           TaskCacheSummary                      `json:"cache"`
+	Command                string                                `json:"command"`
+	CommandArguments       []string                              `json:"cliArguments"`
+	Outputs                []string                              `json:"outputs"`
+	ExcludedOutputs        []string                              `json:"excludedOutputs"`
+	LogFile                string                                `json:"logFile"`
+	Dir                    string                                `json:"directory,omitempty"`
+	Dependencies           []string                              `json:"dependencies"`
+	Dependents             []string                              `json:"dependents"`
+	ResolvedTaskDefinition *fs.TaskDefinition                    `json:"resolvedTaskDefinition"`
+	ExpandedOutputs        []turbopath.AnchoredSystemPath        `json:"expandedOutputs"`
+	Framework              string                                `json:"framework"`
+	EnvMode                util.EnvMode                          `json:"envMode"`
+	EnvVars                TaskEnvVarSummary                     `json:"environmentVariables"`
+	Execution              *TaskExecutionSummary                 `json:"execution,omitempty"` // omit when it's not set
+}
+
+// GetLogs reads the LogFile and returns its contents, or an empty slice if it
+// cannot be read
+func (ts *TaskSummary) GetLogs() []byte {
+	bytes, err := os.ReadFile(ts.LogFile)
+	if err != nil {
+		return []byte{}
+	}
+	return bytes
+}
+
+// TaskEnvVarSummary contains the environment variables that impacted a task's hash
+type TaskEnvVarSummary struct {
+	Configured        []string `json:"configured"`
+	Inferred          []string `json:"inferred"`
+	Global            []string `json:"global"`
+	Passthrough       []string `json:"passthrough"`
+	GlobalPassthrough []string `json:"globalPassthrough"`
+}
+
+// cleanForSinglePackage converts a TaskSummary to remove references to workspaces
+func (ts *TaskSummary) cleanForSinglePackage() {
+	dependencies := make([]string, len(ts.Dependencies))
+	for i, dependency := range ts.Dependencies {
+		dependencies[i] = util.StripPackageName(dependency)
+	}
+	dependents := make([]string, len(ts.Dependents))
+	for i, dependent := range ts.Dependents {
+		dependents[i] = util.StripPackageName(dependent)
+	}
+	task := util.StripPackageName(ts.TaskID)
+
+	ts.TaskID = task
+	ts.Task = task
+	ts.Dependencies = dependencies
+	ts.Dependents = dependents
+	ts.Dir = ""
+	ts.Package = ""
+}
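+
+// For example (assuming util.StripPackageName trims a leading "pkg#" qualifier),
+// a task ID "web#build" with dependencies ["web#codegen"] becomes task "build"
+// with dependencies ["codegen"], and its Dir and Package are cleared.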
diff --git a/cli/internal/scm/git_go.go b/cli/internal/scm/git_go.go
new file mode 100644
index 0000000..0dac2bf
--- /dev/null
+++ b/cli/internal/scm/git_go.go
@@ -0,0 +1,111 @@
+//go:build go || !rust
+// +build go !rust
+
+// Package scm abstracts operations on various tools like git
+// Currently, only git is supported.
+//
+// Adapted from https://github.com/thought-machine/please/tree/master/src/scm
+// Copyright Thought Machine, Inc. or its affiliates. All Rights Reserved.
+// SPDX-License-Identifier: Apache-2.0
+package scm
+
+import (
+	"fmt"
+	"os/exec"
+	"path/filepath"
+	"strings"
+
+	"github.com/pkg/errors"
+	"github.com/vercel/turbo/cli/internal/turbopath"
+)
+
+// git implements operations on a git repository.
+type git struct {
+	repoRoot turbopath.AbsoluteSystemPath
+}
+
+// ChangedFiles returns a list of modified files since the given commit, including untracked files.
+func (g *git) ChangedFiles(fromCommit string, toCommit string, relativeTo string) ([]string, error) {
+	if relativeTo == "" {
+		relativeTo = g.repoRoot.ToString()
+	}
+	relSuffix := []string{"--", relativeTo}
+	command := []string{"diff", "--name-only", toCommit}
+
+	out, err := exec.Command("git", append(command, relSuffix...)...).CombinedOutput()
+	if err != nil {
+		return nil, errors.Wrapf(err, "finding changes relative to %v", relativeTo)
+	}
+	files := strings.Split(string(out), "\n")
+
+	if fromCommit != "" {
+		// Grab the diff from the merge-base to toCommit using ... syntax. This ensures we have just
+		// the changes that have occurred on the current branch.
+		command = []string{"diff", "--name-only", fromCommit + "..." + toCommit}
+		out, err = exec.Command("git", append(command, relSuffix...)...).CombinedOutput()
+		if err != nil {
+			// Check if we can provide a better error message for non-existent commits.
+			// If we error on the check or can't find it, fall back to whatever error git
+			// reported.
+			if exists, err := commitExists(fromCommit); err == nil && !exists {
+				return nil, fmt.Errorf("commit %v does not exist", fromCommit)
+			}
+			return nil, errors.Wrapf(err, "git comparing with %v", fromCommit)
+		}
+		committedChanges := strings.Split(string(out), "\n")
+		files = append(files, committedChanges...)
+	}
+	command = []string{"ls-files", "--other", "--exclude-standard"}
+	out, err = exec.Command("git", append(command, relSuffix...)...).CombinedOutput()
+	if err != nil {
+		return nil, errors.Wrap(err, "finding untracked files")
+	}
+	untracked := strings.Split(string(out), "\n")
+	files = append(files, untracked...)
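+
+	// At this point files is the union of (up to) three git invocations, roughly:
+	//
+	//	git diff --name-only <toCommit> -- <relativeTo>
+	//	git diff --name-only <fromCommit>...<toCommit> -- <relativeTo>
+	//	git ls-files --other --exclude-standard -- <relativeTo>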
+	// git will report changed files relative to the worktree: re-relativize to relativeTo
+	normalized := make([]string, 0)
+	for _, f := range files {
+		if f == "" {
+			continue
+		}
+		normalizedFile, err := g.fixGitRelativePath(strings.TrimSpace(f), relativeTo)
+		if err != nil {
+			return nil, err
+		}
+		normalized = append(normalized, normalizedFile)
+	}
+	return normalized, nil
+}
+
+func (g *git) PreviousContent(fromCommit string, filePath string) ([]byte, error) {
+	if fromCommit == "" {
+		return nil, fmt.Errorf("need commit sha to inspect file contents")
+	}
+
+	out, err := exec.Command("git", "show", fmt.Sprintf("%s:%s", fromCommit, filePath)).CombinedOutput()
+	if err != nil {
+		return nil, errors.Wrapf(err, "unable to get contents of %s", filePath)
+	}
+
+	return out, nil
+}
+
+func commitExists(commit string) (bool, error) {
+	err := exec.Command("git", "cat-file", "-t", commit).Run()
+	if err != nil {
+		exitErr := &exec.ExitError{}
+		if errors.As(err, &exitErr) && exitErr.ExitCode() == 128 {
+			return false, nil
+		}
+		return false, err
+	}
+	return true, nil
+}
+
+func (g *git) fixGitRelativePath(worktreePath, relativeTo string) (string, error) {
+	// repoRoot is a typed path, so convert it explicitly before handing it to
+	// the stdlib path helpers.
+	p, err := filepath.Rel(relativeTo, filepath.Join(g.repoRoot.ToString(), worktreePath))
+	if err != nil {
+		return "", errors.Wrapf(err, "unable to determine relative path for %s and %s", g.repoRoot, relativeTo)
+	}
+	return p, nil
+}
diff --git a/cli/internal/scm/git_rust.go b/cli/internal/scm/git_rust.go
new file mode 100644
index 0000000..4b4cd2d
--- /dev/null
+++ b/cli/internal/scm/git_rust.go
@@ -0,0 +1,34 @@
+// Package scm abstracts operations on various tools like git
+// Currently, only git is supported.
+//
+// Adapted from https://github.com/thought-machine/please/tree/master/src/scm
+// Copyright Thought Machine, Inc. or its affiliates. All Rights Reserved.
+// SPDX-License-Identifier: Apache-2.0
+//go:build rust
+// +build rust
+
+package scm
+
+import (
+	"fmt"
+
+	"github.com/vercel/turbo/cli/internal/ffi"
+	"github.com/vercel/turbo/cli/internal/turbopath"
+)
+
+// git implements operations on a git repository.
+type git struct {
+	repoRoot turbopath.AbsoluteSystemPath
+}
+
+// ChangedFiles returns a list of modified files since the given commit, including untracked files.
+func (g *git) ChangedFiles(fromCommit string, toCommit string, monorepoRoot string) ([]string, error) {
+	return ffi.ChangedFiles(g.repoRoot.ToString(), monorepoRoot, fromCommit, toCommit)
+}
+
+func (g *git) PreviousContent(fromCommit string, filePath string) ([]byte, error) {
+	if fromCommit == "" {
+		return nil, fmt.Errorf("need commit sha to inspect file contents")
+	}
+
+	return ffi.PreviousContent(g.repoRoot.ToString(), fromCommit, filePath)
+}
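+
+// Note: the `rust` build tag above selects this ffi-backed implementation;
+// without it, the exec-based implementation in git_go.go is compiled instead.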
diff --git a/cli/internal/scm/scm.go b/cli/internal/scm/scm.go
new file mode 100644
index 0000000..e7f17c8
--- /dev/null
+++ b/cli/internal/scm/scm.go
@@ -0,0 +1,53 @@
+// Package scm abstracts operations on various tools like git
+// Currently, only git is supported.
+//
+// Adapted from https://github.com/thought-machine/please/tree/master/src/scm
+// Copyright Thought Machine, Inc. or its affiliates. All Rights Reserved.
+// SPDX-License-Identifier: Apache-2.0
+package scm
+
+import (
+	"github.com/pkg/errors"
+
+	"github.com/vercel/turbo/cli/internal/turbopath"
+)
+
+// ErrFallback is returned (alongside a stub implementation) when no supported
+// SCM is found at the repo root.
+var ErrFallback = errors.New("cannot find a .git folder. Falling back to manual file hashing (which may be slower). If you are running this build in a pruned directory, you can ignore this message. Otherwise, please initialize a git repository in the root of your monorepo")
+
+// An SCM is a source-control backend that can report changed files and the
+// historical contents of a file.
+type SCM interface {
+	// ChangedFiles returns a list of modified files since the given commit, including untracked files
+	ChangedFiles(fromCommit string, toCommit string, relativeTo string) ([]string, error)
+	// PreviousContent returns the content of the file at fromCommit
+	PreviousContent(fromCommit string, filePath string) ([]byte, error)
+}
+
+// newGitSCM returns a new SCM instance for this repo root.
+// It returns nil if there is no known implementation there.
+func newGitSCM(repoRoot turbopath.AbsoluteSystemPath) SCM {
+	if repoRoot.UntypedJoin(".git").Exists() {
+		return &git{repoRoot: repoRoot}
+	}
+	return nil
+}
+
+// newFallback returns a new SCM instance for this repo root.
+// If there is no known implementation it returns a stub, along with ErrFallback.
+func newFallback(repoRoot turbopath.AbsoluteSystemPath) (SCM, error) {
+	if scm := newGitSCM(repoRoot); scm != nil {
+		return scm, nil
+	}
+
+	return &stub{}, ErrFallback
+}
+
+// FromInRepo produces an SCM instance given a path within a repository:
+// it walks up to find the .git directory and anchors the SCM at its parent.
+func FromInRepo(repoRoot turbopath.AbsoluteSystemPath) (SCM, error) {
+	dotGitDir, err := repoRoot.Findup(".git")
+	if err != nil {
+		return nil, err
+	}
+	return newFallback(dotGitDir.Dir())
+}
diff --git a/cli/internal/scm/stub.go b/cli/internal/scm/stub.go
new file mode 100644
index 0000000..2e356c5
--- /dev/null
+++ b/cli/internal/scm/stub.go
@@ -0,0 +1,14 @@
+// Adapted from https://github.com/thought-machine/please/tree/master/src/scm
+// Copyright Thought Machine, Inc. or its affiliates. All Rights Reserved.
+// SPDX-License-Identifier: Apache-2.0
+package scm
+
+// stub is a no-op SCM used when no supported implementation is found.
+type stub struct{}
+
+func (s *stub) ChangedFiles(fromCommit string, toCommit string, relativeTo string) ([]string, error) {
+	return nil, nil
+}
+
+func (s *stub) PreviousContent(fromCommit string, filePath string) ([]byte, error) {
+	return nil, nil
+}
diff --git a/cli/internal/scope/filter/filter.go b/cli/internal/scope/filter/filter.go
new file mode 100644
index 0000000..60aaf1d
--- /dev/null
+++ b/cli/internal/scope/filter/filter.go
@@ -0,0 +1,421 @@
+package filter
+
+import (
+	"fmt"
+	"strings"
+
+	"github.com/pkg/errors"
+	"github.com/pyr-sh/dag"
+	"github.com/vercel/turbo/cli/internal/doublestar"
+	"github.com/vercel/turbo/cli/internal/turbopath"
+	"github.com/vercel/turbo/cli/internal/util"
+	"github.com/vercel/turbo/cli/internal/workspace"
+)
+
+// SelectedPackages is the result of applying target selectors: the selected
+// package set plus any selectors that matched nothing.
+type SelectedPackages struct {
+	pkgs          util.Set
+	unusedFilters []*TargetSelector
+}
+
+// PackagesChangedInRange is the signature of a function to provide the set of
+// packages that have changed in a particular range of git refs.
+type PackagesChangedInRange = func(fromRef string, toRef string) (util.Set, error)
+
+// PackageInference holds the information we have inferred from the working-directory
+// (really --infer-filter-root flag) about which packages are of interest.
+type PackageInference struct {
+	// PackageName, if set, means that we have determined that filters without a package-specifier
+	// should get this package name
+	PackageName string
+	// DirectoryRoot is used to infer a "parentDir" for the filter in the event that we haven't
+	// identified a specific package. If the filter already contains a parentDir, this acts as
+	// a prefix. 
If the filter does not contain a parentDir, we consider this to be a glob for + // all subdirectories + DirectoryRoot turbopath.RelativeSystemPath +} + +type Resolver struct { + Graph *dag.AcyclicGraph + WorkspaceInfos workspace.Catalog + Cwd turbopath.AbsoluteSystemPath + Inference *PackageInference + PackagesChangedInRange PackagesChangedInRange +} + +// GetPackagesFromPatterns compiles filter patterns and applies them, returning +// the selected packages +func (r *Resolver) GetPackagesFromPatterns(patterns []string) (util.Set, error) { + selectors := []*TargetSelector{} + for _, pattern := range patterns { + selector, err := ParseTargetSelector(pattern) + if err != nil { + return nil, err + } + selectors = append(selectors, selector) + } + selected, err := r.getFilteredPackages(selectors) + if err != nil { + return nil, err + } + return selected.pkgs, nil +} + +func (pi *PackageInference) apply(selector *TargetSelector) error { + if selector.namePattern != "" { + // The selector references a package name, don't apply inference + return nil + } + if pi.PackageName != "" { + selector.namePattern = pi.PackageName + } + if selector.parentDir != "" { + parentDir := pi.DirectoryRoot.Join(selector.parentDir) + selector.parentDir = parentDir + } else if pi.PackageName == "" { + // The user didn't set a parent directory and we didn't find a single package, + // so use the directory we inferred and select all subdirectories + selector.parentDir = pi.DirectoryRoot.Join("**") + } + return nil +} + +func (r *Resolver) applyInference(selectors []*TargetSelector) ([]*TargetSelector, error) { + if r.Inference == nil { + return selectors, nil + } + // If there are existing patterns, use inference on those. If there are no + // patterns, but there is a directory supplied, synthesize a selector + if len(selectors) == 0 { + selectors = append(selectors, &TargetSelector{}) + } + for _, selector := range selectors { + if err := r.Inference.apply(selector); err != nil { + return nil, err + } + } + return selectors, nil +} + +func (r *Resolver) getFilteredPackages(selectors []*TargetSelector) (*SelectedPackages, error) { + selectors, err := r.applyInference(selectors) + if err != nil { + return nil, err + } + prodPackageSelectors := []*TargetSelector{} + allPackageSelectors := []*TargetSelector{} + for _, selector := range selectors { + if selector.followProdDepsOnly { + prodPackageSelectors = append(prodPackageSelectors, selector) + } else { + allPackageSelectors = append(allPackageSelectors, selector) + } + } + if len(allPackageSelectors) > 0 || len(prodPackageSelectors) > 0 { + if len(allPackageSelectors) > 0 { + selected, err := r.filterGraph(allPackageSelectors) + if err != nil { + return nil, err + } + return selected, nil + } + } + return &SelectedPackages{ + pkgs: make(util.Set), + }, nil +} + +func (r *Resolver) filterGraph(selectors []*TargetSelector) (*SelectedPackages, error) { + includeSelectors := []*TargetSelector{} + excludeSelectors := []*TargetSelector{} + for _, selector := range selectors { + if selector.exclude { + excludeSelectors = append(excludeSelectors, selector) + } else { + includeSelectors = append(includeSelectors, selector) + } + } + var include *SelectedPackages + if len(includeSelectors) > 0 { + found, err := r.filterGraphWithSelectors(includeSelectors) + if err != nil { + return nil, err + } + include = found + } else { + vertexSet := make(util.Set) + for _, v := range r.Graph.Vertices() { + vertexSet.Add(v) + } + include = &SelectedPackages{ + pkgs: vertexSet, + } + } + 
exclude, err := r.filterGraphWithSelectors(excludeSelectors)
+	if err != nil {
+		return nil, err
+	}
+	return &SelectedPackages{
+		pkgs:          include.pkgs.Difference(exclude.pkgs),
+		unusedFilters: append(include.unusedFilters, exclude.unusedFilters...),
+	}, nil
+}
+
+func (r *Resolver) filterGraphWithSelectors(selectors []*TargetSelector) (*SelectedPackages, error) {
+	unmatchedSelectors := []*TargetSelector{}
+
+	cherryPickedPackages := make(dag.Set)
+	walkedDependencies := make(dag.Set)
+	walkedDependents := make(dag.Set)
+	walkedDependentsDependencies := make(dag.Set)
+
+	for _, selector := range selectors {
+		// TODO(gsoltis): this should be a list?
+		entryPackages, err := r.filterGraphWithSelector(selector)
+		if err != nil {
+			return nil, err
+		}
+		if entryPackages.Len() == 0 {
+			unmatchedSelectors = append(unmatchedSelectors, selector)
+		}
+		for _, pkg := range entryPackages {
+			if selector.includeDependencies {
+				dependencies, err := r.Graph.Ancestors(pkg)
+				if err != nil {
+					return nil, errors.Wrapf(err, "failed to get dependencies of package %v", pkg)
+				}
+				for dep := range dependencies {
+					walkedDependencies.Add(dep)
+				}
+				if !selector.excludeSelf {
+					walkedDependencies.Add(pkg)
+				}
+			}
+			if selector.includeDependents {
+				dependents, err := r.Graph.Descendents(pkg)
+				if err != nil {
+					return nil, errors.Wrapf(err, "failed to get dependents of package %v", pkg)
+				}
+				for dep := range dependents {
+					walkedDependents.Add(dep)
+					if selector.includeDependencies {
+						dependentDeps, err := r.Graph.Ancestors(dep)
+						if err != nil {
+							return nil, errors.Wrapf(err, "failed to get dependencies of dependent %v", dep)
+						}
+						for dependentDep := range dependentDeps {
+							walkedDependentsDependencies.Add(dependentDep)
+						}
+					}
+				}
+				if !selector.excludeSelf {
+					walkedDependents.Add(pkg)
+				}
+			}
+			if !selector.includeDependencies && !selector.includeDependents {
+				cherryPickedPackages.Add(pkg)
+			}
+		}
+	}
+	allPkgs := make(util.Set)
+	for pkg := range cherryPickedPackages {
+		allPkgs.Add(pkg)
+	}
+	for pkg := range walkedDependencies {
+		allPkgs.Add(pkg)
+	}
+	for pkg := range walkedDependents {
+		allPkgs.Add(pkg)
+	}
+	for pkg := range walkedDependentsDependencies {
+		allPkgs.Add(pkg)
+	}
+	return &SelectedPackages{
+		pkgs:          allPkgs,
+		unusedFilters: unmatchedSelectors,
+	}, nil
+}
+
+func (r *Resolver) filterGraphWithSelector(selector *TargetSelector) (util.Set, error) {
+	if selector.matchDependencies {
+		return r.filterSubtreesWithSelector(selector)
+	}
+	return r.filterNodesWithSelector(selector)
+}
+
+// filterNodesWithSelector returns the set of nodes that match a given selector
+func (r *Resolver) filterNodesWithSelector(selector *TargetSelector) (util.Set, error) {
+	entryPackages := make(util.Set)
+	selectorWasUsed := false
+	if selector.fromRef != "" {
+		// get changed packages
+		selectorWasUsed = true
+		changedPkgs, err := r.PackagesChangedInRange(selector.fromRef, selector.getToRef())
+		if err != nil {
+			return nil, err
+		}
+		parentDir := selector.parentDir
+		for pkgName := range changedPkgs {
+			if parentDir != "" {
+				// Type assert/coerce to string here because we want to use
+				// this value in a map that has string keys.
+				// TODO(mehulkar): `changedPkgs` is a util.Set, we could make a `util.PackageNamesSet`
+				// or something similar that is all strings.
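+				// (The assertion below is safe today: only string package names
+				// are ever added to these sets.)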
+ pkgNameStr := pkgName.(string) + if pkgName == util.RootPkgName { + // The root package changed, only add it if + // the parentDir is equivalent to the root + if matches, err := doublestar.PathMatch(r.Cwd.Join(parentDir).ToString(), r.Cwd.ToString()); err != nil { + return nil, fmt.Errorf("failed to resolve directory relationship %v contains %v: %v", parentDir, r.Cwd, err) + } else if matches { + entryPackages.Add(pkgName) + } + } else if pkg, ok := r.WorkspaceInfos.PackageJSONs[pkgNameStr]; !ok { + return nil, fmt.Errorf("missing info for package %v", pkgName) + } else if matches, err := doublestar.PathMatch(r.Cwd.Join(parentDir).ToString(), pkg.Dir.RestoreAnchor(r.Cwd).ToString()); err != nil { + return nil, fmt.Errorf("failed to resolve directory relationship %v contains %v: %v", selector.parentDir, pkg.Dir, err) + } else if matches { + entryPackages.Add(pkgName) + } + } else { + entryPackages.Add(pkgName) + } + } + } else if selector.parentDir != "" { + // get packages by path + selectorWasUsed = true + parentDir := selector.parentDir + if parentDir == "." { + entryPackages.Add(util.RootPkgName) + } else { + for name, pkg := range r.WorkspaceInfos.PackageJSONs { + if matches, err := doublestar.PathMatch(r.Cwd.Join(parentDir).ToString(), pkg.Dir.RestoreAnchor(r.Cwd).ToString()); err != nil { + return nil, fmt.Errorf("failed to resolve directory relationship %v contains %v: %v", selector.parentDir, pkg.Dir, err) + } else if matches { + entryPackages.Add(name) + } + } + } + } + if selector.namePattern != "" { + // find packages that match name + if !selectorWasUsed { + matched, err := matchPackageNamesToVertices(selector.namePattern, r.Graph.Vertices()) + if err != nil { + return nil, err + } + entryPackages = matched + selectorWasUsed = true + } else { + matched, err := matchPackageNames(selector.namePattern, entryPackages) + if err != nil { + return nil, err + } + entryPackages = matched + } + } + // TODO(gsoltis): we can do this earlier + // Check if the selector specified anything + if !selectorWasUsed { + return nil, fmt.Errorf("invalid selector: %v", selector.raw) + } + return entryPackages, nil +} + +// filterSubtreesWithSelector returns the set of nodes where the node or any of its dependencies +// match a selector +func (r *Resolver) filterSubtreesWithSelector(selector *TargetSelector) (util.Set, error) { + // foreach package that matches parentDir && namePattern, check if any dependency is in changed packages + changedPkgs, err := r.PackagesChangedInRange(selector.fromRef, selector.getToRef()) + if err != nil { + return nil, err + } + + parentDir := selector.parentDir + entryPackages := make(util.Set) + for name, pkg := range r.WorkspaceInfos.PackageJSONs { + if parentDir == "" { + entryPackages.Add(name) + } else if matches, err := doublestar.PathMatch(parentDir.ToString(), pkg.Dir.RestoreAnchor(r.Cwd).ToString()); err != nil { + return nil, fmt.Errorf("failed to resolve directory relationship %v contains %v: %v", selector.parentDir, pkg.Dir, err) + } else if matches { + entryPackages.Add(name) + } + } + if selector.namePattern != "" { + matched, err := matchPackageNames(selector.namePattern, entryPackages) + if err != nil { + return nil, err + } + entryPackages = matched + } + roots := make(util.Set) + matched := make(util.Set) + for pkg := range entryPackages { + if matched.Includes(pkg) { + roots.Add(pkg) + continue + } + deps, err := r.Graph.Ancestors(pkg) + if err != nil { + return nil, err + } + for changedPkg := range changedPkgs { + if !selector.excludeSelf && pkg == 
changedPkg {
+				roots.Add(pkg)
+				break
+			}
+			if deps.Include(changedPkg) {
+				roots.Add(pkg)
+				matched.Add(changedPkg)
+				break
+			}
+		}
+	}
+	return roots, nil
+}
+
+func matchPackageNamesToVertices(pattern string, vertices []dag.Vertex) (util.Set, error) {
+	packages := make(util.Set)
+	for _, v := range vertices {
+		packages.Add(v)
+	}
+	packages.Add(util.RootPkgName)
+	return matchPackageNames(pattern, packages)
+}
+
+func matchPackageNames(pattern string, packages util.Set) (util.Set, error) {
+	matcher, err := matcherFromPattern(pattern)
+	if err != nil {
+		return nil, err
+	}
+	matched := make(util.Set)
+	for _, pkg := range packages {
+		pkg := pkg.(string)
+		if matcher(pkg) {
+			matched.Add(pkg)
+		}
+	}
+	if matched.Len() == 0 && !strings.HasPrefix(pattern, "@") && !strings.Contains(pattern, "/") {
+		// we got no matches and the pattern isn't a scoped package.
+		// Check if we have exactly one scoped package that does match
+		scopedPattern := fmt.Sprintf("@*/%v", pattern)
+		matcher, err = matcherFromPattern(scopedPattern)
+		if err != nil {
+			return nil, err
+		}
+		foundScopedPkg := false
+		for _, pkg := range packages {
+			pkg := pkg.(string)
+			if matcher(pkg) {
+				if foundScopedPkg {
+					// we found a second scoped package. Return the empty set, we can't
+					// disambiguate
+					return make(util.Set), nil
+				}
+				foundScopedPkg = true
+				matched.Add(pkg)
+			}
+		}
+	}
+	return matched, nil
+}
diff --git a/cli/internal/scope/filter/filter_test.go b/cli/internal/scope/filter/filter_test.go
new file mode 100644
index 0000000..a23ae1d
--- /dev/null
+++ b/cli/internal/scope/filter/filter_test.go
@@ -0,0 +1,614 @@
+package filter
+
+import (
+	"fmt"
+	"os"
+	"strings"
+	"testing"
+
+	"github.com/pyr-sh/dag"
+	"github.com/vercel/turbo/cli/internal/fs"
+	"github.com/vercel/turbo/cli/internal/turbopath"
+	"github.com/vercel/turbo/cli/internal/util"
+	"github.com/vercel/turbo/cli/internal/workspace"
+)
+
+func setMatches(t *testing.T, name string, s util.Set, expected []string) {
+	expectedSet := make(util.Set)
+	for _, item := range expected {
+		expectedSet.Add(item)
+	}
+	// s.Difference(expectedSet) is what s has that was not expected (extra);
+	// the reverse difference is what was expected but absent (missing).
+	extra := s.Difference(expectedSet)
+	if extra.Len() > 0 {
+		t.Errorf("%v set has extra elements: %v", name, strings.Join(extra.UnsafeListOfStrings(), ", "))
+	}
+	missing := expectedSet.Difference(s)
+	if missing.Len() > 0 {
+		t.Errorf("%v set missing elements: %v", name, strings.Join(missing.UnsafeListOfStrings(), ", "))
+	}
+}
+
+func Test_filter(t *testing.T) {
+	rawCwd, err := os.Getwd()
+	if err != nil {
+		t.Fatalf("failed to get working directory: %v", err)
+	}
+	root, err := fs.GetCwd(rawCwd)
+	if err != nil {
+		t.Fatalf("failed to get working directory: %v", err)
+	}
+	workspaceInfos := workspace.Catalog{
+		PackageJSONs: make(map[string]*fs.PackageJSON),
+	}
+	packageJSONs := workspaceInfos.PackageJSONs
+	graph := &dag.AcyclicGraph{}
+	graph.Add("project-0")
+	packageJSONs["project-0"] = &fs.PackageJSON{
+		Name: "project-0",
+		Dir:  turbopath.AnchoredUnixPath("packages/project-0").ToSystemPath(),
+	}
+	graph.Add("project-1")
+	packageJSONs["project-1"] = &fs.PackageJSON{
+		Name: "project-1",
+		Dir:  turbopath.AnchoredUnixPath("packages/project-1").ToSystemPath(),
+	}
+	graph.Add("project-2")
+	packageJSONs["project-2"] = &fs.PackageJSON{
+		Name: "project-2",
+		Dir:  "project-2",
+	}
+	graph.Add("project-3")
+	packageJSONs["project-3"] = &fs.PackageJSON{
+		Name: "project-3",
+		Dir:  "project-3",
+	}
+	graph.Add("project-4")
+	packageJSONs["project-4"] = &fs.PackageJSON{
+		Name: "project-4",
+		Dir:  "project-4",
+	}
+	
graph.Add("project-5") + packageJSONs["project-5"] = &fs.PackageJSON{ + Name: "project-5", + Dir: "project-5", + } + // Note: inside project-5 + graph.Add("project-6") + packageJSONs["project-6"] = &fs.PackageJSON{ + Name: "project-6", + Dir: turbopath.AnchoredUnixPath("project-5/packages/project-6").ToSystemPath(), + } + // Add dependencies + graph.Connect(dag.BasicEdge("project-0", "project-1")) + graph.Connect(dag.BasicEdge("project-0", "project-5")) + graph.Connect(dag.BasicEdge("project-1", "project-2")) + graph.Connect(dag.BasicEdge("project-1", "project-4")) + + testCases := []struct { + Name string + Selectors []*TargetSelector + PackageInference *PackageInference + Expected []string + }{ + { + "select root package", + []*TargetSelector{ + { + namePattern: util.RootPkgName, + }, + }, + nil, + []string{util.RootPkgName}, + }, + { + "select only package dependencies (excluding the package itself)", + []*TargetSelector{ + { + excludeSelf: true, + includeDependencies: true, + namePattern: "project-1", + }, + }, + nil, + []string{"project-2", "project-4"}, + }, + { + "select package with dependencies", + []*TargetSelector{ + { + excludeSelf: false, + includeDependencies: true, + namePattern: "project-1", + }, + }, + nil, + []string{"project-1", "project-2", "project-4"}, + }, + { + "select package with dependencies and dependents, including dependent dependencies", + []*TargetSelector{ + { + excludeSelf: true, + includeDependencies: true, + includeDependents: true, + namePattern: "project-1", + }, + }, + nil, + []string{"project-0", "project-1", "project-2", "project-4", "project-5"}, + }, + { + "select package with dependents", + []*TargetSelector{ + { + includeDependents: true, + namePattern: "project-2", + }, + }, + nil, + []string{"project-1", "project-2", "project-0"}, + }, + { + "select dependents excluding package itself", + []*TargetSelector{ + { + excludeSelf: true, + includeDependents: true, + namePattern: "project-2", + }, + }, + nil, + []string{"project-0", "project-1"}, + }, + { + "filter using two selectors: one selects dependencies another selects dependents", + []*TargetSelector{ + { + excludeSelf: true, + includeDependents: true, + namePattern: "project-2", + }, + { + excludeSelf: true, + includeDependencies: true, + namePattern: "project-1", + }, + }, + nil, + []string{"project-0", "project-1", "project-2", "project-4"}, + }, + { + "select just a package by name", + []*TargetSelector{ + { + namePattern: "project-2", + }, + }, + nil, + []string{"project-2"}, + }, + // Note: we don't support the option to switch path prefix mode + // { + // "select by parentDir", + // []*TargetSelector{ + // { + // parentDir: "/packages", + // }, + // }, + // []string{"project-0", "project-1"}, + // }, + { + "select by parentDir using glob", + []*TargetSelector{ + { + parentDir: turbopath.MakeRelativeSystemPath("packages", "*"), + }, + }, + nil, + []string{"project-0", "project-1"}, + }, + { + "select by parentDir using globstar", + []*TargetSelector{ + { + parentDir: turbopath.MakeRelativeSystemPath("project-5", "**"), + }, + }, + nil, + []string{"project-5", "project-6"}, + }, + { + "select by parentDir with no glob", + []*TargetSelector{ + { + parentDir: turbopath.MakeRelativeSystemPath("project-5"), + }, + }, + nil, + []string{"project-5"}, + }, + { + "select all packages except one", + []*TargetSelector{ + { + exclude: true, + namePattern: "project-1", + }, + }, + nil, + []string{"project-0", "project-2", "project-3", "project-4", "project-5", "project-6"}, + }, + { + "select by 
parentDir and exclude one package by pattern", + []*TargetSelector{ + { + parentDir: turbopath.MakeRelativeSystemPath("packages", "*"), + }, + { + exclude: true, + namePattern: "*-1", + }, + }, + nil, + []string{"project-0"}, + }, + { + "select root package by directory", + []*TargetSelector{ + { + parentDir: turbopath.MakeRelativeSystemPath("."), // input . gets cleaned to "" + }, + }, + nil, + []string{util.RootPkgName}, + }, + { + "select packages directory", + []*TargetSelector{}, + &PackageInference{ + DirectoryRoot: turbopath.MakeRelativeSystemPath("packages"), + }, + []string{"project-0", "project-1"}, + }, + { + "infer single package", + []*TargetSelector{}, + &PackageInference{ + DirectoryRoot: turbopath.MakeRelativeSystemPath("packages", "project-0"), + PackageName: "project-0", + }, + []string{"project-0"}, + }, + { + "infer single package from subdirectory", + []*TargetSelector{}, + &PackageInference{ + DirectoryRoot: turbopath.MakeRelativeSystemPath("packages", "project-0", "src"), + PackageName: "project-0", + }, + []string{"project-0"}, + }, + } + + for _, tc := range testCases { + t.Run(tc.Name, func(t *testing.T) { + r := &Resolver{ + Graph: graph, + WorkspaceInfos: workspaceInfos, + Cwd: root, + Inference: tc.PackageInference, + } + pkgs, err := r.getFilteredPackages(tc.Selectors) + if err != nil { + t.Fatalf("%v failed to filter packages: %v", tc.Name, err) + } + setMatches(t, tc.Name, pkgs.pkgs, tc.Expected) + }) + } + + t.Run("report unmatched filters", func(t *testing.T) { + r := &Resolver{ + Graph: graph, + WorkspaceInfos: workspaceInfos, + Cwd: root, + } + pkgs, err := r.getFilteredPackages([]*TargetSelector{ + { + excludeSelf: true, + includeDependencies: true, + namePattern: "project-7", + }, + }) + if err != nil { + t.Fatalf("unmatched filter failed to filter packages: %v", err) + } + if pkgs.pkgs.Len() != 0 { + t.Errorf("unmatched filter expected no packages, got %v", strings.Join(pkgs.pkgs.UnsafeListOfStrings(), ", ")) + } + if len(pkgs.unusedFilters) != 1 { + t.Errorf("unmatched filter expected to report one unused filter, got %v", len(pkgs.unusedFilters)) + } + }) +} + +func Test_matchScopedPackage(t *testing.T) { + rawCwd, err := os.Getwd() + if err != nil { + t.Fatalf("failed to get working directory: %v", err) + } + root, err := fs.GetCwd(rawCwd) + if err != nil { + t.Fatalf("failed to get working directory: %v", err) + } + + workspaceInfos := workspace.Catalog{ + PackageJSONs: make(map[string]*fs.PackageJSON), + } + packageJSONs := workspaceInfos.PackageJSONs + graph := &dag.AcyclicGraph{} + graph.Add("@foo/bar") + packageJSONs["@foo/bar"] = &fs.PackageJSON{ + Name: "@foo/bar", + Dir: turbopath.AnchoredUnixPath("packages/bar").ToSystemPath(), + } + r := &Resolver{ + Graph: graph, + WorkspaceInfos: workspaceInfos, + Cwd: root, + } + pkgs, err := r.getFilteredPackages([]*TargetSelector{ + { + namePattern: "bar", + }, + }) + if err != nil { + t.Fatalf("failed to filter packages: %v", err) + } + setMatches(t, "match scoped package", pkgs.pkgs, []string{"@foo/bar"}) +} + +func Test_matchExactPackages(t *testing.T) { + rawCwd, err := os.Getwd() + if err != nil { + t.Fatalf("failed to get working directory: %v", err) + } + root, err := fs.GetCwd(rawCwd) + if err != nil { + t.Fatalf("failed to get working directory: %v", err) + } + + workspaceInfos := workspace.Catalog{ + PackageJSONs: make(map[string]*fs.PackageJSON), + } + packageJSONs := workspaceInfos.PackageJSONs + graph := &dag.AcyclicGraph{} + graph.Add("@foo/bar") + packageJSONs["@foo/bar"] = 
&fs.PackageJSON{ + Name: "@foo/bar", + Dir: turbopath.AnchoredUnixPath("packages/@foo/bar").ToSystemPath(), + } + graph.Add("bar") + packageJSONs["bar"] = &fs.PackageJSON{ + Name: "bar", + Dir: turbopath.AnchoredUnixPath("packages/bar").ToSystemPath(), + } + r := &Resolver{ + Graph: graph, + WorkspaceInfos: workspaceInfos, + Cwd: root, + } + pkgs, err := r.getFilteredPackages([]*TargetSelector{ + { + namePattern: "bar", + }, + }) + if err != nil { + t.Fatalf("failed to filter packages: %v", err) + } + setMatches(t, "match exact package", pkgs.pkgs, []string{"bar"}) +} + +func Test_matchMultipleScopedPackages(t *testing.T) { + rawCwd, err := os.Getwd() + if err != nil { + t.Fatalf("failed to get working directory: %v", err) + } + root, err := fs.GetCwd(rawCwd) + if err != nil { + t.Fatalf("failed to get working directory: %v", err) + } + + workspaceInfos := workspace.Catalog{ + PackageJSONs: make(map[string]*fs.PackageJSON), + } + packageJSONs := workspaceInfos.PackageJSONs + graph := &dag.AcyclicGraph{} + graph.Add("@foo/bar") + packageJSONs["@foo/bar"] = &fs.PackageJSON{ + Name: "@foo/bar", + Dir: turbopath.AnchoredUnixPath("packages/@foo/bar").ToSystemPath(), + } + graph.Add("@types/bar") + packageJSONs["@types/bar"] = &fs.PackageJSON{ + Name: "@types/bar", + Dir: turbopath.AnchoredUnixPath("packages/@types/bar").ToSystemPath(), + } + r := &Resolver{ + Graph: graph, + WorkspaceInfos: workspaceInfos, + Cwd: root, + } + pkgs, err := r.getFilteredPackages([]*TargetSelector{ + { + namePattern: "bar", + }, + }) + if err != nil { + t.Fatalf("failed to filter packages: %v", err) + } + setMatches(t, "match nothing with multiple scoped packages", pkgs.pkgs, []string{}) +} + +func Test_SCM(t *testing.T) { + rawCwd, err := os.Getwd() + if err != nil { + t.Fatalf("failed to get working directory: %v", err) + } + root, err := fs.GetCwd(rawCwd) + if err != nil { + t.Fatalf("failed to get working directory: %v", err) + } + head1Changed := make(util.Set) + head1Changed.Add("package-1") + head1Changed.Add("package-2") + head1Changed.Add(util.RootPkgName) + head2Changed := make(util.Set) + head2Changed.Add("package-3") + workspaceInfos := workspace.Catalog{ + PackageJSONs: make(map[string]*fs.PackageJSON), + } + packageJSONs := workspaceInfos.PackageJSONs + graph := &dag.AcyclicGraph{} + graph.Add("package-1") + packageJSONs["package-1"] = &fs.PackageJSON{ + Name: "package-1", + Dir: "package-1", + } + graph.Add("package-2") + packageJSONs["package-2"] = &fs.PackageJSON{ + Name: "package-2", + Dir: "package-2", + } + graph.Add("package-3") + packageJSONs["package-3"] = &fs.PackageJSON{ + Name: "package-3", + Dir: "package-3", + } + graph.Add("package-20") + packageJSONs["package-20"] = &fs.PackageJSON{ + Name: "package-20", + Dir: "package-20", + } + + graph.Connect(dag.BasicEdge("package-3", "package-20")) + + r := &Resolver{ + Graph: graph, + WorkspaceInfos: workspaceInfos, + Cwd: root, + PackagesChangedInRange: func(fromRef string, toRef string) (util.Set, error) { + if fromRef == "HEAD~1" && toRef == "HEAD" { + return head1Changed, nil + } else if fromRef == "HEAD~2" && toRef == "HEAD" { + union := head1Changed.Copy() + for val := range head2Changed { + union.Add(val) + } + return union, nil + } else if fromRef == "HEAD~2" && toRef == "HEAD~1" { + return head2Changed, nil + } + panic(fmt.Sprintf("unsupported commit range %v...%v", fromRef, toRef)) + }, + } + + testCases := []struct { + Name string + Selectors []*TargetSelector + Expected []string + }{ + { + "all changed packages", + []*TargetSelector{ 
+			{
+				fromRef: "HEAD~1",
+			},
+		},
+		[]string{"package-1", "package-2", util.RootPkgName},
+	},
+	{
+		"all changed packages with parent dir exact match",
+		[]*TargetSelector{
+			{
+				fromRef:   "HEAD~1",
+				parentDir: ".",
+			},
+		},
+		[]string{util.RootPkgName},
+	},
+	{
+		"changed packages in directory",
+		[]*TargetSelector{
+			{
+				fromRef:   "HEAD~1",
+				parentDir: "package-2",
+			},
+		},
+		[]string{"package-2"},
+	},
+	{
+		"changed packages matching pattern",
+		[]*TargetSelector{
+			{
+				fromRef:     "HEAD~1",
+				namePattern: "package-2*",
+			},
+		},
+		[]string{"package-2"},
+	},
+	// Note: missing test here that takes advantage of automatically exempting
+	// test-only changes from pulling in dependents
+	//
+	// turbo-specific tests below here
+	{
+		"changed package was requested scope, and we're matching dependencies",
+		[]*TargetSelector{
+			{
+				fromRef:           "HEAD~1",
+				namePattern:       "package-1",
+				matchDependencies: true,
+			},
+		},
+		[]string{"package-1"},
+	},
+	{
+		"older commit",
+		[]*TargetSelector{
+			{
+				fromRef: "HEAD~2",
+			},
+		},
+		[]string{"package-1", "package-2", "package-3", util.RootPkgName},
+	},
+	{
+		"commit range",
+		[]*TargetSelector{
+			{
+				fromRef:       "HEAD~2",
+				toRefOverride: "HEAD~1",
+			},
+		},
+		[]string{"package-3"},
+	},
+	}
+
+	for _, tc := range testCases {
+		t.Run(tc.Name, func(t *testing.T) {
+			pkgs, err := r.getFilteredPackages(tc.Selectors)
+			if err != nil {
+				t.Fatalf("%v failed to filter packages: %v", tc.Name, err)
+			}
+			setMatches(t, tc.Name, pkgs.pkgs, tc.Expected)
+		})
+	}
+}
diff --git a/cli/internal/scope/filter/matcher.go b/cli/internal/scope/filter/matcher.go
new file mode 100644
index 0000000..2460326
--- /dev/null
+++ b/cli/internal/scope/filter/matcher.go
@@ -0,0 +1,32 @@
+package filter
+
+import (
+	"regexp"
+	"strings"
+
+	"github.com/pkg/errors"
+)
+
+type Matcher = func(pkgName string) bool
+
+func matchAll(pkgName string) bool {
+	return true
+}
+
+func matcherFromPattern(pattern string) (Matcher, error) {
+	if pattern == "*" {
+		return matchAll, nil
+	}
+
+	escaped := regexp.QuoteMeta(pattern)
+	// replace escaped '*' with regex '.*'
+	normalized := strings.ReplaceAll(escaped, "\\*", ".*")
+	if normalized == pattern {
+		return func(pkgName string) bool { return pkgName == pattern }, nil
+	}
+	regex, err := regexp.Compile("^" + normalized + "$")
+	if err != nil {
+		return nil, errors.Wrapf(err, "failed to compile filter pattern to regex: %v", pattern)
+	}
+	return func(pkgName string) bool { return regex.Match([]byte(pkgName)) }, nil
+}
diff --git a/cli/internal/scope/filter/matcher_test.go b/cli/internal/scope/filter/matcher_test.go
new file mode 100644
index 0000000..966be2b
--- /dev/null
+++ b/cli/internal/scope/filter/matcher_test.go
@@ -0,0 +1,65 @@
+package filter
+
+import "testing"
+
+func TestMatcher(t *testing.T) {
+	testCases := map[string][]struct {
+		test string
+		want bool
+	}{
+		"*": {
+			{
+				test: "@eslint/plugin-foo",
+				want: true,
+			},
+			{
+				test: "express",
+				want: true,
+			},
+		},
+		"eslint-*": {
+			{
+				test: "eslint-plugin-foo",
+				want: true,
+			},
+			{
+				test: "express",
+				want: false,
+			},
+		},
+		"*plugin*": {
+			{
+				test: "@eslint/plugin-foo",
+				want: true,
+			},
+			{
+				test: "express",
+				want: false,
+			},
+		},
+		"a*c": {
+			{
+				test: "abc",
+				want: true,
+			},
+		},
+		"*-positive": {
+			{
+				test: "is-positive",
+				want: true,
+			},
+		},
+	}
+	for pattern, tests := range testCases {
+		matcher, err := matcherFromPattern(pattern)
+		if err != nil {
+			t.Fatalf("failed to compile match pattern %v, %v", pattern, err)
+		}
+		for _, testCase := range tests {
+			got := matcher(testCase.test)
+			if got != testCase.want {
+				t.Errorf("%v.match(%v) got %v, want %v", pattern, testCase.test, got, testCase.want)
+			}
+		}
+	}
+}
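+
+// For reference, matcherFromPattern compiles glob patterns roughly as:
+//
+//	"eslint-*"  => ^eslint-.*$
+//	"*plugin*"  => ^.*plugin.*$
+//
+// while a pattern without "*" (or any other regexp metacharacter) falls back
+// to exact string equality.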
diff --git a/cli/internal/scope/filter/parse_target_selector.go b/cli/internal/scope/filter/parse_target_selector.go
new file mode 100644
index 0000000..4f5c90f
--- /dev/null
+++ b/cli/internal/scope/filter/parse_target_selector.go
@@ -0,0 +1,165 @@
+package filter
+
+import (
+	"regexp"
+	"strings"
+
+	"github.com/pkg/errors"
+	"github.com/vercel/turbo/cli/internal/turbopath"
+)
+
+type TargetSelector struct {
+	includeDependencies bool
+	matchDependencies   bool
+	includeDependents   bool
+	exclude             bool
+	excludeSelf         bool
+	followProdDepsOnly  bool
+	parentDir           turbopath.RelativeSystemPath
+	namePattern         string
+	fromRef             string
+	toRefOverride       string
+	raw                 string
+}
+
+func (ts *TargetSelector) IsValid() bool {
+	return ts.fromRef != "" || ts.parentDir != "" || ts.namePattern != ""
+}
+
+// getToRef returns the git ref to use for upper bound of the comparison when finding changed
+// packages.
+func (ts *TargetSelector) getToRef() string {
+	if ts.toRefOverride == "" {
+		return "HEAD"
+	}
+	return ts.toRefOverride
+}
+
+var errCantMatchDependencies = errors.New("cannot use match dependencies without specifying either a directory or package")
+
+var targetSelectorRegex = regexp.MustCompile(`^(?P<name>[^.](?:[^{}[\]]*[^{}[\].])?)?(?P<directory>\{[^}]*\})?(?P<commits>(?:\.{3})?\[[^\]]+\])?$`)
+
+// ParseTargetSelector parses a pnpm-compatible --filter selector string into a
+// TargetSelector
+func ParseTargetSelector(rawSelector string) (*TargetSelector, error) {
+	// guard against indexing into an empty selector below
+	if rawSelector == "" {
+		return nil, errors.New("invalid filter: empty selector")
+	}
+	exclude := false
+	firstChar := rawSelector[0]
+	selector := rawSelector
+	if firstChar == '!' {
+		selector = selector[1:]
+		exclude = true
+	}
+	excludeSelf := false
+	includeDependencies := strings.HasSuffix(selector, "...")
+	if includeDependencies {
+		selector = selector[:len(selector)-3]
+		if strings.HasSuffix(selector, "^") {
+			excludeSelf = true
+			selector = selector[:len(selector)-1]
+		}
+	}
+	includeDependents := strings.HasPrefix(selector, "...")
+	if includeDependents {
+		selector = selector[3:]
+		if strings.HasPrefix(selector, "^") {
+			excludeSelf = true
+			selector = selector[1:]
+		}
+	}
+
+	matches := targetSelectorRegex.FindAllStringSubmatch(selector, -1)
+
+	if len(matches) == 0 {
+		if relativePath, ok := isSelectorByLocation(selector); ok {
+			return &TargetSelector{
+				exclude:             exclude,
+				includeDependencies: includeDependencies,
+				includeDependents:   includeDependents,
+				parentDir:           relativePath,
+				raw:                 rawSelector,
+			}, nil
+		}
+		return &TargetSelector{
+			exclude:             exclude,
+			excludeSelf:         excludeSelf,
+			includeDependencies: includeDependencies,
+			includeDependents:   includeDependents,
+			namePattern:         selector,
+			raw:                 rawSelector,
+		}, nil
+	}
+
+	fromRef := ""
+	toRefOverride := ""
+	var parentDir turbopath.RelativeSystemPath
+	namePattern := ""
+	preAddDependencies := false
+	if len(matches) > 0 && len(matches[0]) > 0 {
+		match := matches[0]
+		namePattern = match[targetSelectorRegex.SubexpIndex("name")]
+		rawParentDir := match[targetSelectorRegex.SubexpIndex("directory")]
+		if len(rawParentDir) > 0 {
+			// trim {}
+			rawParentDir = rawParentDir[1 : len(rawParentDir)-1]
+			if rawParentDir == "" {
+				return nil, errors.New("empty path specification")
+			} else if relPath, err := turbopath.CheckedToRelativeSystemPath(rawParentDir); err == nil {
+				parentDir = relPath
+			} else {
+				return nil, errors.Wrapf(err, "invalid path specification: %v", rawParentDir)
+			}
+		}
+		rawCommits := match[targetSelectorRegex.SubexpIndex("commits")]
+		if len(rawCommits) > 0 {
+			fromRef = rawCommits
+			if strings.HasPrefix(fromRef, "...") {
+				if parentDir == "" && namePattern == "" {
+					return &TargetSelector{}, errCantMatchDependencies
+				}
+				preAddDependencies = true
+				fromRef = fromRef[3:]
+			}
+			// strip []
+			fromRef = fromRef[1 : len(fromRef)-1]
+			refs := strings.Split(fromRef, "...")
+			if len(refs) == 2 {
+				fromRef = refs[0]
+				toRefOverride = refs[1]
+			}
+		}
+	}
+
+	return &TargetSelector{
+		fromRef:             fromRef,
+		toRefOverride:       toRefOverride,
+		exclude:             exclude,
+		excludeSelf:         excludeSelf,
+		includeDependencies: includeDependencies,
+		matchDependencies:   preAddDependencies,
+		includeDependents:   includeDependents,
+		namePattern:         namePattern,
+		parentDir:           parentDir,
+		raw:                 rawSelector,
+	}, nil
+}
+
+// isSelectorByLocation reports whether the selector is a filesystem location,
+// returning the parsed relative path if so
+func isSelectorByLocation(rawSelector string) (turbopath.RelativeSystemPath, bool) {
+	if rawSelector[0:1] != "." {
+		return "", false
+	}
+
+	// . or ./ or .\
+	if len(rawSelector) == 1 || rawSelector[1:2] == "/" || rawSelector[1:2] == "\\" {
+		return turbopath.MakeRelativeSystemPath(rawSelector), true
+	}
+
+	if rawSelector[1:2] != "." {
+		return "", false
+	}
+
+	// .. 
or ../ or ..\ + if len(rawSelector) == 2 || rawSelector[2:3] == "/" || rawSelector[2:3] == "\\" { + return turbopath.MakeRelativeSystemPath(rawSelector), true + } + return "", false +} diff --git a/cli/internal/scope/filter/parse_target_selector_test.go b/cli/internal/scope/filter/parse_target_selector_test.go new file mode 100644 index 0000000..2973a61 --- /dev/null +++ b/cli/internal/scope/filter/parse_target_selector_test.go @@ -0,0 +1,311 @@ +package filter + +import ( + "reflect" + "testing" + + "github.com/vercel/turbo/cli/internal/turbopath" +) + +func TestParseTargetSelector(t *testing.T) { + tests := []struct { + rawSelector string + want *TargetSelector + wantErr bool + }{ + { + "{}", + &TargetSelector{}, + true, + }, + { + "foo", + &TargetSelector{ + fromRef: "", + exclude: false, + excludeSelf: false, + includeDependencies: false, + includeDependents: false, + namePattern: "foo", + parentDir: "", + }, + false, + }, + { + "foo...", + &TargetSelector{ + fromRef: "", + exclude: false, + excludeSelf: false, + includeDependencies: true, + includeDependents: false, + namePattern: "foo", + parentDir: "", + }, + false, + }, + { + "...foo", + &TargetSelector{ + fromRef: "", + exclude: false, + excludeSelf: false, + includeDependencies: false, + includeDependents: true, + namePattern: "foo", + parentDir: "", + }, + false, + }, + { + "...foo...", + &TargetSelector{ + fromRef: "", + exclude: false, + excludeSelf: false, + includeDependencies: true, + includeDependents: true, + namePattern: "foo", + parentDir: "", + }, + false, + }, + { + "foo^...", + &TargetSelector{ + fromRef: "", + exclude: false, + excludeSelf: true, + includeDependencies: true, + includeDependents: false, + namePattern: "foo", + parentDir: "", + }, + false, + }, + { + "...^foo", + &TargetSelector{ + fromRef: "", + exclude: false, + excludeSelf: true, + includeDependencies: false, + includeDependents: true, + namePattern: "foo", + parentDir: "", + }, + false, + }, + { + "./foo", + &TargetSelector{ + fromRef: "", + exclude: false, + excludeSelf: false, + includeDependencies: false, + includeDependents: false, + namePattern: "", + parentDir: "foo", + }, + false, + }, + { + "../foo", + &TargetSelector{ + fromRef: "", + exclude: false, + excludeSelf: false, + includeDependencies: false, + includeDependents: false, + namePattern: "", + parentDir: turbopath.MakeRelativeSystemPath("..", "foo"), + }, + false, + }, + { + "...{./foo}", + &TargetSelector{ + fromRef: "", + exclude: false, + excludeSelf: false, + includeDependencies: false, + includeDependents: true, + namePattern: "", + parentDir: "foo", + }, + false, + }, + { + ".", + &TargetSelector{ + fromRef: "", + exclude: false, + excludeSelf: false, + includeDependencies: false, + includeDependents: false, + namePattern: "", + parentDir: ".", + }, + false, + }, + { + "..", + &TargetSelector{ + fromRef: "", + exclude: false, + excludeSelf: false, + includeDependencies: false, + includeDependents: false, + namePattern: "", + parentDir: "..", + }, + false, + }, + { + "[master]", + &TargetSelector{ + fromRef: "master", + exclude: false, + excludeSelf: false, + includeDependencies: false, + includeDependents: false, + namePattern: "", + parentDir: "", + }, + false, + }, + { + "[from...to]", + &TargetSelector{ + fromRef: "from", + toRefOverride: "to", + }, + false, + }, + { + "{foo}[master]", + &TargetSelector{ + fromRef: "master", + exclude: false, + excludeSelf: false, + includeDependencies: false, + includeDependents: false, + namePattern: "", + parentDir: "foo", + }, + false, 
+ }, + { + "pattern{foo}[master]", + &TargetSelector{ + fromRef: "master", + exclude: false, + excludeSelf: false, + includeDependencies: false, + includeDependents: false, + namePattern: "pattern", + parentDir: "foo", + }, + false, + }, + { + "[master]...", + &TargetSelector{ + fromRef: "master", + exclude: false, + excludeSelf: false, + includeDependencies: true, + includeDependents: false, + namePattern: "", + parentDir: "", + }, + false, + }, + { + "...[master]", + &TargetSelector{ + fromRef: "master", + exclude: false, + excludeSelf: false, + includeDependencies: false, + includeDependents: true, + namePattern: "", + parentDir: "", + }, + false, + }, + { + "...[master]...", + &TargetSelector{ + fromRef: "master", + exclude: false, + excludeSelf: false, + includeDependencies: true, + includeDependents: true, + namePattern: "", + parentDir: "", + }, + false, + }, + { + "...[from...to]...", + &TargetSelector{ + fromRef: "from", + toRefOverride: "to", + includeDependencies: true, + includeDependents: true, + }, + false, + }, + { + "foo...[master]", + &TargetSelector{ + fromRef: "master", + namePattern: "foo", + matchDependencies: true, + }, + false, + }, + { + "foo...[master]...", + &TargetSelector{ + fromRef: "master", + namePattern: "foo", + matchDependencies: true, + includeDependencies: true, + }, + false, + }, + { + "{foo}...[master]", + &TargetSelector{ + fromRef: "master", + parentDir: "foo", + matchDependencies: true, + }, + false, + }, + { + "......[master]", + &TargetSelector{}, + true, + }, + } + for _, tt := range tests { + t.Run(tt.rawSelector, func(t *testing.T) { + got, err := ParseTargetSelector(tt.rawSelector) + if tt.wantErr { + if err == nil { + t.Errorf("ParseTargetSelector() error = %#v, wantErr %#v", err, tt.wantErr) + } + } else { + // copy the raw selector from the args into what we want. This value is used + // for reporting errors in the case of a malformed selector + tt.want.raw = tt.rawSelector + if !reflect.DeepEqual(got, tt.want) { + t.Errorf("ParseTargetSelector() = %#v, want %#v", got, tt.want) + } + } + }) + } +} diff --git a/cli/internal/scope/scope.go b/cli/internal/scope/scope.go new file mode 100644 index 0000000..b5ed4e7 --- /dev/null +++ b/cli/internal/scope/scope.go @@ -0,0 +1,380 @@ +package scope + +import ( + "fmt" + "os" + "path/filepath" + "sort" + "strings" + + "github.com/hashicorp/go-hclog" + "github.com/mitchellh/cli" + "github.com/pkg/errors" + "github.com/vercel/turbo/cli/internal/context" + "github.com/vercel/turbo/cli/internal/lockfile" + "github.com/vercel/turbo/cli/internal/scm" + scope_filter "github.com/vercel/turbo/cli/internal/scope/filter" + "github.com/vercel/turbo/cli/internal/turbopath" + "github.com/vercel/turbo/cli/internal/turbostate" + "github.com/vercel/turbo/cli/internal/util" + "github.com/vercel/turbo/cli/internal/util/filter" + "github.com/vercel/turbo/cli/internal/workspace" +) + +// LegacyFilter holds the options in use before the filter syntax. They have their own rules +// for how they are compiled into filter expressions. 
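+//
+// For example, the legacy flags `--scope=foo --since=main --include-dependencies`
+// compile to the filter pattern "...foo...[main]..." (see AsFilterPatterns below).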
+type LegacyFilter struct { + // IncludeDependencies is whether to include pkg.dependencies in execution (defaults to false) + IncludeDependencies bool + // SkipDependents is whether to skip dependent impacted consumers in execution (defaults to false) + SkipDependents bool + // Entrypoints is a list of package entrypoints + Entrypoints []string + // Since is the git ref used to calculate changed packages + Since string +} + +var _sinceHelp = `Limit/Set scope to changed packages since a +mergebase. This uses the git diff ${target_branch}... +mechanism to identify which packages have changed.` + +func addLegacyFlagsFromArgs(opts *LegacyFilter, args *turbostate.ParsedArgsFromRust) { + opts.IncludeDependencies = args.Command.Run.IncludeDependencies + opts.SkipDependents = args.Command.Run.NoDeps + opts.Entrypoints = args.Command.Run.Scope + opts.Since = args.Command.Run.Since +} + +// Opts holds the options for how to select the entrypoint packages for a turbo run +type Opts struct { + LegacyFilter LegacyFilter + // IgnorePatterns is the list of globs of file paths to ignore from execution scope calculation + IgnorePatterns []string + // GlobalDepPatterns is a list of globs to global files whose contents will be included in the global hash calculation + GlobalDepPatterns []string + // Patterns are the filter patterns supplied to --filter on the commandline + FilterPatterns []string + + PackageInferenceRoot turbopath.RelativeSystemPath +} + +var ( + _filterHelp = `Use the given selector to specify package(s) to act as +entry points. The syntax mirrors pnpm's syntax, and +additional documentation and examples can be found in +turbo's documentation https://turbo.build/repo/docs/reference/command-line-reference#--filter +--filter can be specified multiple times. Packages that +match any filter will be included.` + _ignoreHelp = `Files to ignore when calculating changed files (i.e. --since). Supports globs.` + _globalDepHelp = `Specify glob of global filesystem dependencies to be hashed. Useful for .env and files +in the root directory. Includes turbo.json, root package.json, and the root lockfile by default.` +) + +// normalize package inference path. We compare against "" in several places, so maintain +// that behavior. In a post-rust-port world, this should more properly be an Option +func resolvePackageInferencePath(raw string) (turbopath.RelativeSystemPath, error) { + pkgInferenceRoot, err := turbopath.CheckedToRelativeSystemPath(raw) + if err != nil { + return "", errors.Wrapf(err, "invalid package inference root %v", raw) + } + if pkgInferenceRoot == "." { + return "", nil + } + return pkgInferenceRoot, nil +} + +// OptsFromArgs adds the settings relevant to this package to the given Opts +func OptsFromArgs(opts *Opts, args *turbostate.ParsedArgsFromRust) error { + opts.FilterPatterns = args.Command.Run.Filter + opts.IgnorePatterns = args.Command.Run.Ignore + opts.GlobalDepPatterns = args.Command.Run.GlobalDeps + pkgInferenceRoot, err := resolvePackageInferencePath(args.Command.Run.PkgInferenceRoot) + if err != nil { + return err + } + opts.PackageInferenceRoot = pkgInferenceRoot + addLegacyFlagsFromArgs(&opts.LegacyFilter, args) + return nil +} + +// AsFilterPatterns normalizes legacy selectors to filter syntax +func (l *LegacyFilter) AsFilterPatterns() []string { + var patterns []string + prefix := "" + if !l.SkipDependents { + prefix = "..." + } + suffix := "" + if l.IncludeDependencies { + suffix = "..." 
+	}
+	since := ""
+	if l.Since != "" {
+		since = fmt.Sprintf("[%v]", l.Since)
+	}
+	if len(l.Entrypoints) > 0 {
+		// --scope implies our tweaked syntax to see if any dependency matches
+		if since != "" {
+			since = "..." + since
+		}
+		for _, pattern := range l.Entrypoints {
+			if strings.HasPrefix(pattern, "!") {
+				patterns = append(patterns, pattern)
+			} else {
+				filterPattern := fmt.Sprintf("%v%v%v%v", prefix, pattern, since, suffix)
+				patterns = append(patterns, filterPattern)
+			}
+		}
+	} else if since != "" {
+		// no scopes specified, but --since was provided
+		filterPattern := fmt.Sprintf("%v%v%v", prefix, since, suffix)
+		patterns = append(patterns, filterPattern)
+	}
+	return patterns
+}
+
+// ResolvePackages translates specified flags to a set of entry point packages for
+// the selected tasks. Returns the selected packages and whether the selected
+// packages represent the default "all packages" set.
+func ResolvePackages(opts *Opts, repoRoot turbopath.AbsoluteSystemPath, scm scm.SCM, ctx *context.Context, tui cli.Ui, logger hclog.Logger) (util.Set, bool, error) {
+	inferenceBase, err := calculateInference(repoRoot, opts.PackageInferenceRoot, ctx.WorkspaceInfos, logger)
+	if err != nil {
+		return nil, false, err
+	}
+	filterResolver := &scope_filter.Resolver{
+		Graph:                  &ctx.WorkspaceGraph,
+		WorkspaceInfos:         ctx.WorkspaceInfos,
+		Cwd:                    repoRoot,
+		Inference:              inferenceBase,
+		PackagesChangedInRange: opts.getPackageChangeFunc(scm, repoRoot, ctx),
+	}
+	filterPatterns := opts.FilterPatterns
+	legacyFilterPatterns := opts.LegacyFilter.AsFilterPatterns()
+	filterPatterns = append(filterPatterns, legacyFilterPatterns...)
+	isAllPackages := len(filterPatterns) == 0 && opts.PackageInferenceRoot == ""
+	filteredPkgs, err := filterResolver.GetPackagesFromPatterns(filterPatterns)
+	if err != nil {
+		return nil, false, err
+	}
+
+	if isAllPackages {
+		// no filters specified, run every package
+		for _, f := range ctx.WorkspaceNames {
+			filteredPkgs.Add(f)
+		}
+	}
+	filteredPkgs.Delete(ctx.RootNode)
+	return filteredPkgs, isAllPackages, nil
+}
+
+func calculateInference(repoRoot turbopath.AbsoluteSystemPath, pkgInferencePath turbopath.RelativeSystemPath, packageInfos workspace.Catalog, logger hclog.Logger) (*scope_filter.PackageInference, error) {
+	if pkgInferencePath == "" {
+		// No inference specified, no need to calculate anything
+		return nil, nil
+	}
+	logger.Debug(fmt.Sprintf("Using %v as a basis for selecting packages", pkgInferencePath))
+	fullInferencePath := repoRoot.Join(pkgInferencePath)
+	for _, pkgInfo := range packageInfos.PackageJSONs {
+		pkgPath := pkgInfo.Dir.RestoreAnchor(repoRoot)
+		inferredPathIsBelow, err := pkgPath.ContainsPath(fullInferencePath)
+		if err != nil {
+			return nil, err
+		}
+		// We skip over the root package as the inferred path will always be below it
+		if inferredPathIsBelow && pkgPath != repoRoot {
+			// set both. The user might have set a parent directory filter,
+			// in which case we *should* fail to find any packages, but we should
+			// do so in a consistent manner
+			return &scope_filter.PackageInference{
+				PackageName:   pkgInfo.Name,
+				DirectoryRoot: pkgInferencePath,
+			}, nil
+		}
+		inferredPathIsBetweenRootAndPkg, err := fullInferencePath.ContainsPath(pkgPath)
+		if err != nil {
+			return nil, err
+		}
+		if inferredPathIsBetweenRootAndPkg {
+			// we've found *some* package below our inference directory.
We can stop now and conclude + // that we're looking for all packages in a subdirectory + break + } + } + return &scope_filter.PackageInference{ + DirectoryRoot: pkgInferencePath, + }, nil +} + +func (o *Opts) getPackageChangeFunc(scm scm.SCM, cwd turbopath.AbsoluteSystemPath, ctx *context.Context) scope_filter.PackagesChangedInRange { + return func(fromRef string, toRef string) (util.Set, error) { + // We could filter changed files at the git level, since it's possible + // that the changes we're interested in are scoped, but we need to handle + // global dependencies changing as well. A future optimization might be to + // scope changed files more deeply if we know there are no global dependencies. + var changedFiles []string + if fromRef != "" { + scmChangedFiles, err := scm.ChangedFiles(fromRef, toRef, cwd.ToStringDuringMigration()) + if err != nil { + return nil, err + } + sort.Strings(scmChangedFiles) + changedFiles = scmChangedFiles + } + makeAllPkgs := func() util.Set { + allPkgs := make(util.Set) + for pkg := range ctx.WorkspaceInfos.PackageJSONs { + allPkgs.Add(pkg) + } + return allPkgs + } + if hasRepoGlobalFileChanged, err := repoGlobalFileHasChanged(o, getDefaultGlobalDeps(), changedFiles); err != nil { + return nil, err + } else if hasRepoGlobalFileChanged { + return makeAllPkgs(), nil + } + + filteredChangedFiles, err := filterIgnoredFiles(o, changedFiles) + if err != nil { + return nil, err + } + changedPkgs := getChangedPackages(filteredChangedFiles, ctx.WorkspaceInfos) + + if lockfileChanges, fullChanges := getChangesFromLockfile(scm, ctx, changedFiles, fromRef); !fullChanges { + for _, pkg := range lockfileChanges { + changedPkgs.Add(pkg) + } + } else { + return makeAllPkgs(), nil + } + + return changedPkgs, nil + } +} + +func getChangesFromLockfile(scm scm.SCM, ctx *context.Context, changedFiles []string, fromRef string) ([]string, bool) { + lockfileFilter, err := filter.Compile([]string{ctx.PackageManager.Lockfile}) + if err != nil { + panic(fmt.Sprintf("Lockfile is invalid glob: %v", err)) + } + match := false + for _, file := range changedFiles { + if lockfileFilter.Match(file) { + match = true + break + } + } + if !match { + return nil, false + } + + if lockfile.IsNil(ctx.Lockfile) { + return nil, true + } + + prevContents, err := scm.PreviousContent(fromRef, ctx.PackageManager.Lockfile) + if err != nil { + // unable to reconstruct old lockfile, assume everything changed + return nil, true + } + prevLockfile, err := ctx.PackageManager.UnmarshalLockfile(ctx.WorkspaceInfos.PackageJSONs[util.RootPkgName], prevContents) + if err != nil { + // unable to parse old lockfile, assume everything changed + return nil, true + } + additionalPkgs, err := ctx.ChangedPackages(prevLockfile) + if err != nil { + // missing at least one lockfile, assume everything changed + return nil, true + } + + return additionalPkgs, false +} + +func getDefaultGlobalDeps() []string { + // include turbo.json and root package.json as implicit global dependencies + defaultGlobalDeps := []string{ + "turbo.json", + "package.json", + } + return defaultGlobalDeps +} + +func repoGlobalFileHasChanged(opts *Opts, defaultGlobalDeps []string, changedFiles []string) (bool, error) { + globalDepsGlob, err := filter.Compile(append(opts.GlobalDepPatterns, defaultGlobalDeps...)) + if err != nil { + return false, errors.Wrap(err, "invalid global deps glob") + } + + if globalDepsGlob != nil { + for _, file := range changedFiles { + if globalDepsGlob.Match(filepath.ToSlash(file)) { + return true, nil + } + } + } + 
return false, nil
+}
+
+func filterIgnoredFiles(opts *Opts, changedFiles []string) ([]string, error) {
+	// changedFiles is an array of repo-relative system paths.
+	// opts.IgnorePatterns is an array of unix-separator glob paths.
+	ignoreGlob, err := filter.Compile(opts.IgnorePatterns)
+	if err != nil {
+		return nil, errors.Wrap(err, "invalid ignore globs")
+	}
+	filteredChanges := []string{}
+	for _, file := range changedFiles {
+		// If we don't have anything to ignore, or if this file doesn't match the ignore pattern,
+		// keep it as a changed file.
+		if ignoreGlob == nil || !ignoreGlob.Match(filepath.ToSlash(file)) {
+			filteredChanges = append(filteredChanges, file)
+		}
+	}
+	return filteredChanges, nil
+}
+
+func fileInPackage(changedFile string, packagePath string) bool {
+	// This whole method is basically the regex /^packagePath(\/|$)/ applied to changedFile.
+	// The regex is more expensive, so we check the prefix and the boundary byte directly.
+
+	// If it has the prefix, it might be in the package.
+	if strings.HasPrefix(changedFile, packagePath) {
+		// Now we need to see if the prefix stopped at a reasonable boundary.
+		prefixLen := len(packagePath)
+		changedFileLen := len(changedFile)
+
+		// Same path.
+		if prefixLen == changedFileLen {
+			return true
+		}
+
+		// We know changedFile is longer than packagePath.
+		// We can safely directly index into it.
+		// Look ahead one byte and see if it's the separator.
+		if changedFile[prefixLen] == os.PathSeparator {
+			return true
+		}
+	}
+
+	// If it does not have the prefix, it's definitely not in the package.
+	return false
+}
+
+func getChangedPackages(changedFiles []string, packageInfos workspace.Catalog) util.Set {
+	changedPackages := make(util.Set)
+	for _, changedFile := range changedFiles {
+		found := false
+		for pkgName, pkgInfo := range packageInfos.PackageJSONs {
+			if pkgName != util.RootPkgName && fileInPackage(changedFile, pkgInfo.Dir.ToStringDuringMigration()) {
+				changedPackages.Add(pkgName)
+				found = true
+				break
+			}
+		}
+		if !found {
+			// Consider the root package to have changed
+			changedPackages.Add(util.RootPkgName)
+		}
+	}
+	return changedPackages
+}
diff --git a/cli/internal/scope/scope_test.go b/cli/internal/scope/scope_test.go
new file mode 100644
index 0000000..216984d
--- /dev/null
+++ b/cli/internal/scope/scope_test.go
@@ -0,0 +1,550 @@
+package scope
+
+import (
+	"fmt"
+	"io"
+	"os"
+	"path/filepath"
+	"reflect"
+	"testing"
+
+	"github.com/hashicorp/go-hclog"
+	"github.com/pyr-sh/dag"
+	"github.com/vercel/turbo/cli/internal/context"
+	"github.com/vercel/turbo/cli/internal/fs"
+	"github.com/vercel/turbo/cli/internal/lockfile"
+	"github.com/vercel/turbo/cli/internal/packagemanager"
+	"github.com/vercel/turbo/cli/internal/turbopath"
+	"github.com/vercel/turbo/cli/internal/ui"
+	"github.com/vercel/turbo/cli/internal/util"
+	"github.com/vercel/turbo/cli/internal/workspace"
+)
+
+type mockSCM struct {
+	changed  []string
+	contents map[string][]byte
+}
+
+func (m *mockSCM) ChangedFiles(_fromCommit string, _toCommit string, _relativeTo string) ([]string, error) {
+	return m.changed, nil
+}
+
+func (m *mockSCM) PreviousContent(fromCommit string, filePath string) ([]byte, error) {
+	contents, ok := m.contents[filePath]
+	if !ok {
+		return nil, fmt.Errorf("no contents found")
+	}
+	return contents, nil
+}
+
+type mockLockfile struct {
+	globalChange bool
+	versions     map[string]string
+	allDeps      map[string]map[string]string
+}
+
+func (m *mockLockfile) ResolvePackage(workspacePath turbopath.AnchoredUnixPath, name string, version string) (lockfile.Package, error) {
+	resolvedVersion, ok :=
m.versions[name] + if ok { + key := fmt.Sprintf("%s%s", name, version) + return lockfile.Package{Key: key, Version: resolvedVersion, Found: true}, nil + } + return lockfile.Package{Found: false}, nil +} + +func (m *mockLockfile) AllDependencies(key string) (map[string]string, bool) { + deps, ok := m.allDeps[key] + return deps, ok +} + +func (m *mockLockfile) Encode(w io.Writer) error { + return nil +} + +func (m *mockLockfile) GlobalChange(other lockfile.Lockfile) bool { + return m.globalChange || (other != nil && other.(*mockLockfile).globalChange) +} + +func (m *mockLockfile) Patches() []turbopath.AnchoredUnixPath { + return nil +} + +func (m *mockLockfile) Subgraph(workspaces []turbopath.AnchoredSystemPath, packages []string) (lockfile.Lockfile, error) { + return nil, nil +} + +var _ (lockfile.Lockfile) = (*mockLockfile)(nil) + +func TestResolvePackages(t *testing.T) { + cwd, err := os.Getwd() + if err != nil { + t.Fatalf("cwd: %v", err) + } + root, err := fs.GetCwd(cwd) + if err != nil { + t.Fatalf("cwd: %v", err) + } + tui := ui.Default() + logger := hclog.Default() + // Dependency graph: + // + // app0 - + // \ + // app1 -> libA + // \ + // > libB -> libD + // / + // app2 < + // \ + // > libC + // / + // app2-a < + // + // Filesystem layout: + // + // app/ + // app0 + // app1 + // app2 + // app2-a + // libs/ + // libA + // libB + // libC + // libD + graph := dag.AcyclicGraph{} + graph.Add("app0") + graph.Add("app1") + graph.Add("app2") + graph.Add("app2-a") + graph.Add("libA") + graph.Add("libB") + graph.Add("libC") + graph.Add("libD") + graph.Connect(dag.BasicEdge("libA", "libB")) + graph.Connect(dag.BasicEdge("libB", "libD")) + graph.Connect(dag.BasicEdge("app0", "libA")) + graph.Connect(dag.BasicEdge("app1", "libA")) + graph.Connect(dag.BasicEdge("app2", "libB")) + graph.Connect(dag.BasicEdge("app2", "libC")) + graph.Connect(dag.BasicEdge("app2-a", "libC")) + workspaceInfos := workspace.Catalog{ + PackageJSONs: map[string]*fs.PackageJSON{ + "//": { + Dir: turbopath.AnchoredSystemPath("").ToSystemPath(), + UnresolvedExternalDeps: map[string]string{"global": "2"}, + TransitiveDeps: []lockfile.Package{{Key: "global2", Version: "2", Found: true}}, + }, + "app0": { + Dir: turbopath.AnchoredUnixPath("app/app0").ToSystemPath(), + Name: "app0", + UnresolvedExternalDeps: map[string]string{"app0-dep": "2"}, + TransitiveDeps: []lockfile.Package{ + {Key: "app0-dep2", Version: "2", Found: true}, + {Key: "app0-util2", Version: "2", Found: true}, + }, + }, + "app1": { + Dir: turbopath.AnchoredUnixPath("app/app1").ToSystemPath(), + Name: "app1", + }, + "app2": { + Dir: turbopath.AnchoredUnixPath("app/app2").ToSystemPath(), + Name: "app2", + }, + "app2-a": { + Dir: turbopath.AnchoredUnixPath("app/app2-a").ToSystemPath(), + Name: "app2-a", + }, + "libA": { + Dir: turbopath.AnchoredUnixPath("libs/libA").ToSystemPath(), + Name: "libA", + }, + "libB": { + Dir: turbopath.AnchoredUnixPath("libs/libB").ToSystemPath(), + Name: "libB", + UnresolvedExternalDeps: map[string]string{"external": "1"}, + TransitiveDeps: []lockfile.Package{ + {Key: "external-dep-a1", Version: "1", Found: true}, + {Key: "external-dep-b1", Version: "1", Found: true}, + {Key: "external1", Version: "1", Found: true}, + }, + }, + "libC": { + Dir: turbopath.AnchoredUnixPath("libs/libC").ToSystemPath(), + Name: "libC", + }, + "libD": { + Dir: turbopath.AnchoredUnixPath("libs/libD").ToSystemPath(), + Name: "libD", + }, + }, + } + packageNames := []string{} + for name := range workspaceInfos.PackageJSONs { + packageNames = 
append(packageNames, name) + } + + // global -> globalDep + // app0-dep -> app0-dep :) + + makeLockfile := func(f func(*mockLockfile)) *mockLockfile { + l := mockLockfile{ + globalChange: false, + versions: map[string]string{ + "global": "2", + "app0-dep": "2", + "app0-util": "2", + "external": "1", + "external-dep-a": "1", + "external-dep-b": "1", + }, + allDeps: map[string]map[string]string{ + "global2": map[string]string{}, + "app0-dep2": map[string]string{ + "app0-util": "2", + }, + "app0-util2": map[string]string{}, + "external1": map[string]string{ + "external-dep-a": "1", + "external-dep-b": "1", + }, + "external-dep-a1": map[string]string{}, + "external-dep-b1": map[string]string{}, + }, + } + if f != nil { + f(&l) + } + return &l + } + + testCases := []struct { + name string + changed []string + expected []string + expectAllPackages bool + scope []string + since string + ignore string + globalDeps []string + includeDependencies bool + includeDependents bool + lockfile string + currLockfile *mockLockfile + prevLockfile *mockLockfile + inferPkgPath string + }{ + { + name: "Just scope and dependencies", + changed: []string{}, + includeDependencies: true, + scope: []string{"app2"}, + expected: []string{"app2", "libB", "libC", "libD"}, + }, + { + name: "Only turbo.json changed", + changed: []string{"turbo.json"}, + expected: []string{"//", "app0", "app1", "app2", "app2-a", "libA", "libB", "libC", "libD"}, + since: "dummy", + includeDependencies: true, + }, + { + name: "Only root package.json changed", + changed: []string{"package.json"}, + expected: []string{"//", "app0", "app1", "app2", "app2-a", "libA", "libB", "libC", "libD"}, + since: "dummy", + includeDependencies: true, + }, + { + name: "Only package-lock.json changed", + changed: []string{"package-lock.json"}, + expected: []string{"//", "app0", "app1", "app2", "app2-a", "libA", "libB", "libC", "libD"}, + since: "dummy", + includeDependencies: true, + lockfile: "package-lock.json", + }, + { + name: "Only yarn.lock changed", + changed: []string{"yarn.lock"}, + expected: []string{"//", "app0", "app1", "app2", "app2-a", "libA", "libB", "libC", "libD"}, + since: "dummy", + includeDependencies: true, + lockfile: "yarn.lock", + }, + { + name: "Only pnpm-lock.yaml changed", + changed: []string{"pnpm-lock.yaml"}, + expected: []string{"//", "app0", "app1", "app2", "app2-a", "libA", "libB", "libC", "libD"}, + since: "dummy", + includeDependencies: true, + lockfile: "pnpm-lock.yaml", + }, + { + name: "One package changed", + changed: []string{"libs/libB/src/index.ts"}, + expected: []string{"libB"}, + since: "dummy", + }, + { + name: "One package manifest changed", + changed: []string{"libs/libB/package.json"}, + expected: []string{"libB"}, + since: "dummy", + }, + { + name: "An ignored package changed", + changed: []string{"libs/libB/src/index.ts"}, + expected: []string{}, + since: "dummy", + ignore: "libs/libB/**/*.ts", + }, + { + // nothing in scope depends on the change + name: "unrelated library changed", + changed: []string{"libs/libC/src/index.ts"}, + expected: []string{}, + since: "dummy", + scope: []string{"app1"}, + includeDependencies: true, // scope implies include-dependencies + }, + { + // a dependent lib changed, scope implies include-dependencies, + // so all deps of app1 get built + name: "dependency of scope changed", + changed: []string{"libs/libA/src/index.ts"}, + expected: []string{"libA", "libB", "libD", "app1"}, + since: "dummy", + scope: []string{"app1"}, + includeDependencies: true, // scope implies 
include-dependencies
+		},
+		{
+			// a dependent lib changed, user explicitly asked to not build dependencies.
+			// Since the package matching the scope had a changed dependency, we run it.
+			// We don't include its dependencies because the user asked for no dependencies.
+			// note: this is not yet supported by the CLI, as you cannot specify --include-dependencies=false
+			name:                "dependency of scope changed, user asked to not include dependencies",
+			changed:             []string{"libs/libA/src/index.ts"},
+			expected:            []string{"app1"},
+			since:               "dummy",
+			scope:               []string{"app1"},
+			includeDependencies: false,
+		},
+		{
+			// a nested dependent lib changed, user explicitly asked to not build dependencies
+			// note: this is not yet supported by the CLI, as you cannot specify --include-dependencies=false
+			name:                "nested dependency of scope changed, user asked to not include dependencies",
+			changed:             []string{"libs/libB/src/index.ts"},
+			expected:            []string{"app1"},
+			since:               "dummy",
+			scope:               []string{"app1"},
+			includeDependencies: false,
+		},
+		{
+			name:       "global dependency changed, even though it was ignored, forcing a build of everything",
+			changed:    []string{"libs/libB/src/index.ts"},
+			expected:   []string{"//", "app0", "app1", "app2", "app2-a", "libA", "libB", "libC", "libD"},
+			since:      "dummy",
+			ignore:     "libs/libB/**/*.ts",
+			globalDeps: []string{"libs/**/*.ts"},
+		},
+		{
+			name:                "an app changed, user asked for dependencies to build",
+			changed:             []string{"app/app2/src/index.ts"},
+			since:               "dummy",
+			includeDependencies: true,
+			expected:            []string{"app2", "libB", "libC", "libD"},
+		},
+		{
+			name:              "a library changed, user asked for dependents to be built",
+			changed:           []string{"libs/libB"},
+			since:             "dummy",
+			includeDependents: true,
+			expected:          []string{"app0", "app1", "app2", "libA", "libB"},
+		},
+		{
+			// no changes, no base to compare against, defaults to everything
+			name:              "no changes or scope specified, build everything",
+			since:             "",
+			expected:          []string{"//", "app0", "app1", "app2", "app2-a", "libA", "libB", "libC", "libD"},
+			expectAllPackages: true,
+		},
+		{
+			// a dependent library changed, no deps beyond the scope are built
+			// "libB" is still built because it is a dependent within the scope, but libB's dependents
+			// are skipped
+			name:                "a dependent library changed, build up to scope",
+			changed:             []string{"libs/libD/src/index.ts"},
+			since:               "dummy",
+			scope:               []string{"libB"},
+			expected:            []string{"libB", "libD"},
+			includeDependencies: true, // scope implies include-dependencies
+		},
+		{
+			name:              "library change, no scope",
+			changed:           []string{"libs/libA/src/index.ts"},
+			expected:          []string{"libA", "app0", "app1"},
+			includeDependents: true,
+			since:             "dummy",
+		},
+		{
+			// make sure multiple apps with the same prefix are handled separately.
+ // prevents this issue: https://github.com/vercel/turbo/issues/1528 + name: "Two apps with an overlapping prefix changed", + changed: []string{"app/app2/src/index.js", "app/app2-a/src/index.js"}, + expected: []string{"app2", "app2-a"}, + since: "dummy", + }, + { + name: "Global lockfile change invalidates all packages", + changed: []string{"dummy.lock"}, + expected: []string{"//", "app0", "app1", "app2", "app2-a", "libA", "libB", "libC", "libD"}, + lockfile: "dummy.lock", + currLockfile: makeLockfile(nil), + prevLockfile: makeLockfile(func(ml *mockLockfile) { + ml.globalChange = true + }), + since: "dummy", + }, + { + name: "Dependency of workspace root change invalidates all packages", + changed: []string{"dummy.lock"}, + expected: []string{"//", "app0", "app1", "app2", "app2-a", "libA", "libB", "libC", "libD"}, + lockfile: "dummy.lock", + currLockfile: makeLockfile(nil), + prevLockfile: makeLockfile(func(ml *mockLockfile) { + ml.versions["global"] = "3" + ml.allDeps["global3"] = map[string]string{} + }), + since: "dummy", + }, + { + name: "Version change invalidates package", + changed: []string{"dummy.lock"}, + expected: []string{"//", "app0"}, + lockfile: "dummy.lock", + currLockfile: makeLockfile(nil), + prevLockfile: makeLockfile(func(ml *mockLockfile) { + ml.versions["app0-util"] = "3" + ml.allDeps["app0-dep2"] = map[string]string{"app0-util": "3"} + ml.allDeps["app0-util3"] = map[string]string{} + }), + since: "dummy", + }, + { + name: "Transitive dep invalidates package", + changed: []string{"dummy.lock"}, + expected: []string{"//", "libB"}, + lockfile: "dummy.lock", + currLockfile: makeLockfile(nil), + prevLockfile: makeLockfile(func(ml *mockLockfile) { + ml.versions["external-dep-a"] = "2" + ml.allDeps["external1"] = map[string]string{"external-dep-a": "2", "external-dep-b": "1"} + ml.allDeps["external-dep-a2"] = map[string]string{} + }), + since: "dummy", + }, + { + name: "Transitive dep invalidates package and dependents", + changed: []string{"dummy.lock"}, + expected: []string{"//", "app0", "app1", "app2", "libA", "libB"}, + lockfile: "dummy.lock", + includeDependents: true, + currLockfile: makeLockfile(nil), + prevLockfile: makeLockfile(func(ml *mockLockfile) { + ml.versions["external-dep-a"] = "2" + ml.allDeps["external1"] = map[string]string{"external-dep-a": "2", "external-dep-b": "1"} + ml.allDeps["external-dep-a2"] = map[string]string{} + }), + since: "dummy", + }, + { + name: "Infer app2 from directory", + inferPkgPath: "app/app2", + expected: []string{"app2"}, + }, + { + name: "Infer app2 from a subdirectory", + inferPkgPath: "app/app2/src", + expected: []string{"app2"}, + }, + { + name: "Infer from a directory with no packages", + inferPkgPath: "wrong", + expected: []string{}, + }, + { + name: "Infer from a parent directory", + inferPkgPath: "app", + expected: []string{"app0", "app1", "app2", "app2-a"}, + }, + { + name: "library change, no scope, inferred libs", + changed: []string{"libs/libA/src/index.ts"}, + expected: []string{"libA"}, + since: "dummy", + inferPkgPath: "libs", + }, + { + name: "library change, no scope, inferred app", + changed: []string{"libs/libA/src/index.ts"}, + expected: []string{}, + since: "dummy", + inferPkgPath: "app", + }, + } + for i, tc := range testCases { + t.Run(fmt.Sprintf("test #%v %v", i, tc.name), func(t *testing.T) { + // Convert test data to system separators. 
+ systemSeparatorChanged := make([]string, len(tc.changed)) + for index, path := range tc.changed { + systemSeparatorChanged[index] = filepath.FromSlash(path) + } + scm := &mockSCM{ + changed: systemSeparatorChanged, + contents: make(map[string][]byte, len(systemSeparatorChanged)), + } + for _, path := range systemSeparatorChanged { + scm.contents[path] = nil + } + readLockfile := func(_rootPackageJSON *fs.PackageJSON, content []byte) (lockfile.Lockfile, error) { + return tc.prevLockfile, nil + } + pkgInferenceRoot, err := resolvePackageInferencePath(tc.inferPkgPath) + if err != nil { + t.Errorf("bad inference path (%v): %v", tc.inferPkgPath, err) + } + pkgs, isAllPackages, err := ResolvePackages(&Opts{ + LegacyFilter: LegacyFilter{ + Entrypoints: tc.scope, + Since: tc.since, + IncludeDependencies: tc.includeDependencies, + SkipDependents: !tc.includeDependents, + }, + IgnorePatterns: []string{tc.ignore}, + GlobalDepPatterns: tc.globalDeps, + PackageInferenceRoot: pkgInferenceRoot, + }, root, scm, &context.Context{ + WorkspaceInfos: workspaceInfos, + WorkspaceNames: packageNames, + PackageManager: &packagemanager.PackageManager{Lockfile: tc.lockfile, UnmarshalLockfile: readLockfile}, + WorkspaceGraph: graph, + RootNode: "root", + Lockfile: tc.currLockfile, + }, tui, logger) + if err != nil { + t.Errorf("expected no error, got %v", err) + } + expected := make(util.Set) + for _, pkg := range tc.expected { + expected.Add(pkg) + } + if !reflect.DeepEqual(pkgs, expected) { + t.Errorf("ResolvePackages got %v, want %v", pkgs, expected) + } + if isAllPackages != tc.expectAllPackages { + t.Errorf("isAllPackages got %v, want %v", isAllPackages, tc.expectAllPackages) + } + }) + } +} diff --git a/cli/internal/server/server.go b/cli/internal/server/server.go new file mode 100644 index 0000000..5e738cc --- /dev/null +++ b/cli/internal/server/server.go @@ -0,0 +1,192 @@ +package server + +import ( + "context" + "sync" + "time" + + "github.com/hashicorp/go-hclog" + "github.com/pkg/errors" + "github.com/vercel/turbo/cli/internal/filewatcher" + "github.com/vercel/turbo/cli/internal/fs" + "github.com/vercel/turbo/cli/internal/globwatcher" + "github.com/vercel/turbo/cli/internal/turbodprotocol" + "github.com/vercel/turbo/cli/internal/turbopath" + "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" +) + +// Server implements the GRPC serverside of TurbodServer +// Note for the future: we don't yet make use of turbo.json +// or the package graph in the server. Once we do, we may need a +// layer of indirection between "the thing that responds to grpc requests" +// and "the thing that holds our persistent data structures" to handle +// changes in the underlying configuration. +type Server struct { + turbodprotocol.UnimplementedTurbodServer + watcher *filewatcher.FileWatcher + globWatcher *globwatcher.GlobWatcher + turboVersion string + started time.Time + logFilePath turbopath.AbsoluteSystemPath + repoRoot turbopath.AbsoluteSystemPath + closerMu sync.Mutex + closer *closer +} + +// GRPCServer is the interface that the turbo server needs to the underlying +// GRPC server. This lets the turbo server register itself, as well as provides +// a hook for shutting down the server. +type GRPCServer interface { + grpc.ServiceRegistrar + GracefulStop() +} + +type closer struct { + grpcServer GRPCServer + once sync.Once +} + +func (c *closer) close() { + // This can get triggered from a request handler (Shutdown). 
Since + // calling GracefulStop blocks until all request handlers complete, + // we need to run it in a goroutine to let the Shutdown handler complete + // and avoid deadlocking. + c.once.Do(func() { + go func() { + c.grpcServer.GracefulStop() + }() + }) +} + +var _defaultCookieTimeout = 500 * time.Millisecond + +// New returns a new instance of Server +func New(serverName string, logger hclog.Logger, repoRoot turbopath.AbsoluteSystemPath, turboVersion string, logFilePath turbopath.AbsoluteSystemPath) (*Server, error) { + cookieDir := fs.GetTurboDataDir().UntypedJoin("cookies", serverName) + cookieJar, err := filewatcher.NewCookieJar(cookieDir, _defaultCookieTimeout) + if err != nil { + return nil, err + } + watcher, err := filewatcher.GetPlatformSpecificBackend(logger) + if err != nil { + return nil, err + } + fileWatcher := filewatcher.New(logger.Named("FileWatcher"), repoRoot, watcher) + globWatcher := globwatcher.New(logger.Named("GlobWatcher"), repoRoot, cookieJar) + server := &Server{ + watcher: fileWatcher, + globWatcher: globWatcher, + turboVersion: turboVersion, + started: time.Now(), + logFilePath: logFilePath, + repoRoot: repoRoot, + } + server.watcher.AddClient(cookieJar) + server.watcher.AddClient(globWatcher) + server.watcher.AddClient(server) + if err := server.watcher.Start(); err != nil { + return nil, errors.Wrapf(err, "watching %v", repoRoot) + } + if err := server.watcher.AddRoot(cookieDir); err != nil { + _ = server.watcher.Close() + return nil, errors.Wrapf(err, "failed to watch cookie directory: %v", cookieDir) + } + return server, nil +} + +func (s *Server) tryClose() bool { + s.closerMu.Lock() + defer s.closerMu.Unlock() + if s.closer != nil { + s.closer.close() + return true + } + return false +} + +// OnFileWatchEvent implements filewatcher.FileWatchClient.OnFileWatchEvent +// In the event that the root of the monorepo is deleted, shut down the server. 
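+// For example, an event of the form
+//
+//	filewatcher.Event{EventType: filewatcher.FileDeleted, Path: s.repoRoot}
+//
+// triggers tryClose; all other events are ignored.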
+func (s *Server) OnFileWatchEvent(ev filewatcher.Event) { + if ev.EventType == filewatcher.FileDeleted && ev.Path == s.repoRoot { + _ = s.tryClose() + } +} + +// OnFileWatchError implements filewatcher.FileWatchClient.OnFileWatchError +func (s *Server) OnFileWatchError(err error) {} + +// OnFileWatchClosed implements filewatcher.FileWatchClient.OnFileWatchClosed +func (s *Server) OnFileWatchClosed() {} + +// Close is used for shutting down this copy of the server +func (s *Server) Close() error { + return s.watcher.Close() +} + +// Register registers this server to respond to GRPC requests +func (s *Server) Register(grpcServer GRPCServer) { + s.closerMu.Lock() + s.closer = &closer{ + grpcServer: grpcServer, + } + s.closerMu.Unlock() + turbodprotocol.RegisterTurbodServer(grpcServer, s) +} + +// NotifyOutputsWritten implements the NotifyOutputsWritten rpc from turbo.proto +func (s *Server) NotifyOutputsWritten(ctx context.Context, req *turbodprotocol.NotifyOutputsWrittenRequest) (*turbodprotocol.NotifyOutputsWrittenResponse, error) { + outputs := fs.TaskOutputs{ + Inclusions: req.OutputGlobs, + Exclusions: req.OutputExclusionGlobs, + } + + err := s.globWatcher.WatchGlobs(req.Hash, outputs) + if err != nil { + return nil, err + } + return &turbodprotocol.NotifyOutputsWrittenResponse{}, nil +} + +// GetChangedOutputs implements the GetChangedOutputs rpc from turbo.proto +func (s *Server) GetChangedOutputs(ctx context.Context, req *turbodprotocol.GetChangedOutputsRequest) (*turbodprotocol.GetChangedOutputsResponse, error) { + + changedGlobs, err := s.globWatcher.GetChangedGlobs(req.Hash, req.OutputGlobs) + if err != nil { + return nil, err + } + return &turbodprotocol.GetChangedOutputsResponse{ + ChangedOutputGlobs: changedGlobs, + }, nil +} + +// Hello implements the Hello rpc from turbo.proto +func (s *Server) Hello(ctx context.Context, req *turbodprotocol.HelloRequest) (*turbodprotocol.HelloResponse, error) { + clientVersion := req.Version + if clientVersion != s.turboVersion { + err := status.Errorf(codes.FailedPrecondition, "version mismatch. 
Client %v Server %v", clientVersion, s.turboVersion) + return nil, err + } + return &turbodprotocol.HelloResponse{}, nil +} + +// Shutdown implements the Shutdown rpc from turbo.proto +func (s *Server) Shutdown(ctx context.Context, req *turbodprotocol.ShutdownRequest) (*turbodprotocol.ShutdownResponse, error) { + if s.tryClose() { + return &turbodprotocol.ShutdownResponse{}, nil + } + err := status.Error(codes.NotFound, "shutdown mechanism not found") + return nil, err +} + +// Status implements the Status rpc from turbo.proto +func (s *Server) Status(ctx context.Context, req *turbodprotocol.StatusRequest) (*turbodprotocol.StatusResponse, error) { + uptime := uint64(time.Since(s.started).Milliseconds()) + return &turbodprotocol.StatusResponse{ + DaemonStatus: &turbodprotocol.DaemonStatus{ + LogFile: s.logFilePath.ToString(), + UptimeMsec: uptime, + }, + }, nil +} diff --git a/cli/internal/server/server_test.go b/cli/internal/server/server_test.go new file mode 100644 index 0000000..b7dcf3a --- /dev/null +++ b/cli/internal/server/server_test.go @@ -0,0 +1,73 @@ +package server + +import ( + "context" + "testing" + "time" + + "github.com/hashicorp/go-hclog" + "google.golang.org/grpc" + "gotest.tools/v3/assert" + + turbofs "github.com/vercel/turbo/cli/internal/fs" + "github.com/vercel/turbo/cli/internal/turbodprotocol" +) + +type mockGrpc struct { + stopped chan struct{} +} + +func (m *mockGrpc) GracefulStop() { + close(m.stopped) +} + +func (m *mockGrpc) RegisterService(desc *grpc.ServiceDesc, impl interface{}) {} + +func TestDeleteRepoRoot(t *testing.T) { + logger := hclog.Default() + logger.SetLevel(hclog.Debug) + repoRootRaw := t.TempDir() + repoRoot := turbofs.AbsoluteSystemPathFromUpstream(repoRootRaw) + + grpcServer := &mockGrpc{ + stopped: make(chan struct{}), + } + + s, err := New("testServer", logger, repoRoot, "some-version", "/log/file/path") + assert.NilError(t, err, "New") + s.Register(grpcServer) + + // Delete the repo root, ensure that GracefulStop got called + err = repoRoot.Remove() + assert.NilError(t, err, "Remove") + + select { + case <-grpcServer.stopped: + case <-time.After(2 * time.Second): + t.Error("timed out waiting for graceful stop to be called") + } +} + +func TestShutdown(t *testing.T) { + logger := hclog.Default() + repoRootRaw := t.TempDir() + repoRoot := turbofs.AbsoluteSystemPathFromUpstream(repoRootRaw) + + grpcServer := &mockGrpc{ + stopped: make(chan struct{}), + } + + s, err := New("testServer", logger, repoRoot, "some-version", "/log/file/path") + assert.NilError(t, err, "New") + s.Register(grpcServer) + + ctx := context.Background() + _, err = s.Shutdown(ctx, &turbodprotocol.ShutdownRequest{}) + assert.NilError(t, err, "Shutdown") + // Ensure that graceful stop gets called + select { + case <-grpcServer.stopped: + case <-time.After(2 * time.Second): + t.Error("timed out waiting for graceful stop to be called") + } +} diff --git a/cli/internal/signals/signals.go b/cli/internal/signals/signals.go new file mode 100644 index 0000000..8634144 --- /dev/null +++ b/cli/internal/signals/signals.go @@ -0,0 +1,60 @@ +package signals + +import ( + "os" + "os/signal" + "sync" + "syscall" +) + +// Watcher watches for signals delivered to this process and provides +// the opportunity for turbo to run cleanup +type Watcher struct { + doneCh chan struct{} + closed bool + mu sync.Mutex + closers []func() +} + +// AddOnClose registers a cleanup handler to run when a signal is received +func (w *Watcher) AddOnClose(closer func()) { + w.mu.Lock() + defer w.mu.Unlock() + 
w.closers = append(w.closers, closer) +} + +// Close runs the cleanup handlers registered with this watcher +func (w *Watcher) Close() { + w.mu.Lock() + defer w.mu.Unlock() + if w.closed { + return + } + w.closed = true + for _, closer := range w.closers { + closer() + } + w.closers = nil + close(w.doneCh) +} + +// Done returns a channel that will be closed after all of the cleanup +// handlers have been run. +func (w *Watcher) Done() <-chan struct{} { + return w.doneCh +} + +// NewWatcher returns a new Watcher instance for watching signals. +func NewWatcher() *Watcher { + // TODO: platform specific signals to watch for? + signalCh := make(chan os.Signal, 1) + signal.Notify(signalCh, os.Interrupt, syscall.SIGTERM, syscall.SIGQUIT) + w := &Watcher{ + doneCh: make(chan struct{}), + } + go func() { + <-signalCh + w.Close() + }() + return w +} diff --git a/cli/internal/spinner/spinner.go b/cli/internal/spinner/spinner.go new file mode 100644 index 0000000..8ce6b4a --- /dev/null +++ b/cli/internal/spinner/spinner.go @@ -0,0 +1,89 @@ +package spinner + +import ( + "context" + "fmt" + "io" + "time" + + "github.com/mitchellh/cli" + progressbar "github.com/schollz/progressbar/v3" + "github.com/vercel/turbo/cli/internal/ui" +) + +// getWriterAndColor unwraps cli.Ui instances until it gets to a BasicUi. +// If it happens to spot a ColoredUi along the way, it marks that color is +// enabled. +func getWriterAndColor(terminal cli.Ui, useColor bool) (io.Writer, bool) { + switch terminal := terminal.(type) { + case *cli.BasicUi: + return terminal.Writer, useColor + case *cli.ColoredUi: + return getWriterAndColor(terminal.Ui, true) + case *cli.ConcurrentUi: + return getWriterAndColor(terminal.Ui, useColor) + case *cli.PrefixedUi: + return getWriterAndColor(terminal.Ui, useColor) + case *cli.MockUi: + return terminal.OutputWriter, false + default: + panic(fmt.Sprintf("unknown Ui: %v", terminal)) + } +} + +// WaitFor runs fn, and prints msg to the terminal if it takes longer +// than initialDelay to complete. Depending on the terminal configuration, it may +// display a single instance of msg, or an infinite spinner, updated every 250ms. 
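+//
+// A minimal usage sketch, where connectToDaemon is a hypothetical func():
+//
+//	err := WaitFor(ctx, connectToDaemon, terminal, "Connecting to daemon...", time.Second)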
+func WaitFor(ctx context.Context, fn func(), terminal cli.Ui, msg string, initialDelay time.Duration) error { + doneCh := make(chan struct{}) + go func() { + fn() + close(doneCh) + }() + if ui.IsTTY { + select { + case <-ctx.Done(): + return nil + case <-time.After(initialDelay): + writer, useColor := getWriterAndColor(terminal, false) + bar := progressbar.NewOptions( + -1, + progressbar.OptionEnableColorCodes(useColor), + progressbar.OptionSetDescription(fmt.Sprintf("[yellow]%v[reset]", msg)), + progressbar.OptionSpinnerType(14), + progressbar.OptionSetWriter(writer), + ) + for { + select { + case <-doneCh: + err := bar.Finish() + terminal.Output("") + return err + case <-time.After(250 * time.Millisecond): + if err := bar.Add(1); err != nil { + return err + } + case <-ctx.Done(): + return nil + } + } + case <-doneCh: + return nil + } + } else { + // wait for the timeout before displaying a message, even with no tty + select { + case <-ctx.Done(): + return nil + case <-doneCh: + return nil + case <-time.After(initialDelay): + terminal.Output(msg) + } + select { + case <-ctx.Done(): + case <-doneCh: + } + return nil + } +} diff --git a/cli/internal/tarpatch/tar.go b/cli/internal/tarpatch/tar.go new file mode 100644 index 0000000..a4dab23 --- /dev/null +++ b/cli/internal/tarpatch/tar.go @@ -0,0 +1,92 @@ +// Adapted from https://github.com/moby/moby/blob/924edb948c2731df3b77697a8fcc85da3f6eef57/pkg/archive/archive.go +// Copyright Docker, Inc. +// SPDX-License-Identifier: Apache-2.0 + +// Package tarpatch addresses an issue with stdlib throwing an error in some environments. +package tarpatch + +import ( + "archive/tar" + "io/fs" + "os" + "strings" + "time" + + "github.com/vercel/turbo/cli/internal/turbopath" +) + +// nosysFileInfo hides the system-dependent info of the wrapped FileInfo to +// prevent tar.FileInfoHeader from introspecting it and potentially calling into +// glibc. +type nosysFileInfo struct { + os.FileInfo +} + +func (fi nosysFileInfo) Sys() interface{} { + // A Sys value of type *tar.Header is safe as it is system-independent. + // The tar.FileInfoHeader function copies the fields into the returned + // header without performing any OS lookups. + if sys, ok := fi.FileInfo.Sys().(*tar.Header); ok { + return sys + } + return nil +} + +// FileInfoHeaderNoLookups creates a partially-populated tar.Header from fi. +// +// Compared to the archive/tar.FileInfoHeader function, this function is safe to +// call from a chrooted process as it does not populate fields which would +// require operating system lookups. It behaves identically to +// tar.FileInfoHeader when fi is a FileInfo value returned from +// tar.Header.FileInfo(). +// +// When fi is a FileInfo for a native file, such as returned from os.Stat() and +// os.Lstat(), the returned Header value differs from one returned from +// tar.FileInfoHeader in the following ways. The Uname and Gname fields are not +// set as OS lookups would be required to populate them. The AccessTime and +// ChangeTime fields are not currently set (not yet implemented) although that +// is subject to change. Callers which require the AccessTime or ChangeTime +// fields to be zeroed should explicitly zero them out in the returned Header +// value to avoid any compatibility issues in the future. 
+func FileInfoHeaderNoLookups(fi fs.FileInfo, link string) (*tar.Header, error) {
+	hdr, err := tar.FileInfoHeader(nosysFileInfo{fi}, link)
+	if err != nil {
+		return nil, err
+	}
+	return hdr, sysStat(fi, hdr)
+}
+
+// FileInfoHeader creates a populated Header from fi.
+//
+// Compared to the archive/tar package, this function fills in less information
+// but is safe to call from a chrooted process. The AccessTime and ChangeTime
+// fields are not set in the returned header, ModTime is truncated to one-second
+// precision, and the Uname and Gname fields are only set when fi is a FileInfo
+// value returned from tar.Header.FileInfo().
+func FileInfoHeader(fullPath turbopath.AnchoredUnixPath, fileInfo fs.FileInfo, link string) (*tar.Header, error) {
+	hdr, err := FileInfoHeaderNoLookups(fileInfo, link)
+	if err != nil {
+		return nil, err
+	}
+	hdr.Format = tar.FormatPAX
+	hdr.ModTime = hdr.ModTime.Truncate(time.Second)
+	hdr.AccessTime = time.Time{}
+	hdr.ChangeTime = time.Time{}
+	hdr.Mode = int64(chmodTarEntry(os.FileMode(hdr.Mode)))
+	hdr.Name = canonicalTarName(fullPath, fileInfo.IsDir())
+	return hdr, nil
+}
+
+// canonicalTarName provides a platform-independent and consistent posix-style
+// path for files and directories to be archived regardless of the platform.
+func canonicalTarName(fullPath turbopath.AnchoredUnixPath, isDir bool) string {
+	nameString := fullPath.ToString()
+	if isDir {
+		// Append '/' if not already present.
+		if !strings.HasSuffix(nameString, "/") {
+			nameString += "/"
+		}
+	}
+
+	return nameString
+}
diff --git a/cli/internal/tarpatch/tar_unix.go b/cli/internal/tarpatch/tar_unix.go
new file mode 100644
index 0000000..3020c0e
--- /dev/null
+++ b/cli/internal/tarpatch/tar_unix.go
@@ -0,0 +1,42 @@
+//go:build !windows
+// +build !windows
+
+// Adapted from https://github.com/moby/moby/blob/924edb948c2731df3b77697a8fcc85da3f6eef57/pkg/archive/archive_unix.go
+// Copyright Docker, Inc.
+// SPDX-License-Identifier: Apache-2.0
+
+package tarpatch
+
+import (
+	"archive/tar"
+	"os"
+	"syscall"
+
+	"golang.org/x/sys/unix"
+)
+
+// chmodTarEntry is used to adjust the file permissions in the tar header based
+// on the platform on which the archive is created.
+func chmodTarEntry(perm os.FileMode) os.FileMode {
+	return perm // noop for unix as golang APIs provide perm bits correctly
+}
+
+// sysStat populates hdr from system-dependent fields of fi without performing
+// any OS lookups.
+func sysStat(fi os.FileInfo, hdr *tar.Header) error {
+	s, ok := fi.Sys().(*syscall.Stat_t)
+	if !ok {
+		return nil
+	}
+
+	hdr.Uid = int(s.Uid)
+	hdr.Gid = int(s.Gid)
+
+	if s.Mode&unix.S_IFBLK != 0 ||
+		s.Mode&unix.S_IFCHR != 0 {
+		hdr.Devmajor = int64(unix.Major(uint64(s.Rdev))) //nolint: unconvert
+		hdr.Devminor = int64(unix.Minor(uint64(s.Rdev))) //nolint: unconvert
+	}
+
+	return nil
+}
diff --git a/cli/internal/tarpatch/tar_windows.go b/cli/internal/tarpatch/tar_windows.go
new file mode 100644
index 0000000..486e6fd
--- /dev/null
+++ b/cli/internal/tarpatch/tar_windows.go
@@ -0,0 +1,27 @@
+//go:build windows
+// +build windows
+
+// Adapted from https://github.com/moby/moby/blob/924edb948c2731df3b77697a8fcc85da3f6eef57/pkg/archive/archive_windows.go
+// Copyright Docker, Inc.
+// SPDX-License-Identifier: Apache-2.0
+
+package tarpatch
+
+import (
+	"archive/tar"
+	"os"
+)
+
+// chmodTarEntry is used to adjust the file permissions in the tar header based
+// on the platform on which the archive is created.
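+// For example, a file that is 0o666 on disk is archived as 0o755: the group-
+// and world-writable bits are masked off and the execute bits are added.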
+func chmodTarEntry(perm os.FileMode) os.FileMode {
+	// Remove group- and world-writable bits.
+	perm &= 0o755
+
+	// Add the x bit: make everything +x on Windows
+	return perm | 0o111
+}
+
+func sysStat(fi os.FileInfo, hdr *tar.Header) error {
+	return nil
+}
diff --git a/cli/internal/taskhash/taskhash.go b/cli/internal/taskhash/taskhash.go
new file mode 100644
index 0000000..a912ad9
--- /dev/null
+++ b/cli/internal/taskhash/taskhash.go
@@ -0,0 +1,497 @@
+// Package taskhash handles calculating dependency hashes for nodes in the task execution graph.
+package taskhash
+
+import (
+	"fmt"
+	"sort"
+	"strings"
+	"sync"
+
+	"github.com/hashicorp/go-hclog"
+	"github.com/pyr-sh/dag"
+	gitignore "github.com/sabhiram/go-gitignore"
+	"github.com/vercel/turbo/cli/internal/doublestar"
+	"github.com/vercel/turbo/cli/internal/env"
+	"github.com/vercel/turbo/cli/internal/fs"
+	"github.com/vercel/turbo/cli/internal/hashing"
+	"github.com/vercel/turbo/cli/internal/inference"
+	"github.com/vercel/turbo/cli/internal/nodes"
+	"github.com/vercel/turbo/cli/internal/runsummary"
+	"github.com/vercel/turbo/cli/internal/turbopath"
+	"github.com/vercel/turbo/cli/internal/util"
+	"github.com/vercel/turbo/cli/internal/workspace"
+	"golang.org/x/sync/errgroup"
+)
+
+// Tracker caches package-inputs hashes, as well as package-task hashes.
+// package-inputs hashes must be calculated before package-task hashes,
+// and package-task hashes must be calculated in topological order.
+// package-task hashing is threadsafe, provided topological order is
+// respected.
+type Tracker struct {
+	rootNode   string
+	globalHash string
+	pipeline   fs.Pipeline
+
+	packageInputsHashes packageFileHashes
+
+	// packageInputsExpandedHashes maps a hash key to the hashes of the files that
+	// are inputs to the task.
+	// Writes to this map happen during CalculateFileHashes(). Since this happens synchronously
+	// before walking the task graph, it does not need to be protected by a mutex.
+	packageInputsExpandedHashes map[packageFileHashKey]map[turbopath.AnchoredUnixPath]string
+
+	// mu is a mutex that we can lock/unlock to read/write from maps
+	// the fields below should be protected by the mutex.
+	mu                     sync.RWMutex
+	packageTaskEnvVars     map[string]env.DetailedMap // taskId -> envvar pairs that affect the hash.
+	packageTaskHashes      map[string]string          // taskID -> hash
+	packageTaskFramework   map[string]string          // taskID -> inferred framework for package
+	packageTaskOutputs     map[string][]turbopath.AnchoredSystemPath
+	packageTaskCacheStatus map[string]runsummary.TaskCacheSummary
+}
+
+// NewTracker creates a tracker for package-inputs combinations and package-task combinations.
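+// Illustrative construction (rootNode, globalHash, and pipeline stand in for
+// the caller's values):
+//
+//	tracker := NewTracker(rootNode, globalHash, pipeline)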
+func NewTracker(rootNode string, globalHash string, pipeline fs.Pipeline) *Tracker { + return &Tracker{ + rootNode: rootNode, + globalHash: globalHash, + pipeline: pipeline, + packageTaskHashes: make(map[string]string), + packageTaskFramework: make(map[string]string), + packageTaskEnvVars: make(map[string]env.DetailedMap), + packageTaskOutputs: make(map[string][]turbopath.AnchoredSystemPath), + packageTaskCacheStatus: make(map[string]runsummary.TaskCacheSummary), + } +} + +// packageFileSpec defines a combination of a package and optional set of input globs +type packageFileSpec struct { + pkg string + inputs []string +} + +func specFromPackageTask(packageTask *nodes.PackageTask) packageFileSpec { + return packageFileSpec{ + pkg: packageTask.PackageName, + inputs: packageTask.TaskDefinition.Inputs, + } +} + +// packageFileHashKey is a hashable representation of a packageFileSpec. +type packageFileHashKey string + +// hashes the inputs for a packageTask +func (pfs packageFileSpec) ToKey() packageFileHashKey { + sort.Strings(pfs.inputs) + return packageFileHashKey(fmt.Sprintf("%v#%v", pfs.pkg, strings.Join(pfs.inputs, "!"))) +} + +func safeCompileIgnoreFile(filepath string) (*gitignore.GitIgnore, error) { + if fs.FileExists(filepath) { + return gitignore.CompileIgnoreFile(filepath) + } + // no op + return gitignore.CompileIgnoreLines([]string{}...), nil +} + +func (pfs *packageFileSpec) getHashObject(pkg *fs.PackageJSON, repoRoot turbopath.AbsoluteSystemPath) map[turbopath.AnchoredUnixPath]string { + hashObject, pkgDepsErr := hashing.GetPackageDeps(repoRoot, &hashing.PackageDepsOptions{ + PackagePath: pkg.Dir, + InputPatterns: pfs.inputs, + }) + if pkgDepsErr != nil { + manualHashObject, err := manuallyHashPackage(pkg, pfs.inputs, repoRoot) + if err != nil { + return make(map[turbopath.AnchoredUnixPath]string) + } + hashObject = manualHashObject + } + + return hashObject +} + +func (pfs *packageFileSpec) hash(hashObject map[turbopath.AnchoredUnixPath]string) (string, error) { + hashOfFiles, otherErr := fs.HashObject(hashObject) + if otherErr != nil { + return "", otherErr + } + return hashOfFiles, nil +} + +func manuallyHashPackage(pkg *fs.PackageJSON, inputs []string, rootPath turbopath.AbsoluteSystemPath) (map[turbopath.AnchoredUnixPath]string, error) { + hashObject := make(map[turbopath.AnchoredUnixPath]string) + // Instead of implementing all gitignore properly, we hack it. We only respect .gitignore in the root and in + // the directory of a package. + ignore, err := safeCompileIgnoreFile(rootPath.UntypedJoin(".gitignore").ToString()) + if err != nil { + return nil, err + } + + ignorePkg, err := safeCompileIgnoreFile(rootPath.UntypedJoin(pkg.Dir.ToStringDuringMigration(), ".gitignore").ToString()) + if err != nil { + return nil, err + } + + pathPrefix := rootPath.UntypedJoin(pkg.Dir.ToStringDuringMigration()) + includePattern := "" + excludePattern := "" + if len(inputs) > 0 { + var includePatterns []string + var excludePatterns []string + for _, pattern := range inputs { + if len(pattern) > 0 && pattern[0] == '!' 
{ + excludePatterns = append(excludePatterns, pathPrefix.UntypedJoin(pattern[1:]).ToString()) + } else { + includePatterns = append(includePatterns, pathPrefix.UntypedJoin(pattern).ToString()) + } + } + if len(includePatterns) > 0 { + includePattern = "{" + strings.Join(includePatterns, ",") + "}" + } + if len(excludePatterns) > 0 { + excludePattern = "{" + strings.Join(excludePatterns, ",") + "}" + } + } + + err = fs.Walk(pathPrefix.ToStringDuringMigration(), func(name string, isDir bool) error { + convertedName := turbopath.AbsoluteSystemPathFromUpstream(name) + rootMatch := ignore.MatchesPath(convertedName.ToString()) + otherMatch := ignorePkg.MatchesPath(convertedName.ToString()) + if !rootMatch && !otherMatch { + if !isDir { + if includePattern != "" { + val, err := doublestar.PathMatch(includePattern, convertedName.ToString()) + if err != nil { + return err + } + if !val { + return nil + } + } + if excludePattern != "" { + val, err := doublestar.PathMatch(excludePattern, convertedName.ToString()) + if err != nil { + return err + } + if val { + return nil + } + } + hash, err := fs.GitLikeHashFile(convertedName.ToString()) + if err != nil { + return fmt.Errorf("could not hash file %v. \n%w", convertedName.ToString(), err) + } + + relativePath, err := convertedName.RelativeTo(pathPrefix) + if err != nil { + return fmt.Errorf("File path cannot be made relative: %w", err) + } + hashObject[relativePath.ToUnixPath()] = hash + } + } + return nil + }) + if err != nil { + return nil, err + } + return hashObject, nil +} + +// packageFileHashes is a map from a package and optional input globs to the hash of +// the matched files in the package. +type packageFileHashes map[packageFileHashKey]string + +// CalculateFileHashes hashes each unique package-inputs combination that is present +// in the task graph. Must be called before calculating task hashes. 
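+// A sketch of the intended call sequence on a Tracker (argument names here are
+// illustrative):
+//
+//	err := tracker.CalculateFileHashes(allTasks, workerCount, workspaceInfos, taskDefinitions, repoRoot)
+//	// ...then CalculateTaskHash for each task, in topological order.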
+func (th *Tracker) CalculateFileHashes(
+	allTasks []dag.Vertex,
+	workerCount int,
+	workspaceInfos workspace.Catalog,
+	taskDefinitions map[string]*fs.TaskDefinition,
+	repoRoot turbopath.AbsoluteSystemPath,
+) error {
+	hashTasks := make(util.Set)
+
+	for _, v := range allTasks {
+		taskID, ok := v.(string)
+		if !ok {
+			// report the raw vertex; taskID is the zero value when the assertion fails
+			return fmt.Errorf("unknown task %v", v)
+		}
+		if taskID == th.rootNode {
+			continue
+		}
+		pkgName, _ := util.GetPackageTaskFromId(taskID)
+		if pkgName == th.rootNode {
+			continue
+		}
+
+		taskDefinition, ok := taskDefinitions[taskID]
+		if !ok {
+			return fmt.Errorf("missing pipeline entry %v", taskID)
+		}
+
+		pfs := &packageFileSpec{
+			pkg:    pkgName,
+			inputs: taskDefinition.Inputs,
+		}
+
+		hashTasks.Add(pfs)
+	}
+
+	hashes := make(map[packageFileHashKey]string, len(hashTasks))
+	hashObjects := make(map[packageFileHashKey]map[turbopath.AnchoredUnixPath]string, len(hashTasks))
+	hashQueue := make(chan *packageFileSpec, workerCount)
+	hashErrs := &errgroup.Group{}
+
+	for i := 0; i < workerCount; i++ {
+		hashErrs.Go(func() error {
+			for packageFileSpec := range hashQueue {
+				pkg, ok := workspaceInfos.PackageJSONs[packageFileSpec.pkg]
+				if !ok {
+					return fmt.Errorf("cannot find package %v", packageFileSpec.pkg)
+				}
+				hashObject := packageFileSpec.getHashObject(pkg, repoRoot)
+				hash, err := packageFileSpec.hash(hashObject)
+				if err != nil {
+					return err
+				}
+				th.mu.Lock()
+				pfsKey := packageFileSpec.ToKey()
+				hashes[pfsKey] = hash
+				hashObjects[pfsKey] = hashObject
+				th.mu.Unlock()
+			}
+			return nil
+		})
+	}
+	for ht := range hashTasks {
+		hashQueue <- ht.(*packageFileSpec)
+	}
+	close(hashQueue)
+	err := hashErrs.Wait()
+	if err != nil {
+		return err
+	}
+	th.packageInputsHashes = hashes
+	th.packageInputsExpandedHashes = hashObjects
+	return nil
+}
+
+type taskHashable struct {
+	packageDir           turbopath.AnchoredUnixPath
+	hashOfFiles          string
+	externalDepsHash     string
+	task                 string
+	outputs              fs.TaskOutputs
+	passThruArgs         []string
+	envMode              util.EnvMode
+	passthroughEnv       []string
+	hashableEnvPairs     []string
+	globalHash           string
+	taskDependencyHashes []string
+}
+
+type oldTaskHashable struct {
+	packageDir           turbopath.AnchoredUnixPath
+	hashOfFiles          string
+	externalDepsHash     string
+	task                 string
+	outputs              fs.TaskOutputs
+	passThruArgs         []string
+	hashableEnvPairs     []string
+	globalHash           string
+	taskDependencyHashes []string
+}
+
+// calculateTaskHashFromHashable returns a hash string from the taskHashable
+func calculateTaskHashFromHashable(full *taskHashable, useOldTaskHashable bool) (string, error) {
+	// The user is not using the strict environment variables feature.
+	if useOldTaskHashable {
+		return fs.HashObject(&oldTaskHashable{
+			packageDir:           full.packageDir,
+			hashOfFiles:          full.hashOfFiles,
+			externalDepsHash:     full.externalDepsHash,
+			task:                 full.task,
+			outputs:              full.outputs,
+			passThruArgs:         full.passThruArgs,
+			hashableEnvPairs:     full.hashableEnvPairs,
+			globalHash:           full.globalHash,
+			taskDependencyHashes: full.taskDependencyHashes,
+		})
+	}
+
+	switch full.envMode {
+	case util.Loose:
+		// Remove the passthroughs from hash consideration if we're explicitly loose.
+		full.passthroughEnv = nil
+		return fs.HashObject(full)
+	case util.Strict:
+		// Collapse `nil` and `[]` in strict mode.
+ if full.passthroughEnv == nil {
+ full.passthroughEnv = make([]string, 0)
+ }
+ return fs.HashObject(full)
+ case util.Infer:
+ panic("task inferred status should have already been resolved")
+ default:
+ panic("unimplemented environment mode")
+ }
+}
+
+func (th *Tracker) calculateDependencyHashes(dependencySet dag.Set) ([]string, error) {
+ dependencyHashSet := make(util.Set)
+
+ rootPrefix := th.rootNode + util.TaskDelimiter
+ th.mu.RLock()
+ defer th.mu.RUnlock()
+ for _, dependency := range dependencySet {
+ if dependency == th.rootNode {
+ continue
+ }
+ dependencyTask, ok := dependency.(string)
+ if !ok {
+ return nil, fmt.Errorf("unknown task: %v", dependency)
+ }
+ if strings.HasPrefix(dependencyTask, rootPrefix) {
+ continue
+ }
+ dependencyHash, ok := th.packageTaskHashes[dependencyTask]
+ if !ok {
+ return nil, fmt.Errorf("missing hash for dependent task: %v", dependencyTask)
+ }
+ dependencyHashSet.Add(dependencyHash)
+ }
+ dependenciesHashList := dependencyHashSet.UnsafeListOfStrings()
+ sort.Strings(dependenciesHashList)
+ return dependenciesHashList, nil
+}
+
+// CalculateTaskHash calculates the hash for a package-task combination. It is threadsafe, provided
+// that it has previously been called on its task-graph dependencies. File hashes must be calculated
+// first.
+func (th *Tracker) CalculateTaskHash(packageTask *nodes.PackageTask, dependencySet dag.Set, logger hclog.Logger, args []string, useOldTaskHashable bool) (string, error) {
+ pfs := specFromPackageTask(packageTask)
+ pkgFileHashKey := pfs.ToKey()
+
+ hashOfFiles, ok := th.packageInputsHashes[pkgFileHashKey]
+ if !ok {
+ return "", fmt.Errorf("cannot find package-file hash for %v", pkgFileHashKey)
+ }
+
+ var keyMatchers []string
+ framework := inference.InferFramework(packageTask.Pkg)
+ if framework != nil && framework.EnvMatcher != "" {
+ // log auto detected framework and env prefix
+ logger.Debug(fmt.Sprintf("auto detected framework for %s", packageTask.PackageName), "framework", framework.Slug, "env_prefix", framework.EnvMatcher)
+ keyMatchers = append(keyMatchers, framework.EnvMatcher)
+ }
+
+ envVars, err := env.GetHashableEnvVars(
+ packageTask.TaskDefinition.EnvVarDependencies,
+ keyMatchers,
+ "TURBO_CI_VENDOR_ENV_KEY",
+ )
+ if err != nil {
+ return "", err
+ }
+ hashableEnvPairs := envVars.All.ToHashable()
+ outputs := packageTask.HashableOutputs()
+ taskDependencyHashes, err := th.calculateDependencyHashes(dependencySet)
+ if err != nil {
+ return "", err
+ }
+ // log any auto detected env vars
+ logger.Debug(fmt.Sprintf("task hash env vars for %s:%s", packageTask.PackageName, packageTask.Task), "vars", hashableEnvPairs)
+
+ hash, err := calculateTaskHashFromHashable(&taskHashable{
+ packageDir: packageTask.Pkg.Dir.ToUnixPath(),
+ hashOfFiles: hashOfFiles,
+ externalDepsHash: packageTask.Pkg.ExternalDepsHash,
+ task: packageTask.Task,
+ outputs: outputs.Sort(),
+ passThruArgs: args,
+ envMode: packageTask.EnvMode,
+ passthroughEnv: packageTask.TaskDefinition.PassthroughEnv,
+ hashableEnvPairs: hashableEnvPairs,
+ globalHash: th.globalHash,
+ taskDependencyHashes: taskDependencyHashes,
+ }, useOldTaskHashable)
+ if err != nil {
+ return "", fmt.Errorf("failed to hash task %v: %w", packageTask.TaskID, err)
+ }
+ th.mu.Lock()
+ th.packageTaskEnvVars[packageTask.TaskID] = envVars
+ th.packageTaskHashes[packageTask.TaskID] = hash
+ if framework != nil {
+ th.packageTaskFramework[packageTask.TaskID] = framework.Slug
+ }
+ th.mu.Unlock()
+ return hash, nil
+}
+
+// GetExpandedInputs gets the expanded set
of inputs for a given PackageTask
+func (th *Tracker) GetExpandedInputs(packageTask *nodes.PackageTask) map[turbopath.AnchoredUnixPath]string {
+ pfs := specFromPackageTask(packageTask)
+ expandedInputs := th.packageInputsExpandedHashes[pfs.ToKey()]
+ inputsCopy := make(map[turbopath.AnchoredUnixPath]string, len(expandedInputs))
+
+ for path, hash := range expandedInputs {
+ inputsCopy[path] = hash
+ }
+
+ return inputsCopy
+}
+
+// GetEnvVars returns the hashed env vars for a given taskID
+func (th *Tracker) GetEnvVars(taskID string) env.DetailedMap {
+ th.mu.RLock()
+ defer th.mu.RUnlock()
+ return th.packageTaskEnvVars[taskID]
+}
+
+// GetFramework returns the inferred framework for a given taskID
+func (th *Tracker) GetFramework(taskID string) string {
+ th.mu.RLock()
+ defer th.mu.RUnlock()
+ return th.packageTaskFramework[taskID]
+}
+
+// GetExpandedOutputs returns a list of outputs for a given taskID
+func (th *Tracker) GetExpandedOutputs(taskID string) []turbopath.AnchoredSystemPath {
+ th.mu.RLock()
+ defer th.mu.RUnlock()
+ outputs, ok := th.packageTaskOutputs[taskID]
+
+ if !ok {
+ return []turbopath.AnchoredSystemPath{}
+ }
+
+ return outputs
+}
+
+// SetExpandedOutputs records the list of outputs for a given taskID so it can be read later
+func (th *Tracker) SetExpandedOutputs(taskID string, outputs []turbopath.AnchoredSystemPath) {
+ th.mu.Lock()
+ defer th.mu.Unlock()
+ th.packageTaskOutputs[taskID] = outputs
+}
+
+// SetCacheStatus records the task status for the given taskID
+func (th *Tracker) SetCacheStatus(taskID string, cacheSummary runsummary.TaskCacheSummary) {
+ th.mu.Lock()
+ defer th.mu.Unlock()
+ th.packageTaskCacheStatus[taskID] = cacheSummary
+}
+
+// GetCacheStatus returns the recorded task status for the given taskID
+func (th *Tracker) GetCacheStatus(taskID string) runsummary.TaskCacheSummary {
+ th.mu.Lock()
+ defer th.mu.Unlock()
+
+ if status, ok := th.packageTaskCacheStatus[taskID]; ok {
+ return status
+ }
+
+ // Return an empty one, all the fields will be false and 0
+ return runsummary.TaskCacheSummary{}
+}
diff --git a/cli/internal/taskhash/taskhash_test.go b/cli/internal/taskhash/taskhash_test.go
new file mode 100644
index 0000000..dea0010
--- /dev/null
+++ b/cli/internal/taskhash/taskhash_test.go
@@ -0,0 +1,138 @@
+package taskhash
+
+import (
+ "path/filepath"
+ "strings"
+ "testing"
+
+ "github.com/vercel/turbo/cli/internal/fs"
+ "github.com/vercel/turbo/cli/internal/turbopath"
+)
+
+func Test_manuallyHashPackage(t *testing.T) {
+ rootIgnore := strings.Join([]string{
+ "ignoreme",
+ "ignorethisdir/",
+ }, "\n")
+ pkgIgnore := strings.Join([]string{
+ "pkgignoreme",
+ "pkgignorethisdir/",
+ }, "\n")
+ root := t.TempDir()
+ repoRoot := turbopath.AbsoluteSystemPathFromUpstream(root)
+ pkgName := turbopath.AnchoredUnixPath("child-dir/libA").ToSystemPath()
+ type fileHash struct {
+ contents string
+ hash string
+ }
+ files := map[turbopath.AnchoredUnixPath]fileHash{
+ "top-level-file": {"top-level-file-contents", ""},
+ "other-dir/other-dir-file": {"other-dir-file-contents", ""},
+ "ignoreme": {"anything", ""},
+ "child-dir/libA/some-file": {"some-file-contents", "7e59c6a6ea9098c6d3beb00e753e2c54ea502311"},
+ "child-dir/libA/some-dir/other-file": {"some-file-contents", "7e59c6a6ea9098c6d3beb00e753e2c54ea502311"},
+ "child-dir/libA/some-dir/another-one": {"some-file-contents", "7e59c6a6ea9098c6d3beb00e753e2c54ea502311"},
+ "child-dir/libA/some-dir/excluded-file": {"some-file-contents", "7e59c6a6ea9098c6d3beb00e753e2c54ea502311"},
+ "child-dir/libA/ignoreme":
{"anything", ""}, + "child-dir/libA/ignorethisdir/anything": {"anything", ""}, + "child-dir/libA/pkgignoreme": {"anything", ""}, + "child-dir/libA/pkgignorethisdir/file": {"anything", ""}, + } + + rootIgnoreFile, err := repoRoot.Join(".gitignore").Create() + if err != nil { + t.Fatalf("failed to create .gitignore: %v", err) + } + _, err = rootIgnoreFile.WriteString(rootIgnore) + if err != nil { + t.Fatalf("failed to write contents to .gitignore: %v", err) + } + rootIgnoreFile.Close() + pkgIgnoreFilename := pkgName.RestoreAnchor(repoRoot).Join(".gitignore") + err = pkgIgnoreFilename.EnsureDir() + if err != nil { + t.Fatalf("failed to ensure directories for %v: %v", pkgIgnoreFilename, err) + } + pkgIgnoreFile, err := pkgIgnoreFilename.Create() + if err != nil { + t.Fatalf("failed to create libA/.gitignore: %v", err) + } + _, err = pkgIgnoreFile.WriteString(pkgIgnore) + if err != nil { + t.Fatalf("failed to write contents to libA/.gitignore: %v", err) + } + pkgIgnoreFile.Close() + for path, spec := range files { + filename := path.ToSystemPath().RestoreAnchor(repoRoot) + err = filename.EnsureDir() + if err != nil { + t.Fatalf("failed to ensure directories for %v: %v", filename, err) + } + f, err := filename.Create() + if err != nil { + t.Fatalf("failed to create file: %v: %v", filename, err) + } + _, err = f.WriteString(spec.contents) + if err != nil { + t.Fatalf("failed to write contents to %v: %v", filename, err) + } + f.Close() + } + // now that we've created the repo, expect our .gitignore file too + files[turbopath.AnchoredUnixPath("child-dir/libA/.gitignore")] = fileHash{contents: "", hash: "3237694bc3312ded18386964a855074af7b066af"} + + pkg := &fs.PackageJSON{ + Dir: pkgName, + } + hashes, err := manuallyHashPackage(pkg, []string{}, repoRoot) + if err != nil { + t.Fatalf("failed to calculate manual hashes: %v", err) + } + + count := 0 + for path, spec := range files { + systemPath := path.ToSystemPath() + if systemPath.HasPrefix(pkgName) { + relPath := systemPath[len(pkgName)+1:] + got, ok := hashes[relPath.ToUnixPath()] + if !ok { + if spec.hash != "" { + t.Errorf("did not find hash for %v, but wanted one", path) + } + } else if got != spec.hash { + t.Errorf("hash of %v, got %v want %v", path, got, spec.hash) + } else { + count++ + } + } + } + if count != len(hashes) { + t.Errorf("found extra hashes in %v", hashes) + } + + count = 0 + justFileHashes, err := manuallyHashPackage(pkg, []string{filepath.FromSlash("**/*file"), "!" 
+ filepath.FromSlash("some-dir/excluded-file")}, repoRoot)
+ if err != nil {
+ t.Fatalf("failed to calculate manual hashes: %v", err)
+ }
+ for path, spec := range files {
+ systemPath := path.ToSystemPath()
+ if systemPath.HasPrefix(pkgName) {
+ shouldInclude := strings.HasSuffix(systemPath.ToString(), "file") && !strings.HasSuffix(systemPath.ToString(), "excluded-file")
+ relPath := systemPath[len(pkgName)+1:]
+ got, ok := justFileHashes[relPath.ToUnixPath()]
+ if !ok && shouldInclude {
+ if spec.hash != "" {
+ t.Errorf("did not find hash for %v, but wanted one", path)
+ }
+ } else if shouldInclude && got != spec.hash {
+ t.Errorf("hash of %v, got %v want %v", path, got, spec.hash)
+ } else if shouldInclude {
+ count++
+ }
+ }
+ }
+ if count != len(justFileHashes) {
+ t.Errorf("found extra hashes in %v", justFileHashes)
+ }
+}
diff --git a/cli/internal/turbodprotocol/turbod.proto b/cli/internal/turbodprotocol/turbod.proto
new file mode 100644
index 0000000..cf7c554
--- /dev/null
+++ b/cli/internal/turbodprotocol/turbod.proto
@@ -0,0 +1,53 @@
+syntax = "proto3";
+
+option go_package = "github.com/vercel/turbo/cli/internal/turbodprotocol";
+
+package turbodprotocol;
+
+service Turbod {
+ rpc Hello (HelloRequest) returns (HelloResponse);
+ rpc Shutdown (ShutdownRequest) returns (ShutdownResponse);
+ rpc Status (StatusRequest) returns (StatusResponse);
+ // Implement cache watching
+ rpc NotifyOutputsWritten (NotifyOutputsWrittenRequest) returns (NotifyOutputsWrittenResponse);
+ rpc GetChangedOutputs (GetChangedOutputsRequest) returns (GetChangedOutputsResponse);
+}
+
+message HelloRequest {
+ string version = 1;
+ string session_id = 2;
+}
+
+message HelloResponse {}
+
+message ShutdownRequest {}
+
+message ShutdownResponse {}
+
+message StatusRequest {}
+
+message StatusResponse {
+ DaemonStatus daemonStatus = 1;
+}
+
+message NotifyOutputsWrittenRequest {
+ repeated string output_globs = 1;
+ string hash = 2;
+ repeated string output_exclusion_globs = 3;
+}
+
+message NotifyOutputsWrittenResponse {}
+
+message GetChangedOutputsRequest {
+ repeated string output_globs = 1;
+ string hash = 2;
+}
+
+message GetChangedOutputsResponse {
+ repeated string changed_output_globs = 1;
+}
+
+message DaemonStatus {
+ string log_file = 1;
+ uint64 uptime_msec = 2;
+}
diff --git a/cli/internal/turbopath/absolute_system_path.go b/cli/internal/turbopath/absolute_system_path.go
new file mode 100644
index 0000000..df65827
--- /dev/null
+++ b/cli/internal/turbopath/absolute_system_path.go
@@ -0,0 +1,258 @@
+package turbopath
+
+import (
+ "io/ioutil"
+ "os"
+ "path/filepath"
+ "strings"
+)
+
+// AbsoluteSystemPath is an absolute path using system separators.
+type AbsoluteSystemPath string
+
+// _dirPermissions are the default permission bits we apply to directories.
+const _dirPermissions = os.ModeDir | 0775
+
+// _nonRelativeSentinel is the leading sentinel that indicates traversal.
+const _nonRelativeSentinel = ".."
+
+// ToString returns a string representation of this Path.
+// Used for interfacing with APIs that require a string.
+func (p AbsoluteSystemPath) ToString() string {
+ return string(p)
+}
+
+// RelativeTo calculates the relative path between two `AbsoluteSystemPath`s.
+func (p AbsoluteSystemPath) RelativeTo(basePath AbsoluteSystemPath) (AnchoredSystemPath, error) {
+ processed, err := filepath.Rel(basePath.ToString(), p.ToString())
+ return AnchoredSystemPath(processed), err
+}
+
+// Join appends relative path segments to this AbsoluteSystemPath.
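+// A usage sketch (the concrete paths are illustrative):
+//
+//	root := AbsoluteSystemPath("/repo")
+//	joined := root.Join(MakeRelativeSystemPath("cli", "internal"))
+//	// joined == "/repo/cli/internal" on Unix-like systems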
+func (p AbsoluteSystemPath) Join(additional ...RelativeSystemPath) AbsoluteSystemPath { + cast := RelativeSystemPathArray(additional) + return AbsoluteSystemPath(filepath.Join(p.ToString(), filepath.Join(cast.ToStringArray()...))) +} + +// ToStringDuringMigration returns a string representation of this path. +// These instances should eventually be removed. +func (p AbsoluteSystemPath) ToStringDuringMigration() string { + return p.ToString() +} + +// UntypedJoin is a Join that does not constrain the type of the arguments. +// This enables you to pass in strings, but does not protect you from garbage in. +func (p AbsoluteSystemPath) UntypedJoin(args ...string) AbsoluteSystemPath { + return AbsoluteSystemPath(filepath.Join(p.ToString(), filepath.Join(args...))) +} + +// Dir implements filepath.Dir() for an AbsoluteSystemPath +func (p AbsoluteSystemPath) Dir() AbsoluteSystemPath { + return AbsoluteSystemPath(filepath.Dir(p.ToString())) +} + +// Mkdir implements os.Mkdir(p, perm) +func (p AbsoluteSystemPath) Mkdir(perm os.FileMode) error { + return os.Mkdir(p.ToString(), perm) +} + +// MkdirAll implements os.MkdirAll(p, perm) +func (p AbsoluteSystemPath) MkdirAll(perm os.FileMode) error { + return os.MkdirAll(p.ToString(), perm) +} + +// Open implements os.Open(p) for an AbsoluteSystemPath +func (p AbsoluteSystemPath) Open() (*os.File, error) { + return os.Open(p.ToString()) +} + +// OpenFile implements os.OpenFile for an absolute path +func (p AbsoluteSystemPath) OpenFile(flags int, mode os.FileMode) (*os.File, error) { + return os.OpenFile(p.ToString(), flags, mode) +} + +// Lstat implements os.Lstat for absolute path +func (p AbsoluteSystemPath) Lstat() (os.FileInfo, error) { + return os.Lstat(p.ToString()) +} + +// Stat implements os.Stat for absolute path +func (p AbsoluteSystemPath) Stat() (os.FileInfo, error) { + return os.Stat(p.ToString()) +} + +// Findup checks all parent directories for a file. +func (p AbsoluteSystemPath) Findup(name RelativeSystemPath) (AbsoluteSystemPath, error) { + path, err := FindupFrom(name.ToString(), p.ToString()) + + return AbsoluteSystemPath(path), err + +} + +// Exists returns true if the given path exists. +func (p AbsoluteSystemPath) Exists() bool { + _, err := p.Lstat() + return err == nil +} + +// DirExists returns true if the given path exists and is a directory. +func (p AbsoluteSystemPath) DirExists() bool { + info, err := p.Lstat() + return err == nil && info.IsDir() +} + +// FileExists returns true if the given path exists and is a file. +func (p AbsoluteSystemPath) FileExists() bool { + info, err := os.Lstat(p.ToString()) + return err == nil && !info.IsDir() +} + +// ContainsPath returns true if this absolute path is a parent of the +// argument. +func (p AbsoluteSystemPath) ContainsPath(other AbsoluteSystemPath) (bool, error) { + // In Go, filepath.Rel can return a path that starts with "../" or equivalent. + // Checking filesystem-level contains can get extremely complicated + // (see https://github.com/golang/dep/blob/f13583b555deaa6742f141a9c1185af947720d60/internal/fs/fs.go#L33) + // As a compromise, rely on the stdlib to generate a relative path and then check + // if the first step is "../". 
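+ // For example, filepath.Rel("/repo", "/repo/cli") yields "cli" (contained),
+ // while filepath.Rel("/repo", "/tmp") yields "../tmp" (not contained).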
+ rel, err := filepath.Rel(p.ToString(), other.ToString())
+ if err != nil {
+ return false, err
+ }
+ return !strings.HasPrefix(rel, _nonRelativeSentinel), nil
+}
+
+// ReadFile reads the contents of the specified file
+func (p AbsoluteSystemPath) ReadFile() ([]byte, error) {
+ return ioutil.ReadFile(p.ToString())
+}
+
+// VolumeName returns the volume of the specified path
+func (p AbsoluteSystemPath) VolumeName() string {
+ return filepath.VolumeName(p.ToString())
+}
+
+// WriteFile writes the contents of the specified file
+func (p AbsoluteSystemPath) WriteFile(contents []byte, mode os.FileMode) error {
+ return ioutil.WriteFile(p.ToString(), contents, mode)
+}
+
+// EnsureDir ensures that the directory containing this file exists
+func (p AbsoluteSystemPath) EnsureDir() error {
+ dir := p.Dir()
+ err := os.MkdirAll(dir.ToString(), _dirPermissions)
+ if err != nil && dir.FileExists() {
+ // It looks like this is a file and not a directory. Attempt to remove it; this can
+ // happen in some cases if you change a rule from outputting a file to a directory.
+ if err2 := dir.Remove(); err2 == nil {
+ err = os.MkdirAll(dir.ToString(), _dirPermissions)
+ } else {
+ return err
+ }
+ }
+ return err
+}
+
+// MkdirAllMode creates a directory at path, and all necessary parents, ensuring that path has the correct mode set
+func (p AbsoluteSystemPath) MkdirAllMode(mode os.FileMode) error {
+ info, err := p.Lstat()
+ if err == nil {
+ if info.IsDir() && info.Mode() == mode {
+ // Dir exists with the correct mode
+ return nil
+ } else if info.IsDir() {
+ // Dir exists with incorrect mode
+ return os.Chmod(p.ToString(), mode)
+ } else {
+ // Path exists as file, remove it
+ if err := p.Remove(); err != nil {
+ return err
+ }
+ }
+ }
+ if err := os.MkdirAll(p.ToString(), mode); err != nil {
+ return err
+ }
+ // This is necessary only when umask results in creating a directory with permissions different than the one passed by the user
+ return os.Chmod(p.ToString(), mode)
+}
+
+// Create is the AbsoluteSystemPath wrapper for os.Create
+func (p AbsoluteSystemPath) Create() (*os.File, error) {
+ return os.Create(p.ToString())
+}
+
+// Ext implements filepath.Ext(p) for an absolute path
+func (p AbsoluteSystemPath) Ext() string {
+ return filepath.Ext(p.ToString())
+}
+
+// RelativePathString returns the relative path from this AbsoluteSystemPath to another absolute path, as a string
+func (p AbsoluteSystemPath) RelativePathString(path string) (string, error) {
+ return filepath.Rel(p.ToString(), path)
+}
+
+// PathTo returns the relative path between two absolute paths
+// This should likely eventually return an AnchoredSystemPath
+func (p AbsoluteSystemPath) PathTo(other AbsoluteSystemPath) (string, error) {
+ return p.RelativePathString(other.ToString())
+}
+
+// Symlink implements os.Symlink(target, p) for absolute path
+func (p AbsoluteSystemPath) Symlink(target string) error {
+ return os.Symlink(target, p.ToString())
+}
+
+// Readlink implements os.Readlink(p) for an absolute path
+func (p AbsoluteSystemPath) Readlink() (string, error) {
+ return os.Readlink(p.ToString())
+}
+
+// Remove removes the file or (empty) directory at the given path
+func (p AbsoluteSystemPath) Remove() error {
+ return os.Remove(p.ToString())
+}
+
+// RemoveAll implements os.RemoveAll for absolute paths.
+func (p AbsoluteSystemPath) RemoveAll() error {
+ return os.RemoveAll(p.ToString())
+}
+
+// Base implements filepath.Base for an absolute path
+func (p AbsoluteSystemPath) Base() string {
+ return filepath.Base(p.ToString())
+}
+
+// Rename implements os.Rename(p, dest) for absolute paths
+func (p AbsoluteSystemPath) Rename(dest AbsoluteSystemPath) error {
+ return os.Rename(p.ToString(), dest.ToString())
+}
+
+// EvalSymlinks implements filepath.EvalSymlinks for absolute path
+func (p AbsoluteSystemPath) EvalSymlinks() (AbsoluteSystemPath, error) {
+ result, err := filepath.EvalSymlinks(p.ToString())
+ if err != nil {
+ return "", err
+ }
+ return AbsoluteSystemPath(result), nil
+}
+
+// HasPrefix is strings.HasPrefix for paths, ensuring that it matches on separator boundaries.
+// This does NOT perform Clean in advance.
+func (p AbsoluteSystemPath) HasPrefix(prefix AbsoluteSystemPath) bool {
+ prefixLen := len(prefix)
+ pathLen := len(p)
+
+ if prefixLen > pathLen {
+ // Can't be a prefix if longer.
+ return false
+ } else if prefixLen == pathLen {
+ // Can be a prefix if they're equal, but otherwise no.
+ return p == prefix
+ }
+
+ // prefix is definitely shorter than p.
+ // We need to confirm that p[len(prefix)] is a system separator.
+
+ return strings.HasPrefix(p.ToString(), prefix.ToString()) && os.IsPathSeparator(p[prefixLen])
+}
diff --git a/cli/internal/turbopath/absolute_system_path_darwin.go b/cli/internal/turbopath/absolute_system_path_darwin.go
new file mode 100644
index 0000000..e2c3bff
--- /dev/null
+++ b/cli/internal/turbopath/absolute_system_path_darwin.go
@@ -0,0 +1,23 @@
+//go:build darwin
+// +build darwin
+
+// Adapted from https://github.com/containerd/continuity/blob/b4ca35286886296377de39e6eafd1affae019fc3/driver/lchmod_unix.go
+// Copyright The containerd Authors
+// SPDX-License-Identifier: Apache-2.0
+
+package turbopath
+
+import (
+ "os"
+
+ "golang.org/x/sys/unix"
+)
+
+// Lchmod changes the mode of a file not following symlinks.
+func (p AbsoluteSystemPath) Lchmod(mode os.FileMode) error {
+ err := unix.Fchmodat(unix.AT_FDCWD, p.ToString(), uint32(mode), unix.AT_SYMLINK_NOFOLLOW)
+ if err != nil {
+ err = &os.PathError{Op: "lchmod", Path: p.ToString(), Err: err}
+ }
+ return err
+}
diff --git a/cli/internal/turbopath/absolute_system_path_notdarwin.go b/cli/internal/turbopath/absolute_system_path_notdarwin.go
new file mode 100644
index 0000000..1195888
--- /dev/null
+++ b/cli/internal/turbopath/absolute_system_path_notdarwin.go
@@ -0,0 +1,13 @@
+//go:build !darwin
+// +build !darwin
+
+package turbopath
+
+import (
+ "os"
+)
+
+// Lchmod changes the mode of a file not following symlinks.
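+// On non-darwin platforms this is a deliberate no-op: there is no portable
+// lchmod equivalent to call, so symlink modes are left untouched.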
+func (p AbsoluteSystemPath) Lchmod(mode os.FileMode) error {
+ return nil
+}
diff --git a/cli/internal/turbopath/absolute_system_path_test.go b/cli/internal/turbopath/absolute_system_path_test.go
new file mode 100644
index 0000000..4ca36f9
--- /dev/null
+++ b/cli/internal/turbopath/absolute_system_path_test.go
@@ -0,0 +1,174 @@
+package turbopath
+
+import (
+ "os"
+ "runtime"
+ "testing"
+
+ "gotest.tools/v3/assert"
+ "gotest.tools/v3/fs"
+)
+
+func Test_Mkdir(t *testing.T) {
+ type Case struct {
+ name string
+ isDir bool
+ exists bool
+ mode os.FileMode
+ expectedMode os.FileMode
+ }
+
+ cases := []Case{
+ {
+ name: "dir doesn't exist",
+ exists: false,
+ expectedMode: os.ModeDir | 0777,
+ },
+ {
+ name: "path exists as file",
+ exists: true,
+ isDir: false,
+ mode: 0666,
+ expectedMode: os.ModeDir | 0755,
+ },
+ {
+ name: "dir exists with incorrect mode",
+ exists: true,
+ isDir: true,
+ mode: os.ModeDir | 0755,
+ expectedMode: os.ModeDir | 0655,
+ },
+ {
+ name: "dir exists with correct mode",
+ exists: true,
+ isDir: true,
+ mode: os.ModeDir | 0755,
+ expectedMode: os.ModeDir | 0755,
+ },
+ }
+
+ for _, testCase := range cases {
+ testDir := fs.NewDir(t, "system-path-mkdir-test")
+ testName := testCase.name
+ path := testDir.Join("foo")
+ if testCase.isDir {
+ err := os.Mkdir(path, testCase.mode)
+ assert.NilError(t, err, "%s: Mkdir", testName)
+ } else if testCase.exists {
+ file, err := os.Create(path)
+ assert.NilError(t, err, "%s: Create", testName)
+ err = file.Chmod(testCase.mode)
+ assert.NilError(t, err, "%s: Chmod", testName)
+ err = file.Close()
+ assert.NilError(t, err, "%s: Close", testName)
+ }
+
+ testPath := AbsoluteSystemPath(path)
+ err := testPath.MkdirAllMode(testCase.expectedMode)
+ assert.NilError(t, err, "%s: Mkdir", testName)
+
+ stat, err := testPath.Lstat()
+ assert.NilError(t, err, "%s: Lstat", testName)
+ assert.Assert(t, stat.IsDir(), testName)
+
+ if runtime.GOOS == "windows" {
+ // For windows os.Chmod will only change the writable bit so that's all we check
+ assert.Equal(t, stat.Mode().Perm()&0200, testCase.expectedMode.Perm()&0200, testName)
+ } else {
+ assert.Equal(t, stat.Mode(), testCase.expectedMode, testName)
+ }
+ }
+}
+
+func TestAbsoluteSystemPath_Findup(t *testing.T) {
+ tests := []struct {
+ name string
+ fs []AnchoredSystemPath
+ executionDirectory AnchoredSystemPath
+ fileName RelativeSystemPath
+ want AnchoredSystemPath
+ wantErr bool
+ }{
+ {
+ name: "hello world",
+ fs: []AnchoredSystemPath{
+ AnchoredUnixPath("one/two/three/four/.file").ToSystemPath(),
+ AnchoredUnixPath("one/two/three/four/.target").ToSystemPath(),
+ },
+ executionDirectory: AnchoredUnixPath("one/two/three/four").ToSystemPath(),
+ fileName: RelativeUnixPath(".target").ToSystemPath(),
+ want: AnchoredUnixPath("one/two/three/four/.target").ToSystemPath(),
+ },
+ {
+ name: "parent",
+ fs: []AnchoredSystemPath{
+ AnchoredUnixPath("one/two/three/four/.file").ToSystemPath(),
+ AnchoredUnixPath("one/two/three/.target").ToSystemPath(),
+ },
+ executionDirectory: AnchoredUnixPath("one/two/three/four").ToSystemPath(),
+ fileName: RelativeUnixPath(".target").ToSystemPath(),
+ want: AnchoredUnixPath("one/two/three/.target").ToSystemPath(),
+ },
+ {
+ name: "gets the closest",
+ fs: []AnchoredSystemPath{
+ AnchoredUnixPath("one/two/three/four/.file").ToSystemPath(),
+ AnchoredUnixPath("one/two/three/.target").ToSystemPath(),
+ AnchoredUnixPath("one/two/.target").ToSystemPath(),
+ },
+ executionDirectory:
AnchoredUnixPath("one/two/three/four").ToSystemPath(), + fileName: RelativeUnixPath(".target").ToSystemPath(), + want: AnchoredUnixPath("one/two/three/.target").ToSystemPath(), + }, + { + name: "nonexistent", + fs: []AnchoredSystemPath{ + AnchoredUnixPath("one/two/three/four/.file").ToSystemPath(), + }, + executionDirectory: AnchoredUnixPath("one/two/three/four").ToSystemPath(), + fileName: RelativeUnixPath(".nonexistent").ToSystemPath(), + want: "", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + fsRoot := AbsoluteSystemPath(t.TempDir()) + for _, file := range tt.fs { + path := file.RestoreAnchor(fsRoot) + assert.NilError(t, path.Dir().MkdirAll(0777)) + assert.NilError(t, path.WriteFile(nil, 0777)) + } + + got, err := tt.executionDirectory.RestoreAnchor(fsRoot).Findup(tt.fileName) + if tt.wantErr { + assert.ErrorIs(t, err, os.ErrNotExist) + return + } + if got != "" && got != tt.want.RestoreAnchor(fsRoot) { + t.Errorf("AbsoluteSystemPath.Findup() = %v, want %v", got, tt.want) + } + }) + } +} + +func TestJoin(t *testing.T) { + rawRoot, err := os.Getwd() + if err != nil { + t.Fatalf("cwd %v", err) + } + root := AbsoluteSystemPathFromUpstream(rawRoot) + testRoot := root.Join("a", "b", "c") + dot := testRoot.Join(".") + if dot != testRoot { + t.Errorf(". path got %v, want %v", dot, testRoot) + } + + doubleDot := testRoot.Join("..") + expectedDoubleDot := root.Join("a", "b") + if doubleDot != expectedDoubleDot { + t.Errorf(".. path got %v, want %v", doubleDot, expectedDoubleDot) + } +} diff --git a/cli/internal/turbopath/anchored_system_path.go b/cli/internal/turbopath/anchored_system_path.go new file mode 100644 index 0000000..0957ead --- /dev/null +++ b/cli/internal/turbopath/anchored_system_path.go @@ -0,0 +1,75 @@ +package turbopath + +import ( + "os" + "path/filepath" + "strings" +) + +// AnchoredSystemPath is a path stemming from a specified root using system separators. +type AnchoredSystemPath string + +// ToString returns a string represenation of this Path. +// Used for interfacing with APIs that require a string. +func (p AnchoredSystemPath) ToString() string { + return string(p) +} + +// ToStringDuringMigration returns the string representation of this path, and is for +// use in situations where we expect a future path migration to remove the need for the +// string representation +func (p AnchoredSystemPath) ToStringDuringMigration() string { + return string(p) +} + +// ToSystemPath returns itself. +func (p AnchoredSystemPath) ToSystemPath() AnchoredSystemPath { + return p +} + +// ToUnixPath converts a AnchoredSystemPath to a AnchoredUnixPath. +func (p AnchoredSystemPath) ToUnixPath() AnchoredUnixPath { + return AnchoredUnixPath(filepath.ToSlash(p.ToString())) +} + +// RelativeTo calculates the relative path between two AnchoredSystemPath`s. +func (p AnchoredSystemPath) RelativeTo(basePath AnchoredSystemPath) (AnchoredSystemPath, error) { + processed, err := filepath.Rel(basePath.ToString(), p.ToString()) + return AnchoredSystemPath(processed), err +} + +// RestoreAnchor prefixes the AnchoredSystemPath with its anchor to return an AbsoluteSystemPath. +func (p AnchoredSystemPath) RestoreAnchor(anchor AbsoluteSystemPath) AbsoluteSystemPath { + return AbsoluteSystemPath(filepath.Join(anchor.ToString(), p.ToString())) +} + +// Dir returns filepath.Dir for the path. +func (p AnchoredSystemPath) Dir() AnchoredSystemPath { + return AnchoredSystemPath(filepath.Dir(p.ToString())) +} + +// Join appends relative path segments to this AnchoredSystemPath. 
+func (p AnchoredSystemPath) Join(additional ...RelativeSystemPath) AnchoredSystemPath {
+ cast := RelativeSystemPathArray(additional)
+ return AnchoredSystemPath(filepath.Join(p.ToString(), filepath.Join(cast.ToStringArray()...)))
+}
+
+// HasPrefix is strings.HasPrefix for paths, ensuring that it matches on separator boundaries.
+// This does NOT perform Clean in advance.
+func (p AnchoredSystemPath) HasPrefix(prefix AnchoredSystemPath) bool {
+ prefixLen := len(prefix)
+ pathLen := len(p)
+
+ if prefixLen > pathLen {
+ // Can't be a prefix if longer.
+ return false
+ } else if prefixLen == pathLen {
+ // Can be a prefix if they're equal, but otherwise no.
+ return p == prefix
+ }
+
+ // prefix is definitely shorter than p.
+ // We need to confirm that p[len(prefix)] is a system separator.
+
+ return strings.HasPrefix(p.ToString(), prefix.ToString()) && os.IsPathSeparator(p[prefixLen])
+}
diff --git a/cli/internal/turbopath/anchored_unix_path.go b/cli/internal/turbopath/anchored_unix_path.go
new file mode 100644
index 0000000..23e371a
--- /dev/null
+++ b/cli/internal/turbopath/anchored_unix_path.go
@@ -0,0 +1,31 @@
+package turbopath
+
+import (
+ "path"
+ "path/filepath"
+)
+
+// AnchoredUnixPath is a path stemming from a specified root using Unix `/` separators.
+type AnchoredUnixPath string
+
+// ToString returns a string representation of this Path.
+// Used for interfacing with APIs that require a string.
+func (p AnchoredUnixPath) ToString() string {
+ return string(p)
+}
+
+// ToSystemPath converts an AnchoredUnixPath to an AnchoredSystemPath.
+func (p AnchoredUnixPath) ToSystemPath() AnchoredSystemPath {
+ return AnchoredSystemPath(filepath.FromSlash(p.ToString()))
+}
+
+// ToUnixPath returns itself.
+func (p AnchoredUnixPath) ToUnixPath() AnchoredUnixPath {
+ return p
+}
+
+// Join appends relative path segments to this AnchoredUnixPath.
+func (p AnchoredUnixPath) Join(additional ...RelativeUnixPath) AnchoredUnixPath {
+ cast := RelativeUnixPathArray(additional)
+ return AnchoredUnixPath(path.Join(p.ToString(), path.Join(cast.ToStringArray()...)))
+}
diff --git a/cli/internal/turbopath/find_up.go b/cli/internal/turbopath/find_up.go
new file mode 100644
index 0000000..bf7c39c
--- /dev/null
+++ b/cli/internal/turbopath/find_up.go
@@ -0,0 +1,50 @@
+package turbopath
+
+import (
+ "os"
+ "path/filepath"
+)
+
+func hasFile(name, dir string) (bool, error) {
+ files, err := os.ReadDir(dir)
+
+ if err != nil {
+ return false, err
+ }
+
+ for _, f := range files {
+ if name == f.Name() {
+ return true, nil
+ }
+ }
+
+ return false, nil
+}
+
+func findupFrom(name, dir string) (string, error) {
+ for {
+ found, err := hasFile(name, dir)
+
+ if err != nil {
+ return "", err
+ }
+
+ if found {
+ return filepath.Join(dir, name), nil
+ }
+
+ parent := filepath.Dir(dir)
+
+ if parent == dir {
+ return "", nil
+ }
+
+ dir = parent
+ }
+}
+
+// FindupFrom recursively finds a file by walking up parents in the file tree
+// starting from a specific directory.
+func FindupFrom(name, dir string) (string, error) {
+ return findupFrom(name, dir)
+}
diff --git a/cli/internal/turbopath/relative_system_path.go b/cli/internal/turbopath/relative_system_path.go
new file mode 100644
index 0000000..d6115db
--- /dev/null
+++ b/cli/internal/turbopath/relative_system_path.go
@@ -0,0 +1,44 @@
+package turbopath
+
+import (
+ "fmt"
+ "path/filepath"
+)
+
+// RelativeSystemPath is a relative path using system separators.
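+// For example, "src/index.ts" on Unix-like systems is "src\index.ts" on Windows.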
+type RelativeSystemPath string
+
+// CheckedToRelativeSystemPath inspects a string and determines if it is a relative path.
+func CheckedToRelativeSystemPath(s string) (RelativeSystemPath, error) {
+ if filepath.IsAbs(s) {
+ return "", fmt.Errorf("%v is not a relative path", s)
+ }
+ return RelativeSystemPath(filepath.Clean(s)), nil
+}
+
+// MakeRelativeSystemPath joins the given segments in a system-appropriate way
+func MakeRelativeSystemPath(segments ...string) RelativeSystemPath {
+ return RelativeSystemPath(filepath.Join(segments...))
+}
+
+// ToString returns a string representation of this Path.
+// Used for interfacing with APIs that require a string.
+func (p RelativeSystemPath) ToString() string {
+ return string(p)
+}
+
+// ToSystemPath returns itself.
+func (p RelativeSystemPath) ToSystemPath() RelativeSystemPath {
+ return p
+}
+
+// ToUnixPath converts from RelativeSystemPath to RelativeUnixPath.
+func (p RelativeSystemPath) ToUnixPath() RelativeUnixPath {
+ return RelativeUnixPath(filepath.ToSlash(p.ToString()))
+}
+
+// Join appends relative path segments to this RelativeSystemPath.
+func (p RelativeSystemPath) Join(additional ...RelativeSystemPath) RelativeSystemPath {
+ cast := RelativeSystemPathArray(additional)
+ return RelativeSystemPath(filepath.Join(p.ToString(), filepath.Join(cast.ToStringArray()...)))
+}
diff --git a/cli/internal/turbopath/relative_unix_path.go b/cli/internal/turbopath/relative_unix_path.go
new file mode 100644
index 0000000..05829e2
--- /dev/null
+++ b/cli/internal/turbopath/relative_unix_path.go
@@ -0,0 +1,31 @@
+package turbopath
+
+import (
+ "path"
+ "path/filepath"
+)
+
+// RelativeUnixPath is a relative path using Unix `/` separators.
+type RelativeUnixPath string
+
+// ToString returns a string representation of this Path.
+// Used for interfacing with APIs that require a string.
+func (p RelativeUnixPath) ToString() string {
+ return string(p)
+}
+
+// ToSystemPath converts a RelativeUnixPath to a RelativeSystemPath.
+func (p RelativeUnixPath) ToSystemPath() RelativeSystemPath {
+ return RelativeSystemPath(filepath.FromSlash(p.ToString()))
+}
+
+// ToUnixPath returns itself.
+func (p RelativeUnixPath) ToUnixPath() RelativeUnixPath {
+ return p
+}
+
+// Join appends relative path segments to this RelativeUnixPath.
+func (p RelativeUnixPath) Join(additional ...RelativeUnixPath) RelativeUnixPath {
+ cast := RelativeUnixPathArray(additional)
+ return RelativeUnixPath(path.Join(p.ToString(), path.Join(cast.ToStringArray()...)))
+}
diff --git a/cli/internal/turbopath/turbopath.go b/cli/internal/turbopath/turbopath.go
new file mode 100644
index 0000000..f50b75f
--- /dev/null
+++ b/cli/internal/turbopath/turbopath.go
@@ -0,0 +1,112 @@
+// Package turbopath teaches the Go type system about six
+// different types of paths:
+// - AbsoluteSystemPath
+// - RelativeSystemPath
+// - AnchoredSystemPath
+// - AbsoluteUnixPath
+// - RelativeUnixPath
+// - AnchoredUnixPath
+//
+// With these types it is assumed that we will be able to
+// reasonably describe file paths being used within the system and
+// have the type system enforce correctness instead of relying upon
+// runtime code to accomplish the task.
+//
+// Absolute paths are, "absolute, including volume root." They are not
+// portable between System and Unix.
+//
+// Relative paths are simply arbitrary path segments using a particular
+// path delimiter. They are portable between System and Unix.
+//
+// Anchored paths are, "absolute, starting at a particular root."
+// They are not aware of *what* their anchor is. It could be a repository,
+// an `os.dirFS`, a package, `cwd`, or more. They are stored *without*
+// a preceding delimiter for compatibility with `io/fs`. They are portable
+// between System and Unix.
+//
+// In some future world everything in here can be optimized out at compile time.
+// Everything is either `string` or `[]string`.
+//
+// Much of this is dreadfully repetitive because of intentional
+// limitations in the Go type system.
+package turbopath
+
+// AnchoredUnixPathArray is a type used to enable transform operations on arrays of paths.
+type AnchoredUnixPathArray []AnchoredUnixPath
+
+// RelativeSystemPathArray is a type used to enable transform operations on arrays of paths.
+type RelativeSystemPathArray []RelativeSystemPath
+
+// RelativeUnixPathArray is a type used to enable transform operations on arrays of paths.
+type RelativeUnixPathArray []RelativeUnixPath
+
+// ToStringArray enables ergonomic operations on arrays of RelativeSystemPath
+func (source RelativeSystemPathArray) ToStringArray() []string {
+ output := make([]string, len(source))
+ for index, path := range source {
+ output[index] = path.ToString()
+ }
+ return output
+}
+
+// ToStringArray enables ergonomic operations on arrays of RelativeUnixPath
+func (source RelativeUnixPathArray) ToStringArray() []string {
+ output := make([]string, len(source))
+ for index, path := range source {
+ output[index] = path.ToString()
+ }
+ return output
+}
+
+// ToSystemPathArray enables ergonomic operations on arrays of AnchoredUnixPath
+func (source AnchoredUnixPathArray) ToSystemPathArray() []AnchoredSystemPath {
+ output := make([]AnchoredSystemPath, len(source))
+ for index, path := range source {
+ output[index] = path.ToSystemPath()
+ }
+ return output
+}
+
+// The following methods exist to import a path string and cast it to the appropriate
+// type. They exist to communicate intent and make it explicit that this is an
+// intentional action, not a "helpful" insertion by the IDE.
+//
+// This is intended to map closely to the `unsafe` keyword, without the denotative
+// meaning of `unsafe` in English. These are "trust me, I've checked it" places,
+// intended to mark the places where we smuggle paths from outside the world of safe
+// path handling into the world where we carefully consider the path to ensure safety.
+
+// AbsoluteSystemPathFromUpstream takes a path string and casts it to an
+// AbsoluteSystemPath without checking. If the input to this function is
+// not an AbsoluteSystemPath it will result in downstream errors.
+func AbsoluteSystemPathFromUpstream(path string) AbsoluteSystemPath {
+ return AbsoluteSystemPath(path)
+}
+
+// AnchoredSystemPathFromUpstream takes a path string and casts it to an
+// AnchoredSystemPath without checking. If the input to this function is
+// not an AnchoredSystemPath it will result in downstream errors.
+func AnchoredSystemPathFromUpstream(path string) AnchoredSystemPath {
+ return AnchoredSystemPath(path)
+}
+
+// AnchoredUnixPathFromUpstream takes a path string and casts it to an
+// AnchoredUnixPath without checking. If the input to this function is
+// not an AnchoredUnixPath it will result in downstream errors.
+func AnchoredUnixPathFromUpstream(path string) AnchoredUnixPath {
+ return AnchoredUnixPath(path)
+}
+
+// RelativeSystemPathFromUpstream takes a path string and casts it to a
+// RelativeSystemPath without checking.
If the input to this function is
+// not a RelativeSystemPath it will result in downstream errors.
+func RelativeSystemPathFromUpstream(path string) RelativeSystemPath {
+ return RelativeSystemPath(path)
+}
+
+// RelativeUnixPathFromUpstream takes a path string and casts it to a
+// RelativeUnixPath without checking. If the input to this function is
+// not a RelativeUnixPath it will result in downstream errors.
+func RelativeUnixPathFromUpstream(path string) RelativeUnixPath {
+ return RelativeUnixPath(path)
+}
diff --git a/cli/internal/turbostate/turbostate.go b/cli/internal/turbostate/turbostate.go
new file mode 100644
index 0000000..dad5b47
--- /dev/null
+++ b/cli/internal/turbostate/turbostate.go
@@ -0,0 +1,141 @@
+// Package turbostate holds all of the state given from the Rust CLI
+// that is necessary to execute turbo. We transfer this state from Rust
+// to Go via a JSON payload.
+package turbostate
+
+import (
+ "fmt"
+
+ "github.com/vercel/turbo/cli/internal/util"
+)
+
+// RepoState is the state for a repository. Consists of the root for the repo
+// along with the mode (single package or multi package)
+type RepoState struct {
+ Root string `json:"root"`
+ Mode string `json:"mode"`
+}
+
+// DaemonPayload is the extra flags and command that are
+// passed for the `daemon` subcommand
+type DaemonPayload struct {
+ IdleTimeout string `json:"idle_time"`
+ JSON bool `json:"json"`
+}
+
+// PrunePayload is the extra flags passed for the `prune` subcommand
+type PrunePayload struct {
+ Scope []string `json:"scope"`
+ Docker bool `json:"docker"`
+ OutputDir string `json:"output_dir"`
+}
+
+// RunPayload is the extra flags passed for the `run` subcommand
+type RunPayload struct {
+ CacheDir string `json:"cache_dir"`
+ CacheWorkers int `json:"cache_workers"`
+ Concurrency string `json:"concurrency"`
+ ContinueExecution bool `json:"continue_execution"`
+ DryRun string `json:"dry_run"`
+ Filter []string `json:"filter"`
+ Force bool `json:"force"`
+ GlobalDeps []string `json:"global_deps"`
+ EnvMode util.EnvMode `json:"env_mode"`
+ // NOTE: Graph has three effective states that are modeled using a *string:
+ // nil -> no flag passed
+ // "" -> flag passed but no file name attached: print to stdout
+ // "foo" -> flag passed and file name attached: emit to file
+ // The mirror for this in Rust is `Option` with the default value
+ // for the flag being `Some("")`.
+ Graph *string `json:"graph"`
+ Ignore []string `json:"ignore"`
+ IncludeDependencies bool `json:"include_dependencies"`
+ NoCache bool `json:"no_cache"`
+ NoDaemon bool `json:"no_daemon"`
+ NoDeps bool `json:"no_deps"`
+ Only bool `json:"only"`
+ OutputLogs string `json:"output_logs"`
+ PassThroughArgs []string `json:"pass_through_args"`
+ Parallel bool `json:"parallel"`
+ Profile string `json:"profile"`
+ RemoteOnly bool `json:"remote_only"`
+ Scope []string `json:"scope"`
+ Since string `json:"since"`
+ SinglePackage bool `json:"single_package"`
+ Summarize bool `json:"summarize"`
+ Tasks []string `json:"tasks"`
+ PkgInferenceRoot string `json:"pkg_inference_root"`
+ LogPrefix string `json:"log_prefix"`
+ ExperimentalSpaceID string `json:"experimental_space_id"`
+}
+
+// Command consists of the data necessary to run a command.
+// Only one of these fields should be initialized at a time.
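+// For example, `turbo run build --force` arrives from the Rust shim with only
+// the `run` field populated (a sketch; the values are illustrative):
+//
+//	{"daemon": null, "prune": null, "run": {"tasks": ["build"], "force": true, ...}}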
+type Command struct {
+ Daemon *DaemonPayload `json:"daemon"`
+ Prune *PrunePayload `json:"prune"`
+ Run *RunPayload `json:"run"`
+}
+
+// ParsedArgsFromRust are the parsed command line arguments passed
+// from the Rust shim
+type ParsedArgsFromRust struct {
+ API string `json:"api"`
+ Color bool `json:"color"`
+ CPUProfile string `json:"cpu_profile"`
+ CWD string `json:"cwd"`
+ Heap string `json:"heap"`
+ Login string `json:"login"`
+ NoColor bool `json:"no_color"`
+ Preflight bool `json:"preflight"`
+ RemoteCacheTimeout uint64 `json:"remote_cache_timeout"`
+ Team string `json:"team"`
+ Token string `json:"token"`
+ Trace string `json:"trace"`
+ Verbosity int `json:"verbosity"`
+ TestRun bool `json:"test_run"`
+ Command Command `json:"command"`
+}
+
+// GetColor returns the value of the `color` flag.
+func (a ParsedArgsFromRust) GetColor() bool {
+ return a.Color
+}
+
+// GetNoColor returns the value of the `no-color` flag.
+func (a ParsedArgsFromRust) GetNoColor() bool {
+ return a.NoColor
+}
+
+// GetLogin returns the value of the `login` flag.
+func (a ParsedArgsFromRust) GetLogin() (string, error) {
+ return a.Login, nil
+}
+
+// GetAPI returns the value of the `api` flag.
+func (a ParsedArgsFromRust) GetAPI() (string, error) {
+ return a.API, nil
+}
+
+// GetTeam returns the value of the `team` flag.
+func (a ParsedArgsFromRust) GetTeam() (string, error) {
+ return a.Team, nil
+}
+
+// GetToken returns the value of the `token` flag.
+func (a ParsedArgsFromRust) GetToken() (string, error) {
+ return a.Token, nil
+}
+
+// GetCwd returns the value of the `cwd` flag.
+func (a ParsedArgsFromRust) GetCwd() (string, error) {
+ return a.CWD, nil
+}
+
+// GetRemoteCacheTimeout returns the value of the `remote-cache-timeout` flag.
+func (a ParsedArgsFromRust) GetRemoteCacheTimeout() (uint64, error) {
+ if a.RemoteCacheTimeout != 0 {
+ return a.RemoteCacheTimeout, nil
+ }
+ return 0, fmt.Errorf("no remote cache timeout provided")
+}
diff --git a/cli/internal/ui/charset.go b/cli/internal/ui/charset.go
new file mode 100644
index 0000000..0207c10
--- /dev/null
+++ b/cli/internal/ui/charset.go
@@ -0,0 +1,3 @@
+package ui
+
+var charset = []string{" ", "> ", ">> ", ">>>"}
diff --git a/cli/internal/ui/colors.go b/cli/internal/ui/colors.go
new file mode 100644
index 0000000..4b2eccd
--- /dev/null
+++ b/cli/internal/ui/colors.go
@@ -0,0 +1,54 @@
+package ui
+
+import (
+ "os"
+
+ "github.com/fatih/color"
+)
+
+type ColorMode int
+
+const (
+ ColorModeUndefined ColorMode = iota + 1
+ ColorModeSuppressed
+ ColorModeForced
+)
+
+func GetColorModeFromEnv() ColorMode {
+ // The FORCE_COLOR behavior and accepted values are taken from the supports-color NodeJS package:
+ // The accepted values as documented are "0" to disable, and "1", "2", or "3" to force-enable color
+ // at the specified support level (1 = 16 colors, 2 = 256 colors, 3 = 16M colors).
+ // We don't currently use the level for anything specific, and just treat things as on and off.
+ //
+ // Note: while "false" and "true" aren't documented, the library coerces these values to 0 and 1
+ // respectively, so that behavior is reproduced here as well.
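+ // For example, FORCE_COLOR=0 maps to ColorModeSuppressed and FORCE_COLOR=3
+ // maps to ColorModeForced, while any other value falls through to
+ // ColorModeUndefined.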
+ // https://www.npmjs.com/package/supports-color + + switch forceColor := os.Getenv("FORCE_COLOR"); { + case forceColor == "false" || forceColor == "0": + return ColorModeSuppressed + case forceColor == "true" || forceColor == "1" || forceColor == "2" || forceColor == "3": + return ColorModeForced + default: + return ColorModeUndefined + } +} + +func applyColorMode(colorMode ColorMode) ColorMode { + switch colorMode { + case ColorModeForced: + color.NoColor = false + case ColorModeSuppressed: + color.NoColor = true + case ColorModeUndefined: + default: + // color.NoColor already gets its default value based on + // isTTY and/or the presence of the NO_COLOR env variable. + } + + if color.NoColor { + return ColorModeSuppressed + } else { + return ColorModeForced + } +} diff --git a/cli/internal/ui/spinner.go b/cli/internal/ui/spinner.go new file mode 100644 index 0000000..6e47d2d --- /dev/null +++ b/cli/internal/ui/spinner.go @@ -0,0 +1,80 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +package ui + +import ( + "fmt" + "io" + "os" + "time" + + "github.com/briandowns/spinner" +) + +// startStopper is the interface to interact with the spinner. +type startStopper interface { + Start() + Stop() +} + +// Spinner represents an indicator that an asynchronous operation is taking place. +// +// For short operations, less than 4 seconds, display only the spinner with the Start and Stop methods. +// For longer operations, display intermediate progress events using the Events method. +type Spinner struct { + spin startStopper +} + +// NewSpinner returns a spinner that outputs to w. +func NewSpinner(w io.Writer) *Spinner { + interval := 125 * time.Millisecond + if os.Getenv("CI") == "true" { + interval = 30 * time.Second + } + s := spinner.New(charset, interval, spinner.WithHiddenCursor(true)) + s.Writer = w + s.Color("faint") + return &Spinner{ + spin: s, + } +} + +// Start starts the spinner suffixed with a label. +func (s *Spinner) Start(label string) { + s.suffix(fmt.Sprintf(" %s", label)) + s.spin.Start() +} + +// Stop stops the spinner and replaces it with a label. +func (s *Spinner) Stop(label string) { + s.finalMSG(fmt.Sprint(label)) + s.spin.Stop() +} + +func (s *Spinner) lock() { + if spinner, ok := s.spin.(*spinner.Spinner); ok { + spinner.Lock() + } +} + +func (s *Spinner) unlock() { + if spinner, ok := s.spin.(*spinner.Spinner); ok { + spinner.Unlock() + } +} + +func (s *Spinner) suffix(label string) { + s.lock() + defer s.unlock() + if spinner, ok := s.spin.(*spinner.Spinner); ok { + spinner.Suffix = label + } +} + +func (s *Spinner) finalMSG(label string) { + s.lock() + defer s.unlock() + if spinner, ok := s.spin.(*spinner.Spinner); ok { + spinner.FinalMSG = label + } +} diff --git a/cli/internal/ui/term/cursor.go b/cli/internal/ui/term/cursor.go new file mode 100644 index 0000000..253f043 --- /dev/null +++ b/cli/internal/ui/term/cursor.go @@ -0,0 +1,73 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +// Package cursor provides functionality to interact with the terminal cursor. +package cursor + +import ( + "io" + "os" + + "github.com/AlecAivazis/survey/v2/terminal" +) + +type cursor interface { + Up(n int) error + Down(n int) error + Hide() error + Show() error +} + +// fakeFileWriter is a terminal.FileWriter. +// If the underlying writer w does not implement Fd() then a dummy value is returned. 
+type fakeFileWriter struct { + w io.Writer +} + +// Write delegates to the internal writer. +func (w *fakeFileWriter) Write(p []byte) (int, error) { + return w.w.Write(p) +} + +// Fd is required to be implemented to satisfy the terminal.FileWriter interface. +// If the underlying writer is a file, like os.Stdout, then invoke it. Otherwise, this method allows us to create +// a Cursor that can write to any io.Writer like a bytes.Buffer by returning a dummy value. +func (w *fakeFileWriter) Fd() uintptr { + if v, ok := w.w.(terminal.FileWriter); ok { + return v.Fd() + } + return 0 +} + +// Cursor represents the terminal's cursor. +type Cursor struct { + c cursor +} + +// New creates a new cursor that writes to stderr. +func New() *Cursor { + return &Cursor{ + c: &terminal.Cursor{ + Out: os.Stderr, + }, + } +} + +// EraseLine erases a line from a FileWriter. +func EraseLine(fw terminal.FileWriter) { + terminal.EraseLine(fw, terminal.ERASE_LINE_ALL) +} + +// EraseLinesAbove erases a line and moves the cursor up from fw, repeated n times. +func EraseLinesAbove(fw terminal.FileWriter, n int) { + c := Cursor{ + c: &terminal.Cursor{ + Out: fw, + }, + } + for i := 0; i < n; i += 1 { + EraseLine(fw) + c.c.Up(1) + } + EraseLine(fw) // Erase the nth line as well. +} diff --git a/cli/internal/ui/term/cursor_test.go b/cli/internal/ui/term/cursor_test.go new file mode 100644 index 0000000..270ebe8 --- /dev/null +++ b/cli/internal/ui/term/cursor_test.go @@ -0,0 +1,43 @@ +//go:build !windows +// +build !windows + +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +package cursor + +import ( + "io" + "strings" + "testing" + + "github.com/AlecAivazis/survey/v2/terminal" + "github.com/stretchr/testify/require" +) + +func TestEraseLine(t *testing.T) { + testCases := map[string]struct { + inWriter func(writer io.Writer) terminal.FileWriter + shouldErase bool + }{ + "should erase a line if the writer is a file": { + inWriter: func(writer io.Writer) terminal.FileWriter { + return &fakeFileWriter{w: writer} + }, + shouldErase: true, + }, + } + + for name, tc := range testCases { + t.Run(name, func(t *testing.T) { + // GIVEN + buf := new(strings.Builder) + + // WHEN + EraseLine(tc.inWriter(buf)) + + // THEN + isErased := buf.String() != "" + require.Equal(t, tc.shouldErase, isErased) + }) + } +} diff --git a/cli/internal/ui/ui.go b/cli/internal/ui/ui.go new file mode 100644 index 0000000..9084c76 --- /dev/null +++ b/cli/internal/ui/ui.go @@ -0,0 +1,121 @@ +package ui + +import ( + "fmt" + "io" + "math" + "os" + "regexp" + "strings" + + "github.com/fatih/color" + "github.com/mattn/go-isatty" + "github.com/mitchellh/cli" + "github.com/vercel/turbo/cli/internal/ci" +) + +const ansiEscapeStr = "[\u001B\u009B][[\\]()#;?]*(?:(?:(?:[a-zA-Z\\d]*(?:;[a-zA-Z\\d]*)*)?\u0007)|(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PRZcf-ntqry=><~]))" + +// IsTTY is true when stdout appears to be a tty +var IsTTY = isatty.IsTerminal(os.Stdout.Fd()) || isatty.IsCygwinTerminal(os.Stdout.Fd()) + +// IsCI is true when we appear to be running in a non-interactive context. 
+var IsCI = !IsTTY || ci.IsCi()
+var gray = color.New(color.Faint)
+var bold = color.New(color.Bold)
+var ERROR_PREFIX = color.New(color.Bold, color.FgRed, color.ReverseVideo).Sprint(" ERROR ")
+var WARNING_PREFIX = color.New(color.Bold, color.FgYellow, color.ReverseVideo).Sprint(" WARNING ")
+
+// InfoPrefix is a colored string for info level log messages
+var InfoPrefix = color.New(color.Bold, color.FgWhite, color.ReverseVideo).Sprint(" INFO ")
+
+var ansiRegex = regexp.MustCompile(ansiEscapeStr)
+
+// Dim prints out dimmed text
+func Dim(str string) string {
+ return gray.Sprint(str)
+}
+
+func Bold(str string) string {
+ return bold.Sprint(str)
+}
+
+// Adapted from go-rainbow
+// Copyright (c) 2017 Raphael Amorim
+// Source: https://github.com/raphamorim/go-rainbow
+// SPDX-License-Identifier: MIT
+func rgb(i int) (int, int, int) {
+ var f = 0.275
+
+ return int(math.Sin(f*float64(i)+4*math.Pi/3)*127 + 128),
+ // int(math.Sin(f*float64(i)+2*math.Pi/3)*127 + 128),
+ int(45),
+ int(math.Sin(f*float64(i)+0)*127 + 128)
+}
+
+// Rainbow returns a formatted colorized string ready to print to the shell/terminal
+//
+// Adapted from go-rainbow
+// Copyright (c) 2017 Raphael Amorim
+// Source: https://github.com/raphamorim/go-rainbow
+// SPDX-License-Identifier: MIT
+func Rainbow(text string) string {
+ var rainbowStr []string
+ for index, value := range text {
+ r, g, b := rgb(index)
+ str := fmt.Sprintf("\033[1m\033[38;2;%d;%d;%dm%c\033[0m\033[0;1m", r, g, b, value)
+ rainbowStr = append(rainbowStr, str)
+ }
+
+ return strings.Join(rainbowStr, "")
+}
+
+type stripAnsiWriter struct {
+ wrappedWriter io.Writer
+}
+
+func (into *stripAnsiWriter) Write(p []byte) (int, error) {
+ n, err := into.wrappedWriter.Write(ansiRegex.ReplaceAll(p, []byte{}))
+ if err != nil {
+ // The number of bytes returned here isn't directly related to the input bytes
+ // if ansi color codes were being stripped out, but we are counting on Stdout.Write
+ // not failing under typical operation as well.
+ return n, err
+ }
+
+ // Write must return a non-nil error if it returns n < len(p). Consequently, if the
+ // wrappedWriter.Write call succeeded we will return len(p) as the number of bytes
+ // written.
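+ // e.g. writing "\x1b[31mfail\x1b[0m" emits just "fail" downstream, while the
+ // caller is still told that the full input was written.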
+ return len(p), nil
+}
+
+// Default returns the default colored ui
+func Default() *cli.ColoredUi {
+ return BuildColoredUi(ColorModeUndefined)
+}
+
+func BuildColoredUi(colorMode ColorMode) *cli.ColoredUi {
+ colorMode = applyColorMode(colorMode)
+
+ var outWriter, errWriter io.Writer
+
+ if colorMode == ColorModeSuppressed {
+ outWriter = &stripAnsiWriter{wrappedWriter: os.Stdout}
+ errWriter = &stripAnsiWriter{wrappedWriter: os.Stderr}
+ } else {
+ outWriter = os.Stdout
+ errWriter = os.Stderr
+ }
+
+ return &cli.ColoredUi{
+ Ui: &cli.BasicUi{
+ Reader: os.Stdin,
+ Writer: outWriter,
+ ErrorWriter: errWriter,
+ },
+ OutputColor: cli.UiColorNone,
+ InfoColor: cli.UiColorNone,
+ WarnColor: cli.UiColor{Code: int(color.FgYellow), Bold: false},
+ ErrorColor: cli.UiColorRed,
+ }
+}
diff --git a/cli/internal/util/backends.go b/cli/internal/util/backends.go
new file mode 100644
index 0000000..66941ad
--- /dev/null
+++ b/cli/internal/util/backends.go
@@ -0,0 +1,30 @@
+package util
+
+import (
+ "fmt"
+ "io/ioutil"
+ "path/filepath"
+
+ "github.com/vercel/turbo/cli/internal/yaml"
+)
+
+// YarnRC represents the contents of .yarnrc.yml
+type YarnRC struct {
+ NodeLinker string `yaml:"nodeLinker"`
+}
+
+// IsNMLinker checks that Yarn is set to use the node-modules linker style
+func IsNMLinker(cwd string) (bool, error) {
+ yarnRC := &YarnRC{}
+
+ bytes, err := ioutil.ReadFile(filepath.Join(cwd, ".yarnrc.yml"))
+ if err != nil {
+ return false, fmt.Errorf(".yarnrc.yml: %w", err)
+ }
+
+ if err := yaml.Unmarshal(bytes, yarnRC); err != nil {
+ return false, fmt.Errorf(".yarnrc.yml: %w", err)
+ }
+
+ return yarnRC.NodeLinker == "node-modules", nil
+}
diff --git a/cli/internal/util/browser/open.go b/cli/internal/util/browser/open.go
new file mode 100644
index 0000000..a6171e9
--- /dev/null
+++ b/cli/internal/util/browser/open.go
@@ -0,0 +1,37 @@
+package browser
+
+import (
+ "fmt"
+ "os/exec"
+ "runtime"
+)
+
+// OpenBrowser attempts to interactively open a browser window at the given URL
+func OpenBrowser(url string) error {
+ var err error
+
+ switch runtime.GOOS {
+ case "linux":
+ if posixBinExists("wslview") {
+ err = exec.Command("wslview", url).Start()
+ } else {
+ err = exec.Command("xdg-open", url).Start()
+ }
+ case "windows":
+ err = exec.Command("rundll32", "url.dll,FileProtocolHandler", url).Start()
+ case "darwin":
+ err = exec.Command("open", url).Start()
+ default:
+ err = fmt.Errorf("unsupported platform")
+ }
+ if err != nil {
+ return err
+ }
+ return nil
+}
+
+func posixBinExists(bin string) bool {
+ err := exec.Command("which", bin).Run()
+ // we mostly don't care what the error is, it suggests the binary is not usable
+ return err == nil
+}
diff --git a/cli/internal/util/closer.go b/cli/internal/util/closer.go
new file mode 100644
index 0000000..996760b
--- /dev/null
+++ b/cli/internal/util/closer.go
@@ -0,0 +1,15 @@
+package util
+
+// CloseAndIgnoreError is a utility to tell our linter that we explicitly deem it okay
+// to not check a particular error on closing of a resource.
+//
+// We use `errcheck` as a linter, which is super-opinionated about checking errors,
+// even in places where we don't necessarily care to check the error.
+//
+// `golangci-lint` has a default ignore list for this lint problem (EXC0001) which
+// can be used to sidestep this problem but it's possibly a little too-heavy-handed
+// in exclusion. At the expense of discoverability, this utility function forces
+// opt-in to ignoring errors on closing of things that can be `Close`d.
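+//
+// Typical usage from a caller:
+//
+//	f, err := path.Open()
+//	if err != nil {
+//		return err
+//	}
+//	defer util.CloseAndIgnoreError(f)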
+func CloseAndIgnoreError(closer interface{ Close() error }) {
+	_ = closer.Close()
+}
diff --git a/cli/internal/util/cmd.go b/cli/internal/util/cmd.go
new file mode 100644
index 0000000..ae79aa0
--- /dev/null
+++ b/cli/internal/util/cmd.go
@@ -0,0 +1,24 @@
+package util
+
+import (
+	"bytes"
+
+	"github.com/spf13/cobra"
+)
+
+// ExitCodeError is a specific error that is returned by the command to specify the exit code
+type ExitCodeError struct {
+	ExitCode int
+}
+
+func (e *ExitCodeError) Error() string { return "exit code error" }
+
+// HelpForCobraCmd returns the help string for a given command.
+// Note that this overwrites the output writer for the command.
+func HelpForCobraCmd(cmd *cobra.Command) string {
+	f := cmd.HelpFunc()
+	buf := bytes.NewBufferString("")
+	cmd.SetOut(buf)
+	f(cmd, []string{})
+	return buf.String()
+}
diff --git a/cli/internal/util/filter/filter.go b/cli/internal/util/filter/filter.go
new file mode 100644
index 0000000..fbc475d
--- /dev/null
+++ b/cli/internal/util/filter/filter.go
@@ -0,0 +1,133 @@
+// Copyright (c) 2015-2020 InfluxData Inc. MIT License (MIT)
+// https://github.com/influxdata/telegraf
+package filter
+
+// Filter matches a string against a compiled filter list.
+type Filter interface {
+	Match(string) bool
+}
+
+// Compile takes a list of string filters and returns a Filter interface
+// for matching a given string against the filter list. The filter list
+// supports glob matching too, e.g.:
+//
+//	f, _ := Compile([]string{"cpu", "mem", "net*"})
+//	f.Match("cpu") // true
+//	f.Match("network") // true
+//	f.Match("memory") // false
+func Compile(filters []string) (Filter, error) {
+	// return nil if there is nothing to compile
+	if len(filters) == 0 {
+		return nil, nil
+	}
+
+	// check if we can compile a non-glob filter
+	noGlob := true
+	for _, filter := range filters {
+		if hasMeta(filter) {
+			noGlob = false
+			break
+		}
+	}
+
+	switch {
+	case noGlob:
+		// return non-globbing filter if not needed.
+		return compileFilterNoGlob(filters), nil
+	case len(filters) == 1:
+		return glob.Compile(filters[0])
+	default:
+		return glob.Compile("{" + strings.Join(filters, ",") + "}")
+	}
+}
+
+// hasMeta reports whether s contains any magic glob characters.
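+// For example (illustrative, not part of the original change),
+// hasMeta("net*") is true while hasMeta("cpu") is false, which is what lets
+// Compile fall back to plain string comparison when no globs are present.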
+func hasMeta(s string) bool { + return strings.ContainsAny(s, "*?[") +} + +type filter struct { + m map[string]struct{} +} + +func (f *filter) Match(s string) bool { + _, ok := f.m[s] + return ok +} + +type filtersingle struct { + s string +} + +func (f *filtersingle) Match(s string) bool { + return f.s == s +} + +func compileFilterNoGlob(filters []string) Filter { + if len(filters) == 1 { + return &filtersingle{s: filters[0]} + } + out := filter{m: make(map[string]struct{})} + for _, filter := range filters { + out.m[filter] = struct{}{} + } + return &out +} + +type IncludeExcludeFilter struct { + include Filter + exclude Filter + includeDefault bool + excludeDefault bool +} + +func NewIncludeExcludeFilter( + include []string, + exclude []string, +) (Filter, error) { + return NewIncludeExcludeFilterDefaults(include, exclude, true, false) +} + +func NewIncludeExcludeFilterDefaults( + include []string, + exclude []string, + includeDefault bool, + excludeDefault bool, +) (Filter, error) { + in, err := Compile(include) + if err != nil { + return nil, err + } + + ex, err := Compile(exclude) + if err != nil { + return nil, err + } + + return &IncludeExcludeFilter{in, ex, includeDefault, excludeDefault}, nil +} + +func (f *IncludeExcludeFilter) Match(s string) bool { + if f.include != nil { + if !f.include.Match(s) { + return false + } + } else if !f.includeDefault { + return false + } + + if f.exclude != nil { + if f.exclude.Match(s) { + return false + } + } else if f.excludeDefault { + return false + } + + return true +} diff --git a/cli/internal/util/filter/filter_test.go b/cli/internal/util/filter/filter_test.go new file mode 100644 index 0000000..727a4b6 --- /dev/null +++ b/cli/internal/util/filter/filter_test.go @@ -0,0 +1,116 @@ +// Copyright (c) 2015-2020 InfluxData Inc. 
MIT License (MIT) +// https://github.com/influxdata/telegraf +package filter + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestCompile(t *testing.T) { + f, err := Compile([]string{}) + assert.NoError(t, err) + assert.Nil(t, f) + + f, err = Compile([]string{"cpu"}) + assert.NoError(t, err) + assert.True(t, f.Match("cpu")) + assert.False(t, f.Match("cpu0")) + assert.False(t, f.Match("mem")) + + f, err = Compile([]string{"cpu*"}) + assert.NoError(t, err) + assert.True(t, f.Match("cpu")) + assert.True(t, f.Match("cpu0")) + assert.False(t, f.Match("mem")) + + f, err = Compile([]string{"cpu", "mem"}) + assert.NoError(t, err) + assert.True(t, f.Match("cpu")) + assert.False(t, f.Match("cpu0")) + assert.True(t, f.Match("mem")) + + f, err = Compile([]string{"cpu", "mem", "net*"}) + assert.NoError(t, err) + assert.True(t, f.Match("cpu")) + assert.False(t, f.Match("cpu0")) + assert.True(t, f.Match("mem")) + assert.True(t, f.Match("network")) +} + +func TestIncludeExclude(t *testing.T) { + tags := []string{} + labels := []string{"best", "com_influxdata", "timeseries", "com_influxdata_telegraf", "ever"} + + filter, err := NewIncludeExcludeFilter([]string{}, []string{"com_influx*"}) + if err != nil { + t.Fatalf("Failed to create include/exclude filter - %v", err) + } + + for i := range labels { + if filter.Match(labels[i]) { + tags = append(tags, labels[i]) + } + } + + assert.Equal(t, []string{"best", "timeseries", "ever"}, tags) +} + +var benchbool bool + +func BenchmarkFilterSingleNoGlobFalse(b *testing.B) { + f, _ := Compile([]string{"cpu"}) + var tmp bool + for n := 0; n < b.N; n++ { + tmp = f.Match("network") + } + benchbool = tmp +} + +func BenchmarkFilterSingleNoGlobTrue(b *testing.B) { + f, _ := Compile([]string{"cpu"}) + var tmp bool + for n := 0; n < b.N; n++ { + tmp = f.Match("cpu") + } + benchbool = tmp +} + +func BenchmarkFilter(b *testing.B) { + f, _ := Compile([]string{"cpu", "mem", "net*"}) + var tmp bool + for n := 0; n < b.N; n++ { + tmp = f.Match("network") + } + benchbool = tmp +} + +func BenchmarkFilterNoGlob(b *testing.B) { + f, _ := Compile([]string{"cpu", "mem", "net"}) + var tmp bool + for n := 0; n < b.N; n++ { + tmp = f.Match("net") + } + benchbool = tmp +} + +func BenchmarkFilter2(b *testing.B) { + f, _ := Compile([]string{"aa", "bb", "c", "ad", "ar", "at", "aq", + "aw", "az", "axxx", "ab", "cpu", "mem", "net*"}) + var tmp bool + for n := 0; n < b.N; n++ { + tmp = f.Match("network") + } + benchbool = tmp +} + +func BenchmarkFilter2NoGlob(b *testing.B) { + f, _ := Compile([]string{"aa", "bb", "c", "ad", "ar", "at", "aq", + "aw", "az", "axxx", "ab", "cpu", "mem", "net"}) + var tmp bool + for n := 0; n < b.N; n++ { + tmp = f.Match("net") + } + benchbool = tmp +} diff --git a/cli/internal/util/graph.go b/cli/internal/util/graph.go new file mode 100644 index 0000000..89de18c --- /dev/null +++ b/cli/internal/util/graph.go @@ -0,0 +1,35 @@ +package util + +import ( + "fmt" + "strings" + + "github.com/pyr-sh/dag" +) + +// ValidateGraph checks that a given DAG has no cycles and no self-referential edges. +// We differ from the underlying DAG Validate method in that we allow multiple roots. +func ValidateGraph(graph *dag.AcyclicGraph) error { + // We use Cycles instead of Validate because + // our DAG has multiple roots (entrypoints). + // Validate mandates that there is only a single root node. 
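+	//
+	// Illustrative aside (not part of the original change): for a graph with
+	// edges a -> b and b -> a, the error constructed below renders roughly as
+	// (vertex order within a cycle may vary):
+	//
+	//	cyclic dependency detected:
+	//		a,b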
+	cycles := graph.Cycles()
+	if len(cycles) > 0 {
+		cycleLines := make([]string, len(cycles))
+		for i, cycle := range cycles {
+			vertices := make([]string, len(cycle))
+			for j, vertex := range cycle {
+				vertices[j] = vertex.(string)
+			}
+			cycleLines[i] = "\t" + strings.Join(vertices, ",")
+		}
+		return fmt.Errorf("cyclic dependency detected:\n%s", strings.Join(cycleLines, "\n"))
+	}
+
+	for _, e := range graph.Edges() {
+		if e.Source() == e.Target() {
+			return fmt.Errorf("%s depends on itself", e.Source())
+		}
+	}
+	return nil
+}
diff --git a/cli/internal/util/modulo.go b/cli/internal/util/modulo.go
new file mode 100644
index 0000000..ec2957a
--- /dev/null
+++ b/cli/internal/util/modulo.go
@@ -0,0 +1,13 @@
+package util
+
+// PositiveMod returns x modulo d, normalized to be non-negative (unlike Go's % operator, whose result takes the sign of x)
+func PositiveMod(x, d int) int {
+	x = x % d
+	if x >= 0 {
+		return x
+	}
+	if d < 0 {
+		return x - d
+	}
+	return x + d
+}
diff --git a/cli/internal/util/parse_concurrency.go b/cli/internal/util/parse_concurrency.go
new file mode 100644
index 0000000..6917600
--- /dev/null
+++ b/cli/internal/util/parse_concurrency.go
@@ -0,0 +1,39 @@
+package util
+
+import (
+	"fmt"
+	"math"
+	"runtime"
+	"strconv"
+	"strings"
+)
+
+var (
+	// alias so we can mock in tests
+	runtimeNumCPU = runtime.NumCPU
+	// sign argument for math.IsInf; positive values check for +Inf
+	_positiveInfinity = 1
+)
+
+// ParseConcurrency parses a concurrency value, which can be a number (e.g. 2) or a percentage (e.g. 50%).
+func ParseConcurrency(concurrencyRaw string) (int, error) {
+	if strings.HasSuffix(concurrencyRaw, "%") {
+		if percent, err := strconv.ParseFloat(concurrencyRaw[:len(concurrencyRaw)-1], 64); err != nil {
+			return 0, fmt.Errorf("invalid value for --concurrency CLI flag. This should be a number --concurrency=4 or percentage of CPU cores --concurrency=50%% : %w", err)
+		} else {
+			if percent > 0 && !math.IsInf(percent, _positiveInfinity) {
+				return int(math.Max(1, float64(runtimeNumCPU())*percent/100)), nil
+			} else {
+				return 0, fmt.Errorf("invalid percentage value for --concurrency CLI flag. This should be a finite percentage of CPU cores greater than 0, e.g. --concurrency=50%%")
+			}
+		}
+	} else if i, err := strconv.Atoi(concurrencyRaw); err != nil {
+		return 0, fmt.Errorf("invalid value for --concurrency CLI flag. This should be a positive integer greater than or equal to 1: %w", err)
+	} else {
+		if i >= 1 {
+			return i, nil
+		} else {
+			return 0, fmt.Errorf("invalid value %v for --concurrency CLI flag. This should be a positive integer greater than or equal to 1", i)
+		}
+	}
+}
diff --git a/cli/internal/util/parse_concurrency_test.go b/cli/internal/util/parse_concurrency_test.go
new file mode 100644
index 0000000..b732724
--- /dev/null
+++ b/cli/internal/util/parse_concurrency_test.go
@@ -0,0 +1,79 @@
+package util
+
+import (
+	"fmt"
+	"testing"
+
+	"github.com/stretchr/testify/assert"
+)
+
+func TestParseConcurrency(t *testing.T) {
+	cases := []struct {
+		Input    string
+		Expected int
+	}{
+		{
+			"12",
+			12,
+		},
+		{
+			"200%",
+			20,
+		},
+		{
+			"100%",
+			10,
+		},
+		{
+			"50%",
+			5,
+		},
+		{
+			"25%",
+			2,
+		},
+		{
+			"1%",
+			1,
+		},
+		{
+			"0644", // we parse in base 10
+			644,
+		},
+	}
+
+	// mock runtime.NumCPU() to 10
+	runtimeNumCPU = func() int {
+		return 10
+	}
+
+	for i, tc := range cases {
+		t.Run(fmt.Sprintf("%d) '%s' should be parsed as '%d'", i, tc.Input, tc.Expected), func(t *testing.T) {
+			if result, err := ParseConcurrency(tc.Input); err != nil {
+				t.Fatalf("invalid parse: %#v", err)
+			} else {
+				assert.EqualValues(t, tc.Expected, result)
+			}
+		})
+	}
+}
+
+func TestInvalidPercents(t *testing.T) {
+	inputs := []string{
+		"asdf",
+		"-1",
+		"-l%",
+		"infinity%",
+		"-infinity%",
+		"nan%",
+		"0b01",
+		"0o644",
+		"0xFF",
+	}
+	for _, tc := range inputs {
+		t.Run(tc, func(t *testing.T) {
+			val, err := ParseConcurrency(tc)
+			assert.Error(t, err, "input %v got %v", tc, val)
+		})
+	}
+}
diff --git a/cli/internal/util/printf.go b/cli/internal/util/printf.go
new file mode 100644
index 0000000..9cd6dce
--- /dev/null
+++ b/cli/internal/util/printf.go
@@ -0,0 +1,63 @@
+// Copyright Thought Machine, Inc. or its affiliates. All Rights Reserved.
+// SPDX-License-Identifier: Apache-2.0
+package util
+
+import (
+	"fmt"
+	"io"
+	"os"
+
+	"github.com/vercel/turbo/cli/internal/ui"
+)
+
+// InitPrintf disables the ANSI replacements used by Sprintf, Printf, and
+// Fprintf when the process is not attached to a TTY.
+func InitPrintf() {
+	if !ui.IsTTY {
+		replacements = map[string]string{}
+	}
+}
+
+// Sprintf formats a string, expanding pseudo-shell variables (e.g. ${BOLD})
+// into ANSI formatting codes; these helpers are used throughout this package.
+func Sprintf(format string, args ...interface{}) string {
+	return os.Expand(fmt.Sprintf(format, args...), replace)
+}
+
+// Printf writes the expanded string to stderr.
+func Printf(format string, args ...interface{}) {
+	fmt.Fprint(os.Stderr, os.Expand(fmt.Sprintf(format, args...), replace))
+}
+
+// Fprintf writes the expanded string to the given writer.
+func Fprintf(writer io.Writer, format string, args ...interface{}) {
+	fmt.Fprint(writer, os.Expand(fmt.Sprintf(format, args...), replace))
+}
+
+func replace(s string) string {
+	return replacements[s]
+}
+
+// These are the standard set of replacements we use.
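+// For example (illustrative, not part of the original change):
+//
+//	util.Sprintf("${BOLD}done${RESET}")
+//
+// yields "\x1b[1mdone\x1b[0m" on a TTY, and just "done" once InitPrintf has
+// cleared the map, since os.Expand substitutes the empty string for keys
+// missing from the replacements below.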
+var replacements = map[string]string{
+	"BOLD":         "\x1b[1m",
+	"BOLD_GREY":    "\x1b[30;1m",
+	"BOLD_RED":     "\x1b[31;1m",
+	"BOLD_GREEN":   "\x1b[32;1m",
+	"BOLD_YELLOW":  "\x1b[33;1m",
+	"BOLD_BLUE":    "\x1b[34;1m",
+	"BOLD_MAGENTA": "\x1b[35;1m",
+	"BOLD_CYAN":    "\x1b[36;1m",
+	"BOLD_WHITE":   "\x1b[37;1m",
+	"UNDERLINE":    "\x1b[4m",
+	"GREY":         "\x1b[2m",
+	"RED":          "\x1b[31m",
+	"GREEN":        "\x1b[32m",
+	"YELLOW":       "\x1b[33m",
+	"BLUE":         "\x1b[34m",
+	"MAGENTA":      "\x1b[35m",
+	"CYAN":         "\x1b[36m",
+	"WHITE":        "\x1b[37m",
+	"WHITE_ON_RED": "\x1b[37;41;1m",
+	"RED_NO_BG":    "\x1b[31;49;1m",
+	"RESET":        "\x1b[0m",
+	"ERASE_AFTER":  "\x1b[K",
+	"CLEAR_END":    "\x1b[0J",
+}
diff --git a/cli/internal/util/run_opts.go b/cli/internal/util/run_opts.go
new file mode 100644
index 0000000..08676a0
--- /dev/null
+++ b/cli/internal/util/run_opts.go
@@ -0,0 +1,53 @@
+package util
+
+import "strings"
+
+// EnvMode specifies if we will be using strict env vars
+type EnvMode string
+
+const (
+	// Infer - infer environment variable constraints from turbo.json
+	Infer EnvMode = "Infer"
+	// Loose - environment variables are unconstrained
+	Loose EnvMode = "Loose"
+	// Strict - environment variables are limited
+	Strict EnvMode = "Strict"
+)
+
+// MarshalText implements TextMarshaler for EnvMode.
+func (s EnvMode) MarshalText() (text []byte, err error) {
+	return []byte(strings.ToLower(string(s))), nil
+}
+
+// RunOpts holds the options that control the execution of a turbo run
+type RunOpts struct {
+	// Maximum number of tasks to run simultaneously (1 forces serial, one-at-a-time execution)
+	Concurrency int
+	// Whether to execute in parallel (defaults to false)
+	Parallel bool
+
+	EnvMode EnvMode
+	// The filename to write a perf profile.
+	Profile string
+	// If true, continue task executions even if a task fails.
+	ContinueOnError bool
+	PassThroughArgs []string
+	// Restrict execution to only the listed task names. Default false
+	Only bool
+	// Dry run flags
+	DryRun     bool
+	DryRunJSON bool
+	// Graph flags
+	GraphDot      bool
+	GraphFile     string
+	NoDaemon      bool
+	SinglePackage bool
+
+	// LogPrefix controls whether we should print a prefix in task logs
+	LogPrefix string
+
+	// Whether turbo should create a run summary
+	Summarize bool
+
+	ExperimentalSpaceID string
+}
diff --git a/cli/internal/util/semaphore.go b/cli/internal/util/semaphore.go
new file mode 100644
index 0000000..ef29df0
--- /dev/null
+++ b/cli/internal/util/semaphore.go
@@ -0,0 +1,43 @@
+package util
+
+// Semaphore is a wrapper around a channel to provide
+// utility methods to clarify that we are treating the
+// channel as a semaphore
+type Semaphore chan struct{}
+
+// NewSemaphore creates a semaphore that allows up
+// to a given limit of simultaneous acquisitions
+func NewSemaphore(n int) Semaphore {
+	if n <= 0 {
+		panic("semaphore with limit <=0")
+	}
+	ch := make(chan struct{}, n)
+	return Semaphore(ch)
+}
+
+// Acquire is used to acquire an available slot.
+// Blocks until available.
+func (s Semaphore) Acquire() {
+	s <- struct{}{}
+}
+
+// TryAcquire is used to do a non-blocking acquire.
+// Returns a bool indicating success
+func (s Semaphore) TryAcquire() bool {
+	select {
+	case s <- struct{}{}:
+		return true
+	default:
+		return false
+	}
+}
+
+// Release is used to return a slot. Acquire must
+// be called as a pre-condition.
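+//
+// Typical usage of the type (illustrative, not part of the original change;
+// tasks and Task are hypothetical):
+//
+//	sem := NewSemaphore(4)
+//	for _, task := range tasks {
+//		sem.Acquire()
+//		go func(t Task) {
+//			defer sem.Release()
+//			t.Run()
+//		}(task)
+//	}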
+func (s Semaphore) Release() {
+	select {
+	case <-s:
+	default:
+		panic("release without an acquire")
+	}
+}
diff --git a/cli/internal/util/set.go b/cli/internal/util/set.go
new file mode 100644
index 0000000..b6c5f86
--- /dev/null
+++ b/cli/internal/util/set.go
@@ -0,0 +1,147 @@
+package util
+
+// Set is a set data structure.
+type Set map[interface{}]interface{}
+
+// SetFromStrings creates a Set containing the strings from the given slice
+func SetFromStrings(sl []string) Set {
+	set := make(Set, len(sl))
+	for _, item := range sl {
+		set.Add(item)
+	}
+	return set
+}
+
+// Hashable is the interface used by set to get the hash code of a value.
+// If this isn't given, then the value of the item being added to the set
+// itself is used as the comparison value.
+type Hashable interface {
+	Hashcode() interface{}
+}
+
+// hashcode returns the hashcode used for set elements.
+func hashcode(v interface{}) interface{} {
+	if h, ok := v.(Hashable); ok {
+		return h.Hashcode()
+	}
+
+	return v
+}
+
+// Add adds an item to the set
+func (s Set) Add(v interface{}) {
+	s[hashcode(v)] = v
+}
+
+// Delete removes an item from the set.
+func (s Set) Delete(v interface{}) {
+	delete(s, hashcode(v))
+}
+
+// Includes reports whether a value is in the set.
+func (s Set) Includes(v interface{}) bool {
+	_, ok := s[hashcode(v)]
+	return ok
+}
+
+// Intersection computes the set intersection with other.
+func (s Set) Intersection(other Set) Set {
+	result := make(Set)
+	if s == nil || other == nil {
+		return result
+	}
+	// Iteration over a smaller set has better performance.
+	if other.Len() < s.Len() {
+		s, other = other, s
+	}
+	for _, v := range s {
+		if other.Includes(v) {
+			result.Add(v)
+		}
+	}
+	return result
+}
+
+// Difference returns a set with the elements that s has but
+// other doesn't.
+func (s Set) Difference(other Set) Set {
+	result := make(Set)
+	for k, v := range s {
+		var ok bool
+		if other != nil {
+			_, ok = other[k]
+		}
+		if !ok {
+			result.Add(v)
+		}
+	}
+
+	return result
+}
+
+// Some reports whether at least one element in the set passes the test
+// implemented by the provided function.
+func (s Set) Some(cb func(interface{}) bool) bool {
+	for _, v := range s {
+		if cb(v) {
+			return true
+		}
+	}
+	return false
+}
+
+// Filter returns a set that contains the elements from the receiver
+// where the given callback returns true.
+func (s Set) Filter(cb func(interface{}) bool) Set {
+	result := make(Set)
+
+	for _, v := range s {
+		if cb(v) {
+			result.Add(v)
+		}
+	}
+
+	return result
+}
+
+// Len is the number of items in the set.
+func (s Set) Len() int {
+	return len(s)
+}
+
+// List returns the list of set elements.
+func (s Set) List() []interface{} {
+	if s == nil {
+		return nil
+	}
+
+	r := make([]interface{}, 0, len(s))
+	for _, v := range s {
+		r = append(r, v)
+	}
+
+	return r
+}
+
+// UnsafeListOfStrings dangerously casts each element of the set to a string
+func (s Set) UnsafeListOfStrings() []string {
+	if s == nil {
+		return nil
+	}
+
+	r := make([]string, 0, len(s))
+	for _, v := range s {
+		r = append(r, v.(string))
+	}
+
+	return r
+}
+
+// Copy returns a shallow copy of the set.
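+//
+// Example of the shallow-copy semantics (illustrative, not part of the
+// original change):
+//
+//	a := SetFromStrings([]string{"x"})
+//	b := a.Copy()
+//	b.Add("y") // a still contains only "x"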
+func (s Set) Copy() Set { + c := make(Set) + for k, v := range s { + c[k] = v + } + return c +} diff --git a/cli/internal/util/set_test.go b/cli/internal/util/set_test.go new file mode 100644 index 0000000..52736b4 --- /dev/null +++ b/cli/internal/util/set_test.go @@ -0,0 +1,149 @@ +package util + +import ( + "fmt" + "testing" +) + +func TestSetDifference(t *testing.T) { + cases := []struct { + Name string + A, B []interface{} + Expected []interface{} + }{ + { + "same", + []interface{}{1, 2, 3}, + []interface{}{3, 1, 2}, + []interface{}{}, + }, + + { + "A has extra elements", + []interface{}{1, 2, 3}, + []interface{}{3, 2}, + []interface{}{1}, + }, + + { + "B has extra elements", + []interface{}{1, 2, 3}, + []interface{}{3, 2, 1, 4}, + []interface{}{}, + }, + } + + for i, tc := range cases { + t.Run(fmt.Sprintf("%d-%s", i, tc.Name), func(t *testing.T) { + one := make(Set) + two := make(Set) + expected := make(Set) + for _, v := range tc.A { + one.Add(v) + } + for _, v := range tc.B { + two.Add(v) + } + for _, v := range tc.Expected { + expected.Add(v) + } + + actual := one.Difference(two) + match := actual.Intersection(expected) + if match.Len() != expected.Len() { + t.Fatalf("bad: %#v", actual.List()) + } + }) + } +} + +func TestSetFilter(t *testing.T) { + cases := []struct { + Input []interface{} + Expected []interface{} + }{ + { + []interface{}{1, 2, 3}, + []interface{}{1, 2, 3}, + }, + + { + []interface{}{4, 5, 6}, + []interface{}{4}, + }, + + { + []interface{}{7, 8, 9}, + []interface{}{}, + }, + } + + for i, tc := range cases { + t.Run(fmt.Sprintf("%d-%#v", i, tc.Input), func(t *testing.T) { + input := make(Set) + expected := make(Set) + for _, v := range tc.Input { + input.Add(v) + } + for _, v := range tc.Expected { + expected.Add(v) + } + + actual := input.Filter(func(v interface{}) bool { + return v.(int) < 5 + }) + match := actual.Intersection(expected) + if match.Len() != expected.Len() { + t.Fatalf("bad: %#v", actual.List()) + } + }) + } +} + +func TestSetCopy(t *testing.T) { + a := make(Set) + a.Add(1) + a.Add(2) + + b := a.Copy() + b.Add(3) + + diff := b.Difference(a) + + if diff.Len() != 1 { + t.Fatalf("expected single diff value, got %#v", diff) + } + + if !diff.Includes(3) { + t.Fatalf("diff does not contain 3, got %#v", diff) + } + +} + +func makeSet(n int) Set { + ret := make(Set, n) + for i := 0; i < n; i++ { + ret.Add(i) + } + return ret +} + +func BenchmarkSetIntersection_100_100000(b *testing.B) { + small := makeSet(100) + large := makeSet(100000) + + b.ResetTimer() + for n := 0; n < b.N; n++ { + small.Intersection(large) + } +} + +func BenchmarkSetIntersection_100000_100(b *testing.B) { + small := makeSet(100) + large := makeSet(100000) + + b.ResetTimer() + for n := 0; n < b.N; n++ { + large.Intersection(small) + } +} diff --git a/cli/internal/util/status.go b/cli/internal/util/status.go new file mode 100644 index 0000000..23ae165 --- /dev/null +++ b/cli/internal/util/status.go @@ -0,0 +1,47 @@ +package util + +import "fmt" + +// CachingStatus represents the api server's perspective +// on whether remote caching should be allowed +type CachingStatus int + +const ( + // CachingStatusDisabled indicates that the server will not accept or serve artifacts + CachingStatusDisabled CachingStatus = iota + // CachingStatusEnabled indicates that the server will accept and serve artifacts + CachingStatusEnabled + // CachingStatusOverLimit indicates that a usage limit has been hit and the + // server will temporarily not accept or serve artifacts + CachingStatusOverLimit + // 
CachingStatusPaused indicates that a customer's spending has been paused and the + // server will temporarily not accept or serve artifacts + CachingStatusPaused +) + +// CachingStatusFromString parses a raw string to a caching status enum value +func CachingStatusFromString(raw string) (CachingStatus, error) { + switch raw { + case "disabled": + return CachingStatusDisabled, nil + case "enabled": + return CachingStatusEnabled, nil + case "over_limit": + return CachingStatusOverLimit, nil + case "paused": + return CachingStatusPaused, nil + default: + return CachingStatusDisabled, fmt.Errorf("unknown caching status: %v", raw) + } +} + +// CacheDisabledError is an error used to indicate that remote caching +// is not available. +type CacheDisabledError struct { + Status CachingStatus + Message string +} + +func (cd *CacheDisabledError) Error() string { + return cd.Message +} diff --git a/cli/internal/util/task_id.go b/cli/internal/util/task_id.go new file mode 100644 index 0000000..e4415b6 --- /dev/null +++ b/cli/internal/util/task_id.go @@ -0,0 +1,66 @@ +package util + +import ( + "fmt" + "strings" +) + +const ( + // TaskDelimiter separates a package name from a task name in a task id + TaskDelimiter = "#" + // RootPkgName is the reserved name that specifies the root package + RootPkgName = "//" +) + +// GetTaskId returns a package-task identifier (e.g @feed/thing#build). +func GetTaskId(pkgName interface{}, target string) string { + if IsPackageTask(target) { + return target + } + return fmt.Sprintf("%v%v%v", pkgName, TaskDelimiter, target) +} + +// RootTaskID returns the task id for running the given task in the root package +func RootTaskID(target string) string { + return GetTaskId(RootPkgName, target) +} + +// GetPackageTaskFromId returns a tuple of the package name and target task +func GetPackageTaskFromId(taskId string) (packageName string, task string) { + arr := strings.Split(taskId, TaskDelimiter) + return arr[0], arr[1] +} + +// RootTaskTaskName returns the task portion of a root task taskID +func RootTaskTaskName(taskID string) string { + return strings.TrimPrefix(taskID, RootPkgName+TaskDelimiter) +} + +// IsPackageTask returns true if input is a package-specific task +// whose name has a length greater than 0. +// +// Accepted: myapp#build +// Rejected: #build, build +func IsPackageTask(task string) bool { + return strings.Index(task, TaskDelimiter) > 0 +} + +// IsTaskInPackage returns true if the task does not belong to a different package +// note that this means unscoped tasks will always return true +func IsTaskInPackage(task string, packageName string) bool { + if !IsPackageTask(task) { + return true + } + packageNameExpected, _ := GetPackageTaskFromId(task) + return packageNameExpected == packageName +} + +// StripPackageName removes the package portion of a taskID if it +// is a package task. 
Non-package tasks are returned unmodified +func StripPackageName(taskID string) string { + if IsPackageTask(taskID) { + _, task := GetPackageTaskFromId(taskID) + return task + } + return taskID +} diff --git a/cli/internal/util/task_output_mode.go b/cli/internal/util/task_output_mode.go new file mode 100644 index 0000000..eee42e0 --- /dev/null +++ b/cli/internal/util/task_output_mode.go @@ -0,0 +1,100 @@ +package util + +import ( + "encoding/json" + "fmt" +) + +// TaskOutputMode defines the ways turbo can display task output during a run +type TaskOutputMode int + +const ( + // FullTaskOutput will show all task output + FullTaskOutput TaskOutputMode = iota + // NoTaskOutput will hide all task output + NoTaskOutput + // HashTaskOutput will display turbo-computed task hashes + HashTaskOutput + // NewTaskOutput will show all new task output and turbo-computed task hashes for cached output + NewTaskOutput + // ErrorTaskOutput will show task output for failures only; no cache miss/hit messages are emitted + ErrorTaskOutput +) + +const ( + fullTaskOutputString = "full" + noTaskOutputString = "none" + hashTaskOutputString = "hash-only" + newTaskOutputString = "new-only" + errorTaskOutputString = "errors-only" +) + +// TaskOutputModeStrings is an array containing the string representations for task output modes +var TaskOutputModeStrings = []string{ + fullTaskOutputString, + noTaskOutputString, + hashTaskOutputString, + newTaskOutputString, + errorTaskOutputString, +} + +// FromTaskOutputModeString converts a task output mode's string representation into the enum value +func FromTaskOutputModeString(value string) (TaskOutputMode, error) { + switch value { + case fullTaskOutputString: + return FullTaskOutput, nil + case noTaskOutputString: + return NoTaskOutput, nil + case hashTaskOutputString: + return HashTaskOutput, nil + case newTaskOutputString: + return NewTaskOutput, nil + case errorTaskOutputString: + return ErrorTaskOutput, nil + } + + return FullTaskOutput, fmt.Errorf("invalid task output mode: %v", value) +} + +// ToTaskOutputModeString converts a task output mode enum value into the string representation +func ToTaskOutputModeString(value TaskOutputMode) (string, error) { + switch value { + case FullTaskOutput: + return fullTaskOutputString, nil + case NoTaskOutput: + return noTaskOutputString, nil + case HashTaskOutput: + return hashTaskOutputString, nil + case NewTaskOutput: + return newTaskOutputString, nil + case ErrorTaskOutput: + return errorTaskOutputString, nil + } + + return "", fmt.Errorf("invalid task output mode: %v", value) +} + +// UnmarshalJSON converts a task output mode string representation into an enum +func (c *TaskOutputMode) UnmarshalJSON(data []byte) error { + var rawTaskOutputMode string + if err := json.Unmarshal(data, &rawTaskOutputMode); err != nil { + return err + } + + taskOutputMode, err := FromTaskOutputModeString(rawTaskOutputMode) + if err != nil { + return err + } + + *c = taskOutputMode + return nil +} + +// MarshalJSON converts a task output mode to its string representation +func (c TaskOutputMode) MarshalJSON() ([]byte, error) { + outputModeString, err := ToTaskOutputModeString(c) + if err != nil { + return nil, err + } + return json.Marshal(outputModeString) +} diff --git a/cli/internal/workspace/workspace.go b/cli/internal/workspace/workspace.go new file mode 100644 index 0000000..fcd1eb8 --- /dev/null +++ b/cli/internal/workspace/workspace.go @@ -0,0 +1,10 @@ +// Package workspace contains some utilities around managing workspaces +package 
workspace + +import "github.com/vercel/turbo/cli/internal/fs" + +// Catalog holds information about each workspace in the monorepo. +type Catalog struct { + PackageJSONs map[string]*fs.PackageJSON + TurboConfigs map[string]*fs.TurboJSON +} diff --git a/cli/internal/xxhash/xxhash.go b/cli/internal/xxhash/xxhash.go new file mode 100644 index 0000000..642ac73 --- /dev/null +++ b/cli/internal/xxhash/xxhash.go @@ -0,0 +1,202 @@ +// Package xxhash implements the 64-bit variant of xxHash (XXH64) as described +// at http://cyan4973.github.io/xxHash/. + +// Adapted from https://cs.github.com/evanw/esbuild/blob/0c9ced59c8b3ea3bd8dd5feebafed1f47ed279dd/internal/xxhash +// Copyright (c) 2016 Caleb Spare. All rights reserved. +// SPDX-License-Identifier: MIT +package xxhash + +import ( + "encoding/binary" + "math/bits" +) + +const ( + prime1 uint64 = 11400714785074694791 + prime2 uint64 = 14029467366897019727 + prime3 uint64 = 1609587929392839161 + prime4 uint64 = 9650029242287828579 + prime5 uint64 = 2870177450012600261 +) + +// NOTE(caleb): I'm using both consts and vars of the primes. Using consts where +// possible in the Go code is worth a small (but measurable) performance boost +// by avoiding some MOVQs. Vars are needed for the asm and also are useful for +// convenience in the Go code in a few places where we need to intentionally +// avoid constant arithmetic (e.g., v1 := prime1 + prime2 fails because the +// result overflows a uint64). +var prime1v = prime1 + +// Digest implements hash.Hash64. +type Digest struct { + v1 uint64 + v2 uint64 + v3 uint64 + v4 uint64 + total uint64 + mem [32]byte + n int // how much of mem is used +} + +// New creates a new Digest that computes the 64-bit xxHash algorithm. +func New() *Digest { + var d Digest + d.Reset() + return &d +} + +// Reset clears the Digest's state so that it can be reused. +func (d *Digest) Reset() { + d.v1 = prime1v + prime2 + d.v2 = prime2 + d.v3 = 0 + d.v4 = -prime1v + d.total = 0 + d.n = 0 +} + +// Size always returns 8 bytes. +func (d *Digest) Size() int { return 8 } + +// BlockSize always returns 32 bytes. +func (d *Digest) BlockSize() int { return 32 } + +// Write adds more data to d. It always returns len(b), nil. +func (d *Digest) Write(b []byte) (n int, err error) { + n = len(b) + d.total += uint64(n) + + if d.n+n < 32 { + // This new data doesn't even fill the current block. + copy(d.mem[d.n:], b) + d.n += n + return + } + + if d.n > 0 { + // Finish off the partial block. + copy(d.mem[d.n:], b) + d.v1 = round(d.v1, u64(d.mem[0:8])) + d.v2 = round(d.v2, u64(d.mem[8:16])) + d.v3 = round(d.v3, u64(d.mem[16:24])) + d.v4 = round(d.v4, u64(d.mem[24:32])) + b = b[32-d.n:] + d.n = 0 + } + + if len(b) >= 32 { + // One or more full blocks left. + nw := writeBlocks(d, b) + b = b[nw:] + } + + // Store any remaining partial block. + copy(d.mem[:], b) + d.n = len(b) + + return +} + +// Sum appends the current hash to b and returns the resulting slice. +func (d *Digest) Sum(b []byte) []byte { + s := d.Sum64() + return append( + b, + byte(s>>56), + byte(s>>48), + byte(s>>40), + byte(s>>32), + byte(s>>24), + byte(s>>16), + byte(s>>8), + byte(s), + ) +} + +// Sum64 returns the current hash. 
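+//
+// As an illustrative sanity check (not part of the original change): the
+// XXH64 hash of empty input with the default seed is the well-known
+// reference value 0xef46db3751d8e999, i.e.
+//
+//	New().Sum64() == 0xef46db3751d8e999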
+func (d *Digest) Sum64() uint64 { + var h uint64 + + if d.total >= 32 { + v1, v2, v3, v4 := d.v1, d.v2, d.v3, d.v4 + h = rol1(v1) + rol7(v2) + rol12(v3) + rol18(v4) + h = mergeRound(h, v1) + h = mergeRound(h, v2) + h = mergeRound(h, v3) + h = mergeRound(h, v4) + } else { + h = d.v3 + prime5 + } + + h += d.total + + i, end := 0, d.n + for ; i+8 <= end; i += 8 { + k1 := round(0, u64(d.mem[i:i+8])) + h ^= k1 + h = rol27(h)*prime1 + prime4 + } + if i+4 <= end { + h ^= uint64(u32(d.mem[i:i+4])) * prime1 + h = rol23(h)*prime2 + prime3 + i += 4 + } + for i < end { + h ^= uint64(d.mem[i]) * prime5 + h = rol11(h) * prime1 + i++ + } + + h ^= h >> 33 + h *= prime2 + h ^= h >> 29 + h *= prime3 + h ^= h >> 32 + + return h +} + +const ( + magic = "xxh\x06" + marshaledSize = len(magic) + 8*5 + 32 +) + +func u64(b []byte) uint64 { return binary.LittleEndian.Uint64(b) } +func u32(b []byte) uint32 { return binary.LittleEndian.Uint32(b) } + +func round(acc, input uint64) uint64 { + acc += input * prime2 + acc = rol31(acc) + acc *= prime1 + return acc +} + +func mergeRound(acc, val uint64) uint64 { + val = round(0, val) + acc ^= val + acc = acc*prime1 + prime4 + return acc +} + +func rol1(x uint64) uint64 { return bits.RotateLeft64(x, 1) } +func rol7(x uint64) uint64 { return bits.RotateLeft64(x, 7) } +func rol11(x uint64) uint64 { return bits.RotateLeft64(x, 11) } +func rol12(x uint64) uint64 { return bits.RotateLeft64(x, 12) } +func rol18(x uint64) uint64 { return bits.RotateLeft64(x, 18) } +func rol23(x uint64) uint64 { return bits.RotateLeft64(x, 23) } +func rol27(x uint64) uint64 { return bits.RotateLeft64(x, 27) } +func rol31(x uint64) uint64 { return bits.RotateLeft64(x, 31) } + +func writeBlocks(d *Digest, b []byte) int { + v1, v2, v3, v4 := d.v1, d.v2, d.v3, d.v4 + n := len(b) + for len(b) >= 32 { + v1 = round(v1, u64(b[0:8:len(b)])) + v2 = round(v2, u64(b[8:16:len(b)])) + v3 = round(v3, u64(b[16:24:len(b)])) + v4 = round(v4, u64(b[24:32:len(b)])) + b = b[32:len(b):len(b)] + } + d.v1, d.v2, d.v3, d.v4 = v1, v2, v3, v4 + return n - len(b) +} diff --git a/cli/internal/yaml/apic.go b/cli/internal/yaml/apic.go new file mode 100644 index 0000000..05fd305 --- /dev/null +++ b/cli/internal/yaml/apic.go @@ -0,0 +1,747 @@ +// +// Copyright (c) 2011-2019 Canonical Ltd +// Copyright (c) 2006-2010 Kirill Simonov +// +// Permission is hereby granted, free of charge, to any person obtaining a copy of +// this software and associated documentation files (the "Software"), to deal in +// the Software without restriction, including without limitation the rights to +// use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +// of the Software, and to permit persons to whom the Software is furnished to do +// so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. 
+ +package yaml + +import ( + "io" +) + +func yaml_insert_token(parser *yaml_parser_t, pos int, token *yaml_token_t) { + //fmt.Println("yaml_insert_token", "pos:", pos, "typ:", token.typ, "head:", parser.tokens_head, "len:", len(parser.tokens)) + + // Check if we can move the queue at the beginning of the buffer. + if parser.tokens_head > 0 && len(parser.tokens) == cap(parser.tokens) { + if parser.tokens_head != len(parser.tokens) { + copy(parser.tokens, parser.tokens[parser.tokens_head:]) + } + parser.tokens = parser.tokens[:len(parser.tokens)-parser.tokens_head] + parser.tokens_head = 0 + } + parser.tokens = append(parser.tokens, *token) + if pos < 0 { + return + } + copy(parser.tokens[parser.tokens_head+pos+1:], parser.tokens[parser.tokens_head+pos:]) + parser.tokens[parser.tokens_head+pos] = *token +} + +// Create a new parser object. +func yaml_parser_initialize(parser *yaml_parser_t) bool { + *parser = yaml_parser_t{ + raw_buffer: make([]byte, 0, input_raw_buffer_size), + buffer: make([]byte, 0, input_buffer_size), + } + return true +} + +// Destroy a parser object. +func yaml_parser_delete(parser *yaml_parser_t) { + *parser = yaml_parser_t{} +} + +// String read handler. +func yaml_string_read_handler(parser *yaml_parser_t, buffer []byte) (n int, err error) { + if parser.input_pos == len(parser.input) { + return 0, io.EOF + } + n = copy(buffer, parser.input[parser.input_pos:]) + parser.input_pos += n + return n, nil +} + +// Reader read handler. +func yaml_reader_read_handler(parser *yaml_parser_t, buffer []byte) (n int, err error) { + return parser.input_reader.Read(buffer) +} + +// Set a string input. +func yaml_parser_set_input_string(parser *yaml_parser_t, input []byte) { + if parser.read_handler != nil { + panic("must set the input source only once") + } + parser.read_handler = yaml_string_read_handler + parser.input = input + parser.input_pos = 0 +} + +// Set a file input. +func yaml_parser_set_input_reader(parser *yaml_parser_t, r io.Reader) { + if parser.read_handler != nil { + panic("must set the input source only once") + } + parser.read_handler = yaml_reader_read_handler + parser.input_reader = r +} + +// Set the source encoding. +func yaml_parser_set_encoding(parser *yaml_parser_t, encoding yaml_encoding_t) { + if parser.encoding != yaml_ANY_ENCODING { + panic("must set the encoding only once") + } + parser.encoding = encoding +} + +// Create a new emitter object. +func yaml_emitter_initialize(emitter *yaml_emitter_t) { + *emitter = yaml_emitter_t{ + buffer: make([]byte, output_buffer_size), + raw_buffer: make([]byte, 0, output_raw_buffer_size), + states: make([]yaml_emitter_state_t, 0, initial_stack_size), + events: make([]yaml_event_t, 0, initial_queue_size), + best_width: -1, + } +} + +// Destroy an emitter object. +func yaml_emitter_delete(emitter *yaml_emitter_t) { + *emitter = yaml_emitter_t{} +} + +// String write handler. +func yaml_string_write_handler(emitter *yaml_emitter_t, buffer []byte) error { + *emitter.output_buffer = append(*emitter.output_buffer, buffer...) + return nil +} + +// yaml_writer_write_handler uses emitter.output_writer to write the +// emitted text. +func yaml_writer_write_handler(emitter *yaml_emitter_t, buffer []byte) error { + _, err := emitter.output_writer.Write(buffer) + return err +} + +// Set a string output. 
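+// For example (illustrative, not part of the original change), emitting into
+// an in-memory buffer looks roughly like:
+//
+//	var out []byte
+//	yaml_emitter_initialize(&emitter)
+//	yaml_emitter_set_output_string(&emitter, &out)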
+func yaml_emitter_set_output_string(emitter *yaml_emitter_t, output_buffer *[]byte) { + if emitter.write_handler != nil { + panic("must set the output target only once") + } + emitter.write_handler = yaml_string_write_handler + emitter.output_buffer = output_buffer +} + +// Set a file output. +func yaml_emitter_set_output_writer(emitter *yaml_emitter_t, w io.Writer) { + if emitter.write_handler != nil { + panic("must set the output target only once") + } + emitter.write_handler = yaml_writer_write_handler + emitter.output_writer = w +} + +// Set the output encoding. +func yaml_emitter_set_encoding(emitter *yaml_emitter_t, encoding yaml_encoding_t) { + if emitter.encoding != yaml_ANY_ENCODING { + panic("must set the output encoding only once") + } + emitter.encoding = encoding +} + +// Set the canonical output style. +func yaml_emitter_set_canonical(emitter *yaml_emitter_t, canonical bool) { + emitter.canonical = canonical +} + +// Set the indentation increment. +func yaml_emitter_set_indent(emitter *yaml_emitter_t, indent int) { + if indent < 2 || indent > 9 { + indent = 2 + } + emitter.best_indent = indent +} + +// Set the preferred line width. +func yaml_emitter_set_width(emitter *yaml_emitter_t, width int) { + if width < 0 { + width = -1 + } + emitter.best_width = width +} + +// Set if unescaped non-ASCII characters are allowed. +func yaml_emitter_set_unicode(emitter *yaml_emitter_t, unicode bool) { + emitter.unicode = unicode +} + +// Set the preferred line break character. +func yaml_emitter_set_break(emitter *yaml_emitter_t, line_break yaml_break_t) { + emitter.line_break = line_break +} + +///* +// * Destroy a token object. +// */ +// +//YAML_DECLARE(void) +//yaml_token_delete(yaml_token_t *token) +//{ +// assert(token); // Non-NULL token object expected. +// +// switch (token.type) +// { +// case YAML_TAG_DIRECTIVE_TOKEN: +// yaml_free(token.data.tag_directive.handle); +// yaml_free(token.data.tag_directive.prefix); +// break; +// +// case YAML_ALIAS_TOKEN: +// yaml_free(token.data.alias.value); +// break; +// +// case YAML_ANCHOR_TOKEN: +// yaml_free(token.data.anchor.value); +// break; +// +// case YAML_TAG_TOKEN: +// yaml_free(token.data.tag.handle); +// yaml_free(token.data.tag.suffix); +// break; +// +// case YAML_SCALAR_TOKEN: +// yaml_free(token.data.scalar.value); +// break; +// +// default: +// break; +// } +// +// memset(token, 0, sizeof(yaml_token_t)); +//} +// +///* +// * Check if a string is a valid UTF-8 sequence. +// * +// * Check 'reader.c' for more details on UTF-8 encoding. +// */ +// +//static int +//yaml_check_utf8(yaml_char_t *start, size_t length) +//{ +// yaml_char_t *end = start+length; +// yaml_char_t *pointer = start; +// +// while (pointer < end) { +// unsigned char octet; +// unsigned int width; +// unsigned int value; +// size_t k; +// +// octet = pointer[0]; +// width = (octet & 0x80) == 0x00 ? 1 : +// (octet & 0xE0) == 0xC0 ? 2 : +// (octet & 0xF0) == 0xE0 ? 3 : +// (octet & 0xF8) == 0xF0 ? 4 : 0; +// value = (octet & 0x80) == 0x00 ? octet & 0x7F : +// (octet & 0xE0) == 0xC0 ? octet & 0x1F : +// (octet & 0xF0) == 0xE0 ? octet & 0x0F : +// (octet & 0xF8) == 0xF0 ? 
octet & 0x07 : 0; +// if (!width) return 0; +// if (pointer+width > end) return 0; +// for (k = 1; k < width; k ++) { +// octet = pointer[k]; +// if ((octet & 0xC0) != 0x80) return 0; +// value = (value << 6) + (octet & 0x3F); +// } +// if (!((width == 1) || +// (width == 2 && value >= 0x80) || +// (width == 3 && value >= 0x800) || +// (width == 4 && value >= 0x10000))) return 0; +// +// pointer += width; +// } +// +// return 1; +//} +// + +// Create STREAM-START. +func yaml_stream_start_event_initialize(event *yaml_event_t, encoding yaml_encoding_t) { + *event = yaml_event_t{ + typ: yaml_STREAM_START_EVENT, + encoding: encoding, + } +} + +// Create STREAM-END. +func yaml_stream_end_event_initialize(event *yaml_event_t) { + *event = yaml_event_t{ + typ: yaml_STREAM_END_EVENT, + } +} + +// Create DOCUMENT-START. +func yaml_document_start_event_initialize( + event *yaml_event_t, + version_directive *yaml_version_directive_t, + tag_directives []yaml_tag_directive_t, + implicit bool, +) { + *event = yaml_event_t{ + typ: yaml_DOCUMENT_START_EVENT, + version_directive: version_directive, + tag_directives: tag_directives, + implicit: implicit, + } +} + +// Create DOCUMENT-END. +func yaml_document_end_event_initialize(event *yaml_event_t, implicit bool) { + *event = yaml_event_t{ + typ: yaml_DOCUMENT_END_EVENT, + implicit: implicit, + } +} + +// Create ALIAS. +func yaml_alias_event_initialize(event *yaml_event_t, anchor []byte) bool { + *event = yaml_event_t{ + typ: yaml_ALIAS_EVENT, + anchor: anchor, + } + return true +} + +// Create SCALAR. +func yaml_scalar_event_initialize(event *yaml_event_t, anchor, tag, value []byte, plain_implicit, quoted_implicit bool, style yaml_scalar_style_t) bool { + *event = yaml_event_t{ + typ: yaml_SCALAR_EVENT, + anchor: anchor, + tag: tag, + value: value, + implicit: plain_implicit, + quoted_implicit: quoted_implicit, + style: yaml_style_t(style), + } + return true +} + +// Create SEQUENCE-START. +func yaml_sequence_start_event_initialize(event *yaml_event_t, anchor, tag []byte, implicit bool, style yaml_sequence_style_t) bool { + *event = yaml_event_t{ + typ: yaml_SEQUENCE_START_EVENT, + anchor: anchor, + tag: tag, + implicit: implicit, + style: yaml_style_t(style), + } + return true +} + +// Create SEQUENCE-END. +func yaml_sequence_end_event_initialize(event *yaml_event_t) bool { + *event = yaml_event_t{ + typ: yaml_SEQUENCE_END_EVENT, + } + return true +} + +// Create MAPPING-START. +func yaml_mapping_start_event_initialize(event *yaml_event_t, anchor, tag []byte, implicit bool, style yaml_mapping_style_t) { + *event = yaml_event_t{ + typ: yaml_MAPPING_START_EVENT, + anchor: anchor, + tag: tag, + implicit: implicit, + style: yaml_style_t(style), + } +} + +// Create MAPPING-END. +func yaml_mapping_end_event_initialize(event *yaml_event_t) { + *event = yaml_event_t{ + typ: yaml_MAPPING_END_EVENT, + } +} + +// Destroy an event object. +func yaml_event_delete(event *yaml_event_t) { + *event = yaml_event_t{} +} + +///* +// * Create a document object. 
+// */ +// +//YAML_DECLARE(int) +//yaml_document_initialize(document *yaml_document_t, +// version_directive *yaml_version_directive_t, +// tag_directives_start *yaml_tag_directive_t, +// tag_directives_end *yaml_tag_directive_t, +// start_implicit int, end_implicit int) +//{ +// struct { +// error yaml_error_type_t +// } context +// struct { +// start *yaml_node_t +// end *yaml_node_t +// top *yaml_node_t +// } nodes = { NULL, NULL, NULL } +// version_directive_copy *yaml_version_directive_t = NULL +// struct { +// start *yaml_tag_directive_t +// end *yaml_tag_directive_t +// top *yaml_tag_directive_t +// } tag_directives_copy = { NULL, NULL, NULL } +// value yaml_tag_directive_t = { NULL, NULL } +// mark yaml_mark_t = { 0, 0, 0 } +// +// assert(document) // Non-NULL document object is expected. +// assert((tag_directives_start && tag_directives_end) || +// (tag_directives_start == tag_directives_end)) +// // Valid tag directives are expected. +// +// if (!STACK_INIT(&context, nodes, INITIAL_STACK_SIZE)) goto error +// +// if (version_directive) { +// version_directive_copy = yaml_malloc(sizeof(yaml_version_directive_t)) +// if (!version_directive_copy) goto error +// version_directive_copy.major = version_directive.major +// version_directive_copy.minor = version_directive.minor +// } +// +// if (tag_directives_start != tag_directives_end) { +// tag_directive *yaml_tag_directive_t +// if (!STACK_INIT(&context, tag_directives_copy, INITIAL_STACK_SIZE)) +// goto error +// for (tag_directive = tag_directives_start +// tag_directive != tag_directives_end; tag_directive ++) { +// assert(tag_directive.handle) +// assert(tag_directive.prefix) +// if (!yaml_check_utf8(tag_directive.handle, +// strlen((char *)tag_directive.handle))) +// goto error +// if (!yaml_check_utf8(tag_directive.prefix, +// strlen((char *)tag_directive.prefix))) +// goto error +// value.handle = yaml_strdup(tag_directive.handle) +// value.prefix = yaml_strdup(tag_directive.prefix) +// if (!value.handle || !value.prefix) goto error +// if (!PUSH(&context, tag_directives_copy, value)) +// goto error +// value.handle = NULL +// value.prefix = NULL +// } +// } +// +// DOCUMENT_INIT(*document, nodes.start, nodes.end, version_directive_copy, +// tag_directives_copy.start, tag_directives_copy.top, +// start_implicit, end_implicit, mark, mark) +// +// return 1 +// +//error: +// STACK_DEL(&context, nodes) +// yaml_free(version_directive_copy) +// while (!STACK_EMPTY(&context, tag_directives_copy)) { +// value yaml_tag_directive_t = POP(&context, tag_directives_copy) +// yaml_free(value.handle) +// yaml_free(value.prefix) +// } +// STACK_DEL(&context, tag_directives_copy) +// yaml_free(value.handle) +// yaml_free(value.prefix) +// +// return 0 +//} +// +///* +// * Destroy a document object. +// */ +// +//YAML_DECLARE(void) +//yaml_document_delete(document *yaml_document_t) +//{ +// struct { +// error yaml_error_type_t +// } context +// tag_directive *yaml_tag_directive_t +// +// context.error = YAML_NO_ERROR // Eliminate a compiler warning. +// +// assert(document) // Non-NULL document object is expected. 
+// +// while (!STACK_EMPTY(&context, document.nodes)) { +// node yaml_node_t = POP(&context, document.nodes) +// yaml_free(node.tag) +// switch (node.type) { +// case YAML_SCALAR_NODE: +// yaml_free(node.data.scalar.value) +// break +// case YAML_SEQUENCE_NODE: +// STACK_DEL(&context, node.data.sequence.items) +// break +// case YAML_MAPPING_NODE: +// STACK_DEL(&context, node.data.mapping.pairs) +// break +// default: +// assert(0) // Should not happen. +// } +// } +// STACK_DEL(&context, document.nodes) +// +// yaml_free(document.version_directive) +// for (tag_directive = document.tag_directives.start +// tag_directive != document.tag_directives.end +// tag_directive++) { +// yaml_free(tag_directive.handle) +// yaml_free(tag_directive.prefix) +// } +// yaml_free(document.tag_directives.start) +// +// memset(document, 0, sizeof(yaml_document_t)) +//} +// +///** +// * Get a document node. +// */ +// +//YAML_DECLARE(yaml_node_t *) +//yaml_document_get_node(document *yaml_document_t, index int) +//{ +// assert(document) // Non-NULL document object is expected. +// +// if (index > 0 && document.nodes.start + index <= document.nodes.top) { +// return document.nodes.start + index - 1 +// } +// return NULL +//} +// +///** +// * Get the root object. +// */ +// +//YAML_DECLARE(yaml_node_t *) +//yaml_document_get_root_node(document *yaml_document_t) +//{ +// assert(document) // Non-NULL document object is expected. +// +// if (document.nodes.top != document.nodes.start) { +// return document.nodes.start +// } +// return NULL +//} +// +///* +// * Add a scalar node to a document. +// */ +// +//YAML_DECLARE(int) +//yaml_document_add_scalar(document *yaml_document_t, +// tag *yaml_char_t, value *yaml_char_t, length int, +// style yaml_scalar_style_t) +//{ +// struct { +// error yaml_error_type_t +// } context +// mark yaml_mark_t = { 0, 0, 0 } +// tag_copy *yaml_char_t = NULL +// value_copy *yaml_char_t = NULL +// node yaml_node_t +// +// assert(document) // Non-NULL document object is expected. +// assert(value) // Non-NULL value is expected. +// +// if (!tag) { +// tag = (yaml_char_t *)YAML_DEFAULT_SCALAR_TAG +// } +// +// if (!yaml_check_utf8(tag, strlen((char *)tag))) goto error +// tag_copy = yaml_strdup(tag) +// if (!tag_copy) goto error +// +// if (length < 0) { +// length = strlen((char *)value) +// } +// +// if (!yaml_check_utf8(value, length)) goto error +// value_copy = yaml_malloc(length+1) +// if (!value_copy) goto error +// memcpy(value_copy, value, length) +// value_copy[length] = '\0' +// +// SCALAR_NODE_INIT(node, tag_copy, value_copy, length, style, mark, mark) +// if (!PUSH(&context, document.nodes, node)) goto error +// +// return document.nodes.top - document.nodes.start +// +//error: +// yaml_free(tag_copy) +// yaml_free(value_copy) +// +// return 0 +//} +// +///* +// * Add a sequence node to a document. +// */ +// +//YAML_DECLARE(int) +//yaml_document_add_sequence(document *yaml_document_t, +// tag *yaml_char_t, style yaml_sequence_style_t) +//{ +// struct { +// error yaml_error_type_t +// } context +// mark yaml_mark_t = { 0, 0, 0 } +// tag_copy *yaml_char_t = NULL +// struct { +// start *yaml_node_item_t +// end *yaml_node_item_t +// top *yaml_node_item_t +// } items = { NULL, NULL, NULL } +// node yaml_node_t +// +// assert(document) // Non-NULL document object is expected. 
+// +// if (!tag) { +// tag = (yaml_char_t *)YAML_DEFAULT_SEQUENCE_TAG +// } +// +// if (!yaml_check_utf8(tag, strlen((char *)tag))) goto error +// tag_copy = yaml_strdup(tag) +// if (!tag_copy) goto error +// +// if (!STACK_INIT(&context, items, INITIAL_STACK_SIZE)) goto error +// +// SEQUENCE_NODE_INIT(node, tag_copy, items.start, items.end, +// style, mark, mark) +// if (!PUSH(&context, document.nodes, node)) goto error +// +// return document.nodes.top - document.nodes.start +// +//error: +// STACK_DEL(&context, items) +// yaml_free(tag_copy) +// +// return 0 +//} +// +///* +// * Add a mapping node to a document. +// */ +// +//YAML_DECLARE(int) +//yaml_document_add_mapping(document *yaml_document_t, +// tag *yaml_char_t, style yaml_mapping_style_t) +//{ +// struct { +// error yaml_error_type_t +// } context +// mark yaml_mark_t = { 0, 0, 0 } +// tag_copy *yaml_char_t = NULL +// struct { +// start *yaml_node_pair_t +// end *yaml_node_pair_t +// top *yaml_node_pair_t +// } pairs = { NULL, NULL, NULL } +// node yaml_node_t +// +// assert(document) // Non-NULL document object is expected. +// +// if (!tag) { +// tag = (yaml_char_t *)YAML_DEFAULT_MAPPING_TAG +// } +// +// if (!yaml_check_utf8(tag, strlen((char *)tag))) goto error +// tag_copy = yaml_strdup(tag) +// if (!tag_copy) goto error +// +// if (!STACK_INIT(&context, pairs, INITIAL_STACK_SIZE)) goto error +// +// MAPPING_NODE_INIT(node, tag_copy, pairs.start, pairs.end, +// style, mark, mark) +// if (!PUSH(&context, document.nodes, node)) goto error +// +// return document.nodes.top - document.nodes.start +// +//error: +// STACK_DEL(&context, pairs) +// yaml_free(tag_copy) +// +// return 0 +//} +// +///* +// * Append an item to a sequence node. +// */ +// +//YAML_DECLARE(int) +//yaml_document_append_sequence_item(document *yaml_document_t, +// sequence int, item int) +//{ +// struct { +// error yaml_error_type_t +// } context +// +// assert(document) // Non-NULL document is required. +// assert(sequence > 0 +// && document.nodes.start + sequence <= document.nodes.top) +// // Valid sequence id is required. +// assert(document.nodes.start[sequence-1].type == YAML_SEQUENCE_NODE) +// // A sequence node is required. +// assert(item > 0 && document.nodes.start + item <= document.nodes.top) +// // Valid item id is required. +// +// if (!PUSH(&context, +// document.nodes.start[sequence-1].data.sequence.items, item)) +// return 0 +// +// return 1 +//} +// +///* +// * Append a pair of a key and a value to a mapping node. +// */ +// +//YAML_DECLARE(int) +//yaml_document_append_mapping_pair(document *yaml_document_t, +// mapping int, key int, value int) +//{ +// struct { +// error yaml_error_type_t +// } context +// +// pair yaml_node_pair_t +// +// assert(document) // Non-NULL document is required. +// assert(mapping > 0 +// && document.nodes.start + mapping <= document.nodes.top) +// // Valid mapping id is required. +// assert(document.nodes.start[mapping-1].type == YAML_MAPPING_NODE) +// // A mapping node is required. +// assert(key > 0 && document.nodes.start + key <= document.nodes.top) +// // Valid key id is required. +// assert(value > 0 && document.nodes.start + value <= document.nodes.top) +// // Valid value id is required. 
+// +// pair.key = key +// pair.value = value +// +// if (!PUSH(&context, +// document.nodes.start[mapping-1].data.mapping.pairs, pair)) +// return 0 +// +// return 1 +//} +// +// diff --git a/cli/internal/yaml/decode.go b/cli/internal/yaml/decode.go new file mode 100644 index 0000000..0173b69 --- /dev/null +++ b/cli/internal/yaml/decode.go @@ -0,0 +1,1000 @@ +// +// Copyright (c) 2011-2019 Canonical Ltd +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package yaml + +import ( + "encoding" + "encoding/base64" + "fmt" + "io" + "math" + "reflect" + "strconv" + "time" +) + +// ---------------------------------------------------------------------------- +// Parser, produces a node tree out of a libyaml event stream. + +type parser struct { + parser yaml_parser_t + event yaml_event_t + doc *Node + anchors map[string]*Node + doneInit bool + textless bool +} + +func newParser(b []byte) *parser { + p := parser{} + if !yaml_parser_initialize(&p.parser) { + panic("failed to initialize YAML emitter") + } + if len(b) == 0 { + b = []byte{'\n'} + } + yaml_parser_set_input_string(&p.parser, b) + return &p +} + +func newParserFromReader(r io.Reader) *parser { + p := parser{} + if !yaml_parser_initialize(&p.parser) { + panic("failed to initialize YAML emitter") + } + yaml_parser_set_input_reader(&p.parser, r) + return &p +} + +func (p *parser) init() { + if p.doneInit { + return + } + p.anchors = make(map[string]*Node) + p.expect(yaml_STREAM_START_EVENT) + p.doneInit = true +} + +func (p *parser) destroy() { + if p.event.typ != yaml_NO_EVENT { + yaml_event_delete(&p.event) + } + yaml_parser_delete(&p.parser) +} + +// expect consumes an event from the event stream and +// checks that it's of the expected type. +func (p *parser) expect(e yaml_event_type_t) { + if p.event.typ == yaml_NO_EVENT { + if !yaml_parser_parse(&p.parser, &p.event) { + p.fail() + } + } + if p.event.typ == yaml_STREAM_END_EVENT { + failf("attempted to go past the end of stream; corrupted value?") + } + if p.event.typ != e { + p.parser.problem = fmt.Sprintf("expected %s event but got %s", e, p.event.typ) + p.fail() + } + yaml_event_delete(&p.event) + p.event.typ = yaml_NO_EVENT +} + +// peek peeks at the next event in the event stream, +// puts the results into p.event and returns the event type. +func (p *parser) peek() yaml_event_type_t { + if p.event.typ != yaml_NO_EVENT { + return p.event.typ + } + // It's curious choice from the underlying API to generally return a + // positive result on success, but on this case return true in an error + // scenario. This was the source of bugs in the past (issue #666). 
+ if !yaml_parser_parse(&p.parser, &p.event) || p.parser.error != yaml_NO_ERROR { + p.fail() + } + return p.event.typ +} + +func (p *parser) fail() { + var where string + var line int + if p.parser.context_mark.line != 0 { + line = p.parser.context_mark.line + // Scanner errors don't iterate line before returning error + if p.parser.error == yaml_SCANNER_ERROR { + line++ + } + } else if p.parser.problem_mark.line != 0 { + line = p.parser.problem_mark.line + // Scanner errors don't iterate line before returning error + if p.parser.error == yaml_SCANNER_ERROR { + line++ + } + } + if line != 0 { + where = "line " + strconv.Itoa(line) + ": " + } + var msg string + if len(p.parser.problem) > 0 { + msg = p.parser.problem + } else { + msg = "unknown problem parsing YAML content" + } + failf("%s%s", where, msg) +} + +func (p *parser) anchor(n *Node, anchor []byte) { + if anchor != nil { + n.Anchor = string(anchor) + p.anchors[n.Anchor] = n + } +} + +func (p *parser) parse() *Node { + p.init() + switch p.peek() { + case yaml_SCALAR_EVENT: + return p.scalar() + case yaml_ALIAS_EVENT: + return p.alias() + case yaml_MAPPING_START_EVENT: + return p.mapping() + case yaml_SEQUENCE_START_EVENT: + return p.sequence() + case yaml_DOCUMENT_START_EVENT: + return p.document() + case yaml_STREAM_END_EVENT: + // Happens when attempting to decode an empty buffer. + return nil + case yaml_TAIL_COMMENT_EVENT: + panic("internal error: unexpected tail comment event (please report)") + default: + panic("internal error: attempted to parse unknown event (please report): " + p.event.typ.String()) + } +} + +func (p *parser) node(kind Kind, defaultTag, tag, value string) *Node { + var style Style + if tag != "" && tag != "!" { + tag = shortTag(tag) + style = TaggedStyle + } else if defaultTag != "" { + tag = defaultTag + } else if kind == ScalarNode { + tag, _ = resolve("", value) + } + n := &Node{ + Kind: kind, + Tag: tag, + Value: value, + Style: style, + } + if !p.textless { + n.Line = p.event.start_mark.line + 1 + n.Column = p.event.start_mark.column + 1 + n.HeadComment = string(p.event.head_comment) + n.LineComment = string(p.event.line_comment) + n.FootComment = string(p.event.foot_comment) + } + return n +} + +func (p *parser) parseChild(parent *Node) *Node { + child := p.parse() + parent.Content = append(parent.Content, child) + return child +} + +func (p *parser) document() *Node { + n := p.node(DocumentNode, "", "", "") + p.doc = n + p.expect(yaml_DOCUMENT_START_EVENT) + p.parseChild(n) + if p.peek() == yaml_DOCUMENT_END_EVENT { + n.FootComment = string(p.event.foot_comment) + } + p.expect(yaml_DOCUMENT_END_EVENT) + return n +} + +func (p *parser) alias() *Node { + n := p.node(AliasNode, "", "", string(p.event.anchor)) + n.Alias = p.anchors[n.Value] + if n.Alias == nil { + failf("unknown anchor '%s' referenced", n.Value) + } + p.expect(yaml_ALIAS_EVENT) + return n +} + +func (p *parser) scalar() *Node { + var parsedStyle = p.event.scalar_style() + var nodeStyle Style + switch { + case parsedStyle&yaml_DOUBLE_QUOTED_SCALAR_STYLE != 0: + nodeStyle = DoubleQuotedStyle + case parsedStyle&yaml_SINGLE_QUOTED_SCALAR_STYLE != 0: + nodeStyle = SingleQuotedStyle + case parsedStyle&yaml_LITERAL_SCALAR_STYLE != 0: + nodeStyle = LiteralStyle + case parsedStyle&yaml_FOLDED_SCALAR_STYLE != 0: + nodeStyle = FoldedStyle + } + var nodeValue = string(p.event.value) + var nodeTag = string(p.event.tag) + var defaultTag string + if nodeStyle == 0 { + if nodeValue == "<<" { + defaultTag = mergeTag + } + } else { + defaultTag = strTag + 
} + n := p.node(ScalarNode, defaultTag, nodeTag, nodeValue) + n.Style |= nodeStyle + p.anchor(n, p.event.anchor) + p.expect(yaml_SCALAR_EVENT) + return n +} + +func (p *parser) sequence() *Node { + n := p.node(SequenceNode, seqTag, string(p.event.tag), "") + if p.event.sequence_style()&yaml_FLOW_SEQUENCE_STYLE != 0 { + n.Style |= FlowStyle + } + p.anchor(n, p.event.anchor) + p.expect(yaml_SEQUENCE_START_EVENT) + for p.peek() != yaml_SEQUENCE_END_EVENT { + p.parseChild(n) + } + n.LineComment = string(p.event.line_comment) + n.FootComment = string(p.event.foot_comment) + p.expect(yaml_SEQUENCE_END_EVENT) + return n +} + +func (p *parser) mapping() *Node { + n := p.node(MappingNode, mapTag, string(p.event.tag), "") + block := true + if p.event.mapping_style()&yaml_FLOW_MAPPING_STYLE != 0 { + block = false + n.Style |= FlowStyle + } + p.anchor(n, p.event.anchor) + p.expect(yaml_MAPPING_START_EVENT) + for p.peek() != yaml_MAPPING_END_EVENT { + k := p.parseChild(n) + if block && k.FootComment != "" { + // Must be a foot comment for the prior value when being dedented. + if len(n.Content) > 2 { + n.Content[len(n.Content)-3].FootComment = k.FootComment + k.FootComment = "" + } + } + v := p.parseChild(n) + if k.FootComment == "" && v.FootComment != "" { + k.FootComment = v.FootComment + v.FootComment = "" + } + if p.peek() == yaml_TAIL_COMMENT_EVENT { + if k.FootComment == "" { + k.FootComment = string(p.event.foot_comment) + } + p.expect(yaml_TAIL_COMMENT_EVENT) + } + } + n.LineComment = string(p.event.line_comment) + n.FootComment = string(p.event.foot_comment) + if n.Style&FlowStyle == 0 && n.FootComment != "" && len(n.Content) > 1 { + n.Content[len(n.Content)-2].FootComment = n.FootComment + n.FootComment = "" + } + p.expect(yaml_MAPPING_END_EVENT) + return n +} + +// ---------------------------------------------------------------------------- +// Decoder, unmarshals a node into a provided value. + +type decoder struct { + doc *Node + aliases map[*Node]bool + terrors []string + + stringMapType reflect.Type + generalMapType reflect.Type + + knownFields bool + uniqueKeys bool + decodeCount int + aliasCount int + aliasDepth int + + mergedFields map[interface{}]bool +} + +var ( + nodeType = reflect.TypeOf(Node{}) + durationType = reflect.TypeOf(time.Duration(0)) + stringMapType = reflect.TypeOf(map[string]interface{}{}) + generalMapType = reflect.TypeOf(map[interface{}]interface{}{}) + ifaceType = generalMapType.Elem() + timeType = reflect.TypeOf(time.Time{}) + ptrTimeType = reflect.TypeOf(&time.Time{}) +) + +func newDecoder() *decoder { + d := &decoder{ + stringMapType: stringMapType, + generalMapType: generalMapType, + uniqueKeys: true, + } + d.aliases = make(map[*Node]bool) + return d +} + +func (d *decoder) terror(n *Node, tag string, out reflect.Value) { + if n.Tag != "" { + tag = n.Tag + } + value := n.Value + if tag != seqTag && tag != mapTag { + if len(value) > 10 { + value = " `" + value[:7] + "...`" + } else { + value = " `" + value + "`" + } + } + d.terrors = append(d.terrors, fmt.Sprintf("line %d: cannot unmarshal %s%s into %s", n.Line, shortTag(tag), value, out.Type())) +} + +func (d *decoder) callUnmarshaler(n *Node, u Unmarshaler) (good bool) { + err := u.UnmarshalYAML(n) + if e, ok := err.(*TypeError); ok { + d.terrors = append(d.terrors, e.Errors...) 
+		return false
+	}
+	if err != nil {
+		fail(err)
+	}
+	return true
+}
+
+func (d *decoder) callObsoleteUnmarshaler(n *Node, u obsoleteUnmarshaler) (good bool) {
+	terrlen := len(d.terrors)
+	err := u.UnmarshalYAML(func(v interface{}) (err error) {
+		defer handleErr(&err)
+		d.unmarshal(n, reflect.ValueOf(v))
+		if len(d.terrors) > terrlen {
+			issues := d.terrors[terrlen:]
+			d.terrors = d.terrors[:terrlen]
+			return &TypeError{issues}
+		}
+		return nil
+	})
+	if e, ok := err.(*TypeError); ok {
+		d.terrors = append(d.terrors, e.Errors...)
+		return false
+	}
+	if err != nil {
+		fail(err)
+	}
+	return true
+}
+
+// d.prepare initializes and dereferences pointers and calls UnmarshalYAML
+// if a value is found to implement it.
+// It returns the initialized and dereferenced out value, whether
+// unmarshalling was already done by UnmarshalYAML, and if so whether
+// the value unmarshalled successfully.
+//
+// If n holds a null value, prepare returns before doing anything.
+func (d *decoder) prepare(n *Node, out reflect.Value) (newout reflect.Value, unmarshaled, good bool) {
+	if n.ShortTag() == nullTag {
+		return out, false, false
+	}
+	again := true
+	for again {
+		again = false
+		if out.Kind() == reflect.Ptr {
+			if out.IsNil() {
+				out.Set(reflect.New(out.Type().Elem()))
+			}
+			out = out.Elem()
+			again = true
+		}
+		if out.CanAddr() {
+			outi := out.Addr().Interface()
+			if u, ok := outi.(Unmarshaler); ok {
+				good = d.callUnmarshaler(n, u)
+				return out, true, good
+			}
+			if u, ok := outi.(obsoleteUnmarshaler); ok {
+				good = d.callObsoleteUnmarshaler(n, u)
+				return out, true, good
+			}
+		}
+	}
+	return out, false, false
+}
+
+func (d *decoder) fieldByIndex(n *Node, v reflect.Value, index []int) (field reflect.Value) {
+	if n.ShortTag() == nullTag {
+		return reflect.Value{}
+	}
+	for _, num := range index {
+		for {
+			if v.Kind() == reflect.Ptr {
+				if v.IsNil() {
+					v.Set(reflect.New(v.Type().Elem()))
+				}
+				v = v.Elem()
+				continue
+			}
+			break
+		}
+		v = v.Field(num)
+	}
+	return v
+}
+
+const (
+	// 400,000 decode operations is ~500kb of dense object declarations, or
+	// ~5kb of dense object declarations with 10000% alias expansion
+	alias_ratio_range_low = 400000
+
+	// 4,000,000 decode operations is ~5MB of dense object declarations, or
+	// ~4.5MB of dense object declarations with 10% alias expansion
+	alias_ratio_range_high = 4000000
+
+	// alias_ratio_range is the range over which we scale allowed alias ratios
+	alias_ratio_range = float64(alias_ratio_range_high - alias_ratio_range_low)
+)
+
+func allowedAliasRatio(decodeCount int) float64 {
+	switch {
+	case decodeCount <= alias_ratio_range_low:
+		// allow 99% to come from alias expansion for small-to-medium documents
+		return 0.99
+	case decodeCount >= alias_ratio_range_high:
+		// allow 10% to come from alias expansion for very large documents
+		return 0.10
+	default:
+		// scale smoothly from 99% down to 10% over the range.
+		// this maps to 396,000 - 400,000 allowed alias-driven decodes over the range.
+		// 400,000 decode operations is ~100MB of allocations in worst-case scenarios (single-item maps).
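+		// Illustrative midpoint: at decodeCount = 2,200,000 the formula below
+		// yields 0.99 - 0.89*0.5 = 0.545, i.e. roughly half of all decode
+		// operations may still come from alias expansion.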
+ return 0.99 - 0.89*(float64(decodeCount-alias_ratio_range_low)/alias_ratio_range) + } +} + +func (d *decoder) unmarshal(n *Node, out reflect.Value) (good bool) { + d.decodeCount++ + if d.aliasDepth > 0 { + d.aliasCount++ + } + if d.aliasCount > 100 && d.decodeCount > 1000 && float64(d.aliasCount)/float64(d.decodeCount) > allowedAliasRatio(d.decodeCount) { + failf("document contains excessive aliasing") + } + if out.Type() == nodeType { + out.Set(reflect.ValueOf(n).Elem()) + return true + } + switch n.Kind { + case DocumentNode: + return d.document(n, out) + case AliasNode: + return d.alias(n, out) + } + out, unmarshaled, good := d.prepare(n, out) + if unmarshaled { + return good + } + switch n.Kind { + case ScalarNode: + good = d.scalar(n, out) + case MappingNode: + good = d.mapping(n, out) + case SequenceNode: + good = d.sequence(n, out) + case 0: + if n.IsZero() { + return d.null(out) + } + fallthrough + default: + failf("cannot decode node with unknown kind %d", n.Kind) + } + return good +} + +func (d *decoder) document(n *Node, out reflect.Value) (good bool) { + if len(n.Content) == 1 { + d.doc = n + d.unmarshal(n.Content[0], out) + return true + } + return false +} + +func (d *decoder) alias(n *Node, out reflect.Value) (good bool) { + if d.aliases[n] { + // TODO this could actually be allowed in some circumstances. + failf("anchor '%s' value contains itself", n.Value) + } + d.aliases[n] = true + d.aliasDepth++ + good = d.unmarshal(n.Alias, out) + d.aliasDepth-- + delete(d.aliases, n) + return good +} + +var zeroValue reflect.Value + +func resetMap(out reflect.Value) { + for _, k := range out.MapKeys() { + out.SetMapIndex(k, zeroValue) + } +} + +func (d *decoder) null(out reflect.Value) bool { + if out.CanAddr() { + switch out.Kind() { + case reflect.Interface, reflect.Ptr, reflect.Map, reflect.Slice: + out.Set(reflect.Zero(out.Type())) + return true + } + } + return false +} + +func (d *decoder) scalar(n *Node, out reflect.Value) bool { + var tag string + var resolved interface{} + if n.indicatedString() { + tag = strTag + resolved = n.Value + } else { + tag, resolved = resolve(n.Tag, n.Value) + if tag == binaryTag { + data, err := base64.StdEncoding.DecodeString(resolved.(string)) + if err != nil { + failf("!!binary value contains invalid base64 data") + } + resolved = string(data) + } + } + if resolved == nil { + return d.null(out) + } + if resolvedv := reflect.ValueOf(resolved); out.Type() == resolvedv.Type() { + // We've resolved to exactly the type we want, so use that. + out.Set(resolvedv) + return true + } + // Perhaps we can use the value as a TextUnmarshaler to + // set its value. + if out.CanAddr() { + u, ok := out.Addr().Interface().(encoding.TextUnmarshaler) + if ok { + var text []byte + if tag == binaryTag { + text = []byte(resolved.(string)) + } else { + // We let any value be unmarshaled into TextUnmarshaler. + // That might be more lax than we'd like, but the + // TextUnmarshaler itself should bowl out any dubious values. + text = []byte(n.Value) + } + err := u.UnmarshalText(text) + if err != nil { + fail(err) + } + return true + } + } + switch out.Kind() { + case reflect.String: + if tag == binaryTag { + out.SetString(resolved.(string)) + return true + } + out.SetString(n.Value) + return true + case reflect.Interface: + out.Set(reflect.ValueOf(resolved)) + return true + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + // This used to work in v2, but it's very unfriendly. 
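+		// time.Duration targets therefore reject raw numbers below; only
+		// strings that time.ParseDuration understands (e.g. "3s", "500ms")
+		// are converted.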
+ isDuration := out.Type() == durationType + + switch resolved := resolved.(type) { + case int: + if !isDuration && !out.OverflowInt(int64(resolved)) { + out.SetInt(int64(resolved)) + return true + } + case int64: + if !isDuration && !out.OverflowInt(resolved) { + out.SetInt(resolved) + return true + } + case uint64: + if !isDuration && resolved <= math.MaxInt64 && !out.OverflowInt(int64(resolved)) { + out.SetInt(int64(resolved)) + return true + } + case float64: + if !isDuration && resolved <= math.MaxInt64 && !out.OverflowInt(int64(resolved)) { + out.SetInt(int64(resolved)) + return true + } + case string: + if out.Type() == durationType { + d, err := time.ParseDuration(resolved) + if err == nil { + out.SetInt(int64(d)) + return true + } + } + } + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + switch resolved := resolved.(type) { + case int: + if resolved >= 0 && !out.OverflowUint(uint64(resolved)) { + out.SetUint(uint64(resolved)) + return true + } + case int64: + if resolved >= 0 && !out.OverflowUint(uint64(resolved)) { + out.SetUint(uint64(resolved)) + return true + } + case uint64: + if !out.OverflowUint(uint64(resolved)) { + out.SetUint(uint64(resolved)) + return true + } + case float64: + if resolved <= math.MaxUint64 && !out.OverflowUint(uint64(resolved)) { + out.SetUint(uint64(resolved)) + return true + } + } + case reflect.Bool: + switch resolved := resolved.(type) { + case bool: + out.SetBool(resolved) + return true + case string: + // This offers some compatibility with the 1.1 spec (https://yaml.org/type/bool.html). + // It only works if explicitly attempting to unmarshal into a typed bool value. + switch resolved { + case "y", "Y", "yes", "Yes", "YES", "on", "On", "ON": + out.SetBool(true) + return true + case "n", "N", "no", "No", "NO", "off", "Off", "OFF": + out.SetBool(false) + return true + } + } + case reflect.Float32, reflect.Float64: + switch resolved := resolved.(type) { + case int: + out.SetFloat(float64(resolved)) + return true + case int64: + out.SetFloat(float64(resolved)) + return true + case uint64: + out.SetFloat(float64(resolved)) + return true + case float64: + out.SetFloat(resolved) + return true + } + case reflect.Struct: + if resolvedv := reflect.ValueOf(resolved); out.Type() == resolvedv.Type() { + out.Set(resolvedv) + return true + } + case reflect.Ptr: + panic("yaml internal error: please report the issue") + } + d.terror(n, tag, out) + return false +} + +func settableValueOf(i interface{}) reflect.Value { + v := reflect.ValueOf(i) + sv := reflect.New(v.Type()).Elem() + sv.Set(v) + return sv +} + +func (d *decoder) sequence(n *Node, out reflect.Value) (good bool) { + l := len(n.Content) + + var iface reflect.Value + switch out.Kind() { + case reflect.Slice: + out.Set(reflect.MakeSlice(out.Type(), l, l)) + case reflect.Array: + if l != out.Len() { + failf("invalid array: want %d elements but got %d", out.Len(), l) + } + case reflect.Interface: + // No type hints. Will have to use a generic sequence. 
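+		// Decode into a fresh []interface{} of the right length, then assign
+		// the result back through iface.Set once decoding finishes.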
+ iface = out + out = settableValueOf(make([]interface{}, l)) + default: + d.terror(n, seqTag, out) + return false + } + et := out.Type().Elem() + + j := 0 + for i := 0; i < l; i++ { + e := reflect.New(et).Elem() + if ok := d.unmarshal(n.Content[i], e); ok { + out.Index(j).Set(e) + j++ + } + } + if out.Kind() != reflect.Array { + out.Set(out.Slice(0, j)) + } + if iface.IsValid() { + iface.Set(out) + } + return true +} + +func (d *decoder) mapping(n *Node, out reflect.Value) (good bool) { + l := len(n.Content) + if d.uniqueKeys { + nerrs := len(d.terrors) + for i := 0; i < l; i += 2 { + ni := n.Content[i] + for j := i + 2; j < l; j += 2 { + nj := n.Content[j] + if ni.Kind == nj.Kind && ni.Value == nj.Value { + d.terrors = append(d.terrors, fmt.Sprintf("line %d: mapping key %#v already defined at line %d", nj.Line, nj.Value, ni.Line)) + } + } + } + if len(d.terrors) > nerrs { + return false + } + } + switch out.Kind() { + case reflect.Struct: + return d.mappingStruct(n, out) + case reflect.Map: + // okay + case reflect.Interface: + iface := out + if isStringMap(n) { + out = reflect.MakeMap(d.stringMapType) + } else { + out = reflect.MakeMap(d.generalMapType) + } + iface.Set(out) + default: + d.terror(n, mapTag, out) + return false + } + + outt := out.Type() + kt := outt.Key() + et := outt.Elem() + + stringMapType := d.stringMapType + generalMapType := d.generalMapType + if outt.Elem() == ifaceType { + if outt.Key().Kind() == reflect.String { + d.stringMapType = outt + } else if outt.Key() == ifaceType { + d.generalMapType = outt + } + } + + mergedFields := d.mergedFields + d.mergedFields = nil + + var mergeNode *Node + + mapIsNew := false + if out.IsNil() { + out.Set(reflect.MakeMap(outt)) + mapIsNew = true + } + for i := 0; i < l; i += 2 { + if isMerge(n.Content[i]) { + mergeNode = n.Content[i+1] + continue + } + k := reflect.New(kt).Elem() + if d.unmarshal(n.Content[i], k) { + if mergedFields != nil { + ki := k.Interface() + if mergedFields[ki] { + continue + } + mergedFields[ki] = true + } + kkind := k.Kind() + if kkind == reflect.Interface { + kkind = k.Elem().Kind() + } + if kkind == reflect.Map || kkind == reflect.Slice { + failf("invalid map key: %#v", k.Interface()) + } + e := reflect.New(et).Elem() + if d.unmarshal(n.Content[i+1], e) || n.Content[i+1].ShortTag() == nullTag && (mapIsNew || !out.MapIndex(k).IsValid()) { + out.SetMapIndex(k, e) + } + } + } + + d.mergedFields = mergedFields + if mergeNode != nil { + d.merge(n, mergeNode, out) + } + + d.stringMapType = stringMapType + d.generalMapType = generalMapType + return true +} + +func isStringMap(n *Node) bool { + if n.Kind != MappingNode { + return false + } + l := len(n.Content) + for i := 0; i < l; i += 2 { + shortTag := n.Content[i].ShortTag() + if shortTag != strTag && shortTag != mergeTag { + return false + } + } + return true +} + +func (d *decoder) mappingStruct(n *Node, out reflect.Value) (good bool) { + sinfo, err := getStructInfo(out.Type()) + if err != nil { + panic(err) + } + + var inlineMap reflect.Value + var elemType reflect.Type + if sinfo.InlineMap != -1 { + inlineMap = out.Field(sinfo.InlineMap) + elemType = inlineMap.Type().Elem() + } + + for _, index := range sinfo.InlineUnmarshalers { + field := d.fieldByIndex(n, out, index) + d.prepare(n, field) + } + + mergedFields := d.mergedFields + d.mergedFields = nil + var mergeNode *Node + var doneFields []bool + if d.uniqueKeys { + doneFields = make([]bool, len(sinfo.FieldsList)) + } + name := settableValueOf("") + l := len(n.Content) + for i := 0; i < l; i += 2 { + 
ni := n.Content[i] + if isMerge(ni) { + mergeNode = n.Content[i+1] + continue + } + if !d.unmarshal(ni, name) { + continue + } + sname := name.String() + if mergedFields != nil { + if mergedFields[sname] { + continue + } + mergedFields[sname] = true + } + if info, ok := sinfo.FieldsMap[sname]; ok { + if d.uniqueKeys { + if doneFields[info.Id] { + d.terrors = append(d.terrors, fmt.Sprintf("line %d: field %s already set in type %s", ni.Line, name.String(), out.Type())) + continue + } + doneFields[info.Id] = true + } + var field reflect.Value + if info.Inline == nil { + field = out.Field(info.Num) + } else { + field = d.fieldByIndex(n, out, info.Inline) + } + d.unmarshal(n.Content[i+1], field) + } else if sinfo.InlineMap != -1 { + if inlineMap.IsNil() { + inlineMap.Set(reflect.MakeMap(inlineMap.Type())) + } + value := reflect.New(elemType).Elem() + d.unmarshal(n.Content[i+1], value) + inlineMap.SetMapIndex(name, value) + } else if d.knownFields { + d.terrors = append(d.terrors, fmt.Sprintf("line %d: field %s not found in type %s", ni.Line, name.String(), out.Type())) + } + } + + d.mergedFields = mergedFields + if mergeNode != nil { + d.merge(n, mergeNode, out) + } + return true +} + +func failWantMap() { + failf("map merge requires map or sequence of maps as the value") +} + +func (d *decoder) merge(parent *Node, merge *Node, out reflect.Value) { + mergedFields := d.mergedFields + if mergedFields == nil { + d.mergedFields = make(map[interface{}]bool) + for i := 0; i < len(parent.Content); i += 2 { + k := reflect.New(ifaceType).Elem() + if d.unmarshal(parent.Content[i], k) { + d.mergedFields[k.Interface()] = true + } + } + } + + switch merge.Kind { + case MappingNode: + d.unmarshal(merge, out) + case AliasNode: + if merge.Alias != nil && merge.Alias.Kind != MappingNode { + failWantMap() + } + d.unmarshal(merge, out) + case SequenceNode: + for i := 0; i < len(merge.Content); i++ { + ni := merge.Content[i] + if ni.Kind == AliasNode { + if ni.Alias != nil && ni.Alias.Kind != MappingNode { + failWantMap() + } + } else if ni.Kind != MappingNode { + failWantMap() + } + d.unmarshal(ni, out) + } + default: + failWantMap() + } + + d.mergedFields = mergedFields +} + +func isMerge(n *Node) bool { + return n.Kind == ScalarNode && n.Value == "<<" && (n.Tag == "" || n.Tag == "!" || shortTag(n.Tag) == mergeTag) +} diff --git a/cli/internal/yaml/emitterc.go b/cli/internal/yaml/emitterc.go new file mode 100644 index 0000000..dde20e5 --- /dev/null +++ b/cli/internal/yaml/emitterc.go @@ -0,0 +1,2019 @@ +// +// Copyright (c) 2011-2019 Canonical Ltd +// Copyright (c) 2006-2010 Kirill Simonov +// +// Permission is hereby granted, free of charge, to any person obtaining a copy of +// this software and associated documentation files (the "Software"), to deal in +// the Software without restriction, including without limitation the rights to +// use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +// of the Software, and to permit persons to whom the Software is furnished to do +// so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +package yaml + +import ( + "bytes" + "fmt" +) + +// Flush the buffer if needed. +func flush(emitter *yaml_emitter_t) bool { + if emitter.buffer_pos+5 >= len(emitter.buffer) { + return yaml_emitter_flush(emitter) + } + return true +} + +// Put a character to the output buffer. +func put(emitter *yaml_emitter_t, value byte) bool { + if emitter.buffer_pos+5 >= len(emitter.buffer) && !yaml_emitter_flush(emitter) { + return false + } + emitter.buffer[emitter.buffer_pos] = value + emitter.buffer_pos++ + emitter.column++ + return true +} + +// Put a line break to the output buffer. +func put_break(emitter *yaml_emitter_t) bool { + if emitter.buffer_pos+5 >= len(emitter.buffer) && !yaml_emitter_flush(emitter) { + return false + } + switch emitter.line_break { + case yaml_CR_BREAK: + emitter.buffer[emitter.buffer_pos] = '\r' + emitter.buffer_pos += 1 + case yaml_LN_BREAK: + emitter.buffer[emitter.buffer_pos] = '\n' + emitter.buffer_pos += 1 + case yaml_CRLN_BREAK: + emitter.buffer[emitter.buffer_pos+0] = '\r' + emitter.buffer[emitter.buffer_pos+1] = '\n' + emitter.buffer_pos += 2 + default: + panic("unknown line break setting") + } + if emitter.column == 0 { + emitter.space_above = true + } + emitter.column = 0 + emitter.line++ + // [Go] Do this here and below and drop from everywhere else (see commented lines). + emitter.indention = true + return true +} + +// Copy a character from a string into buffer. +func write(emitter *yaml_emitter_t, s []byte, i *int) bool { + if emitter.buffer_pos+5 >= len(emitter.buffer) && !yaml_emitter_flush(emitter) { + return false + } + p := emitter.buffer_pos + w := width(s[*i]) + switch w { + case 4: + emitter.buffer[p+3] = s[*i+3] + fallthrough + case 3: + emitter.buffer[p+2] = s[*i+2] + fallthrough + case 2: + emitter.buffer[p+1] = s[*i+1] + fallthrough + case 1: + emitter.buffer[p+0] = s[*i+0] + default: + panic("unknown character width") + } + emitter.column++ + emitter.buffer_pos += w + *i += w + return true +} + +// Write a whole string into buffer. +func write_all(emitter *yaml_emitter_t, s []byte) bool { + for i := 0; i < len(s); { + if !write(emitter, s, &i) { + return false + } + } + return true +} + +// Copy a line break character from a string into buffer. +func write_break(emitter *yaml_emitter_t, s []byte, i *int) bool { + if s[*i] == '\n' { + if !put_break(emitter) { + return false + } + *i++ + } else { + if !write(emitter, s, i) { + return false + } + if emitter.column == 0 { + emitter.space_above = true + } + emitter.column = 0 + emitter.line++ + // [Go] Do this here and above and drop from everywhere else (see commented lines). + emitter.indention = true + } + return true +} + +// Set an emitter error and return false. +func yaml_emitter_set_emitter_error(emitter *yaml_emitter_t, problem string) bool { + emitter.error = yaml_EMITTER_ERROR + emitter.problem = problem + return false +} + +// Emit an event. 
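+//
+// Incoming events are queued and only pushed through the state machine once
+// yaml_emitter_need_more_events reports enough lookahead has accumulated.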
+func yaml_emitter_emit(emitter *yaml_emitter_t, event *yaml_event_t) bool { + emitter.events = append(emitter.events, *event) + for !yaml_emitter_need_more_events(emitter) { + event := &emitter.events[emitter.events_head] + if !yaml_emitter_analyze_event(emitter, event) { + return false + } + if !yaml_emitter_state_machine(emitter, event) { + return false + } + yaml_event_delete(event) + emitter.events_head++ + } + return true +} + +// Check if we need to accumulate more events before emitting. +// +// We accumulate extra +// - 1 event for DOCUMENT-START +// - 2 events for SEQUENCE-START +// - 3 events for MAPPING-START +func yaml_emitter_need_more_events(emitter *yaml_emitter_t) bool { + if emitter.events_head == len(emitter.events) { + return true + } + var accumulate int + switch emitter.events[emitter.events_head].typ { + case yaml_DOCUMENT_START_EVENT: + accumulate = 1 + break + case yaml_SEQUENCE_START_EVENT: + accumulate = 2 + break + case yaml_MAPPING_START_EVENT: + accumulate = 3 + break + default: + return false + } + if len(emitter.events)-emitter.events_head > accumulate { + return false + } + var level int + for i := emitter.events_head; i < len(emitter.events); i++ { + switch emitter.events[i].typ { + case yaml_STREAM_START_EVENT, yaml_DOCUMENT_START_EVENT, yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT: + level++ + case yaml_STREAM_END_EVENT, yaml_DOCUMENT_END_EVENT, yaml_SEQUENCE_END_EVENT, yaml_MAPPING_END_EVENT: + level-- + } + if level == 0 { + return false + } + } + return true +} + +// Append a directive to the directives stack. +func yaml_emitter_append_tag_directive(emitter *yaml_emitter_t, value *yaml_tag_directive_t, allow_duplicates bool) bool { + for i := 0; i < len(emitter.tag_directives); i++ { + if bytes.Equal(value.handle, emitter.tag_directives[i].handle) { + if allow_duplicates { + return true + } + return yaml_emitter_set_emitter_error(emitter, "duplicate %TAG directive") + } + } + + // [Go] Do we actually need to copy this given garbage collection + // and the lack of deallocating destructors? + tag_copy := yaml_tag_directive_t{ + handle: make([]byte, len(value.handle)), + prefix: make([]byte, len(value.prefix)), + } + copy(tag_copy.handle, value.handle) + copy(tag_copy.prefix, value.prefix) + emitter.tag_directives = append(emitter.tag_directives, tag_copy) + return true +} + +// Increase the indentation level. +func yaml_emitter_increase_indent(emitter *yaml_emitter_t, flow, indentless bool) bool { + emitter.indents = append(emitter.indents, emitter.indent) + if emitter.indent < 0 { + if flow { + emitter.indent = emitter.best_indent + } else { + emitter.indent = 0 + } + } else if !indentless { + // [Go] This was changed so that indentations are more regular. + if emitter.states[len(emitter.states)-1] == yaml_EMIT_BLOCK_SEQUENCE_ITEM_STATE { + // The first indent inside a sequence will just skip the "- " indicator. + emitter.indent += 2 + } else { + // Everything else aligns to the chosen indentation. + emitter.indent = emitter.best_indent * ((emitter.indent + emitter.best_indent) / emitter.best_indent) + } + } + return true +} + +// State dispatcher. 
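+//
+// Dispatches on emitter.state: each handler writes output for the event and
+// installs the next state, mirroring libyaml's emitter automaton.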
+func yaml_emitter_state_machine(emitter *yaml_emitter_t, event *yaml_event_t) bool { + switch emitter.state { + default: + case yaml_EMIT_STREAM_START_STATE: + return yaml_emitter_emit_stream_start(emitter, event) + + case yaml_EMIT_FIRST_DOCUMENT_START_STATE: + return yaml_emitter_emit_document_start(emitter, event, true) + + case yaml_EMIT_DOCUMENT_START_STATE: + return yaml_emitter_emit_document_start(emitter, event, false) + + case yaml_EMIT_DOCUMENT_CONTENT_STATE: + return yaml_emitter_emit_document_content(emitter, event) + + case yaml_EMIT_DOCUMENT_END_STATE: + return yaml_emitter_emit_document_end(emitter, event) + + case yaml_EMIT_FLOW_SEQUENCE_FIRST_ITEM_STATE: + return yaml_emitter_emit_flow_sequence_item(emitter, event, true, false) + + case yaml_EMIT_FLOW_SEQUENCE_TRAIL_ITEM_STATE: + return yaml_emitter_emit_flow_sequence_item(emitter, event, false, true) + + case yaml_EMIT_FLOW_SEQUENCE_ITEM_STATE: + return yaml_emitter_emit_flow_sequence_item(emitter, event, false, false) + + case yaml_EMIT_FLOW_MAPPING_FIRST_KEY_STATE: + return yaml_emitter_emit_flow_mapping_key(emitter, event, true, false) + + case yaml_EMIT_FLOW_MAPPING_TRAIL_KEY_STATE: + return yaml_emitter_emit_flow_mapping_key(emitter, event, false, true) + + case yaml_EMIT_FLOW_MAPPING_KEY_STATE: + return yaml_emitter_emit_flow_mapping_key(emitter, event, false, false) + + case yaml_EMIT_FLOW_MAPPING_SIMPLE_VALUE_STATE: + return yaml_emitter_emit_flow_mapping_value(emitter, event, true) + + case yaml_EMIT_FLOW_MAPPING_VALUE_STATE: + return yaml_emitter_emit_flow_mapping_value(emitter, event, false) + + case yaml_EMIT_BLOCK_SEQUENCE_FIRST_ITEM_STATE: + return yaml_emitter_emit_block_sequence_item(emitter, event, true) + + case yaml_EMIT_BLOCK_SEQUENCE_ITEM_STATE: + return yaml_emitter_emit_block_sequence_item(emitter, event, false) + + case yaml_EMIT_BLOCK_MAPPING_FIRST_KEY_STATE: + return yaml_emitter_emit_block_mapping_key(emitter, event, true) + + case yaml_EMIT_BLOCK_MAPPING_KEY_STATE: + return yaml_emitter_emit_block_mapping_key(emitter, event, false) + + case yaml_EMIT_BLOCK_MAPPING_SIMPLE_VALUE_STATE: + return yaml_emitter_emit_block_mapping_value(emitter, event, true) + + case yaml_EMIT_BLOCK_MAPPING_VALUE_STATE: + return yaml_emitter_emit_block_mapping_value(emitter, event, false) + + case yaml_EMIT_END_STATE: + return yaml_emitter_set_emitter_error(emitter, "expected nothing after STREAM-END") + } + panic("invalid emitter state") +} + +// Expect STREAM-START. 
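+//
+// Also normalizes emitter settings: best_indent falls back to 2 outside the
+// 2..9 range, a too-small best_width becomes 80 (negative means unlimited),
+// and an unset line break becomes '\n'.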
+func yaml_emitter_emit_stream_start(emitter *yaml_emitter_t, event *yaml_event_t) bool { + if event.typ != yaml_STREAM_START_EVENT { + return yaml_emitter_set_emitter_error(emitter, "expected STREAM-START") + } + if emitter.encoding == yaml_ANY_ENCODING { + emitter.encoding = event.encoding + if emitter.encoding == yaml_ANY_ENCODING { + emitter.encoding = yaml_UTF8_ENCODING + } + } + if emitter.best_indent < 2 || emitter.best_indent > 9 { + emitter.best_indent = 2 + } + if emitter.best_width >= 0 && emitter.best_width <= emitter.best_indent*2 { + emitter.best_width = 80 + } + if emitter.best_width < 0 { + emitter.best_width = 1<<31 - 1 + } + if emitter.line_break == yaml_ANY_BREAK { + emitter.line_break = yaml_LN_BREAK + } + + emitter.indent = -1 + emitter.line = 0 + emitter.column = 0 + emitter.whitespace = true + emitter.indention = true + emitter.space_above = true + emitter.foot_indent = -1 + + if emitter.encoding != yaml_UTF8_ENCODING { + if !yaml_emitter_write_bom(emitter) { + return false + } + } + emitter.state = yaml_EMIT_FIRST_DOCUMENT_START_STATE + return true +} + +// Expect DOCUMENT-START or STREAM-END. +func yaml_emitter_emit_document_start(emitter *yaml_emitter_t, event *yaml_event_t, first bool) bool { + + if event.typ == yaml_DOCUMENT_START_EVENT { + + if event.version_directive != nil { + if !yaml_emitter_analyze_version_directive(emitter, event.version_directive) { + return false + } + } + + for i := 0; i < len(event.tag_directives); i++ { + tag_directive := &event.tag_directives[i] + if !yaml_emitter_analyze_tag_directive(emitter, tag_directive) { + return false + } + if !yaml_emitter_append_tag_directive(emitter, tag_directive, false) { + return false + } + } + + for i := 0; i < len(default_tag_directives); i++ { + tag_directive := &default_tag_directives[i] + if !yaml_emitter_append_tag_directive(emitter, tag_directive, true) { + return false + } + } + + implicit := event.implicit + if !first || emitter.canonical { + implicit = false + } + + if emitter.open_ended && (event.version_directive != nil || len(event.tag_directives) > 0) { + if !yaml_emitter_write_indicator(emitter, []byte("..."), true, false, false) { + return false + } + if !yaml_emitter_write_indent(emitter) { + return false + } + } + + if event.version_directive != nil { + implicit = false + if !yaml_emitter_write_indicator(emitter, []byte("%YAML"), true, false, false) { + return false + } + if !yaml_emitter_write_indicator(emitter, []byte("1.1"), true, false, false) { + return false + } + if !yaml_emitter_write_indent(emitter) { + return false + } + } + + if len(event.tag_directives) > 0 { + implicit = false + for i := 0; i < len(event.tag_directives); i++ { + tag_directive := &event.tag_directives[i] + if !yaml_emitter_write_indicator(emitter, []byte("%TAG"), true, false, false) { + return false + } + if !yaml_emitter_write_tag_handle(emitter, tag_directive.handle) { + return false + } + if !yaml_emitter_write_tag_content(emitter, tag_directive.prefix, true) { + return false + } + if !yaml_emitter_write_indent(emitter) { + return false + } + } + } + + if yaml_emitter_check_empty_document(emitter) { + implicit = false + } + if !implicit { + if !yaml_emitter_write_indent(emitter) { + return false + } + if !yaml_emitter_write_indicator(emitter, []byte("---"), true, false, false) { + return false + } + if emitter.canonical || true { + if !yaml_emitter_write_indent(emitter) { + return false + } + } + } + + if len(emitter.head_comment) > 0 { + if !yaml_emitter_process_head_comment(emitter) { + return false 
+ } + if !put_break(emitter) { + return false + } + } + + emitter.state = yaml_EMIT_DOCUMENT_CONTENT_STATE + return true + } + + if event.typ == yaml_STREAM_END_EVENT { + if emitter.open_ended { + if !yaml_emitter_write_indicator(emitter, []byte("..."), true, false, false) { + return false + } + if !yaml_emitter_write_indent(emitter) { + return false + } + } + if !yaml_emitter_flush(emitter) { + return false + } + emitter.state = yaml_EMIT_END_STATE + return true + } + + return yaml_emitter_set_emitter_error(emitter, "expected DOCUMENT-START or STREAM-END") +} + +// Expect the root node. +func yaml_emitter_emit_document_content(emitter *yaml_emitter_t, event *yaml_event_t) bool { + emitter.states = append(emitter.states, yaml_EMIT_DOCUMENT_END_STATE) + + if !yaml_emitter_process_head_comment(emitter) { + return false + } + if !yaml_emitter_emit_node(emitter, event, true, false, false, false) { + return false + } + if !yaml_emitter_process_line_comment(emitter) { + return false + } + if !yaml_emitter_process_foot_comment(emitter) { + return false + } + return true +} + +// Expect DOCUMENT-END. +func yaml_emitter_emit_document_end(emitter *yaml_emitter_t, event *yaml_event_t) bool { + if event.typ != yaml_DOCUMENT_END_EVENT { + return yaml_emitter_set_emitter_error(emitter, "expected DOCUMENT-END") + } + // [Go] Force document foot separation. + emitter.foot_indent = 0 + if !yaml_emitter_process_foot_comment(emitter) { + return false + } + emitter.foot_indent = -1 + if !yaml_emitter_write_indent(emitter) { + return false + } + if !event.implicit { + // [Go] Allocate the slice elsewhere. + if !yaml_emitter_write_indicator(emitter, []byte("..."), true, false, false) { + return false + } + if !yaml_emitter_write_indent(emitter) { + return false + } + } + if !yaml_emitter_flush(emitter) { + return false + } + emitter.state = yaml_EMIT_DOCUMENT_START_STATE + emitter.tag_directives = emitter.tag_directives[:0] + return true +} + +// Expect a flow item node. 
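+//
+// first marks the item right after '['; trail means the separating ',' was
+// already written earlier so that it lands ahead of any pending comments.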
+func yaml_emitter_emit_flow_sequence_item(emitter *yaml_emitter_t, event *yaml_event_t, first, trail bool) bool { + if first { + if !yaml_emitter_write_indicator(emitter, []byte{'['}, true, true, false) { + return false + } + if !yaml_emitter_increase_indent(emitter, true, false) { + return false + } + emitter.flow_level++ + } + + if event.typ == yaml_SEQUENCE_END_EVENT { + if emitter.canonical && !first && !trail { + if !yaml_emitter_write_indicator(emitter, []byte{','}, false, false, false) { + return false + } + } + emitter.flow_level-- + emitter.indent = emitter.indents[len(emitter.indents)-1] + emitter.indents = emitter.indents[:len(emitter.indents)-1] + if emitter.column == 0 || emitter.canonical && !first { + if !yaml_emitter_write_indent(emitter) { + return false + } + } + if !yaml_emitter_write_indicator(emitter, []byte{']'}, false, false, false) { + return false + } + if !yaml_emitter_process_line_comment(emitter) { + return false + } + if !yaml_emitter_process_foot_comment(emitter) { + return false + } + emitter.state = emitter.states[len(emitter.states)-1] + emitter.states = emitter.states[:len(emitter.states)-1] + + return true + } + + if !first && !trail { + if !yaml_emitter_write_indicator(emitter, []byte{','}, false, false, false) { + return false + } + } + + if !yaml_emitter_process_head_comment(emitter) { + return false + } + if emitter.column == 0 { + if !yaml_emitter_write_indent(emitter) { + return false + } + } + + if emitter.canonical || emitter.column > emitter.best_width { + if !yaml_emitter_write_indent(emitter) { + return false + } + } + if len(emitter.line_comment)+len(emitter.foot_comment)+len(emitter.tail_comment) > 0 { + emitter.states = append(emitter.states, yaml_EMIT_FLOW_SEQUENCE_TRAIL_ITEM_STATE) + } else { + emitter.states = append(emitter.states, yaml_EMIT_FLOW_SEQUENCE_ITEM_STATE) + } + if !yaml_emitter_emit_node(emitter, event, false, true, false, false) { + return false + } + if len(emitter.line_comment)+len(emitter.foot_comment)+len(emitter.tail_comment) > 0 { + if !yaml_emitter_write_indicator(emitter, []byte{','}, false, false, false) { + return false + } + } + if !yaml_emitter_process_line_comment(emitter) { + return false + } + if !yaml_emitter_process_foot_comment(emitter) { + return false + } + return true +} + +// Expect a flow key node. 
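+//
+// Keys that pass yaml_emitter_check_simple_key are emitted inline; anything
+// else falls back to the explicit "? key" form.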
+func yaml_emitter_emit_flow_mapping_key(emitter *yaml_emitter_t, event *yaml_event_t, first, trail bool) bool { + if first { + if !yaml_emitter_write_indicator(emitter, []byte{'{'}, true, true, false) { + return false + } + if !yaml_emitter_increase_indent(emitter, true, false) { + return false + } + emitter.flow_level++ + } + + if event.typ == yaml_MAPPING_END_EVENT { + if (emitter.canonical || len(emitter.head_comment)+len(emitter.foot_comment)+len(emitter.tail_comment) > 0) && !first && !trail { + if !yaml_emitter_write_indicator(emitter, []byte{','}, false, false, false) { + return false + } + } + if !yaml_emitter_process_head_comment(emitter) { + return false + } + emitter.flow_level-- + emitter.indent = emitter.indents[len(emitter.indents)-1] + emitter.indents = emitter.indents[:len(emitter.indents)-1] + if emitter.canonical && !first { + if !yaml_emitter_write_indent(emitter) { + return false + } + } + if !yaml_emitter_write_indicator(emitter, []byte{'}'}, false, false, false) { + return false + } + if !yaml_emitter_process_line_comment(emitter) { + return false + } + if !yaml_emitter_process_foot_comment(emitter) { + return false + } + emitter.state = emitter.states[len(emitter.states)-1] + emitter.states = emitter.states[:len(emitter.states)-1] + return true + } + + if !first && !trail { + if !yaml_emitter_write_indicator(emitter, []byte{','}, false, false, false) { + return false + } + } + + if !yaml_emitter_process_head_comment(emitter) { + return false + } + + if emitter.column == 0 { + if !yaml_emitter_write_indent(emitter) { + return false + } + } + + if emitter.canonical || emitter.column > emitter.best_width { + if !yaml_emitter_write_indent(emitter) { + return false + } + } + + if !emitter.canonical && yaml_emitter_check_simple_key(emitter) { + emitter.states = append(emitter.states, yaml_EMIT_FLOW_MAPPING_SIMPLE_VALUE_STATE) + return yaml_emitter_emit_node(emitter, event, false, false, true, true) + } + if !yaml_emitter_write_indicator(emitter, []byte{'?'}, true, false, false) { + return false + } + emitter.states = append(emitter.states, yaml_EMIT_FLOW_MAPPING_VALUE_STATE) + return yaml_emitter_emit_node(emitter, event, false, false, true, false) +} + +// Expect a flow value node. +func yaml_emitter_emit_flow_mapping_value(emitter *yaml_emitter_t, event *yaml_event_t, simple bool) bool { + if simple { + if !yaml_emitter_write_indicator(emitter, []byte{':'}, false, false, false) { + return false + } + } else { + if emitter.canonical || emitter.column > emitter.best_width { + if !yaml_emitter_write_indent(emitter) { + return false + } + } + if !yaml_emitter_write_indicator(emitter, []byte{':'}, true, false, false) { + return false + } + } + if len(emitter.line_comment)+len(emitter.foot_comment)+len(emitter.tail_comment) > 0 { + emitter.states = append(emitter.states, yaml_EMIT_FLOW_MAPPING_TRAIL_KEY_STATE) + } else { + emitter.states = append(emitter.states, yaml_EMIT_FLOW_MAPPING_KEY_STATE) + } + if !yaml_emitter_emit_node(emitter, event, false, false, true, false) { + return false + } + if len(emitter.line_comment)+len(emitter.foot_comment)+len(emitter.tail_comment) > 0 { + if !yaml_emitter_write_indicator(emitter, []byte{','}, false, false, false) { + return false + } + } + if !yaml_emitter_process_line_comment(emitter) { + return false + } + if !yaml_emitter_process_foot_comment(emitter) { + return false + } + return true +} + +// Expect a block item node. 
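+//
+// first only triggers the initial indent increase; each item is then written
+// as an indented "- " entry.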
+func yaml_emitter_emit_block_sequence_item(emitter *yaml_emitter_t, event *yaml_event_t, first bool) bool { + if first { + if !yaml_emitter_increase_indent(emitter, false, false) { + return false + } + } + if event.typ == yaml_SEQUENCE_END_EVENT { + emitter.indent = emitter.indents[len(emitter.indents)-1] + emitter.indents = emitter.indents[:len(emitter.indents)-1] + emitter.state = emitter.states[len(emitter.states)-1] + emitter.states = emitter.states[:len(emitter.states)-1] + return true + } + if !yaml_emitter_process_head_comment(emitter) { + return false + } + if !yaml_emitter_write_indent(emitter) { + return false + } + if !yaml_emitter_write_indicator(emitter, []byte{'-'}, true, false, true) { + return false + } + emitter.states = append(emitter.states, yaml_EMIT_BLOCK_SEQUENCE_ITEM_STATE) + if !yaml_emitter_emit_node(emitter, event, false, true, false, false) { + return false + } + if !yaml_emitter_process_line_comment(emitter) { + return false + } + if !yaml_emitter_process_foot_comment(emitter) { + return false + } + return true +} + +// Expect a block key node. +func yaml_emitter_emit_block_mapping_key(emitter *yaml_emitter_t, event *yaml_event_t, first bool) bool { + if first { + if !yaml_emitter_increase_indent(emitter, false, false) { + return false + } + } + if !yaml_emitter_process_head_comment(emitter) { + return false + } + if event.typ == yaml_MAPPING_END_EVENT { + emitter.indent = emitter.indents[len(emitter.indents)-1] + emitter.indents = emitter.indents[:len(emitter.indents)-1] + emitter.state = emitter.states[len(emitter.states)-1] + emitter.states = emitter.states[:len(emitter.states)-1] + return true + } + if !yaml_emitter_write_indent(emitter) { + return false + } + if len(emitter.line_comment) > 0 { + // [Go] A line comment was provided for the key. That's unusual as the + // scanner associates line comments with the value. Either way, + // save the line comment and render it appropriately later. + emitter.key_line_comment = emitter.line_comment + emitter.line_comment = nil + } + if yaml_emitter_check_simple_key(emitter) { + emitter.states = append(emitter.states, yaml_EMIT_BLOCK_MAPPING_SIMPLE_VALUE_STATE) + return yaml_emitter_emit_node(emitter, event, false, false, true, true) + } + if !yaml_emitter_write_indicator(emitter, []byte{'?'}, true, false, true) { + return false + } + emitter.states = append(emitter.states, yaml_EMIT_BLOCK_MAPPING_VALUE_STATE) + return yaml_emitter_emit_node(emitter, event, false, false, true, false) +} + +// Expect a block value node. +func yaml_emitter_emit_block_mapping_value(emitter *yaml_emitter_t, event *yaml_event_t, simple bool) bool { + if simple { + if !yaml_emitter_write_indicator(emitter, []byte{':'}, false, false, false) { + return false + } + } else { + if !yaml_emitter_write_indent(emitter) { + return false + } + if !yaml_emitter_write_indicator(emitter, []byte{':'}, true, false, true) { + return false + } + } + if len(emitter.key_line_comment) > 0 { + // [Go] Line comments are generally associated with the value, but when there's + // no value on the same line as a mapping key they end up attached to the + // key itself. + if event.typ == yaml_SCALAR_EVENT { + if len(emitter.line_comment) == 0 { + // A scalar is coming and it has no line comments by itself yet, + // so just let it handle the line comment as usual. If it has a + // line comment, we can't have both so the one from the key is lost. 
+ emitter.line_comment = emitter.key_line_comment + emitter.key_line_comment = nil + } + } else if event.sequence_style() != yaml_FLOW_SEQUENCE_STYLE && (event.typ == yaml_MAPPING_START_EVENT || event.typ == yaml_SEQUENCE_START_EVENT) { + // An indented block follows, so write the comment right now. + emitter.line_comment, emitter.key_line_comment = emitter.key_line_comment, emitter.line_comment + if !yaml_emitter_process_line_comment(emitter) { + return false + } + emitter.line_comment, emitter.key_line_comment = emitter.key_line_comment, emitter.line_comment + } + } + emitter.states = append(emitter.states, yaml_EMIT_BLOCK_MAPPING_KEY_STATE) + if !yaml_emitter_emit_node(emitter, event, false, false, true, false) { + return false + } + if !yaml_emitter_process_line_comment(emitter) { + return false + } + if !yaml_emitter_process_foot_comment(emitter) { + return false + } + return true +} + +func yaml_emitter_silent_nil_event(emitter *yaml_emitter_t, event *yaml_event_t) bool { + return event.typ == yaml_SCALAR_EVENT && event.implicit && !emitter.canonical && len(emitter.scalar_data.value) == 0 +} + +// Expect a node. +func yaml_emitter_emit_node(emitter *yaml_emitter_t, event *yaml_event_t, + root bool, sequence bool, mapping bool, simple_key bool) bool { + + emitter.root_context = root + emitter.sequence_context = sequence + emitter.mapping_context = mapping + emitter.simple_key_context = simple_key + + switch event.typ { + case yaml_ALIAS_EVENT: + return yaml_emitter_emit_alias(emitter, event) + case yaml_SCALAR_EVENT: + return yaml_emitter_emit_scalar(emitter, event) + case yaml_SEQUENCE_START_EVENT: + return yaml_emitter_emit_sequence_start(emitter, event) + case yaml_MAPPING_START_EVENT: + return yaml_emitter_emit_mapping_start(emitter, event) + default: + return yaml_emitter_set_emitter_error(emitter, + fmt.Sprintf("expected SCALAR, SEQUENCE-START, MAPPING-START, or ALIAS, but got %v", event.typ)) + } +} + +// Expect ALIAS. +func yaml_emitter_emit_alias(emitter *yaml_emitter_t, event *yaml_event_t) bool { + if !yaml_emitter_process_anchor(emitter) { + return false + } + emitter.state = emitter.states[len(emitter.states)-1] + emitter.states = emitter.states[:len(emitter.states)-1] + return true +} + +// Expect SCALAR. +func yaml_emitter_emit_scalar(emitter *yaml_emitter_t, event *yaml_event_t) bool { + if !yaml_emitter_select_scalar_style(emitter, event) { + return false + } + if !yaml_emitter_process_anchor(emitter) { + return false + } + if !yaml_emitter_process_tag(emitter) { + return false + } + if !yaml_emitter_increase_indent(emitter, true, false) { + return false + } + if !yaml_emitter_process_scalar(emitter) { + return false + } + emitter.indent = emitter.indents[len(emitter.indents)-1] + emitter.indents = emitter.indents[:len(emitter.indents)-1] + emitter.state = emitter.states[len(emitter.states)-1] + emitter.states = emitter.states[:len(emitter.states)-1] + return true +} + +// Expect SEQUENCE-START. +func yaml_emitter_emit_sequence_start(emitter *yaml_emitter_t, event *yaml_event_t) bool { + if !yaml_emitter_process_anchor(emitter) { + return false + } + if !yaml_emitter_process_tag(emitter) { + return false + } + if emitter.flow_level > 0 || emitter.canonical || event.sequence_style() == yaml_FLOW_SEQUENCE_STYLE || + yaml_emitter_check_empty_sequence(emitter) { + emitter.state = yaml_EMIT_FLOW_SEQUENCE_FIRST_ITEM_STATE + } else { + emitter.state = yaml_EMIT_BLOCK_SEQUENCE_FIRST_ITEM_STATE + } + return true +} + +// Expect MAPPING-START. 
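+//
+// Flow style ({...}) is chosen inside flow collections, in canonical mode,
+// when the event requests it, or when the mapping is empty; block style is
+// used otherwise.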
+func yaml_emitter_emit_mapping_start(emitter *yaml_emitter_t, event *yaml_event_t) bool { + if !yaml_emitter_process_anchor(emitter) { + return false + } + if !yaml_emitter_process_tag(emitter) { + return false + } + if emitter.flow_level > 0 || emitter.canonical || event.mapping_style() == yaml_FLOW_MAPPING_STYLE || + yaml_emitter_check_empty_mapping(emitter) { + emitter.state = yaml_EMIT_FLOW_MAPPING_FIRST_KEY_STATE + } else { + emitter.state = yaml_EMIT_BLOCK_MAPPING_FIRST_KEY_STATE + } + return true +} + +// Check if the document content is an empty scalar. +func yaml_emitter_check_empty_document(emitter *yaml_emitter_t) bool { + return false // [Go] Huh? +} + +// Check if the next events represent an empty sequence. +func yaml_emitter_check_empty_sequence(emitter *yaml_emitter_t) bool { + if len(emitter.events)-emitter.events_head < 2 { + return false + } + return emitter.events[emitter.events_head].typ == yaml_SEQUENCE_START_EVENT && + emitter.events[emitter.events_head+1].typ == yaml_SEQUENCE_END_EVENT +} + +// Check if the next events represent an empty mapping. +func yaml_emitter_check_empty_mapping(emitter *yaml_emitter_t) bool { + if len(emitter.events)-emitter.events_head < 2 { + return false + } + return emitter.events[emitter.events_head].typ == yaml_MAPPING_START_EVENT && + emitter.events[emitter.events_head+1].typ == yaml_MAPPING_END_EVENT +} + +// Check if the next node can be expressed as a simple key. +func yaml_emitter_check_simple_key(emitter *yaml_emitter_t) bool { + length := 0 + switch emitter.events[emitter.events_head].typ { + case yaml_ALIAS_EVENT: + length += len(emitter.anchor_data.anchor) + case yaml_SCALAR_EVENT: + if emitter.scalar_data.multiline { + return false + } + length += len(emitter.anchor_data.anchor) + + len(emitter.tag_data.handle) + + len(emitter.tag_data.suffix) + + len(emitter.scalar_data.value) + case yaml_SEQUENCE_START_EVENT: + if !yaml_emitter_check_empty_sequence(emitter) { + return false + } + length += len(emitter.anchor_data.anchor) + + len(emitter.tag_data.handle) + + len(emitter.tag_data.suffix) + case yaml_MAPPING_START_EVENT: + if !yaml_emitter_check_empty_mapping(emitter) { + return false + } + length += len(emitter.anchor_data.anchor) + + len(emitter.tag_data.handle) + + len(emitter.tag_data.suffix) + default: + return false + } + return length <= 128 +} + +// Determine an acceptable scalar style. 
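+//
+// The requested style is downgraded as needed: plain falls back to
+// single-quoted when it would be ambiguous, and single-quoted, literal, and
+// folded all fall back to double-quoted when the analyzed value disallows
+// them.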
+func yaml_emitter_select_scalar_style(emitter *yaml_emitter_t, event *yaml_event_t) bool { + + no_tag := len(emitter.tag_data.handle) == 0 && len(emitter.tag_data.suffix) == 0 + if no_tag && !event.implicit && !event.quoted_implicit { + return yaml_emitter_set_emitter_error(emitter, "neither tag nor implicit flags are specified") + } + + style := event.scalar_style() + if style == yaml_ANY_SCALAR_STYLE { + style = yaml_PLAIN_SCALAR_STYLE + } + if emitter.canonical { + style = yaml_DOUBLE_QUOTED_SCALAR_STYLE + } + if emitter.simple_key_context && emitter.scalar_data.multiline { + style = yaml_DOUBLE_QUOTED_SCALAR_STYLE + } + + if style == yaml_PLAIN_SCALAR_STYLE { + if emitter.flow_level > 0 && !emitter.scalar_data.flow_plain_allowed || + emitter.flow_level == 0 && !emitter.scalar_data.block_plain_allowed { + style = yaml_SINGLE_QUOTED_SCALAR_STYLE + } + if len(emitter.scalar_data.value) == 0 && (emitter.flow_level > 0 || emitter.simple_key_context) { + style = yaml_SINGLE_QUOTED_SCALAR_STYLE + } + if no_tag && !event.implicit { + style = yaml_SINGLE_QUOTED_SCALAR_STYLE + } + } + if style == yaml_SINGLE_QUOTED_SCALAR_STYLE { + if !emitter.scalar_data.single_quoted_allowed { + style = yaml_DOUBLE_QUOTED_SCALAR_STYLE + } + } + if style == yaml_LITERAL_SCALAR_STYLE || style == yaml_FOLDED_SCALAR_STYLE { + if !emitter.scalar_data.block_allowed || emitter.flow_level > 0 || emitter.simple_key_context { + style = yaml_DOUBLE_QUOTED_SCALAR_STYLE + } + } + + if no_tag && !event.quoted_implicit && style != yaml_PLAIN_SCALAR_STYLE { + emitter.tag_data.handle = []byte{'!'} + } + emitter.scalar_data.style = style + return true +} + +// Write an anchor. +func yaml_emitter_process_anchor(emitter *yaml_emitter_t) bool { + if emitter.anchor_data.anchor == nil { + return true + } + c := []byte{'&'} + if emitter.anchor_data.alias { + c[0] = '*' + } + if !yaml_emitter_write_indicator(emitter, c, true, false, false) { + return false + } + return yaml_emitter_write_anchor(emitter, emitter.anchor_data.anchor) +} + +// Write a tag. +func yaml_emitter_process_tag(emitter *yaml_emitter_t) bool { + if len(emitter.tag_data.handle) == 0 && len(emitter.tag_data.suffix) == 0 { + return true + } + if len(emitter.tag_data.handle) > 0 { + if !yaml_emitter_write_tag_handle(emitter, emitter.tag_data.handle) { + return false + } + if len(emitter.tag_data.suffix) > 0 { + if !yaml_emitter_write_tag_content(emitter, emitter.tag_data.suffix, false) { + return false + } + } + } else { + // [Go] Allocate these slices elsewhere. + if !yaml_emitter_write_indicator(emitter, []byte("!<"), true, false, false) { + return false + } + if !yaml_emitter_write_tag_content(emitter, emitter.tag_data.suffix, false) { + return false + } + if !yaml_emitter_write_indicator(emitter, []byte{'>'}, false, false, false) { + return false + } + } + return true +} + +// Write a scalar. 
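+//
+// Delegates to the style-specific writer chosen earlier by
+// yaml_emitter_select_scalar_style.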
+func yaml_emitter_process_scalar(emitter *yaml_emitter_t) bool { + switch emitter.scalar_data.style { + case yaml_PLAIN_SCALAR_STYLE: + return yaml_emitter_write_plain_scalar(emitter, emitter.scalar_data.value, !emitter.simple_key_context) + + case yaml_SINGLE_QUOTED_SCALAR_STYLE: + return yaml_emitter_write_single_quoted_scalar(emitter, emitter.scalar_data.value, !emitter.simple_key_context) + + case yaml_DOUBLE_QUOTED_SCALAR_STYLE: + return yaml_emitter_write_double_quoted_scalar(emitter, emitter.scalar_data.value, !emitter.simple_key_context) + + case yaml_LITERAL_SCALAR_STYLE: + return yaml_emitter_write_literal_scalar(emitter, emitter.scalar_data.value) + + case yaml_FOLDED_SCALAR_STYLE: + return yaml_emitter_write_folded_scalar(emitter, emitter.scalar_data.value) + } + panic("unknown scalar style") +} + +// Write a head comment. +func yaml_emitter_process_head_comment(emitter *yaml_emitter_t) bool { + if len(emitter.tail_comment) > 0 { + if !yaml_emitter_write_indent(emitter) { + return false + } + if !yaml_emitter_write_comment(emitter, emitter.tail_comment) { + return false + } + emitter.tail_comment = emitter.tail_comment[:0] + emitter.foot_indent = emitter.indent + if emitter.foot_indent < 0 { + emitter.foot_indent = 0 + } + } + + if len(emitter.head_comment) == 0 { + return true + } + if !yaml_emitter_write_indent(emitter) { + return false + } + if !yaml_emitter_write_comment(emitter, emitter.head_comment) { + return false + } + emitter.head_comment = emitter.head_comment[:0] + return true +} + +// Write an line comment. +func yaml_emitter_process_line_comment(emitter *yaml_emitter_t) bool { + if len(emitter.line_comment) == 0 { + return true + } + if !emitter.whitespace { + if !put(emitter, ' ') { + return false + } + } + if !yaml_emitter_write_comment(emitter, emitter.line_comment) { + return false + } + emitter.line_comment = emitter.line_comment[:0] + return true +} + +// Write a foot comment. +func yaml_emitter_process_foot_comment(emitter *yaml_emitter_t) bool { + if len(emitter.foot_comment) == 0 { + return true + } + if !yaml_emitter_write_indent(emitter) { + return false + } + if !yaml_emitter_write_comment(emitter, emitter.foot_comment) { + return false + } + emitter.foot_comment = emitter.foot_comment[:0] + emitter.foot_indent = emitter.indent + if emitter.foot_indent < 0 { + emitter.foot_indent = 0 + } + return true +} + +// Check if a %YAML directive is valid. +func yaml_emitter_analyze_version_directive(emitter *yaml_emitter_t, version_directive *yaml_version_directive_t) bool { + if version_directive.major != 1 || version_directive.minor != 1 { + return yaml_emitter_set_emitter_error(emitter, "incompatible %YAML directive") + } + return true +} + +// Check if a %TAG directive is valid. +func yaml_emitter_analyze_tag_directive(emitter *yaml_emitter_t, tag_directive *yaml_tag_directive_t) bool { + handle := tag_directive.handle + prefix := tag_directive.prefix + if len(handle) == 0 { + return yaml_emitter_set_emitter_error(emitter, "tag handle must not be empty") + } + if handle[0] != '!' { + return yaml_emitter_set_emitter_error(emitter, "tag handle must start with '!'") + } + if handle[len(handle)-1] != '!' 
{ + return yaml_emitter_set_emitter_error(emitter, "tag handle must end with '!'") + } + for i := 1; i < len(handle)-1; i += width(handle[i]) { + if !is_alpha(handle, i) { + return yaml_emitter_set_emitter_error(emitter, "tag handle must contain alphanumerical characters only") + } + } + if len(prefix) == 0 { + return yaml_emitter_set_emitter_error(emitter, "tag prefix must not be empty") + } + return true +} + +// Check if an anchor is valid. +func yaml_emitter_analyze_anchor(emitter *yaml_emitter_t, anchor []byte, alias bool) bool { + if len(anchor) == 0 { + problem := "anchor value must not be empty" + if alias { + problem = "alias value must not be empty" + } + return yaml_emitter_set_emitter_error(emitter, problem) + } + for i := 0; i < len(anchor); i += width(anchor[i]) { + if !is_alpha(anchor, i) { + problem := "anchor value must contain alphanumerical characters only" + if alias { + problem = "alias value must contain alphanumerical characters only" + } + return yaml_emitter_set_emitter_error(emitter, problem) + } + } + emitter.anchor_data.anchor = anchor + emitter.anchor_data.alias = alias + return true +} + +// Check if a tag is valid. +func yaml_emitter_analyze_tag(emitter *yaml_emitter_t, tag []byte) bool { + if len(tag) == 0 { + return yaml_emitter_set_emitter_error(emitter, "tag value must not be empty") + } + for i := 0; i < len(emitter.tag_directives); i++ { + tag_directive := &emitter.tag_directives[i] + if bytes.HasPrefix(tag, tag_directive.prefix) { + emitter.tag_data.handle = tag_directive.handle + emitter.tag_data.suffix = tag[len(tag_directive.prefix):] + return true + } + } + emitter.tag_data.suffix = tag + return true +} + +// Check if a scalar is valid. +func yaml_emitter_analyze_scalar(emitter *yaml_emitter_t, value []byte) bool { + var ( + block_indicators = false + flow_indicators = false + line_breaks = false + special_characters = false + tab_characters = false + + leading_space = false + leading_break = false + trailing_space = false + trailing_break = false + break_space = false + space_break = false + + preceded_by_whitespace = false + followed_by_whitespace = false + previous_space = false + previous_break = false + ) + + emitter.scalar_data.value = value + + if len(value) == 0 { + emitter.scalar_data.multiline = false + emitter.scalar_data.flow_plain_allowed = false + emitter.scalar_data.block_plain_allowed = true + emitter.scalar_data.single_quoted_allowed = true + emitter.scalar_data.block_allowed = false + return true + } + + if len(value) >= 3 && ((value[0] == '-' && value[1] == '-' && value[2] == '-') || (value[0] == '.' && value[1] == '.' 
&& value[2] == '.')) { + block_indicators = true + flow_indicators = true + } + + preceded_by_whitespace = true + for i, w := 0, 0; i < len(value); i += w { + w = width(value[i]) + followed_by_whitespace = i+w >= len(value) || is_blank(value, i+w) + + if i == 0 { + switch value[i] { + case '#', ',', '[', ']', '{', '}', '&', '*', '!', '|', '>', '\'', '"', '%', '@', '`': + flow_indicators = true + block_indicators = true + case '?', ':': + flow_indicators = true + if followed_by_whitespace { + block_indicators = true + } + case '-': + if followed_by_whitespace { + flow_indicators = true + block_indicators = true + } + } + } else { + switch value[i] { + case ',', '?', '[', ']', '{', '}': + flow_indicators = true + case ':': + flow_indicators = true + if followed_by_whitespace { + block_indicators = true + } + case '#': + if preceded_by_whitespace { + flow_indicators = true + block_indicators = true + } + } + } + + if value[i] == '\t' { + tab_characters = true + } else if !is_printable(value, i) || !is_ascii(value, i) && !emitter.unicode { + special_characters = true + } + if is_space(value, i) { + if i == 0 { + leading_space = true + } + if i+width(value[i]) == len(value) { + trailing_space = true + } + if previous_break { + break_space = true + } + previous_space = true + previous_break = false + } else if is_break(value, i) { + line_breaks = true + if i == 0 { + leading_break = true + } + if i+width(value[i]) == len(value) { + trailing_break = true + } + if previous_space { + space_break = true + } + previous_space = false + previous_break = true + } else { + previous_space = false + previous_break = false + } + + // [Go]: Why 'z'? Couldn't be the end of the string as that's the loop condition. + preceded_by_whitespace = is_blankz(value, i) + } + + emitter.scalar_data.multiline = line_breaks + emitter.scalar_data.flow_plain_allowed = true + emitter.scalar_data.block_plain_allowed = true + emitter.scalar_data.single_quoted_allowed = true + emitter.scalar_data.block_allowed = true + + if leading_space || leading_break || trailing_space || trailing_break { + emitter.scalar_data.flow_plain_allowed = false + emitter.scalar_data.block_plain_allowed = false + } + if trailing_space { + emitter.scalar_data.block_allowed = false + } + if break_space { + emitter.scalar_data.flow_plain_allowed = false + emitter.scalar_data.block_plain_allowed = false + emitter.scalar_data.single_quoted_allowed = false + } + if space_break || tab_characters || special_characters { + emitter.scalar_data.flow_plain_allowed = false + emitter.scalar_data.block_plain_allowed = false + emitter.scalar_data.single_quoted_allowed = false + } + if space_break || special_characters { + emitter.scalar_data.block_allowed = false + } + if line_breaks { + emitter.scalar_data.flow_plain_allowed = false + emitter.scalar_data.block_plain_allowed = false + } + if flow_indicators { + emitter.scalar_data.flow_plain_allowed = false + } + if block_indicators { + emitter.scalar_data.block_plain_allowed = false + } + return true +} + +// Check if the event data is valid. 
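+//
+// The scalar analysis above feeds the style selection. For example (a
+// behavioral sketch, not upstream documentation), values with a leading
+// space, or with a '#' after whitespace, lose plain-style eligibility and
+// fall back to quoting:
+//
+//	out, _ := yaml.Marshal(" padded") // emitted as ' padded'
+//	out, _ = yaml.Marshal("a # b")    // emitted as 'a # b'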
+func yaml_emitter_analyze_event(emitter *yaml_emitter_t, event *yaml_event_t) bool { + + emitter.anchor_data.anchor = nil + emitter.tag_data.handle = nil + emitter.tag_data.suffix = nil + emitter.scalar_data.value = nil + + if len(event.head_comment) > 0 { + emitter.head_comment = event.head_comment + } + if len(event.line_comment) > 0 { + emitter.line_comment = event.line_comment + } + if len(event.foot_comment) > 0 { + emitter.foot_comment = event.foot_comment + } + if len(event.tail_comment) > 0 { + emitter.tail_comment = event.tail_comment + } + + switch event.typ { + case yaml_ALIAS_EVENT: + if !yaml_emitter_analyze_anchor(emitter, event.anchor, true) { + return false + } + + case yaml_SCALAR_EVENT: + if len(event.anchor) > 0 { + if !yaml_emitter_analyze_anchor(emitter, event.anchor, false) { + return false + } + } + if len(event.tag) > 0 && (emitter.canonical || (!event.implicit && !event.quoted_implicit)) { + if !yaml_emitter_analyze_tag(emitter, event.tag) { + return false + } + } + if !yaml_emitter_analyze_scalar(emitter, event.value) { + return false + } + + case yaml_SEQUENCE_START_EVENT: + if len(event.anchor) > 0 { + if !yaml_emitter_analyze_anchor(emitter, event.anchor, false) { + return false + } + } + if len(event.tag) > 0 && (emitter.canonical || !event.implicit) { + if !yaml_emitter_analyze_tag(emitter, event.tag) { + return false + } + } + + case yaml_MAPPING_START_EVENT: + if len(event.anchor) > 0 { + if !yaml_emitter_analyze_anchor(emitter, event.anchor, false) { + return false + } + } + if len(event.tag) > 0 && (emitter.canonical || !event.implicit) { + if !yaml_emitter_analyze_tag(emitter, event.tag) { + return false + } + } + } + return true +} + +// Write the BOM character. +func yaml_emitter_write_bom(emitter *yaml_emitter_t) bool { + if !flush(emitter) { + return false + } + pos := emitter.buffer_pos + emitter.buffer[pos+0] = '\xEF' + emitter.buffer[pos+1] = '\xBB' + emitter.buffer[pos+2] = '\xBF' + emitter.buffer_pos += 3 + return true +} + +func yaml_emitter_write_indent(emitter *yaml_emitter_t) bool { + indent := emitter.indent + if indent < 0 { + indent = 0 + } + if !emitter.indention || emitter.column > indent || (emitter.column == indent && !emitter.whitespace) { + if !put_break(emitter) { + return false + } + } + if emitter.foot_indent == indent { + if !put_break(emitter) { + return false + } + } + for emitter.column < indent { + if !put(emitter, ' ') { + return false + } + } + emitter.whitespace = true + //emitter.indention = true + emitter.space_above = false + emitter.foot_indent = -1 + return true +} + +func yaml_emitter_write_indicator(emitter *yaml_emitter_t, indicator []byte, need_whitespace, is_whitespace, is_indention bool) bool { + if need_whitespace && !emitter.whitespace { + if !put(emitter, ' ') { + return false + } + } + if !write_all(emitter, indicator) { + return false + } + emitter.whitespace = is_whitespace + emitter.indention = (emitter.indention && is_indention) + emitter.open_ended = false + return true +} + +func yaml_emitter_write_anchor(emitter *yaml_emitter_t, value []byte) bool { + if !write_all(emitter, value) { + return false + } + emitter.whitespace = false + emitter.indention = false + return true +} + +func yaml_emitter_write_tag_handle(emitter *yaml_emitter_t, value []byte) bool { + if !emitter.whitespace { + if !put(emitter, ' ') { + return false + } + } + if !write_all(emitter, value) { + return false + } + emitter.whitespace = false + emitter.indention = false + return true +} + +func 
yaml_emitter_write_tag_content(emitter *yaml_emitter_t, value []byte, need_whitespace bool) bool { + if need_whitespace && !emitter.whitespace { + if !put(emitter, ' ') { + return false + } + } + for i := 0; i < len(value); { + var must_write bool + switch value[i] { + case ';', '/', '?', ':', '@', '&', '=', '+', '$', ',', '_', '.', '~', '*', '\'', '(', ')', '[', ']': + must_write = true + default: + must_write = is_alpha(value, i) + } + if must_write { + if !write(emitter, value, &i) { + return false + } + } else { + w := width(value[i]) + for k := 0; k < w; k++ { + octet := value[i] + i++ + if !put(emitter, '%') { + return false + } + + c := octet >> 4 + if c < 10 { + c += '0' + } else { + c += 'A' - 10 + } + if !put(emitter, c) { + return false + } + + c = octet & 0x0f + if c < 10 { + c += '0' + } else { + c += 'A' - 10 + } + if !put(emitter, c) { + return false + } + } + } + } + emitter.whitespace = false + emitter.indention = false + return true +} + +func yaml_emitter_write_plain_scalar(emitter *yaml_emitter_t, value []byte, allow_breaks bool) bool { + if len(value) > 0 && !emitter.whitespace { + if !put(emitter, ' ') { + return false + } + } + + spaces := false + breaks := false + for i := 0; i < len(value); { + if is_space(value, i) { + if allow_breaks && !spaces && emitter.column > emitter.best_width && !is_space(value, i+1) { + if !yaml_emitter_write_indent(emitter) { + return false + } + i += width(value[i]) + } else { + if !write(emitter, value, &i) { + return false + } + } + spaces = true + } else if is_break(value, i) { + if !breaks && value[i] == '\n' { + if !put_break(emitter) { + return false + } + } + if !write_break(emitter, value, &i) { + return false + } + //emitter.indention = true + breaks = true + } else { + if breaks { + if !yaml_emitter_write_indent(emitter) { + return false + } + } + if !write(emitter, value, &i) { + return false + } + emitter.indention = false + spaces = false + breaks = false + } + } + + if len(value) > 0 { + emitter.whitespace = false + } + emitter.indention = false + if emitter.root_context { + emitter.open_ended = true + } + + return true +} + +func yaml_emitter_write_single_quoted_scalar(emitter *yaml_emitter_t, value []byte, allow_breaks bool) bool { + + if !yaml_emitter_write_indicator(emitter, []byte{'\''}, true, false, false) { + return false + } + + spaces := false + breaks := false + for i := 0; i < len(value); { + if is_space(value, i) { + if allow_breaks && !spaces && emitter.column > emitter.best_width && i > 0 && i < len(value)-1 && !is_space(value, i+1) { + if !yaml_emitter_write_indent(emitter) { + return false + } + i += width(value[i]) + } else { + if !write(emitter, value, &i) { + return false + } + } + spaces = true + } else if is_break(value, i) { + if !breaks && value[i] == '\n' { + if !put_break(emitter) { + return false + } + } + if !write_break(emitter, value, &i) { + return false + } + //emitter.indention = true + breaks = true + } else { + if breaks { + if !yaml_emitter_write_indent(emitter) { + return false + } + } + if value[i] == '\'' { + if !put(emitter, '\'') { + return false + } + } + if !write(emitter, value, &i) { + return false + } + emitter.indention = false + spaces = false + breaks = false + } + } + if !yaml_emitter_write_indicator(emitter, []byte{'\''}, false, false, false) { + return false + } + emitter.whitespace = false + emitter.indention = false + return true +} + +func yaml_emitter_write_double_quoted_scalar(emitter *yaml_emitter_t, value []byte, allow_breaks bool) bool { + spaces := false + if 
!yaml_emitter_write_indicator(emitter, []byte{'"'}, true, false, false) { + return false + } + + for i := 0; i < len(value); { + if !is_printable(value, i) || (!emitter.unicode && !is_ascii(value, i)) || + is_bom(value, i) || is_break(value, i) || + value[i] == '"' || value[i] == '\\' { + + octet := value[i] + + var w int + var v rune + switch { + case octet&0x80 == 0x00: + w, v = 1, rune(octet&0x7F) + case octet&0xE0 == 0xC0: + w, v = 2, rune(octet&0x1F) + case octet&0xF0 == 0xE0: + w, v = 3, rune(octet&0x0F) + case octet&0xF8 == 0xF0: + w, v = 4, rune(octet&0x07) + } + for k := 1; k < w; k++ { + octet = value[i+k] + v = (v << 6) + (rune(octet) & 0x3F) + } + i += w + + if !put(emitter, '\\') { + return false + } + + var ok bool + switch v { + case 0x00: + ok = put(emitter, '0') + case 0x07: + ok = put(emitter, 'a') + case 0x08: + ok = put(emitter, 'b') + case 0x09: + ok = put(emitter, 't') + case 0x0A: + ok = put(emitter, 'n') + case 0x0b: + ok = put(emitter, 'v') + case 0x0c: + ok = put(emitter, 'f') + case 0x0d: + ok = put(emitter, 'r') + case 0x1b: + ok = put(emitter, 'e') + case 0x22: + ok = put(emitter, '"') + case 0x5c: + ok = put(emitter, '\\') + case 0x85: + ok = put(emitter, 'N') + case 0xA0: + ok = put(emitter, '_') + case 0x2028: + ok = put(emitter, 'L') + case 0x2029: + ok = put(emitter, 'P') + default: + if v <= 0xFF { + ok = put(emitter, 'x') + w = 2 + } else if v <= 0xFFFF { + ok = put(emitter, 'u') + w = 4 + } else { + ok = put(emitter, 'U') + w = 8 + } + for k := (w - 1) * 4; ok && k >= 0; k -= 4 { + digit := byte((v >> uint(k)) & 0x0F) + if digit < 10 { + ok = put(emitter, digit+'0') + } else { + ok = put(emitter, digit+'A'-10) + } + } + } + if !ok { + return false + } + spaces = false + } else if is_space(value, i) { + if allow_breaks && !spaces && emitter.column > emitter.best_width && i > 0 && i < len(value)-1 { + if !yaml_emitter_write_indent(emitter) { + return false + } + if is_space(value, i+1) { + if !put(emitter, '\\') { + return false + } + } + i += width(value[i]) + } else if !write(emitter, value, &i) { + return false + } + spaces = true + } else { + if !write(emitter, value, &i) { + return false + } + spaces = false + } + } + if !yaml_emitter_write_indicator(emitter, []byte{'"'}, false, false, false) { + return false + } + emitter.whitespace = false + emitter.indention = false + return true +} + +func yaml_emitter_write_block_scalar_hints(emitter *yaml_emitter_t, value []byte) bool { + if is_space(value, 0) || is_break(value, 0) { + indent_hint := []byte{'0' + byte(emitter.best_indent)} + if !yaml_emitter_write_indicator(emitter, indent_hint, false, false, false) { + return false + } + } + + emitter.open_ended = false + + var chomp_hint [1]byte + if len(value) == 0 { + chomp_hint[0] = '-' + } else { + i := len(value) - 1 + for value[i]&0xC0 == 0x80 { + i-- + } + if !is_break(value, i) { + chomp_hint[0] = '-' + } else if i == 0 { + chomp_hint[0] = '+' + emitter.open_ended = true + } else { + i-- + for value[i]&0xC0 == 0x80 { + i-- + } + if is_break(value, i) { + chomp_hint[0] = '+' + emitter.open_ended = true + } + } + } + if chomp_hint[0] != 0 { + if !yaml_emitter_write_indicator(emitter, chomp_hint[:], false, false, false) { + return false + } + } + return true +} + +func yaml_emitter_write_literal_scalar(emitter *yaml_emitter_t, value []byte) bool { + if !yaml_emitter_write_indicator(emitter, []byte{'|'}, true, false, false) { + return false + } + if !yaml_emitter_write_block_scalar_hints(emitter, value) { + return false + } + if 
!yaml_emitter_process_line_comment(emitter) { + return false + } + //emitter.indention = true + emitter.whitespace = true + breaks := true + for i := 0; i < len(value); { + if is_break(value, i) { + if !write_break(emitter, value, &i) { + return false + } + //emitter.indention = true + breaks = true + } else { + if breaks { + if !yaml_emitter_write_indent(emitter) { + return false + } + } + if !write(emitter, value, &i) { + return false + } + emitter.indention = false + breaks = false + } + } + + return true +} + +func yaml_emitter_write_folded_scalar(emitter *yaml_emitter_t, value []byte) bool { + if !yaml_emitter_write_indicator(emitter, []byte{'>'}, true, false, false) { + return false + } + if !yaml_emitter_write_block_scalar_hints(emitter, value) { + return false + } + if !yaml_emitter_process_line_comment(emitter) { + return false + } + + //emitter.indention = true + emitter.whitespace = true + + breaks := true + leading_spaces := true + for i := 0; i < len(value); { + if is_break(value, i) { + if !breaks && !leading_spaces && value[i] == '\n' { + k := 0 + for is_break(value, k) { + k += width(value[k]) + } + if !is_blankz(value, k) { + if !put_break(emitter) { + return false + } + } + } + if !write_break(emitter, value, &i) { + return false + } + //emitter.indention = true + breaks = true + } else { + if breaks { + if !yaml_emitter_write_indent(emitter) { + return false + } + leading_spaces = is_blank(value, i) + } + if !breaks && is_space(value, i) && !is_space(value, i+1) && emitter.column > emitter.best_width { + if !yaml_emitter_write_indent(emitter) { + return false + } + i += width(value[i]) + } else { + if !write(emitter, value, &i) { + return false + } + } + emitter.indention = false + breaks = false + } + } + return true +} + +func yaml_emitter_write_comment(emitter *yaml_emitter_t, comment []byte) bool { + breaks := false + pound := false + for i := 0; i < len(comment); { + if is_break(comment, i) { + if !write_break(emitter, comment, &i) { + return false + } + //emitter.indention = true + breaks = true + pound = false + } else { + if breaks && !yaml_emitter_write_indent(emitter) { + return false + } + if !pound { + if comment[i] != '#' && (!put(emitter, '#') || !put(emitter, ' ')) { + return false + } + pound = true + } + if !write(emitter, comment, &i) { + return false + } + emitter.indention = false + breaks = false + } + } + if !breaks && !put_break(emitter) { + return false + } + + emitter.whitespace = true + //emitter.indention = true + return true +} diff --git a/cli/internal/yaml/encode.go b/cli/internal/yaml/encode.go new file mode 100644 index 0000000..de9e72a --- /dev/null +++ b/cli/internal/yaml/encode.go @@ -0,0 +1,577 @@ +// +// Copyright (c) 2011-2019 Canonical Ltd +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
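+//
+// What follows is the value-to-event encoder. As an end-to-end sketch of its
+// output (assumed usage through the package-level Marshal; the ",flow" field
+// tag option is handled by the structv/mappingv code below):
+//
+//	type Cfg struct {
+//		Name  string `yaml:"name"`
+//		Ports []int  `yaml:"ports,flow"`
+//	}
+//	out, _ := yaml.Marshal(Cfg{Name: "web", Ports: []int{80, 443}})
+//	// name: web
+//	// ports: [80, 443]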
+ +package yaml + +import ( + "encoding" + "fmt" + "io" + "reflect" + "regexp" + "sort" + "strconv" + "strings" + "time" + "unicode/utf8" +) + +type encoder struct { + emitter yaml_emitter_t + event yaml_event_t + out []byte + flow bool + indent int + doneInit bool +} + +func newEncoder() *encoder { + e := &encoder{} + yaml_emitter_initialize(&e.emitter) + yaml_emitter_set_output_string(&e.emitter, &e.out) + yaml_emitter_set_unicode(&e.emitter, true) + return e +} + +func newEncoderWithWriter(w io.Writer) *encoder { + e := &encoder{} + yaml_emitter_initialize(&e.emitter) + yaml_emitter_set_output_writer(&e.emitter, w) + yaml_emitter_set_unicode(&e.emitter, true) + return e +} + +func (e *encoder) init() { + if e.doneInit { + return + } + if e.indent == 0 { + e.indent = 4 + } + e.emitter.best_indent = e.indent + yaml_stream_start_event_initialize(&e.event, yaml_UTF8_ENCODING) + e.emit() + e.doneInit = true +} + +func (e *encoder) finish() { + e.emitter.open_ended = false + yaml_stream_end_event_initialize(&e.event) + e.emit() +} + +func (e *encoder) destroy() { + yaml_emitter_delete(&e.emitter) +} + +func (e *encoder) emit() { + // This will internally delete the e.event value. + e.must(yaml_emitter_emit(&e.emitter, &e.event)) +} + +func (e *encoder) must(ok bool) { + if !ok { + msg := e.emitter.problem + if msg == "" { + msg = "unknown problem generating YAML content" + } + failf("%s", msg) + } +} + +func (e *encoder) marshalDoc(tag string, in reflect.Value) { + e.init() + var node *Node + if in.IsValid() { + node, _ = in.Interface().(*Node) + } + if node != nil && node.Kind == DocumentNode { + e.nodev(in) + } else { + yaml_document_start_event_initialize(&e.event, nil, nil, true) + e.emit() + e.marshal(tag, in) + yaml_document_end_event_initialize(&e.event, true) + e.emit() + } +} + +func (e *encoder) marshal(tag string, in reflect.Value) { + tag = shortTag(tag) + if !in.IsValid() || in.Kind() == reflect.Ptr && in.IsNil() { + e.nilv() + return + } + iface := in.Interface() + switch value := iface.(type) { + case *Node: + e.nodev(in) + return + case Node: + if !in.CanAddr() { + var n = reflect.New(in.Type()).Elem() + n.Set(in) + in = n + } + e.nodev(in.Addr()) + return + case time.Time: + e.timev(tag, in) + return + case *time.Time: + e.timev(tag, in.Elem()) + return + case time.Duration: + e.stringv(tag, reflect.ValueOf(value.String())) + return + case Marshaler: + v, err := value.MarshalYAML() + if err != nil { + fail(err) + } + if v == nil { + e.nilv() + return + } + e.marshal(tag, reflect.ValueOf(v)) + return + case encoding.TextMarshaler: + text, err := value.MarshalText() + if err != nil { + fail(err) + } + in = reflect.ValueOf(string(text)) + case nil: + e.nilv() + return + } + switch in.Kind() { + case reflect.Interface: + e.marshal(tag, in.Elem()) + case reflect.Map: + e.mapv(tag, in) + case reflect.Ptr: + e.marshal(tag, in.Elem()) + case reflect.Struct: + e.structv(tag, in) + case reflect.Slice, reflect.Array: + e.slicev(tag, in) + case reflect.String: + e.stringv(tag, in) + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + e.intv(tag, in) + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + e.uintv(tag, in) + case reflect.Float32, reflect.Float64: + e.floatv(tag, in) + case reflect.Bool: + e.boolv(tag, in) + default: + panic("cannot marshal type: " + in.Type().String()) + } +} + +func (e *encoder) mapv(tag string, in reflect.Value) { + e.mappingv(tag, func() { + keys := keyList(in.MapKeys()) + 
sort.Sort(keys)
+		for _, k := range keys {
+			e.marshal("", k)
+			e.marshal("", in.MapIndex(k))
+		}
+	})
+}
+
+func (e *encoder) fieldByIndex(v reflect.Value, index []int) (field reflect.Value) {
+	for _, num := range index {
+		for {
+			if v.Kind() == reflect.Ptr {
+				if v.IsNil() {
+					return reflect.Value{}
+				}
+				v = v.Elem()
+				continue
+			}
+			break
+		}
+		v = v.Field(num)
+	}
+	return v
+}
+
+func (e *encoder) structv(tag string, in reflect.Value) {
+	sinfo, err := getStructInfo(in.Type())
+	if err != nil {
+		panic(err)
+	}
+	e.mappingv(tag, func() {
+		for _, info := range sinfo.FieldsList {
+			var value reflect.Value
+			if info.Inline == nil {
+				value = in.Field(info.Num)
+			} else {
+				value = e.fieldByIndex(in, info.Inline)
+				if !value.IsValid() {
+					continue
+				}
+			}
+			if info.OmitEmpty && isZero(value) {
+				continue
+			}
+			e.marshal("", reflect.ValueOf(info.Key))
+			e.flow = info.Flow
+			e.marshal("", value)
+		}
+		if sinfo.InlineMap >= 0 {
+			m := in.Field(sinfo.InlineMap)
+			if m.Len() > 0 {
+				e.flow = false
+				keys := keyList(m.MapKeys())
+				sort.Sort(keys)
+				for _, k := range keys {
+					if _, found := sinfo.FieldsMap[k.String()]; found {
+						panic(fmt.Sprintf("cannot have key %q in inlined map: conflicts with struct field", k.String()))
+					}
+					e.marshal("", k)
+					e.flow = false
+					e.marshal("", m.MapIndex(k))
+				}
+			}
+		}
+	})
+}
+
+func (e *encoder) mappingv(tag string, f func()) {
+	implicit := tag == ""
+	style := yaml_BLOCK_MAPPING_STYLE
+	if e.flow {
+		e.flow = false
+		style = yaml_FLOW_MAPPING_STYLE
+	}
+	yaml_mapping_start_event_initialize(&e.event, nil, []byte(tag), implicit, style)
+	e.emit()
+	f()
+	yaml_mapping_end_event_initialize(&e.event)
+	e.emit()
+}
+
+func (e *encoder) slicev(tag string, in reflect.Value) {
+	implicit := tag == ""
+	style := yaml_BLOCK_SEQUENCE_STYLE
+	if e.flow {
+		e.flow = false
+		style = yaml_FLOW_SEQUENCE_STYLE
+	}
+	e.must(yaml_sequence_start_event_initialize(&e.event, nil, []byte(tag), implicit, style))
+	e.emit()
+	n := in.Len()
+	for i := 0; i < n; i++ {
+		e.marshal("", in.Index(i))
+	}
+	e.must(yaml_sequence_end_event_initialize(&e.event))
+	e.emit()
+}
+
+// isBase60Float returns whether s is in base 60 notation as defined in YAML 1.1.
+//
+// The base 60 float notation in YAML 1.1 is a terrible idea and is unsupported
+// in YAML 1.2 and by this package, but these should be marshalled quoted for
+// the time being for compatibility with other parsers.
+func isBase60Float(s string) (result bool) {
+	// Fast path.
+	if s == "" {
+		return false
+	}
+	c := s[0]
+	if !(c == '+' || c == '-' || c >= '0' && c <= '9') || strings.IndexByte(s, ':') < 0 {
+		return false
+	}
+	// Do the full match.
+	return base60float.MatchString(s)
+}
+
+// From http://yaml.org/type/float.html, except the regular expression there
+// is bogus. In practice parsers do not enforce the "\.[0-9_]*" suffix.
+var base60float = regexp.MustCompile(`^[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+(?:\.[0-9_]*)?$`)
+
+// isOldBool returns whether s is bool notation as defined in YAML 1.1.
+//
+// We continue to force strings that YAML 1.1 would interpret as booleans to be
+// rendered as quoted strings so that the marshalled output is valid for YAML
+// 1.1 parsing.
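+//
+// A short sketch of the effect (assumed behavior of Marshal given the checks
+// above, not upstream documentation):
+//
+//	out, _ := yaml.Marshal("y")   // emitted as "y", since YAML 1.1 reads y as a bool
+//	out, _ = yaml.Marshal("1:30") // emitted as "1:30", a YAML 1.1 base 60 float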
+func isOldBool(s string) (result bool) {
+	switch s {
+	case "y", "Y", "yes", "Yes", "YES", "on", "On", "ON",
+		"n", "N", "no", "No", "NO", "off", "Off", "OFF":
+		return true
+	default:
+		return false
+	}
+}
+
+func (e *encoder) stringv(tag string, in reflect.Value) {
+	var style yaml_scalar_style_t
+	s := in.String()
+	canUsePlain := true
+	switch {
+	case !utf8.ValidString(s):
+		if tag == binaryTag {
+			failf("explicitly tagged !!binary data must be base64-encoded")
+		}
+		if tag != "" {
+			failf("cannot marshal invalid UTF-8 data as %s", shortTag(tag))
+		}
+		// It can't be encoded directly as YAML so use a binary tag
+		// and encode it as base64.
+		tag = binaryTag
+		s = encodeBase64(s)
+	case tag == "":
+		// Check to see if it would resolve to a specific
+		// tag when encoded unquoted. If it doesn't,
+		// there's no need to quote it.
+		rtag, _ := resolve("", s)
+		canUsePlain = rtag == strTag && !(isBase60Float(s) || isOldBool(s))
+	}
+	// Note: it's possible for user code to emit invalid YAML
+	// if they explicitly specify a tag and a string containing
+	// text that's incompatible with that tag.
+	switch {
+	case strings.Contains(s, "\n"):
+		if e.flow {
+			style = yaml_DOUBLE_QUOTED_SCALAR_STYLE
+		} else {
+			style = yaml_LITERAL_SCALAR_STYLE
+		}
+	case canUsePlain:
+		style = yaml_PLAIN_SCALAR_STYLE
+	default:
+		style = yaml_DOUBLE_QUOTED_SCALAR_STYLE
+	}
+	e.emitScalar(s, "", tag, style, nil, nil, nil, nil)
+}
+
+func (e *encoder) boolv(tag string, in reflect.Value) {
+	var s string
+	if in.Bool() {
+		s = "true"
+	} else {
+		s = "false"
+	}
+	e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE, nil, nil, nil, nil)
+}
+
+func (e *encoder) intv(tag string, in reflect.Value) {
+	s := strconv.FormatInt(in.Int(), 10)
+	e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE, nil, nil, nil, nil)
+}
+
+func (e *encoder) uintv(tag string, in reflect.Value) {
+	s := strconv.FormatUint(in.Uint(), 10)
+	e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE, nil, nil, nil, nil)
+}
+
+func (e *encoder) timev(tag string, in reflect.Value) {
+	t := in.Interface().(time.Time)
+	s := t.Format(time.RFC3339Nano)
+	e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE, nil, nil, nil, nil)
+}
+
+func (e *encoder) floatv(tag string, in reflect.Value) {
+	// Issue #352: When formatting, use the precision of the underlying value
+	precision := 64
+	if in.Kind() == reflect.Float32 {
+		precision = 32
+	}
+
+	s := strconv.FormatFloat(in.Float(), 'g', -1, precision)
+	switch s {
+	case "+Inf":
+		s = ".inf"
+	case "-Inf":
+		s = "-.inf"
+	case "NaN":
+		s = ".nan"
+	}
+	e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE, nil, nil, nil, nil)
+}
+
+func (e *encoder) nilv() {
+	e.emitScalar("null", "", "", yaml_PLAIN_SCALAR_STYLE, nil, nil, nil, nil)
+}
+
+func (e *encoder) emitScalar(value, anchor, tag string, style yaml_scalar_style_t, head, line, foot, tail []byte) {
+	// TODO Kill this function. Replace all initialize calls by their underlying Go literals.
+	implicit := tag == ""
+	if !implicit {
+		tag = longTag(tag)
+	}
+	e.must(yaml_scalar_event_initialize(&e.event, []byte(anchor), []byte(tag), []byte(value), implicit, implicit, style))
+	e.event.head_comment = head
+	e.event.line_comment = line
+	e.event.foot_comment = foot
+	e.event.tail_comment = tail
+	e.emit()
+}
+
+func (e *encoder) nodev(in reflect.Value) {
+	e.node(in.Interface().(*Node), "")
+}
+
+func (e *encoder) node(node *Node, tail string) {
+	// Zero nodes behave as nil.
+	if node.Kind == 0 && node.IsZero() {
+		e.nilv()
+		return
+	}
+
+	// If the tag was not explicitly requested, and dropping it won't change the
+	// implicit tag of the value, don't include it in the presentation.
+	var tag = node.Tag
+	var stag = shortTag(tag)
+	var forceQuoting bool
+	if tag != "" && node.Style&TaggedStyle == 0 {
+		if node.Kind == ScalarNode {
+			if stag == strTag && node.Style&(SingleQuotedStyle|DoubleQuotedStyle|LiteralStyle|FoldedStyle) != 0 {
+				tag = ""
+			} else {
+				rtag, _ := resolve("", node.Value)
+				if rtag == stag {
+					tag = ""
+				} else if stag == strTag {
+					tag = ""
+					forceQuoting = true
+				}
+			}
+		} else {
+			var rtag string
+			switch node.Kind {
+			case MappingNode:
+				rtag = mapTag
+			case SequenceNode:
+				rtag = seqTag
+			}
+			if rtag == stag {
+				tag = ""
+			}
+		}
+	}
+
+	switch node.Kind {
+	case DocumentNode:
+		yaml_document_start_event_initialize(&e.event, nil, nil, true)
+		e.event.head_comment = []byte(node.HeadComment)
+		e.emit()
+		for _, node := range node.Content {
+			e.node(node, "")
+		}
+		yaml_document_end_event_initialize(&e.event, true)
+		e.event.foot_comment = []byte(node.FootComment)
+		e.emit()
+
+	case SequenceNode:
+		style := yaml_BLOCK_SEQUENCE_STYLE
+		if node.Style&FlowStyle != 0 {
+			style = yaml_FLOW_SEQUENCE_STYLE
+		}
+		e.must(yaml_sequence_start_event_initialize(&e.event, []byte(node.Anchor), []byte(longTag(tag)), tag == "", style))
+		e.event.head_comment = []byte(node.HeadComment)
+		e.emit()
+		for _, node := range node.Content {
+			e.node(node, "")
+		}
+		e.must(yaml_sequence_end_event_initialize(&e.event))
+		e.event.line_comment = []byte(node.LineComment)
+		e.event.foot_comment = []byte(node.FootComment)
+		e.emit()
+
+	case MappingNode:
+		style := yaml_BLOCK_MAPPING_STYLE
+		if node.Style&FlowStyle != 0 {
+			style = yaml_FLOW_MAPPING_STYLE
+		}
+		yaml_mapping_start_event_initialize(&e.event, []byte(node.Anchor), []byte(longTag(tag)), tag == "", style)
+		e.event.tail_comment = []byte(tail)
+		e.event.head_comment = []byte(node.HeadComment)
+		e.emit()
+
+		// The tail logic below moves the foot comment of prior keys to the following key,
+		// since the value for each key may be a nested structure and the foot needs to be
+		// processed only after the entirety of the value is streamed. The last tail is
+		// processed with the mapping end event.
+		var tail string
+		for i := 0; i+1 < len(node.Content); i += 2 {
+			k := node.Content[i]
+			foot := k.FootComment
+			if foot != "" {
+				kopy := *k
+				kopy.FootComment = ""
+				k = &kopy
+			}
+			e.node(k, tail)
+			tail = foot
+
+			v := node.Content[i+1]
+			e.node(v, "")
+		}
+
+		yaml_mapping_end_event_initialize(&e.event)
+		e.event.tail_comment = []byte(tail)
+		e.event.line_comment = []byte(node.LineComment)
+		e.event.foot_comment = []byte(node.FootComment)
+		e.emit()
+
+	case AliasNode:
+		yaml_alias_event_initialize(&e.event, []byte(node.Value))
+		e.event.head_comment = []byte(node.HeadComment)
+		e.event.line_comment = []byte(node.LineComment)
+		e.event.foot_comment = []byte(node.FootComment)
+		e.emit()
+
+	case ScalarNode:
+		value := node.Value
+		if !utf8.ValidString(value) {
+			if stag == binaryTag {
+				failf("explicitly tagged !!binary data must be base64-encoded")
+			}
+			if stag != "" {
+				failf("cannot marshal invalid UTF-8 data as %s", stag)
+			}
+			// It can't be encoded directly as YAML so use a binary tag
+			// and encode it as base64.
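+			// A concrete sketch of the fallback (assumed behavior, not
+			// upstream documentation): a Node whose Value is "\xff\xfe" is
+			// not valid UTF-8, so it would be emitted as:
+			//
+			//	!!binary //4=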
+ tag = binaryTag + value = encodeBase64(value) + } + + style := yaml_PLAIN_SCALAR_STYLE + switch { + case node.Style&DoubleQuotedStyle != 0: + style = yaml_DOUBLE_QUOTED_SCALAR_STYLE + case node.Style&SingleQuotedStyle != 0: + style = yaml_SINGLE_QUOTED_SCALAR_STYLE + case node.Style&LiteralStyle != 0: + style = yaml_LITERAL_SCALAR_STYLE + case node.Style&FoldedStyle != 0: + style = yaml_FOLDED_SCALAR_STYLE + case strings.Contains(value, "\n"): + style = yaml_LITERAL_SCALAR_STYLE + case forceQuoting: + style = yaml_DOUBLE_QUOTED_SCALAR_STYLE + } + + e.emitScalar(value, node.Anchor, tag, style, []byte(node.HeadComment), []byte(node.LineComment), []byte(node.FootComment), []byte(tail)) + default: + failf("cannot encode node with unknown kind %d", node.Kind) + } +} diff --git a/cli/internal/yaml/parserc.go b/cli/internal/yaml/parserc.go new file mode 100644 index 0000000..25fe823 --- /dev/null +++ b/cli/internal/yaml/parserc.go @@ -0,0 +1,1274 @@ +// +// Copyright (c) 2011-2019 Canonical Ltd +// Copyright (c) 2006-2010 Kirill Simonov +// +// Permission is hereby granted, free of charge, to any person obtaining a copy of +// this software and associated documentation files (the "Software"), to deal in +// the Software without restriction, including without limitation the rights to +// use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +// of the Software, and to permit persons to whom the Software is furnished to do +// so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +package yaml + +import ( + "bytes" +) + +// The parser implements the following grammar: +// +// stream ::= STREAM-START implicit_document? explicit_document* STREAM-END +// implicit_document ::= block_node DOCUMENT-END* +// explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END* +// block_node_or_indentless_sequence ::= +// ALIAS +// | properties (block_content | indentless_block_sequence)? +// | block_content +// | indentless_block_sequence +// block_node ::= ALIAS +// | properties block_content? +// | block_content +// flow_node ::= ALIAS +// | properties flow_content? +// | flow_content +// properties ::= TAG ANCHOR? | ANCHOR TAG? +// block_content ::= block_collection | flow_collection | SCALAR +// flow_content ::= flow_collection | SCALAR +// block_collection ::= block_sequence | block_mapping +// flow_collection ::= flow_sequence | flow_mapping +// block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END +// indentless_sequence ::= (BLOCK-ENTRY block_node?)+ +// block_mapping ::= BLOCK-MAPPING_START +// ((KEY block_node_or_indentless_sequence?)? +// (VALUE block_node_or_indentless_sequence?)?)* +// BLOCK-END +// flow_sequence ::= FLOW-SEQUENCE-START +// (flow_sequence_entry FLOW-ENTRY)* +// flow_sequence_entry? +// FLOW-SEQUENCE-END +// flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? 
+// flow_mapping ::= FLOW-MAPPING-START +// (flow_mapping_entry FLOW-ENTRY)* +// flow_mapping_entry? +// FLOW-MAPPING-END +// flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? + +// Peek the next token in the token queue. +func peek_token(parser *yaml_parser_t) *yaml_token_t { + if parser.token_available || yaml_parser_fetch_more_tokens(parser) { + token := &parser.tokens[parser.tokens_head] + yaml_parser_unfold_comments(parser, token) + return token + } + return nil +} + +// yaml_parser_unfold_comments walks through the comments queue and joins all +// comments behind the position of the provided token into the respective +// top-level comment slices in the parser. +func yaml_parser_unfold_comments(parser *yaml_parser_t, token *yaml_token_t) { + for parser.comments_head < len(parser.comments) && token.start_mark.index >= parser.comments[parser.comments_head].token_mark.index { + comment := &parser.comments[parser.comments_head] + if len(comment.head) > 0 { + if token.typ == yaml_BLOCK_END_TOKEN { + // No heads on ends, so keep comment.head for a follow up token. + break + } + if len(parser.head_comment) > 0 { + parser.head_comment = append(parser.head_comment, '\n') + } + parser.head_comment = append(parser.head_comment, comment.head...) + } + if len(comment.foot) > 0 { + if len(parser.foot_comment) > 0 { + parser.foot_comment = append(parser.foot_comment, '\n') + } + parser.foot_comment = append(parser.foot_comment, comment.foot...) + } + if len(comment.line) > 0 { + if len(parser.line_comment) > 0 { + parser.line_comment = append(parser.line_comment, '\n') + } + parser.line_comment = append(parser.line_comment, comment.line...) + } + *comment = yaml_comment_t{} + parser.comments_head++ + } +} + +// Remove the next token from the queue (must be called after peek_token). +func skip_token(parser *yaml_parser_t) { + parser.token_available = false + parser.tokens_parsed++ + parser.stream_end_produced = parser.tokens[parser.tokens_head].typ == yaml_STREAM_END_TOKEN + parser.tokens_head++ +} + +// Get the next event. +func yaml_parser_parse(parser *yaml_parser_t, event *yaml_event_t) bool { + // Erase the event object. + *event = yaml_event_t{} + + // No events after the end of the stream or error. + if parser.stream_end_produced || parser.error != yaml_NO_ERROR || parser.state == yaml_PARSE_END_STATE { + return true + } + + // Generate the next event. + return yaml_parser_state_machine(parser, event) +} + +// Set parser error. +func yaml_parser_set_parser_error(parser *yaml_parser_t, problem string, problem_mark yaml_mark_t) bool { + parser.error = yaml_PARSER_ERROR + parser.problem = problem + parser.problem_mark = problem_mark + return false +} + +func yaml_parser_set_parser_error_context(parser *yaml_parser_t, context string, context_mark yaml_mark_t, problem string, problem_mark yaml_mark_t) bool { + parser.error = yaml_PARSER_ERROR + parser.context = context + parser.context_mark = context_mark + parser.problem = problem + parser.problem_mark = problem_mark + return false +} + +// State dispatcher. 
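+//
+// The dispatcher below is the heart of a push-style event loop. A consumer
+// of this internal API would look roughly like this (a sketch; the exported
+// decoder drives this same loop):
+//
+//	var parser yaml_parser_t
+//	yaml_parser_initialize(&parser)
+//	yaml_parser_set_input_string(&parser, []byte("a: 1"))
+//	var event yaml_event_t
+//	for {
+//		if !yaml_parser_parse(&parser, &event) {
+//			break // parser.problem describes the failure
+//		}
+//		if event.typ == yaml_STREAM_END_EVENT {
+//			break
+//		}
+//	}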
+func yaml_parser_state_machine(parser *yaml_parser_t, event *yaml_event_t) bool { + //trace("yaml_parser_state_machine", "state:", parser.state.String()) + + switch parser.state { + case yaml_PARSE_STREAM_START_STATE: + return yaml_parser_parse_stream_start(parser, event) + + case yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE: + return yaml_parser_parse_document_start(parser, event, true) + + case yaml_PARSE_DOCUMENT_START_STATE: + return yaml_parser_parse_document_start(parser, event, false) + + case yaml_PARSE_DOCUMENT_CONTENT_STATE: + return yaml_parser_parse_document_content(parser, event) + + case yaml_PARSE_DOCUMENT_END_STATE: + return yaml_parser_parse_document_end(parser, event) + + case yaml_PARSE_BLOCK_NODE_STATE: + return yaml_parser_parse_node(parser, event, true, false) + + case yaml_PARSE_BLOCK_NODE_OR_INDENTLESS_SEQUENCE_STATE: + return yaml_parser_parse_node(parser, event, true, true) + + case yaml_PARSE_FLOW_NODE_STATE: + return yaml_parser_parse_node(parser, event, false, false) + + case yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE: + return yaml_parser_parse_block_sequence_entry(parser, event, true) + + case yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE: + return yaml_parser_parse_block_sequence_entry(parser, event, false) + + case yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE: + return yaml_parser_parse_indentless_sequence_entry(parser, event) + + case yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE: + return yaml_parser_parse_block_mapping_key(parser, event, true) + + case yaml_PARSE_BLOCK_MAPPING_KEY_STATE: + return yaml_parser_parse_block_mapping_key(parser, event, false) + + case yaml_PARSE_BLOCK_MAPPING_VALUE_STATE: + return yaml_parser_parse_block_mapping_value(parser, event) + + case yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE: + return yaml_parser_parse_flow_sequence_entry(parser, event, true) + + case yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE: + return yaml_parser_parse_flow_sequence_entry(parser, event, false) + + case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE: + return yaml_parser_parse_flow_sequence_entry_mapping_key(parser, event) + + case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE: + return yaml_parser_parse_flow_sequence_entry_mapping_value(parser, event) + + case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE: + return yaml_parser_parse_flow_sequence_entry_mapping_end(parser, event) + + case yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE: + return yaml_parser_parse_flow_mapping_key(parser, event, true) + + case yaml_PARSE_FLOW_MAPPING_KEY_STATE: + return yaml_parser_parse_flow_mapping_key(parser, event, false) + + case yaml_PARSE_FLOW_MAPPING_VALUE_STATE: + return yaml_parser_parse_flow_mapping_value(parser, event, false) + + case yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE: + return yaml_parser_parse_flow_mapping_value(parser, event, true) + + default: + panic("invalid parser state") + } +} + +// Parse the production: +// stream ::= STREAM-START implicit_document? 
explicit_document* STREAM-END
+//
+// ************
+func yaml_parser_parse_stream_start(parser *yaml_parser_t, event *yaml_event_t) bool {
+	token := peek_token(parser)
+	if token == nil {
+		return false
+	}
+	if token.typ != yaml_STREAM_START_TOKEN {
+		return yaml_parser_set_parser_error(parser, "did not find expected <stream-start>", token.start_mark)
+	}
+	parser.state = yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE
+	*event = yaml_event_t{
+		typ:        yaml_STREAM_START_EVENT,
+		start_mark: token.start_mark,
+		end_mark:   token.end_mark,
+		encoding:   token.encoding,
+	}
+	skip_token(parser)
+	return true
+}
+
+// Parse the productions:
+// implicit_document ::= block_node DOCUMENT-END*
+//
+// *
+//
+// explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
+//
+// *************************
+func yaml_parser_parse_document_start(parser *yaml_parser_t, event *yaml_event_t, implicit bool) bool {
+
+	token := peek_token(parser)
+	if token == nil {
+		return false
+	}
+
+	// Parse extra document end indicators.
+	if !implicit {
+		for token.typ == yaml_DOCUMENT_END_TOKEN {
+			skip_token(parser)
+			token = peek_token(parser)
+			if token == nil {
+				return false
+			}
+		}
+	}
+
+	if implicit && token.typ != yaml_VERSION_DIRECTIVE_TOKEN &&
+		token.typ != yaml_TAG_DIRECTIVE_TOKEN &&
+		token.typ != yaml_DOCUMENT_START_TOKEN &&
+		token.typ != yaml_STREAM_END_TOKEN {
+		// Parse an implicit document.
+		if !yaml_parser_process_directives(parser, nil, nil) {
+			return false
+		}
+		parser.states = append(parser.states, yaml_PARSE_DOCUMENT_END_STATE)
+		parser.state = yaml_PARSE_BLOCK_NODE_STATE
+
+		var head_comment []byte
+		if len(parser.head_comment) > 0 {
+			// [Go] Scan the header comment backwards, and if an empty line is found, break
+			// the header so the part before the last empty line goes into the
+			// document header, while the bottom of it goes into a follow up event.
+			for i := len(parser.head_comment) - 1; i > 0; i-- {
+				if parser.head_comment[i] == '\n' {
+					if i == len(parser.head_comment)-1 {
+						head_comment = parser.head_comment[:i]
+						parser.head_comment = parser.head_comment[i+1:]
+						break
+					} else if parser.head_comment[i-1] == '\n' {
+						head_comment = parser.head_comment[:i-1]
+						parser.head_comment = parser.head_comment[i+1:]
+						break
+					}
+				}
+			}
+		}
+
+		*event = yaml_event_t{
+			typ:        yaml_DOCUMENT_START_EVENT,
+			start_mark: token.start_mark,
+			end_mark:   token.end_mark,
+
+			head_comment: head_comment,
+		}
+
+	} else if token.typ != yaml_STREAM_END_TOKEN {
+		// Parse an explicit document.
+		var version_directive *yaml_version_directive_t
+		var tag_directives []yaml_tag_directive_t
+		start_mark := token.start_mark
+		if !yaml_parser_process_directives(parser, &version_directive, &tag_directives) {
+			return false
+		}
+		token = peek_token(parser)
+		if token == nil {
+			return false
+		}
+		if token.typ != yaml_DOCUMENT_START_TOKEN {
+			yaml_parser_set_parser_error(parser,
+				"did not find expected <document start>", token.start_mark)
+			return false
+		}
+		parser.states = append(parser.states, yaml_PARSE_DOCUMENT_END_STATE)
+		parser.state = yaml_PARSE_DOCUMENT_CONTENT_STATE
+		end_mark := token.end_mark
+
+		*event = yaml_event_t{
+			typ:               yaml_DOCUMENT_START_EVENT,
+			start_mark:        start_mark,
+			end_mark:          end_mark,
+			version_directive: version_directive,
+			tag_directives:    tag_directives,
+			implicit:          false,
+		}
+		skip_token(parser)
+
+	} else {
+		// Parse the stream end.
+ parser.state = yaml_PARSE_END_STATE + *event = yaml_event_t{ + typ: yaml_STREAM_END_EVENT, + start_mark: token.start_mark, + end_mark: token.end_mark, + } + skip_token(parser) + } + + return true +} + +// Parse the productions: +// explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END* +// +// *********** +func yaml_parser_parse_document_content(parser *yaml_parser_t, event *yaml_event_t) bool { + token := peek_token(parser) + if token == nil { + return false + } + + if token.typ == yaml_VERSION_DIRECTIVE_TOKEN || + token.typ == yaml_TAG_DIRECTIVE_TOKEN || + token.typ == yaml_DOCUMENT_START_TOKEN || + token.typ == yaml_DOCUMENT_END_TOKEN || + token.typ == yaml_STREAM_END_TOKEN { + parser.state = parser.states[len(parser.states)-1] + parser.states = parser.states[:len(parser.states)-1] + return yaml_parser_process_empty_scalar(parser, event, + token.start_mark) + } + return yaml_parser_parse_node(parser, event, true, false) +} + +// Parse the productions: +// implicit_document ::= block_node DOCUMENT-END* +// +// ************* +// +// explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END* +func yaml_parser_parse_document_end(parser *yaml_parser_t, event *yaml_event_t) bool { + token := peek_token(parser) + if token == nil { + return false + } + + start_mark := token.start_mark + end_mark := token.start_mark + + implicit := true + if token.typ == yaml_DOCUMENT_END_TOKEN { + end_mark = token.end_mark + skip_token(parser) + implicit = false + } + + parser.tag_directives = parser.tag_directives[:0] + + parser.state = yaml_PARSE_DOCUMENT_START_STATE + *event = yaml_event_t{ + typ: yaml_DOCUMENT_END_EVENT, + start_mark: start_mark, + end_mark: end_mark, + implicit: implicit, + } + yaml_parser_set_event_comments(parser, event) + if len(event.head_comment) > 0 && len(event.foot_comment) == 0 { + event.foot_comment = event.head_comment + event.head_comment = nil + } + return true +} + +func yaml_parser_set_event_comments(parser *yaml_parser_t, event *yaml_event_t) { + event.head_comment = parser.head_comment + event.line_comment = parser.line_comment + event.foot_comment = parser.foot_comment + parser.head_comment = nil + parser.line_comment = nil + parser.foot_comment = nil + parser.tail_comment = nil + parser.stem_comment = nil +} + +// Parse the productions: +// block_node_or_indentless_sequence ::= +// +// ALIAS +// ***** +// | properties (block_content | indentless_block_sequence)? +// ********** * +// | block_content | indentless_block_sequence +// * +// +// block_node ::= ALIAS +// +// ***** +// | properties block_content? +// ********** * +// | block_content +// * +// +// flow_node ::= ALIAS +// +// ***** +// | properties flow_content? +// ********** * +// | flow_content +// * +// +// properties ::= TAG ANCHOR? | ANCHOR TAG? 
+// +// ************************* +// +// block_content ::= block_collection | flow_collection | SCALAR +// +// ****** +// +// flow_content ::= flow_collection | SCALAR +// +// ****** +func yaml_parser_parse_node(parser *yaml_parser_t, event *yaml_event_t, block, indentless_sequence bool) bool { + //defer trace("yaml_parser_parse_node", "block:", block, "indentless_sequence:", indentless_sequence)() + + token := peek_token(parser) + if token == nil { + return false + } + + if token.typ == yaml_ALIAS_TOKEN { + parser.state = parser.states[len(parser.states)-1] + parser.states = parser.states[:len(parser.states)-1] + *event = yaml_event_t{ + typ: yaml_ALIAS_EVENT, + start_mark: token.start_mark, + end_mark: token.end_mark, + anchor: token.value, + } + yaml_parser_set_event_comments(parser, event) + skip_token(parser) + return true + } + + start_mark := token.start_mark + end_mark := token.start_mark + + var tag_token bool + var tag_handle, tag_suffix, anchor []byte + var tag_mark yaml_mark_t + if token.typ == yaml_ANCHOR_TOKEN { + anchor = token.value + start_mark = token.start_mark + end_mark = token.end_mark + skip_token(parser) + token = peek_token(parser) + if token == nil { + return false + } + if token.typ == yaml_TAG_TOKEN { + tag_token = true + tag_handle = token.value + tag_suffix = token.suffix + tag_mark = token.start_mark + end_mark = token.end_mark + skip_token(parser) + token = peek_token(parser) + if token == nil { + return false + } + } + } else if token.typ == yaml_TAG_TOKEN { + tag_token = true + tag_handle = token.value + tag_suffix = token.suffix + start_mark = token.start_mark + tag_mark = token.start_mark + end_mark = token.end_mark + skip_token(parser) + token = peek_token(parser) + if token == nil { + return false + } + if token.typ == yaml_ANCHOR_TOKEN { + anchor = token.value + end_mark = token.end_mark + skip_token(parser) + token = peek_token(parser) + if token == nil { + return false + } + } + } + + var tag []byte + if tag_token { + if len(tag_handle) == 0 { + tag = tag_suffix + tag_suffix = nil + } else { + for i := range parser.tag_directives { + if bytes.Equal(parser.tag_directives[i].handle, tag_handle) { + tag = append([]byte(nil), parser.tag_directives[i].prefix...) + tag = append(tag, tag_suffix...) 
+ break + } + } + if len(tag) == 0 { + yaml_parser_set_parser_error_context(parser, + "while parsing a node", start_mark, + "found undefined tag handle", tag_mark) + return false + } + } + } + + implicit := len(tag) == 0 + if indentless_sequence && token.typ == yaml_BLOCK_ENTRY_TOKEN { + end_mark = token.end_mark + parser.state = yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE + *event = yaml_event_t{ + typ: yaml_SEQUENCE_START_EVENT, + start_mark: start_mark, + end_mark: end_mark, + anchor: anchor, + tag: tag, + implicit: implicit, + style: yaml_style_t(yaml_BLOCK_SEQUENCE_STYLE), + } + return true + } + if token.typ == yaml_SCALAR_TOKEN { + var plain_implicit, quoted_implicit bool + end_mark = token.end_mark + if (len(tag) == 0 && token.style == yaml_PLAIN_SCALAR_STYLE) || (len(tag) == 1 && tag[0] == '!') { + plain_implicit = true + } else if len(tag) == 0 { + quoted_implicit = true + } + parser.state = parser.states[len(parser.states)-1] + parser.states = parser.states[:len(parser.states)-1] + + *event = yaml_event_t{ + typ: yaml_SCALAR_EVENT, + start_mark: start_mark, + end_mark: end_mark, + anchor: anchor, + tag: tag, + value: token.value, + implicit: plain_implicit, + quoted_implicit: quoted_implicit, + style: yaml_style_t(token.style), + } + yaml_parser_set_event_comments(parser, event) + skip_token(parser) + return true + } + if token.typ == yaml_FLOW_SEQUENCE_START_TOKEN { + // [Go] Some of the events below can be merged as they differ only on style. + end_mark = token.end_mark + parser.state = yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE + *event = yaml_event_t{ + typ: yaml_SEQUENCE_START_EVENT, + start_mark: start_mark, + end_mark: end_mark, + anchor: anchor, + tag: tag, + implicit: implicit, + style: yaml_style_t(yaml_FLOW_SEQUENCE_STYLE), + } + yaml_parser_set_event_comments(parser, event) + return true + } + if token.typ == yaml_FLOW_MAPPING_START_TOKEN { + end_mark = token.end_mark + parser.state = yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE + *event = yaml_event_t{ + typ: yaml_MAPPING_START_EVENT, + start_mark: start_mark, + end_mark: end_mark, + anchor: anchor, + tag: tag, + implicit: implicit, + style: yaml_style_t(yaml_FLOW_MAPPING_STYLE), + } + yaml_parser_set_event_comments(parser, event) + return true + } + if block && token.typ == yaml_BLOCK_SEQUENCE_START_TOKEN { + end_mark = token.end_mark + parser.state = yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE + *event = yaml_event_t{ + typ: yaml_SEQUENCE_START_EVENT, + start_mark: start_mark, + end_mark: end_mark, + anchor: anchor, + tag: tag, + implicit: implicit, + style: yaml_style_t(yaml_BLOCK_SEQUENCE_STYLE), + } + if parser.stem_comment != nil { + event.head_comment = parser.stem_comment + parser.stem_comment = nil + } + return true + } + if block && token.typ == yaml_BLOCK_MAPPING_START_TOKEN { + end_mark = token.end_mark + parser.state = yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE + *event = yaml_event_t{ + typ: yaml_MAPPING_START_EVENT, + start_mark: start_mark, + end_mark: end_mark, + anchor: anchor, + tag: tag, + implicit: implicit, + style: yaml_style_t(yaml_BLOCK_MAPPING_STYLE), + } + if parser.stem_comment != nil { + event.head_comment = parser.stem_comment + parser.stem_comment = nil + } + return true + } + if len(anchor) > 0 || len(tag) > 0 { + parser.state = parser.states[len(parser.states)-1] + parser.states = parser.states[:len(parser.states)-1] + + *event = yaml_event_t{ + typ: yaml_SCALAR_EVENT, + start_mark: start_mark, + end_mark: end_mark, + anchor: anchor, + tag: tag, + implicit: implicit, + quoted_implicit: 
false, + style: yaml_style_t(yaml_PLAIN_SCALAR_STYLE), + } + return true + } + + context := "while parsing a flow node" + if block { + context = "while parsing a block node" + } + yaml_parser_set_parser_error_context(parser, context, start_mark, + "did not find expected node content", token.start_mark) + return false +} + +// Parse the productions: +// block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END +// +// ******************** *********** * ********* +func yaml_parser_parse_block_sequence_entry(parser *yaml_parser_t, event *yaml_event_t, first bool) bool { + if first { + token := peek_token(parser) + if token == nil { + return false + } + parser.marks = append(parser.marks, token.start_mark) + skip_token(parser) + } + + token := peek_token(parser) + if token == nil { + return false + } + + if token.typ == yaml_BLOCK_ENTRY_TOKEN { + mark := token.end_mark + prior_head_len := len(parser.head_comment) + skip_token(parser) + yaml_parser_split_stem_comment(parser, prior_head_len) + token = peek_token(parser) + if token == nil { + return false + } + if token.typ != yaml_BLOCK_ENTRY_TOKEN && token.typ != yaml_BLOCK_END_TOKEN { + parser.states = append(parser.states, yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE) + return yaml_parser_parse_node(parser, event, true, false) + } else { + parser.state = yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE + return yaml_parser_process_empty_scalar(parser, event, mark) + } + } + if token.typ == yaml_BLOCK_END_TOKEN { + parser.state = parser.states[len(parser.states)-1] + parser.states = parser.states[:len(parser.states)-1] + parser.marks = parser.marks[:len(parser.marks)-1] + + *event = yaml_event_t{ + typ: yaml_SEQUENCE_END_EVENT, + start_mark: token.start_mark, + end_mark: token.end_mark, + } + + skip_token(parser) + return true + } + + context_mark := parser.marks[len(parser.marks)-1] + parser.marks = parser.marks[:len(parser.marks)-1] + return yaml_parser_set_parser_error_context(parser, + "while parsing a block collection", context_mark, + "did not find expected '-' indicator", token.start_mark) +} + +// Parse the productions: +// indentless_sequence ::= (BLOCK-ENTRY block_node?)+ +// +// *********** * +func yaml_parser_parse_indentless_sequence_entry(parser *yaml_parser_t, event *yaml_event_t) bool { + token := peek_token(parser) + if token == nil { + return false + } + + if token.typ == yaml_BLOCK_ENTRY_TOKEN { + mark := token.end_mark + prior_head_len := len(parser.head_comment) + skip_token(parser) + yaml_parser_split_stem_comment(parser, prior_head_len) + token = peek_token(parser) + if token == nil { + return false + } + if token.typ != yaml_BLOCK_ENTRY_TOKEN && + token.typ != yaml_KEY_TOKEN && + token.typ != yaml_VALUE_TOKEN && + token.typ != yaml_BLOCK_END_TOKEN { + parser.states = append(parser.states, yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE) + return yaml_parser_parse_node(parser, event, true, false) + } + parser.state = yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE + return yaml_parser_process_empty_scalar(parser, event, mark) + } + parser.state = parser.states[len(parser.states)-1] + parser.states = parser.states[:len(parser.states)-1] + + *event = yaml_event_t{ + typ: yaml_SEQUENCE_END_EVENT, + start_mark: token.start_mark, + end_mark: token.start_mark, // [Go] Shouldn't this be token.end_mark? + } + return true +} + +// Split stem comment from head comment. 
+// +// When a sequence or map is found under a sequence entry, the former head comment +// is assigned to the underlying sequence or map as a whole, not the individual +// sequence or map entry as would be expected otherwise. To handle this case the +// previous head comment is moved aside as the stem comment. +func yaml_parser_split_stem_comment(parser *yaml_parser_t, stem_len int) { + if stem_len == 0 { + return + } + + token := peek_token(parser) + if token == nil || token.typ != yaml_BLOCK_SEQUENCE_START_TOKEN && token.typ != yaml_BLOCK_MAPPING_START_TOKEN { + return + } + + parser.stem_comment = parser.head_comment[:stem_len] + if len(parser.head_comment) == stem_len { + parser.head_comment = nil + } else { + // Copy suffix to prevent very strange bugs if someone ever appends + // further bytes to the prefix in the stem_comment slice above. + parser.head_comment = append([]byte(nil), parser.head_comment[stem_len+1:]...) + } +} + +// Parse the productions: +// block_mapping ::= BLOCK-MAPPING_START +// +// ******************* +// ((KEY block_node_or_indentless_sequence?)? +// *** * +// (VALUE block_node_or_indentless_sequence?)?)* +// +// BLOCK-END +// ********* +func yaml_parser_parse_block_mapping_key(parser *yaml_parser_t, event *yaml_event_t, first bool) bool { + if first { + token := peek_token(parser) + if token == nil { + return false + } + parser.marks = append(parser.marks, token.start_mark) + skip_token(parser) + } + + token := peek_token(parser) + if token == nil { + return false + } + + // [Go] A tail comment was left from the prior mapping value processed. Emit an event + // as it needs to be processed with that value and not the following key. + if len(parser.tail_comment) > 0 { + *event = yaml_event_t{ + typ: yaml_TAIL_COMMENT_EVENT, + start_mark: token.start_mark, + end_mark: token.end_mark, + foot_comment: parser.tail_comment, + } + parser.tail_comment = nil + return true + } + + if token.typ == yaml_KEY_TOKEN { + mark := token.end_mark + skip_token(parser) + token = peek_token(parser) + if token == nil { + return false + } + if token.typ != yaml_KEY_TOKEN && + token.typ != yaml_VALUE_TOKEN && + token.typ != yaml_BLOCK_END_TOKEN { + parser.states = append(parser.states, yaml_PARSE_BLOCK_MAPPING_VALUE_STATE) + return yaml_parser_parse_node(parser, event, true, true) + } else { + parser.state = yaml_PARSE_BLOCK_MAPPING_VALUE_STATE + return yaml_parser_process_empty_scalar(parser, event, mark) + } + } else if token.typ == yaml_BLOCK_END_TOKEN { + parser.state = parser.states[len(parser.states)-1] + parser.states = parser.states[:len(parser.states)-1] + parser.marks = parser.marks[:len(parser.marks)-1] + *event = yaml_event_t{ + typ: yaml_MAPPING_END_EVENT, + start_mark: token.start_mark, + end_mark: token.end_mark, + } + yaml_parser_set_event_comments(parser, event) + skip_token(parser) + return true + } + + context_mark := parser.marks[len(parser.marks)-1] + parser.marks = parser.marks[:len(parser.marks)-1] + return yaml_parser_set_parser_error_context(parser, + "while parsing a block mapping", context_mark, + "did not find expected key", token.start_mark) +} + +// Parse the productions: +// block_mapping ::= BLOCK-MAPPING_START +// +// ((KEY block_node_or_indentless_sequence?)? 
+// +// (VALUE block_node_or_indentless_sequence?)?)* +// ***** * +// BLOCK-END +func yaml_parser_parse_block_mapping_value(parser *yaml_parser_t, event *yaml_event_t) bool { + token := peek_token(parser) + if token == nil { + return false + } + if token.typ == yaml_VALUE_TOKEN { + mark := token.end_mark + skip_token(parser) + token = peek_token(parser) + if token == nil { + return false + } + if token.typ != yaml_KEY_TOKEN && + token.typ != yaml_VALUE_TOKEN && + token.typ != yaml_BLOCK_END_TOKEN { + parser.states = append(parser.states, yaml_PARSE_BLOCK_MAPPING_KEY_STATE) + return yaml_parser_parse_node(parser, event, true, true) + } + parser.state = yaml_PARSE_BLOCK_MAPPING_KEY_STATE + return yaml_parser_process_empty_scalar(parser, event, mark) + } + parser.state = yaml_PARSE_BLOCK_MAPPING_KEY_STATE + return yaml_parser_process_empty_scalar(parser, event, token.start_mark) +} + +// Parse the productions: +// flow_sequence ::= FLOW-SEQUENCE-START +// +// ******************* +// (flow_sequence_entry FLOW-ENTRY)* +// * ********** +// flow_sequence_entry? +// * +// FLOW-SEQUENCE-END +// ***************** +// +// flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? +// +// * +func yaml_parser_parse_flow_sequence_entry(parser *yaml_parser_t, event *yaml_event_t, first bool) bool { + if first { + token := peek_token(parser) + if token == nil { + return false + } + parser.marks = append(parser.marks, token.start_mark) + skip_token(parser) + } + token := peek_token(parser) + if token == nil { + return false + } + if token.typ != yaml_FLOW_SEQUENCE_END_TOKEN { + if !first { + if token.typ == yaml_FLOW_ENTRY_TOKEN { + skip_token(parser) + token = peek_token(parser) + if token == nil { + return false + } + } else { + context_mark := parser.marks[len(parser.marks)-1] + parser.marks = parser.marks[:len(parser.marks)-1] + return yaml_parser_set_parser_error_context(parser, + "while parsing a flow sequence", context_mark, + "did not find expected ',' or ']'", token.start_mark) + } + } + + if token.typ == yaml_KEY_TOKEN { + parser.state = yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE + *event = yaml_event_t{ + typ: yaml_MAPPING_START_EVENT, + start_mark: token.start_mark, + end_mark: token.end_mark, + implicit: true, + style: yaml_style_t(yaml_FLOW_MAPPING_STYLE), + } + skip_token(parser) + return true + } else if token.typ != yaml_FLOW_SEQUENCE_END_TOKEN { + parser.states = append(parser.states, yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE) + return yaml_parser_parse_node(parser, event, false, false) + } + } + + parser.state = parser.states[len(parser.states)-1] + parser.states = parser.states[:len(parser.states)-1] + parser.marks = parser.marks[:len(parser.marks)-1] + + *event = yaml_event_t{ + typ: yaml_SEQUENCE_END_EVENT, + start_mark: token.start_mark, + end_mark: token.end_mark, + } + yaml_parser_set_event_comments(parser, event) + + skip_token(parser) + return true +} + +// Parse the productions: +// flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? 
+// +// *** * +func yaml_parser_parse_flow_sequence_entry_mapping_key(parser *yaml_parser_t, event *yaml_event_t) bool { + token := peek_token(parser) + if token == nil { + return false + } + if token.typ != yaml_VALUE_TOKEN && + token.typ != yaml_FLOW_ENTRY_TOKEN && + token.typ != yaml_FLOW_SEQUENCE_END_TOKEN { + parser.states = append(parser.states, yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE) + return yaml_parser_parse_node(parser, event, false, false) + } + mark := token.end_mark + skip_token(parser) + parser.state = yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE + return yaml_parser_process_empty_scalar(parser, event, mark) +} + +// Parse the productions: +// flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? +// +// ***** * +func yaml_parser_parse_flow_sequence_entry_mapping_value(parser *yaml_parser_t, event *yaml_event_t) bool { + token := peek_token(parser) + if token == nil { + return false + } + if token.typ == yaml_VALUE_TOKEN { + skip_token(parser) + token := peek_token(parser) + if token == nil { + return false + } + if token.typ != yaml_FLOW_ENTRY_TOKEN && token.typ != yaml_FLOW_SEQUENCE_END_TOKEN { + parser.states = append(parser.states, yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE) + return yaml_parser_parse_node(parser, event, false, false) + } + } + parser.state = yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE + return yaml_parser_process_empty_scalar(parser, event, token.start_mark) +} + +// Parse the productions: +// flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? +// +// * +func yaml_parser_parse_flow_sequence_entry_mapping_end(parser *yaml_parser_t, event *yaml_event_t) bool { + token := peek_token(parser) + if token == nil { + return false + } + parser.state = yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE + *event = yaml_event_t{ + typ: yaml_MAPPING_END_EVENT, + start_mark: token.start_mark, + end_mark: token.start_mark, // [Go] Shouldn't this be end_mark? + } + return true +} + +// Parse the productions: +// flow_mapping ::= FLOW-MAPPING-START +// +// ****************** +// (flow_mapping_entry FLOW-ENTRY)* +// * ********** +// flow_mapping_entry? +// ****************** +// FLOW-MAPPING-END +// **************** +// +// flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? 
+// - *** *
+func yaml_parser_parse_flow_mapping_key(parser *yaml_parser_t, event *yaml_event_t, first bool) bool {
+	if first {
+		token := peek_token(parser)
+		if token == nil {
+			return false
+		}
+		parser.marks = append(parser.marks, token.start_mark)
+		skip_token(parser)
+	}
+
+	token := peek_token(parser)
+	if token == nil {
+		return false
+	}
+
+	if token.typ != yaml_FLOW_MAPPING_END_TOKEN {
+		if !first {
+			if token.typ == yaml_FLOW_ENTRY_TOKEN {
+				skip_token(parser)
+				token = peek_token(parser)
+				if token == nil {
+					return false
+				}
+			} else {
+				context_mark := parser.marks[len(parser.marks)-1]
+				parser.marks = parser.marks[:len(parser.marks)-1]
+				return yaml_parser_set_parser_error_context(parser,
+					"while parsing a flow mapping", context_mark,
+					"did not find expected ',' or '}'", token.start_mark)
+			}
+		}
+
+		if token.typ == yaml_KEY_TOKEN {
+			skip_token(parser)
+			token = peek_token(parser)
+			if token == nil {
+				return false
+			}
+			if token.typ != yaml_VALUE_TOKEN &&
+				token.typ != yaml_FLOW_ENTRY_TOKEN &&
+				token.typ != yaml_FLOW_MAPPING_END_TOKEN {
+				parser.states = append(parser.states, yaml_PARSE_FLOW_MAPPING_VALUE_STATE)
+				return yaml_parser_parse_node(parser, event, false, false)
+			} else {
+				parser.state = yaml_PARSE_FLOW_MAPPING_VALUE_STATE
+				return yaml_parser_process_empty_scalar(parser, event, token.start_mark)
+			}
+		} else if token.typ != yaml_FLOW_MAPPING_END_TOKEN {
+			parser.states = append(parser.states, yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE)
+			return yaml_parser_parse_node(parser, event, false, false)
+		}
+	}
+
+	parser.state = parser.states[len(parser.states)-1]
+	parser.states = parser.states[:len(parser.states)-1]
+	parser.marks = parser.marks[:len(parser.marks)-1]
+	*event = yaml_event_t{
+		typ:        yaml_MAPPING_END_EVENT,
+		start_mark: token.start_mark,
+		end_mark:   token.end_mark,
+	}
+	yaml_parser_set_event_comments(parser, event)
+	skip_token(parser)
+	return true
+}
+
+// Parse the productions:
+// flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
+// - ***** *
+func yaml_parser_parse_flow_mapping_value(parser *yaml_parser_t, event *yaml_event_t, empty bool) bool {
+	token := peek_token(parser)
+	if token == nil {
+		return false
+	}
+	if empty {
+		parser.state = yaml_PARSE_FLOW_MAPPING_KEY_STATE
+		return yaml_parser_process_empty_scalar(parser, event, token.start_mark)
+	}
+	if token.typ == yaml_VALUE_TOKEN {
+		skip_token(parser)
+		token = peek_token(parser)
+		if token == nil {
+			return false
+		}
+		if token.typ != yaml_FLOW_ENTRY_TOKEN && token.typ != yaml_FLOW_MAPPING_END_TOKEN {
+			parser.states = append(parser.states, yaml_PARSE_FLOW_MAPPING_KEY_STATE)
+			return yaml_parser_parse_node(parser, event, false, false)
+		}
+	}
+	parser.state = yaml_PARSE_FLOW_MAPPING_KEY_STATE
+	return yaml_parser_process_empty_scalar(parser, event, token.start_mark)
+}
+
+// Generate an empty scalar event.
+func yaml_parser_process_empty_scalar(parser *yaml_parser_t, event *yaml_event_t, mark yaml_mark_t) bool {
+	*event = yaml_event_t{
+		typ:        yaml_SCALAR_EVENT,
+		start_mark: mark,
+		end_mark:   mark,
+		value:      nil, // Empty
+		implicit:   true,
+		style:      yaml_style_t(yaml_PLAIN_SCALAR_STYLE),
+	}
+	return true
+}
+
+var default_tag_directives = []yaml_tag_directive_t{
+	{[]byte("!"), []byte("!")},
+	{[]byte("!!"), []byte("tag:yaml.org,2002:")},
+}
+
+// Parse directives.
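+//
+// As an illustrative example, a prologue such as:
+//
+//      %YAML 1.1
+//      %TAG !e! tag:example.com,2000:app/
+//      ---
+//
+// produces a version directive of {major: 1, minor: 1} and a single tag
+// directive mapping the handle "!e!" to the prefix
+// "tag:example.com,2000:app/", while the default "!" and "!!" handles are
+// also installed on the parser below (duplicates being allowed for them).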
+func yaml_parser_process_directives(parser *yaml_parser_t, + version_directive_ref **yaml_version_directive_t, + tag_directives_ref *[]yaml_tag_directive_t) bool { + + var version_directive *yaml_version_directive_t + var tag_directives []yaml_tag_directive_t + + token := peek_token(parser) + if token == nil { + return false + } + + for token.typ == yaml_VERSION_DIRECTIVE_TOKEN || token.typ == yaml_TAG_DIRECTIVE_TOKEN { + if token.typ == yaml_VERSION_DIRECTIVE_TOKEN { + if version_directive != nil { + yaml_parser_set_parser_error(parser, + "found duplicate %YAML directive", token.start_mark) + return false + } + if token.major != 1 || token.minor != 1 { + yaml_parser_set_parser_error(parser, + "found incompatible YAML document", token.start_mark) + return false + } + version_directive = &yaml_version_directive_t{ + major: token.major, + minor: token.minor, + } + } else if token.typ == yaml_TAG_DIRECTIVE_TOKEN { + value := yaml_tag_directive_t{ + handle: token.value, + prefix: token.prefix, + } + if !yaml_parser_append_tag_directive(parser, value, false, token.start_mark) { + return false + } + tag_directives = append(tag_directives, value) + } + + skip_token(parser) + token = peek_token(parser) + if token == nil { + return false + } + } + + for i := range default_tag_directives { + if !yaml_parser_append_tag_directive(parser, default_tag_directives[i], true, token.start_mark) { + return false + } + } + + if version_directive_ref != nil { + *version_directive_ref = version_directive + } + if tag_directives_ref != nil { + *tag_directives_ref = tag_directives + } + return true +} + +// Append a tag directive to the directives stack. +func yaml_parser_append_tag_directive(parser *yaml_parser_t, value yaml_tag_directive_t, allow_duplicates bool, mark yaml_mark_t) bool { + for i := range parser.tag_directives { + if bytes.Equal(value.handle, parser.tag_directives[i].handle) { + if allow_duplicates { + return true + } + return yaml_parser_set_parser_error(parser, "found duplicate %TAG directive", mark) + } + } + + // [Go] I suspect the copy is unnecessary. This was likely done + // because there was no way to track ownership of the data. + value_copy := yaml_tag_directive_t{ + handle: make([]byte, len(value.handle)), + prefix: make([]byte, len(value.prefix)), + } + copy(value_copy.handle, value.handle) + copy(value_copy.prefix, value.prefix) + parser.tag_directives = append(parser.tag_directives, value_copy) + return true +} diff --git a/cli/internal/yaml/readerc.go b/cli/internal/yaml/readerc.go new file mode 100644 index 0000000..56af245 --- /dev/null +++ b/cli/internal/yaml/readerc.go @@ -0,0 +1,434 @@ +// +// Copyright (c) 2011-2019 Canonical Ltd +// Copyright (c) 2006-2010 Kirill Simonov +// +// Permission is hereby granted, free of charge, to any person obtaining a copy of +// this software and associated documentation files (the "Software"), to deal in +// the Software without restriction, including without limitation the rights to +// use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +// of the Software, and to permit persons to whom the Software is furnished to do +// so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+// SOFTWARE.
+
+package yaml
+
+import (
+	"io"
+)
+
+// Set the reader error and return false.
+func yaml_parser_set_reader_error(parser *yaml_parser_t, problem string, offset int, value int) bool {
+	parser.error = yaml_READER_ERROR
+	parser.problem = problem
+	parser.problem_offset = offset
+	parser.problem_value = value
+	return false
+}
+
+// Byte order marks.
+const (
+	bom_UTF8    = "\xef\xbb\xbf"
+	bom_UTF16LE = "\xff\xfe"
+	bom_UTF16BE = "\xfe\xff"
+)
+
+// Determine the input stream encoding by checking the BOM symbol. If no BOM is
+// found, the UTF-8 encoding is assumed. Return true on success, false on failure.
+func yaml_parser_determine_encoding(parser *yaml_parser_t) bool {
+	// Ensure that we have enough bytes in the raw buffer.
+	for !parser.eof && len(parser.raw_buffer)-parser.raw_buffer_pos < 3 {
+		if !yaml_parser_update_raw_buffer(parser) {
+			return false
+		}
+	}
+
+	// Determine the encoding.
+	buf := parser.raw_buffer
+	pos := parser.raw_buffer_pos
+	avail := len(buf) - pos
+	if avail >= 2 && buf[pos] == bom_UTF16LE[0] && buf[pos+1] == bom_UTF16LE[1] {
+		parser.encoding = yaml_UTF16LE_ENCODING
+		parser.raw_buffer_pos += 2
+		parser.offset += 2
+	} else if avail >= 2 && buf[pos] == bom_UTF16BE[0] && buf[pos+1] == bom_UTF16BE[1] {
+		parser.encoding = yaml_UTF16BE_ENCODING
+		parser.raw_buffer_pos += 2
+		parser.offset += 2
+	} else if avail >= 3 && buf[pos] == bom_UTF8[0] && buf[pos+1] == bom_UTF8[1] && buf[pos+2] == bom_UTF8[2] {
+		parser.encoding = yaml_UTF8_ENCODING
+		parser.raw_buffer_pos += 3
+		parser.offset += 3
+	} else {
+		parser.encoding = yaml_UTF8_ENCODING
+	}
+	return true
+}
+
+// Update the raw buffer.
+func yaml_parser_update_raw_buffer(parser *yaml_parser_t) bool {
+	size_read := 0
+
+	// Return if the raw buffer is full.
+	if parser.raw_buffer_pos == 0 && len(parser.raw_buffer) == cap(parser.raw_buffer) {
+		return true
+	}
+
+	// Return on EOF.
+	if parser.eof {
+		return true
+	}
+
+	// Move the remaining bytes in the raw buffer to the beginning.
+	if parser.raw_buffer_pos > 0 && parser.raw_buffer_pos < len(parser.raw_buffer) {
+		copy(parser.raw_buffer, parser.raw_buffer[parser.raw_buffer_pos:])
+	}
+	parser.raw_buffer = parser.raw_buffer[:len(parser.raw_buffer)-parser.raw_buffer_pos]
+	parser.raw_buffer_pos = 0
+
+	// Call the read handler to fill the buffer.
+	size_read, err := parser.read_handler(parser, parser.raw_buffer[len(parser.raw_buffer):cap(parser.raw_buffer)])
+	parser.raw_buffer = parser.raw_buffer[:len(parser.raw_buffer)+size_read]
+	if err == io.EOF {
+		parser.eof = true
+	} else if err != nil {
+		return yaml_parser_set_reader_error(parser, "input error: "+err.Error(), parser.offset, -1)
+	}
+	return true
+}
+
+// Ensure that the buffer contains at least `length` characters.
+// Return true on success, false on failure.
+//
+// The length is supposed to be significantly less than the buffer size.
+func yaml_parser_update_buffer(parser *yaml_parser_t, length int) bool {
+	if parser.read_handler == nil {
+		panic("read handler must be set")
+	}
+
+	// [Go] This function was changed to guarantee the requested length size at EOF.
+ // The fact we need to do this is pretty awful, but the description above implies + // for that to be the case, and there are tests + + // If the EOF flag is set and the raw buffer is empty, do nothing. + if parser.eof && parser.raw_buffer_pos == len(parser.raw_buffer) { + // [Go] ACTUALLY! Read the documentation of this function above. + // This is just broken. To return true, we need to have the + // given length in the buffer. Not doing that means every single + // check that calls this function to make sure the buffer has a + // given length is Go) panicking; or C) accessing invalid memory. + //return true + } + + // Return if the buffer contains enough characters. + if parser.unread >= length { + return true + } + + // Determine the input encoding if it is not known yet. + if parser.encoding == yaml_ANY_ENCODING { + if !yaml_parser_determine_encoding(parser) { + return false + } + } + + // Move the unread characters to the beginning of the buffer. + buffer_len := len(parser.buffer) + if parser.buffer_pos > 0 && parser.buffer_pos < buffer_len { + copy(parser.buffer, parser.buffer[parser.buffer_pos:]) + buffer_len -= parser.buffer_pos + parser.buffer_pos = 0 + } else if parser.buffer_pos == buffer_len { + buffer_len = 0 + parser.buffer_pos = 0 + } + + // Open the whole buffer for writing, and cut it before returning. + parser.buffer = parser.buffer[:cap(parser.buffer)] + + // Fill the buffer until it has enough characters. + first := true + for parser.unread < length { + + // Fill the raw buffer if necessary. + if !first || parser.raw_buffer_pos == len(parser.raw_buffer) { + if !yaml_parser_update_raw_buffer(parser) { + parser.buffer = parser.buffer[:buffer_len] + return false + } + } + first = false + + // Decode the raw buffer. + inner: + for parser.raw_buffer_pos != len(parser.raw_buffer) { + var value rune + var width int + + raw_unread := len(parser.raw_buffer) - parser.raw_buffer_pos + + // Decode the next character. + switch parser.encoding { + case yaml_UTF8_ENCODING: + // Decode a UTF-8 character. Check RFC 3629 + // (http://www.ietf.org/rfc/rfc3629.txt) for more details. + // + // The following table (taken from the RFC) is used for + // decoding. + // + // Char. number range | UTF-8 octet sequence + // (hexadecimal) | (binary) + // --------------------+------------------------------------ + // 0000 0000-0000 007F | 0xxxxxxx + // 0000 0080-0000 07FF | 110xxxxx 10xxxxxx + // 0000 0800-0000 FFFF | 1110xxxx 10xxxxxx 10xxxxxx + // 0001 0000-0010 FFFF | 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx + // + // Additionally, the characters in the range 0xD800-0xDFFF + // are prohibited as they are reserved for use with UTF-16 + // surrogate pairs. + + // Determine the length of the UTF-8 sequence. + octet := parser.raw_buffer[parser.raw_buffer_pos] + switch { + case octet&0x80 == 0x00: + width = 1 + case octet&0xE0 == 0xC0: + width = 2 + case octet&0xF0 == 0xE0: + width = 3 + case octet&0xF8 == 0xF0: + width = 4 + default: + // The leading octet is invalid. + return yaml_parser_set_reader_error(parser, + "invalid leading UTF-8 octet", + parser.offset, int(octet)) + } + + // Check if the raw buffer contains an incomplete character. + if width > raw_unread { + if parser.eof { + return yaml_parser_set_reader_error(parser, + "incomplete UTF-8 octet sequence", + parser.offset, -1) + } + break inner + } + + // Decode the leading octet. 
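+				// As an illustrative example, for U+00E9 ('é'), encoded in
+				// UTF-8 as 0xC3 0xA9: the leading octet 0xC3 matches 110xxxxx,
+				// so the width is 2 and the leading payload is 0xC3&0x1F = 0x03;
+				// the trailing octet contributes 0xA9&0x3F = 0x29, and
+				// (0x03<<6)+0x29 yields 0xE9 below.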
+ switch { + case octet&0x80 == 0x00: + value = rune(octet & 0x7F) + case octet&0xE0 == 0xC0: + value = rune(octet & 0x1F) + case octet&0xF0 == 0xE0: + value = rune(octet & 0x0F) + case octet&0xF8 == 0xF0: + value = rune(octet & 0x07) + default: + value = 0 + } + + // Check and decode the trailing octets. + for k := 1; k < width; k++ { + octet = parser.raw_buffer[parser.raw_buffer_pos+k] + + // Check if the octet is valid. + if (octet & 0xC0) != 0x80 { + return yaml_parser_set_reader_error(parser, + "invalid trailing UTF-8 octet", + parser.offset+k, int(octet)) + } + + // Decode the octet. + value = (value << 6) + rune(octet&0x3F) + } + + // Check the length of the sequence against the value. + switch { + case width == 1: + case width == 2 && value >= 0x80: + case width == 3 && value >= 0x800: + case width == 4 && value >= 0x10000: + default: + return yaml_parser_set_reader_error(parser, + "invalid length of a UTF-8 sequence", + parser.offset, -1) + } + + // Check the range of the value. + if value >= 0xD800 && value <= 0xDFFF || value > 0x10FFFF { + return yaml_parser_set_reader_error(parser, + "invalid Unicode character", + parser.offset, int(value)) + } + + case yaml_UTF16LE_ENCODING, yaml_UTF16BE_ENCODING: + var low, high int + if parser.encoding == yaml_UTF16LE_ENCODING { + low, high = 0, 1 + } else { + low, high = 1, 0 + } + + // The UTF-16 encoding is not as simple as one might + // naively think. Check RFC 2781 + // (http://www.ietf.org/rfc/rfc2781.txt). + // + // Normally, two subsequent bytes describe a Unicode + // character. However a special technique (called a + // surrogate pair) is used for specifying character + // values larger than 0xFFFF. + // + // A surrogate pair consists of two pseudo-characters: + // high surrogate area (0xD800-0xDBFF) + // low surrogate area (0xDC00-0xDFFF) + // + // The following formulas are used for decoding + // and encoding characters using surrogate pairs: + // + // U = U' + 0x10000 (0x01 00 00 <= U <= 0x10 FF FF) + // U' = yyyyyyyyyyxxxxxxxxxx (0 <= U' <= 0x0F FF FF) + // W1 = 110110yyyyyyyyyy + // W2 = 110111xxxxxxxxxx + // + // where U is the character value, W1 is the high surrogate + // area, W2 is the low surrogate area. + + // Check for incomplete UTF-16 character. + if raw_unread < 2 { + if parser.eof { + return yaml_parser_set_reader_error(parser, + "incomplete UTF-16 character", + parser.offset, -1) + } + break inner + } + + // Get the character. + value = rune(parser.raw_buffer[parser.raw_buffer_pos+low]) + + (rune(parser.raw_buffer[parser.raw_buffer_pos+high]) << 8) + + // Check for unexpected low surrogate area. + if value&0xFC00 == 0xDC00 { + return yaml_parser_set_reader_error(parser, + "unexpected low surrogate area", + parser.offset, int(value)) + } + + // Check for a high surrogate area. + if value&0xFC00 == 0xD800 { + width = 4 + + // Check for incomplete surrogate pair. + if raw_unread < 4 { + if parser.eof { + return yaml_parser_set_reader_error(parser, + "incomplete UTF-16 surrogate pair", + parser.offset, -1) + } + break inner + } + + // Get the next character. + value2 := rune(parser.raw_buffer[parser.raw_buffer_pos+low+2]) + + (rune(parser.raw_buffer[parser.raw_buffer_pos+high+2]) << 8) + + // Check for a low surrogate area. + if value2&0xFC00 != 0xDC00 { + return yaml_parser_set_reader_error(parser, + "expected low surrogate area", + parser.offset+2, int(value2)) + } + + // Generate the value of the surrogate pair. 
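+					// As an illustrative example, U+1F600 is encoded as the
+					// pair 0xD83D 0xDE00, and 0x10000 + ((0xD83D&0x3FF)<<10) +
+					// (0xDE00&0x3FF) = 0x10000 + 0xF400 + 0x200 = 0x1F600.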
+ value = 0x10000 + ((value & 0x3FF) << 10) + (value2 & 0x3FF) + } else { + width = 2 + } + + default: + panic("impossible") + } + + // Check if the character is in the allowed range: + // #x9 | #xA | #xD | [#x20-#x7E] (8 bit) + // | #x85 | [#xA0-#xD7FF] | [#xE000-#xFFFD] (16 bit) + // | [#x10000-#x10FFFF] (32 bit) + switch { + case value == 0x09: + case value == 0x0A: + case value == 0x0D: + case value >= 0x20 && value <= 0x7E: + case value == 0x85: + case value >= 0xA0 && value <= 0xD7FF: + case value >= 0xE000 && value <= 0xFFFD: + case value >= 0x10000 && value <= 0x10FFFF: + default: + return yaml_parser_set_reader_error(parser, + "control characters are not allowed", + parser.offset, int(value)) + } + + // Move the raw pointers. + parser.raw_buffer_pos += width + parser.offset += width + + // Finally put the character into the buffer. + if value <= 0x7F { + // 0000 0000-0000 007F . 0xxxxxxx + parser.buffer[buffer_len+0] = byte(value) + buffer_len += 1 + } else if value <= 0x7FF { + // 0000 0080-0000 07FF . 110xxxxx 10xxxxxx + parser.buffer[buffer_len+0] = byte(0xC0 + (value >> 6)) + parser.buffer[buffer_len+1] = byte(0x80 + (value & 0x3F)) + buffer_len += 2 + } else if value <= 0xFFFF { + // 0000 0800-0000 FFFF . 1110xxxx 10xxxxxx 10xxxxxx + parser.buffer[buffer_len+0] = byte(0xE0 + (value >> 12)) + parser.buffer[buffer_len+1] = byte(0x80 + ((value >> 6) & 0x3F)) + parser.buffer[buffer_len+2] = byte(0x80 + (value & 0x3F)) + buffer_len += 3 + } else { + // 0001 0000-0010 FFFF . 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx + parser.buffer[buffer_len+0] = byte(0xF0 + (value >> 18)) + parser.buffer[buffer_len+1] = byte(0x80 + ((value >> 12) & 0x3F)) + parser.buffer[buffer_len+2] = byte(0x80 + ((value >> 6) & 0x3F)) + parser.buffer[buffer_len+3] = byte(0x80 + (value & 0x3F)) + buffer_len += 4 + } + + parser.unread++ + } + + // On EOF, put NUL into the buffer and return. + if parser.eof { + parser.buffer[buffer_len] = 0 + buffer_len++ + parser.unread++ + break + } + } + // [Go] Read the documentation of this function above. To return true, + // we need to have the given length in the buffer. Not doing that means + // every single check that calls this function to make sure the buffer + // has a given length is Go) panicking; or C) accessing invalid memory. + // This happens here due to the EOF above breaking early. + for buffer_len < length { + parser.buffer[buffer_len] = 0 + buffer_len++ + } + parser.buffer = parser.buffer[:buffer_len] + return true +} diff --git a/cli/internal/yaml/resolve.go b/cli/internal/yaml/resolve.go new file mode 100644 index 0000000..64ae888 --- /dev/null +++ b/cli/internal/yaml/resolve.go @@ -0,0 +1,326 @@ +// +// Copyright (c) 2011-2019 Canonical Ltd +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package yaml + +import ( + "encoding/base64" + "math" + "regexp" + "strconv" + "strings" + "time" +) + +type resolveMapItem struct { + value interface{} + tag string +} + +var resolveTable = make([]byte, 256) +var resolveMap = make(map[string]resolveMapItem) + +func init() { + t := resolveTable + t[int('+')] = 'S' // Sign + t[int('-')] = 'S' + for _, c := range "0123456789" { + t[int(c)] = 'D' // Digit + } + for _, c := range "yYnNtTfFoO~" { + t[int(c)] = 'M' // In map + } + t[int('.')] = '.' // Float (potentially in map) + + var resolveMapList = []struct { + v interface{} + tag string + l []string + }{ + {true, boolTag, []string{"true", "True", "TRUE"}}, + {false, boolTag, []string{"false", "False", "FALSE"}}, + {nil, nullTag, []string{"", "~", "null", "Null", "NULL"}}, + {math.NaN(), floatTag, []string{".nan", ".NaN", ".NAN"}}, + {math.Inf(+1), floatTag, []string{".inf", ".Inf", ".INF"}}, + {math.Inf(+1), floatTag, []string{"+.inf", "+.Inf", "+.INF"}}, + {math.Inf(-1), floatTag, []string{"-.inf", "-.Inf", "-.INF"}}, + {"<<", mergeTag, []string{"<<"}}, + } + + m := resolveMap + for _, item := range resolveMapList { + for _, s := range item.l { + m[s] = resolveMapItem{item.v, item.tag} + } + } +} + +const ( + nullTag = "!!null" + boolTag = "!!bool" + strTag = "!!str" + intTag = "!!int" + floatTag = "!!float" + timestampTag = "!!timestamp" + seqTag = "!!seq" + mapTag = "!!map" + binaryTag = "!!binary" + mergeTag = "!!merge" +) + +var longTags = make(map[string]string) +var shortTags = make(map[string]string) + +func init() { + for _, stag := range []string{nullTag, boolTag, strTag, intTag, floatTag, timestampTag, seqTag, mapTag, binaryTag, mergeTag} { + ltag := longTag(stag) + longTags[stag] = ltag + shortTags[ltag] = stag + } +} + +const longTagPrefix = "tag:yaml.org,2002:" + +func shortTag(tag string) string { + if strings.HasPrefix(tag, longTagPrefix) { + if stag, ok := shortTags[tag]; ok { + return stag + } + return "!!" + tag[len(longTagPrefix):] + } + return tag +} + +func longTag(tag string) string { + if strings.HasPrefix(tag, "!!") { + if ltag, ok := longTags[tag]; ok { + return ltag + } + return longTagPrefix + tag[2:] + } + return tag +} + +func resolvableTag(tag string) bool { + switch tag { + case "", strTag, boolTag, intTag, floatTag, nullTag, timestampTag: + return true + } + return false +} + +var yamlStyleFloat = regexp.MustCompile(`^[-+]?(\.[0-9]+|[0-9]+(\.[0-9]*)?)([eE][-+]?[0-9]+)?$`) + +func resolve(tag string, in string) (rtag string, out interface{}) { + tag = shortTag(tag) + if !resolvableTag(tag) { + return tag, in + } + + defer func() { + switch tag { + case "", rtag, strTag, binaryTag: + return + case floatTag: + if rtag == intTag { + switch v := out.(type) { + case int64: + rtag = floatTag + out = float64(v) + return + case int: + rtag = floatTag + out = float64(v) + return + } + } + } + failf("cannot decode %s `%s` as a %s", shortTag(rtag), in, shortTag(tag)) + }() + + // Any data is accepted as a !!str or !!binary. + // Otherwise, the prefix is enough of a hint about what it might be. + hint := byte('N') + if in != "" { + hint = resolveTable[in[0]] + } + if hint != 0 && tag != strTag && tag != binaryTag { + // Handle things we can lookup in a map. + if item, ok := resolveMap[in]; ok { + return item.tag, item.value + } + + // Base 60 floats are a bad idea, were dropped in YAML 1.2, and + // are purposefully unsupported here. They're still quoted on + // the way out for compatibility with other parser, though. 
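+		// Illustrative examples of the resolution below: "true" and "~" are
+		// caught by the map lookup above; with hint '.', ".25" parses as a
+		// !!float; with hint 'D' or 'S', "0x1F" parses as !!int 31 and an
+		// untagged "2001-12-15" as a !!timestamp; anything unrecognized
+		// falls through and resolves as !!str.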
+ + switch hint { + case 'M': + // We've already checked the map above. + + case '.': + // Not in the map, so maybe a normal float. + floatv, err := strconv.ParseFloat(in, 64) + if err == nil { + return floatTag, floatv + } + + case 'D', 'S': + // Int, float, or timestamp. + // Only try values as a timestamp if the value is unquoted or there's an explicit + // !!timestamp tag. + if tag == "" || tag == timestampTag { + t, ok := parseTimestamp(in) + if ok { + return timestampTag, t + } + } + + plain := strings.Replace(in, "_", "", -1) + intv, err := strconv.ParseInt(plain, 0, 64) + if err == nil { + if intv == int64(int(intv)) { + return intTag, int(intv) + } else { + return intTag, intv + } + } + uintv, err := strconv.ParseUint(plain, 0, 64) + if err == nil { + return intTag, uintv + } + if yamlStyleFloat.MatchString(plain) { + floatv, err := strconv.ParseFloat(plain, 64) + if err == nil { + return floatTag, floatv + } + } + if strings.HasPrefix(plain, "0b") { + intv, err := strconv.ParseInt(plain[2:], 2, 64) + if err == nil { + if intv == int64(int(intv)) { + return intTag, int(intv) + } else { + return intTag, intv + } + } + uintv, err := strconv.ParseUint(plain[2:], 2, 64) + if err == nil { + return intTag, uintv + } + } else if strings.HasPrefix(plain, "-0b") { + intv, err := strconv.ParseInt("-"+plain[3:], 2, 64) + if err == nil { + if true || intv == int64(int(intv)) { + return intTag, int(intv) + } else { + return intTag, intv + } + } + } + // Octals as introduced in version 1.2 of the spec. + // Octals from the 1.1 spec, spelled as 0777, are still + // decoded by default in v3 as well for compatibility. + // May be dropped in v4 depending on how usage evolves. + if strings.HasPrefix(plain, "0o") { + intv, err := strconv.ParseInt(plain[2:], 8, 64) + if err == nil { + if intv == int64(int(intv)) { + return intTag, int(intv) + } else { + return intTag, intv + } + } + uintv, err := strconv.ParseUint(plain[2:], 8, 64) + if err == nil { + return intTag, uintv + } + } else if strings.HasPrefix(plain, "-0o") { + intv, err := strconv.ParseInt("-"+plain[3:], 8, 64) + if err == nil { + if true || intv == int64(int(intv)) { + return intTag, int(intv) + } else { + return intTag, intv + } + } + } + default: + panic("internal error: missing handler for resolver table: " + string(rune(hint)) + " (with " + in + ")") + } + } + return strTag, in +} + +// encodeBase64 encodes s as base64 that is broken up into multiple lines +// as appropriate for the resulting length. +func encodeBase64(s string) string { + const lineLen = 70 + encLen := base64.StdEncoding.EncodedLen(len(s)) + lines := encLen/lineLen + 1 + buf := make([]byte, encLen*2+lines) + in := buf[0:encLen] + out := buf[encLen:] + base64.StdEncoding.Encode(in, []byte(s)) + k := 0 + for i := 0; i < len(in); i += lineLen { + j := i + lineLen + if j > len(in) { + j = len(in) + } + k += copy(out[k:], in[i:j]) + if lines > 1 { + out[k] = '\n' + k++ + } + } + return string(out[:k]) +} + +// This is a subset of the formats allowed by the regular expression +// defined at http://yaml.org/type/timestamp.html. +var allowedTimestampFormats = []string{ + "2006-1-2T15:4:5.999999999Z07:00", // RCF3339Nano with short date fields. + "2006-1-2t15:4:5.999999999Z07:00", // RFC3339Nano with short date fields and lower-case "t". + "2006-1-2 15:4:5.999999999", // space separated with no time zone + "2006-1-2", // date only + // Notable exception: time.Parse cannot handle: "2001-12-14 21:59:43.10 -5" + // from the set of examples. 
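+	// Illustrative timestamps that do match one of the formats above:
+	// "2001-12-15T02:59:43.1Z", "2001-12-14 21:59:43.10", and "2002-12-14".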
+}
+
+// parseTimestamp parses s as a timestamp string and
+// returns the timestamp and reports whether it succeeded.
+// Timestamp formats are defined at http://yaml.org/type/timestamp.html
+func parseTimestamp(s string) (time.Time, bool) {
+	// TODO write code to check all the formats supported by
+	// http://yaml.org/type/timestamp.html instead of using time.Parse.
+
+	// Quick check: all date formats start with YYYY-.
+	i := 0
+	for ; i < len(s); i++ {
+		if c := s[i]; c < '0' || c > '9' {
+			break
+		}
+	}
+	if i != 4 || i == len(s) || s[i] != '-' {
+		return time.Time{}, false
+	}
+	for _, format := range allowedTimestampFormats {
+		if t, err := time.Parse(format, s); err == nil {
+			return t, true
+		}
+	}
+	return time.Time{}, false
+}
diff --git a/cli/internal/yaml/scannerc.go b/cli/internal/yaml/scannerc.go
new file mode 100644
index 0000000..87e46ef
--- /dev/null
+++ b/cli/internal/yaml/scannerc.go
@@ -0,0 +1,3040 @@
+//
+// Copyright (c) 2011-2019 Canonical Ltd
+// Copyright (c) 2006-2010 Kirill Simonov
+//
+// Permission is hereby granted, free of charge, to any person obtaining a copy of
+// this software and associated documentation files (the "Software"), to deal in
+// the Software without restriction, including without limitation the rights to
+// use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+// of the Software, and to permit persons to whom the Software is furnished to do
+// so, subject to the following conditions:
+//
+// The above copyright notice and this permission notice shall be included in all
+// copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+// SOFTWARE.
+
+package yaml
+
+import (
+	"bytes"
+	"fmt"
+)
+
+// Introduction
+// ************
+//
+// The following notes assume that you are familiar with the YAML specification
+// (http://yaml.org/spec/1.2/spec.html). We mostly follow it, although in
+// some cases we are less restrictive than it requires.
+//
+// The process of transforming a YAML stream into a sequence of events is
+// divided into two steps: Scanning and Parsing.
+//
+// The Scanner transforms the input stream into a sequence of tokens, while the
+// Parser transforms the sequence of tokens produced by the Scanner into a
+// sequence of parsing events.
+//
+// The Scanner is rather clever and complicated. The Parser, on the contrary,
+// is a straightforward implementation of a recursive descent parser (or an
+// LL(1) parser, as it is usually called).
+//
+// There are really only two aspects of Scanning that might be called "clever";
+// the rest is quite straightforward. These are "block collection start" and
+// "simple keys". Both are explained below in detail.
+//
+// Here the Scanning step is explained and implemented. We start with the list
+// of all the tokens produced by the Scanner together with short descriptions.
+//
+// Now, tokens:
+//
+//      STREAM-START(encoding)          # The stream start.
+//      STREAM-END                      # The stream end.
+//      VERSION-DIRECTIVE(major,minor)  # The '%YAML' directive.
+//      TAG-DIRECTIVE(handle,prefix)    # The '%TAG' directive.
+//      DOCUMENT-START                  # '---'
+//      DOCUMENT-END                    # '...'
+//      BLOCK-SEQUENCE-START            # Indentation increase denoting a block
+//      BLOCK-MAPPING-START             # sequence or a block mapping.
+//      BLOCK-END                       # Indentation decrease.
+//      FLOW-SEQUENCE-START             # '['
+//      FLOW-SEQUENCE-END               # ']'
+//      FLOW-MAPPING-START              # '{'
+//      FLOW-MAPPING-END                # '}'
+//      BLOCK-ENTRY                     # '-'
+//      FLOW-ENTRY                      # ','
+//      KEY                             # '?' or nothing (simple keys).
+//      VALUE                           # ':'
+//      ALIAS(anchor)                   # '*anchor'
+//      ANCHOR(anchor)                  # '&anchor'
+//      TAG(handle,suffix)              # '!handle!suffix'
+//      SCALAR(value,style)             # A scalar.
+//
+// The following two tokens are "virtual" tokens denoting the beginning and the
+// end of the stream:
+//
+//      STREAM-START(encoding)
+//      STREAM-END
+//
+// We pass the information about the input stream encoding with the
+// STREAM-START token.
+//
+// The next two tokens are responsible for directives:
+//
+//      VERSION-DIRECTIVE(major,minor)
+//      TAG-DIRECTIVE(handle,prefix)
+//
+// Example:
+//
+//      %YAML 1.1
+//      %TAG ! !foo
+//      %TAG !yaml! tag:yaml.org,2002:
+//      ---
+//
+// The corresponding sequence of tokens:
+//
+//      STREAM-START(utf-8)
+//      VERSION-DIRECTIVE(1,1)
+//      TAG-DIRECTIVE("!","!foo")
+//      TAG-DIRECTIVE("!yaml","tag:yaml.org,2002:")
+//      DOCUMENT-START
+//      STREAM-END
+//
+// Note that the VERSION-DIRECTIVE and TAG-DIRECTIVE tokens occupy a whole
+// line.
+//
+// The document start and end indicators are represented by:
+//
+//      DOCUMENT-START
+//      DOCUMENT-END
+//
+// Note that if a YAML stream contains an implicit document (without '---'
+// and '...' indicators), no DOCUMENT-START and DOCUMENT-END tokens will be
+// produced.
+//
+// In the following examples, we present whole documents together with the
+// produced tokens.
+//
+// 1. An implicit document:
+//
+//      'a scalar'
+//
+// Tokens:
+//
+//      STREAM-START(utf-8)
+//      SCALAR("a scalar",single-quoted)
+//      STREAM-END
+//
+// 2. An explicit document:
+//
+//      ---
+//      'a scalar'
+//      ...
+//
+// Tokens:
+//
+//      STREAM-START(utf-8)
+//      DOCUMENT-START
+//      SCALAR("a scalar",single-quoted)
+//      DOCUMENT-END
+//      STREAM-END
+//
+// 3. Several documents in a stream:
+//
+//      'a scalar'
+//      ---
+//      'another scalar'
+//      ---
+//      'yet another scalar'
+//
+// Tokens:
+//
+//      STREAM-START(utf-8)
+//      SCALAR("a scalar",single-quoted)
+//      DOCUMENT-START
+//      SCALAR("another scalar",single-quoted)
+//      DOCUMENT-START
+//      SCALAR("yet another scalar",single-quoted)
+//      STREAM-END
+//
+// We have already introduced the SCALAR token above. The following tokens are
+// used to describe aliases, anchors, tags, and scalars:
+//
+//      ALIAS(anchor)
+//      ANCHOR(anchor)
+//      TAG(handle,suffix)
+//      SCALAR(value,style)
+//
+// The following series of examples illustrates the usage of these tokens:
+//
+// 1. A recursive sequence:
+//
+//      &A [ *A ]
+//
+// Tokens:
+//
+//      STREAM-START(utf-8)
+//      ANCHOR("A")
+//      FLOW-SEQUENCE-START
+//      ALIAS("A")
+//      FLOW-SEQUENCE-END
+//      STREAM-END
+//
+// 2. A tagged scalar:
+//
+//      !!float "3.14"  # A good approximation.
+//
+// Tokens:
+//
+//      STREAM-START(utf-8)
+//      TAG("!!","float")
+//      SCALAR("3.14",double-quoted)
+//      STREAM-END
+//
+// 3. Various scalar styles:
+//
+//      --- # Implicit empty plain scalars do not produce tokens.
+//      --- a plain scalar
+//      --- 'a single-quoted scalar'
+//      --- "a double-quoted scalar"
+//      --- |-
+//        a literal scalar
+//      --- >-
+//        a folded
+//        scalar
+//
+// Tokens:
+//
+//      STREAM-START(utf-8)
+//      DOCUMENT-START
+//      DOCUMENT-START
+//      SCALAR("a plain scalar",plain)
+//      DOCUMENT-START
+//      SCALAR("a single-quoted scalar",single-quoted)
+//      DOCUMENT-START
+//      SCALAR("a double-quoted scalar",double-quoted)
+//      DOCUMENT-START
+//      SCALAR("a literal scalar",literal)
+//      DOCUMENT-START
+//      SCALAR("a folded scalar",folded)
+//      STREAM-END
+//
+// Now it's time to review collection-related tokens. We will start with
+// flow collections:
+//
+//      FLOW-SEQUENCE-START
+//      FLOW-SEQUENCE-END
+//      FLOW-MAPPING-START
+//      FLOW-MAPPING-END
+//      FLOW-ENTRY
+//      KEY
+//      VALUE
+//
+// The tokens FLOW-SEQUENCE-START, FLOW-SEQUENCE-END, FLOW-MAPPING-START, and
+// FLOW-MAPPING-END represent the indicators '[', ']', '{', and '}'
+// respectively. FLOW-ENTRY represents the ',' indicator. Finally the
+// indicators '?' and ':', which are used for denoting mapping keys and values,
+// are represented by the KEY and VALUE tokens.
+//
+// The following examples show flow collections:
+//
+// 1. A flow sequence:
+//
+//      [item 1, item 2, item 3]
+//
+// Tokens:
+//
+//      STREAM-START(utf-8)
+//      FLOW-SEQUENCE-START
+//      SCALAR("item 1",plain)
+//      FLOW-ENTRY
+//      SCALAR("item 2",plain)
+//      FLOW-ENTRY
+//      SCALAR("item 3",plain)
+//      FLOW-SEQUENCE-END
+//      STREAM-END
+//
+// 2. A flow mapping:
+//
+//      {
+//          a simple key: a value,  # Note that the KEY token is produced.
+//          ? a complex key: another value,
+//      }
+//
+// Tokens:
+//
+//      STREAM-START(utf-8)
+//      FLOW-MAPPING-START
+//      KEY
+//      SCALAR("a simple key",plain)
+//      VALUE
+//      SCALAR("a value",plain)
+//      FLOW-ENTRY
+//      KEY
+//      SCALAR("a complex key",plain)
+//      VALUE
+//      SCALAR("another value",plain)
+//      FLOW-ENTRY
+//      FLOW-MAPPING-END
+//      STREAM-END
+//
+// A simple key is a key which is not denoted by the '?' indicator. Note that
+// the Scanner still produces the KEY token whenever it encounters a simple key.
+//
+// For scanning block collections, the following tokens are used (note that we
+// repeat KEY and VALUE here):
+//
+//      BLOCK-SEQUENCE-START
+//      BLOCK-MAPPING-START
+//      BLOCK-END
+//      BLOCK-ENTRY
+//      KEY
+//      VALUE
+//
+// The tokens BLOCK-SEQUENCE-START and BLOCK-MAPPING-START denote the
+// indentation increase that precedes a block collection (cf. the INDENT token
+// in Python). The token BLOCK-END denotes the indentation decrease that ends a
+// block collection (cf. the DEDENT token in Python). However, YAML has some
+// syntax peculiarities that make detection of these tokens more complex.
+//
+// The tokens BLOCK-ENTRY, KEY, and VALUE are used to represent the indicators
+// '-', '?', and ':' respectively.
+//
+// The following examples show how the tokens BLOCK-SEQUENCE-START,
+// BLOCK-MAPPING-START, and BLOCK-END are emitted by the Scanner:
+//
+// 1. Block sequences:
+//
+//      - item 1
+//      - item 2
+//      -
+//        - item 3.1
+//        - item 3.2
+//      -
+//        key 1: value 1
+//        key 2: value 2
+//
+// Tokens:
+//
+//      STREAM-START(utf-8)
+//      BLOCK-SEQUENCE-START
+//      BLOCK-ENTRY
+//      SCALAR("item 1",plain)
+//      BLOCK-ENTRY
+//      SCALAR("item 2",plain)
+//      BLOCK-ENTRY
+//      BLOCK-SEQUENCE-START
+//      BLOCK-ENTRY
+//      SCALAR("item 3.1",plain)
+//      BLOCK-ENTRY
+//      SCALAR("item 3.2",plain)
+//      BLOCK-END
+//      BLOCK-ENTRY
+//      BLOCK-MAPPING-START
+//      KEY
+//      SCALAR("key 1",plain)
+//      VALUE
+//      SCALAR("value 1",plain)
+//      KEY
+//      SCALAR("key 2",plain)
+//      VALUE
+//      SCALAR("value 2",plain)
+//      BLOCK-END
+//      BLOCK-END
+//      STREAM-END
+//
+// 2. Block mappings:
+//
+//      a simple key: a value   # The KEY token is produced here.
+//      ? a complex key
+//      : another value
+//      a mapping:
+//        key 1: value 1
+//        key 2: value 2
+//      a sequence:
+//        - item 1
+//        - item 2
+//
+// Tokens:
+//
+//      STREAM-START(utf-8)
+//      BLOCK-MAPPING-START
+//      KEY
+//      SCALAR("a simple key",plain)
+//      VALUE
+//      SCALAR("a value",plain)
+//      KEY
+//      SCALAR("a complex key",plain)
+//      VALUE
+//      SCALAR("another value",plain)
+//      KEY
+//      SCALAR("a mapping",plain)
+//      BLOCK-MAPPING-START
+//      KEY
+//      SCALAR("key 1",plain)
+//      VALUE
+//      SCALAR("value 1",plain)
+//      KEY
+//      SCALAR("key 2",plain)
+//      VALUE
+//      SCALAR("value 2",plain)
+//      BLOCK-END
+//      KEY
+//      SCALAR("a sequence",plain)
+//      VALUE
+//      BLOCK-SEQUENCE-START
+//      BLOCK-ENTRY
+//      SCALAR("item 1",plain)
+//      BLOCK-ENTRY
+//      SCALAR("item 2",plain)
+//      BLOCK-END
+//      BLOCK-END
+//      STREAM-END
+//
+// YAML does not always require starting a new block collection on a new line.
+// If the current line contains only '-', '?', and ':' indicators, a new block
+// collection may start on the current line. The following examples
+// illustrate this case:
+//
+// 1. Collections in a sequence:
+//
+//      - - item 1
+//        - item 2
+//      - key 1: value 1
+//        key 2: value 2
+//      - ? complex key
+//        : complex value
+//
+// Tokens:
+//
+//      STREAM-START(utf-8)
+//      BLOCK-SEQUENCE-START
+//      BLOCK-ENTRY
+//      BLOCK-SEQUENCE-START
+//      BLOCK-ENTRY
+//      SCALAR("item 1",plain)
+//      BLOCK-ENTRY
+//      SCALAR("item 2",plain)
+//      BLOCK-END
+//      BLOCK-ENTRY
+//      BLOCK-MAPPING-START
+//      KEY
+//      SCALAR("key 1",plain)
+//      VALUE
+//      SCALAR("value 1",plain)
+//      KEY
+//      SCALAR("key 2",plain)
+//      VALUE
+//      SCALAR("value 2",plain)
+//      BLOCK-END
+//      BLOCK-ENTRY
+//      BLOCK-MAPPING-START
+//      KEY
+//      SCALAR("complex key")
+//      VALUE
+//      SCALAR("complex value")
+//      BLOCK-END
+//      BLOCK-END
+//      STREAM-END
+//
+// 2. Collections in a mapping:
+//
+//      ? a sequence
+//      : - item 1
+//        - item 2
+//      ? a mapping
+//      : key 1: value 1
+//        key 2: value 2
+//
+// Tokens:
+//
+//      STREAM-START(utf-8)
+//      BLOCK-MAPPING-START
+//      KEY
+//      SCALAR("a sequence",plain)
+//      VALUE
+//      BLOCK-SEQUENCE-START
+//      BLOCK-ENTRY
+//      SCALAR("item 1",plain)
+//      BLOCK-ENTRY
+//      SCALAR("item 2",plain)
+//      BLOCK-END
+//      KEY
+//      SCALAR("a mapping",plain)
+//      VALUE
+//      BLOCK-MAPPING-START
+//      KEY
+//      SCALAR("key 1",plain)
+//      VALUE
+//      SCALAR("value 1",plain)
+//      KEY
+//      SCALAR("key 2",plain)
+//      VALUE
+//      SCALAR("value 2",plain)
+//      BLOCK-END
+//      BLOCK-END
+//      STREAM-END
+//
+// YAML also permits non-indented sequences if they are included in a block
+// mapping. In this case, the token BLOCK-SEQUENCE-START is not produced:
+//
+//      key:
+//      - item 1    # BLOCK-SEQUENCE-START is NOT produced here.
+// - item 2 +// +// Tokens: +// +// STREAM-START(utf-8) +// BLOCK-MAPPING-START +// KEY +// SCALAR("key",plain) +// VALUE +// BLOCK-ENTRY +// SCALAR("item 1",plain) +// BLOCK-ENTRY +// SCALAR("item 2",plain) +// BLOCK-END +// + +// Ensure that the buffer contains the required number of characters. +// Return true on success, false on failure (reader error or memory error). +func cache(parser *yaml_parser_t, length int) bool { + // [Go] This was inlined: !cache(A, B) -> unread < B && !update(A, B) + return parser.unread >= length || yaml_parser_update_buffer(parser, length) +} + +// Advance the buffer pointer. +func skip(parser *yaml_parser_t) { + if !is_blank(parser.buffer, parser.buffer_pos) { + parser.newlines = 0 + } + parser.mark.index++ + parser.mark.column++ + parser.unread-- + parser.buffer_pos += width(parser.buffer[parser.buffer_pos]) +} + +func skip_line(parser *yaml_parser_t) { + if is_crlf(parser.buffer, parser.buffer_pos) { + parser.mark.index += 2 + parser.mark.column = 0 + parser.mark.line++ + parser.unread -= 2 + parser.buffer_pos += 2 + parser.newlines++ + } else if is_break(parser.buffer, parser.buffer_pos) { + parser.mark.index++ + parser.mark.column = 0 + parser.mark.line++ + parser.unread-- + parser.buffer_pos += width(parser.buffer[parser.buffer_pos]) + parser.newlines++ + } +} + +// Copy a character to a string buffer and advance pointers. +func read(parser *yaml_parser_t, s []byte) []byte { + if !is_blank(parser.buffer, parser.buffer_pos) { + parser.newlines = 0 + } + w := width(parser.buffer[parser.buffer_pos]) + if w == 0 { + panic("invalid character sequence") + } + if len(s) == 0 { + s = make([]byte, 0, 32) + } + if w == 1 && len(s)+w <= cap(s) { + s = s[:len(s)+1] + s[len(s)-1] = parser.buffer[parser.buffer_pos] + parser.buffer_pos++ + } else { + s = append(s, parser.buffer[parser.buffer_pos:parser.buffer_pos+w]...) + parser.buffer_pos += w + } + parser.mark.index++ + parser.mark.column++ + parser.unread-- + return s +} + +// Copy a line break character to a string buffer and advance pointers. +func read_line(parser *yaml_parser_t, s []byte) []byte { + buf := parser.buffer + pos := parser.buffer_pos + switch { + case buf[pos] == '\r' && buf[pos+1] == '\n': + // CR LF . LF + s = append(s, '\n') + parser.buffer_pos += 2 + parser.mark.index++ + parser.unread-- + case buf[pos] == '\r' || buf[pos] == '\n': + // CR|LF . LF + s = append(s, '\n') + parser.buffer_pos += 1 + case buf[pos] == '\xC2' && buf[pos+1] == '\x85': + // NEL . LF + s = append(s, '\n') + parser.buffer_pos += 2 + case buf[pos] == '\xE2' && buf[pos+1] == '\x80' && (buf[pos+2] == '\xA8' || buf[pos+2] == '\xA9'): + // LS|PS . LS|PS + s = append(s, buf[parser.buffer_pos:pos+3]...) + parser.buffer_pos += 3 + default: + return s + } + parser.mark.index++ + parser.mark.column = 0 + parser.mark.line++ + parser.unread-- + parser.newlines++ + return s +} + +// Get the next token. +func yaml_parser_scan(parser *yaml_parser_t, token *yaml_token_t) bool { + // Erase the token object. + *token = yaml_token_t{} // [Go] Is this necessary? + + // No tokens after STREAM-END or error. + if parser.stream_end_produced || parser.error != yaml_NO_ERROR { + return true + } + + // Ensure that the tokens queue contains enough tokens. + if !parser.token_available { + if !yaml_parser_fetch_more_tokens(parser) { + return false + } + } + + // Fetch the next token from the queue. 
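+	// The token is copied out by value; consumption is recorded by advancing
+	// the head index and clearing token_available below.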
+ *token = parser.tokens[parser.tokens_head] + parser.tokens_head++ + parser.tokens_parsed++ + parser.token_available = false + + if token.typ == yaml_STREAM_END_TOKEN { + parser.stream_end_produced = true + } + return true +} + +// Set the scanner error and return false. +func yaml_parser_set_scanner_error(parser *yaml_parser_t, context string, context_mark yaml_mark_t, problem string) bool { + parser.error = yaml_SCANNER_ERROR + parser.context = context + parser.context_mark = context_mark + parser.problem = problem + parser.problem_mark = parser.mark + return false +} + +func yaml_parser_set_scanner_tag_error(parser *yaml_parser_t, directive bool, context_mark yaml_mark_t, problem string) bool { + context := "while parsing a tag" + if directive { + context = "while parsing a %TAG directive" + } + return yaml_parser_set_scanner_error(parser, context, context_mark, problem) +} + +func trace(args ...interface{}) func() { + pargs := append([]interface{}{"+++"}, args...) + fmt.Println(pargs...) + pargs = append([]interface{}{"---"}, args...) + return func() { fmt.Println(pargs...) } +} + +// Ensure that the tokens queue contains at least one token which can be +// returned to the Parser. +func yaml_parser_fetch_more_tokens(parser *yaml_parser_t) bool { + // While we need more tokens to fetch, do it. + for { + // [Go] The comment parsing logic requires a lookahead of two tokens + // so that foot comments may be parsed in time of associating them + // with the tokens that are parsed before them, and also for line + // comments to be transformed into head comments in some edge cases. + if parser.tokens_head < len(parser.tokens)-2 { + // If a potential simple key is at the head position, we need to fetch + // the next token to disambiguate it. + head_tok_idx, ok := parser.simple_keys_by_tok[parser.tokens_parsed] + if !ok { + break + } else if valid, ok := yaml_simple_key_is_valid(parser, &parser.simple_keys[head_tok_idx]); !ok { + return false + } else if !valid { + break + } + } + // Fetch the next token. + if !yaml_parser_fetch_next_token(parser) { + return false + } + } + + parser.token_available = true + return true +} + +// The dispatcher for token fetchers. +func yaml_parser_fetch_next_token(parser *yaml_parser_t) (ok bool) { + // Ensure that the buffer is initialized. + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + + // Check if we just started scanning. Fetch STREAM-START then. + if !parser.stream_start_produced { + return yaml_parser_fetch_stream_start(parser) + } + + scan_mark := parser.mark + + // Eat whitespaces and comments until we reach the next token. + if !yaml_parser_scan_to_next_token(parser) { + return false + } + + // [Go] While unrolling indents, transform the head comments of prior + // indentation levels observed after scan_start into foot comments at + // the respective indexes. + + // Check the indentation level against the current column. + if !yaml_parser_unroll_indent(parser, parser.mark.column, scan_mark) { + return false + } + + // Ensure that the buffer contains at least 4 characters. 4 is the length + // of the longest indicators ('--- ' and '... '). + if parser.unread < 4 && !yaml_parser_update_buffer(parser, 4) { + return false + } + + // Is it the end of the stream? + if is_z(parser.buffer, parser.buffer_pos) { + return yaml_parser_fetch_stream_end(parser) + } + + // Is it a directive? 
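+	// Directives can only occur at column zero, e.g. a line beginning with
+	// "%YAML 1.1" or "%TAG !e! tag:example.com,2000:".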
+ if parser.mark.column == 0 && parser.buffer[parser.buffer_pos] == '%' { + return yaml_parser_fetch_directive(parser) + } + + buf := parser.buffer + pos := parser.buffer_pos + + // Is it the document start indicator? + if parser.mark.column == 0 && buf[pos] == '-' && buf[pos+1] == '-' && buf[pos+2] == '-' && is_blankz(buf, pos+3) { + return yaml_parser_fetch_document_indicator(parser, yaml_DOCUMENT_START_TOKEN) + } + + // Is it the document end indicator? + if parser.mark.column == 0 && buf[pos] == '.' && buf[pos+1] == '.' && buf[pos+2] == '.' && is_blankz(buf, pos+3) { + return yaml_parser_fetch_document_indicator(parser, yaml_DOCUMENT_END_TOKEN) + } + + comment_mark := parser.mark + if len(parser.tokens) > 0 && (parser.flow_level == 0 && buf[pos] == ':' || parser.flow_level > 0 && buf[pos] == ',') { + // Associate any following comments with the prior token. + comment_mark = parser.tokens[len(parser.tokens)-1].start_mark + } + defer func() { + if !ok { + return + } + if len(parser.tokens) > 0 && parser.tokens[len(parser.tokens)-1].typ == yaml_BLOCK_ENTRY_TOKEN { + // Sequence indicators alone have no line comments. It becomes + // a head comment for whatever follows. + return + } + if !yaml_parser_scan_line_comment(parser, comment_mark) { + ok = false + return + } + }() + + // Is it the flow sequence start indicator? + if buf[pos] == '[' { + return yaml_parser_fetch_flow_collection_start(parser, yaml_FLOW_SEQUENCE_START_TOKEN) + } + + // Is it the flow mapping start indicator? + if parser.buffer[parser.buffer_pos] == '{' { + return yaml_parser_fetch_flow_collection_start(parser, yaml_FLOW_MAPPING_START_TOKEN) + } + + // Is it the flow sequence end indicator? + if parser.buffer[parser.buffer_pos] == ']' { + return yaml_parser_fetch_flow_collection_end(parser, + yaml_FLOW_SEQUENCE_END_TOKEN) + } + + // Is it the flow mapping end indicator? + if parser.buffer[parser.buffer_pos] == '}' { + return yaml_parser_fetch_flow_collection_end(parser, + yaml_FLOW_MAPPING_END_TOKEN) + } + + // Is it the flow entry indicator? + if parser.buffer[parser.buffer_pos] == ',' { + return yaml_parser_fetch_flow_entry(parser) + } + + // Is it the block entry indicator? + if parser.buffer[parser.buffer_pos] == '-' && is_blankz(parser.buffer, parser.buffer_pos+1) { + return yaml_parser_fetch_block_entry(parser) + } + + // Is it the key indicator? + if parser.buffer[parser.buffer_pos] == '?' && (parser.flow_level > 0 || is_blankz(parser.buffer, parser.buffer_pos+1)) { + return yaml_parser_fetch_key(parser) + } + + // Is it the value indicator? + if parser.buffer[parser.buffer_pos] == ':' && (parser.flow_level > 0 || is_blankz(parser.buffer, parser.buffer_pos+1)) { + return yaml_parser_fetch_value(parser) + } + + // Is it an alias? + if parser.buffer[parser.buffer_pos] == '*' { + return yaml_parser_fetch_anchor(parser, yaml_ALIAS_TOKEN) + } + + // Is it an anchor? + if parser.buffer[parser.buffer_pos] == '&' { + return yaml_parser_fetch_anchor(parser, yaml_ANCHOR_TOKEN) + } + + // Is it a tag? + if parser.buffer[parser.buffer_pos] == '!' { + return yaml_parser_fetch_tag(parser) + } + + // Is it a literal scalar? + if parser.buffer[parser.buffer_pos] == '|' && parser.flow_level == 0 { + return yaml_parser_fetch_block_scalar(parser, true) + } + + // Is it a folded scalar? + if parser.buffer[parser.buffer_pos] == '>' && parser.flow_level == 0 { + return yaml_parser_fetch_block_scalar(parser, false) + } + + // Is it a single-quoted scalar? 
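+	// e.g. 'it''s' (within single quotes the only escape is a doubled quote).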
+ if parser.buffer[parser.buffer_pos] == '\'' { + return yaml_parser_fetch_flow_scalar(parser, true) + } + + // Is it a double-quoted scalar? + if parser.buffer[parser.buffer_pos] == '"' { + return yaml_parser_fetch_flow_scalar(parser, false) + } + + // Is it a plain scalar? + // + // A plain scalar may start with any non-blank characters except + // + // '-', '?', ':', ',', '[', ']', '{', '}', + // '#', '&', '*', '!', '|', '>', '\'', '\"', + // '%', '@', '`'. + // + // In the block context (and, for the '-' indicator, in the flow context + // too), it may also start with the characters + // + // '-', '?', ':' + // + // if it is followed by a non-space character. + // + // The last rule is more restrictive than the specification requires. + // [Go] TODO Make this logic more reasonable. + //switch parser.buffer[parser.buffer_pos] { + //case '-', '?', ':', ',', '?', '-', ',', ':', ']', '[', '}', '{', '&', '#', '!', '*', '>', '|', '"', '\'', '@', '%', '-', '`': + //} + if !(is_blankz(parser.buffer, parser.buffer_pos) || parser.buffer[parser.buffer_pos] == '-' || + parser.buffer[parser.buffer_pos] == '?' || parser.buffer[parser.buffer_pos] == ':' || + parser.buffer[parser.buffer_pos] == ',' || parser.buffer[parser.buffer_pos] == '[' || + parser.buffer[parser.buffer_pos] == ']' || parser.buffer[parser.buffer_pos] == '{' || + parser.buffer[parser.buffer_pos] == '}' || parser.buffer[parser.buffer_pos] == '#' || + parser.buffer[parser.buffer_pos] == '&' || parser.buffer[parser.buffer_pos] == '*' || + parser.buffer[parser.buffer_pos] == '!' || parser.buffer[parser.buffer_pos] == '|' || + parser.buffer[parser.buffer_pos] == '>' || parser.buffer[parser.buffer_pos] == '\'' || + parser.buffer[parser.buffer_pos] == '"' || parser.buffer[parser.buffer_pos] == '%' || + parser.buffer[parser.buffer_pos] == '@' || parser.buffer[parser.buffer_pos] == '`') || + (parser.buffer[parser.buffer_pos] == '-' && !is_blank(parser.buffer, parser.buffer_pos+1)) || + (parser.flow_level == 0 && + (parser.buffer[parser.buffer_pos] == '?' || parser.buffer[parser.buffer_pos] == ':') && + !is_blankz(parser.buffer, parser.buffer_pos+1)) { + return yaml_parser_fetch_plain_scalar(parser) + } + + // If we don't determine the token type so far, it is an error. + return yaml_parser_set_scanner_error(parser, + "while scanning for the next token", parser.mark, + "found character that cannot start any token") +} + +func yaml_simple_key_is_valid(parser *yaml_parser_t, simple_key *yaml_simple_key_t) (valid, ok bool) { + if !simple_key.possible { + return false, true + } + + // The 1.2 specification says: + // + // "If the ? indicator is omitted, parsing needs to see past the + // implicit key to recognize it as such. To limit the amount of + // lookahead required, the “:” indicator must appear at most 1024 + // Unicode characters beyond the start of the key. In addition, the key + // is restricted to a single line." + // + if simple_key.mark.line < parser.mark.line || simple_key.mark.index+1024 < parser.mark.index { + // Check if the potential simple key to be removed is required. + if simple_key.required { + return false, yaml_parser_set_scanner_error(parser, + "while scanning a simple key", simple_key.mark, + "could not find expected ':'") + } + simple_key.possible = false + return false, true + } + return true, true +} + +// Check if a simple key may start at the current position and add it if +// needed. 
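+//
+// For example, in the block mapping
+//
+//     foo: bar
+//
+// "foo" is a simple key: its position is recorded here, and the KEY token is
+// only emitted retroactively once the following ':' is scanned (see
+// yaml_parser_fetch_value).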
+func yaml_parser_save_simple_key(parser *yaml_parser_t) bool {
+ // A simple key is required at the current position if the scanner is in
+ // the block context and the current column coincides with the indentation
+ // level.
+
+ required := parser.flow_level == 0 && parser.indent == parser.mark.column
+
+ //
+ // If the current position may start a simple key, save it.
+ //
+ if parser.simple_key_allowed {
+ simple_key := yaml_simple_key_t{
+ possible: true,
+ required: required,
+ token_number: parser.tokens_parsed + (len(parser.tokens) - parser.tokens_head),
+ mark: parser.mark,
+ }
+
+ if !yaml_parser_remove_simple_key(parser) {
+ return false
+ }
+ parser.simple_keys[len(parser.simple_keys)-1] = simple_key
+ parser.simple_keys_by_tok[simple_key.token_number] = len(parser.simple_keys) - 1
+ }
+ return true
+}
+
+// Remove a potential simple key at the current flow level.
+func yaml_parser_remove_simple_key(parser *yaml_parser_t) bool {
+ i := len(parser.simple_keys) - 1
+ if parser.simple_keys[i].possible {
+ // If the key is required, it is an error.
+ if parser.simple_keys[i].required {
+ return yaml_parser_set_scanner_error(parser,
+ "while scanning a simple key", parser.simple_keys[i].mark,
+ "could not find expected ':'")
+ }
+ // Remove the key from the stack.
+ parser.simple_keys[i].possible = false
+ delete(parser.simple_keys_by_tok, parser.simple_keys[i].token_number)
+ }
+ return true
+}
+
+// max_flow_level limits the flow_level
+const max_flow_level = 10000
+
+// Increase the flow level and resize the simple key list if needed.
+func yaml_parser_increase_flow_level(parser *yaml_parser_t) bool {
+ // Reset the simple key on the next level.
+ parser.simple_keys = append(parser.simple_keys, yaml_simple_key_t{
+ possible: false,
+ required: false,
+ token_number: parser.tokens_parsed + (len(parser.tokens) - parser.tokens_head),
+ mark: parser.mark,
+ })
+
+ // Increase the flow level.
+ parser.flow_level++
+ if parser.flow_level > max_flow_level {
+ return yaml_parser_set_scanner_error(parser,
+ "while increasing flow level", parser.simple_keys[len(parser.simple_keys)-1].mark,
+ fmt.Sprintf("exceeded max depth of %d", max_flow_level))
+ }
+ return true
+}
+
+// Decrease the flow level.
+func yaml_parser_decrease_flow_level(parser *yaml_parser_t) bool {
+ if parser.flow_level > 0 {
+ parser.flow_level--
+ last := len(parser.simple_keys) - 1
+ delete(parser.simple_keys_by_tok, parser.simple_keys[last].token_number)
+ parser.simple_keys = parser.simple_keys[:last]
+ }
+ return true
+}
+
+// max_indents limits the indents stack size
+const max_indents = 10000
+
+// Push the current indentation level to the stack and set the new level if
+// the current column is greater than the indentation level. In this case,
+// append or insert the specified token into the token queue.
+func yaml_parser_roll_indent(parser *yaml_parser_t, column, number int, typ yaml_token_type_t, mark yaml_mark_t) bool {
+ // In the flow context, do nothing.
+ if parser.flow_level > 0 {
+ return true
+ }
+
+ if parser.indent < column {
+ // Push the current indentation level to the stack and set the new
+ // indentation level.
+ parser.indents = append(parser.indents, parser.indent)
+ parser.indent = column
+ if len(parser.indents) > max_indents {
+ return yaml_parser_set_scanner_error(parser,
+ "while increasing indent level", parser.simple_keys[len(parser.simple_keys)-1].mark,
+ fmt.Sprintf("exceeded max depth of %d", max_indents))
+ }
+
+ // Create a token and insert it into the queue.
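+ // For example, the first "- " entry seen at a deeper column inserts a
+ // BLOCK-SEQUENCE-START here, and a simple key followed by ':' inserts a
+ // BLOCK-MAPPING-START before the corresponding KEY token.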
+ token := yaml_token_t{ + typ: typ, + start_mark: mark, + end_mark: mark, + } + if number > -1 { + number -= parser.tokens_parsed + } + yaml_insert_token(parser, number, &token) + } + return true +} + +// Pop indentation levels from the indents stack until the current level +// becomes less or equal to the column. For each indentation level, append +// the BLOCK-END token. +func yaml_parser_unroll_indent(parser *yaml_parser_t, column int, scan_mark yaml_mark_t) bool { + // In the flow context, do nothing. + if parser.flow_level > 0 { + return true + } + + block_mark := scan_mark + block_mark.index-- + + // Loop through the indentation levels in the stack. + for parser.indent > column { + + // [Go] Reposition the end token before potential following + // foot comments of parent blocks. For that, search + // backwards for recent comments that were at the same + // indent as the block that is ending now. + stop_index := block_mark.index + for i := len(parser.comments) - 1; i >= 0; i-- { + comment := &parser.comments[i] + + if comment.end_mark.index < stop_index { + // Don't go back beyond the start of the comment/whitespace scan, unless column < 0. + // If requested indent column is < 0, then the document is over and everything else + // is a foot anyway. + break + } + if comment.start_mark.column == parser.indent+1 { + // This is a good match. But maybe there's a former comment + // at that same indent level, so keep searching. + block_mark = comment.start_mark + } + + // While the end of the former comment matches with + // the start of the following one, we know there's + // nothing in between and scanning is still safe. + stop_index = comment.scan_mark.index + } + + // Create a token and append it to the queue. + token := yaml_token_t{ + typ: yaml_BLOCK_END_TOKEN, + start_mark: block_mark, + end_mark: block_mark, + } + yaml_insert_token(parser, -1, &token) + + // Pop the indentation level. + parser.indent = parser.indents[len(parser.indents)-1] + parser.indents = parser.indents[:len(parser.indents)-1] + } + return true +} + +// Initialize the scanner and produce the STREAM-START token. +func yaml_parser_fetch_stream_start(parser *yaml_parser_t) bool { + + // Set the initial indentation. + parser.indent = -1 + + // Initialize the simple key stack. + parser.simple_keys = append(parser.simple_keys, yaml_simple_key_t{}) + + parser.simple_keys_by_tok = make(map[int]int) + + // A simple key is allowed at the beginning of the stream. + parser.simple_key_allowed = true + + // We have started. + parser.stream_start_produced = true + + // Create the STREAM-START token and append it to the queue. + token := yaml_token_t{ + typ: yaml_STREAM_START_TOKEN, + start_mark: parser.mark, + end_mark: parser.mark, + encoding: parser.encoding, + } + yaml_insert_token(parser, -1, &token) + return true +} + +// Produce the STREAM-END token and shut down the scanner. +func yaml_parser_fetch_stream_end(parser *yaml_parser_t) bool { + + // Force new line. + if parser.mark.column != 0 { + parser.mark.column = 0 + parser.mark.line++ + } + + // Reset the indentation level. + if !yaml_parser_unroll_indent(parser, -1, parser.mark) { + return false + } + + // Reset simple keys. + if !yaml_parser_remove_simple_key(parser) { + return false + } + + parser.simple_key_allowed = false + + // Create the STREAM-END token and append it to the queue. 
+ token := yaml_token_t{
+ typ: yaml_STREAM_END_TOKEN,
+ start_mark: parser.mark,
+ end_mark: parser.mark,
+ }
+ yaml_insert_token(parser, -1, &token)
+ return true
+}
+
+// Produce a VERSION-DIRECTIVE or TAG-DIRECTIVE token.
+func yaml_parser_fetch_directive(parser *yaml_parser_t) bool {
+ // Reset the indentation level.
+ if !yaml_parser_unroll_indent(parser, -1, parser.mark) {
+ return false
+ }
+
+ // Reset simple keys.
+ if !yaml_parser_remove_simple_key(parser) {
+ return false
+ }
+
+ parser.simple_key_allowed = false
+
+ // Create the YAML-DIRECTIVE or TAG-DIRECTIVE token.
+ token := yaml_token_t{}
+ if !yaml_parser_scan_directive(parser, &token) {
+ return false
+ }
+ // Append the token to the queue.
+ yaml_insert_token(parser, -1, &token)
+ return true
+}
+
+// Produce the DOCUMENT-START or DOCUMENT-END token.
+func yaml_parser_fetch_document_indicator(parser *yaml_parser_t, typ yaml_token_type_t) bool {
+ // Reset the indentation level.
+ if !yaml_parser_unroll_indent(parser, -1, parser.mark) {
+ return false
+ }
+
+ // Reset simple keys.
+ if !yaml_parser_remove_simple_key(parser) {
+ return false
+ }
+
+ parser.simple_key_allowed = false
+
+ // Consume the token.
+ start_mark := parser.mark
+
+ skip(parser)
+ skip(parser)
+ skip(parser)
+
+ end_mark := parser.mark
+
+ // Create the DOCUMENT-START or DOCUMENT-END token.
+ token := yaml_token_t{
+ typ: typ,
+ start_mark: start_mark,
+ end_mark: end_mark,
+ }
+ // Append the token to the queue.
+ yaml_insert_token(parser, -1, &token)
+ return true
+}
+
+// Produce the FLOW-SEQUENCE-START or FLOW-MAPPING-START token.
+func yaml_parser_fetch_flow_collection_start(parser *yaml_parser_t, typ yaml_token_type_t) bool {
+
+ // The indicators '[' and '{' may start a simple key.
+ if !yaml_parser_save_simple_key(parser) {
+ return false
+ }
+
+ // Increase the flow level.
+ if !yaml_parser_increase_flow_level(parser) {
+ return false
+ }
+
+ // A simple key may follow the indicators '[' and '{'.
+ parser.simple_key_allowed = true
+
+ // Consume the token.
+ start_mark := parser.mark
+ skip(parser)
+ end_mark := parser.mark
+
+ // Create the FLOW-SEQUENCE-START or FLOW-MAPPING-START token.
+ token := yaml_token_t{
+ typ: typ,
+ start_mark: start_mark,
+ end_mark: end_mark,
+ }
+ // Append the token to the queue.
+ yaml_insert_token(parser, -1, &token)
+ return true
+}
+
+// Produce the FLOW-SEQUENCE-END or FLOW-MAPPING-END token.
+func yaml_parser_fetch_flow_collection_end(parser *yaml_parser_t, typ yaml_token_type_t) bool {
+ // Reset any potential simple key on the current flow level.
+ if !yaml_parser_remove_simple_key(parser) {
+ return false
+ }
+
+ // Decrease the flow level.
+ if !yaml_parser_decrease_flow_level(parser) {
+ return false
+ }
+
+ // No simple keys after the indicators ']' and '}'.
+ parser.simple_key_allowed = false
+
+ // Consume the token.
+
+ start_mark := parser.mark
+ skip(parser)
+ end_mark := parser.mark
+
+ // Create the FLOW-SEQUENCE-END or FLOW-MAPPING-END token.
+ token := yaml_token_t{
+ typ: typ,
+ start_mark: start_mark,
+ end_mark: end_mark,
+ }
+ // Append the token to the queue.
+ yaml_insert_token(parser, -1, &token)
+ return true
+}
+
+// Produce the FLOW-ENTRY token.
+func yaml_parser_fetch_flow_entry(parser *yaml_parser_t) bool {
+ // Reset any potential simple keys on the current flow level.
+ if !yaml_parser_remove_simple_key(parser) {
+ return false
+ }
+
+ // Simple keys are allowed after ','.
+ parser.simple_key_allowed = true
+
+ // Consume the token.
+ start_mark := parser.mark
+ skip(parser)
+ end_mark := parser.mark
+
+ // Create the FLOW-ENTRY token and append it to the queue.
+ token := yaml_token_t{
+ typ: yaml_FLOW_ENTRY_TOKEN,
+ start_mark: start_mark,
+ end_mark: end_mark,
+ }
+ yaml_insert_token(parser, -1, &token)
+ return true
+}
+
+// Produce the BLOCK-ENTRY token.
+func yaml_parser_fetch_block_entry(parser *yaml_parser_t) bool {
+ // Check if the scanner is in the block context.
+ if parser.flow_level == 0 {
+ // Check if we are allowed to start a new entry.
+ if !parser.simple_key_allowed {
+ return yaml_parser_set_scanner_error(parser, "", parser.mark,
+ "block sequence entries are not allowed in this context")
+ }
+ // Add the BLOCK-SEQUENCE-START token if needed.
+ if !yaml_parser_roll_indent(parser, parser.mark.column, -1, yaml_BLOCK_SEQUENCE_START_TOKEN, parser.mark) {
+ return false
+ }
+ } else {
+ // It is an error for the '-' indicator to occur in the flow context,
+ // but we let the Parser detect and report about it because the Parser
+ // is able to point to the context.
+ }
+
+ // Reset any potential simple keys on the current flow level.
+ if !yaml_parser_remove_simple_key(parser) {
+ return false
+ }
+
+ // Simple keys are allowed after '-'.
+ parser.simple_key_allowed = true
+
+ // Consume the token.
+ start_mark := parser.mark
+ skip(parser)
+ end_mark := parser.mark
+
+ // Create the BLOCK-ENTRY token and append it to the queue.
+ token := yaml_token_t{
+ typ: yaml_BLOCK_ENTRY_TOKEN,
+ start_mark: start_mark,
+ end_mark: end_mark,
+ }
+ yaml_insert_token(parser, -1, &token)
+ return true
+}
+
+// Produce the KEY token.
+func yaml_parser_fetch_key(parser *yaml_parser_t) bool {
+
+ // In the block context, additional checks are required.
+ if parser.flow_level == 0 {
+ // Check if we are allowed to start a new key (not necessarily simple).
+ if !parser.simple_key_allowed {
+ return yaml_parser_set_scanner_error(parser, "", parser.mark,
+ "mapping keys are not allowed in this context")
+ }
+ // Add the BLOCK-MAPPING-START token if needed.
+ if !yaml_parser_roll_indent(parser, parser.mark.column, -1, yaml_BLOCK_MAPPING_START_TOKEN, parser.mark) {
+ return false
+ }
+ }
+
+ // Reset any potential simple keys on the current flow level.
+ if !yaml_parser_remove_simple_key(parser) {
+ return false
+ }
+
+ // Simple keys are allowed after '?' in the block context.
+ parser.simple_key_allowed = parser.flow_level == 0
+
+ // Consume the token.
+ start_mark := parser.mark
+ skip(parser)
+ end_mark := parser.mark
+
+ // Create the KEY token and append it to the queue.
+ token := yaml_token_t{
+ typ: yaml_KEY_TOKEN,
+ start_mark: start_mark,
+ end_mark: end_mark,
+ }
+ yaml_insert_token(parser, -1, &token)
+ return true
+}
+
+// Produce the VALUE token.
+func yaml_parser_fetch_value(parser *yaml_parser_t) bool {
+
+ simple_key := &parser.simple_keys[len(parser.simple_keys)-1]
+
+ // Have we found a simple key?
+ if valid, ok := yaml_simple_key_is_valid(parser, simple_key); !ok {
+ return false
+
+ } else if valid {
+
+ // Create the KEY token and insert it into the queue.
+ token := yaml_token_t{
+ typ: yaml_KEY_TOKEN,
+ start_mark: simple_key.mark,
+ end_mark: simple_key.mark,
+ }
+ yaml_insert_token(parser, simple_key.token_number-parser.tokens_parsed, &token)
+
+ // In the block context, we may need to add the BLOCK-MAPPING-START token.
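+ // For example, scanning "foo: bar" at the start of a document inserts a
+ // BLOCK-MAPPING-START before the KEY token just created for "foo".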
+ if !yaml_parser_roll_indent(parser, simple_key.mark.column, + simple_key.token_number, + yaml_BLOCK_MAPPING_START_TOKEN, simple_key.mark) { + return false + } + + // Remove the simple key. + simple_key.possible = false + delete(parser.simple_keys_by_tok, simple_key.token_number) + + // A simple key cannot follow another simple key. + parser.simple_key_allowed = false + + } else { + // The ':' indicator follows a complex key. + + // In the block context, extra checks are required. + if parser.flow_level == 0 { + + // Check if we are allowed to start a complex value. + if !parser.simple_key_allowed { + return yaml_parser_set_scanner_error(parser, "", parser.mark, + "mapping values are not allowed in this context") + } + + // Add the BLOCK-MAPPING-START token if needed. + if !yaml_parser_roll_indent(parser, parser.mark.column, -1, yaml_BLOCK_MAPPING_START_TOKEN, parser.mark) { + return false + } + } + + // Simple keys after ':' are allowed in the block context. + parser.simple_key_allowed = parser.flow_level == 0 + } + + // Consume the token. + start_mark := parser.mark + skip(parser) + end_mark := parser.mark + + // Create the VALUE token and append it to the queue. + token := yaml_token_t{ + typ: yaml_VALUE_TOKEN, + start_mark: start_mark, + end_mark: end_mark, + } + yaml_insert_token(parser, -1, &token) + return true +} + +// Produce the ALIAS or ANCHOR token. +func yaml_parser_fetch_anchor(parser *yaml_parser_t, typ yaml_token_type_t) bool { + // An anchor or an alias could be a simple key. + if !yaml_parser_save_simple_key(parser) { + return false + } + + // A simple key cannot follow an anchor or an alias. + parser.simple_key_allowed = false + + // Create the ALIAS or ANCHOR token and append it to the queue. + var token yaml_token_t + if !yaml_parser_scan_anchor(parser, &token, typ) { + return false + } + yaml_insert_token(parser, -1, &token) + return true +} + +// Produce the TAG token. +func yaml_parser_fetch_tag(parser *yaml_parser_t) bool { + // A tag could be a simple key. + if !yaml_parser_save_simple_key(parser) { + return false + } + + // A simple key cannot follow a tag. + parser.simple_key_allowed = false + + // Create the TAG token and append it to the queue. + var token yaml_token_t + if !yaml_parser_scan_tag(parser, &token) { + return false + } + yaml_insert_token(parser, -1, &token) + return true +} + +// Produce the SCALAR(...,literal) or SCALAR(...,folded) tokens. +func yaml_parser_fetch_block_scalar(parser *yaml_parser_t, literal bool) bool { + // Remove any potential simple keys. + if !yaml_parser_remove_simple_key(parser) { + return false + } + + // A simple key may follow a block scalar. + parser.simple_key_allowed = true + + // Create the SCALAR token and append it to the queue. + var token yaml_token_t + if !yaml_parser_scan_block_scalar(parser, &token, literal) { + return false + } + yaml_insert_token(parser, -1, &token) + return true +} + +// Produce the SCALAR(...,single-quoted) or SCALAR(...,double-quoted) tokens. +func yaml_parser_fetch_flow_scalar(parser *yaml_parser_t, single bool) bool { + // A plain scalar could be a simple key. + if !yaml_parser_save_simple_key(parser) { + return false + } + + // A simple key cannot follow a flow scalar. + parser.simple_key_allowed = false + + // Create the SCALAR token and append it to the queue. + var token yaml_token_t + if !yaml_parser_scan_flow_scalar(parser, &token, single) { + return false + } + yaml_insert_token(parser, -1, &token) + return true +} + +// Produce the SCALAR(...,plain) token. 
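+//
+// A plain scalar is an unquoted value such as "bar" in "foo: bar".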
+func yaml_parser_fetch_plain_scalar(parser *yaml_parser_t) bool {
+ // A plain scalar could be a simple key.
+ if !yaml_parser_save_simple_key(parser) {
+ return false
+ }
+
+ // A simple key cannot follow a plain scalar.
+ parser.simple_key_allowed = false
+
+ // Create the SCALAR token and append it to the queue.
+ var token yaml_token_t
+ if !yaml_parser_scan_plain_scalar(parser, &token) {
+ return false
+ }
+ yaml_insert_token(parser, -1, &token)
+ return true
+}
+
+// Eat whitespaces and comments until the next token is found.
+func yaml_parser_scan_to_next_token(parser *yaml_parser_t) bool {
+
+ scan_mark := parser.mark
+
+ // Loop until the next token is found.
+ for {
+ // Allow the BOM mark to start a line.
+ if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
+ return false
+ }
+ if parser.mark.column == 0 && is_bom(parser.buffer, parser.buffer_pos) {
+ skip(parser)
+ }
+
+ // Eat whitespaces.
+ // Tabs are allowed:
+ // - in the flow context
+ // - in the block context, but not at the beginning of the line or
+ // after '-', '?', or ':' (complex value).
+ if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
+ return false
+ }
+
+ for parser.buffer[parser.buffer_pos] == ' ' || ((parser.flow_level > 0 || !parser.simple_key_allowed) && parser.buffer[parser.buffer_pos] == '\t') {
+ skip(parser)
+ if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
+ return false
+ }
+ }
+
+ // Check if we just had a line comment under a sequence entry that
+ // looks more like a header to the following content. Similar to this:
+ //
+ // - # The comment
+ // - Some data
+ //
+ // If so, transform the line comment to a head comment and reposition.
+ if len(parser.comments) > 0 && len(parser.tokens) > 1 {
+ tokenA := parser.tokens[len(parser.tokens)-2]
+ tokenB := parser.tokens[len(parser.tokens)-1]
+ comment := &parser.comments[len(parser.comments)-1]
+ if tokenA.typ == yaml_BLOCK_SEQUENCE_START_TOKEN && tokenB.typ == yaml_BLOCK_ENTRY_TOKEN && len(comment.line) > 0 && !is_break(parser.buffer, parser.buffer_pos) {
+ // If it was in the prior line, reposition so it becomes a
+ // header of the follow up token. Otherwise, keep it in place
+ // so it becomes a header of the former.
+ comment.head = comment.line
+ comment.line = nil
+ if comment.start_mark.line == parser.mark.line-1 {
+ comment.token_mark = parser.mark
+ }
+ }
+ }
+
+ // Eat a comment until a line break.
+ if parser.buffer[parser.buffer_pos] == '#' {
+ if !yaml_parser_scan_comments(parser, scan_mark) {
+ return false
+ }
+ }
+
+ // If it is a line break, eat it.
+ if is_break(parser.buffer, parser.buffer_pos) {
+ if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) {
+ return false
+ }
+ skip_line(parser)
+
+ // In the block context, a new line may start a simple key.
+ if parser.flow_level == 0 {
+ parser.simple_key_allowed = true
+ }
+ } else {
+ break // We have found a token.
+ }
+ }
+
+ return true
+}
+
+// Scan a YAML-DIRECTIVE or TAG-DIRECTIVE token.
+//
+// Scope:
+//
+// %YAML 1.1 # a comment \n
+// ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+// %TAG !yaml! tag:yaml.org,2002: \n
+// ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+func yaml_parser_scan_directive(parser *yaml_parser_t, token *yaml_token_t) bool {
+ // Eat '%'.
+ start_mark := parser.mark
+ skip(parser)
+
+ // Scan the directive name.
+ var name []byte
+ if !yaml_parser_scan_directive_name(parser, start_mark, &name) {
+ return false
+ }
+
+ // Is it a YAML directive?
+ if bytes.Equal(name, []byte("YAML")) { + // Scan the VERSION directive value. + var major, minor int8 + if !yaml_parser_scan_version_directive_value(parser, start_mark, &major, &minor) { + return false + } + end_mark := parser.mark + + // Create a VERSION-DIRECTIVE token. + *token = yaml_token_t{ + typ: yaml_VERSION_DIRECTIVE_TOKEN, + start_mark: start_mark, + end_mark: end_mark, + major: major, + minor: minor, + } + + // Is it a TAG directive? + } else if bytes.Equal(name, []byte("TAG")) { + // Scan the TAG directive value. + var handle, prefix []byte + if !yaml_parser_scan_tag_directive_value(parser, start_mark, &handle, &prefix) { + return false + } + end_mark := parser.mark + + // Create a TAG-DIRECTIVE token. + *token = yaml_token_t{ + typ: yaml_TAG_DIRECTIVE_TOKEN, + start_mark: start_mark, + end_mark: end_mark, + value: handle, + prefix: prefix, + } + + // Unknown directive. + } else { + yaml_parser_set_scanner_error(parser, "while scanning a directive", + start_mark, "found unknown directive name") + return false + } + + // Eat the rest of the line including any comments. + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + + for is_blank(parser.buffer, parser.buffer_pos) { + skip(parser) + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + } + + if parser.buffer[parser.buffer_pos] == '#' { + // [Go] Discard this inline comment for the time being. + //if !yaml_parser_scan_line_comment(parser, start_mark) { + // return false + //} + for !is_breakz(parser.buffer, parser.buffer_pos) { + skip(parser) + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + } + } + + // Check if we are at the end of the line. + if !is_breakz(parser.buffer, parser.buffer_pos) { + yaml_parser_set_scanner_error(parser, "while scanning a directive", + start_mark, "did not find expected comment or line break") + return false + } + + // Eat a line break. + if is_break(parser.buffer, parser.buffer_pos) { + if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { + return false + } + skip_line(parser) + } + + return true +} + +// Scan the directive name. +// +// Scope: +// +// %YAML 1.1 # a comment \n +// ^^^^ +// %TAG !yaml! tag:yaml.org,2002: \n +// ^^^ +func yaml_parser_scan_directive_name(parser *yaml_parser_t, start_mark yaml_mark_t, name *[]byte) bool { + // Consume the directive name. + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + + var s []byte + for is_alpha(parser.buffer, parser.buffer_pos) { + s = read(parser, s) + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + } + + // Check if the name is empty. + if len(s) == 0 { + yaml_parser_set_scanner_error(parser, "while scanning a directive", + start_mark, "could not find expected directive name") + return false + } + + // Check for an blank character after the name. + if !is_blankz(parser.buffer, parser.buffer_pos) { + yaml_parser_set_scanner_error(parser, "while scanning a directive", + start_mark, "found unexpected non-alphabetical character") + return false + } + *name = s + return true +} + +// Scan the value of VERSION-DIRECTIVE. +// +// Scope: +// +// %YAML 1.1 # a comment \n +// ^^^^^^ +func yaml_parser_scan_version_directive_value(parser *yaml_parser_t, start_mark yaml_mark_t, major, minor *int8) bool { + // Eat whitespaces. 
+ if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + for is_blank(parser.buffer, parser.buffer_pos) { + skip(parser) + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + } + + // Consume the major version number. + if !yaml_parser_scan_version_directive_number(parser, start_mark, major) { + return false + } + + // Eat '.'. + if parser.buffer[parser.buffer_pos] != '.' { + return yaml_parser_set_scanner_error(parser, "while scanning a %YAML directive", + start_mark, "did not find expected digit or '.' character") + } + + skip(parser) + + // Consume the minor version number. + if !yaml_parser_scan_version_directive_number(parser, start_mark, minor) { + return false + } + return true +} + +const max_number_length = 2 + +// Scan the version number of VERSION-DIRECTIVE. +// +// Scope: +// +// %YAML 1.1 # a comment \n +// ^ +// %YAML 1.1 # a comment \n +// ^ +func yaml_parser_scan_version_directive_number(parser *yaml_parser_t, start_mark yaml_mark_t, number *int8) bool { + + // Repeat while the next character is digit. + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + var value, length int8 + for is_digit(parser.buffer, parser.buffer_pos) { + // Check if the number is too long. + length++ + if length > max_number_length { + return yaml_parser_set_scanner_error(parser, "while scanning a %YAML directive", + start_mark, "found extremely long version number") + } + value = value*10 + int8(as_digit(parser.buffer, parser.buffer_pos)) + skip(parser) + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + } + + // Check if the number was present. + if length == 0 { + return yaml_parser_set_scanner_error(parser, "while scanning a %YAML directive", + start_mark, "did not find expected version number") + } + *number = value + return true +} + +// Scan the value of a TAG-DIRECTIVE token. +// +// Scope: +// +// %TAG !yaml! tag:yaml.org,2002: \n +// ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +func yaml_parser_scan_tag_directive_value(parser *yaml_parser_t, start_mark yaml_mark_t, handle, prefix *[]byte) bool { + var handle_value, prefix_value []byte + + // Eat whitespaces. + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + + for is_blank(parser.buffer, parser.buffer_pos) { + skip(parser) + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + } + + // Scan a handle. + if !yaml_parser_scan_tag_handle(parser, true, start_mark, &handle_value) { + return false + } + + // Expect a whitespace. + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + if !is_blank(parser.buffer, parser.buffer_pos) { + yaml_parser_set_scanner_error(parser, "while scanning a %TAG directive", + start_mark, "did not find expected whitespace") + return false + } + + // Eat whitespaces. + for is_blank(parser.buffer, parser.buffer_pos) { + skip(parser) + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + } + + // Scan a prefix. + if !yaml_parser_scan_tag_uri(parser, true, nil, start_mark, &prefix_value) { + return false + } + + // Expect a whitespace or line break. 
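+ // For example, in "%TAG !e! tag:example.com,2000:app/" the handle is
+ // "!e!" and the prefix is "tag:example.com,2000:app/".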
+ if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
+ return false
+ }
+ if !is_blankz(parser.buffer, parser.buffer_pos) {
+ yaml_parser_set_scanner_error(parser, "while scanning a %TAG directive",
+ start_mark, "did not find expected whitespace or line break")
+ return false
+ }
+
+ *handle = handle_value
+ *prefix = prefix_value
+ return true
+}
+
+func yaml_parser_scan_anchor(parser *yaml_parser_t, token *yaml_token_t, typ yaml_token_type_t) bool {
+ var s []byte
+
+ // Eat the indicator character.
+ start_mark := parser.mark
+ skip(parser)
+
+ // Consume the value.
+ if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
+ return false
+ }
+
+ for is_alpha(parser.buffer, parser.buffer_pos) {
+ s = read(parser, s)
+ if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
+ return false
+ }
+ }
+
+ end_mark := parser.mark
+
+ /*
+ * Check if the length of the anchor is greater than 0 and it is followed by
+ * a whitespace character or one of the indicators:
+ *
+ * '?', ':', ',', ']', '}', '%', '@', '`'.
+ */
+
+ if len(s) == 0 ||
+ !(is_blankz(parser.buffer, parser.buffer_pos) || parser.buffer[parser.buffer_pos] == '?' ||
+ parser.buffer[parser.buffer_pos] == ':' || parser.buffer[parser.buffer_pos] == ',' ||
+ parser.buffer[parser.buffer_pos] == ']' || parser.buffer[parser.buffer_pos] == '}' ||
+ parser.buffer[parser.buffer_pos] == '%' || parser.buffer[parser.buffer_pos] == '@' ||
+ parser.buffer[parser.buffer_pos] == '`') {
+ context := "while scanning an alias"
+ if typ == yaml_ANCHOR_TOKEN {
+ context = "while scanning an anchor"
+ }
+ yaml_parser_set_scanner_error(parser, context, start_mark,
+ "did not find expected alphabetic or numeric character")
+ return false
+ }
+
+ // Create a token.
+ *token = yaml_token_t{
+ typ: typ,
+ start_mark: start_mark,
+ end_mark: end_mark,
+ value: s,
+ }
+
+ return true
+}
+
+/*
+ * Scan a TAG token.
+ */
+
+func yaml_parser_scan_tag(parser *yaml_parser_t, token *yaml_token_t) bool {
+ var handle, suffix []byte
+
+ start_mark := parser.mark
+
+ // Check if the tag is in the canonical form.
+ if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) {
+ return false
+ }
+
+ if parser.buffer[parser.buffer_pos+1] == '<' {
+ // Keep the handle as ''
+
+ // Eat '!<'
+ skip(parser)
+ skip(parser)
+
+ // Consume the tag value.
+ if !yaml_parser_scan_tag_uri(parser, false, nil, start_mark, &suffix) {
+ return false
+ }
+
+ // Check for '>' and eat it.
+ if parser.buffer[parser.buffer_pos] != '>' {
+ yaml_parser_set_scanner_error(parser, "while scanning a tag",
+ start_mark, "did not find the expected '>'")
+ return false
+ }
+
+ skip(parser)
+ } else {
+ // The tag has either the '!suffix' or the '!handle!suffix' form.
+
+ // First, try to scan a handle.
+ if !yaml_parser_scan_tag_handle(parser, false, start_mark, &handle) {
+ return false
+ }
+
+ // Check if it is, indeed, a handle.
+ if handle[0] == '!' && len(handle) > 1 && handle[len(handle)-1] == '!' {
+ // Scan the suffix now.
+ if !yaml_parser_scan_tag_uri(parser, false, nil, start_mark, &suffix) {
+ return false
+ }
+ } else {
+ // It wasn't a handle after all. Scan the rest of the tag.
+ if !yaml_parser_scan_tag_uri(parser, false, handle, start_mark, &suffix) {
+ return false
+ }
+
+ // Set the handle to '!'.
+ handle = []byte{'!'}
+
+ // A special case: the '!' tag. Set the handle to '' and the
+ // suffix to '!'.
+ if len(suffix) == 0 {
+ handle, suffix = suffix, handle
+ }
+ }
+ }
+
+ // Check the character which ends the tag.
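+ // A tag must be followed by a blank or a line break, e.g. "!!str value"
+ // or "!<tag:yaml.org,2002:str> value".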
+ if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
+ return false
+ }
+ if !is_blankz(parser.buffer, parser.buffer_pos) {
+ yaml_parser_set_scanner_error(parser, "while scanning a tag",
+ start_mark, "did not find expected whitespace or line break")
+ return false
+ }
+
+ end_mark := parser.mark
+
+ // Create a token.
+ *token = yaml_token_t{
+ typ: yaml_TAG_TOKEN,
+ start_mark: start_mark,
+ end_mark: end_mark,
+ value: handle,
+ suffix: suffix,
+ }
+ return true
+}
+
+// Scan a tag handle.
+func yaml_parser_scan_tag_handle(parser *yaml_parser_t, directive bool, start_mark yaml_mark_t, handle *[]byte) bool {
+ // Check the initial '!' character.
+ if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
+ return false
+ }
+ if parser.buffer[parser.buffer_pos] != '!' {
+ yaml_parser_set_scanner_tag_error(parser, directive,
+ start_mark, "did not find expected '!'")
+ return false
+ }
+
+ var s []byte
+
+ // Copy the '!' character.
+ s = read(parser, s)
+
+ // Copy all subsequent alphabetical and numerical characters.
+ if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
+ return false
+ }
+ for is_alpha(parser.buffer, parser.buffer_pos) {
+ s = read(parser, s)
+ if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
+ return false
+ }
+ }
+
+ // Check if the trailing character is '!' and copy it.
+ if parser.buffer[parser.buffer_pos] == '!' {
+ s = read(parser, s)
+ } else {
+ // It's either the '!' tag or not really a tag handle. If it's a %TAG
+ // directive, it's an error. If it's a tag token, it must be part of the URI.
+ if directive && string(s) != "!" {
+ yaml_parser_set_scanner_tag_error(parser, directive,
+ start_mark, "did not find expected '!'")
+ return false
+ }
+ }
+
+ *handle = s
+ return true
+}
+
+// Scan a tag.
+func yaml_parser_scan_tag_uri(parser *yaml_parser_t, directive bool, head []byte, start_mark yaml_mark_t, uri *[]byte) bool {
+ //size_t length = head ? strlen((char *)head) : 0
+ var s []byte
+ hasTag := len(head) > 0
+
+ // Copy the head if needed.
+ //
+ // Note that we don't copy the leading '!' character.
+ if len(head) > 1 {
+ s = append(s, head[1:]...)
+ }
+
+ // Scan the tag.
+ if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
+ return false
+ }
+
+ // The set of characters that may appear in a URI is as follows:
+ //
+ // '0'-'9', 'A'-'Z', 'a'-'z', '_', '-', ';', '/', '?', ':', '@', '&',
+ // '=', '+', '$', ',', '.', '!', '~', '*', '\'', '(', ')', '[', ']',
+ // '%'.
+ // [Go] TODO Convert this into more reasonable logic.
+ for is_alpha(parser.buffer, parser.buffer_pos) || parser.buffer[parser.buffer_pos] == ';' ||
+ parser.buffer[parser.buffer_pos] == '/' || parser.buffer[parser.buffer_pos] == '?' ||
+ parser.buffer[parser.buffer_pos] == ':' || parser.buffer[parser.buffer_pos] == '@' ||
+ parser.buffer[parser.buffer_pos] == '&' || parser.buffer[parser.buffer_pos] == '=' ||
+ parser.buffer[parser.buffer_pos] == '+' || parser.buffer[parser.buffer_pos] == '$' ||
+ parser.buffer[parser.buffer_pos] == ',' || parser.buffer[parser.buffer_pos] == '.' ||
+ parser.buffer[parser.buffer_pos] == '!' || parser.buffer[parser.buffer_pos] == '~' ||
+ parser.buffer[parser.buffer_pos] == '*' || parser.buffer[parser.buffer_pos] == '\'' ||
+ parser.buffer[parser.buffer_pos] == '(' || parser.buffer[parser.buffer_pos] == ')' ||
+ parser.buffer[parser.buffer_pos] == '[' || parser.buffer[parser.buffer_pos] == ']' ||
+ parser.buffer[parser.buffer_pos] == '%' {
+ // Check if it is a URI-escape sequence.
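+ // (e.g. "%20" for a space; multi-octet sequences such as "%C3%A9"
+ // decode to a single UTF-8 character).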
+ if parser.buffer[parser.buffer_pos] == '%' { + if !yaml_parser_scan_uri_escapes(parser, directive, start_mark, &s) { + return false + } + } else { + s = read(parser, s) + } + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + hasTag = true + } + + if !hasTag { + yaml_parser_set_scanner_tag_error(parser, directive, + start_mark, "did not find expected tag URI") + return false + } + *uri = s + return true +} + +// Decode an URI-escape sequence corresponding to a single UTF-8 character. +func yaml_parser_scan_uri_escapes(parser *yaml_parser_t, directive bool, start_mark yaml_mark_t, s *[]byte) bool { + + // Decode the required number of characters. + w := 1024 + for w > 0 { + // Check for a URI-escaped octet. + if parser.unread < 3 && !yaml_parser_update_buffer(parser, 3) { + return false + } + + if !(parser.buffer[parser.buffer_pos] == '%' && + is_hex(parser.buffer, parser.buffer_pos+1) && + is_hex(parser.buffer, parser.buffer_pos+2)) { + return yaml_parser_set_scanner_tag_error(parser, directive, + start_mark, "did not find URI escaped octet") + } + + // Get the octet. + octet := byte((as_hex(parser.buffer, parser.buffer_pos+1) << 4) + as_hex(parser.buffer, parser.buffer_pos+2)) + + // If it is the leading octet, determine the length of the UTF-8 sequence. + if w == 1024 { + w = width(octet) + if w == 0 { + return yaml_parser_set_scanner_tag_error(parser, directive, + start_mark, "found an incorrect leading UTF-8 octet") + } + } else { + // Check if the trailing octet is correct. + if octet&0xC0 != 0x80 { + return yaml_parser_set_scanner_tag_error(parser, directive, + start_mark, "found an incorrect trailing UTF-8 octet") + } + } + + // Copy the octet and move the pointers. + *s = append(*s, octet) + skip(parser) + skip(parser) + skip(parser) + w-- + } + return true +} + +// Scan a block scalar. +func yaml_parser_scan_block_scalar(parser *yaml_parser_t, token *yaml_token_t, literal bool) bool { + // Eat the indicator '|' or '>'. + start_mark := parser.mark + skip(parser) + + // Scan the additional block scalar indicators. + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + + // Check for a chomping indicator. + var chomping, increment int + if parser.buffer[parser.buffer_pos] == '+' || parser.buffer[parser.buffer_pos] == '-' { + // Set the chomping method and eat the indicator. + if parser.buffer[parser.buffer_pos] == '+' { + chomping = +1 + } else { + chomping = -1 + } + skip(parser) + + // Check for an indentation indicator. + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + if is_digit(parser.buffer, parser.buffer_pos) { + // Check that the indentation is greater than 0. + if parser.buffer[parser.buffer_pos] == '0' { + yaml_parser_set_scanner_error(parser, "while scanning a block scalar", + start_mark, "found an indentation indicator equal to 0") + return false + } + + // Get the indentation level and eat the indicator. + increment = as_digit(parser.buffer, parser.buffer_pos) + skip(parser) + } + + } else if is_digit(parser.buffer, parser.buffer_pos) { + // Do the same as above, but in the opposite order. 
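+ // The two indicators may appear in either order, so "|2-" and "|-2"
+ // both request an explicit indent of 2 with strip chomping.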
+ + if parser.buffer[parser.buffer_pos] == '0' { + yaml_parser_set_scanner_error(parser, "while scanning a block scalar", + start_mark, "found an indentation indicator equal to 0") + return false + } + increment = as_digit(parser.buffer, parser.buffer_pos) + skip(parser) + + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + if parser.buffer[parser.buffer_pos] == '+' || parser.buffer[parser.buffer_pos] == '-' { + if parser.buffer[parser.buffer_pos] == '+' { + chomping = +1 + } else { + chomping = -1 + } + skip(parser) + } + } + + // Eat whitespaces and comments to the end of the line. + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + for is_blank(parser.buffer, parser.buffer_pos) { + skip(parser) + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + } + if parser.buffer[parser.buffer_pos] == '#' { + if !yaml_parser_scan_line_comment(parser, start_mark) { + return false + } + for !is_breakz(parser.buffer, parser.buffer_pos) { + skip(parser) + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + } + } + + // Check if we are at the end of the line. + if !is_breakz(parser.buffer, parser.buffer_pos) { + yaml_parser_set_scanner_error(parser, "while scanning a block scalar", + start_mark, "did not find expected comment or line break") + return false + } + + // Eat a line break. + if is_break(parser.buffer, parser.buffer_pos) { + if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { + return false + } + skip_line(parser) + } + + end_mark := parser.mark + + // Set the indentation level if it was specified. + var indent int + if increment > 0 { + if parser.indent >= 0 { + indent = parser.indent + increment + } else { + indent = increment + } + } + + // Scan the leading line breaks and determine the indentation level if needed. + var s, leading_break, trailing_breaks []byte + if !yaml_parser_scan_block_scalar_breaks(parser, &indent, &trailing_breaks, start_mark, &end_mark) { + return false + } + + // Scan the block scalar content. + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + var leading_blank, trailing_blank bool + for parser.mark.column == indent && !is_z(parser.buffer, parser.buffer_pos) { + // We are at the beginning of a non-empty line. + + // Is it a trailing whitespace? + trailing_blank = is_blank(parser.buffer, parser.buffer_pos) + + // Check if we need to fold the leading line break. + if !literal && !leading_blank && !trailing_blank && len(leading_break) > 0 && leading_break[0] == '\n' { + // Do we need to join the lines by space? + if len(trailing_breaks) == 0 { + s = append(s, ' ') + } + } else { + s = append(s, leading_break...) + } + leading_break = leading_break[:0] + + // Append the remaining line breaks. + s = append(s, trailing_breaks...) + trailing_breaks = trailing_breaks[:0] + + // Is it a leading whitespace? + leading_blank = is_blank(parser.buffer, parser.buffer_pos) + + // Consume the current line. + for !is_breakz(parser.buffer, parser.buffer_pos) { + s = read(parser, s) + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + } + + // Consume the line break. + if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { + return false + } + + leading_break = read_line(parser, leading_break) + + // Eat the following indentation spaces and line breaks. 
+ if !yaml_parser_scan_block_scalar_breaks(parser, &indent, &trailing_breaks, start_mark, &end_mark) { + return false + } + } + + // Chomp the tail. + if chomping != -1 { + s = append(s, leading_break...) + } + if chomping == 1 { + s = append(s, trailing_breaks...) + } + + // Create a token. + *token = yaml_token_t{ + typ: yaml_SCALAR_TOKEN, + start_mark: start_mark, + end_mark: end_mark, + value: s, + style: yaml_LITERAL_SCALAR_STYLE, + } + if !literal { + token.style = yaml_FOLDED_SCALAR_STYLE + } + return true +} + +// Scan indentation spaces and line breaks for a block scalar. Determine the +// indentation level if needed. +func yaml_parser_scan_block_scalar_breaks(parser *yaml_parser_t, indent *int, breaks *[]byte, start_mark yaml_mark_t, end_mark *yaml_mark_t) bool { + *end_mark = parser.mark + + // Eat the indentation spaces and line breaks. + max_indent := 0 + for { + // Eat the indentation spaces. + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + for (*indent == 0 || parser.mark.column < *indent) && is_space(parser.buffer, parser.buffer_pos) { + skip(parser) + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + } + if parser.mark.column > max_indent { + max_indent = parser.mark.column + } + + // Check for a tab character messing the indentation. + if (*indent == 0 || parser.mark.column < *indent) && is_tab(parser.buffer, parser.buffer_pos) { + return yaml_parser_set_scanner_error(parser, "while scanning a block scalar", + start_mark, "found a tab character where an indentation space is expected") + } + + // Have we found a non-empty line? + if !is_break(parser.buffer, parser.buffer_pos) { + break + } + + // Consume the line break. + if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { + return false + } + // [Go] Should really be returning breaks instead. + *breaks = read_line(parser, *breaks) + *end_mark = parser.mark + } + + // Determine the indentation level if needed. + if *indent == 0 { + *indent = max_indent + if *indent < parser.indent+1 { + *indent = parser.indent + 1 + } + if *indent < 1 { + *indent = 1 + } + } + return true +} + +// Scan a quoted scalar. +func yaml_parser_scan_flow_scalar(parser *yaml_parser_t, token *yaml_token_t, single bool) bool { + // Eat the left quote. + start_mark := parser.mark + skip(parser) + + // Consume the content of the quoted scalar. + var s, leading_break, trailing_breaks, whitespaces []byte + for { + // Check that there are no document indicators at the beginning of the line. + if parser.unread < 4 && !yaml_parser_update_buffer(parser, 4) { + return false + } + + if parser.mark.column == 0 && + ((parser.buffer[parser.buffer_pos+0] == '-' && + parser.buffer[parser.buffer_pos+1] == '-' && + parser.buffer[parser.buffer_pos+2] == '-') || + (parser.buffer[parser.buffer_pos+0] == '.' && + parser.buffer[parser.buffer_pos+1] == '.' && + parser.buffer[parser.buffer_pos+2] == '.')) && + is_blankz(parser.buffer, parser.buffer_pos+3) { + yaml_parser_set_scanner_error(parser, "while scanning a quoted scalar", + start_mark, "found unexpected document indicator") + return false + } + + // Check for EOF. + if is_z(parser.buffer, parser.buffer_pos) { + yaml_parser_set_scanner_error(parser, "while scanning a quoted scalar", + start_mark, "found unexpected end of stream") + return false + } + + // Consume non-blank characters. 
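+ // In double-quoted scalars this also decodes escape sequences such as
+ // \n, \t, \xNN, \uNNNN, and \UNNNNNNNN; in single-quoted scalars the
+ // only special form is the doubled quote ''.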
+ leading_blanks := false
+ for !is_blankz(parser.buffer, parser.buffer_pos) {
+ if single && parser.buffer[parser.buffer_pos] == '\'' && parser.buffer[parser.buffer_pos+1] == '\'' {
+ // It is an escaped single quote.
+ s = append(s, '\'')
+ skip(parser)
+ skip(parser)
+
+ } else if single && parser.buffer[parser.buffer_pos] == '\'' {
+ // It is a right single quote.
+ break
+ } else if !single && parser.buffer[parser.buffer_pos] == '"' {
+ // It is a right double quote.
+ break
+
+ } else if !single && parser.buffer[parser.buffer_pos] == '\\' && is_break(parser.buffer, parser.buffer_pos+1) {
+ // It is an escaped line break.
+ if parser.unread < 3 && !yaml_parser_update_buffer(parser, 3) {
+ return false
+ }
+ skip(parser)
+ skip_line(parser)
+ leading_blanks = true
+ break
+
+ } else if !single && parser.buffer[parser.buffer_pos] == '\\' {
+ // It is an escape sequence.
+ code_length := 0
+
+ // Check the escape character.
+ switch parser.buffer[parser.buffer_pos+1] {
+ case '0':
+ s = append(s, 0)
+ case 'a':
+ s = append(s, '\x07')
+ case 'b':
+ s = append(s, '\x08')
+ case 't', '\t':
+ s = append(s, '\x09')
+ case 'n':
+ s = append(s, '\x0A')
+ case 'v':
+ s = append(s, '\x0B')
+ case 'f':
+ s = append(s, '\x0C')
+ case 'r':
+ s = append(s, '\x0D')
+ case 'e':
+ s = append(s, '\x1B')
+ case ' ':
+ s = append(s, '\x20')
+ case '"':
+ s = append(s, '"')
+ case '\'':
+ s = append(s, '\'')
+ case '\\':
+ s = append(s, '\\')
+ case 'N': // NEL (#x85)
+ s = append(s, '\xC2')
+ s = append(s, '\x85')
+ case '_': // #xA0
+ s = append(s, '\xC2')
+ s = append(s, '\xA0')
+ case 'L': // LS (#x2028)
+ s = append(s, '\xE2')
+ s = append(s, '\x80')
+ s = append(s, '\xA8')
+ case 'P': // PS (#x2029)
+ s = append(s, '\xE2')
+ s = append(s, '\x80')
+ s = append(s, '\xA9')
+ case 'x':
+ code_length = 2
+ case 'u':
+ code_length = 4
+ case 'U':
+ code_length = 8
+ default:
+ yaml_parser_set_scanner_error(parser, "while parsing a quoted scalar",
+ start_mark, "found unknown escape character")
+ return false
+ }
+
+ skip(parser)
+ skip(parser)
+
+ // Consume an arbitrary escape code.
+ if code_length > 0 {
+ var value int
+
+ // Scan the character value.
+ if parser.unread < code_length && !yaml_parser_update_buffer(parser, code_length) {
+ return false
+ }
+ for k := 0; k < code_length; k++ {
+ if !is_hex(parser.buffer, parser.buffer_pos+k) {
+ yaml_parser_set_scanner_error(parser, "while parsing a quoted scalar",
+ start_mark, "did not find expected hexdecimal number")
+ return false
+ }
+ value = (value << 4) + as_hex(parser.buffer, parser.buffer_pos+k)
+ }
+
+ // Check the value and write the character.
+ if (value >= 0xD800 && value <= 0xDFFF) || value > 0x10FFFF {
+ yaml_parser_set_scanner_error(parser, "while parsing a quoted scalar",
+ start_mark, "found invalid Unicode character escape code")
+ return false
+ }
+ if value <= 0x7F {
+ s = append(s, byte(value))
+ } else if value <= 0x7FF {
+ s = append(s, byte(0xC0+(value>>6)))
+ s = append(s, byte(0x80+(value&0x3F)))
+ } else if value <= 0xFFFF {
+ s = append(s, byte(0xE0+(value>>12)))
+ s = append(s, byte(0x80+((value>>6)&0x3F)))
+ s = append(s, byte(0x80+(value&0x3F)))
+ } else {
+ s = append(s, byte(0xF0+(value>>18)))
+ s = append(s, byte(0x80+((value>>12)&0x3F)))
+ s = append(s, byte(0x80+((value>>6)&0x3F)))
+ s = append(s, byte(0x80+(value&0x3F)))
+ }
+
+ // Advance the pointer.
+ for k := 0; k < code_length; k++ {
+ skip(parser)
+ }
+ }
+ } else {
+ // It is a non-escaped non-blank character.
+ s = read(parser, s) + } + if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { + return false + } + } + + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + + // Check if we are at the end of the scalar. + if single { + if parser.buffer[parser.buffer_pos] == '\'' { + break + } + } else { + if parser.buffer[parser.buffer_pos] == '"' { + break + } + } + + // Consume blank characters. + for is_blank(parser.buffer, parser.buffer_pos) || is_break(parser.buffer, parser.buffer_pos) { + if is_blank(parser.buffer, parser.buffer_pos) { + // Consume a space or a tab character. + if !leading_blanks { + whitespaces = read(parser, whitespaces) + } else { + skip(parser) + } + } else { + if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { + return false + } + + // Check if it is a first line break. + if !leading_blanks { + whitespaces = whitespaces[:0] + leading_break = read_line(parser, leading_break) + leading_blanks = true + } else { + trailing_breaks = read_line(parser, trailing_breaks) + } + } + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + } + + // Join the whitespaces or fold line breaks. + if leading_blanks { + // Do we need to fold line breaks? + if len(leading_break) > 0 && leading_break[0] == '\n' { + if len(trailing_breaks) == 0 { + s = append(s, ' ') + } else { + s = append(s, trailing_breaks...) + } + } else { + s = append(s, leading_break...) + s = append(s, trailing_breaks...) + } + trailing_breaks = trailing_breaks[:0] + leading_break = leading_break[:0] + } else { + s = append(s, whitespaces...) + whitespaces = whitespaces[:0] + } + } + + // Eat the right quote. + skip(parser) + end_mark := parser.mark + + // Create a token. + *token = yaml_token_t{ + typ: yaml_SCALAR_TOKEN, + start_mark: start_mark, + end_mark: end_mark, + value: s, + style: yaml_SINGLE_QUOTED_SCALAR_STYLE, + } + if !single { + token.style = yaml_DOUBLE_QUOTED_SCALAR_STYLE + } + return true +} + +// Scan a plain scalar. +func yaml_parser_scan_plain_scalar(parser *yaml_parser_t, token *yaml_token_t) bool { + + var s, leading_break, trailing_breaks, whitespaces []byte + var leading_blanks bool + var indent = parser.indent + 1 + + start_mark := parser.mark + end_mark := parser.mark + + // Consume the content of the plain scalar. + for { + // Check for a document indicator. + if parser.unread < 4 && !yaml_parser_update_buffer(parser, 4) { + return false + } + if parser.mark.column == 0 && + ((parser.buffer[parser.buffer_pos+0] == '-' && + parser.buffer[parser.buffer_pos+1] == '-' && + parser.buffer[parser.buffer_pos+2] == '-') || + (parser.buffer[parser.buffer_pos+0] == '.' && + parser.buffer[parser.buffer_pos+1] == '.' && + parser.buffer[parser.buffer_pos+2] == '.')) && + is_blankz(parser.buffer, parser.buffer_pos+3) { + break + } + + // Check for a comment. + if parser.buffer[parser.buffer_pos] == '#' { + break + } + + // Consume non-blank characters. + for !is_blankz(parser.buffer, parser.buffer_pos) { + + // Check for indicators that may end a plain scalar. + if (parser.buffer[parser.buffer_pos] == ':' && is_blankz(parser.buffer, parser.buffer_pos+1)) || + (parser.flow_level > 0 && + (parser.buffer[parser.buffer_pos] == ',' || + parser.buffer[parser.buffer_pos] == '[' || + parser.buffer[parser.buffer_pos] == ']' || parser.buffer[parser.buffer_pos] == '{' || + parser.buffer[parser.buffer_pos] == '}')) { + break + } + + // Check if we need to join whitespaces and breaks. 
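+ // Line folding: a single break between plain scalar lines becomes a
+ // space, so "a" followed by "b" on the next line scans as "a b".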
+ if leading_blanks || len(whitespaces) > 0 { + if leading_blanks { + // Do we need to fold line breaks? + if leading_break[0] == '\n' { + if len(trailing_breaks) == 0 { + s = append(s, ' ') + } else { + s = append(s, trailing_breaks...) + } + } else { + s = append(s, leading_break...) + s = append(s, trailing_breaks...) + } + trailing_breaks = trailing_breaks[:0] + leading_break = leading_break[:0] + leading_blanks = false + } else { + s = append(s, whitespaces...) + whitespaces = whitespaces[:0] + } + } + + // Copy the character. + s = read(parser, s) + + end_mark = parser.mark + if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { + return false + } + } + + // Is it the end? + if !(is_blank(parser.buffer, parser.buffer_pos) || is_break(parser.buffer, parser.buffer_pos)) { + break + } + + // Consume blank characters. + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + + for is_blank(parser.buffer, parser.buffer_pos) || is_break(parser.buffer, parser.buffer_pos) { + if is_blank(parser.buffer, parser.buffer_pos) { + + // Check for tab characters that abuse indentation. + if leading_blanks && parser.mark.column < indent && is_tab(parser.buffer, parser.buffer_pos) { + yaml_parser_set_scanner_error(parser, "while scanning a plain scalar", + start_mark, "found a tab character that violates indentation") + return false + } + + // Consume a space or a tab character. + if !leading_blanks { + whitespaces = read(parser, whitespaces) + } else { + skip(parser) + } + } else { + if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { + return false + } + + // Check if it is a first line break. + if !leading_blanks { + whitespaces = whitespaces[:0] + leading_break = read_line(parser, leading_break) + leading_blanks = true + } else { + trailing_breaks = read_line(parser, trailing_breaks) + } + } + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + } + + // Check indentation level. + if parser.flow_level == 0 && parser.mark.column < indent { + break + } + } + + // Create a token. + *token = yaml_token_t{ + typ: yaml_SCALAR_TOKEN, + start_mark: start_mark, + end_mark: end_mark, + value: s, + style: yaml_PLAIN_SCALAR_STYLE, + } + + // Note that we change the 'simple_key_allowed' flag. 
+ if leading_blanks { + parser.simple_key_allowed = true + } + return true +} + +func yaml_parser_scan_line_comment(parser *yaml_parser_t, token_mark yaml_mark_t) bool { + if parser.newlines > 0 { + return true + } + + var start_mark yaml_mark_t + var text []byte + + for peek := 0; peek < 512; peek++ { + if parser.unread < peek+1 && !yaml_parser_update_buffer(parser, peek+1) { + break + } + if is_blank(parser.buffer, parser.buffer_pos+peek) { + continue + } + if parser.buffer[parser.buffer_pos+peek] == '#' { + seen := parser.mark.index + peek + for { + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + if is_breakz(parser.buffer, parser.buffer_pos) { + if parser.mark.index >= seen { + break + } + if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { + return false + } + skip_line(parser) + } else if parser.mark.index >= seen { + if len(text) == 0 { + start_mark = parser.mark + } + text = read(parser, text) + } else { + skip(parser) + } + } + } + break + } + if len(text) > 0 { + parser.comments = append(parser.comments, yaml_comment_t{ + token_mark: token_mark, + start_mark: start_mark, + line: text, + }) + } + return true +} + +func yaml_parser_scan_comments(parser *yaml_parser_t, scan_mark yaml_mark_t) bool { + token := parser.tokens[len(parser.tokens)-1] + + if token.typ == yaml_FLOW_ENTRY_TOKEN && len(parser.tokens) > 1 { + token = parser.tokens[len(parser.tokens)-2] + } + + var token_mark = token.start_mark + var start_mark yaml_mark_t + var next_indent = parser.indent + if next_indent < 0 { + next_indent = 0 + } + + var recent_empty = false + var first_empty = parser.newlines <= 1 + + var line = parser.mark.line + var column = parser.mark.column + + var text []byte + + // The foot line is the place where a comment must start to + // still be considered as a foot of the prior content. + // If there's some content in the currently parsed line, then + // the foot is the line below it. + var foot_line = -1 + if scan_mark.line > 0 { + foot_line = parser.mark.line - parser.newlines + 1 + if parser.newlines == 0 && parser.mark.column > 1 { + foot_line++ + } + } + + var peek = 0 + for ; peek < 512; peek++ { + if parser.unread < peek+1 && !yaml_parser_update_buffer(parser, peek+1) { + break + } + column++ + if is_blank(parser.buffer, parser.buffer_pos+peek) { + continue + } + c := parser.buffer[parser.buffer_pos+peek] + var close_flow = parser.flow_level > 0 && (c == ']' || c == '}') + if close_flow || is_breakz(parser.buffer, parser.buffer_pos+peek) { + // Got line break or terminator. + if close_flow || !recent_empty { + if close_flow || first_empty && (start_mark.line == foot_line && token.typ != yaml_VALUE_TOKEN || start_mark.column-1 < next_indent) { + // This is the first empty line and there were no empty lines before, + // so this initial part of the comment is a foot of the prior token + // instead of being a head for the following one. Split it up. + // Alternatively, this might also be the last comment inside a flow + // scope, so it must be a footer. + if len(text) > 0 { + if start_mark.column-1 < next_indent { + // If dedented it's unrelated to the prior token. 
+ token_mark = start_mark + } + parser.comments = append(parser.comments, yaml_comment_t{ + scan_mark: scan_mark, + token_mark: token_mark, + start_mark: start_mark, + end_mark: yaml_mark_t{parser.mark.index + peek, line, column}, + foot: text, + }) + scan_mark = yaml_mark_t{parser.mark.index + peek, line, column} + token_mark = scan_mark + text = nil + } + } else { + if len(text) > 0 && parser.buffer[parser.buffer_pos+peek] != 0 { + text = append(text, '\n') + } + } + } + if !is_break(parser.buffer, parser.buffer_pos+peek) { + break + } + first_empty = false + recent_empty = true + column = 0 + line++ + continue + } + + if len(text) > 0 && (close_flow || column-1 < next_indent && column != start_mark.column) { + // The comment at the different indentation is a foot of the + // preceding data rather than a head of the upcoming one. + parser.comments = append(parser.comments, yaml_comment_t{ + scan_mark: scan_mark, + token_mark: token_mark, + start_mark: start_mark, + end_mark: yaml_mark_t{parser.mark.index + peek, line, column}, + foot: text, + }) + scan_mark = yaml_mark_t{parser.mark.index + peek, line, column} + token_mark = scan_mark + text = nil + } + + if parser.buffer[parser.buffer_pos+peek] != '#' { + break + } + + if len(text) == 0 { + start_mark = yaml_mark_t{parser.mark.index + peek, line, column} + } else { + text = append(text, '\n') + } + + recent_empty = false + + // Consume until after the consumed comment line. + seen := parser.mark.index + peek + for { + if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { + return false + } + if is_breakz(parser.buffer, parser.buffer_pos) { + if parser.mark.index >= seen { + break + } + if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { + return false + } + skip_line(parser) + } else if parser.mark.index >= seen { + text = read(parser, text) + } else { + skip(parser) + } + } + + peek = 0 + column = 0 + line = parser.mark.line + next_indent = parser.indent + if next_indent < 0 { + next_indent = 0 + } + } + + if len(text) > 0 { + parser.comments = append(parser.comments, yaml_comment_t{ + scan_mark: scan_mark, + token_mark: start_mark, + start_mark: start_mark, + end_mark: yaml_mark_t{parser.mark.index + peek - 1, line, column}, + head: text, + }) + } + return true +} diff --git a/cli/internal/yaml/sorter.go b/cli/internal/yaml/sorter.go new file mode 100644 index 0000000..9210ece --- /dev/null +++ b/cli/internal/yaml/sorter.go @@ -0,0 +1,134 @@ +// +// Copyright (c) 2011-2019 Canonical Ltd +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
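+
+// This file implements the "natural" ordering applied to mapping keys
+// when the encoder serializes Go maps: runs of digits compare
+// numerically, so a key like "item2" sorts before "item10", and
+// numeric keys sort ahead of string keys.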
+ +package yaml + +import ( + "reflect" + "unicode" +) + +type keyList []reflect.Value + +func (l keyList) Len() int { return len(l) } +func (l keyList) Swap(i, j int) { l[i], l[j] = l[j], l[i] } +func (l keyList) Less(i, j int) bool { + a := l[i] + b := l[j] + ak := a.Kind() + bk := b.Kind() + for (ak == reflect.Interface || ak == reflect.Ptr) && !a.IsNil() { + a = a.Elem() + ak = a.Kind() + } + for (bk == reflect.Interface || bk == reflect.Ptr) && !b.IsNil() { + b = b.Elem() + bk = b.Kind() + } + af, aok := keyFloat(a) + bf, bok := keyFloat(b) + if aok && bok { + if af != bf { + return af < bf + } + if ak != bk { + return ak < bk + } + return numLess(a, b) + } + if ak != reflect.String || bk != reflect.String { + return ak < bk + } + ar, br := []rune(a.String()), []rune(b.String()) + digits := false + for i := 0; i < len(ar) && i < len(br); i++ { + if ar[i] == br[i] { + digits = unicode.IsDigit(ar[i]) + continue + } + al := unicode.IsLetter(ar[i]) + bl := unicode.IsLetter(br[i]) + if al && bl { + return ar[i] < br[i] + } + if al || bl { + if digits { + return al + } else { + return bl + } + } + var ai, bi int + var an, bn int64 + if ar[i] == '0' || br[i] == '0' { + for j := i - 1; j >= 0 && unicode.IsDigit(ar[j]); j-- { + if ar[j] != '0' { + an = 1 + bn = 1 + break + } + } + } + for ai = i; ai < len(ar) && unicode.IsDigit(ar[ai]); ai++ { + an = an*10 + int64(ar[ai]-'0') + } + for bi = i; bi < len(br) && unicode.IsDigit(br[bi]); bi++ { + bn = bn*10 + int64(br[bi]-'0') + } + if an != bn { + return an < bn + } + if ai != bi { + return ai < bi + } + return ar[i] < br[i] + } + return len(ar) < len(br) +} + +// keyFloat returns a float value for v if it is a number/bool +// and whether it is a number/bool or not. +func keyFloat(v reflect.Value) (f float64, ok bool) { + switch v.Kind() { + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + return float64(v.Int()), true + case reflect.Float32, reflect.Float64: + return v.Float(), true + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + return float64(v.Uint()), true + case reflect.Bool: + if v.Bool() { + return 1, true + } + return 0, true + } + return 0, false +} + +// numLess returns whether a < b. +// a and b must necessarily have the same kind. 
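+// It panics if the kind is not a number or bool.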
+func numLess(a, b reflect.Value) bool { + switch a.Kind() { + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + return a.Int() < b.Int() + case reflect.Float32, reflect.Float64: + return a.Float() < b.Float() + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + return a.Uint() < b.Uint() + case reflect.Bool: + return !a.Bool() && b.Bool() + } + panic("not a number") +} diff --git a/cli/internal/yaml/writerc.go b/cli/internal/yaml/writerc.go new file mode 100644 index 0000000..266d0b0 --- /dev/null +++ b/cli/internal/yaml/writerc.go @@ -0,0 +1,48 @@ +// +// Copyright (c) 2011-2019 Canonical Ltd +// Copyright (c) 2006-2010 Kirill Simonov +// +// Permission is hereby granted, free of charge, to any person obtaining a copy of +// this software and associated documentation files (the "Software"), to deal in +// the Software without restriction, including without limitation the rights to +// use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +// of the Software, and to permit persons to whom the Software is furnished to do +// so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +package yaml + +// Set the writer error and return false. +func yaml_emitter_set_writer_error(emitter *yaml_emitter_t, problem string) bool { + emitter.error = yaml_WRITER_ERROR + emitter.problem = problem + return false +} + +// Flush the output buffer. +func yaml_emitter_flush(emitter *yaml_emitter_t) bool { + if emitter.write_handler == nil { + panic("write handler not set") + } + + // Check if the buffer is empty. + if emitter.buffer_pos == 0 { + return true + } + + if err := emitter.write_handler(emitter, emitter.buffer[:emitter.buffer_pos]); err != nil { + return yaml_emitter_set_writer_error(emitter, "write error: "+err.Error()) + } + emitter.buffer_pos = 0 + return true +} diff --git a/cli/internal/yaml/yaml.go b/cli/internal/yaml/yaml.go new file mode 100644 index 0000000..f0bedf3 --- /dev/null +++ b/cli/internal/yaml/yaml.go @@ -0,0 +1,693 @@ +// +// Copyright (c) 2011-2019 Canonical Ltd +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package yaml implements YAML support for the Go language. 
+//
+// Source code and other details for the project are available at GitHub:
+//
+// https://github.com/go-yaml/yaml
+package yaml
+
+import (
+ "errors"
+ "fmt"
+ "io"
+ "reflect"
+ "strings"
+ "sync"
+ "unicode/utf8"
+)
+
+// The Unmarshaler interface may be implemented by types to customize their
+// behavior when being unmarshaled from a YAML document.
+type Unmarshaler interface {
+ UnmarshalYAML(value *Node) error
+}
+
+type obsoleteUnmarshaler interface {
+ UnmarshalYAML(unmarshal func(interface{}) error) error
+}
+
+// The Marshaler interface may be implemented by types to customize their
+// behavior when being marshaled into a YAML document. The returned value
+// is marshaled in place of the original value implementing Marshaler.
+//
+// If an error is returned by MarshalYAML, the marshaling procedure stops
+// and returns with the provided error.
+type Marshaler interface {
+ MarshalYAML() (interface{}, error)
+}
+
+// Unmarshal decodes the first document found within the in byte slice
+// and assigns decoded values into the out value.
+//
+// Maps and pointers (to a struct, string, int, etc) are accepted as out
+// values. If an internal pointer within a struct is not initialized,
+// the yaml package will initialize it if necessary for unmarshalling
+// the provided data. The out parameter must not be nil.
+//
+// The type of the decoded values should be compatible with the respective
+// values in out. If one or more values cannot be decoded due to a type
+// mismatch, decoding continues partially until the end of the YAML
+// content, and a *yaml.TypeError is returned with details for all
+// missed values.
+//
+// Struct fields are only unmarshalled if they are exported (have an
+// upper case first letter), and are unmarshalled using the field name
+// lowercased as the default key. Custom keys may be defined via the
+// "yaml" name in the field tag: the content preceding the first comma
+// is used as the key, and the following comma-separated options are
+// used to tweak the marshalling process (see Marshal).
+// Conflicting names result in a runtime error.
+//
+// For example:
+//
+// type T struct {
+// F int `yaml:"a,omitempty"`
+// B int
+// }
+// var t T
+// yaml.Unmarshal([]byte("a: 1\nb: 2"), &t)
+//
+// See the documentation of Marshal for the format of tags and a list of
+// supported tag options.
+func Unmarshal(in []byte, out interface{}) (err error) {
+ return unmarshal(in, out, false)
+}
+
+// A Decoder reads and decodes YAML values from an input stream.
+type Decoder struct {
+ parser *parser
+ knownFields bool
+}
+
+// NewDecoder returns a new decoder that reads from r.
+//
+// The decoder introduces its own buffering and may read
+// data from r beyond the YAML values requested.
+func NewDecoder(r io.Reader) *Decoder {
+ return &Decoder{
+ parser: newParserFromReader(r),
+ }
+}
+
+// KnownFields ensures that the keys in decoded mappings exist as
+// fields in the struct being decoded into.
+func (dec *Decoder) KnownFields(enable bool) {
+ dec.knownFields = enable
+}
+
+// Decode reads the next YAML-encoded value from its input
+// and stores it in the value pointed to by v.
+//
+// See the documentation for Unmarshal for details about the
+// conversion of YAML into a Go value.
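+//
+// A minimal stream-decoding sketch (names are illustrative only):
+//
+// dec := yaml.NewDecoder(r)
+// for {
+// var doc map[string]interface{}
+// if err := dec.Decode(&doc); err != nil {
+// break // io.EOF once the stream is exhausted
+// }
+// }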
+func (dec *Decoder) Decode(v interface{}) (err error) {
+ d := newDecoder()
+ d.knownFields = dec.knownFields
+ defer handleErr(&err)
+ node := dec.parser.parse()
+ if node == nil {
+ return io.EOF
+ }
+ out := reflect.ValueOf(v)
+ if out.Kind() == reflect.Ptr && !out.IsNil() {
+ out = out.Elem()
+ }
+ d.unmarshal(node, out)
+ if len(d.terrors) > 0 {
+ return &TypeError{d.terrors}
+ }
+ return nil
+}
+
+// Decode decodes the node and stores its data into the value pointed to by v.
+//
+// See the documentation for Unmarshal for details about the
+// conversion of YAML into a Go value.
+func (n *Node) Decode(v interface{}) (err error) {
+ d := newDecoder()
+ defer handleErr(&err)
+ out := reflect.ValueOf(v)
+ if out.Kind() == reflect.Ptr && !out.IsNil() {
+ out = out.Elem()
+ }
+ d.unmarshal(n, out)
+ if len(d.terrors) > 0 {
+ return &TypeError{d.terrors}
+ }
+ return nil
+}
+
+func unmarshal(in []byte, out interface{}, strict bool) (err error) {
+ defer handleErr(&err)
+ d := newDecoder()
+ p := newParser(in)
+ defer p.destroy()
+ node := p.parse()
+ if node != nil {
+ v := reflect.ValueOf(out)
+ if v.Kind() == reflect.Ptr && !v.IsNil() {
+ v = v.Elem()
+ }
+ d.unmarshal(node, v)
+ }
+ if len(d.terrors) > 0 {
+ return &TypeError{d.terrors}
+ }
+ return nil
+}
+
+// Marshal serializes the value provided into a YAML document. The structure
+// of the generated document will reflect the structure of the value itself.
+// Maps and pointers (to struct, string, int, etc) are accepted as the in value.
+//
+// Struct fields are only marshalled if they are exported (have an upper case
+// first letter), and are marshalled using the field name lowercased as the
+// default key. Custom keys may be defined via the "yaml" name in the field
+// tag: the content preceding the first comma is used as the key, and the
+// following comma-separated options are used to tweak the marshalling process.
+// Conflicting names result in a runtime error.
+//
+// The field tag format accepted is:
+//
+// `(...) yaml:"[<key>][,<flag1>[,<flag2>]]" (...)`
+//
+// The following flags are currently supported:
+//
+// omitempty Only include the field if it's not set to the zero
+// value for the type or to empty slices or maps.
+// Zero valued structs will be omitted if all their public
+// fields are zero, unless they implement an IsZero
+// method (see the IsZeroer interface type), in which
+// case the field will be excluded if IsZero returns true.
+//
+// flow Marshal using a flow style (useful for structs,
+// sequences and maps).
+//
+// inline Inline the field, which must be a struct or a map,
+// causing all of its fields or keys to be processed as if
+// they were part of the outer struct. For maps, keys must
+// not conflict with the yaml keys of other struct fields.
+//
+// In addition, if the key is "-", the field is ignored.
+//
+// For example:
+//
+// type T struct {
+// F int `yaml:"a,omitempty"`
+// B int
+// }
+// yaml.Marshal(&T{B: 2}) // Returns "b: 2\n"
+// yaml.Marshal(&T{F: 1}) // Returns "a: 1\nb: 0\n"
+func Marshal(in interface{}) (out []byte, err error) {
+ defer handleErr(&err)
+ e := newEncoder()
+ defer e.destroy()
+ e.marshalDoc("", reflect.ValueOf(in))
+ e.finish()
+ out = e.out
+ return
+}
+
+// An Encoder writes YAML values to an output stream.
+type Encoder struct {
+ encoder *encoder
+}
+
+// NewEncoder returns a new encoder that writes to w.
+// The Encoder should be closed after use to flush all data
+// to w.
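+//
+// A minimal usage sketch (names are illustrative only):
+//
+// enc := yaml.NewEncoder(w)
+// err := enc.Encode(v1) // first document, no "---" separator
+// err = enc.Encode(v2) // subsequent documents are preceded by "---"
+// err = enc.Close() // flush any buffered output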
+func NewEncoder(w io.Writer) *Encoder {
+ return &Encoder{
+ encoder: newEncoderWithWriter(w),
+ }
+}
+
+// Encode writes the YAML encoding of v to the stream.
+// If multiple items are encoded to the stream, the
+// second and subsequent documents will be preceded
+// with a "---" document separator, but the first will not.
+//
+// See the documentation for Marshal for details about the conversion of Go
+// values to YAML.
+func (e *Encoder) Encode(v interface{}) (err error) {
+ defer handleErr(&err)
+ e.encoder.marshalDoc("", reflect.ValueOf(v))
+ return nil
+}
+
+// Encode encodes value v and stores its representation in n.
+//
+// See the documentation for Marshal for details about the
+// conversion of Go values into YAML.
+func (n *Node) Encode(v interface{}) (err error) {
+ defer handleErr(&err)
+ e := newEncoder()
+ defer e.destroy()
+ e.marshalDoc("", reflect.ValueOf(v))
+ e.finish()
+ p := newParser(e.out)
+ p.textless = true
+ defer p.destroy()
+ doc := p.parse()
+ *n = *doc.Content[0]
+ return nil
+}
+
+// SetIndent changes the indentation used when encoding.
+func (e *Encoder) SetIndent(spaces int) {
+ if spaces < 0 {
+ panic("yaml: cannot indent to a negative number of spaces")
+ }
+ e.encoder.indent = spaces
+}
+
+// Close closes the encoder by writing any remaining data.
+// It does not write a stream terminating string "...".
+func (e *Encoder) Close() (err error) {
+ defer handleErr(&err)
+ e.encoder.finish()
+ return nil
+}
+
+func handleErr(err *error) {
+ if v := recover(); v != nil {
+ if e, ok := v.(yamlError); ok {
+ *err = e.err
+ } else {
+ panic(v)
+ }
+ }
+}
+
+type yamlError struct {
+ err error
+}
+
+func fail(err error) {
+ panic(yamlError{err})
+}
+
+func failf(format string, args ...interface{}) {
+ panic(yamlError{fmt.Errorf("yaml: "+format, args...)})
+}
+
+// A TypeError is returned by Unmarshal when one or more fields in
+// the YAML document cannot be properly decoded into the requested
+// types. When this error is returned, the value is still
+// unmarshaled partially.
+type TypeError struct {
+ Errors []string
+}
+
+func (e *TypeError) Error() string {
+ return fmt.Sprintf("yaml: unmarshal errors:\n %s", strings.Join(e.Errors, "\n "))
+}
+
+type Kind uint32
+
+const (
+ DocumentNode Kind = 1 << iota
+ SequenceNode
+ MappingNode
+ ScalarNode
+ AliasNode
+)
+
+type Style uint32
+
+const (
+ TaggedStyle Style = 1 << iota
+ DoubleQuotedStyle
+ SingleQuotedStyle
+ LiteralStyle
+ FoldedStyle
+ FlowStyle
+)
+
+// Node represents an element in the YAML document hierarchy. While documents
+// are typically encoded and decoded into higher level types, such as structs
+// and maps, Node is an intermediate representation that allows detailed
+// control over the content being decoded or encoded.
+//
+// It's worth noting that although Node offers access into details such as
+// line numbers, columns, and comments, the content when re-encoded will not
+// have its original textual representation preserved. An effort is made to
+// render the data pleasantly, and to preserve comments near the data they
+// describe, though.
+//
+// Values that make use of the Node type interact with the yaml package in the
+// same way any other type would do, by encoding and decoding yaml data
+// directly or indirectly into them.
+//
+// For example:
+//
+// var person struct {
+// Name string
+// Address yaml.Node
+// }
+// err := yaml.Unmarshal(data, &person)
+//
+// Or by itself:
+//
+// var person Node
+// err := yaml.Unmarshal(data, &person)
+type Node struct {
+ // Kind defines whether the node is a document, a mapping, a sequence,
+ // a scalar value, or an alias to another node. The specific data type of
+ // scalar nodes may be obtained via the ShortTag and LongTag methods.
+ Kind Kind
+
+ // Style allows customizing the appearance of the node in the tree.
+ Style Style
+
+ // Tag holds the YAML tag defining the data type for the value.
+ // When decoding, this field will always be set to the resolved tag,
+ // even when it wasn't explicitly provided in the YAML content.
+ // When encoding, if this field is unset the value type will be
+ // implied from the node properties, and if it is set, it will only
+ // be serialized into the representation if TaggedStyle is used or
+ // the implicit tag diverges from the provided one.
+ Tag string
+
+ // Value holds the unescaped and unquoted representation of the value.
+ Value string
+
+ // Anchor holds the anchor name for this node, which allows aliases to point to it.
+ Anchor string
+
+ // Alias holds the node that this alias points to. Only valid when Kind is AliasNode.
+ Alias *Node
+
+ // Content holds contained nodes for documents, mappings, and sequences.
+ Content []*Node
+
+ // HeadComment holds any comments in the lines preceding the node and
+ // not separated by an empty line.
+ HeadComment string
+
+ // LineComment holds any comments at the end of the line where the node is.
+ LineComment string
+
+ // FootComment holds any comments following the node and before empty lines.
+ FootComment string
+
+ // Line and Column hold the node position in the decoded YAML text.
+ // These fields are not respected when encoding the node.
+ Line int
+ Column int
+}
+
+// IsZero returns whether the node has all of its fields unset.
+func (n *Node) IsZero() bool {
+ return n.Kind == 0 && n.Style == 0 && n.Tag == "" && n.Value == "" && n.Anchor == "" && n.Alias == nil && n.Content == nil &&
+ n.HeadComment == "" && n.LineComment == "" && n.FootComment == "" && n.Line == 0 && n.Column == 0
+}
+
+// LongTag returns the long form of the tag that indicates the data type for
+// the node. If the Tag field isn't explicitly defined, one will be computed
+// based on the node properties.
+func (n *Node) LongTag() string {
+ return longTag(n.ShortTag())
+}
+
+// ShortTag returns the short form of the YAML tag that indicates the data type
+// for the node. If the Tag field isn't explicitly defined, one will be computed
+// based on the node properties.
+func (n *Node) ShortTag() string {
+ if n.indicatedString() {
+ return strTag
+ }
+ if n.Tag == "" || n.Tag == "!" {
+ switch n.Kind {
+ case MappingNode:
+ return mapTag
+ case SequenceNode:
+ return seqTag
+ case AliasNode:
+ if n.Alias != nil {
+ return n.Alias.ShortTag()
+ }
+ case ScalarNode:
+ tag, _ := resolve("", n.Value)
+ return tag
+ case 0:
+ // Special case to make the zero value convenient.
+ if n.IsZero() { + return nullTag + } + } + return "" + } + return shortTag(n.Tag) +} + +func (n *Node) indicatedString() bool { + return n.Kind == ScalarNode && + (shortTag(n.Tag) == strTag || + (n.Tag == "" || n.Tag == "!") && n.Style&(SingleQuotedStyle|DoubleQuotedStyle|LiteralStyle|FoldedStyle) != 0) +} + +// SetString is a convenience function that sets the node to a string value +// and defines its style in a pleasant way depending on its content. +func (n *Node) SetString(s string) { + n.Kind = ScalarNode + if utf8.ValidString(s) { + n.Value = s + n.Tag = strTag + } else { + n.Value = encodeBase64(s) + n.Tag = binaryTag + } + if strings.Contains(n.Value, "\n") { + n.Style = LiteralStyle + } +} + +// -------------------------------------------------------------------------- +// Maintain a mapping of keys to structure field indexes + +// The code in this section was copied from mgo/bson. + +// structInfo holds details for the serialization of fields of +// a given struct. +type structInfo struct { + FieldsMap map[string]fieldInfo + FieldsList []fieldInfo + + // InlineMap is the number of the field in the struct that + // contains an ,inline map, or -1 if there's none. + InlineMap int + + // InlineUnmarshalers holds indexes to inlined fields that + // contain unmarshaler values. + InlineUnmarshalers [][]int +} + +type fieldInfo struct { + Key string + Num int + OmitEmpty bool + Flow bool + // Id holds the unique field identifier, so we can cheaply + // check for field duplicates without maintaining an extra map. + Id int + + // Inline holds the field index if the field is part of an inlined struct. + Inline []int +} + +var structMap = make(map[reflect.Type]*structInfo) +var fieldMapMutex sync.RWMutex +var unmarshalerType reflect.Type + +func init() { + var v Unmarshaler + unmarshalerType = reflect.ValueOf(&v).Elem().Type() +} + +func getStructInfo(st reflect.Type) (*structInfo, error) { + fieldMapMutex.RLock() + sinfo, found := structMap[st] + fieldMapMutex.RUnlock() + if found { + return sinfo, nil + } + + n := st.NumField() + fieldsMap := make(map[string]fieldInfo) + fieldsList := make([]fieldInfo, 0, n) + inlineMap := -1 + inlineUnmarshalers := [][]int(nil) + for i := 0; i != n; i++ { + field := st.Field(i) + if field.PkgPath != "" && !field.Anonymous { + continue // Private field + } + + info := fieldInfo{Num: i} + + tag := field.Tag.Get("yaml") + if tag == "" && strings.Index(string(field.Tag), ":") < 0 { + tag = string(field.Tag) + } + if tag == "-" { + continue + } + + inline := false + fields := strings.Split(tag, ",") + if len(fields) > 1 { + for _, flag := range fields[1:] { + switch flag { + case "omitempty": + info.OmitEmpty = true + case "flow": + info.Flow = true + case "inline": + inline = true + default: + return nil, errors.New(fmt.Sprintf("unsupported flag %q in tag %q of type %s", flag, tag, st)) + } + } + tag = fields[0] + } + + if inline { + switch field.Type.Kind() { + case reflect.Map: + if inlineMap >= 0 { + return nil, errors.New("multiple ,inline maps in struct " + st.String()) + } + if field.Type.Key() != reflect.TypeOf("") { + return nil, errors.New("option ,inline needs a map with string keys in struct " + st.String()) + } + inlineMap = info.Num + case reflect.Struct, reflect.Ptr: + ftype := field.Type + for ftype.Kind() == reflect.Ptr { + ftype = ftype.Elem() + } + if ftype.Kind() != reflect.Struct { + return nil, errors.New("option ,inline may only be used on a struct or map field") + } + if reflect.PtrTo(ftype).Implements(unmarshalerType) { + 
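+ // The inlined struct implements Unmarshaler itself, so just record
+ // its field index and let it handle its own decoding.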
inlineUnmarshalers = append(inlineUnmarshalers, []int{i}) + } else { + sinfo, err := getStructInfo(ftype) + if err != nil { + return nil, err + } + for _, index := range sinfo.InlineUnmarshalers { + inlineUnmarshalers = append(inlineUnmarshalers, append([]int{i}, index...)) + } + for _, finfo := range sinfo.FieldsList { + if _, found := fieldsMap[finfo.Key]; found { + msg := "duplicated key '" + finfo.Key + "' in struct " + st.String() + return nil, errors.New(msg) + } + if finfo.Inline == nil { + finfo.Inline = []int{i, finfo.Num} + } else { + finfo.Inline = append([]int{i}, finfo.Inline...) + } + finfo.Id = len(fieldsList) + fieldsMap[finfo.Key] = finfo + fieldsList = append(fieldsList, finfo) + } + } + default: + return nil, errors.New("option ,inline may only be used on a struct or map field") + } + continue + } + + if tag != "" { + info.Key = tag + } else { + info.Key = strings.ToLower(field.Name) + } + + if _, found = fieldsMap[info.Key]; found { + msg := "duplicated key '" + info.Key + "' in struct " + st.String() + return nil, errors.New(msg) + } + + info.Id = len(fieldsList) + fieldsList = append(fieldsList, info) + fieldsMap[info.Key] = info + } + + sinfo = &structInfo{ + FieldsMap: fieldsMap, + FieldsList: fieldsList, + InlineMap: inlineMap, + InlineUnmarshalers: inlineUnmarshalers, + } + + fieldMapMutex.Lock() + structMap[st] = sinfo + fieldMapMutex.Unlock() + return sinfo, nil +} + +// IsZeroer is used to check whether an object is zero to +// determine whether it should be omitted when marshaling +// with the omitempty flag. One notable implementation +// is time.Time. +type IsZeroer interface { + IsZero() bool +} + +func isZero(v reflect.Value) bool { + kind := v.Kind() + if z, ok := v.Interface().(IsZeroer); ok { + if (kind == reflect.Ptr || kind == reflect.Interface) && v.IsNil() { + return true + } + return z.IsZero() + } + switch kind { + case reflect.String: + return len(v.String()) == 0 + case reflect.Interface, reflect.Ptr: + return v.IsNil() + case reflect.Slice: + return v.Len() == 0 + case reflect.Map: + return v.Len() == 0 + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + return v.Int() == 0 + case reflect.Float32, reflect.Float64: + return v.Float() == 0 + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + return v.Uint() == 0 + case reflect.Bool: + return !v.Bool() + case reflect.Struct: + vt := v.Type() + for i := v.NumField() - 1; i >= 0; i-- { + if vt.Field(i).PkgPath != "" { + continue // Private field + } + if !isZero(v.Field(i)) { + return false + } + } + return true + } + return false +} diff --git a/cli/internal/yaml/yamlh.go b/cli/internal/yaml/yamlh.go new file mode 100644 index 0000000..ddcd551 --- /dev/null +++ b/cli/internal/yaml/yamlh.go @@ -0,0 +1,809 @@ +// +// Copyright (c) 2011-2019 Canonical Ltd +// Copyright (c) 2006-2010 Kirill Simonov +// +// Permission is hereby granted, free of charge, to any person obtaining a copy of +// this software and associated documentation files (the "Software"), to deal in +// the Software without restriction, including without limitation the rights to +// use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +// of the Software, and to permit persons to whom the Software is furnished to do +// so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. 
+// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +package yaml + +import ( + "fmt" + "io" +) + +// The version directive data. +type yaml_version_directive_t struct { + major int8 // The major version number. + minor int8 // The minor version number. +} + +// The tag directive data. +type yaml_tag_directive_t struct { + handle []byte // The tag handle. + prefix []byte // The tag prefix. +} + +type yaml_encoding_t int + +// The stream encoding. +const ( + // Let the parser choose the encoding. + yaml_ANY_ENCODING yaml_encoding_t = iota + + yaml_UTF8_ENCODING // The default UTF-8 encoding. + yaml_UTF16LE_ENCODING // The UTF-16-LE encoding with BOM. + yaml_UTF16BE_ENCODING // The UTF-16-BE encoding with BOM. +) + +type yaml_break_t int + +// Line break types. +const ( + // Let the parser choose the break type. + yaml_ANY_BREAK yaml_break_t = iota + + yaml_CR_BREAK // Use CR for line breaks (Mac style). + yaml_LN_BREAK // Use LN for line breaks (Unix style). + yaml_CRLN_BREAK // Use CR LN for line breaks (DOS style). +) + +type yaml_error_type_t int + +// Many bad things could happen with the parser and emitter. +const ( + // No error is produced. + yaml_NO_ERROR yaml_error_type_t = iota + + yaml_MEMORY_ERROR // Cannot allocate or reallocate a block of memory. + yaml_READER_ERROR // Cannot read or decode the input stream. + yaml_SCANNER_ERROR // Cannot scan the input stream. + yaml_PARSER_ERROR // Cannot parse the input stream. + yaml_COMPOSER_ERROR // Cannot compose a YAML document. + yaml_WRITER_ERROR // Cannot write to the output stream. + yaml_EMITTER_ERROR // Cannot emit a YAML stream. +) + +// The pointer position. +type yaml_mark_t struct { + index int // The position index. + line int // The position line. + column int // The position column. +} + +// Node Styles + +type yaml_style_t int8 + +type yaml_scalar_style_t yaml_style_t + +// Scalar styles. +const ( + // Let the emitter choose the style. + yaml_ANY_SCALAR_STYLE yaml_scalar_style_t = 0 + + yaml_PLAIN_SCALAR_STYLE yaml_scalar_style_t = 1 << iota // The plain scalar style. + yaml_SINGLE_QUOTED_SCALAR_STYLE // The single-quoted scalar style. + yaml_DOUBLE_QUOTED_SCALAR_STYLE // The double-quoted scalar style. + yaml_LITERAL_SCALAR_STYLE // The literal scalar style. + yaml_FOLDED_SCALAR_STYLE // The folded scalar style. +) + +type yaml_sequence_style_t yaml_style_t + +// Sequence styles. +const ( + // Let the emitter choose the style. + yaml_ANY_SEQUENCE_STYLE yaml_sequence_style_t = iota + + yaml_BLOCK_SEQUENCE_STYLE // The block sequence style. + yaml_FLOW_SEQUENCE_STYLE // The flow sequence style. +) + +type yaml_mapping_style_t yaml_style_t + +// Mapping styles. +const ( + // Let the emitter choose the style. + yaml_ANY_MAPPING_STYLE yaml_mapping_style_t = iota + + yaml_BLOCK_MAPPING_STYLE // The block mapping style. + yaml_FLOW_MAPPING_STYLE // The flow mapping style. +) + +// Tokens + +type yaml_token_type_t int + +// Token types. +const ( + // An empty token. + yaml_NO_TOKEN yaml_token_type_t = iota + + yaml_STREAM_START_TOKEN // A STREAM-START token. 
+ yaml_STREAM_END_TOKEN // A STREAM-END token.
+
+ yaml_VERSION_DIRECTIVE_TOKEN // A VERSION-DIRECTIVE token.
+ yaml_TAG_DIRECTIVE_TOKEN // A TAG-DIRECTIVE token.
+ yaml_DOCUMENT_START_TOKEN // A DOCUMENT-START token.
+ yaml_DOCUMENT_END_TOKEN // A DOCUMENT-END token.
+
+ yaml_BLOCK_SEQUENCE_START_TOKEN // A BLOCK-SEQUENCE-START token.
+ yaml_BLOCK_MAPPING_START_TOKEN // A BLOCK-MAPPING-START token.
+ yaml_BLOCK_END_TOKEN // A BLOCK-END token.
+
+ yaml_FLOW_SEQUENCE_START_TOKEN // A FLOW-SEQUENCE-START token.
+ yaml_FLOW_SEQUENCE_END_TOKEN // A FLOW-SEQUENCE-END token.
+ yaml_FLOW_MAPPING_START_TOKEN // A FLOW-MAPPING-START token.
+ yaml_FLOW_MAPPING_END_TOKEN // A FLOW-MAPPING-END token.
+
+ yaml_BLOCK_ENTRY_TOKEN // A BLOCK-ENTRY token.
+ yaml_FLOW_ENTRY_TOKEN // A FLOW-ENTRY token.
+ yaml_KEY_TOKEN // A KEY token.
+ yaml_VALUE_TOKEN // A VALUE token.
+
+ yaml_ALIAS_TOKEN // An ALIAS token.
+ yaml_ANCHOR_TOKEN // An ANCHOR token.
+ yaml_TAG_TOKEN // A TAG token.
+ yaml_SCALAR_TOKEN // A SCALAR token.
+)
+
+func (tt yaml_token_type_t) String() string {
+ switch tt {
+ case yaml_NO_TOKEN:
+ return "yaml_NO_TOKEN"
+ case yaml_STREAM_START_TOKEN:
+ return "yaml_STREAM_START_TOKEN"
+ case yaml_STREAM_END_TOKEN:
+ return "yaml_STREAM_END_TOKEN"
+ case yaml_VERSION_DIRECTIVE_TOKEN:
+ return "yaml_VERSION_DIRECTIVE_TOKEN"
+ case yaml_TAG_DIRECTIVE_TOKEN:
+ return "yaml_TAG_DIRECTIVE_TOKEN"
+ case yaml_DOCUMENT_START_TOKEN:
+ return "yaml_DOCUMENT_START_TOKEN"
+ case yaml_DOCUMENT_END_TOKEN:
+ return "yaml_DOCUMENT_END_TOKEN"
+ case yaml_BLOCK_SEQUENCE_START_TOKEN:
+ return "yaml_BLOCK_SEQUENCE_START_TOKEN"
+ case yaml_BLOCK_MAPPING_START_TOKEN:
+ return "yaml_BLOCK_MAPPING_START_TOKEN"
+ case yaml_BLOCK_END_TOKEN:
+ return "yaml_BLOCK_END_TOKEN"
+ case yaml_FLOW_SEQUENCE_START_TOKEN:
+ return "yaml_FLOW_SEQUENCE_START_TOKEN"
+ case yaml_FLOW_SEQUENCE_END_TOKEN:
+ return "yaml_FLOW_SEQUENCE_END_TOKEN"
+ case yaml_FLOW_MAPPING_START_TOKEN:
+ return "yaml_FLOW_MAPPING_START_TOKEN"
+ case yaml_FLOW_MAPPING_END_TOKEN:
+ return "yaml_FLOW_MAPPING_END_TOKEN"
+ case yaml_BLOCK_ENTRY_TOKEN:
+ return "yaml_BLOCK_ENTRY_TOKEN"
+ case yaml_FLOW_ENTRY_TOKEN:
+ return "yaml_FLOW_ENTRY_TOKEN"
+ case yaml_KEY_TOKEN:
+ return "yaml_KEY_TOKEN"
+ case yaml_VALUE_TOKEN:
+ return "yaml_VALUE_TOKEN"
+ case yaml_ALIAS_TOKEN:
+ return "yaml_ALIAS_TOKEN"
+ case yaml_ANCHOR_TOKEN:
+ return "yaml_ANCHOR_TOKEN"
+ case yaml_TAG_TOKEN:
+ return "yaml_TAG_TOKEN"
+ case yaml_SCALAR_TOKEN:
+ return "yaml_SCALAR_TOKEN"
+ }
+ return ""
+}
+
+// The token structure.
+type yaml_token_t struct {
+ // The token type.
+ typ yaml_token_type_t
+
+ // The start/end of the token.
+ start_mark, end_mark yaml_mark_t
+
+ // The stream encoding (for yaml_STREAM_START_TOKEN).
+ encoding yaml_encoding_t
+
+ // The alias/anchor/scalar value or tag/tag directive handle
+ // (for yaml_ALIAS_TOKEN, yaml_ANCHOR_TOKEN, yaml_SCALAR_TOKEN, yaml_TAG_TOKEN, yaml_TAG_DIRECTIVE_TOKEN).
+ value []byte
+
+ // The tag suffix (for yaml_TAG_TOKEN).
+ suffix []byte
+
+ // The tag directive prefix (for yaml_TAG_DIRECTIVE_TOKEN).
+ prefix []byte
+
+ // The scalar style (for yaml_SCALAR_TOKEN).
+ style yaml_scalar_style_t
+
+ // The version directive major/minor (for yaml_VERSION_DIRECTIVE_TOKEN).
+ major, minor int8
+}
+
+// Events
+
+type yaml_event_type_t int8
+
+// Event types.
+const (
+ // An empty event.
+ yaml_NO_EVENT yaml_event_type_t = iota
+
+ yaml_STREAM_START_EVENT // A STREAM-START event.
+ yaml_STREAM_END_EVENT // A STREAM-END event.
+ yaml_DOCUMENT_START_EVENT // A DOCUMENT-START event. + yaml_DOCUMENT_END_EVENT // A DOCUMENT-END event. + yaml_ALIAS_EVENT // An ALIAS event. + yaml_SCALAR_EVENT // A SCALAR event. + yaml_SEQUENCE_START_EVENT // A SEQUENCE-START event. + yaml_SEQUENCE_END_EVENT // A SEQUENCE-END event. + yaml_MAPPING_START_EVENT // A MAPPING-START event. + yaml_MAPPING_END_EVENT // A MAPPING-END event. + yaml_TAIL_COMMENT_EVENT +) + +var eventStrings = []string{ + yaml_NO_EVENT: "none", + yaml_STREAM_START_EVENT: "stream start", + yaml_STREAM_END_EVENT: "stream end", + yaml_DOCUMENT_START_EVENT: "document start", + yaml_DOCUMENT_END_EVENT: "document end", + yaml_ALIAS_EVENT: "alias", + yaml_SCALAR_EVENT: "scalar", + yaml_SEQUENCE_START_EVENT: "sequence start", + yaml_SEQUENCE_END_EVENT: "sequence end", + yaml_MAPPING_START_EVENT: "mapping start", + yaml_MAPPING_END_EVENT: "mapping end", + yaml_TAIL_COMMENT_EVENT: "tail comment", +} + +func (e yaml_event_type_t) String() string { + if e < 0 || int(e) >= len(eventStrings) { + return fmt.Sprintf("unknown event %d", e) + } + return eventStrings[e] +} + +// The event structure. +type yaml_event_t struct { + + // The event type. + typ yaml_event_type_t + + // The start and end of the event. + start_mark, end_mark yaml_mark_t + + // The document encoding (for yaml_STREAM_START_EVENT). + encoding yaml_encoding_t + + // The version directive (for yaml_DOCUMENT_START_EVENT). + version_directive *yaml_version_directive_t + + // The list of tag directives (for yaml_DOCUMENT_START_EVENT). + tag_directives []yaml_tag_directive_t + + // The comments + head_comment []byte + line_comment []byte + foot_comment []byte + tail_comment []byte + + // The anchor (for yaml_SCALAR_EVENT, yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT, yaml_ALIAS_EVENT). + anchor []byte + + // The tag (for yaml_SCALAR_EVENT, yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT). + tag []byte + + // The scalar value (for yaml_SCALAR_EVENT). + value []byte + + // Is the document start/end indicator implicit, or the tag optional? + // (for yaml_DOCUMENT_START_EVENT, yaml_DOCUMENT_END_EVENT, yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT, yaml_SCALAR_EVENT). + implicit bool + + // Is the tag optional for any non-plain style? (for yaml_SCALAR_EVENT). + quoted_implicit bool + + // The style (for yaml_SCALAR_EVENT, yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT). + style yaml_style_t +} + +func (e *yaml_event_t) scalar_style() yaml_scalar_style_t { return yaml_scalar_style_t(e.style) } +func (e *yaml_event_t) sequence_style() yaml_sequence_style_t { return yaml_sequence_style_t(e.style) } +func (e *yaml_event_t) mapping_style() yaml_mapping_style_t { return yaml_mapping_style_t(e.style) } + +// Nodes + +const ( + yaml_NULL_TAG = "tag:yaml.org,2002:null" // The tag !!null with the only possible value: null. + yaml_BOOL_TAG = "tag:yaml.org,2002:bool" // The tag !!bool with the values: true and false. + yaml_STR_TAG = "tag:yaml.org,2002:str" // The tag !!str for string values. + yaml_INT_TAG = "tag:yaml.org,2002:int" // The tag !!int for integer values. + yaml_FLOAT_TAG = "tag:yaml.org,2002:float" // The tag !!float for float values. + yaml_TIMESTAMP_TAG = "tag:yaml.org,2002:timestamp" // The tag !!timestamp for date and time values. + + yaml_SEQ_TAG = "tag:yaml.org,2002:seq" // The tag !!seq is used to denote sequences. + yaml_MAP_TAG = "tag:yaml.org,2002:map" // The tag !!map is used to denote mapping. + + // Not in original libyaml. 
+ yaml_BINARY_TAG = "tag:yaml.org,2002:binary" + yaml_MERGE_TAG = "tag:yaml.org,2002:merge" + + yaml_DEFAULT_SCALAR_TAG = yaml_STR_TAG // The default scalar tag is !!str. + yaml_DEFAULT_SEQUENCE_TAG = yaml_SEQ_TAG // The default sequence tag is !!seq. + yaml_DEFAULT_MAPPING_TAG = yaml_MAP_TAG // The default mapping tag is !!map. +) + +type yaml_node_type_t int + +// Node types. +const ( + // An empty node. + yaml_NO_NODE yaml_node_type_t = iota + + yaml_SCALAR_NODE // A scalar node. + yaml_SEQUENCE_NODE // A sequence node. + yaml_MAPPING_NODE // A mapping node. +) + +// An element of a sequence node. +type yaml_node_item_t int + +// An element of a mapping node. +type yaml_node_pair_t struct { + key int // The key of the element. + value int // The value of the element. +} + +// The node structure. +type yaml_node_t struct { + typ yaml_node_type_t // The node type. + tag []byte // The node tag. + + // The node data. + + // The scalar parameters (for yaml_SCALAR_NODE). + scalar struct { + value []byte // The scalar value. + length int // The length of the scalar value. + style yaml_scalar_style_t // The scalar style. + } + + // The sequence parameters (for YAML_SEQUENCE_NODE). + sequence struct { + items_data []yaml_node_item_t // The stack of sequence items. + style yaml_sequence_style_t // The sequence style. + } + + // The mapping parameters (for yaml_MAPPING_NODE). + mapping struct { + pairs_data []yaml_node_pair_t // The stack of mapping pairs (key, value). + pairs_start *yaml_node_pair_t // The beginning of the stack. + pairs_end *yaml_node_pair_t // The end of the stack. + pairs_top *yaml_node_pair_t // The top of the stack. + style yaml_mapping_style_t // The mapping style. + } + + start_mark yaml_mark_t // The beginning of the node. + end_mark yaml_mark_t // The end of the node. + +} + +// The document structure. +type yaml_document_t struct { + + // The document nodes. + nodes []yaml_node_t + + // The version directive. + version_directive *yaml_version_directive_t + + // The list of tag directives. + tag_directives_data []yaml_tag_directive_t + tag_directives_start int // The beginning of the tag directives list. + tag_directives_end int // The end of the tag directives list. + + start_implicit int // Is the document start indicator implicit? + end_implicit int // Is the document end indicator implicit? + + // The start/end of the document. + start_mark, end_mark yaml_mark_t +} + +// The prototype of a read handler. +// +// The read handler is called when the parser needs to read more bytes from the +// source. The handler should write not more than size bytes to the buffer. +// The number of written bytes should be set to the size_read variable. +// +// [in,out] data A pointer to an application data specified by +// +// yaml_parser_set_input(). +// +// [out] buffer The buffer to write the data from the source. +// [in] size The size of the buffer. +// [out] size_read The actual number of bytes read from the source. +// +// On success, the handler should return 1. If the handler failed, +// the returned value should be 0. On EOF, the handler should set the +// size_read to 0 and return 1. +type yaml_read_handler_t func(parser *yaml_parser_t, buffer []byte) (n int, err error) + +// This structure holds information about a potential simple key. +type yaml_simple_key_t struct { + possible bool // Is a simple key possible? + required bool // Is a simple key required? + token_number int // The number of the token. + mark yaml_mark_t // The position mark. 
+}
+
+// The states of the parser.
+type yaml_parser_state_t int
+
+const (
+ yaml_PARSE_STREAM_START_STATE yaml_parser_state_t = iota
+
+ yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE // Expect the beginning of an implicit document.
+ yaml_PARSE_DOCUMENT_START_STATE // Expect DOCUMENT-START.
+ yaml_PARSE_DOCUMENT_CONTENT_STATE // Expect the content of a document.
+ yaml_PARSE_DOCUMENT_END_STATE // Expect DOCUMENT-END.
+ yaml_PARSE_BLOCK_NODE_STATE // Expect a block node.
+ yaml_PARSE_BLOCK_NODE_OR_INDENTLESS_SEQUENCE_STATE // Expect a block node or indentless sequence.
+ yaml_PARSE_FLOW_NODE_STATE // Expect a flow node.
+ yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE // Expect the first entry of a block sequence.
+ yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE // Expect an entry of a block sequence.
+ yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE // Expect an entry of an indentless sequence.
+ yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE // Expect the first key of a block mapping.
+ yaml_PARSE_BLOCK_MAPPING_KEY_STATE // Expect a block mapping key.
+ yaml_PARSE_BLOCK_MAPPING_VALUE_STATE // Expect a block mapping value.
+ yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE // Expect the first entry of a flow sequence.
+ yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE // Expect an entry of a flow sequence.
+ yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE // Expect a key of an ordered mapping.
+ yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE // Expect a value of an ordered mapping.
+ yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE // Expect the end of an ordered mapping entry.
+ yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE // Expect the first key of a flow mapping.
+ yaml_PARSE_FLOW_MAPPING_KEY_STATE // Expect a key of a flow mapping.
+ yaml_PARSE_FLOW_MAPPING_VALUE_STATE // Expect a value of a flow mapping.
+ yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE // Expect an empty value of a flow mapping.
+ yaml_PARSE_END_STATE // Expect nothing.
+) + +func (ps yaml_parser_state_t) String() string { + switch ps { + case yaml_PARSE_STREAM_START_STATE: + return "yaml_PARSE_STREAM_START_STATE" + case yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE: + return "yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE" + case yaml_PARSE_DOCUMENT_START_STATE: + return "yaml_PARSE_DOCUMENT_START_STATE" + case yaml_PARSE_DOCUMENT_CONTENT_STATE: + return "yaml_PARSE_DOCUMENT_CONTENT_STATE" + case yaml_PARSE_DOCUMENT_END_STATE: + return "yaml_PARSE_DOCUMENT_END_STATE" + case yaml_PARSE_BLOCK_NODE_STATE: + return "yaml_PARSE_BLOCK_NODE_STATE" + case yaml_PARSE_BLOCK_NODE_OR_INDENTLESS_SEQUENCE_STATE: + return "yaml_PARSE_BLOCK_NODE_OR_INDENTLESS_SEQUENCE_STATE" + case yaml_PARSE_FLOW_NODE_STATE: + return "yaml_PARSE_FLOW_NODE_STATE" + case yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE: + return "yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE" + case yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE: + return "yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE" + case yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE: + return "yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE" + case yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE: + return "yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE" + case yaml_PARSE_BLOCK_MAPPING_KEY_STATE: + return "yaml_PARSE_BLOCK_MAPPING_KEY_STATE" + case yaml_PARSE_BLOCK_MAPPING_VALUE_STATE: + return "yaml_PARSE_BLOCK_MAPPING_VALUE_STATE" + case yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE: + return "yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE" + case yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE: + return "yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE" + case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE: + return "yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE" + case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE: + return "yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE" + case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE: + return "yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE" + case yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE: + return "yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE" + case yaml_PARSE_FLOW_MAPPING_KEY_STATE: + return "yaml_PARSE_FLOW_MAPPING_KEY_STATE" + case yaml_PARSE_FLOW_MAPPING_VALUE_STATE: + return "yaml_PARSE_FLOW_MAPPING_VALUE_STATE" + case yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE: + return "yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE" + case yaml_PARSE_END_STATE: + return "yaml_PARSE_END_STATE" + } + return "" +} + +// This structure holds aliases data. +type yaml_alias_data_t struct { + anchor []byte // The anchor. + index int // The node id. + mark yaml_mark_t // The anchor mark. +} + +// The parser structure. +// +// All members are internal. Manage the structure using the +// yaml_parser_ family of functions. +type yaml_parser_t struct { + + // Error handling + + error yaml_error_type_t // Error type. + + problem string // Error description. + + // The byte about which the problem occurred. + problem_offset int + problem_value int + problem_mark yaml_mark_t + + // The error context. + context string + context_mark yaml_mark_t + + // Reader stuff + + read_handler yaml_read_handler_t // Read handler. + + input_reader io.Reader // File input data. + input []byte // String input data. + input_pos int + + eof bool // EOF flag + + buffer []byte // The working buffer. + buffer_pos int // The current position of the buffer. + + unread int // The number of unread characters in the buffer. + + newlines int // The number of line breaks since last non-break/non-blank character + + raw_buffer []byte // The raw buffer. + raw_buffer_pos int // The current position of the buffer. 
+ + encoding yaml_encoding_t // The input encoding. + + offset int // The offset of the current position (in bytes). + mark yaml_mark_t // The mark of the current position. + + // Comments + + head_comment []byte // The current head comments + line_comment []byte // The current line comments + foot_comment []byte // The current foot comments + tail_comment []byte // Foot comment that happens at the end of a block. + stem_comment []byte // Comment in item preceding a nested structure (list inside list item, etc) + + comments []yaml_comment_t // The folded comments for all parsed tokens + comments_head int + + // Scanner stuff + + stream_start_produced bool // Have we started to scan the input stream? + stream_end_produced bool // Have we reached the end of the input stream? + + flow_level int // The number of unclosed '[' and '{' indicators. + + tokens []yaml_token_t // The tokens queue. + tokens_head int // The head of the tokens queue. + tokens_parsed int // The number of tokens fetched from the queue. + token_available bool // Does the tokens queue contain a token ready for dequeueing. + + indent int // The current indentation level. + indents []int // The indentation levels stack. + + simple_key_allowed bool // May a simple key occur at the current position? + simple_keys []yaml_simple_key_t // The stack of simple keys. + simple_keys_by_tok map[int]int // possible simple_key indexes indexed by token_number + + // Parser stuff + + state yaml_parser_state_t // The current parser state. + states []yaml_parser_state_t // The parser states stack. + marks []yaml_mark_t // The stack of marks. + tag_directives []yaml_tag_directive_t // The list of TAG directives. + + // Dumper stuff + + aliases []yaml_alias_data_t // The alias data. + + document *yaml_document_t // The currently parsed document. +} + +type yaml_comment_t struct { + scan_mark yaml_mark_t // Position where scanning for comments started + token_mark yaml_mark_t // Position after which tokens will be associated with this comment + start_mark yaml_mark_t // Position of '#' comment mark + end_mark yaml_mark_t // Position where comment terminated + + head []byte + line []byte + foot []byte +} + +// Emitter Definitions + +// The prototype of a write handler. +// +// The write handler is called when the emitter needs to flush the accumulated +// characters to the output. The handler should write @a size bytes of the +// @a buffer to the output. +// +// @param[in,out] data A pointer to an application data specified by +// +// yaml_emitter_set_output(). +// +// @param[in] buffer The buffer with bytes to be written. +// @param[in] size The size of the buffer. +// +// @returns On success, the handler should return @c 1. If the handler failed, +// the returned value should be @c 0. +type yaml_write_handler_t func(emitter *yaml_emitter_t, buffer []byte) error + +type yaml_emitter_state_t int + +// The emitter states. +const ( + // Expect STREAM-START. + yaml_EMIT_STREAM_START_STATE yaml_emitter_state_t = iota + + yaml_EMIT_FIRST_DOCUMENT_START_STATE // Expect the first DOCUMENT-START or STREAM-END. + yaml_EMIT_DOCUMENT_START_STATE // Expect DOCUMENT-START or STREAM-END. + yaml_EMIT_DOCUMENT_CONTENT_STATE // Expect the content of a document. + yaml_EMIT_DOCUMENT_END_STATE // Expect DOCUMENT-END. + yaml_EMIT_FLOW_SEQUENCE_FIRST_ITEM_STATE // Expect the first item of a flow sequence. 
+ yaml_EMIT_FLOW_SEQUENCE_TRAIL_ITEM_STATE // Expect the next item of a flow sequence, with the comma already written out + yaml_EMIT_FLOW_SEQUENCE_ITEM_STATE // Expect an item of a flow sequence. + yaml_EMIT_FLOW_MAPPING_FIRST_KEY_STATE // Expect the first key of a flow mapping. + yaml_EMIT_FLOW_MAPPING_TRAIL_KEY_STATE // Expect the next key of a flow mapping, with the comma already written out + yaml_EMIT_FLOW_MAPPING_KEY_STATE // Expect a key of a flow mapping. + yaml_EMIT_FLOW_MAPPING_SIMPLE_VALUE_STATE // Expect a value for a simple key of a flow mapping. + yaml_EMIT_FLOW_MAPPING_VALUE_STATE // Expect a value of a flow mapping. + yaml_EMIT_BLOCK_SEQUENCE_FIRST_ITEM_STATE // Expect the first item of a block sequence. + yaml_EMIT_BLOCK_SEQUENCE_ITEM_STATE // Expect an item of a block sequence. + yaml_EMIT_BLOCK_MAPPING_FIRST_KEY_STATE // Expect the first key of a block mapping. + yaml_EMIT_BLOCK_MAPPING_KEY_STATE // Expect the key of a block mapping. + yaml_EMIT_BLOCK_MAPPING_SIMPLE_VALUE_STATE // Expect a value for a simple key of a block mapping. + yaml_EMIT_BLOCK_MAPPING_VALUE_STATE // Expect a value of a block mapping. + yaml_EMIT_END_STATE // Expect nothing. +) + +// The emitter structure. +// +// All members are internal. Manage the structure using the @c yaml_emitter_ +// family of functions. +type yaml_emitter_t struct { + + // Error handling + + error yaml_error_type_t // Error type. + problem string // Error description. + + // Writer stuff + + write_handler yaml_write_handler_t // Write handler. + + output_buffer *[]byte // String output data. + output_writer io.Writer // File output data. + + buffer []byte // The working buffer. + buffer_pos int // The current position of the buffer. + + raw_buffer []byte // The raw buffer. + raw_buffer_pos int // The current position of the buffer. + + encoding yaml_encoding_t // The stream encoding. + + // Emitter stuff + + canonical bool // If the output is in the canonical style? + best_indent int // The number of indentation spaces. + best_width int // The preferred width of the output lines. + unicode bool // Allow unescaped non-ASCII characters? + line_break yaml_break_t // The preferred line break. + + state yaml_emitter_state_t // The current emitter state. + states []yaml_emitter_state_t // The stack of states. + + events []yaml_event_t // The event queue. + events_head int // The head of the event queue. + + indents []int // The stack of indentation levels. + + tag_directives []yaml_tag_directive_t // The list of tag directives. + + indent int // The current indentation level. + + flow_level int // The current flow level. + + root_context bool // Is it the document root context? + sequence_context bool // Is it a sequence context? + mapping_context bool // Is it a mapping context? + simple_key_context bool // Is it a simple mapping key context? + + line int // The current line. + column int // The current column. + whitespace bool // If the last character was a whitespace? + indention bool // If the last character was an indentation character (' ', '-', '?', ':')? + open_ended bool // If an explicit document end is required? + + space_above bool // Is there's an empty line above? + foot_indent int // The indent used to write the foot comment above, or -1 if none. + + // Anchor analysis. + anchor_data struct { + anchor []byte // The anchor value. + alias bool // Is it an alias? + } + + // Tag analysis. + tag_data struct { + handle []byte // The tag handle. + suffix []byte // The tag suffix. + } + + // Scalar analysis. 
+ scalar_data struct {
+ value []byte // The scalar value.
+ multiline bool // Does the scalar contain line breaks?
+ flow_plain_allowed bool // Can the scalar be expressed in the flow plain style?
+ block_plain_allowed bool // Can the scalar be expressed in the block plain style?
+ single_quoted_allowed bool // Can the scalar be expressed in the single quoted style?
+ block_allowed bool // Can the scalar be expressed in the literal or folded styles?
+ style yaml_scalar_style_t // The output style.
+ }
+
+ // Comments
+ head_comment []byte
+ line_comment []byte
+ foot_comment []byte
+ tail_comment []byte
+
+ key_line_comment []byte
+
+ // Dumper stuff
+
+ opened bool // Was the stream already opened?
+ closed bool // Was the stream already closed?
+
+ // The information associated with the document nodes.
+ anchors *struct {
+ references int // The number of references.
+ anchor int // The anchor id.
+ serialized bool // Has the node been emitted?
+ }
+
+ last_anchor_id int // The last assigned anchor id.
+
+ document *yaml_document_t // The currently emitted document.
+}
diff --git a/cli/internal/yaml/yamlprivateh.go b/cli/internal/yaml/yamlprivateh.go
new file mode 100644
index 0000000..dea1ba9
--- /dev/null
+++ b/cli/internal/yaml/yamlprivateh.go
@@ -0,0 +1,201 @@
+//
+// Copyright (c) 2011-2019 Canonical Ltd
+// Copyright (c) 2006-2010 Kirill Simonov
+//
+// Permission is hereby granted, free of charge, to any person obtaining a copy of
+// this software and associated documentation files (the "Software"), to deal in
+// the Software without restriction, including without limitation the rights to
+// use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+// of the Software, and to permit persons to whom the Software is furnished to do
+// so, subject to the following conditions:
+//
+// The above copyright notice and this permission notice shall be included in all
+// copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+// SOFTWARE.
+
+package yaml
+
+const (
+ // The size of the input raw buffer.
+ input_raw_buffer_size = 512
+
+ // The size of the input buffer.
+ // It should be possible to decode the whole raw buffer.
+ input_buffer_size = input_raw_buffer_size * 3
+
+ // The size of the output buffer.
+ output_buffer_size = 128
+
+ // The size of the output raw buffer.
+ // It should be possible to encode the whole output buffer.
+ output_raw_buffer_size = (output_buffer_size*2 + 2)
+
+ // The size of other stacks and queues.
+ initial_stack_size = 16
+ initial_queue_size = 16
+ initial_string_size = 16
+)
+
+// Check if the character at the specified position is an alphabetical
+// character, a digit, '_', or '-'.
+func is_alpha(b []byte, i int) bool {
+ return b[i] >= '0' && b[i] <= '9' || b[i] >= 'A' && b[i] <= 'Z' || b[i] >= 'a' && b[i] <= 'z' || b[i] == '_' || b[i] == '-'
+}
+
+// Check if the character at the specified position is a digit.
+func is_digit(b []byte, i int) bool {
+ return b[i] >= '0' && b[i] <= '9'
+}
+
+// Get the value of a digit.
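+// For example, as_digit([]byte("42"), 0) returns 4.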
+func as_digit(b []byte, i int) int {
+ return int(b[i]) - '0'
+}
+
+// Check if the character at the specified position is a hex-digit.
+func is_hex(b []byte, i int) bool {
+ return b[i] >= '0' && b[i] <= '9' || b[i] >= 'A' && b[i] <= 'F' || b[i] >= 'a' && b[i] <= 'f'
+}
+
+// Get the value of a hex-digit.
+func as_hex(b []byte, i int) int {
+ bi := b[i]
+ if bi >= 'A' && bi <= 'F' {
+ return int(bi) - 'A' + 10
+ }
+ if bi >= 'a' && bi <= 'f' {
+ return int(bi) - 'a' + 10
+ }
+ return int(bi) - '0'
+}
+
+// Check if the character is ASCII.
+func is_ascii(b []byte, i int) bool {
+ return b[i] <= 0x7F
+}
+
+// Check if the character at the specified position can be printed unescaped.
+func is_printable(b []byte, i int) bool {
+ return ((b[i] == 0x0A) || // . == #x0A
+ (b[i] >= 0x20 && b[i] <= 0x7E) || // #x20 <= . <= #x7E
+ (b[i] == 0xC2 && b[i+1] >= 0xA0) || // #xA0 <= . <= #xD7FF
+ (b[i] > 0xC2 && b[i] < 0xED) ||
+ (b[i] == 0xED && b[i+1] < 0xA0) ||
+ (b[i] == 0xEE) ||
+ (b[i] == 0xEF && // #xE000 <= . <= #xFFFD
+ !(b[i+1] == 0xBB && b[i+2] == 0xBF) && // && . != #xFEFF
+ !(b[i+1] == 0xBF && (b[i+2] == 0xBE || b[i+2] == 0xBF))))
+}
+
+// Check if the character at the specified position is NUL.
+func is_z(b []byte, i int) bool {
+ return b[i] == 0x00
+}
+
+// Check if the beginning of the buffer is a BOM.
+func is_bom(b []byte, i int) bool {
+ return b[0] == 0xEF && b[1] == 0xBB && b[2] == 0xBF
+}
+
+// Check if the character at the specified position is space.
+func is_space(b []byte, i int) bool {
+ return b[i] == ' '
+}
+
+// Check if the character at the specified position is tab.
+func is_tab(b []byte, i int) bool {
+ return b[i] == '\t'
+}
+
+// Check if the character at the specified position is blank (space or tab).
+func is_blank(b []byte, i int) bool {
+ //return is_space(b, i) || is_tab(b, i)
+ return b[i] == ' ' || b[i] == '\t'
+}
+
+// Check if the character at the specified position is a line break.
+func is_break(b []byte, i int) bool {
+ return (b[i] == '\r' || // CR (#xD)
+ b[i] == '\n' || // LF (#xA)
+ b[i] == 0xC2 && b[i+1] == 0x85 || // NEL (#x85)
+ b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA8 || // LS (#x2028)
+ b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA9) // PS (#x2029)
+}
+
+func is_crlf(b []byte, i int) bool {
+ return b[i] == '\r' && b[i+1] == '\n'
+}
+
+// Check if the character is a line break or NUL.
+func is_breakz(b []byte, i int) bool {
+ //return is_break(b, i) || is_z(b, i)
+ return (
+ // is_break:
+ b[i] == '\r' || // CR (#xD)
+ b[i] == '\n' || // LF (#xA)
+ b[i] == 0xC2 && b[i+1] == 0x85 || // NEL (#x85)
+ b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA8 || // LS (#x2028)
+ b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA9 || // PS (#x2029)
+ // is_z:
+ b[i] == 0)
+}
+
+// Check if the character is a line break, space, or NUL.
+func is_spacez(b []byte, i int) bool {
+ //return is_space(b, i) || is_breakz(b, i)
+ return (
+ // is_space:
+ b[i] == ' ' ||
+ // is_breakz:
+ b[i] == '\r' || // CR (#xD)
+ b[i] == '\n' || // LF (#xA)
+ b[i] == 0xC2 && b[i+1] == 0x85 || // NEL (#x85)
+ b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA8 || // LS (#x2028)
+ b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA9 || // PS (#x2029)
+ b[i] == 0)
+}
+
+// Check if the character is a line break, space, tab, or NUL.
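+// For example, is_blankz([]byte("a b"), 1) is true (the space), while
+// is_blankz([]byte("a b"), 0) is false ('a' is a content character).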
+func is_blankz(b []byte, i int) bool { + //return is_blank(b, i) || is_breakz(b, i) + return ( + // is_blank: + b[i] == ' ' || b[i] == '\t' || + // is_breakz: + b[i] == '\r' || // CR (#xD) + b[i] == '\n' || // LF (#xA) + b[i] == 0xC2 && b[i+1] == 0x85 || // NEL (#x85) + b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA8 || // LS (#x2028) + b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA9 || // PS (#x2029) + b[i] == 0) +} + +// Determine the width of the character. +func width(b byte) int { + // Don't replace these by a switch without first + // confirming that it is being inlined. + if b&0x80 == 0x00 { + return 1 + } + if b&0xE0 == 0xC0 { + return 2 + } + if b&0xF0 == 0xE0 { + return 3 + } + if b&0xF8 == 0xF0 { + return 4 + } + return 0 + +} diff --git a/cli/package.json b/cli/package.json new file mode 100644 index 0000000..971eaca --- /dev/null +++ b/cli/package.json @@ -0,0 +1,18 @@ +{ + "name": "cli", + "private": true, + "version": "0.0.0", + "scripts": { + "clean": "make clean", + "build": "make", + "test": "make test-go", + "format": "make fmt-go", + "lint": "make lint-go" + }, + "devDependencies": { + "copy-template-dir": "^1.4.0", + "faker": "^5.1.0", + "ngraph.generators": "^19.3.0", + "shelljs": "^0.8.4" + } +} diff --git a/cli/scripts/generate.mjs b/cli/scripts/generate.mjs new file mode 100644 index 0000000..1b9cbec --- /dev/null +++ b/cli/scripts/generate.mjs @@ -0,0 +1,297 @@ +#!/usr/bin/env node +import shelljs from "shelljs"; +import path from "path"; +import fs from "fs-extra"; +import faker from "faker"; +import graphGenerator from "ngraph.generators"; +import copy from "copy-template-dir"; +import { fileURLToPath } from "url"; + +const __dirname = path.dirname(fileURLToPath(import.meta.url)); +faker.seed(123); + +const scope = `@${faker.hacker + .noun() + .toLowerCase() + .replace(/\s/g, "-") + .replace("1080p-", "rando")}`; + +const type = process.argv[2]; + +// TODO: algo should be customizable along with the size +const packageGraph = graphGenerator.complete(5); + +// Generate the package name & versions +packageGraph.forEachNode((node) => { + node.data = { + name: `${scope}/${faker.hacker.adjective()}-${faker.hacker.noun()}` + .toLocaleLowerCase() + .replace(/\s/g, "-"), + version: faker.system.semver(), + }; +}); + +// Generate package dependencies +packageGraph.forEachNode((node) => { + const links = packageGraph.getLinks(node.id); + + if (links) { + for (const link of links) { + if (link.fromId === node.id) { + const depNode = packageGraph.getNode(link.toId); + node.data.dependencies = node.data.dependencies || {}; + node.data.dependencies[depNode.data.name] = `^${depNode.data.version}`; + node.data.implicitDependencies = node.data.implicitDependencies || []; + node.data.implicitDependencies.push( + depNode.data.name.replace(/^@[^/]+\//, "") + ); + } + } + } +}); + +// Generate the monorepo +// 1. the root package.json +// 2. create packages/ +// 3. create package directories +const root = path.join(__dirname, "../demo", type); + +function generate(root, skipInstall) { + fs.mkdirSync(root, { recursive: true }); + if (type !== "nx") { + fs.writeFileSync( + path.join(root, ".gitignore"), + `node_modules +dist +.turbo +out +turbo +turbo-linux +.yalc` + ); + if (fs.existsSync(root)) { + try { + fs.rmSync(root + "/packages", { recursive: true }); + } catch (error) {} + } + + let deps = + type !== "turbo" + ? 
{ + devDependencies: { + [type]: "*", + }, + } + : {}; + + fs.writeFileSync( + path.join(root, "package.json"), + JSON.stringify( + { + name: "monorepo", + version: "0.0.0", + private: true, + workspaces: ["packages/*"], + ...deps, + packageManager: "yarn@1.22.17", + }, + null, + 2 + ) + ); + + fs.writeFileSync( + path.join(root, "tsconfig.json"), + JSON.stringify( + { + compilerOptions: { + composite: false, + declaration: true, + declarationMap: true, + esModuleInterop: true, + forceConsistentCasingInFileNames: true, + inlineSourceMap: true, + inlineSources: false, + isolatedModules: true, + moduleResolution: "node", + noUnusedLocals: false, + noUnusedParameters: false, + preserveWatchOutput: true, + skipLibCheck: true, + strict: true, + lib: ["es2020"], + module: "commonjs", + target: "es2020", + }, + }, + null, + 2 + ) + ); + } + + if (type === "turbo") { + fs.writeFileSync( + path.join(root, "turbo.json"), + JSON.stringify( + { + npmClient: "yarn", + cacheStorageConfig: { + provider: "local", + cacheUrl: "https://1a77600385dd.ngrok.io", + }, + pipeline: { + build: { + outputs: ["dist/**/*"], + dependsOn: ["^build"], + }, + test: { + dependsOn: ["build"], + }, + dev: { + cache: false, + }, + }, + }, + null, + 2 + ) + ); + } + + if (type === "lerna") { + fs.writeFileSync( + path.join(root, "lerna.json"), + JSON.stringify( + { + packages: ["packages/*"], + version: "0.0.0", + }, + null, + 2 + ) + ); + } + + if (type === "lage") { + fs.writeFileSync( + path.join(root, "lage.config.js"), + ` +module.exports = { + pipeline: { + build: ['^build'], + test: ['build'], + lint: [], + }, + npmClient: 'yarn', + cacheOptions: { + cacheStorageConfig: { + provider: 'local', + }, + outputGlob: ['dist/**'], + }, +}; + ` + ); + } + + if (type !== "nx") { + fs.mkdirSync(path.join(root, "packages")); + } else { + shelljs.exec( + `cd ${path.join( + __dirname, + "../demo" + )} && yarn create nx-workspace nx --preset=empty --nx-cloud=false --packageManager=yarn --cli=nx --linter=eslint` + ); + shelljs.exec(`cd ${root} && yarn add @nrwl/node`); + } + + if (type !== "nx") { + packageGraph.forEachNode((node) => { + const packageRoot = path.join( + root, + "packages", + node.data.name.replace(/^@[^/]+\//, "") + ); + fs.mkdirSync(packageRoot, { recursive: true }); + copy( + path.join(__dirname, "templates"), + path.join(packageRoot), + { + name: node.data.name.replace(/^@[^/]+\//, ""), + }, + () => {} + ); + + fs.writeFileSync( + path.join(packageRoot, "package.json"), + JSON.stringify( + { + name: node.data.name, + version: node.data.version, + dependencies: node.data.dependencies, + files: ["dist/**"], + main: "dist/index.js", + types: "dist/index.d.ts", + devDependencies: { + typescript: "^4.6.3", + jest: "^27.0.0", + "ts-jest": "^27.0.0", + "@types/jest": "^27.0.0", + }, + scripts: { + build: "tsc", + dev: "tsc -w", + test: "jest", + }, + }, + null, + 2 + ) + ); + }); + } + + if (type === "nx") { + packageGraph.forEachNode((node) => { + shelljs.exec( + `cd ${root} && yarn nx g @nrwl/node:library --buildable --publishable --name="${node.data.name.replace( + /^@[^/]+\//, + "" + )}" --importPath="${node.data.name.replace(/^@[^/]+\//, "")}"` + ); + // instead of dealing with actual code, just list as implicitDependencies + const safeName = node.data.name.replace(/^@[^/]+\//, ""); + const workspace = fs.readJSONSync(path.join(root, "workspace.json")); + workspace.projects[safeName] = { + ...workspace.projects[safeName], + implicitDependencies: node.data.implicitDependencies || [], + }; + fs.writeFileSync( + 
path.join(root, "nx.json"), + JSON.stringify(workspace, null, 2) + ); + }); + } + if (!skipInstall) { + shelljs.exec(`cd ${root} && yarn install`); + } + fs.ensureDirSync(path.join(root, ".git")); + fs.writeFileSync( + path.join(root, ".git", "config"), + ` +[user] + name = GitHub Actions + email = actions@users.noreply.github.com +` + ); + shelljs.exec( + `cd ${root} && git init -q && git add . && git commit -m "init"` + ); +} + +generate(root); +if (type === "turbo") { + generate(root + "-installed", true); +} diff --git a/cli/scripts/nginx/.dockerignore b/cli/scripts/nginx/.dockerignore new file mode 100644 index 0000000..4c8fbef --- /dev/null +++ b/cli/scripts/nginx/.dockerignore @@ -0,0 +1 @@ +cacher_root diff --git a/cli/scripts/nginx/Dockerfile.cacher b/cli/scripts/nginx/Dockerfile.cacher new file mode 100644 index 0000000..aedf629 --- /dev/null +++ b/cli/scripts/nginx/Dockerfile.cacher @@ -0,0 +1,11 @@ +FROM ubuntu:xenial + +RUN apt-get update \ + && apt-get install -y --no-install-recommends \ + nginx \ + nginx-extras \ + && rm -rf /var/lib/apt/lists/* + +COPY nginx.conf /etc/nginx/nginx.conf + +CMD nginx -g "daemon off;" diff --git a/cli/scripts/nginx/docker-compose.yml b/cli/scripts/nginx/docker-compose.yml new file mode 100644 index 0000000..d93ef16 --- /dev/null +++ b/cli/scripts/nginx/docker-compose.yml @@ -0,0 +1,9 @@ +services: + cacher: + build: + context: . + dockerfile: Dockerfile.cacher + volumes: + - ./cacher_root:/var/www/cache + ports: + - "7070:7070" diff --git a/cli/scripts/nginx/nginx.conf b/cli/scripts/nginx/nginx.conf new file mode 100644 index 0000000..d56b5c0 --- /dev/null +++ b/cli/scripts/nginx/nginx.conf @@ -0,0 +1,39 @@ +user root; +worker_processes auto; +pid /run/nginx.pid; + +events { + worker_connections 768; + # multi_accept on; +} + +http { + sendfile on; + tcp_nopush on; + tcp_nodelay on; + keepalive_timeout 65; + types_hash_max_size 2048; + # server_tokens off; + + include /etc/nginx/mime.types; + default_type application/octet-stream; + + access_log /dev/stdout; + error_log /dev/stderr; + + gzip on; + gzip_disable "msie6"; + + server { + listen 7070 default_server; + + root /var/www; + + location /v8/artifacts { + dav_methods PUT; + autoindex on; + allow all; + client_max_body_size 512M; + } + } +} diff --git a/cli/scripts/npm-native-packages/.gitignore b/cli/scripts/npm-native-packages/.gitignore new file mode 100644 index 0000000..84c048a --- /dev/null +++ b/cli/scripts/npm-native-packages/.gitignore @@ -0,0 +1 @@ +/build/ diff --git a/cli/scripts/npm-native-packages/npm-native-packages.js b/cli/scripts/npm-native-packages/npm-native-packages.js new file mode 100644 index 0000000..06ab67f --- /dev/null +++ b/cli/scripts/npm-native-packages/npm-native-packages.js @@ -0,0 +1,57 @@ +#!/usr/bin/env node + +const fs = require("fs"); +const path = require("path"); + +// Map to node os and arch names. 
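+// Usage: node npm-native-packages.js <os> <arch> <version>
+// e.g. node npm-native-packages.js darwin arm64 1.2.3 (the version shown is
+// illustrative); <os> and <arch> are the Go-style names translated below.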
+const nodeOsLookup = {
+ darwin: "darwin",
+ linux: "linux",
+ windows: "win32",
+};
+
+const nodeArchLookup = {
+ amd64: "x64",
+ arm64: "arm64",
+};
+
+const humanizedArchLookup = {
+ amd64: "64",
+ arm64: "arm64",
+};
+
+const template = require("./template/template.package.json");
+const os = process.argv[2];
+const arch = process.argv[3];
+const version = process.argv[4];
+
+template.name = `turbo-${os}-${humanizedArchLookup[arch]}`;
+template.description = `The ${os}-${humanizedArchLookup[arch]} binary for turbo, a monorepo build system.`;
+template.os = [nodeOsLookup[os]];
+template.cpu = [nodeArchLookup[arch]];
+template.version = version;
+
+const outputPath = path.join(__dirname, "build", template.name);
+fs.rmSync(outputPath, { recursive: true, force: true });
+fs.mkdirSync(path.join(outputPath, "bin"), { recursive: true });
+
+if (os === "windows") {
+ fs.copyFileSync(
+ path.join(__dirname, "template", "bin", "turbo"),
+ path.join(outputPath, "bin", "turbo")
+ );
+}
+fs.copyFileSync(
+ path.join(__dirname, "template", "README.md"),
+ path.join(outputPath, "README.md")
+);
+fs.writeFileSync(
+ path.join(outputPath, "package.json"),
+ JSON.stringify(template, null, 2)
+);
+
+const goBin = os === "windows" ? "go-turbo.exe" : "go-turbo";
+fs.copyFileSync(
+ path.join(__dirname, "..", "..", `dist-${os}-${arch}`, goBin),
+ path.join(outputPath, "bin", goBin)
+);
diff --git a/cli/scripts/npm-native-packages/template/README.md b/cli/scripts/npm-native-packages/template/README.md
new file mode 100644
index 0000000..fcbd4c0
--- /dev/null
+++ b/cli/scripts/npm-native-packages/template/README.md
@@ -0,0 +1,3 @@
+# `turbo`
+
+This is a platform-specific binary for Turborepo, a monorepo build system. See https://github.com/vercel/turbo for details.
diff --git a/cli/scripts/npm-native-packages/template/bin/turbo b/cli/scripts/npm-native-packages/template/bin/turbo
new file mode 100644
index 0000000..4557a07
--- /dev/null
+++ b/cli/scripts/npm-native-packages/template/bin/turbo
@@ -0,0 +1,19 @@
+#!/usr/bin/env node
+
+// Unfortunately even though npm shims "bin" commands on Windows with auto-
+// generated forwarding scripts, it doesn't strip the ".exe" from the file name
+// first. So it's possible to publish executables via npm on all platforms
+// except Windows. I consider this an npm bug.
+//
+// My workaround is to add this script as another layer of indirection. It'll
+// be slower because node has to boot up just to shell out to the actual exe,
+// but Windows is somewhat of a second-class platform to npm, so this is the
+// best workaround available.
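+//
+// The child process inherits stdio, and its exit status is propagated so that
+// a failing turbo invocation also fails this shim.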
+const path = require('path');
+const turbo_exe = path.join(__dirname, 'turbo.exe');
+const child_process = require('child_process');
+const result = child_process.spawnSync(turbo_exe, process.argv.slice(2), { stdio: 'inherit' });
+process.exitCode = result.status === null ? 1 : result.status;
diff --git a/cli/scripts/npm-native-packages/template/template.package.json b/cli/scripts/npm-native-packages/template/template.package.json
new file mode 100644
index 0000000..fdc72c0
--- /dev/null
+++ b/cli/scripts/npm-native-packages/template/template.package.json
@@ -0,0 +1,12 @@
+{
+ "name": "turbo-{{Os}}-{{Arch}}",
+ "version": "{{Version}}",
+ "description": "The {{Os}}-{{Arch}} binary for turbo, a monorepo build system.",
+ "repository": "https://github.com/vercel/turbo",
+ "bugs": "https://github.com/vercel/turbo/issues",
+ "homepage": "https://turbo.build/repo",
+ "license": "MPL-2.0",
+ "os": ["{{Os}}"],
+ "cpu": ["{{Arch}}"],
+ "preferUnplugged": true
+}
diff --git a/cli/scripts/templates/jest.config.js b/cli/scripts/templates/jest.config.js
new file mode 100644
index 0000000..0548306
--- /dev/null
+++ b/cli/scripts/templates/jest.config.js
@@ -0,0 +1,10 @@
+module.exports = {
+ roots: ["<rootDir>/src"],
+ transform: {
+ "^.+\\.tsx?$": "ts-jest",
+ },
+ // testRegex: '(/__tests__/.*(\\.|/)(test|spec))\\.tsx?$',
+ moduleFileExtensions: ["ts", "tsx", "js", "jsx", "json", "node"],
+ modulePathIgnorePatterns: ["<rootDir>/src/__fixtures__"],
+ preset: "ts-jest",
+};
diff --git a/cli/scripts/templates/src/__tests__/index.test.ts b/cli/scripts/templates/src/__tests__/index.test.ts
new file mode 100644
index 0000000..9a4831a
--- /dev/null
+++ b/cli/scripts/templates/src/__tests__/index.test.ts
@@ -0,0 +1,7 @@
+import { sum } from "../.";
+
+describe("Hello", () => {
+ it("renders without crashing", () => {
+ expect(sum(1, 2)).toEqual(3);
+ });
+});
diff --git a/cli/scripts/templates/src/__tests__/tsconfig.json b/cli/scripts/templates/src/__tests__/tsconfig.json
new file mode 100644
index 0000000..bf65be6
--- /dev/null
+++ b/cli/scripts/templates/src/__tests__/tsconfig.json
@@ -0,0 +1,4 @@
+{
+ "extends": "../../tsconfig.json",
+ "include": [".", "../."]
+}
diff --git a/cli/scripts/templates/src/index.ts b/cli/scripts/templates/src/index.ts
new file mode 100644
index 0000000..715e93e
--- /dev/null
+++ b/cli/scripts/templates/src/index.ts
@@ -0,0 +1,3 @@
+export const sum = (a: number, b: number) => {
+ return a + b;
+};
diff --git a/cli/scripts/templates/tsconfig.json b/cli/scripts/templates/tsconfig.json
new file mode 100644
index 0000000..76ae392
--- /dev/null
+++ b/cli/scripts/templates/tsconfig.json
@@ -0,0 +1,9 @@
+{
+ "extends": "../../tsconfig.json",
+ "compilerOptions": {
+ "rootDir": "src",
+ "outDir": "dist"
+ },
+ "include": ["src"],
+ "exclude": ["node_modules"]
+}
diff --git a/cli/turbo.json b/cli/turbo.json
new file mode 100644
index 0000000..b8567ca
--- /dev/null
+++ b/cli/turbo.json
@@ -0,0 +1,17 @@
+{
+ "$schema": "../docs/public/schema.json",
+ "extends": ["//"],
+ "pipeline": {
+ "build": {
+ "outputs": ["turbo", "turbo.exe"]
+ },
+
+ "e2e": {
+ "outputs": [],
+ "inputs": ["**/*.go", "go.mod", "go.sum", "scripts/e2e/e2e.ts"]
+ },
+ "e2e-prebuilt": {
+ "inputs": ["**/*.go", "go.mod", "go.sum", "scripts/e2e/e2e.ts"]
+ }
+ }
+}
diff --git a/clippy.toml b/clippy.toml
new file mode 100644
index 0000000..d516089
--- /dev/null
+++ b/clippy.toml
@@ -0,0 +1,14 @@
+disallowed-types = [
+ # Use turbo_tasks_hash::{DeterministicHash, Xxh3Hash64Hasher} instead.
+ "std::collections::hash_map::DefaultHasher", +] +disallowed-methods = [ + # We forbid the use of the Hasher::hash trait item to prevent misuse of + # hashing Vcs. Vcs must themselves be hashable (to be useable in maps and + # sets), but the hash **is not stable** and must not be observed. + # Use Xxh3Hash64Hasher::write with value's bytes directly. + "std::hash::Hasher::hash", + # We forbid the use of VecDeque::new as it allocates, which is kind of unexpected + # Instead use VecDeque::with_capacity to make it explicit or opt-out of that. + "std::collections::VecDeque::new", +] diff --git a/deny.toml b/deny.toml new file mode 100644 index 0000000..9284775 --- /dev/null +++ b/deny.toml @@ -0,0 +1,40 @@ +[licenses] +unlicensed = "deny" +allow-osi-fsf-free = "neither" +copyleft = "deny" +# We want really high confidence when inferring licenses from text +confidence-threshold = 0.93 +allow = [ + "Apache-2.0", + "Apache-2.0 WITH LLVM-exception", + "MIT", + "MIT-0", + "MPL-2.0", + # enum-iterator* + "0BSD", + # base16, notify + "CC0-1.0", + # Inflector, hyper-tungstenite + "BSD-2-Clause", + # inotify*, libloading, is_ci + "ISC", + # fuchsia-zircon*, instant, sourcemap + "BSD-3-Clause", + # unicode-ident + "Unicode-DFS-2016", + # portpicker + "Unlicense", + "OpenSSL", + "Zlib", + # webc (wasmer) + "BUSL-1.1", +] +[[licenses.clarify]] +name = "ring" +expression = "ISC AND MIT AND OpenSSL" +license-files = [{ path = "LICENSE", hash = 0xbd0eed23 }] + +[[licenses.clarify]] +name = "webpki" +expression = "ISC AND MIT AND OpenSSL" +license-files = [{ path = "LICENSE", hash = 0x001c7e6c }] diff --git a/docs/next.config.js b/docs/next.config.js index 2d6cc4e..eb56c65 100644 --- a/docs/next.config.js +++ b/docs/next.config.js @@ -81,159 +81,159 @@ const nextConfig = withNextra({ }, ], }; - // }, - // async redirects() { - // return [ - // ...OLD_TURBOREPO_ROUTES.map((route) => ({ - // source: route, - // destination: `/repo${route}`, - // permanent: true, - // })), - // { - // source: "/docs/getting-started", - // destination: "/repo/docs", - // permanent: true, - // }, - // { - // source: "/usage", - // destination: "/repo/docs/reference/command-line-reference", - // permanent: true, - // }, - // { - // source: "/docs/core-concepts/running-tasks", - // destination: "/repo/docs/core-concepts/monorepos/running-tasks", - // permanent: true, - // }, - // { - // source: "/docs/core-concepts/why-turborepo", - // destination: "/repo/docs/core-concepts/monorepos", - // permanent: true, - // }, - // { - // source: "/docs/core-concepts/filtering", - // destination: "/repo/docs/core-concepts/monorepos/filtering", - // permanent: true, - // }, - // { - // source: "/docs/guides/workspaces", - // destination: "/repo/docs/handbook/workspaces", - // permanent: true, - // }, - // { - // source: "/docs/core-concepts/workspaces", - // destination: "/repo/docs/handbook/workspaces", - // permanent: true, - // }, - // { - // source: "/docs/core-concepts/pipelines", - // destination: "/repo/docs/core-concepts/monorepos/running-tasks", - // permanent: true, - // }, - // { - // source: "/docs/guides/migrate-from-lerna", - // destination: "/repo/docs/handbook/migrating-to-a-monorepo", - // permanent: true, - // }, - // { - // source: "/discord{/}?", - // permanent: true, - // destination: "https://discord.gg/sSzyjxvbf5", - // }, - // { - // source: "/docs/changelog", - // permanent: true, - // destination: "https://github.com/vercel/turbo/releases", - // }, - // { - // source: "/docs/guides/complimentary-tools", - // permanent: 
true, - // destination: "/repo/docs/handbook", - // }, - // { - // source: "/docs/guides/monorepo-tools", - // permanent: true, - // destination: "/repo/docs/handbook", - // }, - // { - // source: "/docs/glossary", - // permanent: true, - // destination: "/repo/docs/handbook", - // }, - // { - // source: "/docs/guides/continuous-integration", - // permanent: true, - // destination: "/repo/docs/ci", - // }, - // { - // source: "/repo/docs/handbook/prisma", - // permanent: true, - // destination: "/repo/docs/handbook/tools/prisma", - // }, - // { - // source: "/pack/docs/comparisons/turbopack-vs-vite", - // permanent: true, - // destination: "/pack/docs/comparisons/vite", - // }, - // { - // source: "/pack/docs/comparisons/turbopack-vs-webpack", - // permanent: true, - // destination: "/pack/docs/comparisons/webpack", - // }, - // { - // // Accidentally created, eventually removable. See below. - // source: "/repo/docs/core-concepts/running-tasks", - // destination: "/repo/docs/core-concepts/monorepos/running-tasks", - // permanent: true, - // }, - // { - // // Accidentally created, eventually removable. See below. - // source: "/repo/docs/core-concepts/why-turborepo", - // destination: "/repo/docs/core-concepts/monorepos", - // permanent: true, - // }, - // { - // // Accidentally created, eventually removable. See below. - // source: "/repo/docs/core-concepts/filtering", - // destination: "/repo/docs/core-concepts/monorepos/filtering", - // permanent: true, - // }, - // { - // // Accidentally created, eventually removable. See below. - // source: "/repo/docs/core-concepts/pipelines", - // destination: "/repo/docs/core-concepts/monorepos/running-tasks", - // permanent: true, - // }, - // { - // // This rule accidentally created a bunch of URLs. - // // - // // They've _never_ resolved, so _eventually_ we should be able to remove the - // // redirects we added above to fix them. - // source: "/docs/features/:path*", - // permanent: true, - // destination: "/repo/docs/core-concepts/:path*", - // }, - // { - // // Accidentally created, eventually removable. See below. - // source: "/repo/docs/getting-started", - // destination: "/repo/docs", - // permanent: true, - // }, - // { - // // Accidentally created, eventually removable. See below. - // source: "/repo/docs/guides/workspaces", - // destination: "/repo/docs/handbook/workspaces", - // permanent: true, - // }, - // { - // // This rule accidentally created a bunch of URLs. - // // - // // They've _never_ resolved, so _eventually_ we should be able to remove the - // // redirects we added above to fix them. 
- // source: "/docs/:path*", - // permanent: true, - // destination: "/repo/docs/:path*", - // }, - // ]; + }, + async redirects() { + return [ + ...OLD_TURBOREPO_ROUTES.map((route) => ({ + source: route, + destination: `/repo${route}`, + permanent: true, + })), + { + source: "/docs/getting-started", + destination: "/repo/docs", + permanent: true, + }, + { + source: "/usage", + destination: "/repo/docs/reference/command-line-reference", + permanent: true, + }, + { + source: "/docs/core-concepts/running-tasks", + destination: "/repo/docs/core-concepts/monorepos/running-tasks", + permanent: true, + }, + { + source: "/docs/core-concepts/why-turborepo", + destination: "/repo/docs/core-concepts/monorepos", + permanent: true, + }, + { + source: "/docs/core-concepts/filtering", + destination: "/repo/docs/core-concepts/monorepos/filtering", + permanent: true, + }, + { + source: "/docs/guides/workspaces", + destination: "/repo/docs/handbook/workspaces", + permanent: true, + }, + { + source: "/docs/core-concepts/workspaces", + destination: "/repo/docs/handbook/workspaces", + permanent: true, + }, + { + source: "/docs/core-concepts/pipelines", + destination: "/repo/docs/core-concepts/monorepos/running-tasks", + permanent: true, + }, + { + source: "/docs/guides/migrate-from-lerna", + destination: "/repo/docs/handbook/migrating-to-a-monorepo", + permanent: true, + }, + { + source: "/discord{/}?", + permanent: true, + destination: "https://discord.gg/sSzyjxvbf5", + }, + { + source: "/docs/changelog", + permanent: true, + destination: "https://github.com/vercel/turbo/releases", + }, + { + source: "/docs/guides/complimentary-tools", + permanent: true, + destination: "/repo/docs/handbook", + }, + { + source: "/docs/guides/monorepo-tools", + permanent: true, + destination: "/repo/docs/handbook", + }, + { + source: "/docs/glossary", + permanent: true, + destination: "/repo/docs/handbook", + }, + { + source: "/docs/guides/continuous-integration", + permanent: true, + destination: "/repo/docs/ci", + }, + { + source: "/repo/docs/handbook/prisma", + permanent: true, + destination: "/repo/docs/handbook/tools/prisma", + }, + { + source: "/pack/docs/comparisons/turbopack-vs-vite", + permanent: true, + destination: "/pack/docs/comparisons/vite", + }, + { + source: "/pack/docs/comparisons/turbopack-vs-webpack", + permanent: true, + destination: "/pack/docs/comparisons/webpack", + }, + { + // Accidentally created, eventually removable. See below. + source: "/repo/docs/core-concepts/running-tasks", + destination: "/repo/docs/core-concepts/monorepos/running-tasks", + permanent: true, + }, + { + // Accidentally created, eventually removable. See below. + source: "/repo/docs/core-concepts/why-turborepo", + destination: "/repo/docs/core-concepts/monorepos", + permanent: true, + }, + { + // Accidentally created, eventually removable. See below. + source: "/repo/docs/core-concepts/filtering", + destination: "/repo/docs/core-concepts/monorepos/filtering", + permanent: true, + }, + { + // Accidentally created, eventually removable. See below. + source: "/repo/docs/core-concepts/pipelines", + destination: "/repo/docs/core-concepts/monorepos/running-tasks", + permanent: true, + }, + { + // This rule accidentally created a bunch of URLs. + // + // They've _never_ resolved, so _eventually_ we should be able to remove the + // redirects we added above to fix them. + source: "/docs/features/:path*", + permanent: true, + destination: "/repo/docs/core-concepts/:path*", + }, + { + // Accidentally created, eventually removable. 
See below. + source: "/repo/docs/getting-started", + destination: "/repo/docs", + permanent: true, + }, + { + // Accidentally created, eventually removable. See below. + source: "/repo/docs/guides/workspaces", + destination: "/repo/docs/handbook/workspaces", + permanent: true, + }, + { + // This rule accidentally created a bunch of URLs. + // + // They've _never_ resolved, so _eventually_ we should be able to remove the + // redirects we added above to fix them. + source: "/docs/:path*", + permanent: true, + destination: "/repo/docs/:path*", + }, + ]; }, }); diff --git a/docs/package.json b/docs/package.json index 8e9e3e7..4abfc5c 100644 --- a/docs/package.json +++ b/docs/package.json @@ -1,7 +1,7 @@ { "name": "docs", "version": "1.0.0", - "private": false, + "private": true, "description": "turbo.build", "scripts": { "dev": "next", @@ -11,7 +11,7 @@ "rss": "node scripts/generate-rss.js", "schema": "turbo-types-generate ./public/schema.json" }, - "author": "简律纯", + "author": "Jared Palmer", "license": "MPL-2.0", "dependencies": { "@headlessui/react": "^1.7.3", @@ -36,6 +36,7 @@ }, "devDependencies": { "@babel/core": "7.20.12", + "@turbo/types": "workspace:*", "@types/node": "^16.11.12", "@types/react": "18.0.21", "autoprefixer": "10.4.14", diff --git a/package.json b/package.json new file mode 100644 index 0000000..9bc24d8 --- /dev/null +++ b/package.json @@ -0,0 +1,63 @@ +{ + "name": "turbo-monorepo", + "version": "0.0.0", + "private": true, + "scripts": { + "build": "pnpm -- turbo run build --filter=docs", + "build:turbo": "pnpm run --filter=cli build", + "build:ts": "tsc -b tsconfig.project.json", + "check:prettier": "prettier -c .", + "check:toml": "taplo format --check", + "format": "run-p format:prettier format:rs format:toml", + "format:prettier": "prettier -w .", + "format:rs": "cargo fmt --all", + "format:toml": "taplo format", + "turbo": "pnpm run build:turbo && node turbow.js", + "turbo-prebuilt": "node turbow.js", + "docs": "pnpm -- turbo run dev --filter=docs --no-cache", + "prepare": "husky install", + "test": "turbo run test" + }, + "devDependencies": { + "@taplo/cli": "^0.5.2", + "@types/react": "18.0.26", + "eslint": "^8.29.0", + "eslint-config-next": "^13.0.6", + "eslint-config-prettier": "^8.5.0", + "husky": "^8.0.0", + "lint-staged": "^13.1.0", + "next": "^13.0.6", + "npm-run-all": "^4.1.5", + "prettier": "^2.8.7", + "semver": "^7.3.8", + "typescript": "^4.9.4" + }, + "lint-staged": { + "*.{js,jsx,ts,tsx}": [ + "eslint --ext js,jsx,ts,tsx --quiet --fix --", + "prettier --write" + ], + "*.{md,mdx,mjs,yml,yaml,css}": [ + "prettier --write" + ], + "*.go": [ + "pnpm --filter cli format" + ], + "*.toml": [ + "taplo format" + ], + "*.rs": [ + "cargo fmt --" + ] + }, + "pnpm": { + "overrides": { + "next@latest": "13.0.6" + } + }, + "packageManager": "pnpm@7.18.2", + "engines": { + "node": "16.x", + "pnpm": "7" + } +} diff --git a/packages/create-turbo/.gitignore b/packages/create-turbo/.gitignore new file mode 100644 index 0000000..47f732d --- /dev/null +++ b/packages/create-turbo/.gitignore @@ -0,0 +1 @@ +!templates/*/.npmrc diff --git a/packages/create-turbo/LICENSE b/packages/create-turbo/LICENSE new file mode 100644 index 0000000..fa0086a --- /dev/null +++ b/packages/create-turbo/LICENSE @@ -0,0 +1,373 @@ +Mozilla Public License Version 2.0 +================================== + +1. Definitions +-------------- + +1.1. "Contributor" + means each individual or legal entity that creates, contributes to + the creation of, or owns Covered Software. + +1.2. 
"Contributor Version" + means the combination of the Contributions of others (if any) used + by a Contributor and that particular Contributor's Contribution. + +1.3. "Contribution" + means Covered Software of a particular Contributor. + +1.4. "Covered Software" + means Source Code Form to which the initial Contributor has attached + the notice in Exhibit A, the Executable Form of such Source Code + Form, and Modifications of such Source Code Form, in each case + including portions thereof. + +1.5. "Incompatible With Secondary Licenses" + means + + (a) that the initial Contributor has attached the notice described + in Exhibit B to the Covered Software; or + + (b) that the Covered Software was made available under the terms of + version 1.1 or earlier of the License, but not also under the + terms of a Secondary License. + +1.6. "Executable Form" + means any form of the work other than Source Code Form. + +1.7. "Larger Work" + means a work that combines Covered Software with other material, in + a separate file or files, that is not Covered Software. + +1.8. "License" + means this document. + +1.9. "Licensable" + means having the right to grant, to the maximum extent possible, + whether at the time of the initial grant or subsequently, any and + all of the rights conveyed by this License. + +1.10. "Modifications" + means any of the following: + + (a) any file in Source Code Form that results from an addition to, + deletion from, or modification of the contents of Covered + Software; or + + (b) any new file in Source Code Form that contains any Covered + Software. + +1.11. "Patent Claims" of a Contributor + means any patent claim(s), including without limitation, method, + process, and apparatus claims, in any patent Licensable by such + Contributor that would be infringed, but for the grant of the + License, by the making, using, selling, offering for sale, having + made, import, or transfer of either its Contributions or its + Contributor Version. + +1.12. "Secondary License" + means either the GNU General Public License, Version 2.0, the GNU + Lesser General Public License, Version 2.1, the GNU Affero General + Public License, Version 3.0, or any later versions of those + licenses. + +1.13. "Source Code Form" + means the form of the work preferred for making modifications. + +1.14. "You" (or "Your") + means an individual or a legal entity exercising rights under this + License. For legal entities, "You" includes any entity that + controls, is controlled by, or is under common control with You. For + purposes of this definition, "control" means (a) the power, direct + or indirect, to cause the direction or management of such entity, + whether by contract or otherwise, or (b) ownership of more than + fifty percent (50%) of the outstanding shares or beneficial + ownership of such entity. + +2. License Grants and Conditions +-------------------------------- + +2.1. Grants + +Each Contributor hereby grants You a world-wide, royalty-free, +non-exclusive license: + +(a) under intellectual property rights (other than patent or trademark) + Licensable by such Contributor to use, reproduce, make available, + modify, display, perform, distribute, and otherwise exploit its + Contributions, either on an unmodified basis, with Modifications, or + as part of a Larger Work; and + +(b) under Patent Claims of such Contributor to make, use, sell, offer + for sale, have made, import, and otherwise transfer either its + Contributions or its Contributor Version. + +2.2. 
Effective Date + +The licenses granted in Section 2.1 with respect to any Contribution +become effective for each Contribution on the date the Contributor first +distributes such Contribution. + +2.3. Limitations on Grant Scope + +The licenses granted in this Section 2 are the only rights granted under +this License. No additional rights or licenses will be implied from the +distribution or licensing of Covered Software under this License. +Notwithstanding Section 2.1(b) above, no patent license is granted by a +Contributor: + +(a) for any code that a Contributor has removed from Covered Software; + or + +(b) for infringements caused by: (i) Your and any other third party's + modifications of Covered Software, or (ii) the combination of its + Contributions with other software (except as part of its Contributor + Version); or + +(c) under Patent Claims infringed by Covered Software in the absence of + its Contributions. + +This License does not grant any rights in the trademarks, service marks, +or logos of any Contributor (except as may be necessary to comply with +the notice requirements in Section 3.4). + +2.4. Subsequent Licenses + +No Contributor makes additional grants as a result of Your choice to +distribute the Covered Software under a subsequent version of this +License (see Section 10.2) or under the terms of a Secondary License (if +permitted under the terms of Section 3.3). + +2.5. Representation + +Each Contributor represents that the Contributor believes its +Contributions are its original creation(s) or it has sufficient rights +to grant the rights to its Contributions conveyed by this License. + +2.6. Fair Use + +This License is not intended to limit any rights You have under +applicable copyright doctrines of fair use, fair dealing, or other +equivalents. + +2.7. Conditions + +Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted +in Section 2.1. + +3. Responsibilities +------------------- + +3.1. Distribution of Source Form + +All distribution of Covered Software in Source Code Form, including any +Modifications that You create or to which You contribute, must be under +the terms of this License. You must inform recipients that the Source +Code Form of the Covered Software is governed by the terms of this +License, and how they can obtain a copy of this License. You may not +attempt to alter or restrict the recipients' rights in the Source Code +Form. + +3.2. Distribution of Executable Form + +If You distribute Covered Software in Executable Form then: + +(a) such Covered Software must also be made available in Source Code + Form, as described in Section 3.1, and You must inform recipients of + the Executable Form how they can obtain a copy of such Source Code + Form by reasonable means in a timely manner, at a charge no more + than the cost of distribution to the recipient; and + +(b) You may distribute such Executable Form under the terms of this + License, or sublicense it under different terms, provided that the + license for the Executable Form does not attempt to limit or alter + the recipients' rights in the Source Code Form under this License. + +3.3. Distribution of a Larger Work + +You may create and distribute a Larger Work under terms of Your choice, +provided that You also comply with the requirements of this License for +the Covered Software. 
If the Larger Work is a combination of Covered +Software with a work governed by one or more Secondary Licenses, and the +Covered Software is not Incompatible With Secondary Licenses, this +License permits You to additionally distribute such Covered Software +under the terms of such Secondary License(s), so that the recipient of +the Larger Work may, at their option, further distribute the Covered +Software under the terms of either this License or such Secondary +License(s). + +3.4. Notices + +You may not remove or alter the substance of any license notices +(including copyright notices, patent notices, disclaimers of warranty, +or limitations of liability) contained within the Source Code Form of +the Covered Software, except that You may alter any license notices to +the extent required to remedy known factual inaccuracies. + +3.5. Application of Additional Terms + +You may choose to offer, and to charge a fee for, warranty, support, +indemnity or liability obligations to one or more recipients of Covered +Software. However, You may do so only on Your own behalf, and not on +behalf of any Contributor. You must make it absolutely clear that any +such warranty, support, indemnity, or liability obligation is offered by +You alone, and You hereby agree to indemnify every Contributor for any +liability incurred by such Contributor as a result of warranty, support, +indemnity or liability terms You offer. You may include additional +disclaimers of warranty and limitations of liability specific to any +jurisdiction. + +4. Inability to Comply Due to Statute or Regulation +--------------------------------------------------- + +If it is impossible for You to comply with any of the terms of this +License with respect to some or all of the Covered Software due to +statute, judicial order, or regulation then You must: (a) comply with +the terms of this License to the maximum extent possible; and (b) +describe the limitations and the code they affect. Such description must +be placed in a text file included with all distributions of the Covered +Software under this License. Except to the extent prohibited by statute +or regulation, such description must be sufficiently detailed for a +recipient of ordinary skill to be able to understand it. + +5. Termination +-------------- + +5.1. The rights granted under this License will terminate automatically +if You fail to comply with any of its terms. However, if You become +compliant, then the rights granted under this License from a particular +Contributor are reinstated (a) provisionally, unless and until such +Contributor explicitly and finally terminates Your grants, and (b) on an +ongoing basis, if such Contributor fails to notify You of the +non-compliance by some reasonable means prior to 60 days after You have +come back into compliance. Moreover, Your grants from a particular +Contributor are reinstated on an ongoing basis if such Contributor +notifies You of the non-compliance by some reasonable means, this is the +first time You have received notice of non-compliance with this License +from such Contributor, and You become compliant prior to 30 days after +Your receipt of the notice. + +5.2. 
If You initiate litigation against any entity by asserting a patent +infringement claim (excluding declaratory judgment actions, +counter-claims, and cross-claims) alleging that a Contributor Version +directly or indirectly infringes any patent, then the rights granted to +You by any and all Contributors for the Covered Software under Section +2.1 of this License shall terminate. + +5.3. In the event of termination under Sections 5.1 or 5.2 above, all +end user license agreements (excluding distributors and resellers) which +have been validly granted by You or Your distributors under this License +prior to termination shall survive termination. + +************************************************************************ +* * +* 6. Disclaimer of Warranty * +* ------------------------- * +* * +* Covered Software is provided under this License on an "as is" * +* basis, without warranty of any kind, either expressed, implied, or * +* statutory, including, without limitation, warranties that the * +* Covered Software is free of defects, merchantable, fit for a * +* particular purpose or non-infringing. The entire risk as to the * +* quality and performance of the Covered Software is with You. * +* Should any Covered Software prove defective in any respect, You * +* (not any Contributor) assume the cost of any necessary servicing, * +* repair, or correction. This disclaimer of warranty constitutes an * +* essential part of this License. No use of any Covered Software is * +* authorized under this License except under this disclaimer. * +* * +************************************************************************ + +************************************************************************ +* * +* 7. Limitation of Liability * +* -------------------------- * +* * +* Under no circumstances and under no legal theory, whether tort * +* (including negligence), contract, or otherwise, shall any * +* Contributor, or anyone who distributes Covered Software as * +* permitted above, be liable to You for any direct, indirect, * +* special, incidental, or consequential damages of any character * +* including, without limitation, damages for lost profits, loss of * +* goodwill, work stoppage, computer failure or malfunction, or any * +* and all other commercial damages or losses, even if such party * +* shall have been informed of the possibility of such damages. This * +* limitation of liability shall not apply to liability for death or * +* personal injury resulting from such party's negligence to the * +* extent applicable law prohibits such limitation. Some * +* jurisdictions do not allow the exclusion or limitation of * +* incidental or consequential damages, so this exclusion and * +* limitation may not apply to You. * +* * +************************************************************************ + +8. Litigation +------------- + +Any litigation relating to this License may be brought only in the +courts of a jurisdiction where the defendant maintains its principal +place of business and such litigation shall be governed by laws of that +jurisdiction, without reference to its conflict-of-law provisions. +Nothing in this Section shall prevent a party's ability to bring +cross-claims or counter-claims. + +9. Miscellaneous +---------------- + +This License represents the complete agreement concerning the subject +matter hereof. If any provision of this License is held to be +unenforceable, such provision shall be reformed only to the extent +necessary to make it enforceable. 
Any law or regulation which provides
+that the language of a contract shall be construed against the drafter
+shall not be used to construe this License against a Contributor.
+
+10. Versions of the License
+---------------------------
+
+10.1. New Versions
+
+Mozilla Foundation is the license steward. Except as provided in Section
+10.3, no one other than the license steward has the right to modify or
+publish new versions of this License. Each version will be given a
+distinguishing version number.
+
+10.2. Effect of New Versions
+
+You may distribute the Covered Software under the terms of the version
+of the License under which You originally received the Covered Software,
+or under the terms of any subsequent version published by the license
+steward.
+
+10.3. Modified Versions
+
+If you create software not governed by this License, and you want to
+create a new license for such software, you may create and use a
+modified version of this License if you rename the license and remove
+any references to the name of the license steward (except to note that
+such modified license differs from this License).
+
+10.4. Distributing Source Code Form that is Incompatible With Secondary
+Licenses
+
+If You choose to distribute Source Code Form that is Incompatible With
+Secondary Licenses under the terms of this version of the License, the
+notice described in Exhibit B of this License must be attached.
+
+Exhibit A - Source Code Form License Notice
+-------------------------------------------
+
+ This Source Code Form is subject to the terms of the Mozilla Public
+ License, v. 2.0. If a copy of the MPL was not distributed with this
+ file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+If it is not possible or desirable to put the notice in a particular
+file, then You may include the notice in a location (such as a LICENSE
+file in a relevant directory) where a recipient would be likely to look
+for such a notice.
+
+You may add additional accurate notices of copyright ownership.
+
+Exhibit B - "Incompatible With Secondary Licenses" Notice
+---------------------------------------------------------
+
+ This Source Code Form is "Incompatible With Secondary Licenses", as
+ defined by the Mozilla Public License, v. 2.0.
\ No newline at end of file
diff --git a/packages/create-turbo/README.md b/packages/create-turbo/README.md
new file mode 100644
index 0000000..485485f
--- /dev/null
+++ b/packages/create-turbo/README.md
@@ -0,0 +1,13 @@
+# Welcome to Turborepo
+
+[Turborepo](https://turbo.build/repo) is a high-performance monorepo build system for modern JavaScript and TypeScript codebases.
+
+To get started, open a new shell and run:
+
+```sh
+npx create-turbo@latest
+```
+
+Then follow the prompts you see in your terminal.
+
+For more information about Turborepo, [visit turbo.build/repo](https://turbo.build/repo) and follow us on Twitter ([@turborepo](https://twitter.com/turborepo))!
diff --git a/packages/create-turbo/__tests__/examples.test.ts b/packages/create-turbo/__tests__/examples.test.ts new file mode 100644 index 0000000..20d4464 --- /dev/null +++ b/packages/create-turbo/__tests__/examples.test.ts @@ -0,0 +1,134 @@ +import got from "got"; +import * as Got from "got"; +import { isUrlOk, getRepoInfo, hasRepo } from "../src/utils/examples"; + +jest.mock("got", () => ({ + __esModule: true, + ...jest.requireActual("got"), +})); + +describe("examples", () => { + describe("isUrlOk", () => { + it("returns true if url returns 200", async () => { + const mockGot = jest + .spyOn(got, "head") + .mockReturnValue({ statusCode: 200 } as any); + + const url = "https://github.com/vercel/turbo/"; + const result = await isUrlOk(url); + expect(result).toBe(true); + + expect(mockGot).toHaveBeenCalledWith(url); + mockGot.mockRestore(); + }); + + it("returns false if url returns status != 200", async () => { + const mockGot = jest + .spyOn(got, "head") + .mockReturnValue({ statusCode: 401 } as any); + + const url = "https://not-github.com/vercel/turbo/"; + const result = await isUrlOk(url); + expect(result).toBe(false); + + expect(mockGot).toHaveBeenCalledWith(url); + mockGot.mockRestore(); + }); + }); + + describe("getRepoInfo", () => { + test.each([ + { + repoUrl: "https://github.com/vercel/turbo/", + examplePath: undefined, + defaultBranch: "main", + expectBranchLookup: true, + expected: { + username: "vercel", + name: "turbo", + branch: "main", + filePath: "", + }, + }, + { + repoUrl: + "https://github.com/vercel/turbo/tree/canary/examples/kitchen-sink", + examplePath: undefined, + defaultBranch: "canary", + expectBranchLookup: false, + expected: { + username: "vercel", + name: "turbo", + branch: "canary", + filePath: "examples/kitchen-sink", + }, + }, + { + repoUrl: "https://github.com/vercel/turbo/tree/tek/test-branch/", + examplePath: "examples/basic", + defaultBranch: "canary", + expectBranchLookup: false, + expected: { + username: "vercel", + name: "turbo", + branch: "tek/test-branch", + filePath: "examples/basic", + }, + }, + ])( + "retrieves repo info for $repoUrl and $examplePath", + async ({ + repoUrl, + examplePath, + defaultBranch, + expectBranchLookup, + expected, + }) => { + const mockGot = jest.spyOn(Got, "default").mockReturnValue({ + body: JSON.stringify({ default_branch: defaultBranch }), + } as any); + + const url = new URL(repoUrl); + const result = await getRepoInfo(url, examplePath); + expect(result).toMatchObject(expected); + + if (result && expectBranchLookup) { + expect(mockGot).toHaveBeenCalledWith( + `https://api.github.com/repos/${result.username}/${result.name}` + ); + } + + mockGot.mockRestore(); + } + ); + }); + + describe("hasRepo", () => { + test.each([ + { + repoInfo: { + username: "vercel", + name: "turbo", + branch: "main", + filePath: "", + }, + expected: true, + expectedUrl: + "https://api.github.com/repos/vercel/turbo/contents/package.json?ref=main", + }, + ])( + "checks repo at $expectedUrl", + async ({ expected, repoInfo, expectedUrl }) => { + const mockGot = jest + .spyOn(got, "head") + .mockReturnValue({ statusCode: 200 } as any); + + const result = await hasRepo(repoInfo); + expect(result).toBe(expected); + + expect(mockGot).toHaveBeenCalledWith(expectedUrl); + mockGot.mockRestore(); + } + ); + }); +}); diff --git a/packages/create-turbo/__tests__/git.test.ts b/packages/create-turbo/__tests__/git.test.ts new file mode 100644 index 0000000..27ac118 --- /dev/null +++ b/packages/create-turbo/__tests__/git.test.ts @@ -0,0 +1,239 @@ +import 
path from "path";
+import {
+ DEFAULT_IGNORE,
+ GIT_REPO_COMMAND,
+ HG_REPO_COMMAND,
+ isInGitRepository,
+ isInMercurialRepository,
+ tryGitInit,
+} from "../src/utils/git";
+import childProcess from "child_process";
+import { setupTestFixtures } from "@turbo/test-utils";
+
+describe("git", () => {
+ // just to make sure this doesn't get lost
+ it("default .gitignore includes .turbo", async () => {
+ expect(DEFAULT_IGNORE).toContain(".turbo");
+ });
+
+ describe("isInGitRepository", () => {
+ it("returns true when in a repo", async () => {
+ const mockExecSync = jest
+ .spyOn(childProcess, "execSync")
+ .mockReturnValue("true");
+
+ const result = isInGitRepository();
+ expect(result).toBe(true);
+
+ expect(mockExecSync).toHaveBeenCalledWith(GIT_REPO_COMMAND, {
+ stdio: "ignore",
+ });
+ mockExecSync.mockRestore();
+ });
+
+ it("returns false when not in a repo", async () => {
+ const mockExecSync = jest
+ .spyOn(childProcess, "execSync")
+ .mockImplementation(() => {
+ throw new Error(
+ "fatal: not a git repository (or any of the parent directories): .git"
+ );
+ });
+
+ const result = isInGitRepository();
+ expect(result).toBe(false);
+
+ expect(mockExecSync).toHaveBeenCalledWith(GIT_REPO_COMMAND, {
+ stdio: "ignore",
+ });
+ mockExecSync.mockRestore();
+ });
+
+ it("returns false on error", async () => {
+ const mockExecSync = jest
+ .spyOn(childProcess, "execSync")
+ .mockImplementation(() => {
+ throw new Error("unknown error");
+ });
+
+ const result = isInGitRepository();
+ expect(result).toBe(false);
+
+ expect(mockExecSync).toHaveBeenCalledWith(GIT_REPO_COMMAND, {
+ stdio: "ignore",
+ });
+ mockExecSync.mockRestore();
+ });
+ });
+
+ describe("isInMercurialRepository", () => {
+ it("returns true when in a repo", async () => {
+ const mockExecSync = jest
+ .spyOn(childProcess, "execSync")
+ .mockReturnValue("true");
+
+ const result = isInMercurialRepository();
+ expect(result).toBe(true);
+
+ expect(mockExecSync).toHaveBeenCalledWith(HG_REPO_COMMAND, {
+ stdio: "ignore",
+ });
+ mockExecSync.mockRestore();
+ });
+
+ it("returns false when not in a repo", async () => {
+ const mockExecSync = jest
+ .spyOn(childProcess, "execSync")
+ .mockImplementation(() => {
+ throw new Error("abort: no repository found (.hg not found)");
+ });
+
+ const result = isInMercurialRepository();
+ expect(result).toBe(false);
+
+ expect(mockExecSync).toHaveBeenCalledWith(HG_REPO_COMMAND, {
+ stdio: "ignore",
+ });
+ mockExecSync.mockRestore();
+ });
+
+ it("returns false on error", async () => {
+ const mockExecSync = jest
+ .spyOn(childProcess, "execSync")
+ .mockImplementation(() => {
+ throw new Error("unknown error");
+ });
+
+ const result = isInMercurialRepository();
+ expect(result).toBe(false);
+
+ expect(mockExecSync).toHaveBeenCalledWith(HG_REPO_COMMAND, {
+ stdio: "ignore",
+ });
+ mockExecSync.mockRestore();
+ });
+ });
+
+ describe("tryGitInit", () => {
+ const { useFixture } = setupTestFixtures({
+ directory: path.join(__dirname, "../"),
+ });
+
+ it("inits a repo successfully", async () => {
+ const { root } = useFixture({ fixture: `git` });
+ const mockExecSync = jest
+ .spyOn(childProcess, "execSync")
+ .mockReturnValueOnce("git version 2.38.1")
+ .mockImplementationOnce(() => {
+ throw new Error(
+ "fatal: not a git repository (or any of the parent directories): .git"
+ );
+ })
+ .mockImplementationOnce(() => {
+ throw new Error("abort: no repository found (.hg not found)");
+ })
+ .mockReturnValue("success");
+
+ const result = tryGitInit(root, "test commit");
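+ // git --version succeeded and both repo checks threw above, so init proceeds.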
expect(result).toBe(true); + + const calls = [ + "git --version", + "git init", + "git checkout -b main", + "git add -A", + 'git commit -m "test commit"', + ]; + expect(mockExecSync).toHaveBeenCalledTimes(calls.length + 2); + calls.forEach((call) => { + expect(mockExecSync).toHaveBeenCalledWith(call, { + stdio: "ignore", + }); + }); + mockExecSync.mockRestore(); + }); + + it("skips init if already in a repo", async () => { + const { root } = useFixture({ fixture: `git` }); + const mockExecSync = jest + .spyOn(childProcess, "execSync") + .mockReturnValueOnce("git version 2.38.1") + .mockReturnValueOnce("true") + .mockReturnValue("success"); + + const result = tryGitInit(root, "test commit"); + expect(result).toBe(false); + + const calls = ["git --version"]; + + // 1 call for git --version, 1 call for isInGitRepository + expect(mockExecSync).toHaveBeenCalledTimes(calls.length + 1); + calls.forEach((call) => { + expect(mockExecSync).toHaveBeenCalledWith(call, { + stdio: "ignore", + }); + }); + mockExecSync.mockRestore(); + }); + + it("returns false on unexpected error", async () => { + const { root } = useFixture({ fixture: `git` }); + const mockExecSync = jest + .spyOn(childProcess, "execSync") + .mockImplementationOnce(() => { + throw new Error("fatal: unknown command git"); + }); + + const result = tryGitInit(root, "test commit"); + expect(result).toBe(false); + + const calls = ["git --version"]; + + expect(mockExecSync).toHaveBeenCalledTimes(calls.length); + calls.forEach((call) => { + expect(mockExecSync).toHaveBeenCalledWith(call, { + stdio: "ignore", + }); + }); + mockExecSync.mockRestore(); + }); + + it("cleans up from partial init on failure", async () => { + const { root } = useFixture({ fixture: `git` }); + const mockExecSync = jest + .spyOn(childProcess, "execSync") + .mockReturnValueOnce("git version 2.38.1") + .mockImplementationOnce(() => { + throw new Error( + "fatal: not a git repository (or any of the parent directories): .git" + ); + }) + .mockImplementationOnce(() => { + throw new Error("abort: no repository found (.hg not found)"); + }) + .mockReturnValueOnce("success") + .mockReturnValueOnce("success") + .mockImplementationOnce(() => { + throw new Error("fatal: could not add files"); + }); + + const result = tryGitInit(root, "test commit"); + expect(result).toBe(false); + + const calls = [ + "git --version", + "git init", + "git checkout -b main", + "git add -A", + ]; + + expect(mockExecSync).toHaveBeenCalledTimes(calls.length + 2); + calls.forEach((call) => { + expect(mockExecSync).toHaveBeenCalledWith(call, { + stdio: "ignore", + }); + }); + mockExecSync.mockRestore(); + }); + }); +}); diff --git a/packages/create-turbo/__tests__/index.test.ts b/packages/create-turbo/__tests__/index.test.ts new file mode 100644 index 0000000..641b193 --- /dev/null +++ b/packages/create-turbo/__tests__/index.test.ts @@ -0,0 +1,90 @@ +import path from "path"; +import chalk from "chalk"; +import childProcess from "child_process"; +import { setupTestFixtures, spyConsole } from "@turbo/test-utils"; +import { create } from "../src/commands/create"; +import type { CreateCommandArgument } from "../src/commands/create/types"; +import { turboGradient } from "../src/logger"; +import type { PackageManager } from "@turbo/workspaces"; + +// imports for mocks +import * as createProject from "../src/commands/create/createProject"; +import * as turboWorkspaces from "@turbo/workspaces"; +import { getWorkspaceDetailsMockReturnValue } from "./test-utils"; + +jest.mock("@turbo/workspaces", () => ({ + 
__esModule: true, + ...jest.requireActual("@turbo/workspaces"), +})); + +describe("create-turbo", () => { + const { useFixture } = setupTestFixtures({ + directory: path.join(__dirname, "../"), + }); + + const mockConsole = spyConsole(); + + test.each<{ packageManager: PackageManager }>([ + { packageManager: "yarn" }, + { packageManager: "npm" }, + { packageManager: "pnpm" }, + ])( + "outputs expected console messages when using $packageManager", + async ({ packageManager }) => { + const { root } = useFixture({ fixture: `create-turbo` }); + + const availableScripts = ["build", "test", "dev"]; + + const mockCreateProject = jest + .spyOn(createProject, "createProject") + .mockResolvedValue({ + cdPath: "", + hasPackageJson: true, + availableScripts, + }); + + const mockGetWorkspaceDetails = jest + .spyOn(turboWorkspaces, "getWorkspaceDetails") + .mockResolvedValue( + getWorkspaceDetailsMockReturnValue({ + root, + packageManager, + }) + ); + + const mockExecSync = jest + .spyOn(childProcess, "execSync") + .mockImplementation(() => { + return "success"; + }); + + await create( + root as CreateCommandArgument, + packageManager as CreateCommandArgument, + { + skipInstall: true, + example: "default", + } + ); + + const expected = `${chalk.bold( + turboGradient(">>> Success!") + )} Created a new Turborepo at "${path.relative(process.cwd(), root)}".`; + + expect(mockConsole.log).toHaveBeenCalledWith(expected); + expect(mockConsole.log).toHaveBeenCalledWith( + "Inside that directory, you can run several commands:" + ); + + availableScripts.forEach((script) => { + expect(mockConsole.log).toHaveBeenCalledWith( + chalk.cyan(` ${packageManager} run ${script}`) + ); + }); + + mockCreateProject.mockRestore(); + mockGetWorkspaceDetails.mockRestore(); + mockExecSync.mockRestore(); + } + ); +}); diff --git a/packages/create-turbo/__tests__/isFolderEmpty.test.ts b/packages/create-turbo/__tests__/isFolderEmpty.test.ts new file mode 100644 index 0000000..66b2310 --- /dev/null +++ b/packages/create-turbo/__tests__/isFolderEmpty.test.ts @@ -0,0 +1,41 @@ +import fs from "fs-extra"; +import path from "path"; +import { isFolderEmpty } from "../src/utils/isFolderEmpty"; +import { setupTestFixtures } from "@turbo/test-utils"; + +describe("isFolderEmpty", () => { + const { useFixture } = setupTestFixtures({ + directory: path.join(__dirname, "../"), + }); + + it("correctly identifies an empty directory", async () => { + const { root } = useFixture({ fixture: `is-folder-empty` }); + const result = isFolderEmpty(root); + expect(result.isEmpty).toEqual(true); + expect(result.conflicts).toEqual([]); + }); + + it("correctly identifies a directory with non-conflicting files", async () => { + const { root } = useFixture({ fixture: `is-folder-empty` }); + fs.writeFileSync(path.join(root, "LICENSE"), "MIT"); + const result = isFolderEmpty(root); + expect(result.isEmpty).toEqual(true); + expect(result.conflicts).toEqual([]); + }); + + it("correctly identifies a directory with non-conflicting files (IntelliJ)", async () => { + const { root } = useFixture({ fixture: `is-folder-empty` }); + fs.writeFileSync(path.join(root, "intellij-idea-config.iml"), "{}"); + const result = isFolderEmpty(root); + expect(result.isEmpty).toEqual(true); + expect(result.conflicts).toEqual([]); + }); + + it("correctly identifies a directory with conflicting files", async () => { + const { root } = useFixture({ fixture: `is-folder-empty` }); + fs.writeFileSync(path.join(root, "README.md"), "my cool project"); + const result = isFolderEmpty(root); +
expect(result.isEmpty).toEqual(false); + expect(result.conflicts).toEqual(["README.md"]); + }); +}); diff --git a/packages/create-turbo/__tests__/isWritable.test.ts b/packages/create-turbo/__tests__/isWritable.test.ts new file mode 100644 index 0000000..b06670b --- /dev/null +++ b/packages/create-turbo/__tests__/isWritable.test.ts @@ -0,0 +1,35 @@ +import path from "path"; +import { isWriteable } from "../src/utils/isWriteable"; +import { setupTestFixtures } from "@turbo/test-utils"; +import fs from "fs-extra"; + +describe("isWriteable", () => { + const { useFixture } = setupTestFixtures({ + directory: path.join(__dirname, "../"), + }); + + it("correctly identifies a writeable directory", async () => { + const { root } = useFixture({ fixture: `is-writeable` }); + const result = await isWriteable(root); + expect(result).toEqual(true); + }); + + it("correctly identifies a non-writeable directory", async () => { + const { root } = useFixture({ fixture: `is-writeable` }); + const result = await isWriteable(path.join(root, "does-not-exist")); + expect(result).toEqual(false); + }); + + it("returns false on unexpected failure", async () => { + const { root } = useFixture({ fixture: `is-writeable` }); + const mockFsAccess = jest + .spyOn(fs, "access") + .mockRejectedValue(new Error("unknown error")); + + const result = await isWriteable(root); + expect(result).toEqual(false); + expect(mockFsAccess).toHaveBeenCalledWith(root, fs.constants.W_OK); + + mockFsAccess.mockRestore(); + }); +}); diff --git a/packages/create-turbo/__tests__/test-utils.ts b/packages/create-turbo/__tests__/test-utils.ts new file mode 100644 index 0000000..fa6c204 --- /dev/null +++ b/packages/create-turbo/__tests__/test-utils.ts @@ -0,0 +1,34 @@ +import path from "path"; +import { PackageManager } from "@turbo/workspaces"; + +export function getWorkspaceDetailsMockReturnValue({ + root, + packageManager = "npm", +}: { + root: string; + packageManager: PackageManager; +}) { + return { + name: "mock-project", + packageManager, + paths: { + root, + packageJson: path.join(root, "package.json"), + lockfile: path.join(root, "yarn.lock"), + nodeModules: path.join(root, "node_modules"), + }, + workspaceData: { + globs: ["packages/*"], + workspaces: [ + { + name: "packages/mock-package", + paths: { + root: path.join(root, "packages/mock-package"), + packageJson: path.join(root, "packages/mock-package/package.json"), + nodeModules: path.join(root, "packages/mock-package/node_modules"), + }, + }, + ], + }, + }; +} diff --git a/packages/create-turbo/jest.config.js b/packages/create-turbo/jest.config.js new file mode 100644 index 0000000..b738f4b --- /dev/null +++ b/packages/create-turbo/jest.config.js @@ -0,0 +1,11 @@ +/** @type {import('ts-jest/dist/types').InitialOptionsTsJest} */ +module.exports = { + preset: "ts-jest/presets/js-with-ts", + testEnvironment: "node", + testPathIgnorePatterns: ["/__fixtures__/", "/__tests__/test-utils.ts"], + coveragePathIgnorePatterns: ["/__fixtures__/", "/__tests__/test-utils.ts"], + transformIgnorePatterns: ["/node_modules/(?!(ansi-regex)/)"], + modulePathIgnorePatterns: ["/node_modules", "/dist"], + collectCoverage: true, + verbose: true, +}; diff --git a/packages/create-turbo/package.json b/packages/create-turbo/package.json new file mode 100644 index 0000000..9f723ba --- /dev/null +++ b/packages/create-turbo/package.json @@ -0,0 +1,65 @@ +{ + "name": "create-turbo", + "version": "1.9.4-canary.2", + "description": "Create a new Turborepo", + "homepage": "https://turbo.build/repo", + "license": 
"MPL-2.0", + "repository": { + "type": "git", + "url": "https://github.com/vercel/turbo", + "directory": "packages/create-turbo" + }, + "bugs": { + "url": "https://github.com/vercel/turbo/issues" + }, + "bin": { + "create-turbo": "dist/cli.js" + }, + "scripts": { + "build": "tsup", + "test": "jest", + "lint": "eslint src/**/*.ts", + "check-types": "tsc --noEmit" + }, + "dependencies": { + "async-retry": "^1.3.3", + "chalk": "2.4.2", + "commander": "^10.0.0", + "cross-spawn": "^7.0.3", + "execa": "5.1.1", + "fs-extra": "^10.1.0", + "got": "^11.8.5", + "gradient-string": "^2.0.0", + "inquirer": "^8.0.0", + "ora": "4.1.1", + "rimraf": "^3.0.2", + "semver": "^7.3.8", + "tar": "6.1.13", + "update-check": "^1.5.4" + }, + "devDependencies": { + "@turbo/workspaces": "workspace:*", + "@types/async-retry": "^1.4.5", + "@types/cross-spawn": "^6.0.2", + "@types/fs-extra": "^9.0.13", + "@types/gradient-string": "^1.1.2", + "@types/inquirer": "^7.3.1", + "@types/jest": "^27.4.0", + "@types/node": "^16.11.12", + "@types/rimraf": "^3.0.2", + "@types/semver": "^7.3.9", + "@types/tar": "^6.1.4", + "eslint": "^7.23.0", + "jest": "^27.4.3", + "strip-ansi": "^6.0.1", + "ts-jest": "^27.1.1", + "@turbo/tsconfig": "workspace:*", + "tsup": "^5.10.3", + "@turbo/utils": "workspace:*", + "@turbo/test-utils": "workspace:*", + "typescript": "^4.5.5" + }, + "files": [ + "dist" + ] +} diff --git a/packages/create-turbo/src/cli.ts b/packages/create-turbo/src/cli.ts new file mode 100644 index 0000000..1290a13 --- /dev/null +++ b/packages/create-turbo/src/cli.ts @@ -0,0 +1,65 @@ +#!/usr/bin/env node + +import chalk from "chalk"; +import { Command } from "commander"; +import notifyUpdate from "./utils/notifyUpdate"; +import { turboGradient, error } from "./logger"; + +import { create } from "./commands"; +import cliPkg from "../package.json"; + +const createTurboCli = new Command(); + +// create +createTurboCli + .name(chalk.bold(turboGradient("create-turbo"))) + .description("Create a new Turborepo") + .usage(`${chalk.bold(" ")} [options]`) + .argument("[project-directory]") + .argument("[package-manager]") + .option( + "--skip-install", + "Do not run a package manager install after creating the project", + false + ) + .option( + "--skip-transforms", + "Do not run any code transformation after creating the project", + false + ) + .option( + "-e, --example [name]|[github-url]", + ` + An example to bootstrap the app with. You can use an example name + from the official Turborepo repo or a GitHub URL. The URL can use + any branch and/or subdirectory +` + ) + .option( + "-p, --example-path ", + ` + In a rare case, your GitHub URL might contain a branch name with + a slash (e.g. bug/fix-1) and the path to the example (e.g. foo/bar). + In this case, you must specify the path to the example separately: + --example-path foo/bar +` + ) + .version(cliPkg.version, "-v, --version", "output the current version") + .helpOption() + .action(create); + +createTurboCli + .parseAsync() + .then(notifyUpdate) + .catch(async (reason) => { + console.log(); + if (reason.command) { + error(`${chalk.bold(reason.command)} has failed.`); + } else { + error("Unexpected error. 
Please report it as a bug:"); + console.log(reason); + } + console.log(); + await notifyUpdate(); + process.exit(1); + }); diff --git a/packages/create-turbo/src/commands/create/createProject.ts b/packages/create-turbo/src/commands/create/createProject.ts new file mode 100644 index 0000000..0c1d2ac --- /dev/null +++ b/packages/create-turbo/src/commands/create/createProject.ts @@ -0,0 +1,192 @@ +import retry from "async-retry"; +import chalk from "chalk"; +import fs from "fs-extra"; +import path from "path"; + +import { + downloadAndExtractExample, + downloadAndExtractRepo, + getRepoInfo, + existsInRepo, + hasRepo, + RepoInfo, +} from "../../utils/examples"; +import { isFolderEmpty } from "../../utils/isFolderEmpty"; +import { isWriteable } from "../../utils/isWriteable"; +import { turboLoader, error } from "../../logger"; +import { isDefaultExample } from "../../utils/isDefaultExample"; + +export class DownloadError extends Error {} + +export async function createProject({ + appPath, + example, + examplePath, +}: { + appPath: string; + example: string; + examplePath?: string; +}): Promise<{ + cdPath: string; + hasPackageJson: boolean; + availableScripts: Array; + repoInfo?: RepoInfo; +}> { + let repoInfo: RepoInfo | undefined; + let repoUrl: URL | undefined; + const defaultExample = isDefaultExample(example); + + try { + repoUrl = new URL(example); + } catch (err: any) { + if (err.code !== "ERR_INVALID_URL") { + error(err); + process.exit(1); + } + } + + if (repoUrl) { + if (repoUrl.origin !== "https://github.com") { + error( + `Invalid URL: ${chalk.red( + `"${example}"` + )}. Only GitHub repositories are supported. Please use a GitHub URL and try again.` + ); + process.exit(1); + } + + repoInfo = await getRepoInfo(repoUrl, examplePath); + + if (!repoInfo) { + error( + `Unable to fetch repository information from: ${chalk.red( + `"${example}"` + )}. Please fix the URL and try again.` + ); + process.exit(1); + } + + const found = await hasRepo(repoInfo); + + if (!found) { + error( + `Could not locate the repository for ${chalk.red( + `"${example}"` + )}. Please check that the repository exists and try again.` + ); + process.exit(1); + } + } else { + const found = await existsInRepo(example); + + if (!found) { + error( + `Could not locate an example named ${chalk.red( + `"${example}"` + )}. It could be due to the following:\n`, + `1. Your spelling of example ${chalk.red( + `"${example}"` + )} might be incorrect.\n`, + `2. You might not be connected to the internet or you are behind a proxy.` + ); + process.exit(1); + } + } + + const root = path.resolve(appPath); + + if (!(await isWriteable(path.dirname(root)))) { + error( + "The application path is not writable, please check folder permissions and try again." + ); + error("It is likely you do not have write permissions for this folder."); + process.exit(1); + } + + const appName = path.basename(root); + try { + await fs.mkdir(root, { recursive: true }); + } catch (err) { + error("Unable to create project directory"); + console.error(err); + process.exit(1); + } + const { isEmpty, conflicts } = isFolderEmpty(root); + if (!isEmpty) { + error( + `${chalk.dim(root)} has ${conflicts.length} conflicting ${ + conflicts.length === 1 ? 
"file" : "files" + } - please try a different location` + ); + process.exit(1); + } + + const originalDirectory = process.cwd(); + process.chdir(root); + + /** + * clone the example repository + */ + const loader = turboLoader("Downloading files..."); + try { + if (repoInfo) { + console.log( + `\nDownloading files from repo ${chalk.cyan( + example + )}. This might take a moment.` + ); + console.log(); + loader.start(); + await retry(() => downloadAndExtractRepo(root, repoInfo as RepoInfo), { + retries: 3, + }); + } else { + console.log( + `\nDownloading files${ + !defaultExample ? ` for example ${chalk.cyan(example)}` : "" + }. This might take a moment.` + ); + console.log(); + loader.start(); + await retry(() => downloadAndExtractExample(root, example), { + retries: 3, + }); + } + } catch (reason) { + function isErrorLike(err: unknown): err is { message: string } { + return ( + typeof err === "object" && + err !== null && + typeof (err as { message?: unknown }).message === "string" + ); + } + throw new DownloadError(isErrorLike(reason) ? reason.message : reason + ""); + } finally { + loader.stop(); + } + + const rootPackageJsonPath = path.join(root, "package.json"); + const hasPackageJson = fs.existsSync(rootPackageJsonPath); + const availableScripts = []; + + if (hasPackageJson) { + let packageJsonContent; + try { + packageJsonContent = fs.readJsonSync(rootPackageJsonPath); + } catch { + // ignore + } + + if (packageJsonContent) { + // read the scripts from the package.json + availableScripts.push(...Object.keys(packageJsonContent.scripts || {})); + } + } + + let cdPath: string = appPath; + if (path.join(originalDirectory, appName) === appPath) { + cdPath = appName; + } + + return { cdPath, hasPackageJson, availableScripts, repoInfo }; +} diff --git a/packages/create-turbo/src/commands/create/index.ts b/packages/create-turbo/src/commands/create/index.ts new file mode 100644 index 0000000..419328b --- /dev/null +++ b/packages/create-turbo/src/commands/create/index.ts @@ -0,0 +1,243 @@ +import path from "path"; +import chalk from "chalk"; +import type { Project } from "@turbo/workspaces"; +import { + getWorkspaceDetails, + install, + getPackageManagerMeta, + ConvertError, +} from "@turbo/workspaces"; +import { getAvailablePackageManagers } from "@turbo/utils"; +import type { CreateCommandArgument, CreateCommandOptions } from "./types"; +import * as prompts from "./prompts"; +import { createProject } from "./createProject"; +import { tryGitCommit, tryGitInit } from "../../utils/git"; +import { isOnline } from "../../utils/isOnline"; +import { transforms } from "../../transforms"; +import { turboGradient, turboLoader, info, error, warn } from "../../logger"; +import { TransformError } from "../../transforms/errors"; + +function handleErrors(err: unknown) { + // handle errors from ../../transforms + if (err instanceof TransformError) { + error(chalk.bold(err.transform), chalk.red(err.message)); + if (err.fatal) { + process.exit(1); + } + // handle errors from @turbo/workspaces + } else if (err instanceof ConvertError && err.type !== "unknown") { + error(chalk.red(err.message)); + process.exit(1); + // handle unknown errors (no special handling, just re-throw to catch at root) + } else { + throw err; + } +} + +const SCRIPTS_TO_DISPLAY: Record = { + build: "Build", + dev: "Develop", + test: "Test", + lint: "Lint", +}; + +export async function create( + directory: CreateCommandArgument, + packageManager: CreateCommandArgument, + opts: CreateCommandOptions +) { + const { skipInstall, 
diff --git a/packages/create-turbo/src/commands/create/index.ts b/packages/create-turbo/src/commands/create/index.ts new file mode 100644 index 0000000..419328b --- /dev/null +++ b/packages/create-turbo/src/commands/create/index.ts @@ -0,0 +1,243 @@ +import path from "path"; +import chalk from "chalk"; +import type { Project } from "@turbo/workspaces"; +import { + getWorkspaceDetails, + install, + getPackageManagerMeta, + ConvertError, +} from "@turbo/workspaces"; +import { getAvailablePackageManagers } from "@turbo/utils"; +import type { CreateCommandArgument, CreateCommandOptions } from "./types"; +import * as prompts from "./prompts"; +import { createProject } from "./createProject"; +import { tryGitCommit, tryGitInit } from "../../utils/git"; +import { isOnline } from "../../utils/isOnline"; +import { transforms } from "../../transforms"; +import { turboGradient, turboLoader, info, error, warn } from "../../logger"; +import { TransformError } from "../../transforms/errors"; + +function handleErrors(err: unknown) { + // handle errors from ../../transforms + if (err instanceof TransformError) { + error(chalk.bold(err.transform), chalk.red(err.message)); + if (err.fatal) { + process.exit(1); + } + // handle errors from @turbo/workspaces + } else if (err instanceof ConvertError && err.type !== "unknown") { + error(chalk.red(err.message)); + process.exit(1); + // handle unknown errors (no special handling, just re-throw to catch at root) + } else { + throw err; + } +} + +const SCRIPTS_TO_DISPLAY: Record<string, string> = { + build: "Build", + dev: "Develop", + test: "Test", + lint: "Lint", +}; + +export async function create( + directory: CreateCommandArgument, + packageManager: CreateCommandArgument, + opts: CreateCommandOptions +) { + const { skipInstall, skipTransforms } = opts; + console.log(chalk.bold(turboGradient(`\n>>> TURBOREPO\n`))); + info(`Welcome to Turborepo! Let's get you set up with a new codebase.`); + console.log(); + + const [online, availablePackageManagers] = await Promise.all([ + isOnline(), + getAvailablePackageManagers(), + ]); + + if (!online) { + error( + "You appear to be offline. Please check your network connection and try again." + ); + process.exit(1); + } + const { root, projectName } = await prompts.directory({ directory }); + const relativeProjectDir = path.relative(process.cwd(), root); + const projectDirIsCurrentDir = relativeProjectDir === ""; + + // selected package manager can be undefined if the user chooses to skip transforms + const selectedPackageManagerDetails = await prompts.packageManager({ + packageManager, + skipTransforms, + }); + + if (packageManager && opts.skipTransforms) { + warn( + "--skip-transforms conflicts with <package-manager>. The package manager argument will be ignored." + ); + } + + const { example, examplePath } = opts; + const exampleName = example && example !== "default" ? example : "basic"; + const { hasPackageJson, availableScripts, repoInfo } = await createProject({ + appPath: root, + example: exampleName, + examplePath, + }); + + // create a new git repo after creating the project + tryGitInit(root, `feat(create-turbo): create ${exampleName}`); + + // read the project after creating it to get details about workspaces, package manager, etc. + let project: Project = {} as Project; + try { + project = await getWorkspaceDetails({ root }); + } catch (err) { + handleErrors(err); + } + + // run any required transforms + if (!skipTransforms) { + for (const transform of transforms) { + try { + const transformResult = await transform({ + example: { + repo: repoInfo, + name: exampleName, + }, + project, + prompts: { + projectName, + root, + packageManager: selectedPackageManagerDetails, + }, + opts, + }); + if (transformResult.result === "success") { + tryGitCommit( + `feat(create-turbo): apply ${transformResult.name} transform` + ); + } + } catch (err) { + handleErrors(err); + } + } + } + + // if the user opted out of transforms, the package manager will be the same as the source example + const projectPackageManager = + skipTransforms || !selectedPackageManagerDetails + ? { + name: project.packageManager, + version: availablePackageManagers[project.packageManager].version, + } + : selectedPackageManagerDetails; + + info("Created a new Turborepo with the following:"); + console.log(); + if (project.workspaceData.workspaces.length > 0) { + const workspacesForDisplay = project.workspaceData.workspaces + .map((w) => ({ + group: path.relative(root, w.paths.root).split(path.sep)?.[0] || "", + title: path.relative(root, w.paths.root), + description: w.description, + })) + .sort((a, b) => a.title.localeCompare(b.title)); + + let lastGroup: string | undefined; + workspacesForDisplay.forEach(({ group, title, description }, idx) => { + if (idx === 0 || group !== lastGroup) { + console.log(chalk.cyan(group)); + } + console.log( + ` - ${chalk.bold(title)}${description ? 
`: ${description}` : ""}` + ); + lastGroup = group; + }); + } else { + console.log(chalk.cyan("apps")); + console.log(` - ${chalk.bold(projectName)}`); + } + + // run install + console.log(); + if (hasPackageJson && !skipInstall) { + // in the case when the user opted out of transforms, but not install, we need to make sure the package manager is available + // before we attempt an install + if ( + opts.skipTransforms && + !availablePackageManagers[project.packageManager].available + ) { + warn( + `Unable to install dependencies - "${exampleName}" uses "${project.packageManager}" which could not be found.` + ); + warn( + `Try running without "--skip-transforms" to convert "${exampleName}" to a package manager that is available on your system.` + ); + console.log(); + } else if (projectPackageManager) { + console.log("Installing packages. This might take a couple of minutes."); + console.log(); + + const loader = turboLoader("Installing dependencies...").start(); + await install({ + project, + to: projectPackageManager, + options: { + interactive: false, + }, + }); + + tryGitCommit("feat(create-turbo): install dependencies"); + loader.stop(); + } + } + + if (projectDirIsCurrentDir) { + console.log( + `${chalk.bold( + turboGradient(">>> Success!") + )} Your new Turborepo is ready.` + ); + } else { + console.log( + `${chalk.bold( + turboGradient(">>> Success!") + )} Created a new Turborepo at "${relativeProjectDir}".` + ); + } + + // get the package manager details so we display the right commands to the user in log messages + const packageManagerMeta = getPackageManagerMeta(projectPackageManager); + if (packageManagerMeta && hasPackageJson) { + console.log( + `Inside ${ + projectDirIsCurrentDir ? "this" : "that" + } directory, you can run several commands:` + ); + console.log(); + availableScripts + .filter((script) => SCRIPTS_TO_DISPLAY[script]) + .forEach((script) => { + console.log( + chalk.cyan(` ${packageManagerMeta.command} run ${script}`) + ); + console.log(` ${SCRIPTS_TO_DISPLAY[script]} all apps and packages`); + console.log(); + }); + console.log(`Turborepo will cache locally by default. 
For an additional`); + console.log(`speed boost, enable Remote Caching with Vercel by`); + console.log(`entering the following command:`); + console.log(); + console.log(chalk.cyan(` ${packageManagerMeta.executable} turbo login`)); + console.log(); + console.log(`We suggest that you begin by typing:`); + console.log(); + if (!projectDirIsCurrentDir) { + console.log(` ${chalk.cyan("cd")} ${relativeProjectDir}`); + } + console.log(chalk.cyan(` ${packageManagerMeta.executable} turbo login`)); + console.log(); + } +} diff --git a/packages/create-turbo/src/commands/create/prompts.ts b/packages/create-turbo/src/commands/create/prompts.ts new file mode 100644 index 0000000..a5ed7bf --- /dev/null +++ b/packages/create-turbo/src/commands/create/prompts.ts @@ -0,0 +1,124 @@ +import path from "path"; +import fs from "fs-extra"; +import chalk from "chalk"; +import type { PackageManager } from "@turbo/workspaces"; +import type { CreateCommandArgument } from "./types"; +import { getAvailablePackageManagers } from "@turbo/utils"; +import { isFolderEmpty } from "../../utils/isFolderEmpty"; +import inquirer from "inquirer"; + +function validateDirectory(directory: string): { + valid: boolean; + root: string; + projectName: string; + error?: string; +} { + const root = path.resolve(directory); + const projectName = path.basename(root); + const exists = fs.existsSync(root); + + const stat = fs.lstatSync(root, { throwIfNoEntry: false }); + if (stat && !stat.isDirectory()) { + return { + valid: false, + root, + projectName, + error: `${chalk.dim( + projectName + )} is not a directory - please try a different location`, + }; + } + + if (exists) { + const { isEmpty, conflicts } = isFolderEmpty(root); + if (!isEmpty) { + return { + valid: false, + root, + projectName, + error: `${chalk.dim(projectName)} has ${conflicts.length} conflicting ${ + conflicts.length === 1 ? 
"file" : "files" + } - please try a different location`, + }; + } + } + + return { valid: true, root, projectName }; +} + +export async function directory({ + directory, +}: { + directory: CreateCommandArgument; +}) { + const projectDirectoryAnswer = await inquirer.prompt<{ + projectDirectory: string; + }>({ + type: "input", + name: "projectDirectory", + message: "Where would you like to create your turborepo?", + when: !directory, + default: "./my-turborepo", + validate: (directory: string) => { + const { valid, error } = validateDirectory(directory); + if (!valid && error) { + return error; + } + return true; + }, + filter: (directory: string) => directory.trim(), + }); + + const { projectDirectory: selectedProjectDirectory = directory as string } = + projectDirectoryAnswer; + + return validateDirectory(selectedProjectDirectory); +} + +export async function packageManager({ + packageManager, + skipTransforms, +}: { + packageManager: CreateCommandArgument; + skipTransforms?: boolean; +}) { + // if skip transforms is passed, we don't need to ask about the package manager (because that requires a transform) + if (skipTransforms) { + return undefined; + } + + const availablePackageManagers = await getAvailablePackageManagers(); + const packageManagerAnswer = await inquirer.prompt<{ + packageManagerInput?: PackageManager; + }>({ + name: "packageManagerInput", + type: "list", + message: "Which package manager do you want to use?", + when: + // prompt for package manager if it wasn't provided as an argument, or if it was + // provided, but isn't available (always allow npm) + !packageManager || + (packageManager as PackageManager) !== "npm" || + !Object.keys(availablePackageManagers).includes(packageManager), + choices: ["npm", "pnpm", "yarn"].map((p) => ({ + name: p, + value: p, + disabled: + // npm should always be available + p === "npm" || + availablePackageManagers?.[p as PackageManager]?.available + ? 
false + : `not installed`, + })), + }); + + const { + packageManagerInput: + selectedPackageManager = packageManager as PackageManager, + } = packageManagerAnswer; + + return { + name: selectedPackageManager, + version: availablePackageManagers[selectedPackageManager].version, + }; +} diff --git a/packages/create-turbo/src/commands/create/types.ts b/packages/create-turbo/src/commands/create/types.ts new file mode 100644 index 0000000..094c8d2 --- /dev/null +++ b/packages/create-turbo/src/commands/create/types.ts @@ -0,0 +1,8 @@ +export type CreateCommandArgument = string | undefined; + +export interface CreateCommandOptions { + skipInstall?: boolean; + skipTransforms?: boolean; + example?: string; + examplePath?: string; +} diff --git a/packages/create-turbo/src/commands/index.ts b/packages/create-turbo/src/commands/index.ts new file mode 100644 index 0000000..7c5f96b --- /dev/null +++ b/packages/create-turbo/src/commands/index.ts @@ -0,0 +1 @@ +export { create } from "./create"; diff --git a/packages/create-turbo/src/logger.ts b/packages/create-turbo/src/logger.ts new file mode 100644 index 0000000..ee6d584 --- /dev/null +++ b/packages/create-turbo/src/logger.ts @@ -0,0 +1,32 @@ +import chalk from "chalk"; +import ora from "ora"; +import gradient from "gradient-string"; + +const BLUE = "#0099F7"; +const RED = "#F11712"; +const YELLOW = "#FFFF00"; + +export const turboGradient = gradient(BLUE, RED); +export const turboBlue = chalk.hex(BLUE); +export const turboRed = chalk.hex(RED); +export const yellow = chalk.hex(YELLOW); + +export const turboLoader = (text: string) => + ora({ + text, + spinner: { + frames: [" ", turboBlue("> "), turboBlue(">> "), turboBlue(">>>")], + }, + }); + +export const info = (...args: any[]) => { + console.log(turboBlue.bold(">>>"), ...args); +}; + +export const error = (...args: any[]) => { + console.error(turboRed.bold(">>>"), ...args); +}; + +export const warn = (...args: any[]) => { + console.error(yellow.bold(">>>"), ...args); +}; diff --git a/packages/create-turbo/src/transforms/errors.ts b/packages/create-turbo/src/transforms/errors.ts new file mode 100644 index 0000000..a5b8a7a --- /dev/null +++ b/packages/create-turbo/src/transforms/errors.ts @@ -0,0 +1,17 @@ +export type TransformErrorOptions = { + transform?: string; + fatal?: boolean; +}; + +export class TransformError extends Error { + public transform: string; + public fatal: boolean; + + constructor(message: string, opts?: TransformErrorOptions) { + super(message); + this.name = "TransformError"; + this.transform = opts?.transform ?? "unknown"; + this.fatal = opts?.fatal ?? 
true; + Error.captureStackTrace(this, TransformError); + } +} diff --git a/packages/create-turbo/src/transforms/git-ignore.ts b/packages/create-turbo/src/transforms/git-ignore.ts new file mode 100644 index 0000000..bb61ca7 --- /dev/null +++ b/packages/create-turbo/src/transforms/git-ignore.ts @@ -0,0 +1,30 @@ +import path from "path"; +import fs from "fs-extra"; +import { DEFAULT_IGNORE } from "../utils/git"; +import { TransformInput, TransformResult } from "./types"; +import { TransformError } from "./errors"; + +const meta = { + name: "git-ignore", +}; + +export async function transform(args: TransformInput): TransformResult { + const { prompts } = args; + const ignorePath = path.join(prompts.root, ".gitignore"); + try { + if (!fs.existsSync(ignorePath)) { + fs.writeFileSync(ignorePath, DEFAULT_IGNORE); + } else { + return { result: "not-applicable", ...meta }; + } + } catch (err) { + // existsSync cannot throw, so we don't need to narrow here and can + // assume this came from writeFileSync + throw new TransformError("Unable to write .gitignore", { + transform: meta.name, + fatal: false, + }); + } + + return { result: "success", ...meta }; +} diff --git a/packages/create-turbo/src/transforms/index.ts b/packages/create-turbo/src/transforms/index.ts new file mode 100644 index 0000000..1918ecc --- /dev/null +++ b/packages/create-turbo/src/transforms/index.ts @@ -0,0 +1,13 @@ +import { transform as packageManagerTransform } from "./package-manager"; +import { transform as officialStarter } from "./official-starter"; +import { transform as gitIgnoreTransform } from "./git-ignore"; +import type { TransformInput, TransformResult } from "./types"; + +/** + * In the future, we may want to support sourcing additional transforms from the templates themselves. + */ +export const transforms: Array<(args: TransformInput) => TransformResult> = [ + officialStarter, + gitIgnoreTransform, + packageManagerTransform, +]; diff --git a/packages/create-turbo/src/transforms/official-starter.ts b/packages/create-turbo/src/transforms/official-starter.ts new file mode 100644 index 0000000..1d71909 --- /dev/null +++ b/packages/create-turbo/src/transforms/official-starter.ts @@ -0,0 +1,73 @@ +import path from "path"; +import fs from "fs-extra"; +import semverPrerelease from "semver/functions/prerelease"; +import cliPkgJson from "../../package.json"; +import { isDefaultExample } from "../utils/isDefaultExample"; +import { TransformInput, TransformResult } from "./types"; +import { TransformError } from "./errors"; + +const meta = { + name: "official-starter", +}; + +// applied to "official starter" examples (those hosted within vercel/turbo/examples) +export async function transform(args: TransformInput): TransformResult { + const { prompts, example } = args; + + const defaultExample = isDefaultExample(example.name); + const isOfficialStarter = + !example.repo || + (example.repo?.username === "vercel" && example.repo?.name === "turbo"); + + if (!isOfficialStarter) { + return { result: "not-applicable", ...meta }; + } + + // paths + const rootPackageJsonPath = path.join(prompts.root, "package.json"); + const rootMetaJsonPath = path.join(prompts.root, "meta.json"); + const hasPackageJson = fs.existsSync(rootPackageJsonPath); + + // 1. 
remove meta file (used for generating the examples page on turbo.build) + try { + fs.rmSync(rootMetaJsonPath, { force: true }); + } catch (_err) {} + + if (hasPackageJson) { + let packageJsonContent; + try { + packageJsonContent = fs.readJsonSync(rootPackageJsonPath); + } catch { + throw new TransformError("Unable to read package.json", { + transform: meta.name, + fatal: false, + }); + } + + // if using the basic example, set the name to the project name (legacy behavior) + if (packageJsonContent) { + if (defaultExample) { + packageJsonContent.name = prompts.projectName; + } + + // if we're using a pre-release version of create-turbo, install turbo canary instead of latest + const shouldUsePreRelease = semverPrerelease(cliPkgJson.version) !== null; + if (shouldUsePreRelease && packageJsonContent?.devDependencies?.turbo) { + packageJsonContent.devDependencies.turbo = "canary"; + } + + try { + fs.writeJsonSync(rootPackageJsonPath, packageJsonContent, { + spaces: 2, + }); + } catch (err) { + throw new TransformError("Unable to write package.json", { + transform: meta.name, + fatal: false, + }); + } + } + } + + return { result: "success", ...meta }; +}
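+ +// e.g. with this package's own version "1.9.4-canary.2", semverPrerelease +// returns ["canary", 2] (non-null), so a prerelease build of create-turbo +// repins the example's "turbo" devDependency to "canary"; for a stable +// release it returns null and the example's pin is left untouched.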
diff --git a/packages/create-turbo/src/transforms/package-manager.ts b/packages/create-turbo/src/transforms/package-manager.ts new file mode 100644 index 0000000..9c0af24 --- /dev/null +++ b/packages/create-turbo/src/transforms/package-manager.ts @@ -0,0 +1,26 @@ +import { convert } from "@turbo/workspaces"; +import { TransformInput, TransformResult } from "./types"; + +const meta = { + name: "package-manager", +}; + +export async function transform(args: TransformInput): TransformResult { + const { project, prompts } = args; + const { root, packageManager } = prompts; + + if (packageManager && project.packageManager !== packageManager.name) { + await convert({ + root, + to: packageManager.name, + options: { + // skip install after conversion - we will do it later + skipInstall: true, + }, + }); + } else { + return { result: "not-applicable", ...meta }; + } + + return { result: "success", ...meta }; +} diff --git a/packages/create-turbo/src/transforms/types.ts b/packages/create-turbo/src/transforms/types.ts new file mode 100644 index 0000000..6a8e141 --- /dev/null +++ b/packages/create-turbo/src/transforms/types.ts @@ -0,0 +1,30 @@ +import { CreateCommandOptions } from "../commands/create/types"; +import { RepoInfo } from "../utils/examples"; +import type { Project, PackageManager } from "@turbo/workspaces"; + +export interface TransformInput { + example: { + repo: RepoInfo | undefined; + name: string; + }; + project: Project; + prompts: { + projectName: string; + root: string; + packageManager: + | { + name: PackageManager; + version: string | undefined; + } + | undefined; + }; + opts: CreateCommandOptions; +} + +export interface TransformResponse { + // errors should be thrown as instances of TransformError + result: "not-applicable" | "success"; + name: string; +} + +export type TransformResult = Promise<TransformResponse>; diff --git a/packages/create-turbo/src/utils/examples.ts b/packages/create-turbo/src/utils/examples.ts new file mode 100644 index 0000000..b7c4812 --- /dev/null +++ b/packages/create-turbo/src/utils/examples.ts @@ -0,0 +1,139 @@ +import got from "got"; +import tar from "tar"; +import { Stream } from "stream"; +import { promisify } from "util"; +import { join } from "path"; +import { tmpdir } from "os"; +import { createWriteStream, promises as fs } from "fs"; + +const pipeline = promisify(Stream.pipeline); + +export type RepoInfo = { + username: string; + name: string; + branch: string; + filePath: string; +}; + +export async function isUrlOk(url: string): Promise<boolean> { + try { + const res = await got.head(url); + return res.statusCode === 200; + } catch (err) { + return false; + } +} + +export async function getRepoInfo( + url: URL, + examplePath?: string +): Promise<RepoInfo | undefined> { + const [, username, name, tree, sourceBranch, ...file] = + url.pathname.split("/"); + const filePath = examplePath + ? examplePath.replace(/^\//, "") + : file.join("/"); + + if ( + // Support repos whose entire purpose is to be a Turborepo example, e.g. + // https://github.com/:username/:my-cool-turborepo-example-repo-name. + tree === undefined || + // Support GitHub URLs that end with a trailing slash, e.g. + // https://github.com/:username/:my-cool-turborepo-example-repo-name/ + // In this case "tree" will be an empty string while "sourceBranch" will be undefined + (tree === "" && sourceBranch === undefined) + ) { + try { + const infoResponse = await got( + `https://api.github.com/repos/${username}/${name}` + ); + const info = JSON.parse(infoResponse.body); + return { username, name, branch: info["default_branch"], filePath }; + } catch (err) { + return; + } + } + + // If examplePath is available, the branch name takes the entire path + const branch = examplePath + ? `${sourceBranch}/${file.join("/")}`.replace( + new RegExp(`/${filePath}|/$`), + "" + ) + : sourceBranch; + + if (username && name && branch && tree === "tree") { + return { username, name, branch, filePath }; + } +}
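+ +// Worked example (mirrored in __tests__/examples.test.ts): for +// https://github.com/vercel/turbo/tree/canary/examples/kitchen-sink the +// pathname splits into username "vercel", name "turbo", tree "tree", +// sourceBranch "canary" and file ["examples", "kitchen-sink"], so this +// returns { username: "vercel", name: "turbo", branch: "canary", filePath: "examples/kitchen-sink" }; +// a bare URL like https://github.com/vercel/turbo/ instead resolves its +// default branch via the GitHub API.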
+ +export function hasRepo({ + username, + name, + branch, + filePath, +}: RepoInfo): Promise<boolean> { + const contentsUrl = `https://api.github.com/repos/${username}/${name}/contents`; + const packagePath = `${filePath ? `/${filePath}` : ""}/package.json`; + + return isUrlOk(contentsUrl + packagePath + `?ref=${branch}`); +} + +export function existsInRepo(nameOrUrl: string): Promise<boolean> { + try { + const url = new URL(nameOrUrl); + return isUrlOk(url.href); + } catch { + return isUrlOk( + `https://api.github.com/repos/vercel/turbo/contents/examples/${encodeURIComponent( + nameOrUrl + )}` + ); + } +} + +async function downloadTar(url: string, name: string) { + const tempFile = join(tmpdir(), `${name}.temp-${Date.now()}`); + await pipeline(got.stream(url), createWriteStream(tempFile)); + return tempFile; +} + +export async function downloadAndExtractRepo( + root: string, + { username, name, branch, filePath }: RepoInfo +) { + const tempFile = await downloadTar( + `https://codeload.github.com/${username}/${name}/tar.gz/${branch}`, + `turbo-ct-example` + ); + + await tar.x({ + file: tempFile, + cwd: root, + strip: filePath ? filePath.split("/").length + 1 : 1, + filter: (p: string) => + p.startsWith( + `${name}-${branch.replace(/\//g, "-")}${ + filePath ? `/${filePath}/` : "/" + }` + ), + }); + + await fs.unlink(tempFile); +}
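+ +// e.g. for { username: "vercel", name: "turbo", branch: "canary", filePath: "examples/basic" } +// the tarball entries look like "turbo-canary/examples/basic/package.json": +// strip drops the first three path segments and the filter keeps only +// entries under "turbo-canary/examples/basic/".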
+ +export async function downloadAndExtractExample(root: string, name: string) { + const tempFile = await downloadTar( + `https://codeload.github.com/vercel/turbo/tar.gz/main`, + `turbo-ct-example` + ); + + await tar.x({ + file: tempFile, + cwd: root, + strip: 2 + name.split("/").length, + filter: (p: string) => p.includes(`turbo-main/examples/${name}/`), + }); + + await fs.unlink(tempFile); +} diff --git a/packages/create-turbo/src/utils/git.ts b/packages/create-turbo/src/utils/git.ts new file mode 100644 index 0000000..593e7ea --- /dev/null +++ b/packages/create-turbo/src/utils/git.ts @@ -0,0 +1,90 @@ +import fs from "fs-extra"; +import { execSync } from "child_process"; +import path from "path"; +import rimraf from "rimraf"; + +export const DEFAULT_IGNORE = ` +# See https://help.github.com/articles/ignoring-files/ for more about ignoring files. + +# dependencies +node_modules +.pnp +.pnp.js + +# testing +coverage + +# misc +.DS_Store +*.pem + +# debug +npm-debug.log* +yarn-debug.log* +yarn-error.log* + +# turbo +.turbo + +# vercel +.vercel +`; + +export const GIT_REPO_COMMAND = "git rev-parse --is-inside-work-tree"; +export const HG_REPO_COMMAND = "hg --cwd . root"; + +export function isInGitRepository(): boolean { + try { + execSync(GIT_REPO_COMMAND, { stdio: "ignore" }); + return true; + } catch (_) {} + return false; +} + +export function isInMercurialRepository(): boolean { + try { + execSync(HG_REPO_COMMAND, { stdio: "ignore" }); + return true; + } catch (_) {} + return false; +} + +export function tryGitInit(root: string, message: string): boolean { + let didInit = false; + try { + execSync("git --version", { stdio: "ignore" }); + if (isInGitRepository() || isInMercurialRepository()) { + return false; + } + + execSync("git init", { stdio: "ignore" }); + didInit = true; + + execSync("git checkout -b main", { stdio: "ignore" }); + + execSync("git add -A", { stdio: "ignore" }); + execSync(`git commit -m "${message}"`, { + stdio: "ignore", + }); + return true; + } catch (err) { + if (didInit) { + try { + rimraf.sync(path.join(root, ".git")); + } catch (_) {} + } + return false; + } +} + +export function tryGitCommit(message: string): boolean { + try { + execSync("git add -A", { stdio: "ignore" }); + execSync(`git commit -m "${message}"`, { + stdio: "ignore", + }); + return true; + } catch (err) { + return false; + } +} diff --git a/packages/create-turbo/src/utils/isDefaultExample.ts b/packages/create-turbo/src/utils/isDefaultExample.ts new file mode 100644 index 0000000..9fb2ef2 --- /dev/null +++ b/packages/create-turbo/src/utils/isDefaultExample.ts @@ -0,0 +1,5 @@ +export const DEFAULT_EXAMPLES = new Set(["basic", "default"]); + +export function isDefaultExample(example: string): boolean { + return DEFAULT_EXAMPLES.has(example); +} diff --git a/packages/create-turbo/src/utils/isFolderEmpty.ts b/packages/create-turbo/src/utils/isFolderEmpty.ts new file mode 100644 index 0000000..4de2d58 --- /dev/null +++ b/packages/create-turbo/src/utils/isFolderEmpty.ts @@ -0,0 +1,37 @@ +import fs from "fs-extra"; + +const VALID_FILES = [ + ".DS_Store", + ".git", + ".gitattributes", + ".gitignore", + ".gitlab-ci.yml", + ".hg", + ".hgcheck", + ".hgignore", + ".idea", + ".npmignore", + ".travis.yml", + "LICENSE", + "Thumbs.db", + "docs", + "mkdocs.yml", + "npm-debug.log", + "yarn-debug.log", + "yarn-error.log", + "yarnrc.yml", + ".yarn", +]; + +export function isFolderEmpty(root: string): { + isEmpty: boolean; + conflicts: Array<string>; +} { + const conflicts = fs + .readdirSync(root) + .filter((file) => !VALID_FILES.includes(file)) + // Support IntelliJ IDEA-based editors + .filter((file) => !/\.iml$/.test(file)); + + return { isEmpty: conflicts.length === 0, conflicts }; +} diff --git a/packages/create-turbo/src/utils/isOnline.ts b/packages/create-turbo/src/utils/isOnline.ts new file mode 100644 index 0000000..f02b2e6 --- /dev/null +++ b/packages/create-turbo/src/utils/isOnline.ts @@ -0,0 +1,40 @@ +import { execSync } from "child_process"; +import dns from "dns"; +import url from "url"; + +function getProxy(): string | undefined { + if (process.env.https_proxy) { + return process.env.https_proxy; + } + + try { + const httpsProxy = execSync("npm config get https-proxy").toString().trim(); + return httpsProxy !== "null" ? httpsProxy : undefined; + } catch (e) { + return; + } +} + +export function isOnline(): Promise<boolean> { + return new Promise((resolve) => { + dns.lookup("registry.yarnpkg.com", (registryErr) => { + if (!registryErr) { + return resolve(true); + } + + const proxy = getProxy(); + if (!proxy) { + return resolve(false); + } + + const { hostname } = url.parse(proxy); + if (!hostname) { + return resolve(false); + } + + dns.lookup(hostname, (proxyErr) => { + resolve(proxyErr == null); + }); + }); + }); +} diff --git a/packages/create-turbo/src/utils/isWriteable.ts b/packages/create-turbo/src/utils/isWriteable.ts new file mode 100644 index 0000000..132c42a --- /dev/null +++ b/packages/create-turbo/src/utils/isWriteable.ts @@ -0,0 +1,10 @@ +import fs from "fs-extra"; + +export async function isWriteable(directory: string): Promise<boolean> { + try { + await fs.access(directory, (fs.constants || fs).W_OK); + return true; + } catch (err) { + return false; + } +} diff --git a/packages/create-turbo/src/utils/notifyUpdate.ts b/packages/create-turbo/src/utils/notifyUpdate.ts new file mode 100644 index 0000000..e1dadc0 --- /dev/null +++ b/packages/create-turbo/src/utils/notifyUpdate.ts @@ -0,0 +1,22 @@ +import chalk from "chalk"; +import checkForUpdate from "update-check"; + +import cliPkgJson from "../../package.json"; + +const update = checkForUpdate(cliPkgJson).catch(() => null); + +export default async function notifyUpdate(): Promise<void> { + try { + const res = await update; + if (res?.latest) { + console.log(); + console.log( + chalk.yellow.bold("A new version of `create-turbo` is available!") + ); + console.log(); + } + process.exit(); + } catch (_e: any) { + // ignore error + } +} diff --git a/packages/create-turbo/tsconfig.json b/packages/create-turbo/tsconfig.json new file mode 100644 index 0000000..abcb2c6 --- /dev/null +++ b/packages/create-turbo/tsconfig.json @@ -0,0 +1,7 @@ +{ + "extends": "@turbo/tsconfig/library.json", + "exclude": ["templates"], + "compilerOptions": { + "rootDir": "." 
+ } +} diff --git a/packages/create-turbo/tsup.config.ts b/packages/create-turbo/tsup.config.ts new file mode 100644 index 0000000..18b0666 --- /dev/null +++ b/packages/create-turbo/tsup.config.ts @@ -0,0 +1,9 @@ +import { defineConfig, Options } from "tsup"; + +export default defineConfig((options: Options) => ({ + entry: ["src/cli.ts"], + format: ["cjs"], + clean: true, + minify: true, + ...options, +})); diff --git a/packages/create-turbo/turbo.json b/packages/create-turbo/turbo.json new file mode 100644 index 0000000..6466b2d --- /dev/null +++ b/packages/create-turbo/turbo.json @@ -0,0 +1,12 @@ +{ + "$schema": "../../docs/public/schema.json", + "extends": ["//"], + "pipeline": { + "test": { + "dependsOn": ["build"] + }, + "build": { + "dependsOn": ["^build"] + } + } +} diff --git a/packages/eslint-config-turbo/LICENSE b/packages/eslint-config-turbo/LICENSE new file mode 100644 index 0000000..fa0086a --- /dev/null +++ b/packages/eslint-config-turbo/LICENSE @@ -0,0 +1,373 @@ +Mozilla Public License Version 2.0 +================================== + +1. Definitions +-------------- + +1.1. "Contributor" + means each individual or legal entity that creates, contributes to + the creation of, or owns Covered Software. + +1.2. "Contributor Version" + means the combination of the Contributions of others (if any) used + by a Contributor and that particular Contributor's Contribution. + +1.3. "Contribution" + means Covered Software of a particular Contributor. + +1.4. "Covered Software" + means Source Code Form to which the initial Contributor has attached + the notice in Exhibit A, the Executable Form of such Source Code + Form, and Modifications of such Source Code Form, in each case + including portions thereof. + +1.5. "Incompatible With Secondary Licenses" + means + + (a) that the initial Contributor has attached the notice described + in Exhibit B to the Covered Software; or + + (b) that the Covered Software was made available under the terms of + version 1.1 or earlier of the License, but not also under the + terms of a Secondary License. + +1.6. "Executable Form" + means any form of the work other than Source Code Form. + +1.7. "Larger Work" + means a work that combines Covered Software with other material, in + a separate file or files, that is not Covered Software. + +1.8. "License" + means this document. + +1.9. "Licensable" + means having the right to grant, to the maximum extent possible, + whether at the time of the initial grant or subsequently, any and + all of the rights conveyed by this License. + +1.10. "Modifications" + means any of the following: + + (a) any file in Source Code Form that results from an addition to, + deletion from, or modification of the contents of Covered + Software; or + + (b) any new file in Source Code Form that contains any Covered + Software. + +1.11. "Patent Claims" of a Contributor + means any patent claim(s), including without limitation, method, + process, and apparatus claims, in any patent Licensable by such + Contributor that would be infringed, but for the grant of the + License, by the making, using, selling, offering for sale, having + made, import, or transfer of either its Contributions or its + Contributor Version. + +1.12. "Secondary License" + means either the GNU General Public License, Version 2.0, the GNU + Lesser General Public License, Version 2.1, the GNU Affero General + Public License, Version 3.0, or any later versions of those + licenses. + +1.13. 
"Source Code Form" + means the form of the work preferred for making modifications. + +1.14. "You" (or "Your") + means an individual or a legal entity exercising rights under this + License. For legal entities, "You" includes any entity that + controls, is controlled by, or is under common control with You. For + purposes of this definition, "control" means (a) the power, direct + or indirect, to cause the direction or management of such entity, + whether by contract or otherwise, or (b) ownership of more than + fifty percent (50%) of the outstanding shares or beneficial + ownership of such entity. + +2. License Grants and Conditions +-------------------------------- + +2.1. Grants + +Each Contributor hereby grants You a world-wide, royalty-free, +non-exclusive license: + +(a) under intellectual property rights (other than patent or trademark) + Licensable by such Contributor to use, reproduce, make available, + modify, display, perform, distribute, and otherwise exploit its + Contributions, either on an unmodified basis, with Modifications, or + as part of a Larger Work; and + +(b) under Patent Claims of such Contributor to make, use, sell, offer + for sale, have made, import, and otherwise transfer either its + Contributions or its Contributor Version. + +2.2. Effective Date + +The licenses granted in Section 2.1 with respect to any Contribution +become effective for each Contribution on the date the Contributor first +distributes such Contribution. + +2.3. Limitations on Grant Scope + +The licenses granted in this Section 2 are the only rights granted under +this License. No additional rights or licenses will be implied from the +distribution or licensing of Covered Software under this License. +Notwithstanding Section 2.1(b) above, no patent license is granted by a +Contributor: + +(a) for any code that a Contributor has removed from Covered Software; + or + +(b) for infringements caused by: (i) Your and any other third party's + modifications of Covered Software, or (ii) the combination of its + Contributions with other software (except as part of its Contributor + Version); or + +(c) under Patent Claims infringed by Covered Software in the absence of + its Contributions. + +This License does not grant any rights in the trademarks, service marks, +or logos of any Contributor (except as may be necessary to comply with +the notice requirements in Section 3.4). + +2.4. Subsequent Licenses + +No Contributor makes additional grants as a result of Your choice to +distribute the Covered Software under a subsequent version of this +License (see Section 10.2) or under the terms of a Secondary License (if +permitted under the terms of Section 3.3). + +2.5. Representation + +Each Contributor represents that the Contributor believes its +Contributions are its original creation(s) or it has sufficient rights +to grant the rights to its Contributions conveyed by this License. + +2.6. Fair Use + +This License is not intended to limit any rights You have under +applicable copyright doctrines of fair use, fair dealing, or other +equivalents. + +2.7. Conditions + +Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted +in Section 2.1. + +3. Responsibilities +------------------- + +3.1. Distribution of Source Form + +All distribution of Covered Software in Source Code Form, including any +Modifications that You create or to which You contribute, must be under +the terms of this License. 
You must inform recipients that the Source +Code Form of the Covered Software is governed by the terms of this +License, and how they can obtain a copy of this License. You may not +attempt to alter or restrict the recipients' rights in the Source Code +Form. + +3.2. Distribution of Executable Form + +If You distribute Covered Software in Executable Form then: + +(a) such Covered Software must also be made available in Source Code + Form, as described in Section 3.1, and You must inform recipients of + the Executable Form how they can obtain a copy of such Source Code + Form by reasonable means in a timely manner, at a charge no more + than the cost of distribution to the recipient; and + +(b) You may distribute such Executable Form under the terms of this + License, or sublicense it under different terms, provided that the + license for the Executable Form does not attempt to limit or alter + the recipients' rights in the Source Code Form under this License. + +3.3. Distribution of a Larger Work + +You may create and distribute a Larger Work under terms of Your choice, +provided that You also comply with the requirements of this License for +the Covered Software. If the Larger Work is a combination of Covered +Software with a work governed by one or more Secondary Licenses, and the +Covered Software is not Incompatible With Secondary Licenses, this +License permits You to additionally distribute such Covered Software +under the terms of such Secondary License(s), so that the recipient of +the Larger Work may, at their option, further distribute the Covered +Software under the terms of either this License or such Secondary +License(s). + +3.4. Notices + +You may not remove or alter the substance of any license notices +(including copyright notices, patent notices, disclaimers of warranty, +or limitations of liability) contained within the Source Code Form of +the Covered Software, except that You may alter any license notices to +the extent required to remedy known factual inaccuracies. + +3.5. Application of Additional Terms + +You may choose to offer, and to charge a fee for, warranty, support, +indemnity or liability obligations to one or more recipients of Covered +Software. However, You may do so only on Your own behalf, and not on +behalf of any Contributor. You must make it absolutely clear that any +such warranty, support, indemnity, or liability obligation is offered by +You alone, and You hereby agree to indemnify every Contributor for any +liability incurred by such Contributor as a result of warranty, support, +indemnity or liability terms You offer. You may include additional +disclaimers of warranty and limitations of liability specific to any +jurisdiction. + +4. Inability to Comply Due to Statute or Regulation +--------------------------------------------------- + +If it is impossible for You to comply with any of the terms of this +License with respect to some or all of the Covered Software due to +statute, judicial order, or regulation then You must: (a) comply with +the terms of this License to the maximum extent possible; and (b) +describe the limitations and the code they affect. Such description must +be placed in a text file included with all distributions of the Covered +Software under this License. Except to the extent prohibited by statute +or regulation, such description must be sufficiently detailed for a +recipient of ordinary skill to be able to understand it. + +5. Termination +-------------- + +5.1. 
The rights granted under this License will terminate automatically +if You fail to comply with any of its terms. However, if You become +compliant, then the rights granted under this License from a particular +Contributor are reinstated (a) provisionally, unless and until such +Contributor explicitly and finally terminates Your grants, and (b) on an +ongoing basis, if such Contributor fails to notify You of the +non-compliance by some reasonable means prior to 60 days after You have +come back into compliance. Moreover, Your grants from a particular +Contributor are reinstated on an ongoing basis if such Contributor +notifies You of the non-compliance by some reasonable means, this is the +first time You have received notice of non-compliance with this License +from such Contributor, and You become compliant prior to 30 days after +Your receipt of the notice. + +5.2. If You initiate litigation against any entity by asserting a patent +infringement claim (excluding declaratory judgment actions, +counter-claims, and cross-claims) alleging that a Contributor Version +directly or indirectly infringes any patent, then the rights granted to +You by any and all Contributors for the Covered Software under Section +2.1 of this License shall terminate. + +5.3. In the event of termination under Sections 5.1 or 5.2 above, all +end user license agreements (excluding distributors and resellers) which +have been validly granted by You or Your distributors under this License +prior to termination shall survive termination. + +************************************************************************ +* * +* 6. Disclaimer of Warranty * +* ------------------------- * +* * +* Covered Software is provided under this License on an "as is" * +* basis, without warranty of any kind, either expressed, implied, or * +* statutory, including, without limitation, warranties that the * +* Covered Software is free of defects, merchantable, fit for a * +* particular purpose or non-infringing. The entire risk as to the * +* quality and performance of the Covered Software is with You. * +* Should any Covered Software prove defective in any respect, You * +* (not any Contributor) assume the cost of any necessary servicing, * +* repair, or correction. This disclaimer of warranty constitutes an * +* essential part of this License. No use of any Covered Software is * +* authorized under this License except under this disclaimer. * +* * +************************************************************************ + +************************************************************************ +* * +* 7. Limitation of Liability * +* -------------------------- * +* * +* Under no circumstances and under no legal theory, whether tort * +* (including negligence), contract, or otherwise, shall any * +* Contributor, or anyone who distributes Covered Software as * +* permitted above, be liable to You for any direct, indirect, * +* special, incidental, or consequential damages of any character * +* including, without limitation, damages for lost profits, loss of * +* goodwill, work stoppage, computer failure or malfunction, or any * +* and all other commercial damages or losses, even if such party * +* shall have been informed of the possibility of such damages. This * +* limitation of liability shall not apply to liability for death or * +* personal injury resulting from such party's negligence to the * +* extent applicable law prohibits such limitation. 
Some * +* jurisdictions do not allow the exclusion or limitation of * +* incidental or consequential damages, so this exclusion and * +* limitation may not apply to You. * +* * +************************************************************************ + +8. Litigation +------------- + +Any litigation relating to this License may be brought only in the +courts of a jurisdiction where the defendant maintains its principal +place of business and such litigation shall be governed by laws of that +jurisdiction, without reference to its conflict-of-law provisions. +Nothing in this Section shall prevent a party's ability to bring +cross-claims or counter-claims. + +9. Miscellaneous +---------------- + +This License represents the complete agreement concerning the subject +matter hereof. If any provision of this License is held to be +unenforceable, such provision shall be reformed only to the extent +necessary to make it enforceable. Any law or regulation which provides +that the language of a contract shall be construed against the drafter +shall not be used to construe this License against a Contributor. + +10. Versions of the License +--------------------------- + +10.1. New Versions + +Mozilla Foundation is the license steward. Except as provided in Section +10.3, no one other than the license steward has the right to modify or +publish new versions of this License. Each version will be given a +distinguishing version number. + +10.2. Effect of New Versions + +You may distribute the Covered Software under the terms of the version +of the License under which You originally received the Covered Software, +or under the terms of any subsequent version published by the license +steward. + +10.3. Modified Versions + +If you create software not governed by this License, and you want to +create a new license for such software, you may create and use a +modified version of this License if you rename the license and remove +any references to the name of the license steward (except to note that +such modified license differs from this License). + +10.4. Distributing Source Code Form that is Incompatible With Secondary +Licenses + +If You choose to distribute Source Code Form that is Incompatible With +Secondary Licenses under the terms of this version of the License, the +notice described in Exhibit B of this License must be attached. + +Exhibit A - Source Code Form License Notice +------------------------------------------- + + This Source Code Form is subject to the terms of the Mozilla Public + License, v. 2.0. If a copy of the MPL was not distributed with this + file, You can obtain one at http://mozilla.org/MPL/2.0/. + +If it is not possible or desirable to put the notice in a particular +file, then You may include the notice in a location (such as a LICENSE +file in a relevant directory) where a recipient would be likely to look +for such a notice. + +You may add additional accurate notices of copyright ownership. + +Exhibit B - "Incompatible With Secondary Licenses" Notice +--------------------------------------------------------- + + This Source Code Form is "Incompatible With Secondary Licenses", as + defined by the Mozilla Public License, v. 2.0. \ No newline at end of file diff --git a/packages/eslint-config-turbo/README.md b/packages/eslint-config-turbo/README.md new file mode 100644 index 0000000..834b887 --- /dev/null +++ b/packages/eslint-config-turbo/README.md @@ -0,0 +1,27 @@ +# `eslint-config-turbo` + +Ease configuration for Turborepo + +## Installation + +1. 
You'll first need to install [ESLint](https://eslint.org/): + +```sh +npm install eslint --save-dev +``` + +2. Next, install `eslint-config-turbo`: + +```sh +npm install eslint-config-turbo --save-dev +``` + +## Usage + +Add `turbo` to the extends section of your eslint configuration file. You can omit the `eslint-config-` prefix: + +```json +{ + "extends": ["turbo"] +} +``` diff --git a/packages/eslint-config-turbo/index.js b/packages/eslint-config-turbo/index.js new file mode 100644 index 0000000..5d932b3 --- /dev/null +++ b/packages/eslint-config-turbo/index.js @@ -0,0 +1,3 @@ +module.exports = { + extends: ["plugin:turbo/recommended"], +}; diff --git a/packages/eslint-config-turbo/package.json b/packages/eslint-config-turbo/package.json new file mode 100644 index 0000000..88717ca --- /dev/null +++ b/packages/eslint-config-turbo/package.json @@ -0,0 +1,32 @@ +{ + "name": "eslint-config-turbo", + "version": "1.9.4-canary.2", + "description": "ESLint config for Turborepo", + "repository": { + "type": "git", + "url": "https://github.com/vercel/turbo", + "directory": "packages/eslint-config-turbo" + }, + "bugs": { + "url": "https://github.com/vercel/turbo/issues" + }, + "keywords": [ + "turbo", + "eslint", + "turborepo", + "eslintconfig", + "eslint-config" + ], + "main": "index.js", + "author": "Vercel", + "dependencies": { + "eslint-plugin-turbo": "workspace:*" + }, + "peerDependencies": { + "eslint": ">6.6.0" + }, + "license": "MPL-2.0", + "devDependencies": { + "@types/eslint": "^8.4.5" + } +} diff --git a/packages/eslint-plugin-turbo/LICENSE b/packages/eslint-plugin-turbo/LICENSE new file mode 100644 index 0000000..fa0086a --- /dev/null +++ b/packages/eslint-plugin-turbo/LICENSE @@ -0,0 +1,373 @@ +Mozilla Public License Version 2.0 +================================== + +1. Definitions +-------------- + +1.1. "Contributor" + means each individual or legal entity that creates, contributes to + the creation of, or owns Covered Software. + +1.2. "Contributor Version" + means the combination of the Contributions of others (if any) used + by a Contributor and that particular Contributor's Contribution. + +1.3. "Contribution" + means Covered Software of a particular Contributor. + +1.4. "Covered Software" + means Source Code Form to which the initial Contributor has attached + the notice in Exhibit A, the Executable Form of such Source Code + Form, and Modifications of such Source Code Form, in each case + including portions thereof. + +1.5. "Incompatible With Secondary Licenses" + means + + (a) that the initial Contributor has attached the notice described + in Exhibit B to the Covered Software; or + + (b) that the Covered Software was made available under the terms of + version 1.1 or earlier of the License, but not also under the + terms of a Secondary License. + +1.6. "Executable Form" + means any form of the work other than Source Code Form. + +1.7. "Larger Work" + means a work that combines Covered Software with other material, in + a separate file or files, that is not Covered Software. + +1.8. "License" + means this document. + +1.9. "Licensable" + means having the right to grant, to the maximum extent possible, + whether at the time of the initial grant or subsequently, any and + all of the rights conveyed by this License. + +1.10. 
"Modifications" + means any of the following: + + (a) any file in Source Code Form that results from an addition to, + deletion from, or modification of the contents of Covered + Software; or + + (b) any new file in Source Code Form that contains any Covered + Software. + +1.11. "Patent Claims" of a Contributor + means any patent claim(s), including without limitation, method, + process, and apparatus claims, in any patent Licensable by such + Contributor that would be infringed, but for the grant of the + License, by the making, using, selling, offering for sale, having + made, import, or transfer of either its Contributions or its + Contributor Version. + +1.12. "Secondary License" + means either the GNU General Public License, Version 2.0, the GNU + Lesser General Public License, Version 2.1, the GNU Affero General + Public License, Version 3.0, or any later versions of those + licenses. + +1.13. "Source Code Form" + means the form of the work preferred for making modifications. + +1.14. "You" (or "Your") + means an individual or a legal entity exercising rights under this + License. For legal entities, "You" includes any entity that + controls, is controlled by, or is under common control with You. For + purposes of this definition, "control" means (a) the power, direct + or indirect, to cause the direction or management of such entity, + whether by contract or otherwise, or (b) ownership of more than + fifty percent (50%) of the outstanding shares or beneficial + ownership of such entity. + +2. License Grants and Conditions +-------------------------------- + +2.1. Grants + +Each Contributor hereby grants You a world-wide, royalty-free, +non-exclusive license: + +(a) under intellectual property rights (other than patent or trademark) + Licensable by such Contributor to use, reproduce, make available, + modify, display, perform, distribute, and otherwise exploit its + Contributions, either on an unmodified basis, with Modifications, or + as part of a Larger Work; and + +(b) under Patent Claims of such Contributor to make, use, sell, offer + for sale, have made, import, and otherwise transfer either its + Contributions or its Contributor Version. + +2.2. Effective Date + +The licenses granted in Section 2.1 with respect to any Contribution +become effective for each Contribution on the date the Contributor first +distributes such Contribution. + +2.3. Limitations on Grant Scope + +The licenses granted in this Section 2 are the only rights granted under +this License. No additional rights or licenses will be implied from the +distribution or licensing of Covered Software under this License. +Notwithstanding Section 2.1(b) above, no patent license is granted by a +Contributor: + +(a) for any code that a Contributor has removed from Covered Software; + or + +(b) for infringements caused by: (i) Your and any other third party's + modifications of Covered Software, or (ii) the combination of its + Contributions with other software (except as part of its Contributor + Version); or + +(c) under Patent Claims infringed by Covered Software in the absence of + its Contributions. + +This License does not grant any rights in the trademarks, service marks, +or logos of any Contributor (except as may be necessary to comply with +the notice requirements in Section 3.4). + +2.4. 
Subsequent Licenses + +No Contributor makes additional grants as a result of Your choice to +distribute the Covered Software under a subsequent version of this +License (see Section 10.2) or under the terms of a Secondary License (if +permitted under the terms of Section 3.3). + +2.5. Representation + +Each Contributor represents that the Contributor believes its +Contributions are its original creation(s) or it has sufficient rights +to grant the rights to its Contributions conveyed by this License. + +2.6. Fair Use + +This License is not intended to limit any rights You have under +applicable copyright doctrines of fair use, fair dealing, or other +equivalents. + +2.7. Conditions + +Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted +in Section 2.1. + +3. Responsibilities +------------------- + +3.1. Distribution of Source Form + +All distribution of Covered Software in Source Code Form, including any +Modifications that You create or to which You contribute, must be under +the terms of this License. You must inform recipients that the Source +Code Form of the Covered Software is governed by the terms of this +License, and how they can obtain a copy of this License. You may not +attempt to alter or restrict the recipients' rights in the Source Code +Form. + +3.2. Distribution of Executable Form + +If You distribute Covered Software in Executable Form then: + +(a) such Covered Software must also be made available in Source Code + Form, as described in Section 3.1, and You must inform recipients of + the Executable Form how they can obtain a copy of such Source Code + Form by reasonable means in a timely manner, at a charge no more + than the cost of distribution to the recipient; and + +(b) You may distribute such Executable Form under the terms of this + License, or sublicense it under different terms, provided that the + license for the Executable Form does not attempt to limit or alter + the recipients' rights in the Source Code Form under this License. + +3.3. Distribution of a Larger Work + +You may create and distribute a Larger Work under terms of Your choice, +provided that You also comply with the requirements of this License for +the Covered Software. If the Larger Work is a combination of Covered +Software with a work governed by one or more Secondary Licenses, and the +Covered Software is not Incompatible With Secondary Licenses, this +License permits You to additionally distribute such Covered Software +under the terms of such Secondary License(s), so that the recipient of +the Larger Work may, at their option, further distribute the Covered +Software under the terms of either this License or such Secondary +License(s). + +3.4. Notices + +You may not remove or alter the substance of any license notices +(including copyright notices, patent notices, disclaimers of warranty, +or limitations of liability) contained within the Source Code Form of +the Covered Software, except that You may alter any license notices to +the extent required to remedy known factual inaccuracies. + +3.5. Application of Additional Terms + +You may choose to offer, and to charge a fee for, warranty, support, +indemnity or liability obligations to one or more recipients of Covered +Software. However, You may do so only on Your own behalf, and not on +behalf of any Contributor. 
You must make it absolutely clear that any +such warranty, support, indemnity, or liability obligation is offered by +You alone, and You hereby agree to indemnify every Contributor for any +liability incurred by such Contributor as a result of warranty, support, +indemnity or liability terms You offer. You may include additional +disclaimers of warranty and limitations of liability specific to any +jurisdiction. + +4. Inability to Comply Due to Statute or Regulation +--------------------------------------------------- + +If it is impossible for You to comply with any of the terms of this +License with respect to some or all of the Covered Software due to +statute, judicial order, or regulation then You must: (a) comply with +the terms of this License to the maximum extent possible; and (b) +describe the limitations and the code they affect. Such description must +be placed in a text file included with all distributions of the Covered +Software under this License. Except to the extent prohibited by statute +or regulation, such description must be sufficiently detailed for a +recipient of ordinary skill to be able to understand it. + +5. Termination +-------------- + +5.1. The rights granted under this License will terminate automatically +if You fail to comply with any of its terms. However, if You become +compliant, then the rights granted under this License from a particular +Contributor are reinstated (a) provisionally, unless and until such +Contributor explicitly and finally terminates Your grants, and (b) on an +ongoing basis, if such Contributor fails to notify You of the +non-compliance by some reasonable means prior to 60 days after You have +come back into compliance. Moreover, Your grants from a particular +Contributor are reinstated on an ongoing basis if such Contributor +notifies You of the non-compliance by some reasonable means, this is the +first time You have received notice of non-compliance with this License +from such Contributor, and You become compliant prior to 30 days after +Your receipt of the notice. + +5.2. If You initiate litigation against any entity by asserting a patent +infringement claim (excluding declaratory judgment actions, +counter-claims, and cross-claims) alleging that a Contributor Version +directly or indirectly infringes any patent, then the rights granted to +You by any and all Contributors for the Covered Software under Section +2.1 of this License shall terminate. + +5.3. In the event of termination under Sections 5.1 or 5.2 above, all +end user license agreements (excluding distributors and resellers) which +have been validly granted by You or Your distributors under this License +prior to termination shall survive termination. + +************************************************************************ +* * +* 6. Disclaimer of Warranty * +* ------------------------- * +* * +* Covered Software is provided under this License on an "as is" * +* basis, without warranty of any kind, either expressed, implied, or * +* statutory, including, without limitation, warranties that the * +* Covered Software is free of defects, merchantable, fit for a * +* particular purpose or non-infringing. The entire risk as to the * +* quality and performance of the Covered Software is with You. * +* Should any Covered Software prove defective in any respect, You * +* (not any Contributor) assume the cost of any necessary servicing, * +* repair, or correction. This disclaimer of warranty constitutes an * +* essential part of this License. 
No use of any Covered Software is * +* authorized under this License except under this disclaimer. * +* * +************************************************************************ + +************************************************************************ +* * +* 7. Limitation of Liability * +* -------------------------- * +* * +* Under no circumstances and under no legal theory, whether tort * +* (including negligence), contract, or otherwise, shall any * +* Contributor, or anyone who distributes Covered Software as * +* permitted above, be liable to You for any direct, indirect, * +* special, incidental, or consequential damages of any character * +* including, without limitation, damages for lost profits, loss of * +* goodwill, work stoppage, computer failure or malfunction, or any * +* and all other commercial damages or losses, even if such party * +* shall have been informed of the possibility of such damages. This * +* limitation of liability shall not apply to liability for death or * +* personal injury resulting from such party's negligence to the * +* extent applicable law prohibits such limitation. Some * +* jurisdictions do not allow the exclusion or limitation of * +* incidental or consequential damages, so this exclusion and * +* limitation may not apply to You. * +* * +************************************************************************ + +8. Litigation +------------- + +Any litigation relating to this License may be brought only in the +courts of a jurisdiction where the defendant maintains its principal +place of business and such litigation shall be governed by laws of that +jurisdiction, without reference to its conflict-of-law provisions. +Nothing in this Section shall prevent a party's ability to bring +cross-claims or counter-claims. + +9. Miscellaneous +---------------- + +This License represents the complete agreement concerning the subject +matter hereof. If any provision of this License is held to be +unenforceable, such provision shall be reformed only to the extent +necessary to make it enforceable. Any law or regulation which provides +that the language of a contract shall be construed against the drafter +shall not be used to construe this License against a Contributor. + +10. Versions of the License +--------------------------- + +10.1. New Versions + +Mozilla Foundation is the license steward. Except as provided in Section +10.3, no one other than the license steward has the right to modify or +publish new versions of this License. Each version will be given a +distinguishing version number. + +10.2. Effect of New Versions + +You may distribute the Covered Software under the terms of the version +of the License under which You originally received the Covered Software, +or under the terms of any subsequent version published by the license +steward. + +10.3. Modified Versions + +If you create software not governed by this License, and you want to +create a new license for such software, you may create and use a +modified version of this License if you rename the license and remove +any references to the name of the license steward (except to note that +such modified license differs from this License). + +10.4. Distributing Source Code Form that is Incompatible With Secondary +Licenses + +If You choose to distribute Source Code Form that is Incompatible With +Secondary Licenses under the terms of this version of the License, the +notice described in Exhibit B of this License must be attached. 
+ +Exhibit A - Source Code Form License Notice +------------------------------------------- + + This Source Code Form is subject to the terms of the Mozilla Public + License, v. 2.0. If a copy of the MPL was not distributed with this + file, You can obtain one at http://mozilla.org/MPL/2.0/. + +If it is not possible or desirable to put the notice in a particular +file, then You may include the notice in a location (such as a LICENSE +file in a relevant directory) where a recipient would be likely to look +for such a notice. + +You may add additional accurate notices of copyright ownership. + +Exhibit B - "Incompatible With Secondary Licenses" Notice +--------------------------------------------------------- + + This Source Code Form is "Incompatible With Secondary Licenses", as + defined by the Mozilla Public License, v. 2.0. \ No newline at end of file diff --git a/packages/eslint-plugin-turbo/README.md b/packages/eslint-plugin-turbo/README.md new file mode 100644 index 0000000..83627a1 --- /dev/null +++ b/packages/eslint-plugin-turbo/README.md @@ -0,0 +1,53 @@ +# `eslint-plugin-turbo` + +Ease configuration for Turborepo + +## Installation + +1. You'll first need to install [ESLint](https://eslint.org/): + +```sh +npm install eslint --save-dev +``` + +2. Next, install `eslint-plugin-turbo`: + +```sh +npm install eslint-plugin-turbo --save-dev +``` + +## Usage + +Add `turbo` to the plugins section of your `.eslintrc` configuration file. You can omit the `eslint-plugin-` prefix: + +```json +{ + "plugins": ["turbo"] +} +``` + +Then configure the rules you want to use under the rules section. + +```json +{ + "rules": { + "turbo/no-undeclared-env-vars": "error" + } +} +``` + +### Example + +```json +{ + "plugins": ["turbo"], + "rules": { + "turbo/no-undeclared-env-vars": [ + "error", + { + "allowList": ["^ENV_[A-Z]+$"] + } + ] + } +} +``` diff --git a/packages/eslint-plugin-turbo/__fixtures__/configs/single/turbo.json b/packages/eslint-plugin-turbo/__fixtures__/configs/single/turbo.json new file mode 100644 index 0000000..22b79b5 --- /dev/null +++ b/packages/eslint-plugin-turbo/__fixtures__/configs/single/turbo.json @@ -0,0 +1,25 @@ +{ + // new style, global env dependency + "globalEnv": ["NEW_STYLE_GLOBAL_ENV_KEY", "$NEW_STYLE_GLOBAL_ENV_KEY"], + // old style, global env dependency (deprecated) + "globalDependencies": ["$GLOBAL_ENV_KEY"], + "pipeline": { + "test": { + "outputs": ["coverage/**"], + "dependsOn": ["^build"] + }, + "lint": { + "outputs": [] + }, + "dev": { + "cache": false + }, + "build": { + "outputs": ["dist/**", ".next/**", "!.next/.cache/**"], + // task level env var deps + "env": ["NEW_STYLE_ENV_KEY"], + // old task level env var deps (deprecated) + "dependsOn": ["^build", "$TASK_ENV_KEY", "$ANOTHER_ENV_KEY"] + } + } +} diff --git a/packages/eslint-plugin-turbo/__fixtures__/workspace-configs/apps/docs/index.js b/packages/eslint-plugin-turbo/__fixtures__/workspace-configs/apps/docs/index.js new file mode 100644 index 0000000..4de53f5 --- /dev/null +++ b/packages/eslint-plugin-turbo/__fixtures__/workspace-configs/apps/docs/index.js @@ -0,0 +1,6 @@ +export default function docs() { + if (process.env.ENV_1 === undefined) { + return "does not exist"; + } + return "exists"; +} diff --git a/packages/eslint-plugin-turbo/__fixtures__/workspace-configs/apps/docs/package.json b/packages/eslint-plugin-turbo/__fixtures__/workspace-configs/apps/docs/package.json new file mode 100644 index 0000000..82f9a44 --- /dev/null +++ 
b/packages/eslint-plugin-turbo/__fixtures__/workspace-configs/apps/docs/package.json @@ -0,0 +1,4 @@ +{ + "name": "docs", + "version": "1.0.0" +} diff --git a/packages/eslint-plugin-turbo/__fixtures__/workspace-configs/apps/docs/turbo.json b/packages/eslint-plugin-turbo/__fixtures__/workspace-configs/apps/docs/turbo.json new file mode 100644 index 0000000..a3713ef --- /dev/null +++ b/packages/eslint-plugin-turbo/__fixtures__/workspace-configs/apps/docs/turbo.json @@ -0,0 +1,9 @@ +{ + "$schema": "https://turbo.build/schema.json", + "extends": ["//"], + "pipeline": { + "build": { + "env": ["ENV_3"] + } + } +} diff --git a/packages/eslint-plugin-turbo/__fixtures__/workspace-configs/apps/web/index.js b/packages/eslint-plugin-turbo/__fixtures__/workspace-configs/apps/web/index.js new file mode 100644 index 0000000..bfd3ab8 --- /dev/null +++ b/packages/eslint-plugin-turbo/__fixtures__/workspace-configs/apps/web/index.js @@ -0,0 +1,6 @@ +export default function web() { + if (!process.env.ENV_2) { + return "bar"; + } + return "foo"; +} diff --git a/packages/eslint-plugin-turbo/__fixtures__/workspace-configs/apps/web/package.json b/packages/eslint-plugin-turbo/__fixtures__/workspace-configs/apps/web/package.json new file mode 100644 index 0000000..d8a83ed --- /dev/null +++ b/packages/eslint-plugin-turbo/__fixtures__/workspace-configs/apps/web/package.json @@ -0,0 +1,4 @@ +{ + "name": "web", + "version": "1.0.0" +} diff --git a/packages/eslint-plugin-turbo/__fixtures__/workspace-configs/apps/web/turbo.json b/packages/eslint-plugin-turbo/__fixtures__/workspace-configs/apps/web/turbo.json new file mode 100644 index 0000000..0d1b80f --- /dev/null +++ b/packages/eslint-plugin-turbo/__fixtures__/workspace-configs/apps/web/turbo.json @@ -0,0 +1,9 @@ +{ + "$schema": "https://turbo.build/schema.json", + "extends": ["//"], + "pipeline": { + "build": { + "env": ["ENV_2"] + } + } +} diff --git a/packages/eslint-plugin-turbo/__fixtures__/workspace-configs/package.json b/packages/eslint-plugin-turbo/__fixtures__/workspace-configs/package.json new file mode 100644 index 0000000..c6616a6 --- /dev/null +++ b/packages/eslint-plugin-turbo/__fixtures__/workspace-configs/package.json @@ -0,0 +1,14 @@ +{ + "private": true, + "workspaces": [ + "apps/*", + "packages/*" + ], + "scripts": { + "build": "turbo run build" + }, + "devDependencies": { + "turbo": "latest" + }, + "packageManager": "yarn@1.22.19" +} diff --git a/packages/eslint-plugin-turbo/__fixtures__/workspace-configs/packages/ui/index.js b/packages/eslint-plugin-turbo/__fixtures__/workspace-configs/packages/ui/index.js new file mode 100644 index 0000000..dee5e80 --- /dev/null +++ b/packages/eslint-plugin-turbo/__fixtures__/workspace-configs/packages/ui/index.js @@ -0,0 +1,6 @@ +export default function foo() { + if (!process.env.IS_SERVER) { + return "bar"; + } + return "foo"; +} diff --git a/packages/eslint-plugin-turbo/__fixtures__/workspace-configs/packages/ui/package.json b/packages/eslint-plugin-turbo/__fixtures__/workspace-configs/packages/ui/package.json new file mode 100644 index 0000000..7cb7cf1 --- /dev/null +++ b/packages/eslint-plugin-turbo/__fixtures__/workspace-configs/packages/ui/package.json @@ -0,0 +1,4 @@ +{ + "name": "ui", + "version": "1.0.0" +} diff --git a/packages/eslint-plugin-turbo/__fixtures__/workspace-configs/packages/ui/turbo.json b/packages/eslint-plugin-turbo/__fixtures__/workspace-configs/packages/ui/turbo.json new file mode 100644 index 0000000..8bff09e --- /dev/null +++ 
b/packages/eslint-plugin-turbo/__fixtures__/workspace-configs/packages/ui/turbo.json @@ -0,0 +1,9 @@ +{ + "$schema": "https://turbo.build/schema.json", + "extends": ["//"], + "pipeline": { + "build": { + "env": ["IS_SERVER"] + } + } +} diff --git a/packages/eslint-plugin-turbo/__fixtures__/workspace-configs/turbo.json b/packages/eslint-plugin-turbo/__fixtures__/workspace-configs/turbo.json new file mode 100644 index 0000000..cb4fb20 --- /dev/null +++ b/packages/eslint-plugin-turbo/__fixtures__/workspace-configs/turbo.json @@ -0,0 +1,9 @@ +{ + "$schema": "https://turbo.build/schema.json", + "globalEnv": ["CI"], + "pipeline": { + "build": { + "env": ["ENV_1"] + } + } +} diff --git a/packages/eslint-plugin-turbo/__fixtures__/workspace/.eslintrc.js b/packages/eslint-plugin-turbo/__fixtures__/workspace/.eslintrc.js new file mode 100644 index 0000000..8dc66dc --- /dev/null +++ b/packages/eslint-plugin-turbo/__fixtures__/workspace/.eslintrc.js @@ -0,0 +1,4 @@ +module.exports = { + root: true, + extends: ["plugin:turbo/recommended"], +}; diff --git a/packages/eslint-plugin-turbo/__fixtures__/workspace/child/child.js b/packages/eslint-plugin-turbo/__fixtures__/workspace/child/child.js new file mode 100644 index 0000000..9e799a2 --- /dev/null +++ b/packages/eslint-plugin-turbo/__fixtures__/workspace/child/child.js @@ -0,0 +1,2 @@ +process.env.NONEXISTENT; +process.env.CI; diff --git a/packages/eslint-plugin-turbo/__fixtures__/workspace/package-lock.json b/packages/eslint-plugin-turbo/__fixtures__/workspace/package-lock.json new file mode 100644 index 0000000..301f072 --- /dev/null +++ b/packages/eslint-plugin-turbo/__fixtures__/workspace/package-lock.json @@ -0,0 +1,58 @@ +{ + "name": "workspace", + "lockfileVersion": 2, + "requires": true, + "packages": { + "": { + "dependencies": { + "eslint-plugin-turbo": "../../" + } + }, + "../..": { + "version": "1.9.0", + "license": "MPL-2.0", + "devDependencies": { + "@turbo/test-utils": "workspace:*", + "@turbo/tsconfig": "workspace:*", + "@turbo/types": "workspace:*", + "@turbo/utils": "workspace:*", + "@types/eslint": "^8.4.5", + "@types/estree": "^1.0.0", + "@types/jest": "^27.4.0", + "@types/node": "^16.11.12", + "jest": "^27.4.3", + "json5": "^2.2.1", + "ts-jest": "^27.1.1", + "tsup": "^6.2.0", + "typescript": "^4.7.4" + }, + "peerDependencies": { + "eslint": ">6.6.0" + } + }, + "node_modules/eslint-plugin-turbo": { + "resolved": "../..", + "link": true + } + }, + "dependencies": { + "eslint-plugin-turbo": { + "version": "file:../..", + "requires": { + "@turbo/test-utils": "workspace:*", + "@turbo/tsconfig": "workspace:*", + "@turbo/types": "workspace:*", + "@turbo/utils": "workspace:*", + "@types/eslint": "^8.4.5", + "@types/estree": "^1.0.0", + "@types/jest": "^27.4.0", + "@types/node": "^16.11.12", + "jest": "^27.4.3", + "json5": "^2.2.1", + "ts-jest": "^27.1.1", + "tsup": "^6.2.0", + "typescript": "^4.7.4" + } + } + } +} diff --git a/packages/eslint-plugin-turbo/__fixtures__/workspace/package.json b/packages/eslint-plugin-turbo/__fixtures__/workspace/package.json new file mode 100644 index 0000000..a1b2929 --- /dev/null +++ b/packages/eslint-plugin-turbo/__fixtures__/workspace/package.json @@ -0,0 +1,5 @@ +{ + "dependencies": { + "eslint-plugin-turbo": "../../" + } +} diff --git a/packages/eslint-plugin-turbo/__fixtures__/workspace/peer.js b/packages/eslint-plugin-turbo/__fixtures__/workspace/peer.js new file mode 100644 index 0000000..16c8bb0 --- /dev/null +++ b/packages/eslint-plugin-turbo/__fixtures__/workspace/peer.js @@ -0,0 +1 @@ 
+process.env.CI; diff --git a/packages/eslint-plugin-turbo/__fixtures__/workspace/turbo.json b/packages/eslint-plugin-turbo/__fixtures__/workspace/turbo.json new file mode 100644 index 0000000..8079eb2 --- /dev/null +++ b/packages/eslint-plugin-turbo/__fixtures__/workspace/turbo.json @@ -0,0 +1,34 @@ +{ + "$schema": "https://turbo.build/schema.json", + "globalEnv": ["UNORDERED", "CI"], + "pipeline": { + "build": { + // A workspace's `build` task depends on that workspace's + // topological dependencies' and devDependencies' + // `build` tasks being completed first. The `^` symbol + // indicates an upstream dependency. + "dependsOn": ["^build"] + }, + "test": { + // A workspace's `test` task depends on that workspace's + // own `build` task being completed first. + "dependsOn": ["build"], + "outputs": [], + // A workspace's `test` task should only be rerun when + // either a `.tsx` or `.ts` file has changed. + "inputs": ["src/**/*.tsx", "src/**/*.ts", "test/**/*.ts", "test/**/*.tsx"] + }, + "lint": { + // A workspace's `lint` task has no dependencies and + // can be run whenever. + "outputs": [] + }, + "deploy": { + // A workspace's `deploy` task depends on the `build`, + // `test`, and `lint` tasks of the same workspace + // being completed. + "dependsOn": ["build", "test", "lint"], + "outputs": [] + } + } +} diff --git a/packages/eslint-plugin-turbo/__tests__/cwd.test.ts b/packages/eslint-plugin-turbo/__tests__/cwd.test.ts new file mode 100644 index 0000000..15a2695 --- /dev/null +++ b/packages/eslint-plugin-turbo/__tests__/cwd.test.ts @@ -0,0 +1,88 @@ +import path from "path"; +import JSON5 from "json5"; +import { execSync } from "child_process"; +import { Schema } from "@turbo/types"; +import { setupTestFixtures } from "@turbo/test-utils"; + +describe("eslint settings check", () => { + const { useFixture } = setupTestFixtures({ + directory: path.join(__dirname, "../"), + }); + + it("does the right thing for peers", () => { + const { root: cwd } = useFixture({ fixture: "workspace" }); + execSync(`npm install`, { cwd }); + + const configString = execSync(`eslint --print-config peer.js`, { + cwd, + encoding: "utf8", + }); + const configJson = JSON5.parse(configString); + + expect(configJson.settings).toEqual({ + turbo: { envVars: ["CI", "UNORDERED"] }, + }); + }); + + it("does the right thing for child dirs", () => { + const { root } = useFixture({ fixture: "workspace" }); + execSync(`npm install`, { cwd: root }); + + const cwd = path.join(root, "child"); + const configString = execSync(`eslint --print-config child.js`, { + cwd, + encoding: "utf8", + }); + const configJson = JSON5.parse(configString); + + expect(configJson.settings).toEqual({ + turbo: { envVars: ["CI", "UNORDERED"] }, + }); + }); +}); + +describe("eslint cache is busted", () => { + const { useFixture } = setupTestFixtures({ + directory: path.join(__dirname, "../"), + }); + + it("catches a lint error after changing config", () => { + expect.assertions(2); + + // ensure that we populate the cache with a failure. 
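+    // (note: the recommended config copies the env vars it finds in turbo.json
+    // into ESLint's shared settings, so editing turbo.json below should change
+    // the computed config and invalidate the cache; see lib/configs/recommended.ts)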
+ const { root, readJson, write } = useFixture({ fixture: "workspace" }); + execSync(`npm install`, { cwd: root }); + + const cwd = path.join(root, "child"); + try { + execSync(`eslint --format=json child.js`, { cwd, encoding: "utf8" }); + } catch (error: any) { + const outputJson = JSON5.parse(error.stdout); + expect(outputJson).toMatchObject([ + { + messages: [ + { + message: + "NONEXISTENT is not listed as a dependency in turbo.json", + }, + ], + }, + ]); + } + + // change the configuration + const turboJson = readJson("turbo.json"); + if (turboJson && "globalEnv" in turboJson) { + turboJson.globalEnv = ["CI", "NONEXISTENT"]; + write("turbo.json", JSON5.stringify(turboJson, null, 2)); + } + + // test that we invalidated the eslint cache + const output = execSync(`eslint --format=json child.js`, { + cwd, + encoding: "utf8", + }); + const outputJson = JSON5.parse(output); + expect(outputJson).toMatchObject([{ errorCount: 0 }]); + }); +}); diff --git a/packages/eslint-plugin-turbo/__tests__/lib/no-undeclared-env-vars.test.ts b/packages/eslint-plugin-turbo/__tests__/lib/no-undeclared-env-vars.test.ts new file mode 100644 index 0000000..5c753dd --- /dev/null +++ b/packages/eslint-plugin-turbo/__tests__/lib/no-undeclared-env-vars.test.ts @@ -0,0 +1,433 @@ +import { RULES } from "../../lib/constants"; +import rule from "../../lib/rules/no-undeclared-env-vars"; +import { RuleTester } from "eslint"; +import path from "path"; + +const ruleTester = new RuleTester({ + parserOptions: { ecmaVersion: 2020 }, +}); + +ruleTester.run(RULES.noUndeclaredEnvVars, rule, { + valid: [ + { + code: ` + const { ENV_2 } = process.env; + `, + options: [ + { cwd: path.join(__dirname, "../../__fixtures__/workspace-configs") }, + ], + filename: path.join( + __dirname, + "../../__fixtures__/workspace-configs/apps/web/index.js" + ), + }, + { + code: ` + const { ENV_1 } = process.env; + `, + options: [ + { cwd: path.join(__dirname, "../../__fixtures__/workspace-configs") }, + ], + filename: path.join( + __dirname, + "../../__fixtures__/workspace-configs/apps/web/index.js" + ), + }, + { + code: ` + const { ENV_1 } = process.env; + `, + options: [{ cwd: "/some/random/path" }], + }, + { + code: ` + const { CI } = process.env; + `, + options: [ + { cwd: path.join(__dirname, "../../__fixtures__/workspace-configs") }, + ], + filename: path.join( + __dirname, + "../../__fixtures__/workspace-configs/apps/web/index.js" + ), + }, + { + code: ` + const { TASK_ENV_KEY, ANOTHER_ENV_KEY } = process.env; + `, + options: [ + { cwd: path.join(__dirname, "../../__fixtures__/configs/single") }, + ], + }, + { + code: ` + const { NEW_STYLE_ENV_KEY, TASK_ENV_KEY } = process.env; + `, + options: [ + { cwd: path.join(__dirname, "../../__fixtures__/configs/single") }, + ], + }, + { + code: ` + const { NEW_STYLE_GLOBAL_ENV_KEY, TASK_ENV_KEY } = process.env; + `, + options: [ + { cwd: path.join(__dirname, "../../__fixtures__/configs/single") }, + ], + }, + { + code: ` + const val = process.env["NEW_STYLE_GLOBAL_ENV_KEY"]; + `, + options: [ + { cwd: path.join(__dirname, "../../__fixtures__/configs/single") }, + ], + }, + { + code: ` + const { TASK_ENV_KEY, ANOTHER_ENV_KEY } = process.env; + `, + options: [ + { cwd: path.join(__dirname, "../../__fixtures__/configs/single") }, + ], + }, + { + code: ` + const x = process.env.GLOBAL_ENV_KEY; + const { TASK_ENV_KEY, GLOBAL_ENV_KEY: renamedX } = process.env; + `, + options: [ + { cwd: path.join(__dirname, "../../__fixtures__/configs/single") }, + ], + }, + { + code: "var x = process.env.GLOBAL_ENV_KEY;", 
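+      // GLOBAL_ENV_KEY is declared in the fixture's turbo.json through the
+      // deprecated "$"-prefixed globalDependencies syntax, so this access is valid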
+ options: [ + { cwd: path.join(__dirname, "../../__fixtures__/configs/single") }, + ], + }, + { + code: "let x = process.env.TASK_ENV_KEY;", + options: [ + { cwd: path.join(__dirname, "../../__fixtures__/configs/single") }, + ], + }, + { + code: "const x = process.env.ANOTHER_KEY_VALUE;", + options: [ + { + cwd: path.join(__dirname, "../../__fixtures__/configs/single"), + allowList: ["^ANOTHER_KEY_[A-Z]+$"], + }, + ], + }, + { + code: ` + var x = process.env.ENV_VAR_ONE; + var y = process.env.ENV_VAR_TWO; + `, + options: [ + { + cwd: path.join(__dirname, "../../__fixtures__/configs/single"), + allowList: ["^ENV_VAR_[A-Z]+$"], + }, + ], + }, + { + code: ` + var x = process.env.ENV_VAR_ONE; + var y = process.env.ENV_VAR_TWO; + `, + options: [ + { + cwd: path.join(__dirname, "../../__fixtures__/configs/single"), + allowList: ["^ENV_VAR_O[A-Z]+$", "ENV_VAR_TWO"], + }, + ], + }, + { + code: ` + var globalOrTask = process.env.TASK_ENV_KEY || process.env.GLOBAL_ENV_KEY; + var oneOrTwo = process.env.ENV_VAR_ONE || process.env.ENV_VAR_TWO; + `, + options: [ + { + cwd: path.join(__dirname, "../../__fixtures__/configs/single"), + allowList: ["^ENV_VAR_[A-Z]+$"], + }, + ], + }, + { + code: ` + () => { return process.env.GLOBAL_ENV_KEY } + () => { return process.env.TASK_ENV_KEY } + () => { return process.env.ENV_VAR_ALLOWED } + `, + options: [ + { + cwd: path.join(__dirname, "../../__fixtures__/configs/single"), + allowList: ["^ENV_VAR_[A-Z]+$"], + }, + ], + }, + { + code: ` + var foo = process?.env.GLOBAL_ENV_KEY + var foo = process?.env.TASK_ENV_KEY + var foo = process?.env.ENV_VAR_ALLOWED + `, + options: [ + { + cwd: path.join(__dirname, "../../__fixtures__/configs/single"), + allowList: ["^ENV_VAR_[A-Z]+$"], + }, + ], + }, + { + code: ` + function test(arg1 = process.env.GLOBAL_ENV_KEY) {}; + function test(arg1 = process.env.TASK_ENV_KEY) {}; + function test(arg1 = process.env.ENV_VAR_ALLOWED) {}; + `, + options: [ + { + cwd: path.join(__dirname, "../../__fixtures__/configs/single"), + allowList: ["^ENV_VAR_[A-Z]+$"], + }, + ], + }, + { + code: ` + (arg1 = process.env.GLOBAL_ENV_KEY) => {} + (arg1 = process.env.TASK_ENV_KEY) => {} + (arg1 = process.env.ENV_VAR_ALLOWED) => {} + `, + options: [ + { + cwd: path.join(__dirname, "../../__fixtures__/configs/single"), + allowList: ["^ENV_VAR_[A-Z]+$"], + }, + ], + }, + { + code: "const getEnv = (key) => process.env[key];", + options: [ + { cwd: path.join(__dirname, "../../__fixtures__/configs/single") }, + ], + }, + { + code: "function getEnv(key) { return process.env[key]; }", + options: [ + { cwd: path.join(__dirname, "../../__fixtures__/configs/single") }, + ], + }, + { + code: "for (let x of ['ONE', 'TWO', 'THREE']) { console.log(process.env[x]); }", + options: [ + { cwd: path.join(__dirname, "../../__fixtures__/configs/single") }, + ], + }, + ], + + invalid: [ + { + code: ` + const { ENV_2 } = process.env; + `, + options: [ + { cwd: path.join(__dirname, "../../__fixtures__/workspace-configs") }, + ], + filename: path.join( + __dirname, + "../../__fixtures__/workspace-configs/apps/docs/index.js" + ), + errors: [ + { + message: + "ENV_2 is not listed as a dependency in the root turbo.json or workspace (apps/docs) turbo.json", + }, + ], + }, + { + code: "let { X } = process.env;", + options: [ + { cwd: path.join(__dirname, "../../__fixtures__/configs/single") }, + ], + errors: [{ message: "X is not listed as a dependency in turbo.json" }], + }, + { + code: "const { X, Y, Z } = process.env;", + options: [ + { cwd: path.join(__dirname, 
"../../__fixtures__/configs/single") }, + ], + errors: [ + { message: "X is not listed as a dependency in turbo.json" }, + { message: "Y is not listed as a dependency in turbo.json" }, + { message: "Z is not listed as a dependency in turbo.json" }, + ], + }, + { + code: "const { X, Y: NewName, Z } = process.env;", + options: [ + { cwd: path.join(__dirname, "../../__fixtures__/configs/single") }, + ], + errors: [ + { message: "X is not listed as a dependency in turbo.json" }, + { message: "Y is not listed as a dependency in turbo.json" }, + { message: "Z is not listed as a dependency in turbo.json" }, + ], + }, + { + code: "var x = process.env.NOT_THERE;", + options: [ + { cwd: path.join(__dirname, "../../__fixtures__/configs/single") }, + ], + errors: [ + { + message: "NOT_THERE is not listed as a dependency in turbo.json", + }, + ], + }, + { + code: "var x = process.env.KEY;", + options: [ + { + cwd: path.join(__dirname, "../../__fixtures__/configs/single"), + allowList: ["^ANOTHER_KEY_[A-Z]+$"], + }, + ], + errors: [{ message: "KEY is not listed as a dependency in turbo.json" }], + }, + { + code: ` + var globalOrTask = process.env.TASK_ENV_KEY_NEW || process.env.GLOBAL_ENV_KEY_NEW; + var oneOrTwo = process.env.ENV_VAR_ONE || process.env.ENV_VAR_TWO; + `, + options: [ + { + cwd: path.join(__dirname, "../../__fixtures__/configs/single"), + }, + ], + errors: [ + { + message: + "TASK_ENV_KEY_NEW is not listed as a dependency in turbo.json", + }, + { + message: + "GLOBAL_ENV_KEY_NEW is not listed as a dependency in turbo.json", + }, + { + message: "ENV_VAR_ONE is not listed as a dependency in turbo.json", + }, + { + message: "ENV_VAR_TWO is not listed as a dependency in turbo.json", + }, + ], + }, + { + code: ` + () => { return process.env.GLOBAL_ENV_KEY_NEW } + () => { return process.env.TASK_ENV_KEY_NEW } + () => { return process.env.ENV_VAR_NOT_ALLOWED } + `, + options: [ + { + cwd: path.join(__dirname, "../../__fixtures__/configs/single"), + }, + ], + errors: [ + { + message: + "GLOBAL_ENV_KEY_NEW is not listed as a dependency in turbo.json", + }, + { + message: + "TASK_ENV_KEY_NEW is not listed as a dependency in turbo.json", + }, + { + message: + "ENV_VAR_NOT_ALLOWED is not listed as a dependency in turbo.json", + }, + ], + }, + { + code: ` + var foo = process?.env.GLOBAL_ENV_KEY_NEW + var foo = process?.env.TASK_ENV_KEY_NEW + var foo = process?.env.ENV_VAR_NOT_ALLOWED + `, + options: [ + { + cwd: path.join(__dirname, "../../__fixtures__/configs/single"), + }, + ], + errors: [ + { + message: + "GLOBAL_ENV_KEY_NEW is not listed as a dependency in turbo.json", + }, + { + message: + "TASK_ENV_KEY_NEW is not listed as a dependency in turbo.json", + }, + { + message: + "ENV_VAR_NOT_ALLOWED is not listed as a dependency in turbo.json", + }, + ], + }, + { + code: ` + function test(arg1 = process.env.GLOBAL_ENV_KEY_NEW) {}; + function test(arg1 = process.env.TASK_ENV_KEY_NEW) {}; + function test(arg1 = process.env.ENV_VAR_NOT_ALLOWED) {}; + `, + options: [ + { + cwd: path.join(__dirname, "../../__fixtures__/configs/single"), + }, + ], + errors: [ + { + message: + "GLOBAL_ENV_KEY_NEW is not listed as a dependency in turbo.json", + }, + { + message: + "TASK_ENV_KEY_NEW is not listed as a dependency in turbo.json", + }, + { + message: + "ENV_VAR_NOT_ALLOWED is not listed as a dependency in turbo.json", + }, + ], + }, + { + code: ` + (arg1 = process.env.GLOBAL_ENV_KEY_NEW) => {} + (arg1 = process.env.TASK_ENV_KEY_NEW) => {} + (arg1 = process.env.ENV_VAR_NOT_ALLOWED) => {} + `, + options: [ + { + 
cwd: path.join(__dirname, "../../__fixtures__/configs/single"),
+        },
+      ],
+      errors: [
+        {
+          message:
+            "GLOBAL_ENV_KEY_NEW is not listed as a dependency in turbo.json",
+        },
+        {
+          message:
+            "TASK_ENV_KEY_NEW is not listed as a dependency in turbo.json",
+        },
+        {
+          message:
+            "ENV_VAR_NOT_ALLOWED is not listed as a dependency in turbo.json",
+        },
+      ],
+    },
+  ],
+});
diff --git a/packages/eslint-plugin-turbo/docs/rules/no-undeclared-env-vars.md b/packages/eslint-plugin-turbo/docs/rules/no-undeclared-env-vars.md
new file mode 100644
index 0000000..049d7af
--- /dev/null
+++ b/packages/eslint-plugin-turbo/docs/rules/no-undeclared-env-vars.md
@@ -0,0 +1,74 @@
+# Ensure all environment variables are correctly included in cache keys (`no-undeclared-env-vars`)
+
+Ensures that all detectable usage of environment variables is correctly included in cache keys, so that build outputs remain correctly cacheable across environments.
+
+## Rule Details
+
+This rule aims to prevent users from forgetting to include an environment variable in their `turbo.json` configuration.
+
+The examples below assume the following code:
+
+```js
+const client = MyAPI({ token: process.env.MY_API_TOKEN });
+```
+
+Examples of **incorrect** code for this rule:
+
+```json
+{
+  "pipeline": {
+    "build": {
+      "dependsOn": ["^build"],
+      "outputs": ["dist/**", ".next/**", "!.next/cache/**"]
+    },
+    "lint": {},
+    "dev": {
+      "cache": false
+    }
+  }
+}
+```
+
+Examples of **correct** code for this rule:
+
+```json
+{
+  "globalEnv": ["MY_API_TOKEN"],
+  "pipeline": {
+    "build": {
+      "dependsOn": ["^build"],
+      "outputs": ["dist/**", ".next/**", "!.next/cache/**"]
+    },
+    "lint": {},
+    "dev": {
+      "cache": false
+    }
+  }
+}
+```
+
+```json
+{
+  "pipeline": {
+    "build": {
+      "dependsOn": ["^build"],
+      "env": ["MY_API_TOKEN"],
+      "outputs": ["dist/**", ".next/**", "!.next/cache/**"]
+    },
+    "lint": {},
+    "dev": {
+      "cache": false
+    }
+  }
+}
+```
+
+## Options
+
+| Option      | Required | Default | Details | Example |
+| ----------- | -------- | ------- | ------- | ------- |
+| `allowList` | No       | []      | An array of strings (or regular expressions) to exclude.
NOTE: an env variable should only be excluded if it has no effect on build outputs | `["MY_API_TOKEN", "^MY_ENV_PREFIX_[A-Z]+$"]` | + +## Further Reading + +- [Altering Caching Based on Environment Variables](https://turbo.build/repo/docs/core-concepts/caching#altering-caching-based-on-environment-variables) diff --git a/packages/eslint-plugin-turbo/jest.config.js b/packages/eslint-plugin-turbo/jest.config.js new file mode 100644 index 0000000..102773e --- /dev/null +++ b/packages/eslint-plugin-turbo/jest.config.js @@ -0,0 +1,12 @@ +/** @type {import('ts-jest/dist/types').InitialOptionsTsJest} */ +module.exports = { + roots: [""], + transform: { + "^.+\\.tsx?$": "ts-jest", + }, + testPathIgnorePatterns: ["/__fixtures__/"], + coveragePathIgnorePatterns: ["/__fixtures__/"], + moduleFileExtensions: ["ts", "tsx", "js", "jsx", "json", "node"], + modulePathIgnorePatterns: ["/node_modules", "/dist"], + preset: "ts-jest", +}; diff --git a/packages/eslint-plugin-turbo/lib/configs/recommended.ts b/packages/eslint-plugin-turbo/lib/configs/recommended.ts new file mode 100644 index 0000000..e247503 --- /dev/null +++ b/packages/eslint-plugin-turbo/lib/configs/recommended.ts @@ -0,0 +1,26 @@ +import { RULES } from "../constants"; +import getEnvVarDependencies from "../utils/getEnvVarDependencies"; + +// Add the environment variables into the ESLint incremental cache key. +const envVars = getEnvVarDependencies({ + cwd: process.cwd(), +}); +const settings = { + turbo: { + envVars: envVars + ? Object.values(envVars) + .flatMap((s) => Array.from(s)) + .sort() + : [], + }, +}; + +const config = { + settings, + plugins: ["turbo"], + rules: { + [`turbo/${RULES.noUndeclaredEnvVars}`]: "error", + }, +}; + +export default config; diff --git a/packages/eslint-plugin-turbo/lib/constants.ts b/packages/eslint-plugin-turbo/lib/constants.ts new file mode 100644 index 0000000..5af2e6f --- /dev/null +++ b/packages/eslint-plugin-turbo/lib/constants.ts @@ -0,0 +1,5 @@ +const RULES = { + noUndeclaredEnvVars: `no-undeclared-env-vars`, +}; + +export { RULES }; diff --git a/packages/eslint-plugin-turbo/lib/index.ts b/packages/eslint-plugin-turbo/lib/index.ts new file mode 100644 index 0000000..e7f113c --- /dev/null +++ b/packages/eslint-plugin-turbo/lib/index.ts @@ -0,0 +1,17 @@ +import { RULES } from "./constants"; + +// rules +import noUndeclaredEnvVars from "./rules/no-undeclared-env-vars"; + +// configs +import recommended from "./configs/recommended"; + +const rules = { + [RULES.noUndeclaredEnvVars]: noUndeclaredEnvVars, +}; + +const configs = { + recommended, +}; + +export { rules, configs }; diff --git a/packages/eslint-plugin-turbo/lib/rules/no-undeclared-env-vars.ts b/packages/eslint-plugin-turbo/lib/rules/no-undeclared-env-vars.ts new file mode 100644 index 0000000..372d21a --- /dev/null +++ b/packages/eslint-plugin-turbo/lib/rules/no-undeclared-env-vars.ts @@ -0,0 +1,187 @@ +import type { Rule } from "eslint"; +import path from "path"; +import { Node, MemberExpression } from "estree"; +import { RULES } from "../constants"; +import getEnvVarDependencies from "../utils/getEnvVarDependencies"; + +const meta: Rule.RuleMetaData = { + type: "problem", + docs: { + description: + "Do not allow the use of `process.env` without including the env key in any turbo.json", + category: "Configuration Issues", + recommended: true, + url: `https://github.com/vercel/turbo/tree/main/packages/eslint-plugin-turbo/docs/rules/${RULES.noUndeclaredEnvVars}.md`, + }, + schema: [ + { + type: "object", + default: {}, + additionalProperties: 
false,
+      properties: {
+        // override cwd, primarily exposed for easier testing
+        cwd: {
+          require: false,
+          type: "string",
+        },
+        allowList: {
+          default: [],
+          type: "array",
+          items: {
+            type: "string",
+          },
+        },
+      },
+    },
+  ],
+};
+
+/**
+ * Normalize the value of the cwd
+ * Extracted from eslint
+ * SPDX-License-Identifier: MIT
+ */
+function normalizeCwd(
+  cwd: string | undefined,
+  options: Array<any>
+): string | undefined {
+  if (options?.[0]?.cwd) {
+    return options[0].cwd;
+  }
+
+  if (cwd) {
+    return cwd;
+  }
+  if (typeof process === "object") {
+    return process.cwd();
+  }
+
+  return undefined;
+}
+
+function create(context: Rule.RuleContext): Rule.RuleListener {
+  const { options, getPhysicalFilename } = context;
+  const allowList: Array<string> = options?.[0]?.allowList || [];
+  const regexAllowList: Array<RegExp> = [];
+  allowList.forEach((allowed) => {
+    try {
+      regexAllowList.push(new RegExp(allowed));
+    } catch (err) {
+      // log the error, but just move on without this allowList entry
+      console.error(`Unable to convert "${allowed}" to regex`);
+    }
+  });
+
+  const cwd = normalizeCwd(
+    context.getCwd ? context.getCwd() : undefined,
+    options
+  );
+  const filePath = getPhysicalFilename();
+  const allTurboVars = getEnvVarDependencies({
+    cwd,
+  });
+
+  // if allTurboVars is null, something went wrong reading from the turbo config
+  // (this is different from finding a config with no env vars present, which would
+  // return an empty set) - so there is no point continuing if we have nothing to check against
+  if (!allTurboVars) {
+    // return of {} bails early from a rule check
+    return {};
+  }
+
+  const globalTurboVars = allTurboVars["//"];
+  const hasWorkspaceConfigs = Object.keys(allTurboVars).length > 1;
+
+  // find the workspace config (if any) that matches the current file path
+  const workspaceKey = Object.keys(allTurboVars).find(
+    (workspacePath) => filePath !== "//" && filePath.startsWith(workspacePath)
+  );
+
+  let workspaceTurboVars: Set<string> | null = null;
+  if (workspaceKey) {
+    workspaceTurboVars = allTurboVars[workspaceKey];
+  }
+
+  const checkKey = (node: Node, envKey?: string) => {
+    if (
+      envKey &&
+      !globalTurboVars.has(envKey) &&
+      !regexAllowList.some((regex) => regex.test(envKey))
+    ) {
+      // if we have a workspace config, check that too
+      if (workspaceTurboVars && workspaceTurboVars.has(envKey)) {
+        return {};
+      } else {
+        let message = `{{ envKey }} is not listed as a dependency in ${
+          hasWorkspaceConfigs ? "root turbo.json" : "turbo.json"
+        }`;
+        if (workspaceKey && workspaceTurboVars) {
+          if (cwd) {
+            // if we have a cwd, we can provide a relative path to the workspace config
+            message = `{{ envKey }} is not listed as a dependency in the root turbo.json or workspace (${path.relative(
+              cwd,
+              workspaceKey
+            )}) turbo.json`;
+          } else {
+            message = `{{ envKey }} is not listed as a dependency in the root turbo.json or workspace turbo.json`;
+          }
+        }
+
+        context.report({
+          node,
+          message,
+          data: { envKey },
+        });
+      }
+    }
+  };
+
+  const isComputed = (
+    node: MemberExpression & Rule.NodeParentExtension
+  ): boolean => {
+    if ("computed" in node.parent) {
+      return node.parent.computed;
+    }
+
+    return false;
+  };
+
+  return {
+    MemberExpression(node) {
+      // we only care about complete process env declarations and non-computed keys
+      if (
+        "name" in node.object &&
+        "name" in node.property &&
+        !isComputed(node)
+      ) {
+        const objectName = node.object.name;
+        const propertyName = node.property.name;
+
+        // we're doing something with process.env
+        if (objectName === "process" && propertyName === "env") {
+          // destructuring from process.env
+          if ("id" in node.parent && node.parent.id?.type === "ObjectPattern") {
+            const values = node.parent.id.properties.values();
+            Array.from(values).forEach((item) => {
+              if ("key" in item && "name" in item.key) {
+                checkKey(node.parent, item.key.name);
+              }
+            });
+          }
+
+          // accessing key on process.env
+          else if (
+            "property" in node.parent &&
+            "name" in node.parent.property
+          ) {
+            checkKey(node.parent, node.parent.property?.name);
+          }
+        }
+      }
+    },
+  };
+}
+
+const rule = { create, meta };
+export default rule;
diff --git a/packages/eslint-plugin-turbo/lib/utils/getEnvVarDependencies.ts b/packages/eslint-plugin-turbo/lib/utils/getEnvVarDependencies.ts
new file mode 100644
index 0000000..a57e5eb
--- /dev/null
+++ b/packages/eslint-plugin-turbo/lib/utils/getEnvVarDependencies.ts
@@ -0,0 +1,75 @@
+import { getTurboConfigs } from "@turbo/utils";
+
+function findDependsOnEnvVars({
+  dependencies,
+}: {
+  dependencies?: Array<string>;
+}) {
+  if (dependencies) {
+    return (
+      dependencies
+        // filter for dep env vars
+        .filter((dep) => dep.startsWith("$"))
+        // remove leading $
+        .map((envVar) => envVar.slice(1, envVar.length))
+    );
+  }
+
+  return [];
+}
+
+function getEnvVarDependencies({
+  cwd,
+}: {
+  cwd: string | undefined;
+}): Record<string, Set<string>> | null {
+  const turboConfigs = getTurboConfigs(cwd);
+
+  if (!turboConfigs.length) {
+    return null;
+  }
+
+  const envVars: Record<string, Set<string>> = {
+    "//": new Set(),
+  };
+
+  turboConfigs.forEach((turboConfig) => {
+    const { config, workspacePath, isRootConfig } = turboConfig;
+
+    const key = isRootConfig ?
"//" : workspacePath; + if (!envVars[key]) { + envVars[key] = new Set(); + } + + // handle globals + if (!("extends" in config)) { + const { globalDependencies = [], globalEnv = [] } = config; + + const keys = [ + ...findDependsOnEnvVars({ + dependencies: globalDependencies, + }), + ...globalEnv, + ]; + keys.forEach((k) => envVars[key].add(k)); + } + + // handle pipelines + const { pipeline = {} } = config; + Object.values(pipeline).forEach(({ env, dependsOn }) => { + if (dependsOn) { + findDependsOnEnvVars({ dependencies: dependsOn }).forEach((k) => + envVars[key].add(k) + ); + } + + if (env) { + env.forEach((k) => envVars[key].add(k)); + } + }); + }); + + return envVars; +} + +export default getEnvVarDependencies; diff --git a/packages/eslint-plugin-turbo/package.json b/packages/eslint-plugin-turbo/package.json new file mode 100644 index 0000000..d19eef2 --- /dev/null +++ b/packages/eslint-plugin-turbo/package.json @@ -0,0 +1,51 @@ +{ + "name": "eslint-plugin-turbo", + "version": "1.9.4-canary.2", + "description": "ESLint plugin for Turborepo", + "keywords": [ + "turbo", + "eslint", + "turborepo", + "eslintplugin", + "eslint-plugin" + ], + "repository": { + "type": "git", + "url": "https://github.com/vercel/turbo", + "directory": "packages/eslint-plugin-turbo" + }, + "bugs": { + "url": "https://github.com/vercel/turbo/issues" + }, + "author": "Vercel", + "main": "./dist/index.js", + "files": [ + "dist/**" + ], + "scripts": { + "release": "pnpm build && pnpm publish", + "test": "jest", + "build": "tsup", + "check-types": "tsc --noEmit", + "lint": "eslint lib/**/*.ts" + }, + "devDependencies": { + "@types/eslint": "^8.4.5", + "@types/estree": "^1.0.0", + "@types/jest": "^27.4.0", + "@types/node": "^16.11.12", + "jest": "^27.4.3", + "json5": "^2.2.1", + "ts-jest": "^27.1.1", + "@turbo/tsconfig": "workspace:*", + "tsup": "^6.2.0", + "@turbo/test-utils": "workspace:*", + "@turbo/types": "workspace:*", + "@turbo/utils": "workspace:*", + "typescript": "^4.7.4" + }, + "peerDependencies": { + "eslint": ">6.6.0" + }, + "license": "MPL-2.0" +} diff --git a/packages/eslint-plugin-turbo/tsconfig.json b/packages/eslint-plugin-turbo/tsconfig.json new file mode 100644 index 0000000..0620a3c --- /dev/null +++ b/packages/eslint-plugin-turbo/tsconfig.json @@ -0,0 +1,6 @@ +{ + "extends": "@turbo/tsconfig/library.json", + "compilerOptions": { + "rootDir": "." 
+  }
+}
diff --git a/packages/eslint-plugin-turbo/tsup.config.ts b/packages/eslint-plugin-turbo/tsup.config.ts
new file mode 100644
index 0000000..bbda8cb
--- /dev/null
+++ b/packages/eslint-plugin-turbo/tsup.config.ts
@@ -0,0 +1,8 @@
+import { defineConfig, Options } from "tsup";
+
+export default defineConfig((options: Options) => ({
+  entry: ["lib/index.ts"],
+  clean: true,
+  minify: true,
+  ...options,
+}));
diff --git a/packages/eslint-plugin-turbo/turbo.json b/packages/eslint-plugin-turbo/turbo.json
new file mode 100644
index 0000000..f86a4db
--- /dev/null
+++ b/packages/eslint-plugin-turbo/turbo.json
@@ -0,0 +1,9 @@
+{
+  "$schema": "../../docs/public/schema.json",
+  "extends": ["//"],
+  "pipeline": {
+    "test": {
+      "dependsOn": ["build"]
+    }
+  }
+}
diff --git a/packages/node-module-trace/package.json b/packages/node-module-trace/package.json
new file mode 100644
index 0000000..0494edc
--- /dev/null
+++ b/packages/node-module-trace/package.json
@@ -0,0 +1,10 @@
+{
+  "name": "@vercel/experimental-nft",
+  "version": "0.0.5-alpha.0",
+  "description": "Node.js module trace",
+  "license": "MPL-2.0",
+  "alias": "node-file-trace",
+  "publishConfig": {
+    "access": "public"
+  }
+}
diff --git a/packages/tsconfig/README.md b/packages/tsconfig/README.md
new file mode 100644
index 0000000..c78d9de
--- /dev/null
+++ b/packages/tsconfig/README.md
@@ -0,0 +1,3 @@
+# `tsconfig`
+
+A collection of internal tsconfigs shared across the packages in [turborepo/packages/](https://github.com/vercel/turbo/tree/main/packages).
diff --git a/packages/tsconfig/base.json b/packages/tsconfig/base.json
new file mode 100644
index 0000000..aab01c7
--- /dev/null
+++ b/packages/tsconfig/base.json
@@ -0,0 +1,20 @@
+{
+  "$schema": "https://json.schemastore.org/tsconfig",
+  "display": "Default",
+  "compilerOptions": {
+    "composite": false,
+    "declaration": true,
+    "declarationMap": true,
+    "esModuleInterop": true,
+    "forceConsistentCasingInFileNames": true,
+    "inlineSources": false,
+    "isolatedModules": true,
+    "moduleResolution": "node",
+    "noUnusedLocals": false,
+    "noUnusedParameters": false,
+    "preserveWatchOutput": true,
+    "skipLibCheck": true,
+    "strict": true
+  },
+  "exclude": ["node_modules", "dist"]
+}
diff --git a/packages/tsconfig/library.json b/packages/tsconfig/library.json
new file mode 100644
index 0000000..66db367
--- /dev/null
+++ b/packages/tsconfig/library.json
@@ -0,0 +1,12 @@
+{
+  "extends": "./base.json",
+  "display": "TS Library",
+  "compilerOptions": {
+    "lib": ["ES2019"],
+    "target": "ES2019",
+    "skipLibCheck": true,
+    "resolveJsonModule": true,
+    "outDir": "dist",
+    "allowJs": false
+  }
+}
diff --git a/packages/tsconfig/package.json b/packages/tsconfig/package.json
new file mode 100644
index 0000000..913e344
--- /dev/null
+++ b/packages/tsconfig/package.json
@@ -0,0 +1,5 @@
+{
+  "name": "@turbo/tsconfig",
+  "version": "0.0.0",
+  "private": true
+}
diff --git a/packages/turbo-codemod/LICENSE b/packages/turbo-codemod/LICENSE
new file mode 100644
index 0000000..fa0086a
--- /dev/null
+++ b/packages/turbo-codemod/LICENSE
@@ -0,0 +1,373 @@
+Mozilla Public License Version 2.0
+==================================
+
+1. Definitions
+--------------
+
+1.1. "Contributor"
+    means each individual or legal entity that creates, contributes to
+    the creation of, or owns Covered Software.
+
+1.2. "Contributor Version"
+    means the combination of the Contributions of others (if any) used
+    by a Contributor and that particular Contributor's Contribution.
+
+1.3. "Contribution"
+    means Covered Software of a particular Contributor.
+ +1.4. "Covered Software" + means Source Code Form to which the initial Contributor has attached + the notice in Exhibit A, the Executable Form of such Source Code + Form, and Modifications of such Source Code Form, in each case + including portions thereof. + +1.5. "Incompatible With Secondary Licenses" + means + + (a) that the initial Contributor has attached the notice described + in Exhibit B to the Covered Software; or + + (b) that the Covered Software was made available under the terms of + version 1.1 or earlier of the License, but not also under the + terms of a Secondary License. + +1.6. "Executable Form" + means any form of the work other than Source Code Form. + +1.7. "Larger Work" + means a work that combines Covered Software with other material, in + a separate file or files, that is not Covered Software. + +1.8. "License" + means this document. + +1.9. "Licensable" + means having the right to grant, to the maximum extent possible, + whether at the time of the initial grant or subsequently, any and + all of the rights conveyed by this License. + +1.10. "Modifications" + means any of the following: + + (a) any file in Source Code Form that results from an addition to, + deletion from, or modification of the contents of Covered + Software; or + + (b) any new file in Source Code Form that contains any Covered + Software. + +1.11. "Patent Claims" of a Contributor + means any patent claim(s), including without limitation, method, + process, and apparatus claims, in any patent Licensable by such + Contributor that would be infringed, but for the grant of the + License, by the making, using, selling, offering for sale, having + made, import, or transfer of either its Contributions or its + Contributor Version. + +1.12. "Secondary License" + means either the GNU General Public License, Version 2.0, the GNU + Lesser General Public License, Version 2.1, the GNU Affero General + Public License, Version 3.0, or any later versions of those + licenses. + +1.13. "Source Code Form" + means the form of the work preferred for making modifications. + +1.14. "You" (or "Your") + means an individual or a legal entity exercising rights under this + License. For legal entities, "You" includes any entity that + controls, is controlled by, or is under common control with You. For + purposes of this definition, "control" means (a) the power, direct + or indirect, to cause the direction or management of such entity, + whether by contract or otherwise, or (b) ownership of more than + fifty percent (50%) of the outstanding shares or beneficial + ownership of such entity. + +2. License Grants and Conditions +-------------------------------- + +2.1. Grants + +Each Contributor hereby grants You a world-wide, royalty-free, +non-exclusive license: + +(a) under intellectual property rights (other than patent or trademark) + Licensable by such Contributor to use, reproduce, make available, + modify, display, perform, distribute, and otherwise exploit its + Contributions, either on an unmodified basis, with Modifications, or + as part of a Larger Work; and + +(b) under Patent Claims of such Contributor to make, use, sell, offer + for sale, have made, import, and otherwise transfer either its + Contributions or its Contributor Version. + +2.2. Effective Date + +The licenses granted in Section 2.1 with respect to any Contribution +become effective for each Contribution on the date the Contributor first +distributes such Contribution. + +2.3. 
Limitations on Grant Scope + +The licenses granted in this Section 2 are the only rights granted under +this License. No additional rights or licenses will be implied from the +distribution or licensing of Covered Software under this License. +Notwithstanding Section 2.1(b) above, no patent license is granted by a +Contributor: + +(a) for any code that a Contributor has removed from Covered Software; + or + +(b) for infringements caused by: (i) Your and any other third party's + modifications of Covered Software, or (ii) the combination of its + Contributions with other software (except as part of its Contributor + Version); or + +(c) under Patent Claims infringed by Covered Software in the absence of + its Contributions. + +This License does not grant any rights in the trademarks, service marks, +or logos of any Contributor (except as may be necessary to comply with +the notice requirements in Section 3.4). + +2.4. Subsequent Licenses + +No Contributor makes additional grants as a result of Your choice to +distribute the Covered Software under a subsequent version of this +License (see Section 10.2) or under the terms of a Secondary License (if +permitted under the terms of Section 3.3). + +2.5. Representation + +Each Contributor represents that the Contributor believes its +Contributions are its original creation(s) or it has sufficient rights +to grant the rights to its Contributions conveyed by this License. + +2.6. Fair Use + +This License is not intended to limit any rights You have under +applicable copyright doctrines of fair use, fair dealing, or other +equivalents. + +2.7. Conditions + +Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted +in Section 2.1. + +3. Responsibilities +------------------- + +3.1. Distribution of Source Form + +All distribution of Covered Software in Source Code Form, including any +Modifications that You create or to which You contribute, must be under +the terms of this License. You must inform recipients that the Source +Code Form of the Covered Software is governed by the terms of this +License, and how they can obtain a copy of this License. You may not +attempt to alter or restrict the recipients' rights in the Source Code +Form. + +3.2. Distribution of Executable Form + +If You distribute Covered Software in Executable Form then: + +(a) such Covered Software must also be made available in Source Code + Form, as described in Section 3.1, and You must inform recipients of + the Executable Form how they can obtain a copy of such Source Code + Form by reasonable means in a timely manner, at a charge no more + than the cost of distribution to the recipient; and + +(b) You may distribute such Executable Form under the terms of this + License, or sublicense it under different terms, provided that the + license for the Executable Form does not attempt to limit or alter + the recipients' rights in the Source Code Form under this License. + +3.3. Distribution of a Larger Work + +You may create and distribute a Larger Work under terms of Your choice, +provided that You also comply with the requirements of this License for +the Covered Software. 
If the Larger Work is a combination of Covered +Software with a work governed by one or more Secondary Licenses, and the +Covered Software is not Incompatible With Secondary Licenses, this +License permits You to additionally distribute such Covered Software +under the terms of such Secondary License(s), so that the recipient of +the Larger Work may, at their option, further distribute the Covered +Software under the terms of either this License or such Secondary +License(s). + +3.4. Notices + +You may not remove or alter the substance of any license notices +(including copyright notices, patent notices, disclaimers of warranty, +or limitations of liability) contained within the Source Code Form of +the Covered Software, except that You may alter any license notices to +the extent required to remedy known factual inaccuracies. + +3.5. Application of Additional Terms + +You may choose to offer, and to charge a fee for, warranty, support, +indemnity or liability obligations to one or more recipients of Covered +Software. However, You may do so only on Your own behalf, and not on +behalf of any Contributor. You must make it absolutely clear that any +such warranty, support, indemnity, or liability obligation is offered by +You alone, and You hereby agree to indemnify every Contributor for any +liability incurred by such Contributor as a result of warranty, support, +indemnity or liability terms You offer. You may include additional +disclaimers of warranty and limitations of liability specific to any +jurisdiction. + +4. Inability to Comply Due to Statute or Regulation +--------------------------------------------------- + +If it is impossible for You to comply with any of the terms of this +License with respect to some or all of the Covered Software due to +statute, judicial order, or regulation then You must: (a) comply with +the terms of this License to the maximum extent possible; and (b) +describe the limitations and the code they affect. Such description must +be placed in a text file included with all distributions of the Covered +Software under this License. Except to the extent prohibited by statute +or regulation, such description must be sufficiently detailed for a +recipient of ordinary skill to be able to understand it. + +5. Termination +-------------- + +5.1. The rights granted under this License will terminate automatically +if You fail to comply with any of its terms. However, if You become +compliant, then the rights granted under this License from a particular +Contributor are reinstated (a) provisionally, unless and until such +Contributor explicitly and finally terminates Your grants, and (b) on an +ongoing basis, if such Contributor fails to notify You of the +non-compliance by some reasonable means prior to 60 days after You have +come back into compliance. Moreover, Your grants from a particular +Contributor are reinstated on an ongoing basis if such Contributor +notifies You of the non-compliance by some reasonable means, this is the +first time You have received notice of non-compliance with this License +from such Contributor, and You become compliant prior to 30 days after +Your receipt of the notice. + +5.2. 
If You initiate litigation against any entity by asserting a patent +infringement claim (excluding declaratory judgment actions, +counter-claims, and cross-claims) alleging that a Contributor Version +directly or indirectly infringes any patent, then the rights granted to +You by any and all Contributors for the Covered Software under Section +2.1 of this License shall terminate. + +5.3. In the event of termination under Sections 5.1 or 5.2 above, all +end user license agreements (excluding distributors and resellers) which +have been validly granted by You or Your distributors under this License +prior to termination shall survive termination. + +************************************************************************ +* * +* 6. Disclaimer of Warranty * +* ------------------------- * +* * +* Covered Software is provided under this License on an "as is" * +* basis, without warranty of any kind, either expressed, implied, or * +* statutory, including, without limitation, warranties that the * +* Covered Software is free of defects, merchantable, fit for a * +* particular purpose or non-infringing. The entire risk as to the * +* quality and performance of the Covered Software is with You. * +* Should any Covered Software prove defective in any respect, You * +* (not any Contributor) assume the cost of any necessary servicing, * +* repair, or correction. This disclaimer of warranty constitutes an * +* essential part of this License. No use of any Covered Software is * +* authorized under this License except under this disclaimer. * +* * +************************************************************************ + +************************************************************************ +* * +* 7. Limitation of Liability * +* -------------------------- * +* * +* Under no circumstances and under no legal theory, whether tort * +* (including negligence), contract, or otherwise, shall any * +* Contributor, or anyone who distributes Covered Software as * +* permitted above, be liable to You for any direct, indirect, * +* special, incidental, or consequential damages of any character * +* including, without limitation, damages for lost profits, loss of * +* goodwill, work stoppage, computer failure or malfunction, or any * +* and all other commercial damages or losses, even if such party * +* shall have been informed of the possibility of such damages. This * +* limitation of liability shall not apply to liability for death or * +* personal injury resulting from such party's negligence to the * +* extent applicable law prohibits such limitation. Some * +* jurisdictions do not allow the exclusion or limitation of * +* incidental or consequential damages, so this exclusion and * +* limitation may not apply to You. * +* * +************************************************************************ + +8. Litigation +------------- + +Any litigation relating to this License may be brought only in the +courts of a jurisdiction where the defendant maintains its principal +place of business and such litigation shall be governed by laws of that +jurisdiction, without reference to its conflict-of-law provisions. +Nothing in this Section shall prevent a party's ability to bring +cross-claims or counter-claims. + +9. Miscellaneous +---------------- + +This License represents the complete agreement concerning the subject +matter hereof. If any provision of this License is held to be +unenforceable, such provision shall be reformed only to the extent +necessary to make it enforceable. 
Any law or regulation which provides
+that the language of a contract shall be construed against the drafter
+shall not be used to construe this License against a Contributor.
+
+10. Versions of the License
+---------------------------
+
+10.1. New Versions
+
+Mozilla Foundation is the license steward. Except as provided in Section
+10.3, no one other than the license steward has the right to modify or
+publish new versions of this License. Each version will be given a
+distinguishing version number.
+
+10.2. Effect of New Versions
+
+You may distribute the Covered Software under the terms of the version
+of the License under which You originally received the Covered Software,
+or under the terms of any subsequent version published by the license
+steward.
+
+10.3. Modified Versions
+
+If you create software not governed by this License, and you want to
+create a new license for such software, you may create and use a
+modified version of this License if you rename the license and remove
+any references to the name of the license steward (except to note that
+such modified license differs from this License).
+
+10.4. Distributing Source Code Form that is Incompatible With Secondary
+Licenses
+
+If You choose to distribute Source Code Form that is Incompatible With
+Secondary Licenses under the terms of this version of the License, the
+notice described in Exhibit B of this License must be attached.
+
+Exhibit A - Source Code Form License Notice
+-------------------------------------------
+
+  This Source Code Form is subject to the terms of the Mozilla Public
+  License, v. 2.0. If a copy of the MPL was not distributed with this
+  file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+If it is not possible or desirable to put the notice in a particular
+file, then You may include the notice in a location (such as a LICENSE
+file in a relevant directory) where a recipient would be likely to look
+for such a notice.
+
+You may add additional accurate notices of copyright ownership.
+
+Exhibit B - "Incompatible With Secondary Licenses" Notice
+---------------------------------------------------------
+
+  This Source Code Form is "Incompatible With Secondary Licenses", as
+  defined by the Mozilla Public License, v. 2.0.
\ No newline at end of file
diff --git a/packages/turbo-codemod/README.md b/packages/turbo-codemod/README.md
new file mode 100644
index 0000000..5545561
--- /dev/null
+++ b/packages/turbo-codemod/README.md
@@ -0,0 +1,55 @@
+# Turborepo Codemods
+
+Turborepo provides codemod transformations to help upgrade your Turborepo codebase.
+
+Codemods are transformations that run on your codebase programmatically. This allows a large number of changes to be applied without having to manually go through every file.
+
+## Commands
+
+### `migrate`
+
+Updates your Turborepo codebase to the specified version (defaulting to the latest), running any required codemods and installing the new version of Turborepo.
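+
+For example, to preview which codemods would run when moving to the latest
+version, without writing any changes (an illustrative invocation, assuming the
+package is run via `npx`; both flags are documented in the usage text below):
+
+```
+npx @turbo/codemod migrate --dry --print
+```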
+
+```
+Usage: @turbo/codemod migrate|update [options] [path]
+
+Migrate a project to the latest version of Turborepo
+
+Arguments:
+  path              Directory where the transforms should be applied
+
+Options:
+  --from <version>  Specify the version to migrate from (default: current version)
+  --to <version>    Specify the version to migrate to (default: latest)
+  --install         Install new version of turbo after migration (default: true)
+  --force           Bypass Git safety checks and forcibly run codemods (default: false)
+  --dry             Dry run (no changes are made to files) (default: false)
+  --print           Print transformed files to your terminal (default: false)
+  -h, --help        display help for command
+```
+
+### `transform` (default)
+
+Runs a single codemod on your codebase. This is the default command, so the `transform` keyword can be omitted.
+
+```
+Usage: @turbo/codemod transform [options] [transform] [path]
+       @turbo/codemod [options] [transform] [path]
+
+Apply a single code transformation to a project
+
+Arguments:
+  transform   The transformer to run
+  path        Directory where the transforms should be applied
+
+Options:
+  --force     Bypass Git safety checks and forcibly run codemods (default: false)
+  --list      List all available transforms (default: false)
+  --dry       Dry run (no changes are made to files) (default: false)
+  --print     Print transformed files to your terminal (default: false)
+  -h, --help  display help for command
+```
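+
+For example, to run the `add-package-manager` transform (the transform exercised
+by the test fixtures below) against the current directory, an illustrative
+invocation would be:
+
+```
+npx @turbo/codemod add-package-manager .
+```
+
+## Developing
+
+To add a new transformer, run `pnpm add-transformer`, or [view the complete guide](./src/transforms/README.md).
diff --git a/packages/turbo-codemod/__tests__/__fixtures__/add-package-manager/has-package-manager/package.json b/packages/turbo-codemod/__tests__/__fixtures__/add-package-manager/has-package-manager/package.json
new file mode 100644
index 0000000..d6edac5
--- /dev/null
+++ b/packages/turbo-codemod/__tests__/__fixtures__/add-package-manager/has-package-manager/package.json
@@ -0,0 +1,7 @@
+{
+  "name": "has-package-manager",
+  "version": "1.0.0",
+  "dependencies": {},
+  "devDependencies": {},
+  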
"packageManager": "npm@1.2.3", + "turbo": { + "$schema": "https://turbo.build/schema.json", + "pipeline": { + "package-only": { + "cache": false, + "persistent": true + }, + "build": { + "outputs": [ + ".next/**", + "!.next/cache/**" + ] + }, + "lint": { + "outputs": [] + }, + "dev": { + "cache": false + } + } + } +} diff --git a/packages/turbo-codemod/__tests__/__fixtures__/create-turbo-config/both-configs/turbo.json b/packages/turbo-codemod/__tests__/__fixtures__/create-turbo-config/both-configs/turbo.json new file mode 100644 index 0000000..e6eb652 --- /dev/null +++ b/packages/turbo-codemod/__tests__/__fixtures__/create-turbo-config/both-configs/turbo.json @@ -0,0 +1,18 @@ +{ + "$schema": "https://turbo.build/schema.json", + "pipeline": { + "turbo-only": { + "cache": false, + "persistent": true + }, + "build": { + "outputs": [".next/**", "!.next/cache/**"] + }, + "lint": { + "outputs": [] + }, + "dev": { + "cache": false + } + } +} diff --git a/packages/turbo-codemod/__tests__/__fixtures__/create-turbo-config/no-package-json-config/package.json b/packages/turbo-codemod/__tests__/__fixtures__/create-turbo-config/no-package-json-config/package.json new file mode 100644 index 0000000..b965b7d --- /dev/null +++ b/packages/turbo-codemod/__tests__/__fixtures__/create-turbo-config/no-package-json-config/package.json @@ -0,0 +1,7 @@ +{ + "name": "no-turbo-json-config", + "version": "1.0.0", + "dependencies": {}, + "devDependencies": {}, + "packageManager": "npm@1.2.3" +} diff --git a/packages/turbo-codemod/__tests__/__fixtures__/create-turbo-config/no-package-json-file/a-random-file.txt b/packages/turbo-codemod/__tests__/__fixtures__/create-turbo-config/no-package-json-file/a-random-file.txt new file mode 100644 index 0000000..7488fec --- /dev/null +++ b/packages/turbo-codemod/__tests__/__fixtures__/create-turbo-config/no-package-json-file/a-random-file.txt @@ -0,0 +1 @@ +Nothing exists here diff --git a/packages/turbo-codemod/__tests__/__fixtures__/create-turbo-config/no-turbo-json-config/package.json b/packages/turbo-codemod/__tests__/__fixtures__/create-turbo-config/no-turbo-json-config/package.json new file mode 100644 index 0000000..7754c7d --- /dev/null +++ b/packages/turbo-codemod/__tests__/__fixtures__/create-turbo-config/no-turbo-json-config/package.json @@ -0,0 +1,24 @@ +{ + "name": "no-turbo-json-config", + "version": "1.0.0", + "dependencies": {}, + "devDependencies": {}, + "packageManager": "npm@1.2.3", + "turbo": { + "$schema": "https://turbo.build/schema.json", + "pipeline": { + "build": { + "outputs": [ + ".next/**", + "!.next/cache/**" + ] + }, + "lint": { + "outputs": [] + }, + "dev": { + "cache": false + } + } + } +} diff --git a/packages/turbo-codemod/__tests__/__fixtures__/create-turbo-config/turbo-json-config/package.json b/packages/turbo-codemod/__tests__/__fixtures__/create-turbo-config/turbo-json-config/package.json new file mode 100644 index 0000000..a48d0ec --- /dev/null +++ b/packages/turbo-codemod/__tests__/__fixtures__/create-turbo-config/turbo-json-config/package.json @@ -0,0 +1,7 @@ +{ + "name": "both-configs", + "version": "1.0.0", + "dependencies": {}, + "devDependencies": {}, + "packageManager": "npm@1.2.3" +} diff --git a/packages/turbo-codemod/__tests__/__fixtures__/create-turbo-config/turbo-json-config/turbo.json b/packages/turbo-codemod/__tests__/__fixtures__/create-turbo-config/turbo-json-config/turbo.json new file mode 100644 index 0000000..e6eb652 --- /dev/null +++ 
b/packages/turbo-codemod/__tests__/__fixtures__/create-turbo-config/turbo-json-config/turbo.json @@ -0,0 +1,18 @@ +{ + "$schema": "https://turbo.build/schema.json", + "pipeline": { + "turbo-only": { + "cache": false, + "persistent": true + }, + "build": { + "outputs": [".next/**", "!.next/cache/**"] + }, + "lint": { + "outputs": [] + }, + "dev": { + "cache": false + } + } +} diff --git a/packages/turbo-codemod/__tests__/__fixtures__/get-turbo-upgrade-command/no-deps/package.json b/packages/turbo-codemod/__tests__/__fixtures__/get-turbo-upgrade-command/no-deps/package.json new file mode 100644 index 0000000..b632eef --- /dev/null +++ b/packages/turbo-codemod/__tests__/__fixtures__/get-turbo-upgrade-command/no-deps/package.json @@ -0,0 +1,4 @@ +{ + "name": "no-turbo", + "version": "0.0.0" +} diff --git a/packages/turbo-codemod/__tests__/__fixtures__/get-turbo-upgrade-command/no-package/README.md b/packages/turbo-codemod/__tests__/__fixtures__/get-turbo-upgrade-command/no-package/README.md new file mode 100644 index 0000000..64355e7 --- /dev/null +++ b/packages/turbo-codemod/__tests__/__fixtures__/get-turbo-upgrade-command/no-package/README.md @@ -0,0 +1 @@ +Nothing here diff --git a/packages/turbo-codemod/__tests__/__fixtures__/get-turbo-upgrade-command/no-turbo/package.json b/packages/turbo-codemod/__tests__/__fixtures__/get-turbo-upgrade-command/no-turbo/package.json new file mode 100644 index 0000000..524df50 --- /dev/null +++ b/packages/turbo-codemod/__tests__/__fixtures__/get-turbo-upgrade-command/no-turbo/package.json @@ -0,0 +1,6 @@ +{ + "name": "no-turbo", + "version": "0.0.0", + "dependencies": {}, + "devDependencies": {} +} diff --git a/packages/turbo-codemod/__tests__/__fixtures__/get-turbo-upgrade-command/normal-workspaces-dev-install/package.json b/packages/turbo-codemod/__tests__/__fixtures__/get-turbo-upgrade-command/normal-workspaces-dev-install/package.json new file mode 100644 index 0000000..f5b2368 --- /dev/null +++ b/packages/turbo-codemod/__tests__/__fixtures__/get-turbo-upgrade-command/normal-workspaces-dev-install/package.json @@ -0,0 +1,12 @@ +{ + "name": "normal-workspaces", + "version": "0.0.0", + "workspaces": [ + "apps/*", + "packages/*" + ], + "dependencies": {}, + "devDependencies": { + "turbo": "1.0.0" + } +} diff --git a/packages/turbo-codemod/__tests__/__fixtures__/get-turbo-upgrade-command/normal-workspaces/package.json b/packages/turbo-codemod/__tests__/__fixtures__/get-turbo-upgrade-command/normal-workspaces/package.json new file mode 100644 index 0000000..6344a38 --- /dev/null +++ b/packages/turbo-codemod/__tests__/__fixtures__/get-turbo-upgrade-command/normal-workspaces/package.json @@ -0,0 +1,12 @@ +{ + "name": "normal-workspaces", + "version": "0.0.0", + "workspaces": [ + "apps/*", + "packages/*" + ], + "dependencies": { + "turbo": "1.0.0" + }, + "devDependencies": {} +} diff --git a/packages/turbo-codemod/__tests__/__fixtures__/get-turbo-upgrade-command/pnpm-workspaces-dev-install/package.json b/packages/turbo-codemod/__tests__/__fixtures__/get-turbo-upgrade-command/pnpm-workspaces-dev-install/package.json new file mode 100644 index 0000000..5c12f28 --- /dev/null +++ b/packages/turbo-codemod/__tests__/__fixtures__/get-turbo-upgrade-command/pnpm-workspaces-dev-install/package.json @@ -0,0 +1,8 @@ +{ + "name": "pnpm-workspaces", + "version": "0.0.0", + "dependencies": {}, + "devDependencies": { + "turbo": "1.0.0" + } +} diff --git a/packages/turbo-codemod/__tests__/__fixtures__/get-turbo-upgrade-command/pnpm-workspaces-dev-install/pnpm-workspace.yaml 
b/packages/turbo-codemod/__tests__/__fixtures__/get-turbo-upgrade-command/pnpm-workspaces-dev-install/pnpm-workspace.yaml new file mode 100644 index 0000000..3ff5faa --- /dev/null +++ b/packages/turbo-codemod/__tests__/__fixtures__/get-turbo-upgrade-command/pnpm-workspaces-dev-install/pnpm-workspace.yaml @@ -0,0 +1,3 @@ +packages: + - "apps/*" + - "packages/*" diff --git a/packages/turbo-codemod/__tests__/__fixtures__/get-turbo-upgrade-command/pnpm-workspaces/package.json b/packages/turbo-codemod/__tests__/__fixtures__/get-turbo-upgrade-command/pnpm-workspaces/package.json new file mode 100644 index 0000000..fedeb8d --- /dev/null +++ b/packages/turbo-codemod/__tests__/__fixtures__/get-turbo-upgrade-command/pnpm-workspaces/package.json @@ -0,0 +1,8 @@ +{ + "name": "pnpm-workspaces", + "version": "0.0.0", + "dependencies": { + "turbo": "1.0.0" + }, + "devDependencies": {} +} diff --git a/packages/turbo-codemod/__tests__/__fixtures__/get-turbo-upgrade-command/pnpm-workspaces/pnpm-workspace.yaml b/packages/turbo-codemod/__tests__/__fixtures__/get-turbo-upgrade-command/pnpm-workspaces/pnpm-workspace.yaml new file mode 100644 index 0000000..3ff5faa --- /dev/null +++ b/packages/turbo-codemod/__tests__/__fixtures__/get-turbo-upgrade-command/pnpm-workspaces/pnpm-workspace.yaml @@ -0,0 +1,3 @@ +packages: + - "apps/*" + - "packages/*" diff --git a/packages/turbo-codemod/__tests__/__fixtures__/get-turbo-upgrade-command/single-package-dev-install/package.json b/packages/turbo-codemod/__tests__/__fixtures__/get-turbo-upgrade-command/single-package-dev-install/package.json new file mode 100644 index 0000000..38bd995 --- /dev/null +++ b/packages/turbo-codemod/__tests__/__fixtures__/get-turbo-upgrade-command/single-package-dev-install/package.json @@ -0,0 +1,8 @@ +{ + "name": "single-package-dev-install", + "version": "0.0.0", + "dependencies": {}, + "devDependencies": { + "turbo": "1.0.0" + } +} diff --git a/packages/turbo-codemod/__tests__/__fixtures__/get-turbo-upgrade-command/single-package/package.json b/packages/turbo-codemod/__tests__/__fixtures__/get-turbo-upgrade-command/single-package/package.json new file mode 100644 index 0000000..0fd3453 --- /dev/null +++ b/packages/turbo-codemod/__tests__/__fixtures__/get-turbo-upgrade-command/single-package/package.json @@ -0,0 +1,8 @@ +{ + "name": "single-package", + "version": "0.0.0", + "dependencies": { + "turbo": "1.0.0" + }, + "devDependencies": {} +} diff --git a/packages/turbo-codemod/__tests__/__fixtures__/migrate-env-var-dependencies/env-dependencies/turbo.json b/packages/turbo-codemod/__tests__/__fixtures__/migrate-env-var-dependencies/env-dependencies/turbo.json new file mode 100644 index 0000000..bb3e248 --- /dev/null +++ b/packages/turbo-codemod/__tests__/__fixtures__/migrate-env-var-dependencies/env-dependencies/turbo.json @@ -0,0 +1,21 @@ +{ + "$schema": "https://turbo.build/schema.json", + "globalDependencies": ["$NEXT_PUBLIC_API_KEY", "$STRIPE_API_KEY", ".env"], + "pipeline": { + "build": { + "outputs": [".next/**", "!.next/cache/**"], + "dependsOn": ["^build", "$PROD_API_KEY"] + }, + "lint": { + "outputs": [], + "dependsOn": ["$IS_CI"] + }, + "test": { + "outputs": [], + "dependsOn": ["$IS_CI", "test"] + }, + "dev": { + "cache": false + } + } +} diff --git a/packages/turbo-codemod/__tests__/__fixtures__/migrate-env-var-dependencies/migrated-env-dependencies/turbo.json b/packages/turbo-codemod/__tests__/__fixtures__/migrate-env-var-dependencies/migrated-env-dependencies/turbo.json new file mode 100644 index 0000000..9217af6 --- /dev/null 
+++ b/packages/turbo-codemod/__tests__/__fixtures__/migrate-env-var-dependencies/migrated-env-dependencies/turbo.json @@ -0,0 +1,25 @@ +{ + "$schema": "https://turbo.build/schema.json", + "globalDependencies": [], + "globalEnv": ["NEXT_PUBLIC_API_KEY", "STRIPE_API_KEY"], + "pipeline": { + "build": { + "dependsOn": ["^build"], + "env": ["PROD_API_KEY"], + "outputs": [".next/**", "!.next/cache/**"] + }, + "dev": { + "cache": false + }, + "lint": { + "dependsOn": [], + "env": ["IS_CI"], + "outputs": [] + }, + "test": { + "dependsOn": ["test"], + "env": ["IS_CI"], + "outputs": [] + } + } +} diff --git a/packages/turbo-codemod/__tests__/__fixtures__/migrate-env-var-dependencies/no-turbo-json/package.json b/packages/turbo-codemod/__tests__/__fixtures__/migrate-env-var-dependencies/no-turbo-json/package.json new file mode 100644 index 0000000..83443be --- /dev/null +++ b/packages/turbo-codemod/__tests__/__fixtures__/migrate-env-var-dependencies/no-turbo-json/package.json @@ -0,0 +1,7 @@ +{ + "name": "no-turbo-json", + "version": "1.0.0", + "dependencies": {}, + "devDependencies": {}, + "packageManager": "npm@1.2.3" +} diff --git a/packages/turbo-codemod/__tests__/__fixtures__/migrate-env-var-dependencies/old-config/package.json b/packages/turbo-codemod/__tests__/__fixtures__/migrate-env-var-dependencies/old-config/package.json new file mode 100644 index 0000000..6774d3c --- /dev/null +++ b/packages/turbo-codemod/__tests__/__fixtures__/migrate-env-var-dependencies/old-config/package.json @@ -0,0 +1,20 @@ +{ + "name": "migrate-env-var-dependencies-old-config", + "version": "1.0.0", + "dependencies": {}, + "devDependencies": {}, + "packageManager": "npm@1.2.3", + "turbo": { + "pipeline": { + "build-one": { + "outputs": [ + "foo" + ] + }, + "build-two": { + "outputs": [] + }, + "build-three": {} + } + } +} diff --git a/packages/turbo-codemod/__tests__/__fixtures__/migrate-env-var-dependencies/old-config/turbo.json b/packages/turbo-codemod/__tests__/__fixtures__/migrate-env-var-dependencies/old-config/turbo.json new file mode 100644 index 0000000..b0f6150 --- /dev/null +++ b/packages/turbo-codemod/__tests__/__fixtures__/migrate-env-var-dependencies/old-config/turbo.json @@ -0,0 +1,12 @@ +{ + "$schema": "https://turbo.build/schema.json", + "pipeline": { + "build-one": { + "outputs": ["foo"] + }, + "build-two": { + "outputs": [] + }, + "build-three": {} + } +} diff --git a/packages/turbo-codemod/__tests__/__fixtures__/migrate-env-var-dependencies/workspace-configs/apps/docs/index.js b/packages/turbo-codemod/__tests__/__fixtures__/migrate-env-var-dependencies/workspace-configs/apps/docs/index.js new file mode 100644 index 0000000..4de53f5 --- /dev/null +++ b/packages/turbo-codemod/__tests__/__fixtures__/migrate-env-var-dependencies/workspace-configs/apps/docs/index.js @@ -0,0 +1,6 @@ +export default function docs() { + if (process.env.ENV_1 === undefined) { + return "does not exist"; + } + return "exists"; +} diff --git a/packages/turbo-codemod/__tests__/__fixtures__/migrate-env-var-dependencies/workspace-configs/apps/docs/package.json b/packages/turbo-codemod/__tests__/__fixtures__/migrate-env-var-dependencies/workspace-configs/apps/docs/package.json new file mode 100644 index 0000000..82f9a44 --- /dev/null +++ b/packages/turbo-codemod/__tests__/__fixtures__/migrate-env-var-dependencies/workspace-configs/apps/docs/package.json @@ -0,0 +1,4 @@ +{ + "name": "docs", + "version": "1.0.0" +} diff --git 
a/packages/turbo-codemod/__tests__/__fixtures__/migrate-env-var-dependencies/workspace-configs/apps/docs/turbo.json b/packages/turbo-codemod/__tests__/__fixtures__/migrate-env-var-dependencies/workspace-configs/apps/docs/turbo.json new file mode 100644 index 0000000..a3713ef --- /dev/null +++ b/packages/turbo-codemod/__tests__/__fixtures__/migrate-env-var-dependencies/workspace-configs/apps/docs/turbo.json @@ -0,0 +1,9 @@ +{ + "$schema": "https://turbo.build/schema.json", + "extends": ["//"], + "pipeline": { + "build": { + "env": ["ENV_3"] + } + } +} diff --git a/packages/turbo-codemod/__tests__/__fixtures__/migrate-env-var-dependencies/workspace-configs/apps/web/index.js b/packages/turbo-codemod/__tests__/__fixtures__/migrate-env-var-dependencies/workspace-configs/apps/web/index.js new file mode 100644 index 0000000..bfd3ab8 --- /dev/null +++ b/packages/turbo-codemod/__tests__/__fixtures__/migrate-env-var-dependencies/workspace-configs/apps/web/index.js @@ -0,0 +1,6 @@ +export default function web() { + if (!process.env.ENV_2) { + return "bar"; + } + return "foo"; +} diff --git a/packages/turbo-codemod/__tests__/__fixtures__/migrate-env-var-dependencies/workspace-configs/apps/web/package.json b/packages/turbo-codemod/__tests__/__fixtures__/migrate-env-var-dependencies/workspace-configs/apps/web/package.json new file mode 100644 index 0000000..d8a83ed --- /dev/null +++ b/packages/turbo-codemod/__tests__/__fixtures__/migrate-env-var-dependencies/workspace-configs/apps/web/package.json @@ -0,0 +1,4 @@ +{ + "name": "web", + "version": "1.0.0" +} diff --git a/packages/turbo-codemod/__tests__/__fixtures__/migrate-env-var-dependencies/workspace-configs/apps/web/turbo.json b/packages/turbo-codemod/__tests__/__fixtures__/migrate-env-var-dependencies/workspace-configs/apps/web/turbo.json new file mode 100644 index 0000000..dd69c31 --- /dev/null +++ b/packages/turbo-codemod/__tests__/__fixtures__/migrate-env-var-dependencies/workspace-configs/apps/web/turbo.json @@ -0,0 +1,12 @@ +{ + "$schema": "https://turbo.build/schema.json", + "extends": ["//"], + "pipeline": { + "build": { + // old + "dependsOn": ["build", "$ENV_2"], + // new + "env": ["ENV_1"] + } + } +} diff --git a/packages/turbo-codemod/__tests__/__fixtures__/migrate-env-var-dependencies/workspace-configs/package.json b/packages/turbo-codemod/__tests__/__fixtures__/migrate-env-var-dependencies/workspace-configs/package.json new file mode 100644 index 0000000..c6616a6 --- /dev/null +++ b/packages/turbo-codemod/__tests__/__fixtures__/migrate-env-var-dependencies/workspace-configs/package.json @@ -0,0 +1,14 @@ +{ + "private": true, + "workspaces": [ + "apps/*", + "packages/*" + ], + "scripts": { + "build": "turbo run build" + }, + "devDependencies": { + "turbo": "latest" + }, + "packageManager": "yarn@1.22.19" +} diff --git a/packages/turbo-codemod/__tests__/__fixtures__/migrate-env-var-dependencies/workspace-configs/packages/ui/index.js b/packages/turbo-codemod/__tests__/__fixtures__/migrate-env-var-dependencies/workspace-configs/packages/ui/index.js new file mode 100644 index 0000000..dee5e80 --- /dev/null +++ b/packages/turbo-codemod/__tests__/__fixtures__/migrate-env-var-dependencies/workspace-configs/packages/ui/index.js @@ -0,0 +1,6 @@ +export default function foo() { + if (!process.env.IS_SERVER) { + return "bar"; + } + return "foo"; +} diff --git a/packages/turbo-codemod/__tests__/__fixtures__/migrate-env-var-dependencies/workspace-configs/packages/ui/package.json 
b/packages/turbo-codemod/__tests__/__fixtures__/migrate-env-var-dependencies/workspace-configs/packages/ui/package.json new file mode 100644 index 0000000..7cb7cf1 --- /dev/null +++ b/packages/turbo-codemod/__tests__/__fixtures__/migrate-env-var-dependencies/workspace-configs/packages/ui/package.json @@ -0,0 +1,4 @@ +{ + "name": "ui", + "version": "1.0.0" +} diff --git a/packages/turbo-codemod/__tests__/__fixtures__/migrate-env-var-dependencies/workspace-configs/packages/ui/turbo.json b/packages/turbo-codemod/__tests__/__fixtures__/migrate-env-var-dependencies/workspace-configs/packages/ui/turbo.json new file mode 100644 index 0000000..6ce7b30 --- /dev/null +++ b/packages/turbo-codemod/__tests__/__fixtures__/migrate-env-var-dependencies/workspace-configs/packages/ui/turbo.json @@ -0,0 +1,9 @@ +{ + "$schema": "https://turbo.build/schema.json", + "extends": ["//"], + "pipeline": { + "build": { + "dependsOn": ["$IS_SERVER"] + } + } +} diff --git a/packages/turbo-codemod/__tests__/__fixtures__/migrate-env-var-dependencies/workspace-configs/turbo.json b/packages/turbo-codemod/__tests__/__fixtures__/migrate-env-var-dependencies/workspace-configs/turbo.json new file mode 100644 index 0000000..718e461 --- /dev/null +++ b/packages/turbo-codemod/__tests__/__fixtures__/migrate-env-var-dependencies/workspace-configs/turbo.json @@ -0,0 +1,21 @@ +{ + "$schema": "https://turbo.build/schema.json", + "globalDependencies": ["$NEXT_PUBLIC_API_KEY", "$STRIPE_API_KEY", ".env"], + "pipeline": { + "build": { + "outputs": [".next/**", "!.next/cache/**"], + "dependsOn": ["^build", "$PROD_API_KEY"] + }, + "lint": { + "outputs": [], + "dependsOn": ["$IS_TEST"] + }, + "test": { + "outputs": [], + "dependsOn": ["$IS_CI", "test"] + }, + "dev": { + "cache": false + } + } +} diff --git a/packages/turbo-codemod/__tests__/__fixtures__/migrate/no-repo/README.md b/packages/turbo-codemod/__tests__/__fixtures__/migrate/no-repo/README.md new file mode 100644 index 0000000..64355e7 --- /dev/null +++ b/packages/turbo-codemod/__tests__/__fixtures__/migrate/no-repo/README.md @@ -0,0 +1 @@ +Nothing here diff --git a/packages/turbo-codemod/__tests__/__fixtures__/migrate/old-turbo/package.json b/packages/turbo-codemod/__tests__/__fixtures__/migrate/old-turbo/package.json new file mode 100644 index 0000000..62959b8 --- /dev/null +++ b/packages/turbo-codemod/__tests__/__fixtures__/migrate/old-turbo/package.json @@ -0,0 +1,26 @@ +{ + "name": "no-turbo-json", + "version": "1.0.0", + "dependencies": {}, + "devDependencies": { + "turbo": "1.0.0" + }, + "turbo": { + "$schema": "https://turbo.build/schema.json", + "pipeline": { + "build": { + "outputs": [ + ".next/**", + "!.next/cache/**" + ] + }, + "lint": { + "outputs": [] + }, + "test": {}, + "dev": { + "cache": false + } + } + } +} diff --git a/packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/invalid-outputs/package.json b/packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/invalid-outputs/package.json new file mode 100644 index 0000000..6b50aac --- /dev/null +++ b/packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/invalid-outputs/package.json @@ -0,0 +1,7 @@ +{ + "name": "invalid-outputs", + "version": "1.0.0", + "dependencies": {}, + "devDependencies": {}, + "packageManager": "npm@1.2.3" +} diff --git a/packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/invalid-outputs/turbo.json b/packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/invalid-outputs/turbo.json new file mode 100644 index 0000000..33c2b93 --- 
/dev/null +++ b/packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/invalid-outputs/turbo.json @@ -0,0 +1,36 @@ +{ + "$schema": "https://turbo.build/schema.json", + "pipeline": { + "build-one": { + "outputs": ["foo"] + }, + "build-two": { + "outputs": [] + }, + "build-three": {}, + "garbage-in-numeric-0": { + "outputs": 0 + }, + "garbage-in-numeric": { + "outputs": 42 + }, + "garbage-in-string": { + "outputs": "string" + }, + "garbage-in-empty-string": { + "outputs": "" + }, + "garbage-in-null": { + "outputs": null + }, + "garbage-in-false": { + "outputs": false + }, + "garbage-in-true": { + "outputs": true + }, + "garbage-in-object": { + "outputs": {} + } + } +} diff --git a/packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/no-outputs/package.json b/packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/no-outputs/package.json new file mode 100644 index 0000000..4e17dc1 --- /dev/null +++ b/packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/no-outputs/package.json @@ -0,0 +1,7 @@ +{ + "name": "no-outputs", + "version": "1.0.0", + "dependencies": {}, + "devDependencies": {}, + "packageManager": "npm@1.2.3" +} diff --git a/packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/no-outputs/turbo.json b/packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/no-outputs/turbo.json new file mode 100644 index 0000000..f5d57fc --- /dev/null +++ b/packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/no-outputs/turbo.json @@ -0,0 +1,14 @@ +{ + "$schema": "https://turbo.build/schema.json", + "pipeline": { + "build-one": { + "dependsOn": ["build-two"] + }, + "build-two": { + "cache": false + }, + "build-three": { + "persistent": true + } + } +} diff --git a/packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/no-pipeline/package.json b/packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/no-pipeline/package.json new file mode 100644 index 0000000..6e20fc8 --- /dev/null +++ b/packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/no-pipeline/package.json @@ -0,0 +1,7 @@ +{ + "name": "no-pipeline", + "version": "1.0.0", + "dependencies": {}, + "devDependencies": {}, + "packageManager": "npm@1.2.3" +} diff --git a/packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/no-pipeline/turbo.json b/packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/no-pipeline/turbo.json new file mode 100644 index 0000000..0e2d6fd --- /dev/null +++ b/packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/no-pipeline/turbo.json @@ -0,0 +1,5 @@ +{ + "$schema": "https://turbo.build/schema.json", + "globalDependencies": ["$NEXT_PUBLIC_API_KEY", "$STRIPE_API_KEY", ".env"], + "pipeline": {} +} diff --git a/packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/no-turbo-json/package.json b/packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/no-turbo-json/package.json new file mode 100644 index 0000000..cd98334 --- /dev/null +++ b/packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/no-turbo-json/package.json @@ -0,0 +1,7 @@ +{ + "name": "set-default-outputs-no-turbo-json", + "version": "1.0.0", + "dependencies": {}, + "devDependencies": {}, + "packageManager": "npm@1.2.3" +} diff --git a/packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/old-config/package.json b/packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/old-config/package.json new file mode 100644 index 0000000..4c816c2 --- /dev/null +++ 
b/packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/old-config/package.json @@ -0,0 +1,20 @@ +{ + "name": "set-default-outputs-old-config", + "version": "1.0.0", + "dependencies": {}, + "devDependencies": {}, + "packageManager": "npm@1.2.3", + "turbo": { + "pipeline": { + "build-one": { + "outputs": [ + "foo" + ] + }, + "build-two": { + "outputs": [] + }, + "build-three": {} + } + } +} diff --git a/packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/old-config/turbo.json b/packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/old-config/turbo.json new file mode 100644 index 0000000..b0f6150 --- /dev/null +++ b/packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/old-config/turbo.json @@ -0,0 +1,12 @@ +{ + "$schema": "https://turbo.build/schema.json", + "pipeline": { + "build-one": { + "outputs": ["foo"] + }, + "build-two": { + "outputs": [] + }, + "build-three": {} + } +} diff --git a/packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/old-outputs/package.json b/packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/old-outputs/package.json new file mode 100644 index 0000000..e4220ba --- /dev/null +++ b/packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/old-outputs/package.json @@ -0,0 +1,7 @@ +{ + "name": "old-outputs", + "version": "1.0.0", + "dependencies": {}, + "devDependencies": {}, + "packageManager": "npm@1.2.3" +} diff --git a/packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/old-outputs/turbo.json b/packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/old-outputs/turbo.json new file mode 100644 index 0000000..b0f6150 --- /dev/null +++ b/packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/old-outputs/turbo.json @@ -0,0 +1,12 @@ +{ + "$schema": "https://turbo.build/schema.json", + "pipeline": { + "build-one": { + "outputs": ["foo"] + }, + "build-two": { + "outputs": [] + }, + "build-three": {} + } +} diff --git a/packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/workspace-configs/apps/docs/index.js b/packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/workspace-configs/apps/docs/index.js new file mode 100644 index 0000000..4de53f5 --- /dev/null +++ b/packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/workspace-configs/apps/docs/index.js @@ -0,0 +1,6 @@ +export default function docs() { + if (process.env.ENV_1 === undefined) { + return "does not exist"; + } + return "exists"; +} diff --git a/packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/workspace-configs/apps/docs/package.json b/packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/workspace-configs/apps/docs/package.json new file mode 100644 index 0000000..82f9a44 --- /dev/null +++ b/packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/workspace-configs/apps/docs/package.json @@ -0,0 +1,4 @@ +{ + "name": "docs", + "version": "1.0.0" +} diff --git a/packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/workspace-configs/apps/docs/turbo.json b/packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/workspace-configs/apps/docs/turbo.json new file mode 100644 index 0000000..e60cdb7 --- /dev/null +++ b/packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/workspace-configs/apps/docs/turbo.json @@ -0,0 +1,7 @@ +{ + "$schema": "https://turbo.build/schema.json", + "extends": ["//"], + "pipeline": { + "build": {} + } +} diff --git 
a/packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/workspace-configs/apps/web/index.js b/packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/workspace-configs/apps/web/index.js new file mode 100644 index 0000000..bfd3ab8 --- /dev/null +++ b/packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/workspace-configs/apps/web/index.js @@ -0,0 +1,6 @@ +export default function web() { + if (!process.env.ENV_2) { + return "bar"; + } + return "foo"; +} diff --git a/packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/workspace-configs/apps/web/package.json b/packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/workspace-configs/apps/web/package.json new file mode 100644 index 0000000..d8a83ed --- /dev/null +++ b/packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/workspace-configs/apps/web/package.json @@ -0,0 +1,4 @@ +{ + "name": "web", + "version": "1.0.0" +} diff --git a/packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/workspace-configs/apps/web/turbo.json b/packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/workspace-configs/apps/web/turbo.json new file mode 100644 index 0000000..b239cbf --- /dev/null +++ b/packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/workspace-configs/apps/web/turbo.json @@ -0,0 +1,10 @@ +{ + "$schema": "https://turbo.build/schema.json", + "extends": ["//"], + "pipeline": { + "build": { + // old + "outputs": [] + } + } +} diff --git a/packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/workspace-configs/package.json b/packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/workspace-configs/package.json new file mode 100644 index 0000000..c6616a6 --- /dev/null +++ b/packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/workspace-configs/package.json @@ -0,0 +1,14 @@ +{ + "private": true, + "workspaces": [ + "apps/*", + "packages/*" + ], + "scripts": { + "build": "turbo run build" + }, + "devDependencies": { + "turbo": "latest" + }, + "packageManager": "yarn@1.22.19" +} diff --git a/packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/workspace-configs/packages/ui/index.js b/packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/workspace-configs/packages/ui/index.js new file mode 100644 index 0000000..dee5e80 --- /dev/null +++ b/packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/workspace-configs/packages/ui/index.js @@ -0,0 +1,6 @@ +export default function foo() { + if (!process.env.IS_SERVER) { + return "bar"; + } + return "foo"; +} diff --git a/packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/workspace-configs/packages/ui/package.json b/packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/workspace-configs/packages/ui/package.json new file mode 100644 index 0000000..7cb7cf1 --- /dev/null +++ b/packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/workspace-configs/packages/ui/package.json @@ -0,0 +1,4 @@ +{ + "name": "ui", + "version": "1.0.0" +} diff --git a/packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/workspace-configs/packages/ui/turbo.json b/packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/workspace-configs/packages/ui/turbo.json new file mode 100644 index 0000000..fe51119 --- /dev/null +++ b/packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/workspace-configs/packages/ui/turbo.json @@ -0,0 +1,7 @@ +{ + "$schema": "https://turbo.build/schema.json", + "extends": ["//"], + 
"pipeline": { + "build-three": {} + } +} diff --git a/packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/workspace-configs/turbo.json b/packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/workspace-configs/turbo.json new file mode 100644 index 0000000..b0f6150 --- /dev/null +++ b/packages/turbo-codemod/__tests__/__fixtures__/set-default-outputs/workspace-configs/turbo.json @@ -0,0 +1,12 @@ +{ + "$schema": "https://turbo.build/schema.json", + "pipeline": { + "build-one": { + "outputs": ["foo"] + }, + "build-two": { + "outputs": [] + }, + "build-three": {} + } +} diff --git a/packages/turbo-codemod/__tests__/__fixtures__/transform/basic/package.json b/packages/turbo-codemod/__tests__/__fixtures__/transform/basic/package.json new file mode 100644 index 0000000..651edb6 --- /dev/null +++ b/packages/turbo-codemod/__tests__/__fixtures__/transform/basic/package.json @@ -0,0 +1,8 @@ +{ + "name": "transform-basic", + "version": "1.0.0", + "dependencies": {}, + "devDependencies": { + "turbo": "1.0.0" + } +} diff --git a/packages/turbo-codemod/__tests__/add-package-manager.test.ts b/packages/turbo-codemod/__tests__/add-package-manager.test.ts new file mode 100644 index 0000000..5bde7e0 --- /dev/null +++ b/packages/turbo-codemod/__tests__/add-package-manager.test.ts @@ -0,0 +1,504 @@ +import { transformer } from "../src/transforms/add-package-manager"; +import { setupTestFixtures } from "@turbo/test-utils"; +import fs from "fs-extra"; +import * as getPackageManager from "../src/utils/getPackageManager"; +import * as getPackageManagerVersion from "../src/utils/getPackageManagerVersion"; + +describe("add-package-manager", () => { + const { useFixture } = setupTestFixtures({ + directory: __dirname, + test: "add-package-manager", + }); + test("no package manager - basic", () => { + // load the fixture for the test + const { root, read } = useFixture({ fixture: "no-package-manager" }); + + const packageManager = "pnpm"; + const packageManagerVersion = "1.2.3"; + + // mock out workspace and version detection so we're not dependent on our actual repo + const mockGetPackageManagerVersion = jest + .spyOn(getPackageManagerVersion, "default") + .mockReturnValue(packageManagerVersion); + + const mockGetPackageManager = jest + .spyOn(getPackageManager, "default") + .mockReturnValue(packageManager); + + // package manager should not exist + expect( + JSON.parse(read("package.json") || "{}").packageManager + ).toBeUndefined(); + // run the transformer + const result = transformer({ + root, + options: { force: false, dry: false, print: false }, + }); + + expect(mockGetPackageManager).toHaveBeenCalledWith({ directory: root }); + expect(mockGetPackageManagerVersion).toHaveBeenCalledWith( + packageManager, + root + ); + + // package manager should now exist + expect(JSON.parse(read("package.json") || "{}").packageManager).toBe( + `${packageManager}@${packageManagerVersion}` + ); + // result should be correct + expect(result.changes).toMatchInlineSnapshot(` + Object { + "package.json": Object { + "action": "modified", + "additions": 1, + "deletions": 0, + }, + } + `); + + mockGetPackageManagerVersion.mockRestore(); + mockGetPackageManager.mockRestore(); + }); + + test("no package manager - repeat run", () => { + // load the fixture for the test + const { root, read } = useFixture({ fixture: "no-package-manager" }); + + const packageManager = "pnpm"; + const packageManagerVersion = "1.2.3"; + + // mock out workspace and version detection so we're not dependent on our actual repo + const 
mockGetPackageManagerVersion = jest + .spyOn(getPackageManagerVersion, "default") + .mockReturnValue(packageManagerVersion); + + const mockGetPackageManager = jest + .spyOn(getPackageManager, "default") + .mockReturnValue(packageManager); + + // package manager should not exist + expect( + JSON.parse(read("package.json") || "{}").packageManager + ).toBeUndefined(); + // run the transformer + const result = transformer({ + root, + options: { force: false, dry: false, print: false }, + }); + + expect(mockGetPackageManager).toHaveBeenCalledWith({ directory: root }); + expect(mockGetPackageManagerVersion).toHaveBeenCalled(); + expect(mockGetPackageManagerVersion).toHaveBeenCalledWith( + packageManager, + root + ); + + // package manager should now exist + expect(JSON.parse(read("package.json") || "{}").packageManager).toBe( + `${packageManager}@${packageManagerVersion}` + ); + // result should be correct + expect(result.fatalError).toBeUndefined(); + expect(result.changes).toMatchInlineSnapshot(` + Object { + "package.json": Object { + "action": "modified", + "additions": 1, + "deletions": 0, + }, + } + `); + + // run the transformer again to ensure nothing changes on a second run + const repeatResult = transformer({ + root, + options: { force: false, dry: false, print: false }, + }); + expect(repeatResult.fatalError).toBeUndefined(); + expect(repeatResult.changes).toMatchInlineSnapshot(` + Object { + "package.json": Object { + "action": "unchanged", + "additions": 0, + "deletions": 0, + }, + } + `); + + mockGetPackageManagerVersion.mockRestore(); + mockGetPackageManager.mockRestore(); + }); + + test("no package manager - dry", () => { + // load the fixture for the test + const { root, read } = useFixture({ fixture: "no-package-manager" }); + + const packageManager = "npm"; + const packageManagerVersion = "1.2.3"; + + // mock out workspace and version detection so we're not dependent on our actual repo + const mockGetPackageManagerVersion = jest + .spyOn(getPackageManagerVersion, "default") + .mockReturnValue(packageManagerVersion); + const mockGetPackageManager = jest + .spyOn(getPackageManager, "default") + .mockReturnValue(packageManager); + + // package manager should not exist + expect( + JSON.parse(read("package.json") || "{}").packageManager + ).toBeUndefined(); + // run the transformer + const result = transformer({ + root, + options: { force: false, dry: true, print: false }, + }); + + expect(mockGetPackageManager).toHaveBeenCalledWith({ directory: root }); + expect(mockGetPackageManagerVersion).toHaveBeenCalledWith( + packageManager, + root + ); + + // package manager should not exist + expect( + JSON.parse(read("package.json") || "{}").packageManager + ).toBeUndefined(); + // result should be correct + expect(result.changes).toMatchInlineSnapshot(` + Object { + "package.json": Object { + "action": "skipped", + "additions": 1, + "deletions": 0, + }, + } + `); + + mockGetPackageManagerVersion.mockRestore(); + mockGetPackageManager.mockRestore(); + }); + + test("no package manager - print", () => { + // load the fixture for the test + const { root, read } = useFixture({ fixture: "no-package-manager" }); + + const packageManager = "yarn"; + const packageManagerVersion = "1.2.3"; + + // mock out workspace and version detection so we're not dependent on our actual repo + const mockGetPackageManagerVersion = jest + .spyOn(getPackageManagerVersion, "default") + .mockReturnValue(packageManagerVersion); + + const mockGetPackageManager = jest + .spyOn(getPackageManager, "default") + .mockReturnValue(packageManager); + + // package
manager should not exist + expect( + JSON.parse(read("package.json") || "{}").packageManager + ).toBeUndefined(); + // run the transformer + const result = transformer({ + root, + options: { force: false, dry: false, print: true }, + }); + + expect(mockGetPackageManager).toHaveBeenCalledWith({ directory: root }); + expect(mockGetPackageManagerVersion).toHaveBeenCalledWith( + packageManager, + root + ); + // package manager should now exist + expect(JSON.parse(read("package.json") || "{}").packageManager).toBe( + `${packageManager}@${packageManagerVersion}` + ); + // result should be correct + expect(result.changes).toMatchInlineSnapshot(` + Object { + "package.json": Object { + "action": "modified", + "additions": 1, + "deletions": 0, + }, + } + `); + + mockGetPackageManagerVersion.mockRestore(); + mockGetPackageManager.mockRestore(); + }); + + test("no package manager - dry & print", () => { + // load the fixture for the test + const { root, read } = useFixture({ fixture: "no-package-manager" }); + + const packageManager = "npm"; + const packageManagerVersion = "1.2.3"; + + // mock out workspace and version detection so we're not dependent on our actual repo + const mockGetPackageManagerVersion = jest + .spyOn(getPackageManagerVersion, "default") + .mockReturnValue(packageManagerVersion); + + const mockGetPackageManager = jest + .spyOn(getPackageManager, "default") + .mockReturnValue(packageManager); + + // package manager should not exist + expect( + JSON.parse(read("package.json") || "{}").packageManager + ).toBeUndefined(); + // run the transformer + const result = transformer({ + root, + options: { force: false, dry: true, print: true }, + }); + + expect(mockGetPackageManager).toHaveBeenCalledWith({ directory: root }); + expect(mockGetPackageManagerVersion).toHaveBeenCalledWith( + packageManager, + root + ); + + // package manager should not exist + expect( + JSON.parse(read("package.json") || "{}").packageManager + ).toBeUndefined(); + // result should be correct + expect(result.changes).toMatchInlineSnapshot(` + Object { + "package.json": Object { + "action": "skipped", + "additions": 1, + "deletions": 0, + }, + } + `); + + mockGetPackageManagerVersion.mockRestore(); + mockGetPackageManager.mockRestore(); + }); + + test("package manager already exists", () => { + // load the fixture for the test + const { root, read } = useFixture({ fixture: "has-package-manager" }); + const packageManager = "npm"; + const packageManagerVersion = "1.2.3"; + + // mock out workspace and version detection so we're not dependent on our actual repo + const mockGetPackageManagerVersion = jest + .spyOn(getPackageManagerVersion, "default") + .mockReturnValue(packageManagerVersion); + + const mockGetPackageManager = jest + .spyOn(getPackageManager, "default") + .mockReturnValue(packageManager); + + // package manager should exist + expect(JSON.parse(read("package.json") || "{}").packageManager).toBe( + `${packageManager}@${packageManagerVersion}` + ); + // run the transformer + const result = transformer({ + root, + options: { force: false, dry: false, print: false }, + }); + + expect(mockGetPackageManager).toHaveBeenCalledWith({ directory: root }); + expect(mockGetPackageManagerVersion).toHaveBeenCalledWith( + packageManager, + root + ); + + // package manager should still exist + expect(JSON.parse(read("package.json") || "{}").packageManager).toBe( + `${packageManager}@${packageManagerVersion}` + ); + // result should be correct + expect(result.changes).toMatchInlineSnapshot(` + Object { + "package.json": Object { + "action": "unchanged", +
"additions": 0, + "deletions": 0, + }, + } + `); + + mockGetPackageManagerVersion.mockRestore(); + mockGetPackageManager.mockRestore(); + }); + + test("package manager exists but is wrong", () => { + // load the fixture for the test + const { root, read } = useFixture({ fixture: "wrong-package-manager" }); + + const packageManager = "pnpm"; + const packageManagerVersion = "1.2.3"; + + // mock out workspace and version detection so we're not dependent on our actual repo + const mockGetPackageManagerVersion = jest + .spyOn(getPackageManagerVersion, "default") + .mockReturnValue(packageManagerVersion); + + const mockGetPackageManager = jest + .spyOn(getPackageManager, "default") + .mockReturnValue(packageManager); + + // package manager should exist + expect(JSON.parse(read("package.json") || "{}").packageManager).toBe( + "turbo@1.7.0" + ); + // run the transformer + const result = transformer({ + root, + options: { force: false, dry: false, print: false }, + }); + + expect(mockGetPackageManager).toHaveBeenCalledWith({ directory: root }); + expect(mockGetPackageManagerVersion).toHaveBeenCalledWith( + packageManager, + root + ); + + // package manager should still exist + expect(JSON.parse(read("package.json") || "{}").packageManager).toBe( + `${packageManager}@${packageManagerVersion}` + ); + // result should be correct + expect(result.changes).toMatchInlineSnapshot(` + Object { + "package.json": Object { + "action": "modified", + "additions": 1, + "deletions": 1, + }, + } + `); + + mockGetPackageManagerVersion.mockRestore(); + mockGetPackageManager.mockRestore(); + }); + + test("errors when unable to determine workspace manager", () => { + // load the fixture for the test + const { root, read } = useFixture({ fixture: "no-package-manager" }); + + const mockGetPackageManager = jest + .spyOn(getPackageManager, "default") + .mockReturnValue(undefined); + + // package manager should not exist + expect( + JSON.parse(read("package.json") || "{}").packageManager + ).toBeUndefined(); + // run the transformer + const result = transformer({ + root, + options: { force: false, dry: false, print: false }, + }); + + expect(mockGetPackageManager).toHaveBeenCalledTimes(1); + expect(mockGetPackageManager).toHaveBeenCalledWith({ directory: root }); + + // result should be correct + // result should be correct + expect(result.fatalError?.message).toMatch( + /Unable to determine package manager for .*?/ + ); + + mockGetPackageManager.mockRestore(); + }); + + test("errors when unable to determine package manager", () => { + // load the fixture for the test + const { root, read } = useFixture({ fixture: "no-package-manager" }); + + const mockGetPackageManagerVersion = jest + .spyOn(getPackageManagerVersion, "default") + .mockImplementation(() => { + throw new Error("package manager not supported"); + }); + + // package manager should not exist + expect( + JSON.parse(read("package.json") || "{}").packageManager + ).toBeUndefined(); + // run the transformer + const result = transformer({ + root, + options: { force: false, dry: false, print: false }, + }); + + expect(mockGetPackageManagerVersion).toHaveBeenCalledTimes(1); + + // result should be correct + expect(result.fatalError?.message).toMatch( + /Unable to determine package manager version for .*?/ + ); + + mockGetPackageManagerVersion.mockRestore(); + }); + + test("errors when unable to write json", () => { + // load the fixture for the test + const { root, read } = useFixture({ fixture: "no-package-manager" }); + + const packageManager = "pnpm"; + const 
packageManagerVersion = "1.2.3"; + + // mock out workspace and version detection so we're not dependent on our actual repo + const mockGetPackageManagerVersion = jest + .spyOn(getPackageManagerVersion, "default") + .mockReturnValue(packageManagerVersion); + + const mockGetPackageManager = jest + .spyOn(getPackageManager, "default") + .mockReturnValue(packageManager); + + const mockWriteJsonSync = jest + .spyOn(fs, "writeJsonSync") + .mockImplementation(() => { + throw new Error("could not write file"); + }); + + // package manager should not exist + expect( + JSON.parse(read("package.json") || "{}").packageManager + ).toBeUndefined(); + // run the transformer + const result = transformer({ + root, + options: { force: false, dry: false, print: false }, + }); + + expect(mockGetPackageManager).toHaveBeenCalledWith({ directory: root }); + expect(mockGetPackageManagerVersion).toHaveBeenCalledWith( + packageManager, + root + ); + + // package manager should still not exist (we couldn't write it) + expect( + JSON.parse(read("package.json") || "{}").packageManager + ).toBeUndefined(); + + // result should be correct + expect(result.fatalError?.message).toMatch( + "Encountered an error while transforming files" + ); + expect(result.changes).toMatchInlineSnapshot(` + Object { + "package.json": Object { + "action": "error", + "additions": 1, + "deletions": 0, + "error": [Error: could not write file], + }, + } + `); + + mockWriteJsonSync.mockRestore(); + mockGetPackageManagerVersion.mockRestore(); + mockGetPackageManager.mockRestore(); + }); +}); diff --git a/packages/turbo-codemod/__tests__/create-turbo-config.test.ts b/packages/turbo-codemod/__tests__/create-turbo-config.test.ts new file mode 100644 index 0000000..8938c78 --- /dev/null +++ b/packages/turbo-codemod/__tests__/create-turbo-config.test.ts @@ -0,0 +1,416 @@ +import { transformer } from "../src/transforms/create-turbo-config"; +import { setupTestFixtures } from "@turbo/test-utils"; +import fs from "fs-extra"; + +describe("create-turbo-config", () => { + const { useFixture } = setupTestFixtures({ + directory: __dirname, + test: "create-turbo-config", + }); + + test("package.json config exists but no turbo.json config - basic", () => { + // load the fixture for the test + const { root, read } = useFixture({ fixture: "no-turbo-json-config" }); + + // turbo.json should not exist + expect(read("turbo.json")).toBeUndefined(); + + // get config from package.json for comparison later + const turboConfig = JSON.parse(read("package.json") || "{}").turbo; + expect(turboConfig).toBeDefined(); + // run the transformer + const result = transformer({ + root, + options: { force: false, dry: false, print: false }, + }); + + // turbo.json should now exist (and match the package.json config) + expect(JSON.parse(read("turbo.json") || "{}")).toEqual(turboConfig); + + // result should be correct + expect(result.fatalError).toBeUndefined(); + expect(result.changes).toMatchInlineSnapshot(` + Object { + "package.json": Object { + "action": "modified", + "additions": 0, + "deletions": 1, + }, + "turbo.json": Object { + "action": "modified", + "additions": 1, + "deletions": 0, + }, + } + `); + }); + + test("package.json config exists but no turbo.json config - repeat run", () => { + // load the fixture for the test + const { root, read } = useFixture({ fixture: "no-turbo-json-config" }); + + // turbo.json should not exist + expect(read("turbo.json")).toBeUndefined(); + + // get config from package.json for comparison later + const turboConfig = 
JSON.parse(read("package.json") || "{}").turbo; + expect(turboConfig).toBeDefined(); + // run the transformer + const result = transformer({ + root, + options: { force: false, dry: false, print: false }, + }); + + // turbo.json should now exist (and match the package.json config) + expect(JSON.parse(read("turbo.json") || "{}")).toEqual(turboConfig); + + // result should be correct + expect(result.fatalError).toBeUndefined(); + expect(result.changes).toMatchInlineSnapshot(` + Object { + "package.json": Object { + "action": "modified", + "additions": 0, + "deletions": 1, + }, + "turbo.json": Object { + "action": "modified", + "additions": 1, + "deletions": 0, + }, + } + `); + + // run the transformer + const repeatResult = transformer({ + root, + options: { force: false, dry: false, print: false }, + }); + // result should be correct + expect(repeatResult.fatalError).toBeUndefined(); + expect(repeatResult.changes).toMatchInlineSnapshot(` + Object { + "package.json": Object { + "action": "unchanged", + "additions": 0, + "deletions": 0, + }, + "turbo.json": Object { + "action": "unchanged", + "additions": 0, + "deletions": 0, + }, + } + `); + }); + + test("package.json config exists but no turbo.json config - dry", () => { + // load the fixture for the test + const { root, read } = useFixture({ fixture: "no-turbo-json-config" }); + + // turbo.json should not exist + expect(read("turbo.json")).toBeUndefined(); + + // get config from package.json for comparison later + const turboConfig = JSON.parse(read("package.json") || "{}").turbo; + expect(turboConfig).toBeDefined(); + // run the transformer + const result = transformer({ + root, + options: { force: false, dry: true, print: false }, + }); + + // turbo.json still not exist (dry run) + expect(read("turbo.json")).toBeUndefined(); + + // result should be correct + expect(result.fatalError).toBeUndefined(); + expect(result.changes).toMatchInlineSnapshot(` + Object { + "package.json": Object { + "action": "skipped", + "additions": 0, + "deletions": 1, + }, + "turbo.json": Object { + "action": "skipped", + "additions": 1, + "deletions": 0, + }, + } + `); + }); + + test("package.json config exists but no turbo.json config - print", () => { + // load the fixture for the test + const { root, read } = useFixture({ fixture: "no-turbo-json-config" }); + + // turbo.json should not exist + expect(read("turbo.json")).toBeUndefined(); + + // get config from package.json for comparison later + const turboConfig = JSON.parse(read("package.json") || "{}").turbo; + expect(turboConfig).toBeDefined(); + // run the transformer + const result = transformer({ + root, + options: { force: false, dry: false, print: true }, + }); + + // turbo.json should now exist (and match the package.json config) + expect(JSON.parse(read("turbo.json") || "{}")).toEqual(turboConfig); + + // result should be correct + expect(result.fatalError).toBeUndefined(); + expect(result.changes).toMatchInlineSnapshot(` + Object { + "package.json": Object { + "action": "modified", + "additions": 0, + "deletions": 1, + }, + "turbo.json": Object { + "action": "modified", + "additions": 1, + "deletions": 0, + }, + } + `); + }); + + test("package.json config exists but no turbo.json config - dry & print", () => { + // load the fixture for the test + const { root, read } = useFixture({ fixture: "no-turbo-json-config" }); + + // turbo.json should not exist + expect(read("turbo.json")).toBeUndefined(); + + // get config from package.json for comparison later + const turboConfig = 
JSON.parse(read("package.json") || "{}").turbo; + expect(turboConfig).toBeDefined(); + // run the transformer + const result = transformer({ + root, + options: { force: false, dry: true, print: true }, + }); + + // turbo.json still not exist (dry run) + expect(read("turbo.json")).toBeUndefined(); + + // result should be correct + expect(result.fatalError).toBeUndefined(); + expect(result.changes).toMatchInlineSnapshot(` + Object { + "package.json": Object { + "action": "skipped", + "additions": 0, + "deletions": 1, + }, + "turbo.json": Object { + "action": "skipped", + "additions": 1, + "deletions": 0, + }, + } + `); + }); + + test("no package.json config or turbo.json file exists", () => { + // load the fixture for the test + const { root, read } = useFixture({ fixture: "no-package-json-config" }); + + // turbo.json should not exist + expect(read("turbo.json")).toBeUndefined(); + + // get config from package.json for comparison later + const packageJsonConfig = JSON.parse(read("package.json") || "{}"); + const turboConfig = packageJsonConfig.turbo; + expect(turboConfig).toBeUndefined(); + // run the transformer + const result = transformer({ + root, + options: { force: false, dry: false, print: false }, + }); + + // turbo.json should still not exist + expect(read("turbo.json")).toBeUndefined(); + + // make sure we didn't change the package.json + expect(JSON.parse(read("package.json") || "{}")).toEqual(packageJsonConfig); + + // result should be correct + expect(result.fatalError).toBeUndefined(); + expect(result.changes).toMatchInlineSnapshot(` + Object { + "package.json": Object { + "action": "unchanged", + "additions": 0, + "deletions": 0, + }, + "turbo.json": Object { + "action": "unchanged", + "additions": 0, + "deletions": 0, + }, + } + `); + }); + + test("no package.json file exists", () => { + // load the fixture for the test + const { root, read } = useFixture({ fixture: "no-package-json-file" }); + + // turbo.json should not exist + expect(read("turbo.json")).toBeUndefined(); + + // run the transformer + const result = transformer({ + root, + options: { force: false, dry: false, print: false }, + }); + + // turbo.json should still not exist + expect(read("turbo.json")).toBeUndefined(); + + // result should be correct + expect(result.fatalError?.message).toMatch( + /No package\.json found at .*?\. 
Is the path correct\?/ + ); + }); + + test("turbo.json file exists and no package.json config exists", () => { + // load the fixture for the test + const { root, read } = useFixture({ fixture: "turbo-json-config" }); + + // turbo.json should exist + expect(read("turbo.json")).toBeDefined(); + + // no config should exist in package.json + const packageJsonConfig = JSON.parse(read("package.json") || "{}"); + const turboConfig = packageJsonConfig.turbo; + expect(turboConfig).toBeUndefined(); + + // run the transformer + const result = transformer({ + root, + options: { force: false, dry: false, print: false }, + }); + + // turbo.json should still exist + expect(read("turbo.json")).toBeDefined(); + + // make sure we didn't change the package.json + expect(JSON.parse(read("package.json") || "{}")).toEqual(packageJsonConfig); + + // result should be correct + expect(result.fatalError).toBeUndefined(); + expect(result.changes).toMatchInlineSnapshot(` + Object { + "package.json": Object { + "action": "unchanged", + "additions": 0, + "deletions": 0, + }, + "turbo.json": Object { + "action": "unchanged", + "additions": 0, + "deletions": 0, + }, + } + `); + }); + + test("turbo.json file exists and package.json config exists", () => { + // load the fixture for the test + const { root, read } = useFixture({ fixture: "both-configs" }); + + // turbo.json should exist + const turboJsonConfig = JSON.parse(read("turbo.json") || "{}"); + expect(turboJsonConfig.pipeline).toBeDefined(); + + // config should exist in package.json + const packageJsonConfig = JSON.parse(read("package.json") || "{}"); + const turboConfig = packageJsonConfig.turbo; + expect(turboConfig).toBeDefined(); + + // run the transformer + const result = transformer({ + root, + options: { force: false, dry: false, print: false }, + }); + + // make sure we didn't change the package.json + expect(JSON.parse(read("package.json") || "{}")).toEqual(packageJsonConfig); + + // make sure we didn't change the turbo.json + expect(JSON.parse(read("turbo.json") || "{}")).toEqual(turboJsonConfig); + + // result should be correct + expect(result.fatalError?.message).toBeUndefined(); + expect(result.changes).toMatchInlineSnapshot(` + Object { + "package.json": Object { + "action": "unchanged", + "additions": 0, + "deletions": 0, + }, + "turbo.json": Object { + "action": "unchanged", + "additions": 0, + "deletions": 0, + }, + } + `); + }); + + test("errors when unable to write json", () => { + // load the fixture for the test + const { root, read } = useFixture({ fixture: "no-turbo-json-config" }); + + // turbo.json should not exist + expect(read("turbo.json")).toBeUndefined(); + + // get config from package.json for comparison later + const turboConfig = JSON.parse(read("package.json") || "{}").turbo; + expect(turboConfig).toBeDefined(); + + const mockWriteJsonSync = jest + .spyOn(fs, "writeJsonSync") + .mockImplementation(() => { + throw new Error("could not write file"); + }); + + // run the transformer + const result = transformer({ + root, + options: { force: false, dry: false, print: false }, + }); + + // turbo.json should still not exist (error writing) + expect(read("turbo.json")).toBeUndefined(); + + // result should be correct + expect(result.fatalError).toBeDefined(); + expect(result.fatalError?.message).toMatch( + "Encountered an error while transforming files" + ); + expect(result.changes).toMatchInlineSnapshot(` + Object { + "package.json": Object { + "action": "error", + "additions": 0, + "deletions": 1, + "error":
[Error: could not write file], + }, + "turbo.json": Object { + "action": "error", + "additions": 1, + "deletions": 0, + "error": [Error: could not write file], + }, + } + `); + + mockWriteJsonSync.mockRestore(); + }); +}); diff --git a/packages/turbo-codemod/__tests__/get-turbo-upgrade-command.test.ts b/packages/turbo-codemod/__tests__/get-turbo-upgrade-command.test.ts new file mode 100644 index 0000000..1015589 --- /dev/null +++ b/packages/turbo-codemod/__tests__/get-turbo-upgrade-command.test.ts @@ -0,0 +1,576 @@ +import { setupTestFixtures } from "@turbo/test-utils"; +import getTurboUpgradeCommand from "../src/commands/migrate/steps/getTurboUpgradeCommand"; +import * as utils from "../src/commands/migrate/utils"; +import * as getPackageManager from "../src/utils/getPackageManager"; +import * as getPackageManagerVersion from "../src/utils/getPackageManagerVersion"; + +const LOCAL_INSTALL_COMMANDS = [ + // npm - workspaces + [ + "latest", + "npm", + "7.0.0", + "normal-workspaces-dev-install", + "npm install turbo@latest --save-dev", + ], + [ + "1.6.3", + "npm", + "7.0.0", + "normal-workspaces-dev-install", + "npm install turbo@1.6.3 --save-dev", + ], + [ + "canary", + "npm", + "7.0.0", + "normal-workspaces-dev-install", + "npm install turbo@canary --save-dev", + ], + ["latest", "npm", "7.0.0", "normal-workspaces", "npm install turbo@latest"], + // npm - single package + [ + "latest", + "npm", + "7.0.0", + "single-package-dev-install", + "npm install turbo@latest --save-dev", + ], + ["latest", "npm", "7.0.0", "single-package", "npm install turbo@latest"], + // pnpm - workspaces + [ + "latest", + "pnpm", + "7.0.0", + "pnpm-workspaces-dev-install", + "pnpm install turbo@latest --save-dev -w", + ], + [ + "1.6.3", + "pnpm", + "7.0.0", + "pnpm-workspaces-dev-install", + "pnpm install turbo@1.6.3 --save-dev -w", + ], + [ + "canary", + "pnpm", + "7.0.0", + "pnpm-workspaces-dev-install", + "pnpm install turbo@canary --save-dev -w", + ], + [ + "latest", + "pnpm", + "7.0.0", + "pnpm-workspaces", + "pnpm install turbo@latest -w", + ], + // pnpm - single package + [ + "latest", + "pnpm", + "7.0.0", + "single-package-dev-install", + "pnpm install turbo@latest --save-dev", + ], + ["latest", "pnpm", "7.0.0", "single-package", "pnpm install turbo@latest"], + // yarn 1.x - workspaces + [ + "latest", + "yarn", + "1.22.19", + "normal-workspaces-dev-install", + "yarn add turbo@latest --dev -W", + ], + [ + "latest", + "yarn", + "1.22.19", + "normal-workspaces", + "yarn add turbo@latest -W", + ], + [ + "1.6.3", + "yarn", + "1.22.19", + "normal-workspaces-dev-install", + "yarn add turbo@1.6.3 --dev -W", + ], + [ + "canary", + "yarn", + "1.22.19", + "normal-workspaces-dev-install", + "yarn add turbo@canary --dev -W", + ], + // yarn 1.x - single package + [ + "latest", + "yarn", + "1.22.19", + "single-package-dev-install", + "yarn add turbo@latest --dev", + ], + ["latest", "yarn", "1.22.19", "single-package", "yarn add turbo@latest"], + // yarn 2.x - workspaces + [ + "latest", + "yarn", + "2.3.4", + "normal-workspaces-dev-install", + "yarn add turbo@latest --dev", + ], + ["latest", "yarn", "2.3.4", "normal-workspaces", "yarn add turbo@latest"], + [ + "1.6.3", + "yarn", + "2.3.4", + "normal-workspaces-dev-install", + "yarn add turbo@1.6.3 --dev", + ], + [ + "canary", + "yarn", + "2.3.4", + "normal-workspaces-dev-install", + "yarn add turbo@canary --dev", + ], + // yarn 2.x - single package + [ + "latest", + "yarn", + "2.3.4", + "single-package-dev-install", + "yarn add turbo@latest --dev", + ], + ["latest", "yarn", 
"2.3.4", "single-package", "yarn add turbo@latest"], + // yarn 3.x - workspaces + [ + "latest", + "yarn", + "3.3.4", + "normal-workspaces-dev-install", + "yarn add turbo@latest --dev", + ], + ["latest", "yarn", "3.3.4", "normal-workspaces", "yarn add turbo@latest"], + [ + "1.6.3", + "yarn", + "3.3.4", + "normal-workspaces-dev-install", + "yarn add turbo@1.6.3 --dev", + ], + [ + "canary", + "yarn", + "3.3.4", + "normal-workspaces-dev-install", + "yarn add turbo@canary --dev", + ], + // yarn 3.x - single package + [ + "latest", + "yarn", + "3.3.4", + "single-package-dev-install", + "yarn add turbo@latest --dev", + ], + ["latest", "yarn", "3.3.4", "single-package", "yarn add turbo@latest"], +]; + +const GLOBAL_INSTALL_COMMANDS = [ + // npm + [ + "latest", + "npm", + "7.0.0", + "normal-workspaces-dev-install", + "npm install turbo@latest --global", + ], + [ + "1.6.3", + "npm", + "7.0.0", + "normal-workspaces-dev-install", + "npm install turbo@1.6.3 --global", + ], + [ + "latest", + "npm", + "7.0.0", + "normal-workspaces", + "npm install turbo@latest --global", + ], + [ + "latest", + "npm", + "7.0.0", + "single-package", + "npm install turbo@latest --global", + ], + [ + "latest", + "npm", + "7.0.0", + "single-package-dev-install", + "npm install turbo@latest --global", + ], + // pnpm + [ + "latest", + "pnpm", + "7.0.0", + "pnpm-workspaces-dev-install", + "pnpm install turbo@latest --global", + ], + [ + "1.6.3", + "pnpm", + "7.0.0", + "pnpm-workspaces-dev-install", + "pnpm install turbo@1.6.3 --global", + ], + [ + "latest", + "pnpm", + "7.0.0", + "pnpm-workspaces", + "pnpm install turbo@latest --global", + ], + [ + "latest", + "pnpm", + "7.0.0", + "single-package", + "pnpm install turbo@latest --global", + ], + [ + "latest", + "pnpm", + "7.0.0", + "single-package-dev-install", + "pnpm install turbo@latest --global", + ], + // yarn 1.x + [ + "latest", + "yarn", + "1.22.19", + "normal-workspaces-dev-install", + "yarn global add turbo@latest", + ], + [ + "latest", + "yarn", + "1.22.19", + "normal-workspaces", + "yarn global add turbo@latest", + ], + [ + "1.6.3", + "yarn", + "1.22.19", + "normal-workspaces-dev-install", + "yarn global add turbo@1.6.3", + ], + [ + "latest", + "yarn", + "1.22.19", + "single-package", + "yarn global add turbo@latest", + ], + [ + "latest", + "yarn", + "1.22.19", + "single-package-dev-install", + "yarn global add turbo@latest", + ], + // yarn 2.x + [ + "latest", + "yarn", + "2.3.4", + "normal-workspaces-dev-install", + "yarn global add turbo@latest", + ], + [ + "latest", + "yarn", + "2.3.4", + "normal-workspaces", + "yarn global add turbo@latest", + ], + [ + "1.6.3", + "yarn", + "2.3.4", + "normal-workspaces-dev-install", + "yarn global add turbo@1.6.3", + ], + ["latest", "yarn", "2.3.4", "single-package", "yarn global add turbo@latest"], + [ + "latest", + "yarn", + "2.3.4", + "single-package-dev-install", + "yarn global add turbo@latest", + ], + // yarn 3.x + [ + "latest", + "yarn", + "3.3.3", + "normal-workspaces-dev-install", + "yarn global add turbo@latest", + ], + [ + "latest", + "yarn", + "3.3.3", + "normal-workspaces", + "yarn global add turbo@latest", + ], + [ + "1.6.3", + "yarn", + "3.3.3", + "normal-workspaces-dev-install", + "yarn global add turbo@1.6.3", + ], + ["latest", "yarn", "3.3.4", "single-package", "yarn global add turbo@latest"], + [ + "latest", + "yarn", + "3.3.4", + "single-package-dev-install", + "yarn global add turbo@latest", + ], +]; + +describe("get-turbo-upgrade-command", () => { + const { useFixture } = setupTestFixtures({ + directory: 
__dirname, + test: "get-turbo-upgrade-command", + }); + + test.each(LOCAL_INSTALL_COMMANDS)( + "returns correct upgrade command for local install of turbo@%s using %s@%s (fixture: %s)", + ( + turboVersion, + packageManager, + packageManagerVersion, + fixture, + expectedUpgradeCommand + ) => { + const { root } = useFixture({ + fixture, + }); + + const mockedExec = jest + .spyOn(utils, "exec") + .mockImplementation((command: string) => { + // fail the check for the turbo, and package manager bins to force local + if (command.includes("bin")) { + return undefined; + } + }); + const mockedGetPackageManagerVersion = jest + .spyOn(getPackageManagerVersion, "default") + .mockReturnValue(packageManagerVersion); + const mockedGetPackageManager = jest + .spyOn(getPackageManager, "default") + .mockReturnValue(packageManager as getPackageManager.PackageManager); + + // get the command + const upgradeCommand = getTurboUpgradeCommand({ + directory: root, + to: turboVersion === "latest" ? undefined : turboVersion, + }); + + expect(upgradeCommand).toEqual(expectedUpgradeCommand); + + mockedExec.mockRestore(); + mockedGetPackageManager.mockRestore(); + mockedGetPackageManagerVersion.mockRestore(); + } + ); + + test.each(GLOBAL_INSTALL_COMMANDS)( + "returns correct upgrade command for global install of turbo@%s using %s@%s (fixture: %s)", + ( + turboVersion, + packageManager, + packageManagerVersion, + fixture, + expectedUpgradeCommand + ) => { + const { root } = useFixture({ + fixture, + }); + + const mockedExec = jest + .spyOn(utils, "exec") + .mockImplementation((command: string) => { + if (command === "turbo bin") { + return `/global/${packageManager}/bin/turbo`; + } + if (command.includes(packageManager)) { + return `/global/${packageManager}/bin`; + } + }); + const mockedGetPackageManagerVersion = jest + .spyOn(getPackageManagerVersion, "default") + .mockReturnValue(packageManagerVersion); + const mockedGetPackageManager = jest + .spyOn(getPackageManager, "default") + .mockReturnValue(packageManager as getPackageManager.PackageManager); + + // get the command + const upgradeCommand = getTurboUpgradeCommand({ + directory: root, + to: turboVersion === "latest" ? 
undefined : turboVersion, + }); + + expect(upgradeCommand).toEqual(expectedUpgradeCommand); + + mockedExec.mockRestore(); + mockedGetPackageManager.mockRestore(); + mockedGetPackageManagerVersion.mockRestore(); + } + ); + + test("fails gracefully if no package.json exists", () => { + const { root } = useFixture({ + fixture: "no-package", + }); + + const mockedExec = jest + .spyOn(utils, "exec") + .mockImplementation((command: string) => { + // fail the check for the turbo, and package manager bins to force local + if (command.includes("bin")) { + return undefined; + } + }); + + const mockedGetPackageManagerVersion = jest + .spyOn(getPackageManagerVersion, "default") + .mockReturnValue("8.0.0"); + const mockedGetPackageManager = jest + .spyOn(getPackageManager, "default") + .mockReturnValue("pnpm" as getPackageManager.PackageManager); + + // get the command + const upgradeCommand = getTurboUpgradeCommand({ + directory: root, + }); + + expect(upgradeCommand).toEqual(undefined); + + mockedExec.mockRestore(); + mockedGetPackageManager.mockRestore(); + mockedGetPackageManagerVersion.mockRestore(); + }); + + test("fails gracefully if turbo cannot be found in package.json", () => { + const { root } = useFixture({ + fixture: "no-turbo", + }); + + const mockedExec = jest + .spyOn(utils, "exec") + .mockImplementation((command: string) => { + // fail the check for the turbo, and package manager bins to force local + if (command.includes("bin")) { + return undefined; + } + }); + + const mockedGetPackageManagerVersion = jest + .spyOn(getPackageManagerVersion, "default") + .mockReturnValue("8.0.0"); + const mockedGetPackageManager = jest + .spyOn(getPackageManager, "default") + .mockReturnValue("pnpm" as getPackageManager.PackageManager); + + // get the command + const upgradeCommand = getTurboUpgradeCommand({ + directory: root, + }); + + expect(upgradeCommand).toEqual(undefined); + + mockedExec.mockRestore(); + mockedGetPackageManager.mockRestore(); + mockedGetPackageManagerVersion.mockRestore(); + }); + + test("fails gracefully if package.json has no deps or devDeps", () => { + const { root } = useFixture({ + fixture: "no-deps", + }); + + const mockedExec = jest + .spyOn(utils, "exec") + .mockImplementation((command: string) => { + // fail the check for the turbo, and package manager bins to force local + if (command.includes("bin")) { + return undefined; + } + }); + + const mockedGetPackageManagerVersion = jest + .spyOn(getPackageManagerVersion, "default") + .mockReturnValue("8.0.0"); + const mockedGetPackageManager = jest + .spyOn(getPackageManager, "default") + .mockReturnValue("pnpm" as getPackageManager.PackageManager); + + // get the command + const upgradeCommand = getTurboUpgradeCommand({ + directory: root, + }); + + expect(upgradeCommand).toEqual(undefined); + + mockedExec.mockRestore(); + mockedGetPackageManager.mockRestore(); + mockedGetPackageManagerVersion.mockRestore(); + }); + + test("fails gracefully if can't find packageManager", () => { + const { root } = useFixture({ + fixture: "no-deps", + }); + + const mockedExec = jest + .spyOn(utils, "exec") + .mockImplementation((command: string) => { + // fail the check for the turbo, and package manager bins to force local + if (command.includes("bin")) { + return undefined; + } + }); + + const mockedGetPackageManagerVersion = jest + .spyOn(getPackageManagerVersion, "default") + .mockReturnValue("8.0.0"); + const mockedGetPackageManager = jest + .spyOn(getPackageManager, "default") + .mockReturnValue(undefined); + + // get the command + const upgradeCommand = getTurboUpgradeCommand({ + directory: root, + }); + + expect(upgradeCommand).toEqual(undefined); + + mockedExec.mockRestore(); + mockedGetPackageManager.mockRestore(); + mockedGetPackageManagerVersion.mockRestore(); + }); +}); diff --git a/packages/turbo-codemod/__tests__/migrate-env-var-dependencies.test.ts b/packages/turbo-codemod/__tests__/migrate-env-var-dependencies.test.ts new file mode 100644 index 0000000..fbc5d8d --- /dev/null +++ b/packages/turbo-codemod/__tests__/migrate-env-var-dependencies.test.ts @@ -0,0 +1,758 @@ +import merge from "deepmerge"; +import { + hasLegacyEnvVarDependencies, + migratePipeline, + migrateConfig, + transformer, +} from "../src/transforms/migrate-env-var-dependencies"; +import { setupTestFixtures } from "@turbo/test-utils"; +import type { Schema } from "@turbo/types"; + +const getTestTurboConfig = (override: Schema = { pipeline: {} }): Schema => { + const config = { + $schema: "./docs/public/schema.json", + globalDependencies: ["$GLOBAL_ENV_KEY"], + pipeline: { + test: { + outputs: ["coverage/**/*"], + dependsOn: ["^build"], + }, + lint: { + outputs: [], + }, + dev: { + cache: false, + }, + build: { + outputs: ["dist/**/*", ".next/**/*", "!.next/cache/**"], + dependsOn: ["^build", "$TASK_ENV_KEY", "$ANOTHER_ENV_KEY"], + }, + }, + }; + + return merge(config, override, { + arrayMerge: (_, sourceArray) => sourceArray, + }); +}; + +describe("migrate-env-var-dependencies", () => { + describe("hasLegacyEnvVarDependencies - utility", () => { + it("finds env keys in legacy turbo.json - has keys", async () => { + const config = getTestTurboConfig(); + const { hasKeys, envVars } = hasLegacyEnvVarDependencies(config); + expect(hasKeys).toEqual(true); + expect(envVars).toMatchInlineSnapshot(` + Array [ + "$GLOBAL_ENV_KEY", + "$TASK_ENV_KEY", + "$ANOTHER_ENV_KEY", + ] + `); + }); + + it("finds env keys in legacy turbo.json - multiple pipeline keys", async () => { + const config = getTestTurboConfig({ + pipeline: { test: { dependsOn: ["$MY_ENV"] } }, + }); + const { hasKeys, envVars } = hasLegacyEnvVarDependencies(config); + expect(hasKeys).toEqual(true); + expect(envVars).toMatchInlineSnapshot(` + Array [ + "$GLOBAL_ENV_KEY", + "$MY_ENV", + "$TASK_ENV_KEY", + "$ANOTHER_ENV_KEY", + ] + `); + }); + + it("finds env keys in legacy turbo.json - no keys", async () => { + // override to exclude keys + const config = getTestTurboConfig({ + globalDependencies: [], + pipeline: { build: { dependsOn: [] } }, + }); + const { hasKeys, envVars } = hasLegacyEnvVarDependencies(config); + expect(hasKeys).toEqual(false); + expect(envVars).toMatchInlineSnapshot(`Array []`); + }); + + it("finds env keys in turbo.json - no global", async () => { + const { hasKeys, envVars } = hasLegacyEnvVarDependencies({ + pipeline: { build: { dependsOn: ["$cool"] } }, + }); + expect(hasKeys).toEqual(true); + expect(envVars).toMatchInlineSnapshot(` + Array [ + "$cool", + ] + `); + }); + }); + + describe("migratePipeline - utility", () => { + it("migrates pipeline with env var dependencies", async () => { + const config = getTestTurboConfig(); + const { build } = config.pipeline; + const pipeline = migratePipeline(build); + expect(pipeline).toHaveProperty("env"); + expect(pipeline?.env).toMatchInlineSnapshot(` + Array [ + "TASK_ENV_KEY", + "ANOTHER_ENV_KEY", + ] + `); + expect(pipeline?.dependsOn).toMatchInlineSnapshot(` + Array [ + "^build", + ] + `); + }); + + it("migrates pipeline with no env var dependencies", async
() => { + const config = getTestTurboConfig(); + const { test } = config.pipeline; + const pipeline = migratePipeline(test); + expect(pipeline.env).toBeUndefined(); + expect(pipeline?.dependsOn).toMatchInlineSnapshot(` + Array [ + "^build", + ] + `); + }); + + it("migrates pipeline with existing env key", async () => { + const config = getTestTurboConfig({ + pipeline: { test: { env: ["$MY_ENV"], dependsOn: ["^build"] } }, + }); + const { test } = config.pipeline; + const pipeline = migratePipeline(test); + expect(pipeline).toHaveProperty("env"); + expect(pipeline?.env).toMatchInlineSnapshot(` + Array [ + "$MY_ENV", + ] + `); + expect(pipeline?.dependsOn).toMatchInlineSnapshot(` + Array [ + "^build", + ] + `); + }); + + it("migrates pipeline with incomplete env key", async () => { + const config = getTestTurboConfig({ + pipeline: { + test: { env: ["$MY_ENV"], dependsOn: ["^build", "$SUPER_COOL"] }, + }, + }); + const { test } = config.pipeline; + const pipeline = migratePipeline(test); + expect(pipeline).toHaveProperty("env"); + expect(pipeline?.env).toMatchInlineSnapshot(` + Array [ + "$MY_ENV", + "SUPER_COOL", + ] + `); + expect(pipeline?.dependsOn).toMatchInlineSnapshot(` + Array [ + "^build", + ] + `); + }); + + it("migrates pipeline with duplicate env keys", async () => { + const config = getTestTurboConfig({ + pipeline: { + test: { env: ["$MY_ENV"], dependsOn: ["^build", "$MY_ENV"] }, + }, + }); + const { test } = config.pipeline; + const pipeline = migratePipeline(test); + expect(pipeline).toHaveProperty("env"); + expect(pipeline?.env).toMatchInlineSnapshot(` + Array [ + "$MY_ENV", + "MY_ENV", + ] + `); + expect(pipeline?.dependsOn).toMatchInlineSnapshot(` + Array [ + "^build", + ] + `); + }); + }); + + describe("migrateConfig - utility", () => { + it("migrates config with env var dependencies", async () => { + const config = getTestTurboConfig(); + const pipeline = migrateConfig(config); + expect(pipeline).toMatchInlineSnapshot(` + Object { + "$schema": "./docs/public/schema.json", + "globalEnv": Array [ + "GLOBAL_ENV_KEY", + ], + "pipeline": Object { + "build": Object { + "dependsOn": Array [ + "^build", + ], + "env": Array [ + "TASK_ENV_KEY", + "ANOTHER_ENV_KEY", + ], + "outputs": Array [ + "dist/**/*", + ".next/**/*", + "!.next/cache/**", + ], + }, + "dev": Object { + "cache": false, + }, + "lint": Object { + "outputs": Array [], + }, + "test": Object { + "dependsOn": Array [ + "^build", + ], + "outputs": Array [ + "coverage/**/*", + ], + }, + }, + } + `); + }); + + it("migrates config with no env var dependencies", async () => { + const config = getTestTurboConfig({ + globalDependencies: [], + pipeline: { + build: { dependsOn: ["^build"] }, + }, + }); + const pipeline = migrateConfig(config); + expect(pipeline).toMatchInlineSnapshot(` + Object { + "$schema": "./docs/public/schema.json", + "pipeline": Object { + "build": Object { + "dependsOn": Array [ + "^build", + ], + "outputs": Array [ + "dist/**/*", + ".next/**/*", + "!.next/cache/**", + ], + }, + "dev": Object { + "cache": false, + }, + "lint": Object { + "outputs": Array [], + }, + "test": Object { + "dependsOn": Array [ + "^build", + ], + "outputs": Array [ + "coverage/**/*", + ], + }, + }, + } + `); + }); + + it("migrates config with inconsistent config", async () => { + const config = getTestTurboConfig({ + pipeline: { + test: { env: ["$MY_ENV"], dependsOn: ["^build", "$SUPER_COOL"] }, + }, + }); + const pipeline = migrateConfig(config); + expect(pipeline).toMatchInlineSnapshot(` + Object { + "$schema": 
"./docs/public/schema.json", + "globalEnv": Array [ + "GLOBAL_ENV_KEY", + ], + "pipeline": Object { + "build": Object { + "dependsOn": Array [ + "^build", + ], + "env": Array [ + "TASK_ENV_KEY", + "ANOTHER_ENV_KEY", + ], + "outputs": Array [ + "dist/**/*", + ".next/**/*", + "!.next/cache/**", + ], + }, + "dev": Object { + "cache": false, + }, + "lint": Object { + "outputs": Array [], + }, + "test": Object { + "dependsOn": Array [ + "^build", + ], + "env": Array [ + "$MY_ENV", + "SUPER_COOL", + ], + "outputs": Array [ + "coverage/**/*", + ], + }, + }, + } + `); + }); + + it("migrates config with duplicate env keys", async () => { + const config = getTestTurboConfig({ + pipeline: { + test: { env: ["$MY_ENV"], dependsOn: ["^build", "$MY_ENV"] }, + }, + }); + const pipeline = migrateConfig(config); + expect(pipeline).toMatchInlineSnapshot(` + Object { + "$schema": "./docs/public/schema.json", + "globalEnv": Array [ + "GLOBAL_ENV_KEY", + ], + "pipeline": Object { + "build": Object { + "dependsOn": Array [ + "^build", + ], + "env": Array [ + "TASK_ENV_KEY", + "ANOTHER_ENV_KEY", + ], + "outputs": Array [ + "dist/**/*", + ".next/**/*", + "!.next/cache/**", + ], + }, + "dev": Object { + "cache": false, + }, + "lint": Object { + "outputs": Array [], + }, + "test": Object { + "dependsOn": Array [ + "^build", + ], + "env": Array [ + "$MY_ENV", + "MY_ENV", + ], + "outputs": Array [ + "coverage/**/*", + ], + }, + }, + } + `); + }); + }); + + describe("transform", () => { + const { useFixture } = setupTestFixtures({ + directory: __dirname, + test: "migrate-env-var-dependencies", + }); + + it("migrates turbo.json env var dependencies - basic", async () => { + // load the fixture for the test + const { root, read } = useFixture({ + fixture: "env-dependencies", + }); + + // run the transformer + const result = transformer({ + root, + options: { force: false, dry: false, print: false }, + }); + + expect(JSON.parse(read("turbo.json") || "{}")).toStrictEqual({ + $schema: "https://turbo.build/schema.json", + globalDependencies: [".env"], + globalEnv: ["NEXT_PUBLIC_API_KEY", "STRIPE_API_KEY"], + pipeline: { + build: { + dependsOn: ["^build"], + env: ["PROD_API_KEY"], + outputs: [".next/**", "!.next/cache/**"], + }, + dev: { + cache: false, + }, + lint: { + dependsOn: [], + env: ["IS_CI"], + outputs: [], + }, + test: { + dependsOn: ["test"], + env: ["IS_CI"], + outputs: [], + }, + }, + }); + + expect(result.fatalError).toBeUndefined(); + expect(result.changes).toMatchInlineSnapshot(` + Object { + "turbo.json": Object { + "action": "modified", + "additions": 4, + "deletions": 4, + }, + } + `); + }); + + it("migrates turbo.json env var dependencies - workspace configs", async () => { + // load the fixture for the test + const { root, readJson } = useFixture({ + fixture: "workspace-configs", + }); + + // run the transformer + const result = transformer({ + root, + options: { force: false, dry: false, print: false }, + }); + + expect(readJson("turbo.json") || "{}").toStrictEqual({ + $schema: "https://turbo.build/schema.json", + globalDependencies: [".env"], + globalEnv: ["NEXT_PUBLIC_API_KEY", "STRIPE_API_KEY"], + pipeline: { + build: { + dependsOn: ["^build"], + env: ["PROD_API_KEY"], + outputs: [".next/**", "!.next/cache/**"], + }, + dev: { + cache: false, + }, + lint: { + dependsOn: [], + env: ["IS_TEST"], + outputs: [], + }, + test: { + dependsOn: ["test"], + env: ["IS_CI"], + outputs: [], + }, + }, + }); + + expect(readJson("apps/web/turbo.json") || "{}").toStrictEqual({ + $schema: 
"https://turbo.build/schema.json", + extends: ["//"], + pipeline: { + build: { + // old + dependsOn: ["build"], + // new + env: ["ENV_1", "ENV_2"], + }, + }, + }); + + expect(readJson("packages/ui/turbo.json") || "{}").toStrictEqual({ + $schema: "https://turbo.build/schema.json", + extends: ["//"], + pipeline: { + build: { + dependsOn: [], + env: ["IS_SERVER"], + }, + }, + }); + + expect(result.fatalError).toBeUndefined(); + expect(result.changes).toMatchInlineSnapshot(` + Object { + "apps/web/turbo.json": Object { + "action": "modified", + "additions": 1, + "deletions": 0, + }, + "packages/ui/turbo.json": Object { + "action": "modified", + "additions": 1, + "deletions": 1, + }, + "turbo.json": Object { + "action": "modified", + "additions": 4, + "deletions": 4, + }, + } + `); + }); + + it("migrates turbo.json env var dependencies - repeat run", async () => { + // load the fixture for the test + const { root, read } = useFixture({ + fixture: "env-dependencies", + }); + + // run the transformer + const result = transformer({ + root, + options: { force: false, dry: false, print: false }, + }); + + expect(JSON.parse(read("turbo.json") || "{}")).toStrictEqual({ + $schema: "https://turbo.build/schema.json", + globalDependencies: [".env"], + globalEnv: ["NEXT_PUBLIC_API_KEY", "STRIPE_API_KEY"], + pipeline: { + build: { + dependsOn: ["^build"], + env: ["PROD_API_KEY"], + outputs: [".next/**", "!.next/cache/**"], + }, + dev: { + cache: false, + }, + lint: { + dependsOn: [], + env: ["IS_CI"], + outputs: [], + }, + test: { + dependsOn: ["test"], + env: ["IS_CI"], + outputs: [], + }, + }, + }); + + expect(result.fatalError).toBeUndefined(); + expect(result.changes).toMatchInlineSnapshot(` + Object { + "turbo.json": Object { + "action": "modified", + "additions": 4, + "deletions": 4, + }, + } + `); + + // run the transformer + const repeatResult = transformer({ + root, + options: { force: false, dry: false, print: false }, + }); + + expect(repeatResult.fatalError).toBeUndefined(); + expect(repeatResult.changes).toMatchInlineSnapshot(` + Object { + "turbo.json": Object { + "action": "unchanged", + "additions": 0, + "deletions": 0, + }, + } + `); + }); + + it("migrates turbo.json env var dependencies - dry", async () => { + // load the fixture for the test + const { root, read } = useFixture({ + fixture: "env-dependencies", + }); + + const turboJson = JSON.parse(read("turbo.json") || "{}"); + + // run the transformer + const result = transformer({ + root, + options: { force: false, dry: true, print: false }, + }); + + // make sure it didn't change + expect(JSON.parse(read("turbo.json") || "{}")).toEqual(turboJson); + + expect(result.fatalError).toBeUndefined(); + expect(result.changes).toMatchInlineSnapshot(` + Object { + "turbo.json": Object { + "action": "skipped", + "additions": 4, + "deletions": 4, + }, + } + `); + }); + + it("migrates turbo.json env var dependencies - print", async () => { + // load the fixture for the test + const { root, read } = useFixture({ + fixture: "env-dependencies", + }); + + // run the transformer + const result = transformer({ + root, + options: { force: false, dry: false, print: true }, + }); + + expect(JSON.parse(read("turbo.json") || "{}")).toStrictEqual({ + $schema: "https://turbo.build/schema.json", + globalEnv: ["NEXT_PUBLIC_API_KEY", "STRIPE_API_KEY"], + globalDependencies: [".env"], + pipeline: { + build: { + dependsOn: ["^build"], + env: ["PROD_API_KEY"], + outputs: [".next/**", "!.next/cache/**"], + }, + dev: { + cache: false, + }, + lint: { + dependsOn: [], + 
env: ["IS_CI"], + outputs: [], + }, + test: { + dependsOn: ["test"], + env: ["IS_CI"], + outputs: [], + }, + }, + }); + + expect(result.fatalError).toBeUndefined(); + expect(result.changes).toMatchInlineSnapshot(` + Object { + "turbo.json": Object { + "action": "modified", + "additions": 4, + "deletions": 4, + }, + } + `); + }); + + it("migrates turbo.json env var dependencies - dry & print", async () => { + // load the fixture for the test + const { root, read } = useFixture({ + fixture: "env-dependencies", + }); + + const turboJson = JSON.parse(read("turbo.json") || "{}"); + + // run the transformer + const result = transformer({ + root, + options: { force: false, dry: true, print: true }, + }); + + // make sure it didn't change + expect(JSON.parse(read("turbo.json") || "{}")).toEqual(turboJson); + + expect(result.fatalError).toBeUndefined(); + expect(result.changes).toMatchInlineSnapshot(` + Object { + "turbo.json": Object { + "action": "skipped", + "additions": 4, + "deletions": 4, + }, + } + `); + }); + + it("does not change turbo.json if already migrated", async () => { + // load the fixture for the test + const { root, read } = useFixture({ + fixture: "migrated-env-dependencies", + }); + + const turboJson = JSON.parse(read("turbo.json") || "{}"); + + // run the transformer + const result = transformer({ + root, + options: { force: false, dry: false, print: false }, + }); + + expect(JSON.parse(read("turbo.json") || "{}")).toEqual(turboJson); + + expect(result.fatalError).toBeUndefined(); + expect(result.changes).toMatchInlineSnapshot(` + Object { + "turbo.json": Object { + "action": "unchanged", + "additions": 0, + "deletions": 0, + }, + } + `); + }); + + it("errors if no turbo.json can be found", async () => { + // load the fixture for the test + const { root, read } = useFixture({ + fixture: "no-turbo-json", + }); + + expect(read("turbo.json")).toBeUndefined(); + + // run the transformer + const result = transformer({ + root, + options: { force: false, dry: false, print: false }, + }); + + expect(read("turbo.json")).toBeUndefined(); + expect(result.fatalError).toBeDefined(); + expect(result.fatalError?.message).toMatch( + /No turbo\.json found at .*?\. Is the path correct\?/ + ); + }); + + it("errors if package.json config exists and has not been migrated", async () => { + // load the fixture for the test + const { root } = useFixture({ + fixture: "old-config", + }); + + // run the transformer + const result = transformer({ + root, + options: { force: false, dry: false, print: false }, + }); + + expect(result.fatalError).toBeDefined(); + expect(result.fatalError?.message).toMatch( + 'turbo" key detected in package.json. 
Run `npx @turbo/codemod transform create-turbo-config` first' + ); + }); + }); +}); diff --git a/packages/turbo-codemod/__tests__/migrate.test.ts b/packages/turbo-codemod/__tests__/migrate.test.ts new file mode 100644 index 0000000..652ea41 --- /dev/null +++ b/packages/turbo-codemod/__tests__/migrate.test.ts @@ -0,0 +1,761 @@ +import { MigrateCommandArgument } from "../src/commands"; +import migrate from "../src/commands/migrate"; +import { setupTestFixtures, spyExit } from "@turbo/test-utils"; +import childProcess from "child_process"; +import * as checkGitStatus from "../src/utils/checkGitStatus"; +import * as getCurrentVersion from "../src/commands/migrate/steps/getCurrentVersion"; +import * as getLatestVersion from "../src/commands/migrate/steps/getLatestVersion"; +import * as getTurboUpgradeCommand from "../src/commands/migrate/steps/getTurboUpgradeCommand"; +import * as workspaceImplementation from "../src/utils/getPackageManager"; +import * as getPackageManagerVersion from "../src/utils/getPackageManagerVersion"; + +describe("migrate", () => { + const mockExit = spyExit(); + const { useFixture } = setupTestFixtures({ + directory: __dirname, + test: "migrate", + }); + + it("migrates from 1.0.0 to 1.7.0", async () => { + const { root, readJson } = useFixture({ + fixture: "old-turbo", + }); + + const packageManager = "pnpm"; + const packageManagerVersion = "1.2.3"; + + // setup mocks + const mockedCheckGitStatus = jest + .spyOn(checkGitStatus, "default") + .mockReturnValue(undefined); + const mockedGetCurrentVersion = jest + .spyOn(getCurrentVersion, "default") + .mockReturnValue("1.0.0"); + const mockedGetLatestVersion = jest + .spyOn(getLatestVersion, "default") + .mockResolvedValue("1.7.0"); + const mockedGetTurboUpgradeCommand = jest + .spyOn(getTurboUpgradeCommand, "default") + .mockReturnValue("pnpm install -g turbo@latest"); + const mockedGetPackageManagerVersion = jest + .spyOn(getPackageManagerVersion, "default") + .mockReturnValue(packageManagerVersion); + const mockedGetWorkspaceImplementation = jest + .spyOn(workspaceImplementation, "default") + .mockReturnValue(packageManager); + + await migrate(root as MigrateCommandArgument, { + force: false, + dry: false, + print: false, + install: false, + }); + + expect(readJson("package.json")).toStrictEqual({ + dependencies: {}, + devDependencies: { + turbo: "1.0.0", + }, + name: "no-turbo-json", + packageManager: "pnpm@1.2.3", + version: "1.0.0", + }); + expect(readJson("turbo.json")).toStrictEqual({ + $schema: "https://turbo.build/schema.json", + pipeline: { + build: { + outputs: [".next/**", "!.next/cache/**"], + }, + dev: { + cache: false, + }, + lint: {}, + test: { + outputs: ["dist/**", "build/**"], + }, + }, + }); + + // verify mocks were called + expect(mockedCheckGitStatus).toHaveBeenCalled(); + expect(mockedGetCurrentVersion).toHaveBeenCalled(); + expect(mockedGetLatestVersion).toHaveBeenCalled(); + expect(mockedGetTurboUpgradeCommand).toHaveBeenCalled(); + expect(mockedGetPackageManagerVersion).toHaveBeenCalled(); + expect(mockedGetWorkspaceImplementation).toHaveBeenCalled(); + + // restore mocks + mockedCheckGitStatus.mockRestore(); + mockedGetCurrentVersion.mockRestore(); + mockedGetLatestVersion.mockRestore(); + mockedGetTurboUpgradeCommand.mockRestore(); + mockedGetPackageManagerVersion.mockRestore(); + mockedGetWorkspaceImplementation.mockRestore(); + }); + + it("migrates from 1.0.0 to 1.2.0 (dry run)", async () => { + const { root, readJson } = useFixture({ + fixture: "old-turbo", + }); + + const packageManager = 
"pnpm"; + const packageManagerVersion = "1.2.3"; + + // setup mocks + const mockedCheckGitStatus = jest + .spyOn(checkGitStatus, "default") + .mockReturnValue(undefined); + const mockedGetCurrentVersion = jest + .spyOn(getCurrentVersion, "default") + .mockReturnValue("1.0.0"); + const mockedGetLatestVersion = jest + .spyOn(getLatestVersion, "default") + .mockResolvedValue("1.2.0"); + const mockedGetTurboUpgradeCommand = jest + .spyOn(getTurboUpgradeCommand, "default") + .mockReturnValue("pnpm install -g turbo@latest"); + const mockedGetPackageManagerVersion = jest + .spyOn(getPackageManagerVersion, "default") + .mockReturnValue(packageManagerVersion); + const mockedGetWorkspaceImplementation = jest + .spyOn(workspaceImplementation, "default") + .mockReturnValue(packageManager); + + const packageJson = readJson("package.json"); + const turboJson = readJson("turbo.json"); + + await migrate(root as MigrateCommandArgument, { + force: false, + dry: true, + print: false, + install: true, + }); + + // make sure nothing changed + expect(readJson("package.json")).toStrictEqual(packageJson); + expect(readJson("turbo.json")).toStrictEqual(turboJson); + + // verify mocks were called + expect(mockedCheckGitStatus).not.toHaveBeenCalled(); + expect(mockedGetCurrentVersion).toHaveBeenCalled(); + expect(mockedGetLatestVersion).toHaveBeenCalled(); + expect(mockedGetTurboUpgradeCommand).toHaveBeenCalled(); + expect(mockedGetPackageManagerVersion).toHaveBeenCalled(); + expect(mockedGetWorkspaceImplementation).toHaveBeenCalled(); + + // restore mocks + mockedCheckGitStatus.mockRestore(); + mockedGetCurrentVersion.mockRestore(); + mockedGetLatestVersion.mockRestore(); + mockedGetTurboUpgradeCommand.mockRestore(); + mockedGetPackageManagerVersion.mockRestore(); + mockedGetWorkspaceImplementation.mockRestore(); + }); + + it("next version can be passed as an option", async () => { + const { root, readJson } = useFixture({ + fixture: "old-turbo", + }); + + const packageManager = "pnpm"; + const packageManagerVersion = "1.2.3"; + + // setup mocks + const mockedCheckGitStatus = jest + .spyOn(checkGitStatus, "default") + .mockReturnValue(undefined); + const mockedGetCurrentVersion = jest + .spyOn(getCurrentVersion, "default") + .mockReturnValue("1.0.0"); + const mockedGetLatestVersion = jest + .spyOn(getLatestVersion, "default") + .mockResolvedValue("1.7.0"); + const mockedGetTurboUpgradeCommand = jest + .spyOn(getTurboUpgradeCommand, "default") + .mockReturnValue("pnpm install -g turbo@latest"); + const mockedGetPackageManagerVersion = jest + .spyOn(getPackageManagerVersion, "default") + .mockReturnValue(packageManagerVersion); + const mockedGetWorkspaceImplementation = jest + .spyOn(workspaceImplementation, "default") + .mockReturnValue(packageManager); + + await migrate(root as MigrateCommandArgument, { + force: false, + dry: false, + print: false, + install: false, + to: "1.7.0", + }); + + expect(readJson("package.json")).toStrictEqual({ + dependencies: {}, + devDependencies: { + turbo: "1.0.0", + }, + name: "no-turbo-json", + packageManager: "pnpm@1.2.3", + version: "1.0.0", + }); + expect(readJson("turbo.json")).toStrictEqual({ + $schema: "https://turbo.build/schema.json", + pipeline: { + build: { + outputs: [".next/**", "!.next/cache/**"], + }, + dev: { + cache: false, + }, + test: { + outputs: ["dist/**", "build/**"], + }, + lint: {}, + }, + }); + + // verify mocks were called + expect(mockedCheckGitStatus).toHaveBeenCalled(); + expect(mockedGetCurrentVersion).toHaveBeenCalled(); + 
expect(mockedGetLatestVersion).toHaveBeenCalled(); + expect(mockedGetTurboUpgradeCommand).toHaveBeenCalled(); + expect(mockedGetPackageManagerVersion).toHaveBeenCalled(); + expect(mockedGetWorkspaceImplementation).toHaveBeenCalled(); + + // restore mocks + mockedCheckGitStatus.mockRestore(); + mockedGetCurrentVersion.mockRestore(); + mockedGetLatestVersion.mockRestore(); + mockedGetTurboUpgradeCommand.mockRestore(); + mockedGetPackageManagerVersion.mockRestore(); + mockedGetWorkspaceImplementation.mockRestore(); + }); + + it("current version can be passed as an option", async () => { + const { root, readJson } = useFixture({ + fixture: "old-turbo", + }); + + const packageManager = "pnpm"; + const packageManagerVersion = "1.2.3"; + + // setup mocks + const mockedCheckGitStatus = jest + .spyOn(checkGitStatus, "default") + .mockReturnValue(undefined); + const mockedGetLatestVersion = jest + .spyOn(getLatestVersion, "default") + .mockResolvedValue("1.7.0"); + const mockedGetTurboUpgradeCommand = jest + .spyOn(getTurboUpgradeCommand, "default") + .mockReturnValue("pnpm install -g turbo@latest"); + const mockedGetPackageManagerVersion = jest + .spyOn(getPackageManagerVersion, "default") + .mockReturnValue(packageManagerVersion); + + const mockedGetWorkspaceImplementation = jest + .spyOn(workspaceImplementation, "default") + .mockReturnValue(packageManager); + + await migrate(root as MigrateCommandArgument, { + force: false, + dry: false, + print: false, + install: false, + from: "1.0.0", + }); + + expect(readJson("package.json")).toStrictEqual({ + dependencies: {}, + devDependencies: { + turbo: "1.0.0", + }, + name: "no-turbo-json", + packageManager: "pnpm@1.2.3", + version: "1.0.0", + }); + expect(readJson("turbo.json")).toStrictEqual({ + $schema: "https://turbo.build/schema.json", + pipeline: { + build: { + outputs: [".next/**", "!.next/cache/**"], + }, + dev: { + cache: false, + }, + lint: {}, + test: { + outputs: ["dist/**", "build/**"], + }, + }, + }); + + // verify mocks were called + expect(mockedCheckGitStatus).toHaveBeenCalled(); + expect(mockedGetLatestVersion).toHaveBeenCalled(); + expect(mockedGetTurboUpgradeCommand).toHaveBeenCalled(); + expect(mockedGetPackageManagerVersion).toHaveBeenCalled(); + expect(mockedGetWorkspaceImplementation).toHaveBeenCalled(); + + // restore mocks + mockedCheckGitStatus.mockRestore(); + mockedGetLatestVersion.mockRestore(); + mockedGetTurboUpgradeCommand.mockRestore(); + mockedGetPackageManagerVersion.mockRestore(); + mockedGetWorkspaceImplementation.mockRestore(); + }); + + it("exits if the current version is the same as the new version", async () => { + const { root } = useFixture({ + fixture: "old-turbo", + }); + + // setup mocks + const mockedCheckGitStatus = jest + .spyOn(checkGitStatus, "default") + .mockReturnValue(undefined); + const mockedGetCurrentVersion = jest + .spyOn(getCurrentVersion, "default") + .mockReturnValue("1.7.0"); + const mockedGetLatestVersion = jest + .spyOn(getLatestVersion, "default") + .mockResolvedValue("1.7.0"); + + await migrate(root as MigrateCommandArgument, { + force: false, + dry: false, + print: false, + install: false, + }); + + expect(mockExit.exit).toHaveBeenCalledWith(0); + + // verify mocks were called + expect(mockedCheckGitStatus).toHaveBeenCalled(); + expect(mockedGetCurrentVersion).toHaveBeenCalled(); + expect(mockedGetLatestVersion).toHaveBeenCalled(); + + // restore mocks + mockedCheckGitStatus.mockRestore(); + mockedGetCurrentVersion.mockRestore(); + mockedGetLatestVersion.mockRestore(); + }); + + 
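+  // NOTE (editor): `mockExit` comes from the `spyExit` helper in
+  // @turbo/test-utils. A rough sketch of the shape these assertions assume
+  // (the real helper may differ in detail):
+  //
+  //   function spyExit() {
+  //     const spy: { exit?: jest.SpyInstance } = {};
+  //     beforeEach(() => {
+  //       spy.exit = jest
+  //         .spyOn(process, "exit")
+  //         .mockImplementation(() => undefined as never);
+  //     });
+  //     afterEach(() => spy.exit?.mockRestore());
+  //     return spy;
+  //   }
+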
it("continues when migration doesn't require codemods", async () => { + const { root } = useFixture({ + fixture: "old-turbo", + }); + + // setup mocks + const mockedCheckGitStatus = jest + .spyOn(checkGitStatus, "default") + .mockReturnValue(undefined); + const mockedGetCurrentVersion = jest + .spyOn(getCurrentVersion, "default") + .mockReturnValue("1.3.0"); + const mockedGetLatestVersion = jest + .spyOn(getLatestVersion, "default") + .mockResolvedValue("1.3.1"); + const mockedGetTurboUpgradeCommand = jest + .spyOn(getTurboUpgradeCommand, "default") + .mockReturnValue("npm install turbo@1.3.1"); + const mockExecSync = jest + .spyOn(childProcess, "execSync") + .mockReturnValue("installed"); + + await migrate(root as MigrateCommandArgument, { + force: false, + dry: false, + print: false, + install: true, + }); + + // verify mocks were called + expect(mockedCheckGitStatus).toHaveBeenCalled(); + expect(mockedGetCurrentVersion).toHaveBeenCalled(); + expect(mockedGetLatestVersion).toHaveBeenCalled(); + expect(mockedGetTurboUpgradeCommand).toHaveBeenCalled(); + expect(mockExecSync).toHaveBeenCalledWith("npm install turbo@1.3.1", { + cwd: root, + }); + + // restore mocks + mockedCheckGitStatus.mockRestore(); + mockedGetCurrentVersion.mockRestore(); + mockedGetLatestVersion.mockRestore(); + mockedGetTurboUpgradeCommand.mockRestore(); + mockExecSync.mockRestore(); + }); + + it("installs the correct turbo version", async () => { + const { root, readJson } = useFixture({ + fixture: "old-turbo", + }); + + const packageManager = "pnpm"; + const packageManagerVersion = "1.2.3"; + + // setup mocks + const mockedCheckGitStatus = jest + .spyOn(checkGitStatus, "default") + .mockReturnValue(undefined); + const mockedGetCurrentVersion = jest + .spyOn(getCurrentVersion, "default") + .mockReturnValue("1.0.0"); + const mockedGetLatestVersion = jest + .spyOn(getLatestVersion, "default") + .mockResolvedValue("1.7.0"); + const mockedGetTurboUpgradeCommand = jest + .spyOn(getTurboUpgradeCommand, "default") + .mockReturnValue("pnpm install -g turbo@1.7.0"); + const mockedGetPackageManagerVersion = jest + .spyOn(getPackageManagerVersion, "default") + .mockReturnValue(packageManagerVersion); + const mockedGetWorkspaceImplementation = jest + .spyOn(workspaceImplementation, "default") + .mockReturnValue(packageManager); + const mockExecSync = jest + .spyOn(childProcess, "execSync") + .mockReturnValue("installed"); + + await migrate(root as MigrateCommandArgument, { + force: false, + dry: false, + print: false, + install: true, + }); + + expect(readJson("package.json")).toStrictEqual({ + dependencies: {}, + devDependencies: { + turbo: "1.0.0", + }, + name: "no-turbo-json", + packageManager: "pnpm@1.2.3", + version: "1.0.0", + }); + expect(readJson("turbo.json")).toStrictEqual({ + $schema: "https://turbo.build/schema.json", + pipeline: { + build: { + outputs: [".next/**", "!.next/cache/**"], + }, + dev: { + cache: false, + }, + lint: {}, + test: { + outputs: ["dist/**", "build/**"], + }, + }, + }); + + // verify mocks were called + expect(mockedCheckGitStatus).toHaveBeenCalled(); + expect(mockedGetCurrentVersion).toHaveBeenCalled(); + expect(mockedGetLatestVersion).toHaveBeenCalled(); + expect(mockedGetTurboUpgradeCommand).toHaveBeenCalled(); + expect(mockedGetPackageManagerVersion).toHaveBeenCalled(); + expect(mockedGetWorkspaceImplementation).toHaveBeenCalled(); + expect(mockExecSync).toHaveBeenCalled(); + expect(mockExecSync).toHaveBeenCalledWith("pnpm install -g turbo@1.7.0", { + cwd: root, + }); + + // restore mocks + 
mockedCheckGitStatus.mockRestore(); + mockedGetCurrentVersion.mockRestore(); + mockedGetLatestVersion.mockRestore(); + mockedGetTurboUpgradeCommand.mockRestore(); + mockedGetPackageManagerVersion.mockRestore(); + mockedGetWorkspaceImplementation.mockRestore(); + mockExecSync.mockRestore(); + }); + + it("fails gracefully when the correct upgrade command cannot be found", async () => { + const { root, readJson } = useFixture({ + fixture: "old-turbo", + }); + + const packageManager = "pnpm"; + const packageManagerVersion = "1.2.3"; + + // setup mocks + const mockedCheckGitStatus = jest + .spyOn(checkGitStatus, "default") + .mockReturnValue(undefined); + const mockedGetCurrentVersion = jest + .spyOn(getCurrentVersion, "default") + .mockReturnValue("1.0.0"); + const mockedGetLatestVersion = jest + .spyOn(getLatestVersion, "default") + .mockResolvedValue("1.7.0"); + const mockedGetTurboUpgradeCommand = jest + .spyOn(getTurboUpgradeCommand, "default") + .mockReturnValue(undefined); + const mockedGetPackageManagerVersion = jest + .spyOn(getPackageManagerVersion, "default") + .mockReturnValue(packageManagerVersion); + const mockedGetWorkspaceImplementation = jest + .spyOn(workspaceImplementation, "default") + .mockReturnValue(packageManager); + const mockExecSync = jest + .spyOn(childProcess, "execSync") + .mockReturnValue("installed"); + + await migrate(root as MigrateCommandArgument, { + force: false, + dry: false, + print: false, + install: true, + }); + + expect(readJson("package.json")).toStrictEqual({ + dependencies: {}, + devDependencies: { + turbo: "1.0.0", + }, + name: "no-turbo-json", + packageManager: "pnpm@1.2.3", + version: "1.0.0", + }); + expect(readJson("turbo.json")).toStrictEqual({ + $schema: "https://turbo.build/schema.json", + pipeline: { + build: { + outputs: [".next/**", "!.next/cache/**"], + }, + dev: { + cache: false, + }, + lint: {}, + test: { + outputs: ["dist/**", "build/**"], + }, + }, + }); + + expect(mockExit.exit).toHaveBeenCalledWith(1); + + // verify mocks were called + expect(mockedCheckGitStatus).toHaveBeenCalled(); + expect(mockedGetCurrentVersion).toHaveBeenCalled(); + expect(mockedGetLatestVersion).toHaveBeenCalled(); + expect(mockedGetTurboUpgradeCommand).toHaveBeenCalled(); + expect(mockedGetPackageManagerVersion).toHaveBeenCalled(); + expect(mockedGetWorkspaceImplementation).toHaveBeenCalled(); + expect(mockExecSync).not.toHaveBeenCalled(); + + // restore mocks + mockedCheckGitStatus.mockRestore(); + mockedGetCurrentVersion.mockRestore(); + mockedGetLatestVersion.mockRestore(); + mockedGetTurboUpgradeCommand.mockRestore(); + mockedGetPackageManagerVersion.mockRestore(); + mockedGetWorkspaceImplementation.mockRestore(); + mockExecSync.mockRestore(); + }); + + it("exits if current version is not passed and cannot be inferred", async () => { + const { root } = useFixture({ + fixture: "old-turbo", + }); + + // setup mocks + const mockedCheckGitStatus = jest + .spyOn(checkGitStatus, "default") + .mockReturnValue(undefined); + const mockedGetCurrentVersion = jest + .spyOn(getCurrentVersion, "default") + .mockReturnValue(undefined); + + await migrate(root as MigrateCommandArgument, { + force: false, + dry: false, + print: false, + install: false, + }); + + expect(mockExit.exit).toHaveBeenCalledWith(1); + + // verify mocks were called + expect(mockedCheckGitStatus).toHaveBeenCalled(); + expect(mockedGetCurrentVersion).toHaveBeenCalled(); + + // restore mocks + mockedCheckGitStatus.mockRestore(); + mockedGetCurrentVersion.mockRestore(); + }); + + it("exits if latest 
version is not passed and cannot be inferred", async () => { + const { root } = useFixture({ + fixture: "old-turbo", + }); + + // setup mocks + const mockedCheckGitStatus = jest + .spyOn(checkGitStatus, "default") + .mockReturnValue(undefined); + const mockedGetCurrentVersion = jest + .spyOn(getCurrentVersion, "default") + .mockReturnValue("1.5.0"); + const mockedGetLatestVersion = jest + .spyOn(getLatestVersion, "default") + .mockResolvedValue(undefined); + + await migrate(root as MigrateCommandArgument, { + force: false, + dry: false, + print: false, + install: false, + }); + + expect(mockExit.exit).toHaveBeenCalledWith(1); + + // verify mocks were called + expect(mockedCheckGitStatus).toHaveBeenCalled(); + expect(mockedGetCurrentVersion).toHaveBeenCalled(); + expect(mockedGetLatestVersion).toHaveBeenCalled(); + + // restore mocks + mockedCheckGitStatus.mockRestore(); + mockedGetCurrentVersion.mockRestore(); + mockedGetLatestVersion.mockRestore(); + }); + + it("exits if latest version throws", async () => { + const { root } = useFixture({ + fixture: "old-turbo", + }); + + // setup mocks + const mockedCheckGitStatus = jest + .spyOn(checkGitStatus, "default") + .mockReturnValue(undefined); + const mockedGetCurrentVersion = jest + .spyOn(getCurrentVersion, "default") + .mockReturnValue("1.5.0"); + const mockedGetLatestVersion = jest + .spyOn(getLatestVersion, "default") + .mockRejectedValue(new Error("failed to fetch version")); + + await migrate(root as MigrateCommandArgument, { + force: false, + dry: false, + print: false, + install: false, + }); + + expect(mockExit.exit).toHaveBeenCalledWith(1); + + // verify mocks were called + expect(mockedCheckGitStatus).toHaveBeenCalled(); + expect(mockedGetCurrentVersion).toHaveBeenCalled(); + expect(mockedGetLatestVersion).toHaveBeenCalled(); + + // restore mocks + mockedCheckGitStatus.mockRestore(); + mockedGetCurrentVersion.mockRestore(); + mockedGetLatestVersion.mockRestore(); + }); + + it("exits if any transforms encounter an error", async () => { + const { root } = useFixture({ + fixture: "old-turbo", + }); + + const packageManager = "pnpm"; + const packageManagerVersion = "1.2.3"; + + // setup mocks + const mockedCheckGitStatus = jest + .spyOn(checkGitStatus, "default") + .mockReturnValue(undefined); + const mockedGetCurrentVersion = jest + .spyOn(getCurrentVersion, "default") + .mockReturnValue("1.0.0"); + const mockedGetLatestVersion = jest + .spyOn(getLatestVersion, "default") + .mockResolvedValue("1.7.0"); + const mockedGetPackageManagerVersion = jest + .spyOn(getPackageManagerVersion, "default") + .mockReturnValue(packageManagerVersion); + const mockedGetWorkspaceImplementation = jest + .spyOn(workspaceImplementation, "default") + .mockReturnValue(packageManager); + + await migrate(root as MigrateCommandArgument, { + force: false, + dry: true, + print: false, + install: true, + }); + + expect(mockExit.exit).toHaveBeenCalledWith(1); + + // verify mocks were called + expect(mockedCheckGitStatus).not.toHaveBeenCalled(); + expect(mockedGetCurrentVersion).toHaveBeenCalled(); + expect(mockedGetLatestVersion).toHaveBeenCalled(); + expect(mockedGetPackageManagerVersion).toHaveBeenCalled(); + expect(mockedGetWorkspaceImplementation).toHaveBeenCalled(); + + // restore mocks + mockedCheckGitStatus.mockRestore(); + mockedGetCurrentVersion.mockRestore(); + mockedGetLatestVersion.mockRestore(); + mockedGetPackageManagerVersion.mockRestore(); + mockedGetWorkspaceImplementation.mockRestore(); + }); + + it("exits if invalid directory is passed", async () 
=> { + const { root } = useFixture({ + fixture: "old-turbo", + }); + + // setup mocks + const mockedCheckGitStatus = jest + .spyOn(checkGitStatus, "default") + .mockReturnValue(undefined); + + await migrate("~/path/that/does/not/exist" as MigrateCommandArgument, { + force: false, + dry: false, + print: false, + install: false, + }); + + expect(mockExit.exit).toHaveBeenCalledWith(1); + + // verify mocks were called + expect(mockedCheckGitStatus).toHaveBeenCalled(); + + // restore mocks + mockedCheckGitStatus.mockRestore(); + }); + + it("exits if directory with no repo is passed", async () => { + const { root } = useFixture({ + fixture: "no-repo", + }); + + // setup mocks + const mockedCheckGitStatus = jest + .spyOn(checkGitStatus, "default") + .mockReturnValue(undefined); + + await migrate(root as MigrateCommandArgument, { + force: false, + dry: false, + print: false, + install: false, + }); + + expect(mockExit.exit).toHaveBeenCalledWith(1); + + // verify mocks were called + expect(mockedCheckGitStatus).toHaveBeenCalled(); + + // restore mocks + mockedCheckGitStatus.mockRestore(); + }); +}); diff --git a/packages/turbo-codemod/__tests__/set-default-outputs.test.ts b/packages/turbo-codemod/__tests__/set-default-outputs.test.ts new file mode 100644 index 0000000..4a71fa7 --- /dev/null +++ b/packages/turbo-codemod/__tests__/set-default-outputs.test.ts @@ -0,0 +1,391 @@ +import { transformer } from "../src/transforms/set-default-outputs"; +import { setupTestFixtures } from "@turbo/test-utils"; + +describe("set-default-outputs", () => { + const { useFixture } = setupTestFixtures({ + directory: __dirname, + test: "set-default-outputs", + }); + it("migrates turbo.json outputs - basic", async () => { + // load the fixture for the test + const { root, read } = useFixture({ + fixture: "old-outputs", + }); + + // run the transformer + const result = transformer({ + root, + options: { force: false, dry: false, print: false }, + }); + + expect(JSON.parse(read("turbo.json") || "{}")).toStrictEqual({ + $schema: "https://turbo.build/schema.json", + pipeline: { + "build-one": { + outputs: ["foo"], + }, + "build-two": {}, + "build-three": { + outputs: ["dist/**", "build/**"], + }, + }, + }); + + expect(result.fatalError).toBeUndefined(); + expect(result.changes).toMatchInlineSnapshot(` + Object { + "turbo.json": Object { + "action": "modified", + "additions": 2, + "deletions": 1, + }, + } + `); + }); + + it("migrates turbo.json outputs - workspace configs", async () => { + // load the fixture for the test + const { root, readJson } = useFixture({ + fixture: "workspace-configs", + }); + + // run the transformer + const result = transformer({ + root, + options: { force: false, dry: false, print: false }, + }); + + expect(readJson("turbo.json") || "{}").toStrictEqual({ + $schema: "https://turbo.build/schema.json", + pipeline: { + "build-one": { + outputs: ["foo"], + }, + "build-two": {}, + "build-three": { + outputs: ["dist/**", "build/**"], + }, + }, + }); + + expect(readJson("apps/docs/turbo.json") || "{}").toStrictEqual({ + $schema: "https://turbo.build/schema.json", + extends: ["//"], + pipeline: { + build: { + outputs: ["dist/**", "build/**"], + }, + }, + }); + + expect(readJson("apps/web/turbo.json") || "{}").toStrictEqual({ + $schema: "https://turbo.build/schema.json", + extends: ["//"], + pipeline: { + build: {}, + }, + }); + + expect(readJson("packages/ui/turbo.json") || "{}").toStrictEqual({ + $schema: "https://turbo.build/schema.json", + extends: ["//"], + pipeline: { + "build-three": { + outputs: 
["dist/**", "build/**"], + }, + }, + }); + + expect(result.fatalError).toBeUndefined(); + expect(result.changes).toMatchInlineSnapshot(` + Object { + "apps/docs/turbo.json": Object { + "action": "modified", + "additions": 1, + "deletions": 1, + }, + "apps/web/turbo.json": Object { + "action": "modified", + "additions": 1, + "deletions": 0, + }, + "packages/ui/turbo.json": Object { + "action": "modified", + "additions": 1, + "deletions": 1, + }, + "turbo.json": Object { + "action": "modified", + "additions": 2, + "deletions": 1, + }, + } + `); + }); + + it("migrates turbo.json outputs - dry", async () => { + // load the fixture for the test + const { root, read } = useFixture({ + fixture: "old-outputs", + }); + + const turboJson = JSON.parse(read("turbo.json") || "{}"); + + // run the transformer + const result = transformer({ + root, + options: { force: false, dry: true, print: false }, + }); + + // make sure it didn't change + expect(JSON.parse(read("turbo.json") || "{}")).toEqual(turboJson); + + expect(result.fatalError).toBeUndefined(); + expect(result.changes).toMatchInlineSnapshot(` + Object { + "turbo.json": Object { + "action": "skipped", + "additions": 2, + "deletions": 1, + }, + } + `); + }); + + it("migrates turbo.json outputs - print", async () => { + // load the fixture for the test + const { root, read } = useFixture({ + fixture: "old-outputs", + }); + + // run the transformer + const result = transformer({ + root, + options: { force: false, dry: false, print: true }, + }); + + expect(JSON.parse(read("turbo.json") || "{}")).toStrictEqual({ + $schema: "https://turbo.build/schema.json", + pipeline: { + "build-one": { + outputs: ["foo"], + }, + "build-two": {}, + "build-three": { + outputs: ["dist/**", "build/**"], + }, + }, + }); + + expect(result.fatalError).toBeUndefined(); + expect(result.changes).toMatchInlineSnapshot(` + Object { + "turbo.json": Object { + "action": "modified", + "additions": 2, + "deletions": 1, + }, + } + `); + }); + + it("migrates turbo.json outputs - dry & print", async () => { + // load the fixture for the test + const { root, read } = useFixture({ + fixture: "old-outputs", + }); + + const turboJson = JSON.parse(read("turbo.json") || "{}"); + + // run the transformer + const result = transformer({ + root, + options: { force: false, dry: true, print: false }, + }); + + // make sure it didn't change + expect(JSON.parse(read("turbo.json") || "{}")).toEqual(turboJson); + + expect(result.fatalError).toBeUndefined(); + expect(result.changes).toMatchInlineSnapshot(` + Object { + "turbo.json": Object { + "action": "skipped", + "additions": 2, + "deletions": 1, + }, + } + `); + }); + + it("migrates turbo.json outputs - invalid", async () => { + // load the fixture for the test + const { root, read } = useFixture({ + fixture: "invalid-outputs", + }); + + // run the transformer + const result = transformer({ + root, + options: { force: false, dry: false, print: false }, + }); + + expect(JSON.parse(read("turbo.json") || "{}")).toStrictEqual({ + $schema: "https://turbo.build/schema.json", + pipeline: { + "build-one": { + outputs: ["foo"], + }, + "build-two": {}, + "build-three": { + outputs: ["dist/**", "build/**"], + }, + "garbage-in-numeric-0": { + outputs: ["dist/**", "build/**"], + }, + "garbage-in-numeric": { + outputs: 42, + }, + "garbage-in-string": { + outputs: "string", + }, + "garbage-in-empty-string": { + outputs: ["dist/**", "build/**"], + }, + "garbage-in-null": { + outputs: ["dist/**", "build/**"], + }, + "garbage-in-false": { + outputs: ["dist/**", 
"build/**"], + }, + "garbage-in-true": { + outputs: true, + }, + "garbage-in-object": { + outputs: {}, + }, + }, + }); + + expect(result.fatalError).toBeUndefined(); + expect(result.changes).toMatchInlineSnapshot(` + Object { + "turbo.json": Object { + "action": "modified", + "additions": 6, + "deletions": 5, + }, + } + `); + }); + + it("migrates turbo.json outputs - config with no pipeline", async () => { + // load the fixture for the test + const { root, read } = useFixture({ + fixture: "no-pipeline", + }); + + // run the transformer + const result = transformer({ + root, + options: { force: false, dry: false, print: false }, + }); + + expect(JSON.parse(read("turbo.json") || "{}")).toStrictEqual({ + $schema: "https://turbo.build/schema.json", + globalDependencies: ["$NEXT_PUBLIC_API_KEY", "$STRIPE_API_KEY", ".env"], + pipeline: {}, + }); + + expect(result.fatalError).toBeUndefined(); + expect(result.changes).toMatchInlineSnapshot(` + Object { + "turbo.json": Object { + "action": "unchanged", + "additions": 0, + "deletions": 0, + }, + } + `); + }); + + it("migrates turbo.json outputs - config with no outputs", async () => { + // load the fixture for the test + const { root, read } = useFixture({ + fixture: "no-outputs", + }); + + // run the transformer + const result = transformer({ + root, + options: { force: false, dry: false, print: false }, + }); + + expect(JSON.parse(read("turbo.json") || "{}")).toStrictEqual({ + $schema: "https://turbo.build/schema.json", + pipeline: { + "build-one": { + dependsOn: ["build-two"], + outputs: ["dist/**", "build/**"], + }, + "build-two": { + cache: false, + }, + "build-three": { + persistent: true, + outputs: ["dist/**", "build/**"], + }, + }, + }); + + expect(result.fatalError).toBeUndefined(); + expect(result.changes).toMatchInlineSnapshot(` + Object { + "turbo.json": Object { + "action": "modified", + "additions": 2, + "deletions": 0, + }, + } + `); + }); + + it("errors if no turbo.json can be found", async () => { + // load the fixture for the test + const { root, read } = useFixture({ + fixture: "no-turbo-json", + }); + + expect(read("turbo.json")).toBeUndefined(); + + // run the transformer + const result = transformer({ + root, + options: { force: false, dry: false, print: false }, + }); + + expect(read("turbo.json")).toBeUndefined(); + expect(result.fatalError).toBeDefined(); + expect(result.fatalError?.message).toMatch( + /No turbo\.json found at .*?\. Is the path correct\?/ + ); + }); + + it("errors if package.json config exists and has not been migrated", async () => { + // load the fixture for the test + const { root } = useFixture({ + fixture: "old-config", + }); + + // run the transformer + const result = transformer({ + root, + options: { force: false, dry: false, print: false }, + }); + + expect(result.fatalError).toBeDefined(); + expect(result.fatalError?.message).toMatch( + 'turbo" key detected in package.json. 
Run `npx @turbo/codemod transform create-turbo-config` first' + ); + }); +}); diff --git a/packages/turbo-codemod/__tests__/transform.test.ts b/packages/turbo-codemod/__tests__/transform.test.ts new file mode 100644 index 0000000..abd015d --- /dev/null +++ b/packages/turbo-codemod/__tests__/transform.test.ts @@ -0,0 +1,172 @@ +import transform from "../src/commands/transform"; +import { MigrateCommandArgument } from "../src/commands"; +import { setupTestFixtures, spyExit } from "@turbo/test-utils"; +import * as checkGitStatus from "../src/utils/checkGitStatus"; +import * as getPackageManager from "../src/utils/getPackageManager"; +import * as getPackageManagerVersion from "../src/utils/getPackageManagerVersion"; + +describe("transform", () => { + const mockExit = spyExit(); + const { useFixture } = setupTestFixtures({ + directory: __dirname, + test: "transform", + }); + + it("runs the selected transform", async () => { + const { root, readJson } = useFixture({ + fixture: "basic", + }); + + const packageManager = "pnpm"; + const packageManagerVersion = "1.2.3"; + + // setup mocks + const mockedCheckGitStatus = jest + .spyOn(checkGitStatus, "default") + .mockReturnValue(undefined); + const mockedGetPackageManagerVersion = jest + .spyOn(getPackageManagerVersion, "default") + .mockReturnValue(packageManagerVersion); + const mockedGetPackageManager = jest + .spyOn(getPackageManager, "default") + .mockReturnValue(packageManager); + + await transform( + "add-package-manager" as MigrateCommandArgument, + root as MigrateCommandArgument, + { + list: false, + force: false, + dry: false, + print: false, + } + ); + + expect(readJson("package.json")).toStrictEqual({ + dependencies: {}, + devDependencies: { + turbo: "1.0.0", + }, + name: "transform-basic", + packageManager: "pnpm@1.2.3", + version: "1.0.0", + }); + + // verify mocks were called + expect(mockedCheckGitStatus).toHaveBeenCalled(); + expect(mockedGetPackageManagerVersion).toHaveBeenCalled(); + expect(mockedGetPackageManager).toHaveBeenCalled(); + + // restore mocks + mockedCheckGitStatus.mockRestore(); + mockedGetPackageManagerVersion.mockRestore(); + mockedGetPackageManager.mockRestore(); + }); + + it("runs the selected transform - dry & print", async () => { + const { root, readJson } = useFixture({ + fixture: "basic", + }); + + const packageManager = "pnpm"; + const packageManagerVersion = "1.2.3"; + + // setup mocks + const mockedCheckGitStatus = jest + .spyOn(checkGitStatus, "default") + .mockReturnValue(undefined); + const mockedGetPackageManagerVersion = jest + .spyOn(getPackageManagerVersion, "default") + .mockReturnValue(packageManagerVersion); + const mockedGetPackageManager = jest + .spyOn(getPackageManager, "default") + .mockReturnValue(packageManager); + + await transform( + "add-package-manager" as MigrateCommandArgument, + root as MigrateCommandArgument, + { + list: false, + force: false, + dry: true, + print: true, + } + ); + + expect(readJson("package.json")).toStrictEqual({ + dependencies: {}, + devDependencies: { + turbo: "1.0.0", + }, + name: "transform-basic", + version: "1.0.0", + }); + + // verify mocks were called + expect(mockedCheckGitStatus).not.toHaveBeenCalled(); + expect(mockedGetPackageManagerVersion).toHaveBeenCalled(); + expect(mockedGetPackageManager).toHaveBeenCalled(); + + // restore mocks + mockedCheckGitStatus.mockRestore(); + mockedGetPackageManagerVersion.mockRestore(); + mockedGetPackageManager.mockRestore(); + }); + + it("lists transforms", async () => { + const { root } = useFixture({ + fixture: 
"basic", + }); + + await transform( + "add-package-manager" as MigrateCommandArgument, + root as MigrateCommandArgument, + { + list: true, + force: false, + dry: false, + print: false, + } + ); + + expect(mockExit.exit).toHaveBeenCalledWith(0); + }); + + it("exits on invalid transform", async () => { + const { root } = useFixture({ + fixture: "basic", + }); + + await transform( + "not-a-real-option" as MigrateCommandArgument, + root as MigrateCommandArgument, + { + list: false, + force: false, + dry: false, + print: false, + } + ); + + expect(mockExit.exit).toHaveBeenCalledWith(1); + }); + + it("exits on invalid directory", async () => { + const { root } = useFixture({ + fixture: "basic", + }); + + await transform( + "add-package-manager" as MigrateCommandArgument, + "~/path/that/does/not/exist" as MigrateCommandArgument, + { + list: false, + force: false, + dry: false, + print: false, + } + ); + + expect(mockExit.exit).toHaveBeenCalledWith(1); + }); +}); diff --git a/packages/turbo-codemod/index.d.ts b/packages/turbo-codemod/index.d.ts new file mode 100644 index 0000000..c3a4874 --- /dev/null +++ b/packages/turbo-codemod/index.d.ts @@ -0,0 +1 @@ +declare module "is-git-clean"; diff --git a/packages/turbo-codemod/jest.config.js b/packages/turbo-codemod/jest.config.js new file mode 100644 index 0000000..2c7542a --- /dev/null +++ b/packages/turbo-codemod/jest.config.js @@ -0,0 +1,18 @@ +/** @type {import('ts-jest/dist/types').InitialOptionsTsJest} */ +module.exports = { + preset: "ts-jest/presets/js-with-ts", + testEnvironment: "node", + transformIgnorePatterns: ["/node_modules/(?!(ansi-regex)/)"], + modulePathIgnorePatterns: ["/node_modules", "/dist"], + testPathIgnorePatterns: ["/__fixtures__/"], + coveragePathIgnorePatterns: ["/__fixtures__/"], + collectCoverage: true, + coverageThreshold: { + global: { + branches: 80, + functions: 89, + lines: 89, + statements: 89, + }, + }, +}; diff --git a/packages/turbo-codemod/package.json b/packages/turbo-codemod/package.json new file mode 100644 index 0000000..d16bb05 --- /dev/null +++ b/packages/turbo-codemod/package.json @@ -0,0 +1,67 @@ +{ + "name": "@turbo/codemod", + "version": "1.9.4-canary.2", + "description": "Provides Codemod transformations to help upgrade your Turborepo codebase when a feature is deprecated.", + "homepage": "https://turbo.build/repo", + "license": "MPL-2.0", + "repository": { + "type": "git", + "url": "https://github.com/vercel/turbo", + "directory": "packages/turbo-codemod" + }, + "bugs": { + "url": "https://github.com/vercel/turbo/issues" + }, + "bin": "dist/cli.js", + "scripts": { + "build": "tsup", + "test": "jest", + "lint": "eslint src/**/*.ts", + "check-types": "tsc --noEmit", + "add-transformer": "plop" + }, + "dependencies": { + "axios": "0.27.2", + "chalk": "2.4.2", + "commander": "^9.5.0", + "diff": "^5.1.0", + "find-up": "4.1.0", + "fs-extra": "^10.0.0", + "gradient-string": "^2.0.0", + "inquirer": "^8.2.4", + "inquirer-file-tree-selection-prompt": "^1.0.19", + "is-git-clean": "^1.1.0", + "ora": "4.1.1", + "semver": "^7.3.7", + "update-check": "^1.5.4" + }, + "devDependencies": { + "@types/chalk-animation": "^1.6.0", + "@types/diff": "^5.0.2", + "@types/fs-extra": "^9.0.13", + "@types/gradient-string": "^1.1.2", + "@types/inquirer": "^8.2.0", + "@types/jest": "^27.4.0", + "@types/node": "^16.11.12", + "@types/semver": "^7.3.9", + "@types/uuid": "^9.0.0", + "deepmerge": "^4.2.2", + "eslint": "^7.23.0", + "jest": "^27.4.3", + "plop": "^3.1.1", + "semver": "^7.3.5", + "ts-jest": "^27.1.1", + "@turbo/tsconfig": 
"workspace:*", + "tsup": "^5.10.3", + "@turbo/test-utils": "workspace:*", + "@turbo/types": "workspace:*", + "@turbo/utils": "workspace:*", + "typescript": "^4.5.5" + }, + "files": [ + "dist" + ], + "publishConfig": { + "access": "public" + } +} diff --git a/packages/turbo-codemod/plopfile.js b/packages/turbo-codemod/plopfile.js new file mode 100644 index 0000000..9cc2dd7 --- /dev/null +++ b/packages/turbo-codemod/plopfile.js @@ -0,0 +1,46 @@ +const fs = require("fs-extra"); + +module.exports = function plopConfig(plop) { + // controller generator + plop.setGenerator("controller", { + description: "Add a new transformer", + prompts: [ + { + type: "input", + name: "name", + message: 'key for the transform (example: "create-turbo-config")', + }, + { + type: "input", + name: "description", + message: + 'description for the transform (example: "Create the `turbo.json` file from an existing "turbo" key in `package.json`")', + }, + { + type: "input", + name: "introducedIn", + message: + 'the semantic version of turbo where this change was introduced (example: "1.1.0")', + }, + ], + actions: [ + { + type: "add", + path: "src/transforms/{{name}}.ts", + templateFile: "templates/transformer.hbs", + }, + { + type: "add", + path: "__tests__/{{name}}.test.ts", + templateFile: "templates/transformer.test.hbs", + }, + function createFixturesDirectory(answers) { + process.chdir(plop.getPlopfilePath()); + const directory = `__tests__/__fixtures__/${answers.name}`; + fs.mkdirSync(`__tests__/__fixtures__/${answers.name}`); + + return `created empty ${directory} directory for fixtures`; + }, + ], + }); +}; diff --git a/packages/turbo-codemod/src/cli.ts b/packages/turbo-codemod/src/cli.ts new file mode 100644 index 0000000..451816f --- /dev/null +++ b/packages/turbo-codemod/src/cli.ts @@ -0,0 +1,73 @@ +#!/usr/bin/env node + +import chalk from "chalk"; +import { Command } from "commander"; + +import { transform, migrate } from "./commands"; +import notifyUpdate from "./utils/notifyUpdate"; +import cliPkg from "../package.json"; + +const codemodCli = new Command(); + +codemodCli + .name("@turbo/codemod") + .description( + "Codemod transformations to help upgrade your Turborepo codebase when a feature is deprecated." 
+  )
+  .version(cliPkg.version, "-v, --version", "output the current version");
+
+// migrate
+codemodCli
+  .command("migrate")
+  .aliases(["update", "upgrade"])
+  .description("Migrate a project to the latest version of Turborepo")
+  .argument("[path]", "Directory where the transforms should be applied")
+  .option(
+    "--from <version>",
+    "Specify the version to migrate from (default: current version)"
+  )
+  .option(
+    "--to <version>",
+    "Specify the version to migrate to (default: latest)"
+  )
+  .option("--install", "Install new version of turbo after migration", true)
+  .option(
+    "--force",
+    "Bypass Git safety checks and forcibly run codemods",
+    false
+  )
+  .option("--dry", "Dry run (no changes are made to files)", false)
+  .option("--print", "Print transformed files to your terminal", false)
+  .action(migrate);
+
+// transform
+codemodCli
+  .command("transform", { isDefault: true })
+  .description("Apply a single code transformation to a project")
+  .argument("[transform]", "The transformer to run")
+  .argument("[path]", "Directory where the transforms should be applied")
+  .option(
+    "--force",
+    "Bypass Git safety checks and forcibly run codemods",
+    false
+  )
+  .option("--list", "List all available transforms", false)
+  .option("--dry", "Dry run (no changes are made to files)", false)
+  .option("--print", "Print transformed files to your terminal", false)
+  .action(transform);
+
+codemodCli
+  .parseAsync()
+  .then(notifyUpdate)
+  .catch(async (reason) => {
+    console.log();
+    if (reason.command) {
+      console.log(` ${chalk.cyan(reason.command)} has failed.`);
+    } else {
+      console.log(chalk.red("Unexpected error. Please report it as a bug:"));
+      console.log(reason);
+    }
+    console.log();
+    await notifyUpdate();
+    process.exit(1);
+  });
diff --git a/packages/turbo-codemod/src/commands/index.ts b/packages/turbo-codemod/src/commands/index.ts
new file mode 100644
index 0000000..a7aeee6
--- /dev/null
+++ b/packages/turbo-codemod/src/commands/index.ts
@@ -0,0 +1,11 @@
+export { default as migrate } from "./migrate";
+export { default as transform } from "./transform";
+
+export type {
+  TransformCommandArgument,
+  TransformCommandOptions,
+} from "./transform/types";
+export type {
+  MigrateCommandArgument,
+  MigrateCommandOptions,
+} from "./migrate/types";
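[Editor's note: to make the CLI wiring above concrete, here is roughly what the
migrate action receives for a sample invocation. The repo path is invented for
illustration; the option defaults follow the `.option(...)` declarations above.]

    // npx @turbo/codemod migrate ./my-repo --from 1.0.0 --dry
    const directory = "./my-repo";
    const options = {
      from: "1.0.0", // --from <version>
      install: true, // default
      force: false,  // default
      dry: true,     // --dry
      print: false,  // default
    };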
diff --git a/packages/turbo-codemod/src/commands/migrate/index.ts b/packages/turbo-codemod/src/commands/migrate/index.ts
new file mode 100644
index 0000000..c4c6d02
--- /dev/null
+++ b/packages/turbo-codemod/src/commands/migrate/index.ts
@@ -0,0 +1,215 @@
+import chalk from "chalk";
+import os from "os";
+import inquirer from "inquirer";
+import { execSync } from "child_process";
+
+import getCurrentVersion from "./steps/getCurrentVersion";
+import getLatestVersion from "./steps/getLatestVersion";
+import getCodemodsForMigration from "./steps/getTransformsForMigration";
+import checkGitStatus from "../../utils/checkGitStatus";
+import directoryInfo from "../../utils/directoryInfo";
+import getTurboUpgradeCommand from "./steps/getTurboUpgradeCommand";
+import Runner from "../../runner/Runner";
+import type { MigrateCommandArgument, MigrateCommandOptions } from "./types";
+import looksLikeRepo from "../../utils/looksLikeRepo";
+
+function endMigration({
+  message,
+  success,
+}: {
+  message?: string;
+  success: boolean;
+}) {
+  if (success) {
+    console.log(chalk.bold(chalk.green("Migration completed")));
+    if (message) {
+      console.log(message);
+    }
+    return process.exit(0);
+  }
+
+  console.log(chalk.bold(chalk.red("Migration failed")));
+  if (message) {
+    console.log(message);
+  }
+  return process.exit(1);
+}
+
+/**
+Migration is done in five steps:
+  -- gather information
+  1. find the version (x) of turbo to migrate from (if not specified)
+  2. find the version (y) of turbo to migrate to (if not specified)
+  3. determine which codemods need to be run to move from version x to version y
+  -- action
+  4. execute the codemods (serially, and in order)
+  5. update the turbo version (optionally)
+**/
+export default async function migrate(
+  directory: MigrateCommandArgument,
+  options: MigrateCommandOptions
+) {
+  // check git status
+  if (!options.dry) {
+    checkGitStatus({ directory, force: options.force });
+  }
+
+  const answers = await inquirer.prompt<{
+    directoryInput?: string;
+  }>([
+    {
+      type: "input",
+      name: "directoryInput",
+      message: "Where is the root of the repo to migrate?",
+      when: !directory,
+      default: ".",
+      validate: (directory: string) => {
+        const { exists, absolute } = directoryInfo({ directory });
+        if (exists) {
+          return true;
+        } else {
+          return `Directory ${chalk.dim(`(${absolute})`)} does not exist`;
+        }
+      },
+      filter: (directory: string) => directory.trim(),
+    },
+  ]);
+
+  const { directoryInput: selectedDirectory = directory as string } = answers;
+  const { exists, absolute: root } = directoryInfo({
+    directory: selectedDirectory,
+  });
+  if (!exists) {
+    return endMigration({
+      success: false,
+      message: `Directory ${chalk.dim(`(${root})`)} does not exist`,
+    });
+  }
+
+  if (!looksLikeRepo({ directory: root })) {
+    return endMigration({
+      success: false,
+      message: `Directory (${chalk.dim(
+        root
+      )}) does not appear to be a repository`,
+    });
+  }
+
+  // step 1
+  const fromVersion = getCurrentVersion(selectedDirectory, options);
+  if (!fromVersion) {
+    return endMigration({
+      success: false,
+      message: `Unable to infer the version of turbo being used by ${selectedDirectory}`,
+    });
+  }
+
+  // step 2
+  let toVersion = options.to;
+  try {
+    toVersion = await getLatestVersion(options);
+  } catch (err) {
+    let message = "UNKNOWN_ERROR";
+    if (err instanceof Error) {
+      message = err.message;
+    }
+    return endMigration({
+      success: false,
+      message,
+    });
+  }
+
+  if (!toVersion) {
+    return endMigration({
+      success: false,
+      message: `Unable to fetch the latest version of turbo`,
+    });
+  }
+
+  if (fromVersion === toVersion) {
+    return endMigration({
+      success: true,
+      message: `Nothing to do, current version (${chalk.bold(
+        fromVersion
+      )}) is the same as the requested version (${chalk.bold(toVersion)})`,
+    });
+  }
+
+  // step 3
+  const codemods = getCodemodsForMigration({ fromVersion, toVersion });
+  if (codemods.length === 0) {
+    console.log(
+      `No codemods required to migrate from ${fromVersion} to ${toVersion}`,
+      os.EOL
+    );
+  }
+
+  // step 4
+  console.log(
+    `Upgrading turbo from ${chalk.bold(fromVersion)} to ${chalk.bold(
+      toVersion
+    )} (${
+      codemods.length === 0
+        ? "no codemods required"
+        : `${codemods.length} required codemod${
+            codemods.length === 1 ? "" : "s"
+          }`
+    })`,
+    os.EOL
+  );
+  const results = codemods.map((codemod, idx) => {
+    console.log(
+      `(${idx + 1}/${codemods.length}) ${chalk.bold(
+        `Running ${codemod.value}`
+      )}`
+    );
+
+    const result = codemod.transformer({ root: selectedDirectory, options });
+    Runner.logResults(result);
+    return result;
+  });
+
+  const hasTransformError = results.some(
+    (result) =>
+      result.fatalError ||
+      Object.keys(result.changes).some((key) => result.changes[key].error)
+  );
+
+  if (hasTransformError) {
+    return endMigration({
+      success: false,
+      message: `Could not complete migration due to codemod errors. Please fix the errors and try again.`,
+    });
+  }
+
+  // step 5
+  const upgradeCommand = getTurboUpgradeCommand({
+    directory: selectedDirectory,
+    to: options.to,
+  });
+
+  if (!upgradeCommand) {
+    return endMigration({
+      success: false,
+      message: "Unable to determine upgrade command",
+    });
+  }
+
+  if (options.install) {
+    if (options.dry) {
+      console.log(
+        `Upgrading turbo with ${chalk.bold(upgradeCommand)} ${chalk.dim(
+          "(dry run)"
+        )}`,
+        os.EOL
+      );
+    } else {
+      console.log(`Upgrading turbo with ${chalk.bold(upgradeCommand)}`, os.EOL);
+      execSync(upgradeCommand, { cwd: selectedDirectory });
+    }
+  } else {
+    console.log(`Upgrade turbo with ${chalk.bold(upgradeCommand)}`, os.EOL);
+  }
+
+  endMigration({ success: true });
+}
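[Editor's note: getCurrentVersion.ts below resolves step 1 with a fallback
chain: the --from flag, a global `turbo --version`, a package-manager-scoped
run, and finally the local binary. A small sketch of that last npm fallback;
the directory is hypothetical:]

    import path from "path";

    const directory = "/work/my-repo"; // example only
    // npm has no `npm turbo --version` equivalent, so the binary path is
    // built by hand and executed directly via the exec() helper:
    const turboBin = path.join(directory, "node_modules", ".bin", "turbo");
    // -> "/work/my-repo/node_modules/.bin/turbo", run as `${turboBin} --version`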
"" : "s" + }` + })`, + os.EOL + ); + const results = codemods.map((codemod, idx) => { + console.log( + `(${idx + 1}/${codemods.length}) ${chalk.bold( + `Running ${codemod.value}` + )}` + ); + + const result = codemod.transformer({ root: selectedDirectory, options }); + Runner.logResults(result); + return result; + }); + + const hasTransformError = results.some( + (result) => + result.fatalError || + Object.keys(result.changes).some((key) => result.changes[key].error) + ); + + if (hasTransformError) { + return endMigration({ + success: false, + message: `Could not complete migration due to codemod errors. Please fix the errors and try again.`, + }); + } + + // step 5 + const upgradeCommand = getTurboUpgradeCommand({ + directory: selectedDirectory, + to: options.to, + }); + + if (!upgradeCommand) { + return endMigration({ + success: false, + message: "Unable to determine upgrade command", + }); + } + + if (options.install) { + if (options.dry) { + console.log( + `Upgrading turbo with ${chalk.bold(upgradeCommand)} ${chalk.dim( + "(dry run)" + )}`, + os.EOL + ); + } else { + console.log(`Upgrading turbo with ${chalk.bold(upgradeCommand)}`, os.EOL); + execSync(upgradeCommand, { cwd: selectedDirectory }); + } + } else { + console.log(`Upgrade turbo with ${chalk.bold(upgradeCommand)}`, os.EOL); + } + + endMigration({ success: true }); +} diff --git a/packages/turbo-codemod/src/commands/migrate/steps/getCurrentVersion.ts b/packages/turbo-codemod/src/commands/migrate/steps/getCurrentVersion.ts new file mode 100644 index 0000000..3644f8b --- /dev/null +++ b/packages/turbo-codemod/src/commands/migrate/steps/getCurrentVersion.ts @@ -0,0 +1,45 @@ +import path from "path"; +import { existsSync } from "fs-extra"; + +import getPackageManager from "../../../utils/getPackageManager"; +import { exec } from "../utils"; +import type { MigrateCommandOptions } from "../types"; + +function getCurrentVersion( + directory: string, + opts: MigrateCommandOptions +): string | undefined { + const { from } = opts; + if (from) { + return from; + } + + // try global first + const turboVersionFromGlobal = exec(`turbo --version`, { cwd: directory }); + + if (turboVersionFromGlobal) { + return turboVersionFromGlobal; + } + + // try to use the package manager to find the version + const packageManager = getPackageManager({ directory }); + if (packageManager) { + if (packageManager === "yarn") { + return exec(`yarn turbo --version`, { cwd: directory }); + } + if (packageManager === "pnpm") { + return exec(`pnpm turbo --version`, { cwd: directory }); + } else { + // this doesn't work for npm, so manually build the binary path + const turboBin = path.join(directory, "node_modules", ".bin", "turbo"); + if (existsSync(turboBin)) { + return exec(`${turboBin} --version`, { cwd: directory }); + } + } + } + + // unable to determine local version, + return undefined; +} + +export default getCurrentVersion; diff --git a/packages/turbo-codemod/src/commands/migrate/steps/getLatestVersion.ts b/packages/turbo-codemod/src/commands/migrate/steps/getLatestVersion.ts new file mode 100644 index 0000000..a6ab7e6 --- /dev/null +++ b/packages/turbo-codemod/src/commands/migrate/steps/getLatestVersion.ts @@ -0,0 +1,31 @@ +import axios from "axios"; + +import type { MigrateCommandOptions } from "../types"; + +const REGISTRY = "https://registry.npmjs.org"; + +async function getPackageDetails({ packageName }: { packageName: string }) { + try { + const result = await axios.get(`${REGISTRY}/${packageName}`); + return result.data; + } catch (err) { + throw 
diff --git a/packages/turbo-codemod/src/commands/migrate/steps/getTransformsForMigration.ts b/packages/turbo-codemod/src/commands/migrate/steps/getTransformsForMigration.ts
new file mode 100644
index 0000000..2224c06
--- /dev/null
+++ b/packages/turbo-codemod/src/commands/migrate/steps/getTransformsForMigration.ts
@@ -0,0 +1,25 @@
+import { gt, lte } from "semver";
+
+import loadTransformers from "../../../utils/loadTransformers";
+import type { Transformer } from "../../../types";
+
+/**
+  Returns all transformers introduced after fromVersion, but before or equal to toVersion
+**/
+function getTransformsForMigration({
+  fromVersion,
+  toVersion,
+}: {
+  fromVersion: string;
+  toVersion: string;
+}): Array<Transformer> {
+  const transforms = loadTransformers();
+  return transforms.filter((transformer) => {
+    return (
+      gt(transformer.introducedIn, fromVersion) &&
+      lte(transformer.introducedIn, toVersion)
+    );
+  });
+}
+
+export default getTransformsForMigration;
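[Editor's note: getTurboUpgradeCommand.ts below derives the final install
command from the detected package manager and the install location (global vs
local, dependency vs devDependency). Illustrative outputs, assuming the
module-private helpers behave as written:]

    getGlobalUpgradeCommand("pnpm", "1.7.0");
    // -> "pnpm install turbo@1.7.0 --global"

    getLocalUpgradeCommand({
      packageManager: "yarn",
      packageManagerVersion: "1.22.19",
      installType: "devDependencies",
      isUsingWorkspaces: true,
      to: "1.7.0",
    });
    // -> "yarn add turbo@1.7.0 --dev -W" (yarn 1.x needs -W at the workspace root)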
&& "-W", + ]); + } + case "npm": + return renderCommand([ + "npm", + "install", + `turbo@${to}`, + installType === "devDependencies" && "--save-dev", + ]); + case "pnpm": + return renderCommand([ + "pnpm", + "install", + `turbo@${to}`, + installType === "devDependencies" && "--save-dev", + isUsingWorkspaces && "-w", + ]); + } +} + +function getInstallType({ directory }: { directory: string }): { + installType?: InstallType; + isUsingWorkspaces?: boolean; +} { + // read package.json to make sure we have a reference to turbo + const packageJsonPath = path.join(directory, "package.json"); + const pnpmWorkspaceConfig = path.join(directory, "pnpm-workspace.yaml"); + const isPnpmWorkspaces = fs.existsSync(pnpmWorkspaceConfig); + + if (!fs.existsSync(packageJsonPath)) { + console.error(`Unable to find package.json at ${packageJsonPath}`); + return { installType: undefined, isUsingWorkspaces: undefined }; + } + + const packageJson = fs.readJsonSync(packageJsonPath); + const isDevDependency = + packageJson.devDependencies && "turbo" in packageJson.devDependencies; + const isDependency = + packageJson.dependencies && "turbo" in packageJson.dependencies; + let isUsingWorkspaces = "workspaces" in packageJson || isPnpmWorkspaces; + + if (isDependency || isDevDependency) { + return { + installType: isDependency ? "dependencies" : "devDependencies", + isUsingWorkspaces, + }; + } + + return { + installType: undefined, + isUsingWorkspaces, + }; +} + +/** + Finding the correct command to upgrade depends on two things: + 1. The package manager + 2. The install method (local or global) + + We try global first to let turbo handle the inference, then we try local. +**/ +export default function getTurboUpgradeCommand({ + directory, + to, +}: { + directory: string; + to?: string; +}) { + const turboBinaryPathFromGlobal = exec(`turbo bin`, { + cwd: directory, + stdio: "pipe", + }); + const packageManagerGlobalBinaryPaths = getGlobalBinaryPaths(); + + const globalPackageManager = Object.keys( + packageManagerGlobalBinaryPaths + ).find((packageManager) => { + const packageManagerBinPath = + packageManagerGlobalBinaryPaths[packageManager as PackageManager]; + if (packageManagerBinPath && turboBinaryPathFromGlobal) { + return turboBinaryPathFromGlobal.includes(packageManagerBinPath); + } + + return false; + }) as PackageManager; + + if (turboBinaryPathFromGlobal && globalPackageManager) { + // figure which package manager we need to upgrade + return getGlobalUpgradeCommand(globalPackageManager, to); + } else { + const packageManager = getPackageManager({ directory }); + // we didn't find a global install, so we'll try to find a local one + const { installType, isUsingWorkspaces } = getInstallType({ directory }); + if (packageManager && installType) { + const packageManagerVersion = getPackageManagerVersion( + packageManager, + directory + ); + + return getLocalUpgradeCommand({ + packageManager, + packageManagerVersion, + installType, + isUsingWorkspaces, + to, + }); + } + } + + return undefined; +} diff --git a/packages/turbo-codemod/src/commands/migrate/types.ts b/packages/turbo-codemod/src/commands/migrate/types.ts new file mode 100644 index 0000000..ae90965 --- /dev/null +++ b/packages/turbo-codemod/src/commands/migrate/types.ts @@ -0,0 +1,9 @@ +import { TransformerOptions } from "../../types"; + +export type MigrateCommandArgument = "string" | undefined; + +export interface MigrateCommandOptions extends TransformerOptions { + from?: string; + to?: string; + install: boolean; +} diff --git 
diff --git a/packages/turbo-codemod/src/commands/migrate/utils.ts b/packages/turbo-codemod/src/commands/migrate/utils.ts
new file mode 100644
index 0000000..512d78b
--- /dev/null
+++ b/packages/turbo-codemod/src/commands/migrate/utils.ts
@@ -0,0 +1,16 @@
+import { execSync, ExecSyncOptions } from "child_process";
+
+function exec(
+  command: string,
+  opts: ExecSyncOptions,
+  fallback?: string
+): string | undefined {
+  try {
+    const rawResult = execSync(command, opts);
+    return rawResult.toString("utf8").trim();
+  } catch (err) {
+    return fallback || undefined;
+  }
+}
+
+export { exec };
diff --git a/packages/turbo-codemod/src/commands/transform/index.ts b/packages/turbo-codemod/src/commands/transform/index.ts
new file mode 100644
index 0000000..e3b86aa
--- /dev/null
+++ b/packages/turbo-codemod/src/commands/transform/index.ts
@@ -0,0 +1,101 @@
+import chalk from "chalk";
+import inquirer from "inquirer";
+
+import loadTransformers from "../../utils/loadTransformers";
+import checkGitStatus from "../../utils/checkGitStatus";
+import directoryInfo from "../../utils/directoryInfo";
+import type {
+  TransformCommandOptions,
+  TransformCommandArgument,
+} from "./types";
+import { Runner } from "../../runner";
+
+export default async function transform(
+  transform: TransformCommandArgument,
+  directory: TransformCommandArgument,
+  options: TransformCommandOptions
+) {
+  const transforms = loadTransformers();
+  if (options.list) {
+    console.log(
+      transforms
+        .map((transform) => `- ${chalk.cyan(transform.value)}`)
+        .join("\n")
+    );
+    return process.exit(0);
+  }
+
+  // check git status
+  if (!options.dry) {
+    checkGitStatus({ directory, force: options.force });
+  }
+
+  const answers = await inquirer.prompt<{
+    directoryInput?: string;
+    transformerInput?: string;
+  }>([
+    {
+      type: "input",
+      name: "directoryInput",
+      message: "Where is the root of the repo where the transform should run?",
+      when: !directory,
+      default: ".",
+      validate: (directory: string) => {
+        const { exists, absolute } = directoryInfo({ directory });
+        if (exists) {
+          return true;
+        } else {
+          return `Directory ${chalk.dim(`(${absolute})`)} does not exist`;
+        }
+      },
+      filter: (directory: string) => directory.trim(),
+    },
+    {
+      type: "list",
+      name: "transformerInput",
+      message: "Which transform would you like to apply?",
+      when: !transform,
+      pageSize: transforms.length,
+      choices: transforms,
+    },
+  ]);
+
+  const {
+    directoryInput: selectedDirectory = directory as string,
+    transformerInput: selectedTransformer = transform as string,
+  } = answers;
+  const { exists, absolute: root } = directoryInfo({
+    directory: selectedDirectory,
+  });
+  if (!exists) {
+    console.error(`Directory ${chalk.dim(`(${root})`)} does not exist`);
+    return process.exit(1);
+  }
+
+  const transformKeys = transforms.map((transform) => transform.value);
+  const transformData = transforms.find(
+    (transform) => transform.value === selectedTransformer
+  );
+
+  // validate transforms
+  if (!transformData) {
+    console.error(
+      `Invalid transform choice ${chalk.dim(`(${transform})`)}, pick one of:`
+    );
+    console.error(transformKeys.map((key) => `- ${key}`).join("\n"));
+    return process.exit(1);
+  }
+
+  // run the transform
+  const result = transformData.transformer({
+    root,
+    options,
+  });
+
+  if (result.fatalError) {
+    // Runner already logs this, so we can just exit
+    return process.exit(1);
+  }
+
+  Runner.logResults(result);
+}
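[Editor's note: because both inquirer prompts above are gated with
`when: !directory` / `when: !transform`, supplying both arguments makes the
command fully non-interactive, which is exactly how transform.test.ts drives
it earlier in this patch:]

    // non-interactive: both arguments supplied, so both prompts are skipped
    await transform("add-package-manager", "/work/my-repo", {
      list: false,
      force: false,
      dry: true,   // preview only
      print: true, // show the diff
    });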
file mode 100644 index 0000000..9ac2db0 --- /dev/null +++ b/packages/turbo-codemod/src/commands/transform/types.ts @@ -0,0 +1,7 @@ +import { TransformerOptions } from "../../types"; + +export type TransformCommandArgument = string | undefined; + +export interface TransformCommandOptions extends TransformerOptions { + list: boolean; +} diff --git a/packages/turbo-codemod/src/runner/FileTransform.ts b/packages/turbo-codemod/src/runner/FileTransform.ts new file mode 100644 index 0000000..3b23f73 --- /dev/null +++ b/packages/turbo-codemod/src/runner/FileTransform.ts @@ -0,0 +1,94 @@ +import chalk from "chalk"; +import { diffLines, Change, diffJson } from "diff"; +import fs from "fs-extra"; +import os from "os"; +import path from "path"; + +import type { FileTransformArgs, LogFileArgs } from "./types"; + +export default class FileTransform { + filePath: string; + rootPath: string; + before: string | object; + after?: string | object; + error?: Error; + changes: Array<Change> = []; + + constructor(args: FileTransformArgs) { + this.filePath = args.filePath; + this.rootPath = args.rootPath; + this.after = args.after; + this.error = args.error; + + // load original file for comparison + if (args.before === undefined) { + try { + if (path.extname(args.filePath) === ".json") { + this.before = fs.readJsonSync(args.filePath); + } else { + this.before = fs.readFileSync(args.filePath); + } + } catch (err) { + this.before = ""; + } + } else if (args.before === null) { + this.before = ""; + } else { + this.before = args.before; + } + + // determine diff + if (args.after) { + if (typeof this.before === "object" || typeof args.after === "object") { + this.changes = diffJson(this.before, args.after); + } else { + this.changes = diffLines(this.before, args.after); + } + } else { + this.changes = []; + } + } + + fileName(): string { + return path.relative(this.rootPath, this.filePath); + } + + write(): void { + if (this.after) { + if (typeof this.after === "object") { + fs.writeJsonSync(this.filePath, this.after, { spaces: 2 }); + } else { + fs.writeFileSync(this.filePath, this.after); + } + } + } + + additions(): number { + return this.changes.filter((c) => c.added).length; + } + + deletions(): number { + return this.changes.filter((c) => c.removed).length; + } + + hasChanges(): boolean { + return this.additions() > 0 || this.deletions() > 0; + } + + log(args: LogFileArgs): void { + if (args.diff) { + this.changes.forEach((part) => { + if (part.added) { + process.stdout.write(chalk.green(part.value)); + } else if (part.removed) { + process.stdout.write(chalk.red(part.value)); + } else { + process.stdout.write(chalk.dim(part.value)); + } + }); + console.log(os.EOL); + } else { + console.log(this.after); + } + } +} diff --git a/packages/turbo-codemod/src/runner/Runner.ts b/packages/turbo-codemod/src/runner/Runner.ts new file mode 100644 index 0000000..8f8803d --- /dev/null +++ b/packages/turbo-codemod/src/runner/Runner.ts @@ -0,0 +1,132 @@ +import chalk from "chalk"; + +import FileTransform from "./FileTransform"; +import Logger from "../utils/logger"; +import type { UtilityArgs } from "../types"; +import type { + FileResult, + ModifyFileArgs, + AbortTransformArgs, + TransformerResults, +} from "./types"; + +class Runner { + transform: string; + rootPath: string; + dry: boolean; + print: boolean; + modifications: Record<string, FileTransform> = {}; + logger: Logger; + + constructor(options: UtilityArgs) { + this.transform = options.transformer; + this.rootPath = options.rootPath; + this.dry = options.dry; + this.print = options.print; +
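+ // note: every FileTransform created via modifyFile() is collected in `modifications`; nothing touches disk until finish(), which keeps dry-run and print handling in one place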
this.logger = new Logger(options); + } + + abortTransform(args: AbortTransformArgs): TransformerResults { + this.logger.error(args.reason); + return { + fatalError: new Error(args.reason), + changes: args.changes || {}, + }; + } + + // add a file to be transformed + modifyFile(args: ModifyFileArgs): void { + this.modifications[args.filePath] = new FileTransform({ + rootPath: this.rootPath, + ...args, + }); + } + + // execute all transforms and track results for reporting + finish(): TransformerResults { + const results: TransformerResults = { changes: {} }; + // perform all actions and track results + Object.keys(this.modifications).forEach((filePath) => { + const mod = this.modifications[filePath]; + const result: FileResult = { + action: "unchanged", + additions: mod.additions(), + deletions: mod.deletions(), + }; + + if (mod.hasChanges()) { + if (this.dry) { + result.action = "skipped"; + this.logger.skipped(chalk.dim(mod.fileName())); + } else { + try { + mod.write(); + result.action = "modified"; + this.logger.modified(chalk.bold(mod.fileName())); + } catch (err) { + let message = "Unknown error"; + if (err instanceof Error) { + message = err.message; + } + result.error = new Error(message); + result.action = "error"; + this.logger.error(mod.fileName(), message); + } + } + + if (this.print) { + mod.log({ diff: true }); + } + } else { + this.logger.unchanged(chalk.dim(mod.fileName())); + } + + results.changes[mod.fileName()] = result; + }); + + const encounteredError = Object.keys(results.changes).some((fileName) => { + return results.changes[fileName].action === "error"; + }); + + if (encounteredError) { + return this.abortTransform({ + reason: "Encountered an error while transforming files", + changes: results.changes, + }); + } + + return results; + } + + static logResults(results: TransformerResults): void { + const changedFiles = Object.keys(results.changes); + console.log(); + if (changedFiles.length > 0) { + console.log(chalk.bold(`Results:`)); + const table: Record< + string, + { + action: FileResult["action"]; + additions: FileResult["additions"]; + deletions: FileResult["deletions"]; + error?: string; + } + > = {}; + + changedFiles.forEach((fileName) => { + const fileChanges = results.changes[fileName]; + table[fileName] = { + action: fileChanges.action, + additions: fileChanges.additions, + deletions: fileChanges.deletions, + error: fileChanges.error?.message || "None", + }; + }); + + console.table(table); + console.log(); + } + } +} + +export default Runner; diff --git a/packages/turbo-codemod/src/runner/index.ts b/packages/turbo-codemod/src/runner/index.ts new file mode 100644 index 0000000..2aa323d --- /dev/null +++ b/packages/turbo-codemod/src/runner/index.ts @@ -0,0 +1,3 @@ +export { default as Runner } from "./Runner"; + +export type { TransformerResults, FileDiffer, FileWriter } from "./types"; diff --git a/packages/turbo-codemod/src/runner/types.ts b/packages/turbo-codemod/src/runner/types.ts new file mode 100644 index 0000000..e7c37d4 --- /dev/null +++ b/packages/turbo-codemod/src/runner/types.ts @@ -0,0 +1,40 @@ +import { Change } from "diff"; + +export interface FileResult { + action: "skipped" | "modified" | "unchanged" | "error"; + error?: Error; + additions: number; + deletions: number; +} + +export interface FileTransformArgs extends ModifyFileArgs { + rootPath: string; +} + +export interface ModifyFileArgs { + filePath: string; + before?: string | object; + after?: string | object; + error?: Error; +} + +export interface AbortTransformArgs { + reason: string; 
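+ // per-file results already recorded before the abort, keyed by file name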
+ changes?: Record<string, FileResult>; +} + +export interface LogFileArgs { + diff?: boolean; +} + +export type FileWriter = (filePath: string, contents: string | object) => void; + +export type FileDiffer = ( + before: string | object, + after: string | object +) => Array<Change>; + +export interface TransformerResults { + fatalError?: Error; + changes: Record<string, FileResult>; +} diff --git a/packages/turbo-codemod/src/transforms/README.md b/packages/turbo-codemod/src/transforms/README.md new file mode 100644 index 0000000..8e4430f --- /dev/null +++ b/packages/turbo-codemod/src/transforms/README.md @@ -0,0 +1,36 @@ +# `@turbo/codemod` Transformers + +## Adding new transformers + +Add new transformers using the [plopjs](https://github.com/plopjs/plop) template by running: + +```bash +pnpm add-transformer +``` + +New transformers will be automatically surfaced to the `transform` CLI command and used by the `migrate` CLI command when appropriate. + +## How it works + +Transformers are loaded automatically from the `src/transforms/` directory via the [`loadTransformers`](../utils/loadTransformers.ts) function. + +All new transformers must contain a default export that matches the [`Transformer`](../types.ts) type: + +```ts +export type Transformer = { + name: string; + value: string; + introducedIn: string; + transformer: (args: TransformerArgs) => TransformerResults; +}; +``` + +## Writing a Transform + +Transforms are run using the [TransformRunner](../runner/Runner.ts). This class is designed to make writing transforms as simple as possible by abstracting away all of the boilerplate that determines what should be logged, saved, or output as a result. + +To use the TransformRunner: + +1. Transform each file in memory (do not write it back to disk; `TransformRunner` takes care of this depending on the options passed in by the user), and pass it to the `TransformRunner.modifyFile` method. +2. If the transform encounters an unrecoverable error, pass it to the `TransformRunner.abortTransform` method. +3. When all files have been modified and passed to `TransformRunner.modifyFile`, call the `TransformRunner.finish` method to write the files to disk (when not running in `dry` mode) and log the results. diff --git a/packages/turbo-codemod/src/transforms/add-package-manager.ts b/packages/turbo-codemod/src/transforms/add-package-manager.ts new file mode 100644 index 0000000..bd6581f --- /dev/null +++ b/packages/turbo-codemod/src/transforms/add-package-manager.ts @@ -0,0 +1,75 @@ +import path from "path"; +import fs from "fs-extra"; + +import getPackageManager from "../utils/getPackageManager"; +import getPackageManagerVersion from "../utils/getPackageManagerVersion"; +import getTransformerHelpers from "../utils/getTransformerHelpers"; +import { TransformerResults } from "../runner"; +import type { TransformerArgs } from "../types"; + +// transformer details
const TRANSFORMER = "add-package-manager"; +const DESCRIPTION = "Set the `packageManager` key in root `package.json` file"; +const INTRODUCED_IN = "1.1.0"; + +export function transformer({ + root, + options, +}: TransformerArgs): TransformerResults { + const { log, runner } = getTransformerHelpers({ + transformer: TRANSFORMER, + rootPath: root, + options, + }); + + log.info(`Set "packageManager" key in root "package.json" file...`); + const packageManager = getPackageManager({ directory: root }); + if (!packageManager) { + return runner.abortTransform({ + reason: `Unable to determine package manager for ${root}`, + }); + } + + // handle workspaces...
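+ // (note) only the root package.json is modified below; `allWorkspaces` is shaped as a list so per-workspace package.json entries could be appended later without changing the write loop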
+ let version = null; + try { + version = getPackageManagerVersion(packageManager, root); + } catch (err) { + return runner.abortTransform({ + reason: `Unable to determine package manager version for ${root}`, + }); + } + const pkgManagerString = `${packageManager}@${version}`; + const rootPackageJsonPath = path.join(root, "package.json"); + const rootPackageJson = fs.readJsonSync(rootPackageJsonPath); + const allWorkspaces = [ + { + name: "package.json", + path: root, + packageJson: { + ...rootPackageJson, + packageJsonPath: rootPackageJsonPath, + }, + }, + ]; + + for (const workspace of allWorkspaces) { + const { packageJsonPath, ...pkgJson } = workspace.packageJson; + const newJson = { ...pkgJson, packageManager: pkgManagerString }; + runner.modifyFile({ + filePath: packageJsonPath, + after: newJson, + }); + } + + return runner.finish(); +} + +const transformerMeta = { + name: `${TRANSFORMER}: ${DESCRIPTION}`, + value: TRANSFORMER, + introducedIn: INTRODUCED_IN, + transformer, +}; + +export default transformerMeta; diff --git a/packages/turbo-codemod/src/transforms/create-turbo-config.ts b/packages/turbo-codemod/src/transforms/create-turbo-config.ts new file mode 100644 index 0000000..0e8549a --- /dev/null +++ b/packages/turbo-codemod/src/transforms/create-turbo-config.ts @@ -0,0 +1,70 @@ +import fs from "fs-extra"; +import path from "path"; + +import { TransformerResults } from "../runner"; +import getTransformerHelpers from "../utils/getTransformerHelpers"; +import type { TransformerArgs } from "../types"; + +// transformer details +const TRANSFORMER = "create-turbo-config"; +const DESCRIPTION = + 'Create the `turbo.json` file from an existing "turbo" key in `package.json`'; +const INTRODUCED_IN = "1.1.0"; + +export function transformer({ + root, + options, +}: TransformerArgs): TransformerResults { + const { log, runner } = getTransformerHelpers({ + transformer: TRANSFORMER, + rootPath: root, + options, + }); + + log.info(`Migrating "package.json" "turbo" key to "turbo.json" file...`); + const turboConfigPath = path.join(root, "turbo.json"); + const rootPackageJsonPath = path.join(root, "package.json"); + if (!fs.existsSync(rootPackageJsonPath)) { + return runner.abortTransform({ + reason: `No package.json found at ${root}. 
Is the path correct?`, + }); + } + + // read files + const rootPackageJson = fs.readJsonSync(rootPackageJsonPath); + let rootTurboJson = null; + try { + rootTurboJson = fs.readJSONSync(turboConfigPath); + } catch (err) { + rootTurboJson = null; + } + + // modify files + let transformedPackageJson = rootPackageJson; + let transformedTurboConfig = rootTurboJson; + if (!rootTurboJson && rootPackageJson["turbo"]) { + const { turbo: turboConfig, ...remainingPkgJson } = rootPackageJson; + transformedTurboConfig = turboConfig; + transformedPackageJson = remainingPkgJson; + } + + runner.modifyFile({ + filePath: turboConfigPath, + after: transformedTurboConfig, + }); + runner.modifyFile({ + filePath: rootPackageJsonPath, + after: transformedPackageJson, + }); + + return runner.finish(); +} + +const transformerMeta = { + name: `${TRANSFORMER}: ${DESCRIPTION}`, + value: TRANSFORMER, + introducedIn: INTRODUCED_IN, + transformer, +}; + +export default transformerMeta; diff --git a/packages/turbo-codemod/src/transforms/migrate-env-var-dependencies.ts b/packages/turbo-codemod/src/transforms/migrate-env-var-dependencies.ts new file mode 100644 index 0000000..ef3a34c --- /dev/null +++ b/packages/turbo-codemod/src/transforms/migrate-env-var-dependencies.ts @@ -0,0 +1,181 @@ +import fs from "fs-extra"; +import path from "path"; +import { getTurboConfigs } from "@turbo/utils"; +import type { Schema, Pipeline } from "@turbo/types"; + +import getTransformerHelpers from "../utils/getTransformerHelpers"; +import { TransformerResults } from "../runner"; +import type { TransformerArgs } from "../types"; + +// transformer details +const TRANSFORMER = "migrate-env-var-dependencies"; +const DESCRIPTION = + 'Migrate environment variable dependencies from "dependsOn" to "env" in `turbo.json`'; +const INTRODUCED_IN = "1.5.0"; + +export function hasLegacyEnvVarDependencies(config: Schema) { + const dependsOn = [ + "extends" in config ? [] : config.globalDependencies, + Object.values(config.pipeline).flatMap( + (pipeline) => pipeline.dependsOn ?? 
[] + ), + ].flat(); + const envVars = dependsOn.filter((dep) => dep?.startsWith("$")); + return { hasKeys: !!envVars.length, envVars }; +} + +export function migrateDependencies({ + env, + deps, +}: { + env?: string[]; + deps?: string[]; +}) { + const envDeps: Set<string> = new Set(env); + const otherDeps: string[] = []; + deps?.forEach((dep) => { + if (dep.startsWith("$")) { + envDeps.add(dep.slice(1)); + } else { + otherDeps.push(dep); + } + }); + if (envDeps.size) { + return { + deps: otherDeps, + env: Array.from(envDeps), + }; + } else { + return { env, deps }; + } +} + +export function migratePipeline(pipeline: Pipeline) { + const { deps: dependsOn, env } = migrateDependencies({ + env: pipeline.env, + deps: pipeline.dependsOn, + }); + const migratedPipeline = { ...pipeline }; + if (dependsOn) { + migratedPipeline.dependsOn = dependsOn; + } else { + delete migratedPipeline.dependsOn; + } + if (env && env.length) { + migratedPipeline.env = env; + } else { + delete migratedPipeline.env; + } + + return migratedPipeline; +} + +export function migrateGlobal(config: Schema) { + if ("extends" in config) { + return config; + } + + const { deps: globalDependencies, env } = migrateDependencies({ + env: config.globalEnv, + deps: config.globalDependencies, + }); + const migratedConfig = { ...config }; + if (globalDependencies && globalDependencies.length) { + migratedConfig.globalDependencies = globalDependencies; + } else { + delete migratedConfig.globalDependencies; + } + if (env && env.length) { + migratedConfig.globalEnv = env; + } else { + delete migratedConfig.globalEnv; + } + return migratedConfig; +} + +export function migrateConfig(config: Schema) { + const migratedConfig = migrateGlobal(config); + Object.keys(config.pipeline).forEach((pipelineKey) => { + if (migratedConfig.pipeline && config.pipeline[pipelineKey]) { + const pipeline = migratedConfig.pipeline[pipelineKey]; + migratedConfig.pipeline[pipelineKey] = { + ...pipeline, + ...migratePipeline(pipeline), + }; + } + }); + return migratedConfig; +} + +export function transformer({ + root, + options, +}: TransformerArgs): TransformerResults { + const { log, runner } = getTransformerHelpers({ + transformer: TRANSFORMER, + rootPath: root, + options, + }); + + log.info( + `Migrating environment variable dependencies from "globalDependencies" and "dependsOn" to "env" in "turbo.json"...` + ); + + // validate we don't have a package.json config + const packageJsonPath = path.join(root, "package.json"); + let packageJSON = {}; + try { + packageJSON = fs.readJSONSync(packageJsonPath); + } catch (e) { + // readJSONSync probably failed because the file doesn't exist + } + + if ("turbo" in packageJSON) { + return runner.abortTransform({ + reason: + '"turbo" key detected in package.json. Run `npx @turbo/codemod transform create-turbo-config` first', + }); + } + + // validate we have a root config + const turboConfigPath = path.join(root, "turbo.json"); + if (!fs.existsSync(turboConfigPath)) { + return runner.abortTransform({ + reason: `No turbo.json found at ${root}.
Is the path correct?`, + }); + } + + let turboJson: Schema = fs.readJsonSync(turboConfigPath); + if (hasLegacyEnvVarDependencies(turboJson).hasKeys) { + turboJson = migrateConfig(turboJson); + } + + runner.modifyFile({ + filePath: turboConfigPath, + after: turboJson, + }); + + // find and migrate any workspace configs + const workspaceConfigs = getTurboConfigs(root); + workspaceConfigs.forEach((workspaceConfig) => { + const { config, turboConfigPath } = workspaceConfig; + if (hasLegacyEnvVarDependencies(config).hasKeys) { + runner.modifyFile({ + filePath: turboConfigPath, + after: migrateConfig(config), + }); + } + }); + + return runner.finish(); +} + +const transformerMeta = { + name: `${TRANSFORMER}: ${DESCRIPTION}`, + value: TRANSFORMER, + introducedIn: INTRODUCED_IN, + transformer, +}; + +export default transformerMeta; diff --git a/packages/turbo-codemod/src/transforms/set-default-outputs.ts b/packages/turbo-codemod/src/transforms/set-default-outputs.ts new file mode 100644 index 0000000..44f7fd1 --- /dev/null +++ b/packages/turbo-codemod/src/transforms/set-default-outputs.ts @@ -0,0 +1,97 @@ +import path from "path"; +import fs from "fs-extra"; +import { getTurboConfigs } from "@turbo/utils"; +import type { Schema as TurboJsonSchema } from "@turbo/types"; + +import type { TransformerArgs } from "../types"; +import getTransformerHelpers from "../utils/getTransformerHelpers"; +import { TransformerResults } from "../runner"; + +const DEFAULT_OUTPUTS = ["dist/**", "build/**"]; + +// transformer details +const TRANSFORMER = "set-default-outputs"; +const DESCRIPTION = + 'Add the "outputs" key with defaults where it is missing in `turbo.json`'; +const INTRODUCED_IN = "1.7.0"; + +function migrateConfig(config: TurboJsonSchema) { + for (const [_, taskDef] of Object.entries(config.pipeline)) { + if (taskDef.cache !== false) { + if (!taskDef.outputs) { + taskDef.outputs = DEFAULT_OUTPUTS; + } else if ( + Array.isArray(taskDef.outputs) && + taskDef.outputs.length === 0 + ) { + delete taskDef.outputs; + } + } + } + + return config; +} + +export function transformer({ + root, + options, +}: TransformerArgs): TransformerResults { + const { log, runner } = getTransformerHelpers({ + transformer: TRANSFORMER, + rootPath: root, + options, + }); + + // If `turbo` key is detected in package.json, require user to run the other codemod first. + const packageJsonPath = path.join(root, "package.json"); + // package.json should always exist, but if it doesn't, it would be a silly place to blow up this codemod + let packageJSON = {}; + + try { + packageJSON = fs.readJSONSync(packageJsonPath); + } catch (e) { + // readJSONSync probably failed because the file doesn't exist + } + + if ("turbo" in packageJSON) { + return runner.abortTransform({ + reason: + '"turbo" key detected in package.json. Run `npx @turbo/codemod transform create-turbo-config` first', + }); + } + + log.info(`Adding default \`outputs\` key into tasks if it doesn't exist`); + const turboConfigPath = path.join(root, "turbo.json"); + if (!fs.existsSync(turboConfigPath)) { + return runner.abortTransform({ + reason: `No turbo.json found at ${root}. 
Is the path correct?`, + }); + } + + const turboJson: TurboJsonSchema = fs.readJsonSync(turboConfigPath); + runner.modifyFile({ + filePath: turboConfigPath, + after: migrateConfig(turboJson), + }); + + // find and migrate any workspace configs + const workspaceConfigs = getTurboConfigs(root); + workspaceConfigs.forEach((workspaceConfig) => { + const { config, turboConfigPath } = workspaceConfig; + runner.modifyFile({ + filePath: turboConfigPath, + after: migrateConfig(config), + }); + }); + + return runner.finish(); +} + +const transformerMeta = { + name: `${TRANSFORMER}: ${DESCRIPTION}`, + value: TRANSFORMER, + introducedIn: INTRODUCED_IN, + transformer, +}; + +export default transformerMeta; diff --git a/packages/turbo-codemod/src/types.ts b/packages/turbo-codemod/src/types.ts new file mode 100644 index 0000000..d5c13c3 --- /dev/null +++ b/packages/turbo-codemod/src/types.ts @@ -0,0 +1,24 @@ +import { TransformerResults } from "./runner"; + +export type Transformer = { + name: string; + value: string; + introducedIn: string; + transformer: (args: TransformerArgs) => TransformerResults; +}; + +export type TransformerOptions = { + force: boolean; + dry: boolean; + print: boolean; +}; + +export type TransformerArgs = { + root: string; + options: TransformerOptions; +}; + +export interface UtilityArgs extends TransformerOptions { + transformer: string; + rootPath: string; +} diff --git a/packages/turbo-codemod/src/utils/checkGitStatus.ts b/packages/turbo-codemod/src/utils/checkGitStatus.ts new file mode 100644 index 0000000..68d39ae --- /dev/null +++ b/packages/turbo-codemod/src/utils/checkGitStatus.ts @@ -0,0 +1,40 @@ +import chalk from "chalk"; +import isGitClean from "is-git-clean"; + +export default function checkGitStatus({ + directory, + force, +}: { + directory?: string; + force: boolean; +}) { + let clean = false; + let errorMessage = "Unable to determine if git directory is clean"; + try { + clean = isGitClean.sync(directory || process.cwd()); + errorMessage = "Git directory is not clean"; + } catch (err: any) { + if (err && err.stderr && err.stderr.indexOf("not a git repository") >= 0) { + clean = true; + } + } + + if (!clean) { + if (force) { + console.log( + `${chalk.yellow("WARNING")}: ${errorMessage}. Forcibly continuing...` + ); + } else { + console.log("Thank you for using @turbo/codemod!"); + console.log( + chalk.yellow( + "\nBut before we continue, please stash or commit your git changes." + ) + ); + console.log( + "\nYou may use the --force flag to override this safety check." + ); + process.exit(1); + } + } +} diff --git a/packages/turbo-codemod/src/utils/directoryInfo.ts b/packages/turbo-codemod/src/utils/directoryInfo.ts new file mode 100644 index 0000000..7cb3594 --- /dev/null +++ b/packages/turbo-codemod/src/utils/directoryInfo.ts @@ -0,0 +1,10 @@ +import path from "path"; +import fs from "fs"; + +export default function directoryInfo({ directory }: { directory: string }) { + const dir = path.isAbsolute(directory) + ? 
directory + : path.join(process.cwd(), directory); + + return { exists: fs.existsSync(dir), absolute: dir }; +} diff --git a/packages/turbo-codemod/src/utils/getPackageManager.ts b/packages/turbo-codemod/src/utils/getPackageManager.ts new file mode 100644 index 0000000..1df0acc --- /dev/null +++ b/packages/turbo-codemod/src/utils/getPackageManager.ts @@ -0,0 +1,42 @@ +import findUp from "find-up"; +import path from "path"; + +export type PackageManager = "yarn" | "pnpm" | "npm"; + +const cache: { [cwd: string]: PackageManager } = {}; + +export default function getPackageManager({ + directory, +}: { directory?: string } = {}): PackageManager | undefined { + const cwd = directory || process.cwd(); + if (cache[cwd]) { + return cache[cwd]; + } + + const lockFile = findUp.sync( + ["yarn.lock", "pnpm-lock.yaml", "package-lock.json"], + { + cwd, + } + ); + + if (!lockFile) { + return; + } + + switch (path.basename(lockFile)) { + case "yarn.lock": + cache[cwd] = "yarn"; + break; + + case "pnpm-lock.yaml": + cache[cwd] = "pnpm"; + break; + + case "package-lock.json": + cache[cwd] = "npm"; + break; + } + + return cache[cwd]; +} diff --git a/packages/turbo-codemod/src/utils/getPackageManagerVersion.ts b/packages/turbo-codemod/src/utils/getPackageManagerVersion.ts new file mode 100644 index 0000000..54a572a --- /dev/null +++ b/packages/turbo-codemod/src/utils/getPackageManagerVersion.ts @@ -0,0 +1,16 @@ +import { execSync } from "child_process"; +import type { PackageManager } from "./getPackageManager"; + +export default function getPackageManagerVersion( + packageManager: PackageManager, + root: string +): string { + switch (packageManager) { + case "yarn": + return execSync("yarn --version", { cwd: root }).toString().trim(); + case "pnpm": + return execSync("pnpm --version", { cwd: root }).toString().trim(); + case "npm": + return execSync("npm --version", { cwd: root }).toString().trim(); + } +} diff --git a/packages/turbo-codemod/src/utils/getTransformerHelpers.ts b/packages/turbo-codemod/src/utils/getTransformerHelpers.ts new file mode 100644 index 0000000..e37da6e --- /dev/null +++ b/packages/turbo-codemod/src/utils/getTransformerHelpers.ts @@ -0,0 +1,23 @@ +import { TransformerOptions } from "../types"; +import { Runner } from "../runner"; +import Logger from "./logger"; + +export default function getTransformerHelpers({ + transformer, + rootPath, + options, +}: { + transformer: string; + rootPath: string; + options: TransformerOptions; +}) { + const utilArgs = { + transformer, + rootPath, + ...options, + }; + const log = new Logger(utilArgs); + const runner = new Runner(utilArgs); + + return { log, runner }; +} diff --git a/packages/turbo-codemod/src/utils/loadTransformers.ts b/packages/turbo-codemod/src/utils/loadTransformers.ts new file mode 100644 index 0000000..9ba5ca1 --- /dev/null +++ b/packages/turbo-codemod/src/utils/loadTransformers.ts @@ -0,0 +1,27 @@ +import path from "path"; +import fs from "fs-extra"; +import type { Transformer } from "../types"; + +// transforms/ is a sibling of this file when built into dist/ +export const transformerDirectory = + process.env.NODE_ENV === "test" + ?
path.join(__dirname, "../transforms") + : path.join(__dirname, "./transforms"); + +export default function loadTransformers(): Array<Transformer> { + const transformerFiles = fs.readdirSync(transformerDirectory); + return transformerFiles + .map((transformerFilename) => { + const transformerPath = path.join( + transformerDirectory, + transformerFilename + ); + try { + return require(transformerPath).default; + } catch (e) { + // we ignore this error because it's likely that the file is not a transformer (README, etc) + return undefined; + } + }) + .filter(Boolean); +} diff --git a/packages/turbo-codemod/src/utils/logger.ts b/packages/turbo-codemod/src/utils/logger.ts new file mode 100644 index 0000000..123a836 --- /dev/null +++ b/packages/turbo-codemod/src/utils/logger.ts @@ -0,0 +1,47 @@ +import chalk from "chalk"; +import { UtilityArgs } from "../types"; + +export default class Logger { + transform: string; + dry: boolean; + + constructor(args: UtilityArgs) { + this.transform = args.transformer; + this.dry = args.dry; + } + modified(...args: any[]) { + console.log( + chalk.green(` MODIFIED `), + ...args, + this.dry ? chalk.dim(`(dry run)`) : "" + ); + } + unchanged(...args: any[]) { + console.log( + chalk.gray(` UNCHANGED `), + ...args, + this.dry ? chalk.dim(`(dry run)`) : "" + ); + } + skipped(...args: any[]) { + console.log( + chalk.yellow(` SKIPPED `), + ...args, + this.dry ? chalk.dim(`(dry run)`) : "" + ); + } + error(...args: any[]) { + console.log( + chalk.red(` ERROR `), + ...args, + this.dry ? chalk.dim(`(dry run)`) : "" + ); + } + info(...args: any[]) { + console.log( + chalk.bold(` INFO `), + ...args, + this.dry ? chalk.dim(`(dry run)`) : "" + ); + } +} diff --git a/packages/turbo-codemod/src/utils/looksLikeRepo.ts b/packages/turbo-codemod/src/utils/looksLikeRepo.ts new file mode 100644 index 0000000..77f0e5c --- /dev/null +++ b/packages/turbo-codemod/src/utils/looksLikeRepo.ts @@ -0,0 +1,12 @@ +import path from "path"; +import { existsSync } from "fs-extra"; + +const HINTS = ["package.json", "turbo.json", ".git"]; + +export default function looksLikeRepo({ + directory, +}: { + directory: string; +}): boolean { + return HINTS.some((hint) => existsSync(path.join(directory, hint))); +} diff --git a/packages/turbo-codemod/src/utils/notifyUpdate.ts b/packages/turbo-codemod/src/utils/notifyUpdate.ts new file mode 100644 index 0000000..634ffd8 --- /dev/null +++ b/packages/turbo-codemod/src/utils/notifyUpdate.ts @@ -0,0 +1,35 @@ +import chalk from "chalk"; +import checkForUpdate from "update-check"; + +import cliPkgJson from "../../package.json"; +import getWorkspaceImplementation from "./getPackageManager"; + +const update = checkForUpdate(cliPkgJson).catch(() => null); + +export default async function notifyUpdate(): Promise<void> { + try { + const res = await update; + if (res?.latest) { + const ws = getWorkspaceImplementation(); + + console.log(); + console.log( + chalk.yellow.bold("A new version of `@turbo/codemod` is available!") + ); + console.log( + "You can update by running: " + + chalk.cyan( + ws === "yarn" + ? "yarn global add @turbo/codemod" + : ws === "pnpm" + ?
"pnpm i -g @turbo/codemod" + : "npm i -g @turbo/codemod" + ) + ); + console.log(); + } + process.exit(); + } catch (_e: any) { + // ignore error + } +} diff --git a/packages/turbo-codemod/templates/transformer.hbs b/packages/turbo-codemod/templates/transformer.hbs new file mode 100644 index 0000000..593490a --- /dev/null +++ b/packages/turbo-codemod/templates/transformer.hbs @@ -0,0 +1,45 @@ +import { TransformerArgs } from "../types"; +import { TransformerResults } from "../runner"; +import getTransformerHelpers from "../utils/getTransformerHelpers"; + +// transformer details +const TRANSFORMER = "{{ name }}"; +const DESCRIPTION = "{{ description }}"; +const INTRODUCED_IN = "{{ introducedIn }}"; + +export function transformer({ + root, + options, +}: TransformerArgs): TransformerResults { + const { log, runner } = getTransformerHelpers({ + transformer: TRANSFORMER, + rootPath: root, + options, + }); + + log.info("Short description about {{ name }}") + + /* + Make changes to required files, and track each modified file with: + + runner.modifyFile({ + filePath: packageJsonPath, // absolute path to file + after: transformedFile, // file after modifications have been made + }); + + This automatically handles all cases of print / dry etc. + */ + + return runner.finish(); +} + +const transformerMeta = { + name: `${TRANSFORMER}: ${DESCRIPTION}`, + value: TRANSFORMER, + introducedIn: INTRODUCED_IN, + transformer, +}; + +export default transformerMeta; + + diff --git a/packages/turbo-codemod/templates/transformer.test.hbs b/packages/turbo-codemod/templates/transformer.test.hbs new file mode 100644 index 0000000..c63a9df --- /dev/null +++ b/packages/turbo-codemod/templates/transformer.test.hbs @@ -0,0 +1,25 @@ +import { transformer } from "../src/transforms/{{ name }}"; +import { setupTestFixtures } from "./test-utils"; + +describe("{{ name }}", () => { + + const { useFixture } = setupTestFixtures({ test: "{{ name }}" }); + + test("basic", () => { + // load the fixture for the test + const { root, read, readJson } = useFixture({ + fixture: "specific-fixture", + }); + + // run the transformer + const result = transformer({ + root, + options: { force: false, dry: false, print: false }, + }); + + // result should be correct + expect(result.fatalError).toBeUndefined(); + expect(result.changes).toMatchInlineSnapshot(); + }); +}); + diff --git a/packages/turbo-codemod/tsconfig.json b/packages/turbo-codemod/tsconfig.json new file mode 100644 index 0000000..0620a3c --- /dev/null +++ b/packages/turbo-codemod/tsconfig.json @@ -0,0 +1,6 @@ +{ + "extends": "@turbo/tsconfig/library.json", + "compilerOptions": { + "rootDir": "." 
+ } +} diff --git a/packages/turbo-codemod/tsup.config.ts b/packages/turbo-codemod/tsup.config.ts new file mode 100644 index 0000000..8e92107 --- /dev/null +++ b/packages/turbo-codemod/tsup.config.ts @@ -0,0 +1,9 @@ +import { defineConfig, Options } from "tsup"; + +export default defineConfig((options: Options) => ({ + entry: ["src/cli.ts", "src/transforms/*.ts"], + format: ["cjs"], + clean: true, + minify: true, + ...options, +})); diff --git a/packages/turbo-ignore/README.md b/packages/turbo-ignore/README.md new file mode 100644 index 0000000..866c0e7 --- /dev/null +++ b/packages/turbo-ignore/README.md @@ -0,0 +1,99 @@ +# `turbo-ignore` + +To get started, use the following command as your [Ignored Build Step](https://vercel.com/docs/concepts/projects/overview#ignored-build-step): + +```sh +$ npx turbo-ignore +``` + +This uses `turbo` to automatically determine if the current app has new changes that need to be deployed. + +## Usage + +Use `npx turbo-ignore --help` to see the list of options: + +```sh +turbo-ignore + +Automatically ignore builds that have no changes + +Usage: + $ npx turbo-ignore [<workspace>] [flags...] + +If <workspace> is not provided, it will be inferred from the "name" +field of the "package.json" located at the current working directory. + +Flags: + --fallback=<ref> On Vercel, if no previously deployed SHA is available to compare against, + fallback to comparing against the provided ref [default: None] + --help, -h Show this help message + --version, -v Show the version of this script + +--- + +turbo-ignore will also check for special commit messages to indicate if a build should be skipped or not. + +Skip turbo-ignore check and automatically ignore: + - [skip ci] + - [ci skip] + - [no ci] + - [skip vercel] + - [vercel skip] + - [vercel skip <workspace>] + +Skip turbo-ignore check and automatically deploy: + - [vercel deploy] + - [vercel build] + - [vercel deploy <workspace>] + - [vercel build <workspace>] +``` + +### Examples + +```sh +npx turbo-ignore +``` + +> Only build if there are changes to the workspace in the current working directory, or any of its dependencies. On Vercel, compare against the last successful deployment for the current branch. When not on Vercel, compare against the parent commit (`HEAD^`). + +--- + +```sh +npx turbo-ignore docs +``` + +> Only build if there are changes to the `docs` workspace, or any of its dependencies. On Vercel, compare against the last successful deployment for the current branch. When not on Vercel, compare against the parent commit (`HEAD^`). + +--- + +```sh +npx turbo-ignore --fallback=HEAD~10 +``` + +> Only build if there are changes to the workspace in the current working directory, or any of its dependencies. On Vercel, compare against the last successful deployment for the current branch. If this does not exist (first deploy of the branch), compare against the previous 10 commits. When not on Vercel, always compare against the parent commit (`HEAD^`). + +--- + +```sh +npx turbo-ignore --fallback=HEAD^ +``` + +> Only build if there are changes to the workspace in the current working directory, or any of its dependencies. On Vercel, compare against the last successful deployment for the current branch. If this does not exist (first deploy of the branch), compare against the parent commit (`HEAD^`). When not on Vercel, always compare against the parent commit (`HEAD^`). + +## How it Works + +`turbo-ignore` determines if a build should continue by analyzing the package dependency graph of the given workspace.
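+ +In sketch form, the underlying check looks something like the following (an editor's illustration, not the shipped implementation; it assumes the `--dry=json` output exposes a top-level `packages` array naming the affected workspaces): + +```ts +import { exec } from "child_process"; + +// Sketch only: compare the workspace against a ref and report the decision +// through the exit code. On Vercel's Ignored Build Step, exit 0 means +// "ignore this build" and exit 1 means "continue the build". +function checkForChanges(workspace: string, ref: string): void { + const command = `npx turbo run build --filter=${workspace}...[${ref}] --dry=json`; + exec(command, (err, stdout) => { + if (err) { + // any failure falls through to a build rather than a skipped deploy + return process.exit(1); + } + const { packages = [] } = JSON.parse(stdout) as { packages?: string[] }; + process.exit(packages.includes(workspace) ? 1 : 0); + }); +} +```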
+ +The _given workspace_ is determined by reading the "name" field in the "package.json" file located at the current working directory, or by passing in a workspace name as the first argument to `turbo-ignore`. + +Next, it uses `turbo run build --dry` to determine if the given workspace, _or any dependencies of the workspace_, have changed since the previous commit. + +**NOTE:** `turbo` determines dependencies from reading the dependency graph of the given workspace. This means a workspace **must** be listed as a `dependency` (or `devDependency`) in the given workspace's `package.json` for `turbo` to recognize it. + +When deploying on [Vercel](https://vercel.com), `turbo-ignore` can make a more accurate decision by comparing the current commit against the last successfully deployed commit for the current branch. + +**NOTE:** By default on Vercel, `turbo-ignore` will always deploy the first commit of a new branch. This behavior can be changed by providing the `ref` to compare against via the `--fallback` flag. See the [Examples](#Examples) section for more details. + +--- + +For more information about Turborepo, visit [turbo.build](https://turbo.build) and follow us on Twitter ([@turborepo](https://twitter.com/turborepo))! diff --git a/packages/turbo-ignore/__fixtures__/app/package.json b/packages/turbo-ignore/__fixtures__/app/package.json new file mode 100644 index 0000000..17d7c56 --- /dev/null +++ b/packages/turbo-ignore/__fixtures__/app/package.json @@ -0,0 +1,11 @@ +{ + "name": "test-app", + "private": true, + "version": "1.0.0", + "description": "", + "main": "index.js", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "author": "vercel" +} diff --git a/packages/turbo-ignore/__fixtures__/invalid-app/package.json b/packages/turbo-ignore/__fixtures__/invalid-app/package.json new file mode 100644 index 0000000..ee2f59b --- /dev/null +++ b/packages/turbo-ignore/__fixtures__/invalid-app/package.json @@ -0,0 +1,10 @@ +{ + "private": true, + "version": "1.0.0", + "description": "", + "main": "index.js", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "author": "vercel" +} diff --git a/packages/turbo-ignore/__fixtures__/no-app/index.js b/packages/turbo-ignore/__fixtures__/no-app/index.js new file mode 100644 index 0000000..e69de29 diff --git a/packages/turbo-ignore/__tests__/args.test.ts b/packages/turbo-ignore/__tests__/args.test.ts new file mode 100644 index 0000000..f546247 --- /dev/null +++ b/packages/turbo-ignore/__tests__/args.test.ts @@ -0,0 +1,109 @@ +import parseArgs, { help } from "../src/args"; +import pkg from "../package.json"; +import { spyConsole, spyExit } from "@turbo/test-utils"; + +describe("parseArgs()", () => { + const mockConsole = spyConsole(); + const mockExit = spyExit(); + + it("does not throw with no args", async () => { + const result = parseArgs({ argv: [] }); + expect(result.workspace).toBe(undefined); + expect(result.fallback).toBe(undefined); + expect(result.task).toBe(undefined); + }); + + it("outputs help text (--help)", async () => { + parseArgs({ argv: ["--help"] }); + expect(mockExit.exit).toHaveBeenCalledWith(0); + expect(mockConsole.log).toHaveBeenCalledWith(help); + }); + + it("outputs help text (-h)", async () => { + parseArgs({ argv: ["-h"] }); + expect(mockExit.exit).toHaveBeenCalledWith(0); + expect(mockConsole.log).toHaveBeenCalledWith(help); + }); + + it("outputs version text (--version)", async () => { + parseArgs({ argv: ["--version"] }); +
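+ // parseArgs handles --version itself: it logs pkg.version and exits 0, and both side effects are captured by the console/exit spies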
expect(mockExit.exit).toHaveBeenCalledWith(0); + expect(mockConsole.log).toHaveBeenCalledWith(pkg.version); + }); + + it("outputs version text (-v)", async () => { + parseArgs({ argv: ["-v"] }); + expect(mockExit.exit).toHaveBeenCalledWith(0); + expect(mockConsole.log).toHaveBeenCalledWith(pkg.version); + }); + + it("correctly finds workspace", async () => { + const result = parseArgs({ argv: ["this-workspace"] }); + expect(result.workspace).toBe("this-workspace"); + expect(result.fallback).toBe(undefined); + expect(result.task).toBe(undefined); + expect(mockExit.exit).toHaveBeenCalledTimes(0); + }); + + it("correctly finds fallback", async () => { + const result = parseArgs({ argv: ["--fallback=HEAD^"] }); + expect(result.workspace).toBe(undefined); + expect(result.fallback).toBe("HEAD^"); + expect(result.task).toBe(undefined); + expect(mockExit.exit).toHaveBeenCalledTimes(0); + }); + + it("correctly finds task", async () => { + const result = parseArgs({ argv: ["--task=some-workspace#build"] }); + expect(result.workspace).toBe(undefined); + expect(result.fallback).toBe(undefined); + expect(result.task).toBe("some-workspace#build"); + expect(mockExit.exit).toHaveBeenCalledTimes(0); + }); + + it("uses default fallback if incorrectly specified", async () => { + const result = parseArgs({ argv: ["--fallback"] }); + expect(result.workspace).toBe(undefined); + expect(result.fallback).toBe(undefined); + expect(result.task).toBe(undefined); + expect(mockExit.exit).toHaveBeenCalledTimes(0); + }); + + it("uses default fallback if empty string", async () => { + const result = parseArgs({ argv: ["--fallback="] }); + expect(result.workspace).toBe(undefined); + expect(result.fallback).toBe(undefined); + expect(result.task).toBe(undefined); + expect(mockExit.exit).toHaveBeenCalledTimes(0); + }); + + it("uses default task if incorrectly specified", async () => { + const result = parseArgs({ argv: ["--task"] }); + expect(result.workspace).toBe(undefined); + expect(result.fallback).toBe(undefined); + expect(result.task).toBe(undefined); + expect(mockExit.exit).toHaveBeenCalledTimes(0); + }); + + it("uses default task if empty string", async () => { + const result = parseArgs({ argv: ["--task="] }); + expect(result.workspace).toBe(undefined); + expect(result.fallback).toBe(undefined); + expect(result.task).toBe(undefined); + expect(mockExit.exit).toHaveBeenCalledTimes(0); + }); + + it("correctly finds fallback and workspace", async () => { + const result = parseArgs({ + argv: [ + "this-workspace", + "--fallback=HEAD~10", + "--task=some-workspace#build", + ], + }); + expect(result.workspace).toBe("this-workspace"); + expect(result.fallback).toBe("HEAD~10"); + expect(result.task).toBe("some-workspace#build"); + expect(mockExit.exit).toHaveBeenCalledTimes(0); + }); +}); diff --git a/packages/turbo-ignore/__tests__/checkCommit.test.ts b/packages/turbo-ignore/__tests__/checkCommit.test.ts new file mode 100644 index 0000000..e7e4a5f --- /dev/null +++ b/packages/turbo-ignore/__tests__/checkCommit.test.ts @@ -0,0 +1,229 @@ +import child_process from "child_process"; +import { checkCommit } from "../src/checkCommit"; +import { mockEnv } from "@turbo/test-utils"; + +describe("checkCommit()", () => { + describe("on Vercel", () => { + mockEnv(); + + describe("for all workspaces", () => { + it("results in continue when no special commit messages are found", async () => { + process.env.VERCEL = "1"; + process.env.VERCEL_GIT_COMMIT_MESSAGE = "fixing a test"; + expect(checkCommit({ workspace: "test-workspace" 
})).toEqual({ + result: "continue", + scope: "global", + reason: "No deploy or skip string found in commit message.", + }); + }); + + it("results in conflict when deploy and skip commit messages are found", async () => { + process.env.VERCEL = "1"; + process.env.VERCEL_GIT_COMMIT_MESSAGE = + "deploying [vercel deploy] and skipping [vercel skip]"; + expect(checkCommit({ workspace: "test-workspace" })).toEqual({ + result: "conflict", + scope: "global", + reason: + "Conflicting commit messages found: [vercel deploy] and [vercel skip]", + }); + }); + + it("results in deploy when deploy commit message is found", async () => { + process.env.VERCEL = "1"; + process.env.VERCEL_GIT_COMMIT_MESSAGE = "deploying [vercel deploy]"; + expect(checkCommit({ workspace: "test-workspace" })).toEqual({ + result: "deploy", + scope: "global", + reason: "Found commit message: [vercel deploy]", + }); + }); + + it("results in skip when skip commit message is found", async () => { + process.env.VERCEL = "1"; + process.env.VERCEL_GIT_COMMIT_MESSAGE = "skip deployment [vercel skip]"; + expect(checkCommit({ workspace: "test-workspace" })).toEqual({ + result: "skip", + scope: "global", + reason: "Found commit message: [vercel skip]", + }); + }); + }); + + describe("for specific workspaces", () => { + it("results in continue when no special commit messages are found", async () => { + process.env.VERCEL = "1"; + process.env.VERCEL_GIT_COMMIT_MESSAGE = + "fixing a test in test-workspace"; + expect(checkCommit({ workspace: "test-workspace" })).toEqual({ + result: "continue", + scope: "global", + reason: "No deploy or skip string found in commit message.", + }); + }); + + it("results in conflict when deploy and skip commit messages are found", async () => { + process.env.VERCEL = "1"; + process.env.VERCEL_GIT_COMMIT_MESSAGE = + "deploying [vercel deploy test-workspace] and skipping [vercel skip test-workspace]"; + expect(checkCommit({ workspace: "test-workspace" })).toEqual({ + result: "conflict", + scope: "workspace", + reason: + "Conflicting commit messages found: [vercel deploy test-workspace] and [vercel skip test-workspace]", + }); + }); + + it("results in deploy when deploy commit message is found", async () => { + process.env.VERCEL = "1"; + process.env.VERCEL_GIT_COMMIT_MESSAGE = + "deploying [vercel deploy test-workspace]"; + expect(checkCommit({ workspace: "test-workspace" })).toEqual({ + result: "deploy", + scope: "workspace", + reason: "Found commit message: [vercel deploy test-workspace]", + }); + }); + + it("results in skip when skip commit message is found", async () => { + process.env.VERCEL = "1"; + process.env.VERCEL_GIT_COMMIT_MESSAGE = + "skip deployment [vercel skip test-workspace]"; + expect(checkCommit({ workspace: "test-workspace" })).toEqual({ + result: "skip", + scope: "workspace", + reason: "Found commit message: [vercel skip test-workspace]", + }); + }); + }); + }); + describe("Not on Vercel", () => { + describe("for all workspaces", () => { + it("results in continue when no special commit messages are found", async () => { + const commitBody = "fixing a test"; + const mockExecSync = jest + .spyOn(child_process, "execSync") + .mockImplementation((_) => commitBody); + + expect(checkCommit({ workspace: "test-workspace" })).toEqual({ + result: "continue", + scope: "global", + reason: "No deploy or skip string found in commit message.", + }); + expect(mockExecSync).toHaveBeenCalledWith("git show -s --format=%B"); + mockExecSync.mockRestore(); + }); + + it("results in conflict when deploy and skip 
commit messages are found", async () => { + const commitBody = + "deploying [vercel deploy] and skipping [vercel skip]"; + const mockExecSync = jest + .spyOn(child_process, "execSync") + .mockImplementation((_) => commitBody); + + expect(checkCommit({ workspace: "test-workspace" })).toEqual({ + result: "conflict", + scope: "global", + reason: + "Conflicting commit messages found: [vercel deploy] and [vercel skip]", + }); + expect(mockExecSync).toHaveBeenCalledWith("git show -s --format=%B"); + mockExecSync.mockRestore(); + }); + + it("results in deploy when deploy commit message is found", async () => { + const commitBody = "deploying [vercel deploy]"; + const mockExecSync = jest + .spyOn(child_process, "execSync") + .mockImplementation((_) => commitBody); + + expect(checkCommit({ workspace: "test-workspace" })).toEqual({ + result: "deploy", + scope: "global", + reason: "Found commit message: [vercel deploy]", + }); + expect(mockExecSync).toHaveBeenCalledWith("git show -s --format=%B"); + mockExecSync.mockRestore(); + }); + + it("results in skip when skip commit message is found", async () => { + const commitBody = "skip deployment [vercel skip]"; + const mockExecSync = jest + .spyOn(child_process, "execSync") + .mockImplementation((_) => commitBody); + + expect(checkCommit({ workspace: "test-workspace" })).toEqual({ + result: "skip", + scope: "global", + reason: "Found commit message: [vercel skip]", + }); + expect(mockExecSync).toHaveBeenCalledWith("git show -s --format=%B"); + mockExecSync.mockRestore(); + }); + }); + + describe("for specific workspaces", () => { + it("results in continue when no special commit messages are found", async () => { + const commitBody = "fixing a test in test-workspace"; + const mockExecSync = jest + .spyOn(child_process, "execSync") + .mockImplementation((_) => commitBody); + + expect(checkCommit({ workspace: "test-workspace" })).toEqual({ + result: "continue", + scope: "global", + reason: "No deploy or skip string found in commit message.", + }); + expect(mockExecSync).toHaveBeenCalledWith("git show -s --format=%B"); + mockExecSync.mockRestore(); + }); + + it("results in conflict when deploy and skip commit messages are found", async () => { + const commitBody = + "deploying [vercel deploy test-workspace] and skipping [vercel skip test-workspace]"; + const mockExecSync = jest + .spyOn(child_process, "execSync") + .mockImplementation((_) => commitBody); + + expect(checkCommit({ workspace: "test-workspace" })).toEqual({ + result: "conflict", + scope: "workspace", + reason: + "Conflicting commit messages found: [vercel deploy test-workspace] and [vercel skip test-workspace]", + }); + expect(mockExecSync).toHaveBeenCalledWith("git show -s --format=%B"); + mockExecSync.mockRestore(); + }); + + it("results in deploy when deploy commit message is found", async () => { + const commitBody = "deploying [vercel deploy test-workspace]"; + const mockExecSync = jest + .spyOn(child_process, "execSync") + .mockImplementation((_) => commitBody); + + expect(checkCommit({ workspace: "test-workspace" })).toEqual({ + result: "deploy", + scope: "workspace", + reason: "Found commit message: [vercel deploy test-workspace]", + }); + expect(mockExecSync).toHaveBeenCalledWith("git show -s --format=%B"); + mockExecSync.mockRestore(); + }); + + it("results in skip when skip commit message is found", async () => { + const commitBody = "skip deployment [vercel skip test-workspace]"; + const mockExecSync = jest + .spyOn(child_process, "execSync") + .mockImplementation((_) => 
commitBody); + + expect(checkCommit({ workspace: "test-workspace" })).toEqual({ + result: "skip", + scope: "workspace", + reason: "Found commit message: [vercel skip test-workspace]", + }); + expect(mockExecSync).toHaveBeenCalledWith("git show -s --format=%B"); + mockExecSync.mockRestore(); + }); + }); + }); +}); diff --git a/packages/turbo-ignore/__tests__/errors.test.ts b/packages/turbo-ignore/__tests__/errors.test.ts new file mode 100644 index 0000000..18f26bd --- /dev/null +++ b/packages/turbo-ignore/__tests__/errors.test.ts @@ -0,0 +1,46 @@ +import { shouldWarn, NON_FATAL_ERRORS } from "../src/errors"; + +describe("shouldWarn()", () => { + it("it detects errors when packageManager is missing", async () => { + const result = shouldWarn({ + err: `run failed: We did not detect an in-use package manager for your project. Please set the "packageManager" property in your root package.json (https://nodejs.org/api/packages.html#packagemanager) or run \`npx @turbo/codemod add-package-manager\` in the root of your monorepo.`, + }); + expect(result.code).toBe("NO_PACKAGE_MANAGER"); + expect(result.level).toBe("warn"); + expect(result.message).toBe(NON_FATAL_ERRORS.NO_PACKAGE_MANAGER.message); + }); + + it("it detects errors when yarn lockfile is missing", async () => { + const result = shouldWarn({ + err: `* reading yarn.lock: open /test/../yarn.lock: no such file or directory`, + }); + expect(result.code).toBe("MISSING_LOCKFILE"); + expect(result.level).toBe("warn"); + expect(result.message).toBe(NON_FATAL_ERRORS.MISSING_LOCKFILE.message); + }); + + it("it detects errors when pnpm lockfile is missing", async () => { + const result = shouldWarn({ + err: `* reading pnpm-lock.yaml: open /test/../pnpm-lock.yaml: no such file or directory`, + }); + expect(result.code).toBe("MISSING_LOCKFILE"); + expect(result.level).toBe("warn"); + expect(result.message).toBe(NON_FATAL_ERRORS.MISSING_LOCKFILE.message); + }); + + it("it detects errors when npm lockfile is missing", async () => { + const result = shouldWarn({ + err: `* reading package-lock.json: open /test/../package-lock.json: no such file or directory`, + }); + expect(result.code).toBe("MISSING_LOCKFILE"); + expect(result.level).toBe("warn"); + expect(result.message).toBe(NON_FATAL_ERRORS.MISSING_LOCKFILE.message); + }); + + it("it returns unknown errors", async () => { + const result = shouldWarn({ err: `something bad happened` }); + expect(result.code).toBe("UNKNOWN_ERROR"); + expect(result.level).toBe("error"); + expect(result.message).toBe(`something bad happened`); + }); +}); diff --git a/packages/turbo-ignore/__tests__/getComparison.test.ts b/packages/turbo-ignore/__tests__/getComparison.test.ts new file mode 100644 index 0000000..b5c74c7 --- /dev/null +++ b/packages/turbo-ignore/__tests__/getComparison.test.ts @@ -0,0 +1,61 @@ +import { getComparison } from "../src/getComparison"; +import { spyConsole, mockEnv } from "@turbo/test-utils"; + +describe("getComparison()", () => { + mockEnv(); + const mockConsole = spyConsole(); + it("uses headRelative comparison when not running Vercel CI", async () => { + expect(getComparison({ workspace: "test-workspace" })) + .toMatchInlineSnapshot(` + Object { + "ref": "HEAD^", + "type": "headRelative", + } + `); + }); + + it("returns null when running in Vercel CI with no VERCEL_GIT_PREVIOUS_SHA", async () => { + process.env.VERCEL = "1"; + process.env.VERCEL_GIT_COMMIT_REF = "my-branch"; + expect(getComparison({ workspace: "test-workspace" })).toBeNull(); + expect(mockConsole.log).toHaveBeenCalledWith( + "≫ 
", + 'No previous deployments found for "test-workspace" on branch "my-branch".' + ); + }); + + it("uses custom fallback when running in Vercel CI with no VERCEL_GIT_PREVIOUS_SHA", async () => { + process.env.VERCEL = "1"; + process.env.VERCEL_GIT_COMMIT_REF = "my-branch"; + expect(getComparison({ workspace: "test-workspace", fallback: "HEAD^2" })) + .toMatchInlineSnapshot(` + Object { + "ref": "HEAD^2", + "type": "customFallback", + } + `); + expect(mockConsole.log).toHaveBeenNthCalledWith( + 1, + "≫ ", + 'No previous deployments found for "test-workspace" on branch "my-branch".' + ); + expect(mockConsole.log).toHaveBeenNthCalledWith( + 2, + "≫ ", + "Falling back to ref HEAD^2" + ); + }); + + it("uses previousDeploy when running in Vercel CI with VERCEL_GIT_PREVIOUS_SHA", async () => { + process.env.VERCEL = "1"; + process.env.VERCEL_GIT_PREVIOUS_SHA = "mygitsha"; + process.env.VERCEL_GIT_COMMIT_REF = "my-branch"; + expect(getComparison({ workspace: "test-workspace" })) + .toMatchInlineSnapshot(` + Object { + "ref": "mygitsha", + "type": "previousDeploy", + } + `); + }); +}); diff --git a/packages/turbo-ignore/__tests__/getTask.test.ts b/packages/turbo-ignore/__tests__/getTask.test.ts new file mode 100644 index 0000000..a184893 --- /dev/null +++ b/packages/turbo-ignore/__tests__/getTask.test.ts @@ -0,0 +1,27 @@ +import { getTask } from "../src/getTask"; +import { spyConsole, validateLogs } from "@turbo/test-utils"; + +describe("getWorkspace()", () => { + const mockConsole = spyConsole(); + it("getTask defaults to build", async () => { + expect(getTask({})).toEqual("build"); + validateLogs( + ['Using "build" as the task as it was unspecified'], + mockConsole.log, + { prefix: "≫ " } + ); + }); + + it("getTask returns a quoted task if user-supplied", async () => { + expect( + getTask({ + task: "workspace#task", + }) + ).toEqual(`"workspace#task"`); + validateLogs( + ['Using "workspace#task" as the task from the arguments'], + mockConsole.log, + { prefix: "≫ " } + ); + }); +}); diff --git a/packages/turbo-ignore/__tests__/getWorkspace.test.ts b/packages/turbo-ignore/__tests__/getWorkspace.test.ts new file mode 100644 index 0000000..6d97fe2 --- /dev/null +++ b/packages/turbo-ignore/__tests__/getWorkspace.test.ts @@ -0,0 +1,62 @@ +import { getWorkspace } from "../src/getWorkspace"; +import { spyConsole, validateLogs } from "@turbo/test-utils"; + +describe("getWorkspace()", () => { + const mockConsole = spyConsole(); + it("getWorkspace returns workspace from arg", async () => { + expect( + getWorkspace({ + workspace: "test-workspace", + }) + ).toEqual("test-workspace"); + validateLogs( + ['Using "test-workspace" as workspace from arguments'], + mockConsole.log, + { prefix: "≫ " } + ); + }); + + it("getWorkspace returns workspace from package.json", async () => { + expect( + getWorkspace({ + directory: "./__fixtures__/app", + }) + ).toEqual("test-app"); + expect(mockConsole.log).toHaveBeenCalledWith( + "≫ ", + 'Inferred "test-app" as workspace from "package.json"' + ); + }); + + it("getWorkspace used current directory if not specified", async () => { + expect(getWorkspace({})).toEqual("turbo-ignore"); + expect(mockConsole.log).toHaveBeenCalledWith( + "≫ ", + 'Inferred "turbo-ignore" as workspace from "package.json"' + ); + }); + + it("getWorkspace returns null when no arg is provided and package.json is missing name field", async () => { + expect( + getWorkspace({ + directory: "./__fixtures__/invalid-app", + }) + ).toEqual(null); + expect(mockConsole.error).toHaveBeenCalledWith( + "≫ ", + 
diff --git a/packages/turbo-ignore/__tests__/getWorkspace.test.ts b/packages/turbo-ignore/__tests__/getWorkspace.test.ts
new file mode 100644
index 0000000..6d97fe2
--- /dev/null
+++ b/packages/turbo-ignore/__tests__/getWorkspace.test.ts
@@ -0,0 +1,62 @@
+import { getWorkspace } from "../src/getWorkspace";
+import { spyConsole, validateLogs } from "@turbo/test-utils";
+
+describe("getWorkspace()", () => {
+  const mockConsole = spyConsole();
+  it("getWorkspace returns workspace from arg", async () => {
+    expect(
+      getWorkspace({
+        workspace: "test-workspace",
+      })
+    ).toEqual("test-workspace");
+    validateLogs(
+      ['Using "test-workspace" as workspace from arguments'],
+      mockConsole.log,
+      { prefix: "≫ " }
+    );
+  });
+
+  it("getWorkspace returns workspace from package.json", async () => {
+    expect(
+      getWorkspace({
+        directory: "./__fixtures__/app",
+      })
+    ).toEqual("test-app");
+    expect(mockConsole.log).toHaveBeenCalledWith(
+      "≫ ",
+      'Inferred "test-app" as workspace from "package.json"'
+    );
+  });
+
+  it("getWorkspace uses the current directory if not specified", async () => {
+    expect(getWorkspace({})).toEqual("turbo-ignore");
+    expect(mockConsole.log).toHaveBeenCalledWith(
+      "≫ ",
+      'Inferred "turbo-ignore" as workspace from "package.json"'
+    );
+  });
+
+  it("getWorkspace returns null when no arg is provided and package.json is missing name field", async () => {
+    expect(
+      getWorkspace({
+        directory: "./__fixtures__/invalid-app",
+      })
+    ).toEqual(null);
+    expect(mockConsole.error).toHaveBeenCalledWith(
+      "≫ ",
+      '"__fixtures__/invalid-app/package.json" is missing the "name" field (required).'
+    );
+  });
+
+  it("getWorkspace returns null when no arg is provided and package.json cannot be found", async () => {
+    expect(
+      getWorkspace({
+        directory: "./__fixtures__/no-app",
+      })
+    ).toEqual(null);
+    expect(mockConsole.error).toHaveBeenCalledWith(
+      "≫ ",
+      '"__fixtures__/no-app/package.json" could not be found. turbo-ignore inferencing failed'
+    );
+  });
+});
diff --git a/packages/turbo-ignore/__tests__/ignore.test.ts b/packages/turbo-ignore/__tests__/ignore.test.ts
new file mode 100644
index 0000000..37908c5
--- /dev/null
+++ b/packages/turbo-ignore/__tests__/ignore.test.ts
@@ -0,0 +1,578 @@
+import child_process, { ChildProcess, ExecException } from "child_process";
+import turboIgnore from "../src/ignore";
+import {
+  spyConsole,
+  spyExit,
+  SpyExit,
+  mockEnv,
+  validateLogs,
+} from "@turbo/test-utils";
+
+function expectBuild(mockExit: SpyExit) {
+  expect(mockExit.exit).toHaveBeenCalledWith(1);
+}
+
+function expectIgnore(mockExit: SpyExit) {
+  expect(mockExit.exit).toHaveBeenCalledWith(0);
+}
+
+describe("turboIgnore()", () => {
+  mockEnv();
+  const mockExit = spyExit();
+  const mockConsole = spyConsole();
+
+  it("throws error and allows build when exec fails", async () => {
+    const mockExec = jest
+      .spyOn(child_process, "exec")
+      .mockImplementation((command, options, callback) => {
+        if (callback) {
+          return callback(
+            "error" as unknown as ExecException,
+            "stdout",
+            "stderr"
+          ) as unknown as ChildProcess;
+        }
+        return {} as unknown as ChildProcess;
+      });
+
+    turboIgnore({
+      args: { workspace: "test-workspace" },
+    });
+
+    expect(mockExec).toHaveBeenCalledWith(
+      "npx turbo run build --filter=test-workspace...[HEAD^] --dry=json",
+      expect.anything(),
+      expect.anything()
+    );
+
+    validateLogs(["UNKNOWN_ERROR: error"], mockConsole.error, {
+      prefix: "≫ ",
+    });
+
+    expectBuild(mockExit);
+    mockExec.mockRestore();
+  });
+
+  it("throws pretty error and allows build when exec fails", async () => {
+    const mockExec = jest
+      .spyOn(child_process, "exec")
+      .mockImplementation((command, options, callback) => {
+        if (callback) {
+          return callback(
+            {
+              message:
+                "run failed: We did not detect an in-use package manager for your project",
+            } as unknown as ExecException,
+            "stdout",
+            "stderr"
+          ) as unknown as ChildProcess;
+        }
+        return {} as unknown as ChildProcess;
+      });
+
+    turboIgnore({
+      args: { workspace: "test-workspace" },
+    });
+
+    expect(mockExec).toHaveBeenCalledWith(
+      "npx turbo run build --filter=test-workspace...[HEAD^] --dry=json",
+      expect.anything(),
+      expect.anything()
+    );
+
+    validateLogs(
+      [
+        `turbo-ignore could not complete - no package manager detected, please commit a lockfile, or set "packageManager" in your root "package.json"`,
+      ],
+      mockConsole.warn,
+      { prefix: "≫ " }
+    );
+
+    expectBuild(mockExit);
+    mockExec.mockRestore();
+  });
+
+  it("throws pretty error and allows build when can't find previous sha", async () => {
+    process.env.VERCEL = "1";
+    process.env.VERCEL_GIT_PREVIOUS_SHA = "too-far-back";
+    process.env.VERCEL_GIT_COMMIT_REF = "my-branch";
+    const mockExec = jest
+      .spyOn(child_process, "exec")
+      .mockImplementation((command, options, callback) => {
+        if (callback) {
+          return callback(
+            {
+              message:
+                " ERROR run failed: failed to resolve packages to run: commit too-far-back does not exist",
+            } as unknown as ExecException,
+            "stdout",
+            "stderr"
+          ) as unknown as ChildProcess;
+        }
+        return {} as unknown as ChildProcess;
+      });
+
+    turboIgnore({
args: { workspace: "test-workspace" }, + }); + + expect(mockExec).toHaveBeenCalledWith( + "npx turbo run build --filter=test-workspace...[too-far-back] --dry=json", + expect.anything(), + expect.anything() + ); + + validateLogs( + [ + `turbo-ignore could not complete - commit does not exist or is unreachable`, + ], + mockConsole.warn, + { prefix: "≫ " } + ); + + expectBuild(mockExit); + mockExec.mockRestore(); + }); + + it("throws pretty error and allows build when fallback fails", async () => { + const mockExec = jest + .spyOn(child_process, "exec") + .mockImplementation((command, options, callback) => { + if (callback) { + return callback( + { + message: + "ERROR run failed: failed to resolve packages to run: commit HEAD^ does not exist", + } as unknown as ExecException, + "stdout", + "stderr" + ) as unknown as ChildProcess; + } + return {} as unknown as ChildProcess; + }); + + turboIgnore({ + args: { workspace: "test-workspace", fallback: "HEAD^" }, + }); + + expect(mockExec).toHaveBeenCalledWith( + "npx turbo run build --filter=test-workspace...[HEAD^] --dry=json", + expect.anything(), + expect.anything() + ); + + validateLogs( + [ + `turbo-ignore could not complete - parent commit does not exist or is unreachable`, + ], + mockConsole.warn, + { prefix: "≫ " } + ); + + expectBuild(mockExit); + mockExec.mockRestore(); + }); + + it("skips checks and allows build when no workspace can be found", async () => { + turboIgnore({ + args: { + directory: "__fixtures__/no-app", + }, + }); + validateLogs( + [ + () => [ + "≫ ", + expect.stringContaining( + " could not be found. turbo-ignore inferencing failed" + ), + ], + ], + mockConsole.error, + { prefix: "≫ " } + ); + expectBuild(mockExit); + }); + + it("skips checks and allows build when a workspace with no name is found", async () => { + turboIgnore({ + args: { + directory: "__fixtures__/invalid-app", + }, + }); + validateLogs( + [ + () => [ + "≫ ", + expect.stringContaining(' is missing the "name" field (required).'), + ], + ], + mockConsole.error, + { prefix: "≫ " } + ); + expectBuild(mockExit); + }); + + it("skips checks and allows build when no monorepo root can be found", async () => { + turboIgnore({ + args: { directory: "/" }, + }); + expectBuild(mockExit); + expect(mockConsole.error).toHaveBeenLastCalledWith( + "≫ ", + "Monorepo root not found. turbo-ignore inferencing failed" + ); + }); + + it("skips checks and allows build when TURBO_FORCE is set", async () => { + process.env.TURBO_FORCE = "true"; + turboIgnore({ + args: { workspace: "test-workspace" }, + }); + expect(mockConsole.log).toHaveBeenNthCalledWith( + 2, + "≫ ", + "`TURBO_FORCE` detected" + ); + expectBuild(mockExit); + }); + + it("allows build when no comparison is returned", async () => { + process.env.VERCEL = "1"; + process.env.VERCEL_GIT_PREVIOUS_SHA = ""; + process.env.VERCEL_GIT_COMMIT_REF = "my-branch"; + turboIgnore({ + args: { + workspace: "test-app", + directory: "__fixtures__/app", + }, + }); + expect(mockConsole.log).toHaveBeenNthCalledWith( + 4, + "≫ ", + 'No previous deployments found for "test-app" on branch "my-branch".' 
+ ); + expectBuild(mockExit); + }); + + it("skips build for `previousDeploy` comparison with no changes", async () => { + process.env.VERCEL = "1"; + process.env.VERCEL_GIT_PREVIOUS_SHA = "last-deployed-sha"; + process.env.VERCEL_GIT_COMMIT_REF = "my-branch"; + const mockExec = jest + .spyOn(child_process, "exec") + .mockImplementation((command, options, callback) => { + if (callback) { + return callback( + null, + '{"packages":[],"tasks":[]}', + "stderr" + ) as unknown as ChildProcess; + } + return {} as unknown as ChildProcess; + }); + turboIgnore({ + args: { + directory: "__fixtures__/app", + }, + }); + validateLogs( + [ + "Using Turborepo to determine if this project is affected by the commit...\n", + 'Inferred "test-app" as workspace from "package.json"', + 'Using "build" as the task as it was unspecified', + `Found previous deployment ("last-deployed-sha") for \"test-app\" on branch \"my-branch\"`, + "Analyzing results of `npx turbo run build --filter=test-app...[last-deployed-sha] --dry=json`", + "This project and its dependencies are not affected", + () => expect.stringContaining("⏭ Ignoring the change"), + ], + mockConsole.log, + { prefix: "≫ " } + ); + + expectIgnore(mockExit); + mockExec.mockRestore(); + }); + + it("allows build for `previousDeploy` comparison with changes", async () => { + process.env.VERCEL = "1"; + process.env.VERCEL_GIT_PREVIOUS_SHA = "last-deployed-sha"; + process.env.VERCEL_GIT_COMMIT_REF = "my-branch"; + const mockExec = jest + .spyOn(child_process, "exec") + .mockImplementation((command, options, callback) => { + if (callback) { + return callback( + null, + '{"packages":["test-app"],"tasks":[]}', + "stderr" + ) as unknown as ChildProcess; + } + return {} as unknown as ChildProcess; + }); + turboIgnore({ + args: { + task: "workspace#build", + directory: "__fixtures__/app", + }, + }); + validateLogs( + [ + "Using Turborepo to determine if this project is affected by the commit...\n", + 'Inferred "test-app" as workspace from "package.json"', + 'Using "workspace#build" as the task from the arguments', + 'Found previous deployment ("last-deployed-sha") for "test-app" on branch "my-branch"', + 'Analyzing results of `npx turbo run "workspace#build" --filter=test-app...[last-deployed-sha] --dry=json`', + 'This commit affects "test-app"', + () => expect.stringContaining("✓ Proceeding with deployment"), + ], + mockConsole.log, + { prefix: "≫ " } + ); + + expectBuild(mockExit); + mockExec.mockRestore(); + }); + + it("allows build for `previousDeploy` comparison with single dependency change", async () => { + process.env.VERCEL = "1"; + process.env.VERCEL_GIT_PREVIOUS_SHA = "last-deployed-sha"; + process.env.VERCEL_GIT_COMMIT_REF = "my-branch"; + const mockExec = jest + .spyOn(child_process, "exec") + .mockImplementation((command, options, callback) => { + if (callback) { + return callback( + null, + '{"packages":["test-app", "ui"],"tasks":[]}', + "stderr" + ) as unknown as ChildProcess; + } + return {} as unknown as ChildProcess; + }); + turboIgnore({ + args: { + directory: "__fixtures__/app", + }, + }); + validateLogs( + [ + "Using Turborepo to determine if this project is affected by the commit...\n", + 'Inferred "test-app" as workspace from "package.json"', + 'Using "build" as the task as it was unspecified', + 'Found previous deployment ("last-deployed-sha") for "test-app" on branch "my-branch"', + "Analyzing results of `npx turbo run build --filter=test-app...[last-deployed-sha] --dry=json`", + 'This commit affects "test-app" and 1 dependency (ui)', + () => 
expect.stringContaining("✓ Proceeding with deployment"), + ], + mockConsole.log, + { prefix: "≫ " } + ); + + expectBuild(mockExit); + mockExec.mockRestore(); + }); + + it("allows build for `previousDeploy` comparison with multiple dependency changes", async () => { + process.env.VERCEL = "1"; + process.env.VERCEL_GIT_PREVIOUS_SHA = "last-deployed-sha"; + process.env.VERCEL_GIT_COMMIT_REF = "my-branch"; + const mockExec = jest + .spyOn(child_process, "exec") + .mockImplementation((command, options, callback) => { + if (callback) { + return callback( + null, + '{"packages":["test-app", "ui", "tsconfig"],"tasks":[]}', + "stderr" + ) as unknown as ChildProcess; + } + return {} as unknown as ChildProcess; + }); + turboIgnore({ + args: { + directory: "__fixtures__/app", + }, + }); + validateLogs( + [ + "Using Turborepo to determine if this project is affected by the commit...\n", + 'Inferred "test-app" as workspace from "package.json"', + 'Using "build" as the task as it was unspecified', + 'Found previous deployment ("last-deployed-sha") for "test-app" on branch "my-branch"', + "Analyzing results of `npx turbo run build --filter=test-app...[last-deployed-sha] --dry=json`", + 'This commit affects "test-app" and 2 dependencies (ui, tsconfig)', + () => expect.stringContaining("✓ Proceeding with deployment"), + ], + mockConsole.log, + { prefix: "≫ " } + ); + + expectBuild(mockExit); + mockExec.mockRestore(); + }); + + it("throws error and allows build when json cannot be parsed", async () => { + const mockExec = jest + .spyOn(child_process, "exec") + .mockImplementation((command, options, callback) => { + if (callback) { + return callback(null, "stdout", "stderr") as unknown as ChildProcess; + } + return {} as unknown as ChildProcess; + }); + + turboIgnore({ + args: { + directory: "__fixtures__/app", + }, + }); + + expect(mockExec).toHaveBeenCalledWith( + "npx turbo run build --filter=test-app...[HEAD^] --dry=json", + expect.anything(), + expect.anything() + ); + validateLogs( + [ + "Failed to parse JSON output from `npx turbo run build --filter=test-app...[HEAD^] --dry=json`.", + ], + mockConsole.error, + { prefix: "≫ " } + ); + + expectBuild(mockExit); + mockExec.mockRestore(); + }); + + it("throws error and allows build when stdout is null", async () => { + const mockExec = jest + .spyOn(child_process, "exec") + .mockImplementation((command, options, callback) => { + if (callback) { + return callback( + null, + null as unknown as string, + "stderr" + ) as unknown as ChildProcess; + } + return {} as unknown as ChildProcess; + }); + + turboIgnore({ + args: { + directory: "__fixtures__/app", + }, + }); + + expect(mockExec).toHaveBeenCalledWith( + "npx turbo run build --filter=test-app...[HEAD^] --dry=json", + expect.anything(), + expect.anything() + ); + validateLogs( + [ + "Failed to parse JSON output from `npx turbo run build --filter=test-app...[HEAD^] --dry=json`.", + ], + mockConsole.error, + { prefix: "≫ " } + ); + + expectBuild(mockExit); + mockExec.mockRestore(); + }); + + it("skips when commit message contains a skip string", async () => { + process.env.VERCEL = "1"; + process.env.VERCEL_GIT_COMMIT_MESSAGE = "[vercel skip]"; + + turboIgnore({ + args: { + directory: "__fixtures__/app", + }, + }); + + validateLogs( + [ + "Using Turborepo to determine if this project is affected by the commit...\n", + 'Inferred "test-app" as workspace from "package.json"', + 'Using "build" as the task as it was unspecified', + "Found commit message: [vercel skip]", + () => expect.stringContaining("⏭ Ignoring 
the change"), + ], + mockConsole.log, + { prefix: "≫ " } + ); + + expectIgnore(mockExit); + }); + + it("deploys when commit message contains a deploy string", async () => { + process.env.VERCEL = "1"; + process.env.VERCEL_GIT_COMMIT_MESSAGE = "[vercel deploy]"; + + turboIgnore({ + args: { + directory: "__fixtures__/app", + }, + }); + + validateLogs( + [ + "Using Turborepo to determine if this project is affected by the commit...\n", + 'Inferred "test-app" as workspace from "package.json"', + 'Using "build" as the task as it was unspecified', + "Found commit message: [vercel deploy]", + () => expect.stringContaining("✓ Proceeding with deployment"), + ], + mockConsole.log, + { prefix: "≫ " } + ); + + expectBuild(mockExit); + }); + + it("runs full turbo-ignore check when commit message contains a conflicting string", async () => { + process.env.VERCEL = "1"; + process.env.VERCEL_GIT_COMMIT_MESSAGE = "[vercel deploy] [vercel skip]"; + process.env.VERCEL_GIT_PREVIOUS_SHA = "last-deployed-sha"; + process.env.VERCEL_GIT_COMMIT_REF = "my-branch"; + + const mockExec = jest + .spyOn(child_process, "exec") + .mockImplementation((command, options, callback) => { + if (callback) { + return callback( + null, + '{"packages":[],"tasks":[]}', + "stderr" + ) as unknown as ChildProcess; + } + return {} as unknown as ChildProcess; + }); + + turboIgnore({ + args: { + directory: "__fixtures__/app", + }, + }); + + validateLogs( + [ + "Using Turborepo to determine if this project is affected by the commit...\n", + 'Inferred "test-app" as workspace from "package.json"', + 'Using "build" as the task as it was unspecified', + "Conflicting commit messages found: [vercel deploy] and [vercel skip]", + `Found previous deployment ("last-deployed-sha") for \"test-app\" on branch \"my-branch\"`, + "Analyzing results of `npx turbo run build --filter=test-app...[last-deployed-sha] --dry=json`", + "This project and its dependencies are not affected", + () => expect.stringContaining("⏭ Ignoring the change"), + ], + mockConsole.log, + { prefix: "≫ " } + ); + + expectIgnore(mockExit); + mockExec.mockRestore(); + }); +}); diff --git a/packages/turbo-ignore/jest.config.js b/packages/turbo-ignore/jest.config.js new file mode 100644 index 0000000..52ddbbc --- /dev/null +++ b/packages/turbo-ignore/jest.config.js @@ -0,0 +1,18 @@ +/** @type {import('ts-jest/dist/types').InitialOptionsTsJest} */ +module.exports = { + preset: "ts-jest/presets/js-with-ts", + testEnvironment: "node", + testPathIgnorePatterns: ["/__fixtures__/"], + coveragePathIgnorePatterns: ["/__fixtures__/"], + collectCoverage: true, + coverageThreshold: { + global: { + branches: 100, + functions: 100, + lines: 100, + statements: 100, + }, + }, + modulePathIgnorePatterns: ["/node_modules", "/dist"], + transformIgnorePatterns: ["/node_modules/(?!(ansi-regex)/)"], +}; diff --git a/packages/turbo-ignore/package.json b/packages/turbo-ignore/package.json new file mode 100644 index 0000000..0fae072 --- /dev/null +++ b/packages/turbo-ignore/package.json @@ -0,0 +1,40 @@ +{ + "name": "turbo-ignore", + "version": "1.9.4-canary.2", + "description": "", + "homepage": "https://turbo.build/repo", + "keywords": [], + "author": "Jared Palmer", + "license": "MPL-2.0", + "repository": { + "type": "git", + "url": "https://github.com/vercel/turbo", + "directory": "packages/turbo-ignore" + }, + "bugs": { + "url": "https://github.com/vercel/turbo/issues" + }, + "files": [ + "dist" + ], + "main": "dist/index.js", + "bin": "dist/index.js", + "scripts": { + "build": "tsup", + "test": "jest", + 
"lint": "eslint src/**/*.ts", + "check-types": "tsc --noEmit" + }, + "devDependencies": { + "@types/jest": "^27.4.0", + "@types/node": "^16.11.12", + "eslint": "^8.20.0", + "jest": "^27.4.3", + "ts-jest": "^27.1.1", + "@turbo/tsconfig": "workspace:*", + "tsup": "^5.12.1", + "@turbo/test-utils": "workspace:^0.0.0", + "@turbo/utils": "workspace:*", + "typescript": "^4.7.4" + } +} diff --git a/packages/turbo-ignore/src/args.ts b/packages/turbo-ignore/src/args.ts new file mode 100644 index 0000000..8d6015c --- /dev/null +++ b/packages/turbo-ignore/src/args.ts @@ -0,0 +1,89 @@ +import pkg from "../package.json"; +import { TurboIgnoreArgs } from "./types"; +import { + skipAllCommits, + forceAllCommits, + skipWorkspaceCommits, + forceWorkspaceCommits, +} from "./checkCommit"; + +export const help = ` +turbo-ignore + +Automatically ignore builds that have no changes + +Usage: + $ npx turbo-ignore [] [flags...] + +If is not provided, it will be inferred from the "name" +field of the "package.json" located at the current working directory. + +Flags: + --fallback= On Vercel, if no previously deployed SHA is available to compare against, + fallback to comparing against the provided ref + --help, -h Show this help message + --version, -v Show the version of this script + +--- + +turbo-ignore will also check for special commit messages to indicate if a build should be skipped or not. + +Skip turbo-ignore check and automatically ignore: +${[...skipAllCommits, ...skipWorkspaceCommits({ workspace: "" })] + .map((msg) => ` - ${msg}`) + .join("\n")} + +Skip turbo-ignore check and automatically deploy: +${[...forceAllCommits, ...forceWorkspaceCommits({ workspace: "" })] + .map((msg) => ` - ${msg}`) + .join("\n")} +`; + +// simple args parser because we don't want to pull in a dependency +// and we don't need many features +export default function parseArgs({ + argv, +}: { + argv: Array; +}): TurboIgnoreArgs { + const args: TurboIgnoreArgs = { directory: process.cwd() }; + + // find all flags + const flags = new Set( + argv + .filter((args) => args.startsWith("-")) + .map((flag) => flag.replace(/-/g, "")) + ); + + // handle help flag and exit + if (flags.has("help") || flags.has("h")) { + console.log(help); + process.exit(0); + } + // handle version flag and exit + if (flags.has("version") || flags.has("v")) { + console.log(pkg.version); + process.exit(0); + } + + // set workspace (if provided) + if (argv.length && !argv[0].startsWith("-")) { + args.workspace = argv[0]; + } + + // set task (if provided) + const taskArgSentinel = "--task="; + const taskArg = argv.find((arg) => arg.startsWith(taskArgSentinel)); + if (taskArg && taskArg.length > taskArgSentinel.length) { + args.task = taskArg.split("=")[1]; + } + + // set fallback (if provided) + const fallbackSentinel = "--fallback="; + const fallbackArg = argv.find((arg) => arg.startsWith(fallbackSentinel)); + if (fallbackArg && fallbackArg.length > fallbackSentinel.length) { + args.fallback = fallbackArg.split("=")[1]; + } + + return args; +} diff --git a/packages/turbo-ignore/src/checkCommit.ts b/packages/turbo-ignore/src/checkCommit.ts new file mode 100644 index 0000000..af6108e --- /dev/null +++ b/packages/turbo-ignore/src/checkCommit.ts @@ -0,0 +1,104 @@ +import { execSync } from "child_process"; + +export const skipAllCommits = [ + `[skip ci]`, + `[ci skip]`, + `[no ci]`, + `[skip vercel]`, + `[vercel skip]`, +]; + +export const forceAllCommits = [`[vercel deploy]`, `[vercel build]`]; + +export function skipWorkspaceCommits({ workspace }: { workspace: 
string }) { + return [`[vercel skip ${workspace}]`]; +} + +export function forceWorkspaceCommits({ workspace }: { workspace: string }) { + return [`[vercel deploy ${workspace}]`, `[vercel build ${workspace}]`]; +} + +export function getCommitDetails() { + // if we're on Vercel, use the provided commit message + if (process.env.VERCEL === "1") { + if (process.env.VERCEL_GIT_COMMIT_MESSAGE) { + return process.env.VERCEL_GIT_COMMIT_MESSAGE; + } + } + return execSync("git show -s --format=%B").toString(); +} + +export function checkCommit({ workspace }: { workspace: string }): { + result: "skip" | "deploy" | "continue" | "conflict"; + scope: "global" | "workspace"; + reason: string; +} { + const commitMessage = getCommitDetails(); + const findInCommit = (commit: string) => commitMessage.includes(commit); + + // check workspace specific messages first + const forceWorkspaceDeploy = forceWorkspaceCommits({ workspace }).find( + findInCommit + ); + const forceWorkspaceSkip = skipWorkspaceCommits({ workspace }).find( + findInCommit + ); + + if (forceWorkspaceDeploy && forceWorkspaceSkip) { + return { + result: "conflict", + scope: "workspace", + reason: `Conflicting commit messages found: ${forceWorkspaceDeploy} and ${forceWorkspaceSkip}`, + }; + } + + if (forceWorkspaceDeploy) { + return { + result: "deploy", + scope: "workspace", + reason: `Found commit message: ${forceWorkspaceDeploy}`, + }; + } + + if (forceWorkspaceSkip) { + return { + result: "skip", + scope: "workspace", + reason: `Found commit message: ${forceWorkspaceSkip}`, + }; + } + + // check global messages last + const forceDeploy = forceAllCommits.find(findInCommit); + const forceSkip = skipAllCommits.find(findInCommit); + + if (forceDeploy && forceSkip) { + return { + result: "conflict", + scope: "global", + reason: `Conflicting commit messages found: ${forceDeploy} and ${forceSkip}`, + }; + } + + if (forceDeploy) { + return { + result: "deploy", + scope: "global", + reason: `Found commit message: ${forceDeploy}`, + }; + } + + if (forceSkip) { + return { + result: "skip", + scope: "global", + reason: `Found commit message: ${forceSkip}`, + }; + } + + return { + result: "continue", + scope: "global", + reason: `No deploy or skip string found in commit message.`, + }; +} diff --git a/packages/turbo-ignore/src/errors.ts b/packages/turbo-ignore/src/errors.ts new file mode 100644 index 0000000..f600dfb --- /dev/null +++ b/packages/turbo-ignore/src/errors.ts @@ -0,0 +1,43 @@ +import { NonFatalErrorKey, NonFatalErrors } from "./types"; + +export const NON_FATAL_ERRORS: NonFatalErrors = { + MISSING_LOCKFILE: { + regex: + /reading (yarn.lock|package-lock.json|pnpm-lock.yaml):.*?no such file or directory/, + message: `turbo-ignore could not complete - no lockfile found, please commit one to your repository`, + }, + NO_PACKAGE_MANAGER: { + regex: + /run failed: We did not detect an in-use package manager for your project/, + message: `turbo-ignore could not complete - no package manager detected, please commit a lockfile, or set "packageManager" in your root "package.json"`, + }, + UNREACHABLE_PARENT: { + regex: /failed to resolve packages to run: commit HEAD\^ does not exist/, + message: `turbo-ignore could not complete - parent commit does not exist or is unreachable`, + }, + UNREACHABLE_COMMIT: { + regex: /commit \S+ does not exist/, + message: `turbo-ignore could not complete - commit does not exist or is unreachable`, + }, +}; + +export function shouldWarn({ err }: { err: string }): { + level: "warn" | "error"; + message: string; + code: 
NonFatalErrorKey | "UNKNOWN_ERROR";
+} {
+  const knownError = Object.keys(NON_FATAL_ERRORS).find((key) => {
+    const { regex } = NON_FATAL_ERRORS[key as NonFatalErrorKey];
+    return regex.test(err);
+  });
+
+  if (knownError) {
+    return {
+      level: "warn",
+      message: NON_FATAL_ERRORS[knownError as NonFatalErrorKey].message,
+      code: knownError as NonFatalErrorKey,
+    };
+  }
+
+  return { level: "error", message: err, code: "UNKNOWN_ERROR" };
+}
diff --git a/packages/turbo-ignore/src/getComparison.ts b/packages/turbo-ignore/src/getComparison.ts
new file mode 100644
index 0000000..a2ad61a
--- /dev/null
+++ b/packages/turbo-ignore/src/getComparison.ts
@@ -0,0 +1,39 @@
+import { info } from "./logger";
+import { TurboIgnoreArgs } from "./types";
+
+export interface GetComparisonArgs extends TurboIgnoreArgs {
+  // the workspace to check for changes
+  workspace: string;
+  // A ref/head to compare against if no previously deployed SHA is available
+  fallback?: string;
+}
+
+export function getComparison(args: GetComparisonArgs): {
+  ref: string;
+  type: "previousDeploy" | "headRelative" | "customFallback";
+} | null {
+  const { fallback, workspace } = args;
+  if (process.env.VERCEL === "1") {
+    if (process.env.VERCEL_GIT_PREVIOUS_SHA) {
+      // use the commit SHA of the last successful deployment for this project / branch
+      info(
+        `Found previous deployment ("${process.env.VERCEL_GIT_PREVIOUS_SHA}") for "${workspace}" on branch "${process.env.VERCEL_GIT_COMMIT_REF}"`
+      );
+      return {
+        ref: process.env.VERCEL_GIT_PREVIOUS_SHA,
+        type: "previousDeploy",
+      };
+    } else {
+      info(
+        `No previous deployments found for "${workspace}" on branch "${process.env.VERCEL_GIT_COMMIT_REF}".`
+      );
+      if (fallback) {
+        info(`Falling back to ref ${fallback}`);
+        return { ref: fallback, type: "customFallback" };
+      }
+
+      return null;
+    }
+  }
+  return { ref: "HEAD^", type: "headRelative" };
+}
diff --git a/packages/turbo-ignore/src/getTask.ts b/packages/turbo-ignore/src/getTask.ts
new file mode 100644
index 0000000..9e95e35
--- /dev/null
+++ b/packages/turbo-ignore/src/getTask.ts
@@ -0,0 +1,13 @@
+import { info } from "./logger";
+import { TurboIgnoreArgs } from "./types";
+
+export function getTask(args: TurboIgnoreArgs): string | null {
+  if (args.task) {
+    info(`Using "${args.task}" as the task from the arguments`);
+    return `"${args.task}"`;
+  }
+
+  info('Using "build" as the task as it was unspecified');
+
+  return "build";
+}
diff --git a/packages/turbo-ignore/src/getWorkspace.ts b/packages/turbo-ignore/src/getWorkspace.ts
new file mode 100644
index 0000000..e0b3167
--- /dev/null
+++ b/packages/turbo-ignore/src/getWorkspace.ts
@@ -0,0 +1,37 @@
+import fs from "fs";
+import path from "path";
+import { error, info } from "./logger";
+import { TurboIgnoreArgs } from "./types";
+
+export function getWorkspace(args: TurboIgnoreArgs): string | null {
+  const { directory = process.cwd(), workspace } = args;
+
+  // if the workspace is provided via args, use that
+  if (workspace) {
+    info(`Using "${workspace}" as workspace from arguments`);
+    return workspace;
+  }
+
+  // otherwise, try and infer it from a package.json in the current directory
+  const packageJsonPath = path.join(directory, "package.json");
+  try {
+    const raw = fs.readFileSync(packageJsonPath, "utf8");
+    const packageJsonContent: Record<string, unknown> & { name: string } =
+      JSON.parse(raw);
+
+    if (!packageJsonContent.name) {
+      error(`"${packageJsonPath}" is missing the "name" field (required).`);
+      return null;
+    }
+
+    info(
+      `Inferred "${packageJsonContent.name}" as workspace 
from "package.json"` + ); + return packageJsonContent.name; + } catch (e) { + error( + `"${packageJsonPath}" could not be found. turbo-ignore inferencing failed` + ); + return null; + } +} diff --git a/packages/turbo-ignore/src/ignore.ts b/packages/turbo-ignore/src/ignore.ts new file mode 100644 index 0000000..a6f8f2e --- /dev/null +++ b/packages/turbo-ignore/src/ignore.ts @@ -0,0 +1,125 @@ +import { exec } from "child_process"; +import path from "path"; +import { getTurboRoot } from "@turbo/utils"; +import { getComparison } from "./getComparison"; +import { getTask } from "./getTask"; +import { getWorkspace } from "./getWorkspace"; +import { info, warn, error } from "./logger"; +import { shouldWarn } from "./errors"; +import { TurboIgnoreArgs } from "./types"; +import { checkCommit } from "./checkCommit"; + +function ignoreBuild() { + console.log("⏭ Ignoring the change"); + return process.exit(0); +} + +function continueBuild() { + console.log("✓ Proceeding with deployment"); + return process.exit(1); +} + +export default function turboIgnore({ args }: { args: TurboIgnoreArgs }) { + info( + `Using Turborepo to determine if this project is affected by the commit...\n` + ); + + // set default directory + args.directory = args.directory + ? path.resolve(args.directory) + : process.cwd(); + + // check for TURBO_FORCE and bail early if it's set + if (process.env.TURBO_FORCE === "true") { + info("`TURBO_FORCE` detected"); + return continueBuild(); + } + + // find the monorepo root + const root = getTurboRoot(args.directory); + if (!root) { + error("Monorepo root not found. turbo-ignore inferencing failed"); + return continueBuild(); + } + + // Find the workspace from the command-line args, or the package.json at the current directory + const workspace = getWorkspace(args); + if (!workspace) { + return continueBuild(); + } + + // Identify which task to execute from the command-line args + let task = getTask(args); + + // check the commit message + const parsedCommit = checkCommit({ workspace }); + if (parsedCommit.result === "skip") { + info(parsedCommit.reason); + return ignoreBuild(); + } + if (parsedCommit.result === "deploy") { + info(parsedCommit.reason); + return continueBuild(); + } + if (parsedCommit.result === "conflict") { + info(parsedCommit.reason); + } + + // Get the start of the comparison (previous deployment when available, or previous commit by default) + const comparison = getComparison({ workspace, fallback: args.fallback }); + if (!comparison) { + // This is either the first deploy of the project, or the first deploy for the branch, either way - build it. 
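+    // (continueBuild() exits with status 1, which an ignore-step runner such
+    // as Vercel interprets as "changes detected, proceed with the build";
+    // ignoreBuild() exits 0 to cancel it)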
+    return continueBuild();
+  }
+
+  // Build, and execute the command
+  const command = `npx turbo run ${task} --filter=${workspace}...[${comparison.ref}] --dry=json`;
+  info(`Analyzing results of \`${command}\``);
+  exec(
+    command,
+    {
+      cwd: root,
+    },
+    (err, stdout) => {
+      if (err) {
+        const { level, code, message } = shouldWarn({ err: err.message });
+        if (level === "warn") {
+          warn(message);
+        } else {
+          error(`${code}: ${err}`);
+        }
+        return continueBuild();
+      }
+
+      try {
+        const parsed = JSON.parse(stdout);
+        if (parsed == null) {
+          error(`Failed to parse JSON output from \`${command}\`.`);
+          return continueBuild();
+        }
+        const { packages } = parsed;
+        if (packages && packages.length > 0) {
+          if (packages.length === 1) {
+            info(`This commit affects "${workspace}"`);
+          } else {
+            // subtract 1 because the first package is the workspace itself
+            info(
+              `This commit affects "${workspace}" and ${packages.length - 1} ${
+                packages.length - 1 === 1 ? "dependency" : "dependencies"
+              } (${packages.slice(1).join(", ")})`
+            );
+          }
+
+          return continueBuild();
+        } else {
+          info(`This project and its dependencies are not affected`);
+          return ignoreBuild();
+        }
+      } catch (e) {
+        error(`Failed to parse JSON output from \`${command}\`.`);
+        error(e);
+        return continueBuild();
+      }
+    }
+  );
+}
diff --git a/packages/turbo-ignore/src/index.ts b/packages/turbo-ignore/src/index.ts
new file mode 100644
index 0000000..0c34d3a
--- /dev/null
+++ b/packages/turbo-ignore/src/index.ts
@@ -0,0 +1,6 @@
+#!/usr/bin/env node
+
+import turboIgnore from "./ignore";
+import parseArgs from "./args";
+
+turboIgnore({ args: parseArgs({ argv: process.argv.slice(2) }) });
diff --git a/packages/turbo-ignore/src/logger.ts b/packages/turbo-ignore/src/logger.ts
new file mode 100644
index 0000000..a7903ac
--- /dev/null
+++ b/packages/turbo-ignore/src/logger.ts
@@ -0,0 +1,16 @@
+// ≫
+const TURBO_IGNORE_PREFIX = "\u226B ";
+
+function info(...args: any[]) {
+  console.log(TURBO_IGNORE_PREFIX, ...args);
+}
+
+function error(...args: any[]) {
+  console.error(TURBO_IGNORE_PREFIX, ...args);
+}
+
+function warn(...args: any[]) {
+  console.warn(TURBO_IGNORE_PREFIX, ...args);
+}
+
+export { info, warn, error };
diff --git a/packages/turbo-ignore/src/types.ts b/packages/turbo-ignore/src/types.ts
new file mode 100644
index 0000000..07fac3f
--- /dev/null
+++ b/packages/turbo-ignore/src/types.ts
@@ -0,0 +1,23 @@
+export type NonFatalErrorKey =
+  | "MISSING_LOCKFILE"
+  | "NO_PACKAGE_MANAGER"
+  | "UNREACHABLE_PARENT"
+  | "UNREACHABLE_COMMIT";
+
+export interface NonFatalError {
+  regex: RegExp;
+  message: string;
+}
+
+export type NonFatalErrors = Record<NonFatalErrorKey, NonFatalError>;
+
+export interface TurboIgnoreArgs {
+  // the working directory to use when looking for a workspace
+  directory?: string;
+  // the workspace to check for changes
+  workspace?: string;
+  // the task to run, if not build
+  task?: string;
+  // A ref/head to compare against if no previously deployed SHA is available
+  fallback?: string;
+}
diff --git a/packages/turbo-ignore/tsconfig.json b/packages/turbo-ignore/tsconfig.json
new file mode 100644
index 0000000..0620a3c
--- /dev/null
+++ b/packages/turbo-ignore/tsconfig.json
@@ -0,0 +1,6 @@
+{
+  "extends": "@turbo/tsconfig/library.json",
+  "compilerOptions": {
+    "rootDir": "."
+ } +} diff --git a/packages/turbo-ignore/tsup.config.ts b/packages/turbo-ignore/tsup.config.ts new file mode 100644 index 0000000..4d9d9bf --- /dev/null +++ b/packages/turbo-ignore/tsup.config.ts @@ -0,0 +1,9 @@ +import { defineConfig, Options } from "tsup"; + +export default defineConfig((options: Options) => ({ + entry: ["src/index.ts"], + format: ["cjs"], + minify: true, + clean: true, + ...options, +})); diff --git a/packages/turbo-test-utils/README.md b/packages/turbo-test-utils/README.md new file mode 100644 index 0000000..60b6382 --- /dev/null +++ b/packages/turbo-test-utils/README.md @@ -0,0 +1,3 @@ +# `@turbo/test-utils` + +Internal package of generic testing utilities shared between [turborepo/packages/](https://github.com/vercel/turbo/tree/main/packages) diff --git a/packages/turbo-test-utils/package.json b/packages/turbo-test-utils/package.json new file mode 100644 index 0000000..ff3f7f9 --- /dev/null +++ b/packages/turbo-test-utils/package.json @@ -0,0 +1,40 @@ +{ + "name": "@turbo/test-utils", + "version": "0.0.0", + "private": true, + "description": "", + "homepage": "https://turbo.build/repo", + "keywords": [], + "author": "Vercel", + "main": "src/index.ts", + "license": "MPL-2.0", + "repository": { + "type": "git", + "url": "https://github.com/vercel/turbo", + "directory": "packages/turbo-test-utils" + }, + "bugs": { + "url": "https://github.com/vercel/turbo/issues" + }, + "scripts": { + "lint": "eslint src/**/*.ts", + "check-types": "tsc --noEmit" + }, + "devDependencies": { + "@types/fs-extra": "^9.0.13", + "@types/jest": "^27.4.0", + "@types/js-yaml": "^4.0.5", + "@types/node": "^16.11.12", + "@types/uuid": "^9.0.0", + "jest": "^27.4.3", + "ts-jest": "^27.1.1", + "@turbo/tsconfig": "workspace:*", + "typescript": "^4.7.4" + }, + "dependencies": { + "fs-extra": "^11.1.0", + "js-yaml": "^4.1.0", + "json5": "^2.2.3", + "uuid": "^9.0.0" + } +} diff --git a/packages/turbo-test-utils/src/index.ts b/packages/turbo-test-utils/src/index.ts new file mode 100644 index 0000000..07d0496 --- /dev/null +++ b/packages/turbo-test-utils/src/index.ts @@ -0,0 +1,9 @@ +export { default as setupTestFixtures } from "./useFixtures"; +export { default as validateLogs } from "./validateLogs"; +export { default as mockEnv } from "./mockEnv"; + +export { default as spyExit } from "./spyExit"; +export type { SpyExit } from "./spyExit"; + +export { default as spyConsole } from "./spyConsole"; +export type { SpyConsole } from "./spyConsole"; diff --git a/packages/turbo-test-utils/src/mockEnv.ts b/packages/turbo-test-utils/src/mockEnv.ts new file mode 100644 index 0000000..31909b0 --- /dev/null +++ b/packages/turbo-test-utils/src/mockEnv.ts @@ -0,0 +1,12 @@ +export default function mockEnv() { + const OLD_ENV = process.env; + + beforeEach(() => { + jest.resetModules(); + process.env = { ...OLD_ENV }; + }); + + afterAll(() => { + process.env = OLD_ENV; + }); +} diff --git a/packages/turbo-test-utils/src/spyConsole.ts b/packages/turbo-test-utils/src/spyConsole.ts new file mode 100644 index 0000000..61722a5 --- /dev/null +++ b/packages/turbo-test-utils/src/spyConsole.ts @@ -0,0 +1,25 @@ +export type SpyConsole = { log?: any; error?: any; warn?: any }; + +export default function spyConsole() { + let spy: SpyConsole = {}; + + beforeEach(() => { + spy.log = jest.spyOn(console, "log").mockImplementation(() => {}); + spy.error = jest.spyOn(console, "error").mockImplementation(() => {}); + spy.warn = jest.spyOn(console, "warn").mockImplementation(() => {}); + }); + + afterEach(() => { + spy.log.mockClear(); + 
spy.error.mockClear();
+    spy.warn.mockClear();
+  });
+
+  afterAll(() => {
+    spy.log.mockRestore();
+    spy.error.mockRestore();
+    spy.warn.mockRestore();
+  });
+
+  return spy;
+}
diff --git a/packages/turbo-test-utils/src/spyExit.ts b/packages/turbo-test-utils/src/spyExit.ts
new file mode 100644
index 0000000..1df9844
--- /dev/null
+++ b/packages/turbo-test-utils/src/spyExit.ts
@@ -0,0 +1,21 @@
+export type SpyExit = { exit?: any };
+
+export default function spyExit() {
+  let spy: SpyExit = {};
+
+  beforeEach(() => {
+    spy.exit = jest
+      .spyOn(process, "exit")
+      .mockImplementation(() => undefined as never);
+  });
+
+  afterEach(() => {
+    spy.exit.mockClear();
+  });
+
+  afterAll(() => {
+    spy.exit.mockRestore();
+  });
+
+  return spy;
+}
diff --git a/packages/turbo-test-utils/src/useFixtures.ts b/packages/turbo-test-utils/src/useFixtures.ts
new file mode 100644
index 0000000..2c47f5a
--- /dev/null
+++ b/packages/turbo-test-utils/src/useFixtures.ts
@@ -0,0 +1,89 @@
+import { v4 as uuidv4 } from "uuid";
+import path from "path";
+import fs from "fs-extra";
+import yaml from "js-yaml";
+import JSON5 from "json5";
+
+export default function setupTestFixtures({
+  directory,
+  test = "",
+}: {
+  directory: string;
+  test?: string;
+}) {
+  const fixtures: Array<string> = [];
+  const parentDirectory = path.join(directory, test ? test : "test-runs");
+
+  afterEach(() => {
+    fixtures.forEach((fixture) => {
+      fs.rmSync(fixture, { recursive: true, force: true });
+    });
+  });
+
+  afterAll(() => {
+    fs.rmSync(parentDirectory, { recursive: true, force: true });
+  });
+
+  const useFixture = ({ fixture }: { fixture: string }) => {
+    const directoryName = uuidv4();
+    const testDirectory = path.join(parentDirectory, directoryName);
+    if (!fs.existsSync(testDirectory)) {
+      fs.mkdirSync(testDirectory, { recursive: true });
+    }
+    // keep track of it
+    fixtures.push(testDirectory);
+
+    // copy fixture to test directory
+    const fixturePath = path.join(directory, "__fixtures__", test, fixture);
+    fs.copySync(fixturePath, testDirectory, {
+      recursive: true,
+    });
+
+    const getFilePath = (filename: string) => {
+      return path.isAbsolute(filename)
+        ? filename
+        : path.join(testDirectory, filename);
+    };
+
+    const readGenerator = (method: (filePath: string) => unknown) => {
+      return <T>(filename: string) => {
+        try {
+          return method(getFilePath(filename)) as T;
+        } catch (e) {
+          return undefined;
+        }
+      };
+    };
+
+    const write = (
+      filename: string,
+      content: string | NodeJS.ArrayBufferView
+    ) => {
+      fs.writeFileSync(getFilePath(filename), content);
+    };
+
+    const exists = (filename: string): boolean => {
+      return fs.existsSync(getFilePath(filename));
+    };
+
+    const read = readGenerator((filePath) => fs.readFileSync(filePath, "utf8"));
+    const readJson = readGenerator((filePath) =>
+      JSON5.parse(fs.readFileSync(filePath, "utf8"))
+    );
+    const readYaml = readGenerator((filePath) =>
+      yaml.load(fs.readFileSync(filePath, "utf8"))
+    );
+
+    return {
+      root: testDirectory,
+      read,
+      readJson,
+      readYaml,
+      write,
+      exists,
+      directoryName,
+    };
+  };
+
+  return { useFixture };
+}
diff --git a/packages/turbo-test-utils/src/validateLogs.ts b/packages/turbo-test-utils/src/validateLogs.ts
new file mode 100644
index 0000000..b8e59ac
--- /dev/null
+++ b/packages/turbo-test-utils/src/validateLogs.ts
@@ -0,0 +1,27 @@
+import { SpyConsole } from "./spyConsole";
+
+export default function validateLogs(
+  logs: Array<string | (() => boolean | Array<string>)>,
+  mockConsole: SpyConsole["log"] | SpyConsole["error"],
+  options: { prefix?: string } = {}
+) {
+  logs.forEach((log, idx) => {
+    if (typeof log === "function") {
+      const expected = log();
+      expect(mockConsole).toHaveBeenNthCalledWith(
+        idx + 1,
+        ...(Array.isArray(expected) ? expected : [expected])
+      );
+    } else {
+      if (options.prefix) {
+        expect(mockConsole).toHaveBeenNthCalledWith(
+          idx + 1,
+          options.prefix,
+          log
+        );
+      } else {
+        expect(mockConsole).toHaveBeenNthCalledWith(idx + 1, log);
+      }
+    }
+  });
+}
diff --git a/packages/turbo-test-utils/tsconfig.json b/packages/turbo-test-utils/tsconfig.json
new file mode 100644
index 0000000..0620a3c
--- /dev/null
+++ b/packages/turbo-test-utils/tsconfig.json
@@ -0,0 +1,6 @@
+{
+  "extends": "@turbo/tsconfig/library.json",
+  "compilerOptions": {
+    "rootDir": "."
+  }
+}
diff --git a/packages/turbo-tracing-next-plugin/README.md b/packages/turbo-tracing-next-plugin/README.md
new file mode 100644
index 0000000..dbd2609
--- /dev/null
+++ b/packages/turbo-tracing-next-plugin/README.md
@@ -0,0 +1,39 @@
+# `@vercel/experimental-nft-next-plugin`
+
+## Installation
+
+- yarn add -D `@vercel/experimental-nft-next-plugin`
+- npm install -D `@vercel/experimental-nft-next-plugin`
+- pnpm install -D `@vercel/experimental-nft-next-plugin`
+
+## Usage
+
+```js
+// next.config.js
+
+const { createNodeFileTrace } = require("@vercel/experimental-nft-next-plugin");
+
+const withNodeFileTrace = createNodeFileTrace({
+  // experimental nft options
+  log: {
+    all: true,
+  },
+});
+
+module.exports = withNodeFileTrace({
+  // next config
+});
+```
+
+### experimental nft options
+
+> **Note**
+>
+> The default options should work fine.
+
+- `cwd?: string`, default is `process.cwd()`; you can override it to specify another directory in which to run experimental nft.
+- `contextDirectory?: string`, relative to cwd, default is `.`. It must be the directory where the `node_modules` directory is located. If you are in a monorepo, you should set it to the root directory of the monorepo. For yarn2+/npm workspaces, the default value will respect the `PROJECT_CWD` and `npm_config_local_prefix` environment variables injected by the yarn/npm client.
If the default value doesn't work, you can override it to specify the root directory of the monorepo. +- `path?: string`, additional path which will be appended into the `PATH` environment variable. +- `log?.all?: boolean`, default is `false`, whether to show all logs. +- `log?.level?: string`, default is `error`, the log level. +- `log?.detail?: boolean`, default is `false`, whether to expand the log details. diff --git a/packages/turbo-tracing-next-plugin/package.json b/packages/turbo-tracing-next-plugin/package.json new file mode 100644 index 0000000..e2f0662 --- /dev/null +++ b/packages/turbo-tracing-next-plugin/package.json @@ -0,0 +1,25 @@ +{ + "name": "@vercel/experimental-nft-next-plugin", + "version": "0.0.3-alpha.2", + "license": "MPL-2.0", + "main": "dist/index.js", + "types": "dist/index.d.ts", + "files": [ + "dist/**/*" + ], + "publishConfig": { + "access": "public" + }, + "dependencies": { + "@vercel/webpack-nft": "workspace:*" + }, + "peerDependencies": { + "next": ">= 12" + }, + "devDependencies": { + "next": "^13.0.6" + }, + "scripts": { + "lint": "eslint src/**/*.ts" + } +} diff --git a/packages/turbo-tracing-next-plugin/src/index.ts b/packages/turbo-tracing-next-plugin/src/index.ts new file mode 100644 index 0000000..0b75113 --- /dev/null +++ b/packages/turbo-tracing-next-plugin/src/index.ts @@ -0,0 +1,27 @@ +import { + NodeModuleTracePlugin, + NodeModuleTracePluginOptions, +} from "@vercel/webpack-nft"; +import type { NextConfig } from "next"; + +export function createNodeFileTrace(options?: NodeModuleTracePluginOptions) { + return function withNodeFileTrace(config: NextConfig = {}) { + const createWebpackConfig = config.webpack; + config.outputFileTracing = false; + config.webpack = (webpackConfig, context) => { + const config = + createWebpackConfig?.(webpackConfig, context) ?? webpackConfig; + if (context.isServer && !context.dev) { + const plugin = new NodeModuleTracePlugin(options); + if (config.plugins) { + config.plugins.push(plugin); + } else { + config.plugins = [plugin]; + } + } + + return config; + }; + return config; + }; +} diff --git a/packages/turbo-tracing-next-plugin/test/with-mongodb-mongoose/.env.local.example b/packages/turbo-tracing-next-plugin/test/with-mongodb-mongoose/.env.local.example new file mode 100644 index 0000000..9dead41 --- /dev/null +++ b/packages/turbo-tracing-next-plugin/test/with-mongodb-mongoose/.env.local.example @@ -0,0 +1 @@ +MONGODB_URI= \ No newline at end of file diff --git a/packages/turbo-tracing-next-plugin/test/with-mongodb-mongoose/.gitignore b/packages/turbo-tracing-next-plugin/test/with-mongodb-mongoose/.gitignore new file mode 100644 index 0000000..1437c53 --- /dev/null +++ b/packages/turbo-tracing-next-plugin/test/with-mongodb-mongoose/.gitignore @@ -0,0 +1,34 @@ +# See https://help.github.com/articles/ignoring-files/ for more about ignoring files. 
+ +# dependencies +/node_modules +/.pnp +.pnp.js + +# testing +/coverage + +# next.js +/.next/ +/out/ + +# production +/build + +# misc +.DS_Store +*.pem + +# debug +npm-debug.log* +yarn-debug.log* +yarn-error.log* + +# local env files +.env.local +.env.development.local +.env.test.local +.env.production.local + +# vercel +.vercel diff --git a/packages/turbo-tracing-next-plugin/test/with-mongodb-mongoose/README.md b/packages/turbo-tracing-next-plugin/test/with-mongodb-mongoose/README.md new file mode 100644 index 0000000..1f7110e --- /dev/null +++ b/packages/turbo-tracing-next-plugin/test/with-mongodb-mongoose/README.md @@ -0,0 +1,5 @@ +# MongoDB and Mongoose with Next.js + +Copied from https://github.com/vercel/next.js/tree/canary/examples/with-mongodb. + +Run `pnpm run --filter @vercel/turbo-tracing-test-app build` to build this application. diff --git a/packages/turbo-tracing-next-plugin/test/with-mongodb-mongoose/components/Form.js b/packages/turbo-tracing-next-plugin/test/with-mongodb-mongoose/components/Form.js new file mode 100644 index 0000000..b184d9c --- /dev/null +++ b/packages/turbo-tracing-next-plugin/test/with-mongodb-mongoose/components/Form.js @@ -0,0 +1,202 @@ +import { useState } from "react"; +import { useRouter } from "next/router"; +import { mutate } from "swr"; + +const Form = ({ formId, petForm, forNewPet = true }) => { + const router = useRouter(); + const contentType = "application/json"; + const [errors, setErrors] = useState({}); + const [message, setMessage] = useState(""); + + const [form, setForm] = useState({ + name: petForm.name, + owner_name: petForm.owner_name, + species: petForm.species, + age: petForm.age, + poddy_trained: petForm.poddy_trained, + diet: petForm.diet, + image_url: petForm.image_url, + likes: petForm.likes, + dislikes: petForm.dislikes, + }); + + /* The PUT method edits an existing entry in the mongodb database. */ + const putData = async (form) => { + const { id } = router.query; + + try { + const res = await fetch(`/api/pets/${id}`, { + method: "PUT", + headers: { + Accept: contentType, + "Content-Type": contentType, + }, + body: JSON.stringify(form), + }); + + // Throw error with status code in case Fetch API req failed + if (!res.ok) { + throw new Error(res.status); + } + + const { data } = await res.json(); + + mutate(`/api/pets/${id}`, data, false); // Update the local data without a revalidation + router.push("/"); + } catch (error) { + setMessage("Failed to update pet"); + } + }; + + /* The POST method adds a new entry in the mongodb database. */ + const postData = async (form) => { + try { + const res = await fetch("/api/pets", { + method: "POST", + headers: { + Accept: contentType, + "Content-Type": contentType, + }, + body: JSON.stringify(form), + }); + + // Throw error with status code in case Fetch API req failed + if (!res.ok) { + throw new Error(res.status); + } + + router.push("/"); + } catch (error) { + setMessage("Failed to add pet"); + } + }; + + const handleChange = (e) => { + const target = e.target; + const value = + target.name === "poddy_trained" ? target.checked : target.value; + const name = target.name; + + setForm({ + ...form, + [name]: value, + }); + }; + + const handleSubmit = (e) => { + e.preventDefault(); + const errs = formValidate(); + if (Object.keys(errs).length === 0) { + forNewPet ? 
postData(form) : putData(form); + } else { + setErrors({ errs }); + } + }; + + /* Makes sure pet info is filled for pet name, owner name, species, and image url*/ + const formValidate = () => { + let err = {}; + if (!form.name) err.name = "Name is required"; + if (!form.owner_name) err.owner_name = "Owner is required"; + if (!form.species) err.species = "Species is required"; + if (!form.image_url) err.image_url = "Image URL is required"; + return err; + }; + + return ( + <> +
+      {/* form fields (inputs for name, owner_name, species, age,
+          poddy_trained, diet, image_url, likes, and dislikes, plus a submit
+          button); the original markup was not recovered */}
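To round off the turbo-ignore additions above, a small sketch of the commit-message escape hatches that the new `checkCommit()` helper recognizes; the workspace name `docs` and the commit message are hypothetical:

```ts
import { checkCommit } from "./checkCommit";

// On Vercel, checkCommit() reads VERCEL_GIT_COMMIT_MESSAGE instead of
// shelling out to `git show -s --format=%B`.
process.env.VERCEL = "1";
process.env.VERCEL_GIT_COMMIT_MESSAGE = "fix: typo [vercel skip docs]";

const { result, scope, reason } = checkCommit({ workspace: "docs" });
// result === "skip", scope === "workspace",
// reason === "Found commit message: [vercel skip docs]"
```

Workspace-scoped messages (`[vercel skip docs]`, `[vercel deploy docs]`) are checked before the global ones (`[skip ci]`, `[vercel deploy]`, and friends), and conflicting directives fall through to the full `turbo run --dry=json` comparison, as exercised in the ignore.test.ts cases above.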