Diffstat (limited to 'cli/internal/scope')
-rw-r--r--  cli/internal/scope/filter/filter.go                      | 421
-rw-r--r--  cli/internal/scope/filter/filter_test.go                 | 614
-rw-r--r--  cli/internal/scope/filter/matcher.go                     |  32
-rw-r--r--  cli/internal/scope/filter/matcher_test.go                |  65
-rw-r--r--  cli/internal/scope/filter/parse_target_selector.go       | 165
-rw-r--r--  cli/internal/scope/filter/parse_target_selector_test.go  | 311
-rw-r--r--  cli/internal/scope/scope.go                              | 380
-rw-r--r--  cli/internal/scope/scope_test.go                         | 550
8 files changed, 2538 insertions(+), 0 deletions(-)
diff --git a/cli/internal/scope/filter/filter.go b/cli/internal/scope/filter/filter.go
new file mode 100644
index 0000000..60aaf1d
--- /dev/null
+++ b/cli/internal/scope/filter/filter.go
@@ -0,0 +1,421 @@
+package filter
+
+import (
+ "fmt"
+ "strings"
+
+ "github.com/pkg/errors"
+ "github.com/pyr-sh/dag"
+ "github.com/vercel/turbo/cli/internal/doublestar"
+ "github.com/vercel/turbo/cli/internal/turbopath"
+ "github.com/vercel/turbo/cli/internal/util"
+ "github.com/vercel/turbo/cli/internal/workspace"
+)
+
+type SelectedPackages struct {
+ pkgs util.Set
+ unusedFilters []*TargetSelector
+}
+
+// PackagesChangedInRange is the signature of a function to provide the set of
+// packages that have changed in a particular range of git refs.
+type PackagesChangedInRange = func(fromRef string, toRef string) (util.Set, error)
+
+// PackageInference holds the information we have inferred from the working directory
+// (really the --infer-filter-root flag) about which packages are of interest.
+type PackageInference struct {
+ // PackageName, if set, means that we have determined that filters without a package-specifier
+ // should get this package name
+ PackageName string
+ // DirectoryRoot is used to infer a "parentDir" for the filter in the event that we haven't
+ // identified a specific package. If the filter already contains a parentDir, this acts as
+ // a prefix. If the filter does not contain a parentDir, we consider this to be a glob for
+ // all subdirectories
+ DirectoryRoot turbopath.RelativeSystemPath
+}
+
+type Resolver struct {
+ Graph *dag.AcyclicGraph
+ WorkspaceInfos workspace.Catalog
+ Cwd turbopath.AbsoluteSystemPath
+ Inference *PackageInference
+ PackagesChangedInRange PackagesChangedInRange
+}
+
+// GetPackagesFromPatterns compiles filter patterns and applies them, returning
+// the selected packages
+func (r *Resolver) GetPackagesFromPatterns(patterns []string) (util.Set, error) {
+ selectors := []*TargetSelector{}
+ for _, pattern := range patterns {
+ selector, err := ParseTargetSelector(pattern)
+ if err != nil {
+ return nil, err
+ }
+ selectors = append(selectors, selector)
+ }
+ selected, err := r.getFilteredPackages(selectors)
+ if err != nil {
+ return nil, err
+ }
+ return selected.pkgs, nil
+}
+
+func (pi *PackageInference) apply(selector *TargetSelector) error {
+ if selector.namePattern != "" {
+ // The selector references a package name, don't apply inference
+ return nil
+ }
+ if pi.PackageName != "" {
+ selector.namePattern = pi.PackageName
+ }
+ if selector.parentDir != "" {
+ parentDir := pi.DirectoryRoot.Join(selector.parentDir)
+ selector.parentDir = parentDir
+ } else if pi.PackageName == "" {
+ // The user didn't set a parent directory and we didn't find a single package,
+ // so use the directory we inferred and select all subdirectories
+ selector.parentDir = pi.DirectoryRoot.Join("**")
+ }
+ return nil
+}
+
+func (r *Resolver) applyInference(selectors []*TargetSelector) ([]*TargetSelector, error) {
+ if r.Inference == nil {
+ return selectors, nil
+ }
+ // If there are existing patterns, use inference on those. If there are no
+ // patterns, but there is a directory supplied, synthesize a selector
+ if len(selectors) == 0 {
+ selectors = append(selectors, &TargetSelector{})
+ }
+ for _, selector := range selectors {
+ if err := r.Inference.apply(selector); err != nil {
+ return nil, err
+ }
+ }
+ return selectors, nil
+}
+
+func (r *Resolver) getFilteredPackages(selectors []*TargetSelector) (*SelectedPackages, error) {
+ selectors, err := r.applyInference(selectors)
+ if err != nil {
+ return nil, err
+ }
+ prodPackageSelectors := []*TargetSelector{}
+ allPackageSelectors := []*TargetSelector{}
+ for _, selector := range selectors {
+ if selector.followProdDepsOnly {
+ prodPackageSelectors = append(prodPackageSelectors, selector)
+ } else {
+ allPackageSelectors = append(allPackageSelectors, selector)
+ }
+ }
+ if len(allPackageSelectors) > 0 || len(prodPackageSelectors) > 0 {
+ if len(allPackageSelectors) > 0 {
+ selected, err := r.filterGraph(allPackageSelectors)
+ if err != nil {
+ return nil, err
+ }
+ return selected, nil
+ }
+ }
+ return &SelectedPackages{
+ pkgs: make(util.Set),
+ }, nil
+}
+
+func (r *Resolver) filterGraph(selectors []*TargetSelector) (*SelectedPackages, error) {
+ includeSelectors := []*TargetSelector{}
+ excludeSelectors := []*TargetSelector{}
+ for _, selector := range selectors {
+ if selector.exclude {
+ excludeSelectors = append(excludeSelectors, selector)
+ } else {
+ includeSelectors = append(includeSelectors, selector)
+ }
+ }
+ var include *SelectedPackages
+ if len(includeSelectors) > 0 {
+ found, err := r.filterGraphWithSelectors(includeSelectors)
+ if err != nil {
+ return nil, err
+ }
+ include = found
+ } else {
+ vertexSet := make(util.Set)
+ for _, v := range r.Graph.Vertices() {
+ vertexSet.Add(v)
+ }
+ include = &SelectedPackages{
+ pkgs: vertexSet,
+ }
+ }
+ exclude, err := r.filterGraphWithSelectors(excludeSelectors)
+ if err != nil {
+ return nil, err
+ }
+ return &SelectedPackages{
+ pkgs: include.pkgs.Difference(exclude.pkgs),
+ unusedFilters: append(include.unusedFilters, exclude.unusedFilters...),
+ }, nil
+}
+
+func (r *Resolver) filterGraphWithSelectors(selectors []*TargetSelector) (*SelectedPackages, error) {
+ unmatchedSelectors := []*TargetSelector{}
+
+ cherryPickedPackages := make(dag.Set)
+ walkedDependencies := make(dag.Set)
+ walkedDependents := make(dag.Set)
+ walkedDependentsDependencies := make(dag.Set)
+
+ for _, selector := range selectors {
+ // TODO(gsoltis): this should be a list?
+ entryPackages, err := r.filterGraphWithSelector(selector)
+ if err != nil {
+ return nil, err
+ }
+ if entryPackages.Len() == 0 {
+ unmatchedSelectors = append(unmatchedSelectors, selector)
+ }
+ for _, pkg := range entryPackages {
+ if selector.includeDependencies {
+ dependencies, err := r.Graph.Ancestors(pkg)
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to get dependencies of package %v", pkg)
+ }
+ for dep := range dependencies {
+ walkedDependencies.Add(dep)
+ }
+ if !selector.excludeSelf {
+ walkedDependencies.Add(pkg)
+ }
+ }
+ if selector.includeDependents {
+ dependents, err := r.Graph.Descendents(pkg)
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to get dependents of package %v", pkg)
+ }
+ for dep := range dependents {
+ walkedDependents.Add(dep)
+ if selector.includeDependencies {
+ dependentDeps, err := r.Graph.Ancestors(dep)
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to get dependencies of dependent %v", dep)
+ }
+ for dependentDep := range dependentDeps {
+ walkedDependentsDependencies.Add(dependentDep)
+ }
+ }
+ }
+ if !selector.excludeSelf {
+ walkedDependents.Add(pkg)
+ }
+ }
+ if !selector.includeDependencies && !selector.includeDependents {
+ cherryPickedPackages.Add(pkg)
+ }
+ }
+ }
+ allPkgs := make(util.Set)
+ for pkg := range cherryPickedPackages {
+ allPkgs.Add(pkg)
+ }
+ for pkg := range walkedDependencies {
+ allPkgs.Add(pkg)
+ }
+ for pkg := range walkedDependents {
+ allPkgs.Add(pkg)
+ }
+ for pkg := range walkedDependentsDependencies {
+ allPkgs.Add(pkg)
+ }
+ return &SelectedPackages{
+ pkgs: allPkgs,
+ unusedFilters: unmatchedSelectors,
+ }, nil
+}
+
+func (r *Resolver) filterGraphWithSelector(selector *TargetSelector) (util.Set, error) {
+ if selector.matchDependencies {
+ return r.filterSubtreesWithSelector(selector)
+ }
+ return r.filterNodesWithSelector(selector)
+}
+
+// filterNodesWithSelector returns the set of nodes that match a given selector
+func (r *Resolver) filterNodesWithSelector(selector *TargetSelector) (util.Set, error) {
+ entryPackages := make(util.Set)
+ selectorWasUsed := false
+ if selector.fromRef != "" {
+ // get changed packages
+ selectorWasUsed = true
+ changedPkgs, err := r.PackagesChangedInRange(selector.fromRef, selector.getToRef())
+ if err != nil {
+ return nil, err
+ }
+ parentDir := selector.parentDir
+ for pkgName := range changedPkgs {
+ if parentDir != "" {
+ // Type assert/coerce to string here because we want to use
+ // this value in a map that has string keys.
+ // TODO(mehulkar): `changedPkgs` is a util.Set, we could make a `util.PackageNamesSet`
+ // or something similar that is all strings.
+ pkgNameStr := pkgName.(string)
+ if pkgName == util.RootPkgName {
+ // The root package changed, only add it if
+ // the parentDir is equivalent to the root
+ if matches, err := doublestar.PathMatch(r.Cwd.Join(parentDir).ToString(), r.Cwd.ToString()); err != nil {
+ return nil, fmt.Errorf("failed to resolve directory relationship %v contains %v: %v", parentDir, r.Cwd, err)
+ } else if matches {
+ entryPackages.Add(pkgName)
+ }
+ } else if pkg, ok := r.WorkspaceInfos.PackageJSONs[pkgNameStr]; !ok {
+ return nil, fmt.Errorf("missing info for package %v", pkgName)
+ } else if matches, err := doublestar.PathMatch(r.Cwd.Join(parentDir).ToString(), pkg.Dir.RestoreAnchor(r.Cwd).ToString()); err != nil {
+ return nil, fmt.Errorf("failed to resolve directory relationship %v contains %v: %v", selector.parentDir, pkg.Dir, err)
+ } else if matches {
+ entryPackages.Add(pkgName)
+ }
+ } else {
+ entryPackages.Add(pkgName)
+ }
+ }
+ } else if selector.parentDir != "" {
+ // get packages by path
+ selectorWasUsed = true
+ parentDir := selector.parentDir
+ if parentDir == "." {
+ entryPackages.Add(util.RootPkgName)
+ } else {
+ for name, pkg := range r.WorkspaceInfos.PackageJSONs {
+ if matches, err := doublestar.PathMatch(r.Cwd.Join(parentDir).ToString(), pkg.Dir.RestoreAnchor(r.Cwd).ToString()); err != nil {
+ return nil, fmt.Errorf("failed to resolve directory relationship %v contains %v: %v", selector.parentDir, pkg.Dir, err)
+ } else if matches {
+ entryPackages.Add(name)
+ }
+ }
+ }
+ }
+ if selector.namePattern != "" {
+ // find packages that match name
+ if !selectorWasUsed {
+ matched, err := matchPackageNamesToVertices(selector.namePattern, r.Graph.Vertices())
+ if err != nil {
+ return nil, err
+ }
+ entryPackages = matched
+ selectorWasUsed = true
+ } else {
+ matched, err := matchPackageNames(selector.namePattern, entryPackages)
+ if err != nil {
+ return nil, err
+ }
+ entryPackages = matched
+ }
+ }
+ // TODO(gsoltis): we can do this earlier
+ // Check if the selector specified anything
+ if !selectorWasUsed {
+ return nil, fmt.Errorf("invalid selector: %v", selector.raw)
+ }
+ return entryPackages, nil
+}
+
+// filterSubtreesWithSelector returns the set of nodes where the node or any of its dependencies
+// match a selector
+func (r *Resolver) filterSubtreesWithSelector(selector *TargetSelector) (util.Set, error) {
+ // foreach package that matches parentDir && namePattern, check if any dependency is in changed packages
+ changedPkgs, err := r.PackagesChangedInRange(selector.fromRef, selector.getToRef())
+ if err != nil {
+ return nil, err
+ }
+
+ parentDir := selector.parentDir
+ entryPackages := make(util.Set)
+ for name, pkg := range r.WorkspaceInfos.PackageJSONs {
+ if parentDir == "" {
+ entryPackages.Add(name)
+ } else if matches, err := doublestar.PathMatch(parentDir.ToString(), pkg.Dir.RestoreAnchor(r.Cwd).ToString()); err != nil {
+ return nil, fmt.Errorf("failed to resolve directory relationship %v contains %v: %v", selector.parentDir, pkg.Dir, err)
+ } else if matches {
+ entryPackages.Add(name)
+ }
+ }
+ if selector.namePattern != "" {
+ matched, err := matchPackageNames(selector.namePattern, entryPackages)
+ if err != nil {
+ return nil, err
+ }
+ entryPackages = matched
+ }
+ roots := make(util.Set)
+ matched := make(util.Set)
+ for pkg := range entryPackages {
+ if matched.Includes(pkg) {
+ roots.Add(pkg)
+ continue
+ }
+ deps, err := r.Graph.Ancestors(pkg)
+ if err != nil {
+ return nil, err
+ }
+ for changedPkg := range changedPkgs {
+ if !selector.excludeSelf && pkg == changedPkg {
+ roots.Add(pkg)
+ break
+ }
+ if deps.Include(changedPkg) {
+ roots.Add(pkg)
+ matched.Add(changedPkg)
+ break
+ }
+ }
+ }
+ return roots, nil
+}
+
+func matchPackageNamesToVertices(pattern string, vertices []dag.Vertex) (util.Set, error) {
+ packages := make(util.Set)
+ for _, v := range vertices {
+ packages.Add(v)
+ }
+ packages.Add(util.RootPkgName)
+ return matchPackageNames(pattern, packages)
+}
+
+func matchPackageNames(pattern string, packages util.Set) (util.Set, error) {
+ matcher, err := matcherFromPattern(pattern)
+ if err != nil {
+ return nil, err
+ }
+ matched := make(util.Set)
+ for _, pkg := range packages {
+ pkg := pkg.(string)
+ if matcher(pkg) {
+ matched.Add(pkg)
+ }
+ }
+ if matched.Len() == 0 && !strings.HasPrefix(pattern, "@") && !strings.Contains(pattern, "/") {
+ // we got no matches and the pattern isn't a scoped package.
+ // Check if we have exactly one scoped package that does match
+ scopedPattern := fmt.Sprintf("@*/%v", pattern)
+ matcher, err = matcherFromPattern(scopedPattern)
+ if err != nil {
+ return nil, err
+ }
+ foundScopedPkg := false
+ for _, pkg := range packages {
+ pkg := pkg.(string)
+ if matcher(pkg) {
+ if foundScopedPkg {
+ // we found a second scoped package. Return the empty set, we can't
+ // disambiguate
+ return make(util.Set), nil
+ }
+ foundScopedPkg = true
+ matched.Add(pkg)
+ }
+ }
+ }
+ return matched, nil
+}
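A minimal standalone sketch (not part of the diff above) of the scoped-package fallback that matchPackageNames implements: a bare pattern such as "bar" only falls back to "@*/bar" when exactly one scoped workspace matches; two candidates are treated as ambiguous and nothing is matched. The helper names below are hypothetical.

package main

import (
	"fmt"
	"regexp"
	"strings"
)

// wildcardMatcher mirrors matcherFromPattern: escape the pattern, turn '*' into '.*', anchor both ends.
func wildcardMatcher(pattern string) func(string) bool {
	escaped := regexp.QuoteMeta(pattern)
	re := regexp.MustCompile("^" + strings.ReplaceAll(escaped, `\*`, ".*") + "$")
	return re.MatchString
}

// matchNames mirrors the fallback logic in matchPackageNames.
func matchNames(pattern string, pkgs []string) []string {
	var matched []string
	m := wildcardMatcher(pattern)
	for _, p := range pkgs {
		if m(p) {
			matched = append(matched, p)
		}
	}
	if len(matched) == 0 && !strings.HasPrefix(pattern, "@") && !strings.Contains(pattern, "/") {
		// No direct match and the pattern isn't scoped: accept a single unambiguous "@scope/<pattern>" match.
		scoped := wildcardMatcher("@*/" + pattern)
		for _, p := range pkgs {
			if scoped(p) {
				if len(matched) > 0 {
					return nil // second scoped candidate: ambiguous, match nothing
				}
				matched = append(matched, p)
			}
		}
	}
	return matched
}

func main() {
	fmt.Println(matchNames("bar", []string{"@foo/bar", "baz"}))        // [@foo/bar]
	fmt.Println(matchNames("bar", []string{"@foo/bar", "@types/bar"})) // []
}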
diff --git a/cli/internal/scope/filter/filter_test.go b/cli/internal/scope/filter/filter_test.go
new file mode 100644
index 0000000..a23ae1d
--- /dev/null
+++ b/cli/internal/scope/filter/filter_test.go
@@ -0,0 +1,614 @@
+package filter
+
+import (
+ "fmt"
+ "os"
+ "strings"
+ "testing"
+
+ "github.com/pyr-sh/dag"
+ "github.com/vercel/turbo/cli/internal/fs"
+ "github.com/vercel/turbo/cli/internal/turbopath"
+ "github.com/vercel/turbo/cli/internal/util"
+ "github.com/vercel/turbo/cli/internal/workspace"
+)
+
+func setMatches(t *testing.T, name string, s util.Set, expected []string) {
+ expectedSet := make(util.Set)
+ for _, item := range expected {
+ expectedSet.Add(item)
+ }
+ missing := s.Difference(expectedSet)
+ if missing.Len() > 0 {
+ t.Errorf("%v set has extra elements: %v", name, strings.Join(missing.UnsafeListOfStrings(), ", "))
+ }
+ extra := expectedSet.Difference(s)
+ if extra.Len() > 0 {
+ t.Errorf("%v set missing elements: %v", name, strings.Join(extra.UnsafeListOfStrings(), ", "))
+ }
+}
+
+func Test_filter(t *testing.T) {
+ rawCwd, err := os.Getwd()
+ if err != nil {
+ t.Fatalf("failed to get working directory: %v", err)
+ }
+ root, err := fs.GetCwd(rawCwd)
+ if err != nil {
+ t.Fatalf("failed to get working directory: %v", err)
+ }
+ workspaceInfos := workspace.Catalog{
+ PackageJSONs: make(map[string]*fs.PackageJSON),
+ }
+ packageJSONs := workspaceInfos.PackageJSONs
+ graph := &dag.AcyclicGraph{}
+ graph.Add("project-0")
+ packageJSONs["project-0"] = &fs.PackageJSON{
+ Name: "project-0",
+ Dir: turbopath.AnchoredUnixPath("packages/project-0").ToSystemPath(),
+ }
+ graph.Add("project-1")
+ packageJSONs["project-1"] = &fs.PackageJSON{
+ Name: "project-1",
+ Dir: turbopath.AnchoredUnixPath("packages/project-1").ToSystemPath(),
+ }
+ graph.Add("project-2")
+ packageJSONs["project-2"] = &fs.PackageJSON{
+ Name: "project-2",
+ Dir: "project-2",
+ }
+ graph.Add("project-3")
+ packageJSONs["project-3"] = &fs.PackageJSON{
+ Name: "project-3",
+ Dir: "project-3",
+ }
+ graph.Add("project-4")
+ packageJSONs["project-4"] = &fs.PackageJSON{
+ Name: "project-4",
+ Dir: "project-4",
+ }
+ graph.Add("project-5")
+ packageJSONs["project-5"] = &fs.PackageJSON{
+ Name: "project-5",
+ Dir: "project-5",
+ }
+ // Note: inside project-5
+ graph.Add("project-6")
+ packageJSONs["project-6"] = &fs.PackageJSON{
+ Name: "project-6",
+ Dir: turbopath.AnchoredUnixPath("project-5/packages/project-6").ToSystemPath(),
+ }
+ // Add dependencies
+ graph.Connect(dag.BasicEdge("project-0", "project-1"))
+ graph.Connect(dag.BasicEdge("project-0", "project-5"))
+ graph.Connect(dag.BasicEdge("project-1", "project-2"))
+ graph.Connect(dag.BasicEdge("project-1", "project-4"))
+
+ testCases := []struct {
+ Name string
+ Selectors []*TargetSelector
+ PackageInference *PackageInference
+ Expected []string
+ }{
+ {
+ "select root package",
+ []*TargetSelector{
+ {
+ namePattern: util.RootPkgName,
+ },
+ },
+ nil,
+ []string{util.RootPkgName},
+ },
+ {
+ "select only package dependencies (excluding the package itself)",
+ []*TargetSelector{
+ {
+ excludeSelf: true,
+ includeDependencies: true,
+ namePattern: "project-1",
+ },
+ },
+ nil,
+ []string{"project-2", "project-4"},
+ },
+ {
+ "select package with dependencies",
+ []*TargetSelector{
+ {
+ excludeSelf: false,
+ includeDependencies: true,
+ namePattern: "project-1",
+ },
+ },
+ nil,
+ []string{"project-1", "project-2", "project-4"},
+ },
+ {
+ "select package with dependencies and dependents, including dependent dependencies",
+ []*TargetSelector{
+ {
+ excludeSelf: true,
+ includeDependencies: true,
+ includeDependents: true,
+ namePattern: "project-1",
+ },
+ },
+ nil,
+ []string{"project-0", "project-1", "project-2", "project-4", "project-5"},
+ },
+ {
+ "select package with dependents",
+ []*TargetSelector{
+ {
+ includeDependents: true,
+ namePattern: "project-2",
+ },
+ },
+ nil,
+ []string{"project-1", "project-2", "project-0"},
+ },
+ {
+ "select dependents excluding package itself",
+ []*TargetSelector{
+ {
+ excludeSelf: true,
+ includeDependents: true,
+ namePattern: "project-2",
+ },
+ },
+ nil,
+ []string{"project-0", "project-1"},
+ },
+ {
+ "filter using two selectors: one selects dependencies another selects dependents",
+ []*TargetSelector{
+ {
+ excludeSelf: true,
+ includeDependents: true,
+ namePattern: "project-2",
+ },
+ {
+ excludeSelf: true,
+ includeDependencies: true,
+ namePattern: "project-1",
+ },
+ },
+ nil,
+ []string{"project-0", "project-1", "project-2", "project-4"},
+ },
+ {
+ "select just a package by name",
+ []*TargetSelector{
+ {
+ namePattern: "project-2",
+ },
+ },
+ nil,
+ []string{"project-2"},
+ },
+ // Note: we don't support the option to switch path prefix mode
+ // {
+ // "select by parentDir",
+ // []*TargetSelector{
+ // {
+ // parentDir: "/packages",
+ // },
+ // },
+ // []string{"project-0", "project-1"},
+ // },
+ {
+ "select by parentDir using glob",
+ []*TargetSelector{
+ {
+ parentDir: turbopath.MakeRelativeSystemPath("packages", "*"),
+ },
+ },
+ nil,
+ []string{"project-0", "project-1"},
+ },
+ {
+ "select by parentDir using globstar",
+ []*TargetSelector{
+ {
+ parentDir: turbopath.MakeRelativeSystemPath("project-5", "**"),
+ },
+ },
+ nil,
+ []string{"project-5", "project-6"},
+ },
+ {
+ "select by parentDir with no glob",
+ []*TargetSelector{
+ {
+ parentDir: turbopath.MakeRelativeSystemPath("project-5"),
+ },
+ },
+ nil,
+ []string{"project-5"},
+ },
+ {
+ "select all packages except one",
+ []*TargetSelector{
+ {
+ exclude: true,
+ namePattern: "project-1",
+ },
+ },
+ nil,
+ []string{"project-0", "project-2", "project-3", "project-4", "project-5", "project-6"},
+ },
+ {
+ "select by parentDir and exclude one package by pattern",
+ []*TargetSelector{
+ {
+ parentDir: turbopath.MakeRelativeSystemPath("packages", "*"),
+ },
+ {
+ exclude: true,
+ namePattern: "*-1",
+ },
+ },
+ nil,
+ []string{"project-0"},
+ },
+ {
+ "select root package by directory",
+ []*TargetSelector{
+ {
+ parentDir: turbopath.MakeRelativeSystemPath("."), // input . gets cleaned to ""
+ },
+ },
+ nil,
+ []string{util.RootPkgName},
+ },
+ {
+ "select packages directory",
+ []*TargetSelector{},
+ &PackageInference{
+ DirectoryRoot: turbopath.MakeRelativeSystemPath("packages"),
+ },
+ []string{"project-0", "project-1"},
+ },
+ {
+ "infer single package",
+ []*TargetSelector{},
+ &PackageInference{
+ DirectoryRoot: turbopath.MakeRelativeSystemPath("packages", "project-0"),
+ PackageName: "project-0",
+ },
+ []string{"project-0"},
+ },
+ {
+ "infer single package from subdirectory",
+ []*TargetSelector{},
+ &PackageInference{
+ DirectoryRoot: turbopath.MakeRelativeSystemPath("packages", "project-0", "src"),
+ PackageName: "project-0",
+ },
+ []string{"project-0"},
+ },
+ }
+
+ for _, tc := range testCases {
+ t.Run(tc.Name, func(t *testing.T) {
+ r := &Resolver{
+ Graph: graph,
+ WorkspaceInfos: workspaceInfos,
+ Cwd: root,
+ Inference: tc.PackageInference,
+ }
+ pkgs, err := r.getFilteredPackages(tc.Selectors)
+ if err != nil {
+ t.Fatalf("%v failed to filter packages: %v", tc.Name, err)
+ }
+ setMatches(t, tc.Name, pkgs.pkgs, tc.Expected)
+ })
+ }
+
+ t.Run("report unmatched filters", func(t *testing.T) {
+ r := &Resolver{
+ Graph: graph,
+ WorkspaceInfos: workspaceInfos,
+ Cwd: root,
+ }
+ pkgs, err := r.getFilteredPackages([]*TargetSelector{
+ {
+ excludeSelf: true,
+ includeDependencies: true,
+ namePattern: "project-7",
+ },
+ })
+ if err != nil {
+ t.Fatalf("unmatched filter failed to filter packages: %v", err)
+ }
+ if pkgs.pkgs.Len() != 0 {
+ t.Errorf("unmatched filter expected no packages, got %v", strings.Join(pkgs.pkgs.UnsafeListOfStrings(), ", "))
+ }
+ if len(pkgs.unusedFilters) != 1 {
+ t.Errorf("unmatched filter expected to report one unused filter, got %v", len(pkgs.unusedFilters))
+ }
+ })
+}
+
+func Test_matchScopedPackage(t *testing.T) {
+ rawCwd, err := os.Getwd()
+ if err != nil {
+ t.Fatalf("failed to get working directory: %v", err)
+ }
+ root, err := fs.GetCwd(rawCwd)
+ if err != nil {
+ t.Fatalf("failed to get working directory: %v", err)
+ }
+
+ workspaceInfos := workspace.Catalog{
+ PackageJSONs: make(map[string]*fs.PackageJSON),
+ }
+ packageJSONs := workspaceInfos.PackageJSONs
+ graph := &dag.AcyclicGraph{}
+ graph.Add("@foo/bar")
+ packageJSONs["@foo/bar"] = &fs.PackageJSON{
+ Name: "@foo/bar",
+ Dir: turbopath.AnchoredUnixPath("packages/bar").ToSystemPath(),
+ }
+ r := &Resolver{
+ Graph: graph,
+ WorkspaceInfos: workspaceInfos,
+ Cwd: root,
+ }
+ pkgs, err := r.getFilteredPackages([]*TargetSelector{
+ {
+ namePattern: "bar",
+ },
+ })
+ if err != nil {
+ t.Fatalf("failed to filter packages: %v", err)
+ }
+ setMatches(t, "match scoped package", pkgs.pkgs, []string{"@foo/bar"})
+}
+
+func Test_matchExactPackages(t *testing.T) {
+ rawCwd, err := os.Getwd()
+ if err != nil {
+ t.Fatalf("failed to get working directory: %v", err)
+ }
+ root, err := fs.GetCwd(rawCwd)
+ if err != nil {
+ t.Fatalf("failed to get working directory: %v", err)
+ }
+
+ workspaceInfos := workspace.Catalog{
+ PackageJSONs: make(map[string]*fs.PackageJSON),
+ }
+ packageJSONs := workspaceInfos.PackageJSONs
+ graph := &dag.AcyclicGraph{}
+ graph.Add("@foo/bar")
+ packageJSONs["@foo/bar"] = &fs.PackageJSON{
+ Name: "@foo/bar",
+ Dir: turbopath.AnchoredUnixPath("packages/@foo/bar").ToSystemPath(),
+ }
+ graph.Add("bar")
+ packageJSONs["bar"] = &fs.PackageJSON{
+ Name: "bar",
+ Dir: turbopath.AnchoredUnixPath("packages/bar").ToSystemPath(),
+ }
+ r := &Resolver{
+ Graph: graph,
+ WorkspaceInfos: workspaceInfos,
+ Cwd: root,
+ }
+ pkgs, err := r.getFilteredPackages([]*TargetSelector{
+ {
+ namePattern: "bar",
+ },
+ })
+ if err != nil {
+ t.Fatalf("failed to filter packages: %v", err)
+ }
+ setMatches(t, "match exact package", pkgs.pkgs, []string{"bar"})
+}
+
+func Test_matchMultipleScopedPackages(t *testing.T) {
+ rawCwd, err := os.Getwd()
+ if err != nil {
+ t.Fatalf("failed to get working directory: %v", err)
+ }
+ root, err := fs.GetCwd(rawCwd)
+ if err != nil {
+ t.Fatalf("failed to get working directory: %v", err)
+ }
+
+ workspaceInfos := workspace.Catalog{
+ PackageJSONs: make(map[string]*fs.PackageJSON),
+ }
+ packageJSONs := workspaceInfos.PackageJSONs
+ graph := &dag.AcyclicGraph{}
+ graph.Add("@foo/bar")
+ packageJSONs["@foo/bar"] = &fs.PackageJSON{
+ Name: "@foo/bar",
+ Dir: turbopath.AnchoredUnixPath("packages/@foo/bar").ToSystemPath(),
+ }
+ graph.Add("@types/bar")
+ packageJSONs["@types/bar"] = &fs.PackageJSON{
+ Name: "@types/bar",
+ Dir: turbopath.AnchoredUnixPath("packages/@types/bar").ToSystemPath(),
+ }
+ r := &Resolver{
+ Graph: graph,
+ WorkspaceInfos: workspaceInfos,
+ Cwd: root,
+ }
+ pkgs, err := r.getFilteredPackages([]*TargetSelector{
+ {
+ namePattern: "bar",
+ },
+ })
+ if err != nil {
+ t.Fatalf("failed to filter packages: %v", err)
+ }
+ setMatches(t, "match nothing with multiple scoped packages", pkgs.pkgs, []string{})
+}
+
+func Test_SCM(t *testing.T) {
+ rawCwd, err := os.Getwd()
+ if err != nil {
+ t.Fatalf("failed to get working directory: %v", err)
+ }
+ root, err := fs.GetCwd(rawCwd)
+ if err != nil {
+ t.Fatalf("failed to get working directory: %v", err)
+ }
+ head1Changed := make(util.Set)
+ head1Changed.Add("package-1")
+ head1Changed.Add("package-2")
+ head1Changed.Add(util.RootPkgName)
+ head2Changed := make(util.Set)
+ head2Changed.Add("package-3")
+ workspaceInfos := workspace.Catalog{
+ PackageJSONs: make(map[string]*fs.PackageJSON),
+ }
+ packageJSONs := workspaceInfos.PackageJSONs
+ graph := &dag.AcyclicGraph{}
+ graph.Add("package-1")
+ packageJSONs["package-1"] = &fs.PackageJSON{
+ Name: "package-1",
+ Dir: "package-1",
+ }
+ graph.Add("package-2")
+ packageJSONs["package-2"] = &fs.PackageJSON{
+ Name: "package-2",
+ Dir: "package-2",
+ }
+ graph.Add("package-3")
+ packageJSONs["package-3"] = &fs.PackageJSON{
+ Name: "package-3",
+ Dir: "package-3",
+ }
+ graph.Add("package-20")
+ packageJSONs["package-20"] = &fs.PackageJSON{
+ Name: "package-20",
+ Dir: "package-20",
+ }
+
+ graph.Connect(dag.BasicEdge("package-3", "package-20"))
+
+ r := &Resolver{
+ Graph: graph,
+ WorkspaceInfos: workspaceInfos,
+ Cwd: root,
+ PackagesChangedInRange: func(fromRef string, toRef string) (util.Set, error) {
+ if fromRef == "HEAD~1" && toRef == "HEAD" {
+ return head1Changed, nil
+ } else if fromRef == "HEAD~2" && toRef == "HEAD" {
+ union := head1Changed.Copy()
+ for val := range head2Changed {
+ union.Add(val)
+ }
+ return union, nil
+ } else if fromRef == "HEAD~2" && toRef == "HEAD~1" {
+ return head2Changed, nil
+ }
+ panic(fmt.Sprintf("unsupported commit range %v...%v", fromRef, toRef))
+ },
+ }
+
+ testCases := []struct {
+ Name string
+ Selectors []*TargetSelector
+ Expected []string
+ }{
+ {
+ "all changed packages",
+ []*TargetSelector{
+ {
+ fromRef: "HEAD~1",
+ },
+ },
+ []string{"package-1", "package-2", util.RootPkgName},
+ },
+ {
+ "all changed packages with parent dir exact match",
+ []*TargetSelector{
+ {
+ fromRef: "HEAD~1",
+ parentDir: ".",
+ },
+ },
+ []string{util.RootPkgName},
+ },
+ {
+ "changed packages in directory",
+ []*TargetSelector{
+ {
+ fromRef: "HEAD~1",
+ parentDir: "package-2",
+ },
+ },
+ []string{"package-2"},
+ },
+ {
+ "changed packages matching pattern",
+ []*TargetSelector{
+ {
+ fromRef: "HEAD~1",
+ namePattern: "package-2*",
+ },
+ },
+ []string{"package-2"},
+ },
+ {
+ "changed packages matching pattern",
+ []*TargetSelector{
+ {
+ fromRef: "HEAD~1",
+ namePattern: "package-2*",
+ },
+ },
+ []string{"package-2"},
+ },
+ // Note: missing test here that takes advantage of automatically exempting
+ // test-only changes from pulling in dependents
+ //
+ // turbo-specific tests below here
+ {
+ "changed package was requested scope, and we're matching dependencies",
+ []*TargetSelector{
+ {
+ fromRef: "HEAD~1",
+ namePattern: "package-1",
+ matchDependencies: true,
+ },
+ },
+ []string{"package-1"},
+ },
+ {
+ "older commit",
+ []*TargetSelector{
+ {
+ fromRef: "HEAD~2",
+ },
+ },
+ []string{"package-1", "package-2", "package-3", util.RootPkgName},
+ },
+ {
+ "commit range",
+ []*TargetSelector{
+ {
+ fromRef: "HEAD~2",
+ toRefOverride: "HEAD~1",
+ },
+ },
+ []string{"package-3"},
+ },
+ }
+
+ for _, tc := range testCases {
+ t.Run(tc.Name, func(t *testing.T) {
+ pkgs, err := r.getFilteredPackages(tc.Selectors)
+ if err != nil {
+ t.Fatalf("%v failed to filter packages: %v", tc.Name, err)
+ }
+ setMatches(t, tc.Name, pkgs.pkgs, tc.Expected)
+ })
+ }
+}
diff --git a/cli/internal/scope/filter/matcher.go b/cli/internal/scope/filter/matcher.go
new file mode 100644
index 0000000..2460326
--- /dev/null
+++ b/cli/internal/scope/filter/matcher.go
@@ -0,0 +1,32 @@
+package filter
+
+import (
+ "regexp"
+ "strings"
+
+ "github.com/pkg/errors"
+)
+
+type Matcher = func(pkgName string) bool
+
+func matchAll(pkgName string) bool {
+ return true
+}
+
+func matcherFromPattern(pattern string) (Matcher, error) {
+ if pattern == "*" {
+ return matchAll, nil
+ }
+
+ escaped := regexp.QuoteMeta(pattern)
+ // replace escaped '*' with regex '.*'
+ normalized := strings.ReplaceAll(escaped, "\\*", ".*")
+ if normalized == pattern {
+ return func(pkgName string) bool { return pkgName == pattern }, nil
+ }
+ regex, err := regexp.Compile("^" + normalized + "$")
+ if err != nil {
+ return nil, errors.Wrapf(err, "failed to compile filter pattern to regex: %v", pattern)
+ }
+ return func(pkgName string) bool { return regex.Match([]byte(pkgName)) }, nil
+}
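As a usage illustration (not part of the diff), a hypothetical helper that could sit alongside matcher_test.go in this package, assuming "fmt" is imported: wildcard patterns compile to anchored regexes, while patterns without '*' take the exact string-comparison path.

func exampleMatcherUsage() {
	// "eslint-*" compiles to the anchored regex ^eslint-.*$
	m, _ := matcherFromPattern("eslint-*")
	fmt.Println(m("eslint-plugin-foo"), m("express")) // true false

	// A pattern with no '*' is compared with plain string equality.
	exact, _ := matcherFromPattern("express")
	fmt.Println(exact("express"), exact("express-session")) // true false
}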
diff --git a/cli/internal/scope/filter/matcher_test.go b/cli/internal/scope/filter/matcher_test.go
new file mode 100644
index 0000000..966be2b
--- /dev/null
+++ b/cli/internal/scope/filter/matcher_test.go
@@ -0,0 +1,65 @@
+package filter
+
+import "testing"
+
+func TestMatcher(t *testing.T) {
+ testCases := map[string][]struct {
+ test string
+ want bool
+ }{
+ "*": {
+ {
+ test: "@eslint/plugin-foo",
+ want: true,
+ },
+ {
+ test: "express",
+ want: true,
+ },
+ },
+ "eslint-*": {
+ {
+ test: "eslint-plugin-foo",
+ want: true,
+ },
+ {
+ test: "express",
+ want: false,
+ },
+ },
+ "*plugin*": {
+ {
+ test: "@eslint/plugin-foo",
+ want: true,
+ },
+ {
+ test: "express",
+ want: false,
+ },
+ },
+ "a*c": {
+ {
+ test: "abc",
+ want: true,
+ },
+ },
+ "*-positive": {
+ {
+ test: "is-positive",
+ want: true,
+ },
+ },
+ }
+ for pattern, tests := range testCases {
+ matcher, err := matcherFromPattern(pattern)
+ if err != nil {
+ t.Fatalf("failed to compile match pattern %v, %v", pattern, err)
+ }
+ for _, testCase := range tests {
+ got := matcher(testCase.test)
+ if got != testCase.want {
+ t.Errorf("%v.match(%v) got %v, want %v", pattern, testCase.test, got, testCase.want)
+ }
+ }
+ }
+}
diff --git a/cli/internal/scope/filter/parse_target_selector.go b/cli/internal/scope/filter/parse_target_selector.go
new file mode 100644
index 0000000..4f5c90f
--- /dev/null
+++ b/cli/internal/scope/filter/parse_target_selector.go
@@ -0,0 +1,165 @@
+package filter
+
+import (
+ "regexp"
+ "strings"
+
+ "github.com/pkg/errors"
+ "github.com/vercel/turbo/cli/internal/turbopath"
+)
+
+type TargetSelector struct {
+ includeDependencies bool
+ matchDependencies bool
+ includeDependents bool
+ exclude bool
+ excludeSelf bool
+ followProdDepsOnly bool
+ parentDir turbopath.RelativeSystemPath
+ namePattern string
+ fromRef string
+ toRefOverride string
+ raw string
+}
+
+func (ts *TargetSelector) IsValid() bool {
+ return ts.fromRef != "" || ts.parentDir != "" || ts.namePattern != ""
+}
+
+// getToRef returns the git ref to use for upper bound of the comparison when finding changed
+// packages.
+func (ts *TargetSelector) getToRef() string {
+ if ts.toRefOverride == "" {
+ return "HEAD"
+ }
+ return ts.toRefOverride
+}
+
+var errCantMatchDependencies = errors.New("cannot use match dependencies without specifying either a directory or package")
+
+var targetSelectorRegex = regexp.MustCompile(`^(?P<name>[^.](?:[^{}[\]]*[^{}[\].])?)?(?P<directory>\{[^}]*\})?(?P<commits>(?:\.{3})?\[[^\]]+\])?$`)
+
+// ParseTargetSelector parses a pnpm-compatible --filter selector string into a TargetSelector
+func ParseTargetSelector(rawSelector string) (*TargetSelector, error) {
+ exclude := false
+ firstChar := rawSelector[0]
+ selector := rawSelector
+ if firstChar == '!' {
+ selector = selector[1:]
+ exclude = true
+ }
+ excludeSelf := false
+ includeDependencies := strings.HasSuffix(selector, "...")
+ if includeDependencies {
+ selector = selector[:len(selector)-3]
+ if strings.HasSuffix(selector, "^") {
+ excludeSelf = true
+ selector = selector[:len(selector)-1]
+ }
+ }
+ includeDependents := strings.HasPrefix(selector, "...")
+ if includeDependents {
+ selector = selector[3:]
+ if strings.HasPrefix(selector, "^") {
+ excludeSelf = true
+ selector = selector[1:]
+ }
+ }
+
+ matches := targetSelectorRegex.FindAllStringSubmatch(selector, -1)
+
+ if len(matches) == 0 {
+ if relativePath, ok := isSelectorByLocation(selector); ok {
+ return &TargetSelector{
+ exclude: exclude,
+ includeDependencies: includeDependencies,
+ includeDependents: includeDependents,
+ parentDir: relativePath,
+ raw: rawSelector,
+ }, nil
+ }
+ return &TargetSelector{
+ exclude: exclude,
+ excludeSelf: excludeSelf,
+ includeDependencies: includeDependencies,
+ includeDependents: includeDependents,
+ namePattern: selector,
+ raw: rawSelector,
+ }, nil
+ }
+
+ fromRef := ""
+ toRefOverride := ""
+ var parentDir turbopath.RelativeSystemPath
+ namePattern := ""
+ preAddDependencies := false
+ if len(matches) > 0 && len(matches[0]) > 0 {
+ match := matches[0]
+ namePattern = match[targetSelectorRegex.SubexpIndex("name")]
+ rawParentDir := match[targetSelectorRegex.SubexpIndex("directory")]
+ if len(rawParentDir) > 0 {
+ // trim {}
+ rawParentDir = rawParentDir[1 : len(rawParentDir)-1]
+ if rawParentDir == "" {
+ return nil, errors.New("empty path specification")
+ } else if relPath, err := turbopath.CheckedToRelativeSystemPath(rawParentDir); err == nil {
+ parentDir = relPath
+ } else {
+ return nil, errors.Wrapf(err, "invalid path specification: %v", rawParentDir)
+ }
+ }
+ rawCommits := match[targetSelectorRegex.SubexpIndex("commits")]
+ if len(rawCommits) > 0 {
+ fromRef = rawCommits
+ if strings.HasPrefix(fromRef, "...") {
+ if parentDir == "" && namePattern == "" {
+ return &TargetSelector{}, errCantMatchDependencies
+ }
+ preAddDependencies = true
+ fromRef = fromRef[3:]
+ }
+ // strip []
+ fromRef = fromRef[1 : len(fromRef)-1]
+ refs := strings.Split(fromRef, "...")
+ if len(refs) == 2 {
+ fromRef = refs[0]
+ toRefOverride = refs[1]
+ }
+ }
+ }
+
+ return &TargetSelector{
+ fromRef: fromRef,
+ toRefOverride: toRefOverride,
+ exclude: exclude,
+ excludeSelf: excludeSelf,
+ includeDependencies: includeDependencies,
+ matchDependencies: preAddDependencies,
+ includeDependents: includeDependents,
+ namePattern: namePattern,
+ parentDir: parentDir,
+ raw: rawSelector,
+ }, nil
+}
+
+// isSelectorByLocation returns true if the selector is by filesystem location
+func isSelectorByLocation(rawSelector string) (turbopath.RelativeSystemPath, bool) {
+ if rawSelector[0:1] != "." {
+ return "", false
+ }
+
+ // . or ./ or .\
+ if len(rawSelector) == 1 || rawSelector[1:2] == "/" || rawSelector[1:2] == "\\" {
+ return turbopath.MakeRelativeSystemPath(rawSelector), true
+ }
+
+ if rawSelector[1:2] != "." {
+ return "", false
+ }
+
+ // .. or ../ or ..\
+ if len(rawSelector) == 2 || rawSelector[2:3] == "/" || rawSelector[2:3] == "\\" {
+ return turbopath.MakeRelativeSystemPath(rawSelector), true
+ }
+ return "", false
+}
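A quick illustration (not part of the diff) of how a few selector strings decompose, consistent with the parser above and the tests that follow; the hypothetical helper assumes it lives in this package with "fmt" imported.

func sketchSelectorParsing() {
	// "...project-1..." selects project-1 plus its dependents and its dependencies.
	sel, _ := ParseTargetSelector("...project-1...")
	fmt.Println(sel.includeDependents, sel.includeDependencies, sel.namePattern) // true true project-1

	// A leading '!' marks an exclusion; "./foo" selects by filesystem location.
	excl, _ := ParseTargetSelector("!./foo")
	fmt.Println(excl.exclude, excl.parentDir) // true foo

	// "[from...to]" captures an explicit git ref range.
	ranged, _ := ParseTargetSelector("[HEAD~2...HEAD~1]")
	fmt.Println(ranged.fromRef, ranged.toRefOverride) // HEAD~2 HEAD~1
}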
diff --git a/cli/internal/scope/filter/parse_target_selector_test.go b/cli/internal/scope/filter/parse_target_selector_test.go
new file mode 100644
index 0000000..2973a61
--- /dev/null
+++ b/cli/internal/scope/filter/parse_target_selector_test.go
@@ -0,0 +1,311 @@
+package filter
+
+import (
+ "reflect"
+ "testing"
+
+ "github.com/vercel/turbo/cli/internal/turbopath"
+)
+
+func TestParseTargetSelector(t *testing.T) {
+ tests := []struct {
+ rawSelector string
+ want *TargetSelector
+ wantErr bool
+ }{
+ {
+ "{}",
+ &TargetSelector{},
+ true,
+ },
+ {
+ "foo",
+ &TargetSelector{
+ fromRef: "",
+ exclude: false,
+ excludeSelf: false,
+ includeDependencies: false,
+ includeDependents: false,
+ namePattern: "foo",
+ parentDir: "",
+ },
+ false,
+ },
+ {
+ "foo...",
+ &TargetSelector{
+ fromRef: "",
+ exclude: false,
+ excludeSelf: false,
+ includeDependencies: true,
+ includeDependents: false,
+ namePattern: "foo",
+ parentDir: "",
+ },
+ false,
+ },
+ {
+ "...foo",
+ &TargetSelector{
+ fromRef: "",
+ exclude: false,
+ excludeSelf: false,
+ includeDependencies: false,
+ includeDependents: true,
+ namePattern: "foo",
+ parentDir: "",
+ },
+ false,
+ },
+ {
+ "...foo...",
+ &TargetSelector{
+ fromRef: "",
+ exclude: false,
+ excludeSelf: false,
+ includeDependencies: true,
+ includeDependents: true,
+ namePattern: "foo",
+ parentDir: "",
+ },
+ false,
+ },
+ {
+ "foo^...",
+ &TargetSelector{
+ fromRef: "",
+ exclude: false,
+ excludeSelf: true,
+ includeDependencies: true,
+ includeDependents: false,
+ namePattern: "foo",
+ parentDir: "",
+ },
+ false,
+ },
+ {
+ "...^foo",
+ &TargetSelector{
+ fromRef: "",
+ exclude: false,
+ excludeSelf: true,
+ includeDependencies: false,
+ includeDependents: true,
+ namePattern: "foo",
+ parentDir: "",
+ },
+ false,
+ },
+ {
+ "./foo",
+ &TargetSelector{
+ fromRef: "",
+ exclude: false,
+ excludeSelf: false,
+ includeDependencies: false,
+ includeDependents: false,
+ namePattern: "",
+ parentDir: "foo",
+ },
+ false,
+ },
+ {
+ "../foo",
+ &TargetSelector{
+ fromRef: "",
+ exclude: false,
+ excludeSelf: false,
+ includeDependencies: false,
+ includeDependents: false,
+ namePattern: "",
+ parentDir: turbopath.MakeRelativeSystemPath("..", "foo"),
+ },
+ false,
+ },
+ {
+ "...{./foo}",
+ &TargetSelector{
+ fromRef: "",
+ exclude: false,
+ excludeSelf: false,
+ includeDependencies: false,
+ includeDependents: true,
+ namePattern: "",
+ parentDir: "foo",
+ },
+ false,
+ },
+ {
+ ".",
+ &TargetSelector{
+ fromRef: "",
+ exclude: false,
+ excludeSelf: false,
+ includeDependencies: false,
+ includeDependents: false,
+ namePattern: "",
+ parentDir: ".",
+ },
+ false,
+ },
+ {
+ "..",
+ &TargetSelector{
+ fromRef: "",
+ exclude: false,
+ excludeSelf: false,
+ includeDependencies: false,
+ includeDependents: false,
+ namePattern: "",
+ parentDir: "..",
+ },
+ false,
+ },
+ {
+ "[master]",
+ &TargetSelector{
+ fromRef: "master",
+ exclude: false,
+ excludeSelf: false,
+ includeDependencies: false,
+ includeDependents: false,
+ namePattern: "",
+ parentDir: "",
+ },
+ false,
+ },
+ {
+ "[from...to]",
+ &TargetSelector{
+ fromRef: "from",
+ toRefOverride: "to",
+ },
+ false,
+ },
+ {
+ "{foo}[master]",
+ &TargetSelector{
+ fromRef: "master",
+ exclude: false,
+ excludeSelf: false,
+ includeDependencies: false,
+ includeDependents: false,
+ namePattern: "",
+ parentDir: "foo",
+ },
+ false,
+ },
+ {
+ "pattern{foo}[master]",
+ &TargetSelector{
+ fromRef: "master",
+ exclude: false,
+ excludeSelf: false,
+ includeDependencies: false,
+ includeDependents: false,
+ namePattern: "pattern",
+ parentDir: "foo",
+ },
+ false,
+ },
+ {
+ "[master]...",
+ &TargetSelector{
+ fromRef: "master",
+ exclude: false,
+ excludeSelf: false,
+ includeDependencies: true,
+ includeDependents: false,
+ namePattern: "",
+ parentDir: "",
+ },
+ false,
+ },
+ {
+ "...[master]",
+ &TargetSelector{
+ fromRef: "master",
+ exclude: false,
+ excludeSelf: false,
+ includeDependencies: false,
+ includeDependents: true,
+ namePattern: "",
+ parentDir: "",
+ },
+ false,
+ },
+ {
+ "...[master]...",
+ &TargetSelector{
+ fromRef: "master",
+ exclude: false,
+ excludeSelf: false,
+ includeDependencies: true,
+ includeDependents: true,
+ namePattern: "",
+ parentDir: "",
+ },
+ false,
+ },
+ {
+ "...[from...to]...",
+ &TargetSelector{
+ fromRef: "from",
+ toRefOverride: "to",
+ includeDependencies: true,
+ includeDependents: true,
+ },
+ false,
+ },
+ {
+ "foo...[master]",
+ &TargetSelector{
+ fromRef: "master",
+ namePattern: "foo",
+ matchDependencies: true,
+ },
+ false,
+ },
+ {
+ "foo...[master]...",
+ &TargetSelector{
+ fromRef: "master",
+ namePattern: "foo",
+ matchDependencies: true,
+ includeDependencies: true,
+ },
+ false,
+ },
+ {
+ "{foo}...[master]",
+ &TargetSelector{
+ fromRef: "master",
+ parentDir: "foo",
+ matchDependencies: true,
+ },
+ false,
+ },
+ {
+ "......[master]",
+ &TargetSelector{},
+ true,
+ },
+ }
+ for _, tt := range tests {
+ t.Run(tt.rawSelector, func(t *testing.T) {
+ got, err := ParseTargetSelector(tt.rawSelector)
+ if tt.wantErr {
+ if err == nil {
+ t.Errorf("ParseTargetSelector() error = %#v, wantErr %#v", err, tt.wantErr)
+ }
+ } else {
+ // copy the raw selector from the args into what we want. This value is used
+ // for reporting errors in the case of a malformed selector
+ tt.want.raw = tt.rawSelector
+ if !reflect.DeepEqual(got, tt.want) {
+ t.Errorf("ParseTargetSelector() = %#v, want %#v", got, tt.want)
+ }
+ }
+ })
+ }
+}
diff --git a/cli/internal/scope/scope.go b/cli/internal/scope/scope.go
new file mode 100644
index 0000000..b5ed4e7
--- /dev/null
+++ b/cli/internal/scope/scope.go
@@ -0,0 +1,380 @@
+package scope
+
+import (
+ "fmt"
+ "os"
+ "path/filepath"
+ "sort"
+ "strings"
+
+ "github.com/hashicorp/go-hclog"
+ "github.com/mitchellh/cli"
+ "github.com/pkg/errors"
+ "github.com/vercel/turbo/cli/internal/context"
+ "github.com/vercel/turbo/cli/internal/lockfile"
+ "github.com/vercel/turbo/cli/internal/scm"
+ scope_filter "github.com/vercel/turbo/cli/internal/scope/filter"
+ "github.com/vercel/turbo/cli/internal/turbopath"
+ "github.com/vercel/turbo/cli/internal/turbostate"
+ "github.com/vercel/turbo/cli/internal/util"
+ "github.com/vercel/turbo/cli/internal/util/filter"
+ "github.com/vercel/turbo/cli/internal/workspace"
+)
+
+// LegacyFilter holds the options in use before the filter syntax. They have their own rules
+// for how they are compiled into filter expressions.
+type LegacyFilter struct {
+ // IncludeDependencies is whether to include pkg.dependencies in execution (defaults to false)
+ IncludeDependencies bool
+ // SkipDependents is whether to skip impacted dependents (consumers) in execution (defaults to false)
+ SkipDependents bool
+ // Entrypoints is a list of package entrypoints
+ Entrypoints []string
+ // Since is the git ref used to calculate changed packages
+ Since string
+}
+
+var _sinceHelp = `Limit/Set scope to changed packages since a
+mergebase. This uses the git diff ${target_branch}...
+mechanism to identify which packages have changed.`
+
+func addLegacyFlagsFromArgs(opts *LegacyFilter, args *turbostate.ParsedArgsFromRust) {
+ opts.IncludeDependencies = args.Command.Run.IncludeDependencies
+ opts.SkipDependents = args.Command.Run.NoDeps
+ opts.Entrypoints = args.Command.Run.Scope
+ opts.Since = args.Command.Run.Since
+}
+
+// Opts holds the options for how to select the entrypoint packages for a turbo run
+type Opts struct {
+ LegacyFilter LegacyFilter
+ // IgnorePatterns is the list of globs of file paths to ignore from execution scope calculation
+ IgnorePatterns []string
+ // GlobalDepPatterns is a list of globs to global files whose contents will be included in the global hash calculation
+ GlobalDepPatterns []string
+ // Patterns are the filter patterns supplied to --filter on the commandline
+ FilterPatterns []string
+
+ PackageInferenceRoot turbopath.RelativeSystemPath
+}
+
+var (
+ _filterHelp = `Use the given selector to specify package(s) to act as
+entry points. The syntax mirrors pnpm's syntax, and
+additional documentation and examples can be found in
+turbo's documentation https://turbo.build/repo/docs/reference/command-line-reference#--filter
+--filter can be specified multiple times. Packages that
+match any filter will be included.`
+ _ignoreHelp = `Files to ignore when calculating changed files (i.e. --since). Supports globs.`
+ _globalDepHelp = `Specify glob of global filesystem dependencies to be hashed. Useful for .env and files
+in the root directory. Includes turbo.json, root package.json, and the root lockfile by default.`
+)
+
+// resolvePackageInferencePath normalizes the package inference path. We compare against ""
+// in several places, so maintain that behavior. In a post-rust-port world, this should more properly be an Option
+func resolvePackageInferencePath(raw string) (turbopath.RelativeSystemPath, error) {
+ pkgInferenceRoot, err := turbopath.CheckedToRelativeSystemPath(raw)
+ if err != nil {
+ return "", errors.Wrapf(err, "invalid package inference root %v", raw)
+ }
+ if pkgInferenceRoot == "." {
+ return "", nil
+ }
+ return pkgInferenceRoot, nil
+}
+
+// OptsFromArgs adds the settings relevant to this package to the given Opts
+func OptsFromArgs(opts *Opts, args *turbostate.ParsedArgsFromRust) error {
+ opts.FilterPatterns = args.Command.Run.Filter
+ opts.IgnorePatterns = args.Command.Run.Ignore
+ opts.GlobalDepPatterns = args.Command.Run.GlobalDeps
+ pkgInferenceRoot, err := resolvePackageInferencePath(args.Command.Run.PkgInferenceRoot)
+ if err != nil {
+ return err
+ }
+ opts.PackageInferenceRoot = pkgInferenceRoot
+ addLegacyFlagsFromArgs(&opts.LegacyFilter, args)
+ return nil
+}
+
+// AsFilterPatterns normalizes legacy selectors to filter syntax
+func (l *LegacyFilter) AsFilterPatterns() []string {
+ var patterns []string
+ prefix := ""
+ if !l.SkipDependents {
+ prefix = "..."
+ }
+ suffix := ""
+ if l.IncludeDependencies {
+ suffix = "..."
+ }
+ since := ""
+ if l.Since != "" {
+ since = fmt.Sprintf("[%v]", l.Since)
+ }
+ if len(l.Entrypoints) > 0 {
+ // --scope implies our tweaked syntax to see if any dependency matches
+ if since != "" {
+ since = "..." + since
+ }
+ for _, pattern := range l.Entrypoints {
+ if strings.HasPrefix(pattern, "!") {
+ patterns = append(patterns, pattern)
+ } else {
+ filterPattern := fmt.Sprintf("%v%v%v%v", prefix, pattern, since, suffix)
+ patterns = append(patterns, filterPattern)
+ }
+ }
+ } else if since != "" {
+ // no scopes specified, but --since was provided
+ filterPattern := fmt.Sprintf("%v%v%v", prefix, since, suffix)
+ patterns = append(patterns, filterPattern)
+ }
+ return patterns
+}
+
+// ResolvePackages translates specified flags to a set of entry point packages for
+// the selected tasks. Returns the selected packages and whether or not the selected
+// packages represent a default "all packages".
+func ResolvePackages(opts *Opts, repoRoot turbopath.AbsoluteSystemPath, scm scm.SCM, ctx *context.Context, tui cli.Ui, logger hclog.Logger) (util.Set, bool, error) {
+ inferenceBase, err := calculateInference(repoRoot, opts.PackageInferenceRoot, ctx.WorkspaceInfos, logger)
+ if err != nil {
+ return nil, false, err
+ }
+ filterResolver := &scope_filter.Resolver{
+ Graph: &ctx.WorkspaceGraph,
+ WorkspaceInfos: ctx.WorkspaceInfos,
+ Cwd: repoRoot,
+ Inference: inferenceBase,
+ PackagesChangedInRange: opts.getPackageChangeFunc(scm, repoRoot, ctx),
+ }
+ filterPatterns := opts.FilterPatterns
+ legacyFilterPatterns := opts.LegacyFilter.AsFilterPatterns()
+ filterPatterns = append(filterPatterns, legacyFilterPatterns...)
+ isAllPackages := len(filterPatterns) == 0 && opts.PackageInferenceRoot == ""
+ filteredPkgs, err := filterResolver.GetPackagesFromPatterns(filterPatterns)
+ if err != nil {
+ return nil, false, err
+ }
+
+ if isAllPackages {
+ // no filters specified, run every package
+ for _, f := range ctx.WorkspaceNames {
+ filteredPkgs.Add(f)
+ }
+ }
+ filteredPkgs.Delete(ctx.RootNode)
+ return filteredPkgs, isAllPackages, nil
+}
+
+func calculateInference(repoRoot turbopath.AbsoluteSystemPath, pkgInferencePath turbopath.RelativeSystemPath, packageInfos workspace.Catalog, logger hclog.Logger) (*scope_filter.PackageInference, error) {
+ if pkgInferencePath == "" {
+ // No inference specified, no need to calculate anything
+ return nil, nil
+ }
+ logger.Debug(fmt.Sprintf("Using %v as a basis for selecting packages", pkgInferencePath))
+ fullInferencePath := repoRoot.Join(pkgInferencePath)
+ for _, pkgInfo := range packageInfos.PackageJSONs {
+ pkgPath := pkgInfo.Dir.RestoreAnchor(repoRoot)
+ inferredPathIsBelow, err := pkgPath.ContainsPath(fullInferencePath)
+ if err != nil {
+ return nil, err
+ }
+ // We skip over the root package as the inferred path will always be below it
+ if inferredPathIsBelow && pkgPath != repoRoot {
+ // set both. The user might have set a parent directory filter,
+ // in which case we *should* fail to find any packages, but we should
+ // do so in a consistent manner
+ return &scope_filter.PackageInference{
+ PackageName: pkgInfo.Name,
+ DirectoryRoot: pkgInferencePath,
+ }, nil
+ }
+ inferredPathIsBetweenRootAndPkg, err := fullInferencePath.ContainsPath(pkgPath)
+ if err != nil {
+ return nil, err
+ }
+ if inferredPathIsBetweenRootAndPkg {
+ // we've found *some* package below our inference directory. We can stop now and conclude
+ // that we're looking for all packages in a subdirectory
+ break
+ }
+ }
+ return &scope_filter.PackageInference{
+ DirectoryRoot: pkgInferencePath,
+ }, nil
+}
+
+func (o *Opts) getPackageChangeFunc(scm scm.SCM, cwd turbopath.AbsoluteSystemPath, ctx *context.Context) scope_filter.PackagesChangedInRange {
+ return func(fromRef string, toRef string) (util.Set, error) {
+ // We could filter changed files at the git level, since it's possible
+ // that the changes we're interested in are scoped, but we need to handle
+ // global dependencies changing as well. A future optimization might be to
+ // scope changed files more deeply if we know there are no global dependencies.
+ var changedFiles []string
+ if fromRef != "" {
+ scmChangedFiles, err := scm.ChangedFiles(fromRef, toRef, cwd.ToStringDuringMigration())
+ if err != nil {
+ return nil, err
+ }
+ sort.Strings(scmChangedFiles)
+ changedFiles = scmChangedFiles
+ }
+ makeAllPkgs := func() util.Set {
+ allPkgs := make(util.Set)
+ for pkg := range ctx.WorkspaceInfos.PackageJSONs {
+ allPkgs.Add(pkg)
+ }
+ return allPkgs
+ }
+ if hasRepoGlobalFileChanged, err := repoGlobalFileHasChanged(o, getDefaultGlobalDeps(), changedFiles); err != nil {
+ return nil, err
+ } else if hasRepoGlobalFileChanged {
+ return makeAllPkgs(), nil
+ }
+
+ filteredChangedFiles, err := filterIgnoredFiles(o, changedFiles)
+ if err != nil {
+ return nil, err
+ }
+ changedPkgs := getChangedPackages(filteredChangedFiles, ctx.WorkspaceInfos)
+
+ if lockfileChanges, fullChanges := getChangesFromLockfile(scm, ctx, changedFiles, fromRef); !fullChanges {
+ for _, pkg := range lockfileChanges {
+ changedPkgs.Add(pkg)
+ }
+ } else {
+ return makeAllPkgs(), nil
+ }
+
+ return changedPkgs, nil
+ }
+}
+
+func getChangesFromLockfile(scm scm.SCM, ctx *context.Context, changedFiles []string, fromRef string) ([]string, bool) {
+ lockfileFilter, err := filter.Compile([]string{ctx.PackageManager.Lockfile})
+ if err != nil {
+ panic(fmt.Sprintf("Lockfile is invalid glob: %v", err))
+ }
+ match := false
+ for _, file := range changedFiles {
+ if lockfileFilter.Match(file) {
+ match = true
+ break
+ }
+ }
+ if !match {
+ return nil, false
+ }
+
+ if lockfile.IsNil(ctx.Lockfile) {
+ return nil, true
+ }
+
+ prevContents, err := scm.PreviousContent(fromRef, ctx.PackageManager.Lockfile)
+ if err != nil {
+ // unable to reconstruct old lockfile, assume everything changed
+ return nil, true
+ }
+ prevLockfile, err := ctx.PackageManager.UnmarshalLockfile(ctx.WorkspaceInfos.PackageJSONs[util.RootPkgName], prevContents)
+ if err != nil {
+ // unable to parse old lockfile, assume everything changed
+ return nil, true
+ }
+ additionalPkgs, err := ctx.ChangedPackages(prevLockfile)
+ if err != nil {
+ // missing at least one lockfile, assume everything changed
+ return nil, true
+ }
+
+ return additionalPkgs, false
+}
+
+func getDefaultGlobalDeps() []string {
+ // include turbo.json and root package.json as implicit global dependencies
+ defaultGlobalDeps := []string{
+ "turbo.json",
+ "package.json",
+ }
+ return defaultGlobalDeps
+}
+
+func repoGlobalFileHasChanged(opts *Opts, defaultGlobalDeps []string, changedFiles []string) (bool, error) {
+ globalDepsGlob, err := filter.Compile(append(opts.GlobalDepPatterns, defaultGlobalDeps...))
+ if err != nil {
+ return false, errors.Wrap(err, "invalid global deps glob")
+ }
+
+ if globalDepsGlob != nil {
+ for _, file := range changedFiles {
+ if globalDepsGlob.Match(filepath.ToSlash(file)) {
+ return true, nil
+ }
+ }
+ }
+ return false, nil
+}
+
+func filterIgnoredFiles(opts *Opts, changedFiles []string) ([]string, error) {
+ // changedFiles is an array of repo-relative system paths.
+ // opts.IgnorePatterns is an array of unix-separator glob paths.
+ ignoreGlob, err := filter.Compile(opts.IgnorePatterns)
+ if err != nil {
+ return nil, errors.Wrap(err, "invalid ignore globs")
+ }
+ filteredChanges := []string{}
+ for _, file := range changedFiles {
+ // If we don't have anything to ignore, or if this file doesn't match the ignore pattern,
+ // keep it as a changed file.
+ if ignoreGlob == nil || !ignoreGlob.Match(filepath.ToSlash(file)) {
+ filteredChanges = append(filteredChanges, file)
+ }
+ }
+ return filteredChanges, nil
+}
+
+func fileInPackage(changedFile string, packagePath string) bool {
+ // This whole method is effectively the regex /^packagePath(\/.*)?$/.
+ // A regex is more expensive, so we check the prefix and the separator byte manually.
+
+ // If it has the prefix, it might be in the package.
+ if strings.HasPrefix(changedFile, packagePath) {
+ // Now we need to see if the prefix stopped at a reasonable boundary.
+ prefixLen := len(packagePath)
+ changedFileLen := len(changedFile)
+
+ // Same path.
+ if prefixLen == changedFileLen {
+ return true
+ }
+
+ // We know changedFile is longer than packagePath.
+ // We can safely directly index into it.
+ // Look ahead one byte and see if it's the separator.
+ if changedFile[prefixLen] == os.PathSeparator {
+ return true
+ }
+ }
+
+ // If it does not have the prefix, it's definitely not in the package.
+ return false
+}
+
+func getChangedPackages(changedFiles []string, packageInfos workspace.Catalog) util.Set {
+ changedPackages := make(util.Set)
+ for _, changedFile := range changedFiles {
+ found := false
+ for pkgName, pkgInfo := range packageInfos.PackageJSONs {
+ if pkgName != util.RootPkgName && fileInPackage(changedFile, pkgInfo.Dir.ToStringDuringMigration()) {
+ changedPackages.Add(pkgName)
+ found = true
+ break
+ }
+ }
+ if !found {
+ // Consider the root package to have changed
+ changedPackages.Add(util.RootPkgName)
+ }
+ }
+ return changedPackages
+}
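For orientation (not part of the diff), a sketch of how the legacy --scope/--since/--include-dependencies flags compile to the newer filter syntax via AsFilterPatterns; the hypothetical helper assumes it lives in this package with "fmt" imported.

func sketchLegacyFilter() {
	// --scope=app1 --since=main --include-dependencies; dependents are included by default.
	legacy := LegacyFilter{
		Entrypoints:         []string{"app1"},
		Since:               "main",
		IncludeDependencies: true,
	}
	fmt.Println(legacy.AsFilterPatterns()) // [...app1...[main]...]

	// --since=main alone selects changed packages and their dependents.
	onlySince := LegacyFilter{Since: "main"}
	fmt.Println(onlySince.AsFilterPatterns()) // [...[main]]
}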
diff --git a/cli/internal/scope/scope_test.go b/cli/internal/scope/scope_test.go
new file mode 100644
index 0000000..216984d
--- /dev/null
+++ b/cli/internal/scope/scope_test.go
@@ -0,0 +1,550 @@
+package scope
+
+import (
+ "fmt"
+ "io"
+ "os"
+ "path/filepath"
+ "reflect"
+ "testing"
+
+ "github.com/hashicorp/go-hclog"
+ "github.com/pyr-sh/dag"
+ "github.com/vercel/turbo/cli/internal/context"
+ "github.com/vercel/turbo/cli/internal/fs"
+ "github.com/vercel/turbo/cli/internal/lockfile"
+ "github.com/vercel/turbo/cli/internal/packagemanager"
+ "github.com/vercel/turbo/cli/internal/turbopath"
+ "github.com/vercel/turbo/cli/internal/ui"
+ "github.com/vercel/turbo/cli/internal/util"
+ "github.com/vercel/turbo/cli/internal/workspace"
+)
+
+type mockSCM struct {
+ changed []string
+ contents map[string][]byte
+}
+
+func (m *mockSCM) ChangedFiles(_fromCommit string, _toCommit string, _relativeTo string) ([]string, error) {
+ return m.changed, nil
+}
+
+func (m *mockSCM) PreviousContent(fromCommit string, filePath string) ([]byte, error) {
+ contents, ok := m.contents[filePath]
+ if !ok {
+ return nil, fmt.Errorf("No contents found")
+ }
+ return contents, nil
+}
+
+type mockLockfile struct {
+ globalChange bool
+ versions map[string]string
+ allDeps map[string]map[string]string
+}
+
+func (m *mockLockfile) ResolvePackage(workspacePath turbopath.AnchoredUnixPath, name string, version string) (lockfile.Package, error) {
+ resolvedVersion, ok := m.versions[name]
+ if ok {
+ key := fmt.Sprintf("%s%s", name, version)
+ return lockfile.Package{Key: key, Version: resolvedVersion, Found: true}, nil
+ }
+ return lockfile.Package{Found: false}, nil
+}
+
+func (m *mockLockfile) AllDependencies(key string) (map[string]string, bool) {
+ deps, ok := m.allDeps[key]
+ return deps, ok
+}
+
+func (m *mockLockfile) Encode(w io.Writer) error {
+ return nil
+}
+
+func (m *mockLockfile) GlobalChange(other lockfile.Lockfile) bool {
+ return m.globalChange || (other != nil && other.(*mockLockfile).globalChange)
+}
+
+func (m *mockLockfile) Patches() []turbopath.AnchoredUnixPath {
+ return nil
+}
+
+func (m *mockLockfile) Subgraph(workspaces []turbopath.AnchoredSystemPath, packages []string) (lockfile.Lockfile, error) {
+ return nil, nil
+}
+
+var _ (lockfile.Lockfile) = (*mockLockfile)(nil)
+
+func TestResolvePackages(t *testing.T) {
+ cwd, err := os.Getwd()
+ if err != nil {
+ t.Fatalf("cwd: %v", err)
+ }
+ root, err := fs.GetCwd(cwd)
+ if err != nil {
+ t.Fatalf("cwd: %v", err)
+ }
+ tui := ui.Default()
+ logger := hclog.Default()
+ // Dependency graph:
+ //
+ // app0 -
+ //        \
+ // app1 -> libA
+ //               \
+ //                > libB -> libD
+ //               /
+ // app2 <
+ //        \
+ //         > libC
+ //        /
+ // app2-a <
+ //
+ // Filesystem layout:
+ //
+ // app/
+ //   app0
+ //   app1
+ //   app2
+ //   app2-a
+ // libs/
+ //   libA
+ //   libB
+ //   libC
+ //   libD
+ graph := dag.AcyclicGraph{}
+ graph.Add("app0")
+ graph.Add("app1")
+ graph.Add("app2")
+ graph.Add("app2-a")
+ graph.Add("libA")
+ graph.Add("libB")
+ graph.Add("libC")
+ graph.Add("libD")
+ graph.Connect(dag.BasicEdge("libA", "libB"))
+ graph.Connect(dag.BasicEdge("libB", "libD"))
+ graph.Connect(dag.BasicEdge("app0", "libA"))
+ graph.Connect(dag.BasicEdge("app1", "libA"))
+ graph.Connect(dag.BasicEdge("app2", "libB"))
+ graph.Connect(dag.BasicEdge("app2", "libC"))
+ graph.Connect(dag.BasicEdge("app2-a", "libC"))
+ workspaceInfos := workspace.Catalog{
+ PackageJSONs: map[string]*fs.PackageJSON{
+ "//": {
+ Dir: turbopath.AnchoredSystemPath("").ToSystemPath(),
+ UnresolvedExternalDeps: map[string]string{"global": "2"},
+ TransitiveDeps: []lockfile.Package{{Key: "global2", Version: "2", Found: true}},
+ },
+ "app0": {
+ Dir: turbopath.AnchoredUnixPath("app/app0").ToSystemPath(),
+ Name: "app0",
+ UnresolvedExternalDeps: map[string]string{"app0-dep": "2"},
+ TransitiveDeps: []lockfile.Package{
+ {Key: "app0-dep2", Version: "2", Found: true},
+ {Key: "app0-util2", Version: "2", Found: true},
+ },
+ },
+ "app1": {
+ Dir: turbopath.AnchoredUnixPath("app/app1").ToSystemPath(),
+ Name: "app1",
+ },
+ "app2": {
+ Dir: turbopath.AnchoredUnixPath("app/app2").ToSystemPath(),
+ Name: "app2",
+ },
+ "app2-a": {
+ Dir: turbopath.AnchoredUnixPath("app/app2-a").ToSystemPath(),
+ Name: "app2-a",
+ },
+ "libA": {
+ Dir: turbopath.AnchoredUnixPath("libs/libA").ToSystemPath(),
+ Name: "libA",
+ },
+ "libB": {
+ Dir: turbopath.AnchoredUnixPath("libs/libB").ToSystemPath(),
+ Name: "libB",
+ UnresolvedExternalDeps: map[string]string{"external": "1"},
+ TransitiveDeps: []lockfile.Package{
+ {Key: "external-dep-a1", Version: "1", Found: true},
+ {Key: "external-dep-b1", Version: "1", Found: true},
+ {Key: "external1", Version: "1", Found: true},
+ },
+ },
+ "libC": {
+ Dir: turbopath.AnchoredUnixPath("libs/libC").ToSystemPath(),
+ Name: "libC",
+ },
+ "libD": {
+ Dir: turbopath.AnchoredUnixPath("libs/libD").ToSystemPath(),
+ Name: "libD",
+ },
+ },
+ }
+ packageNames := []string{}
+ for name := range workspaceInfos.PackageJSONs {
+ packageNames = append(packageNames, name)
+ }
+
+ // External dependency chains modeled by the mock lockfile:
+ // global has no further dependencies
+ // app0-dep -> app0-util
+ // external -> external-dep-a, external-dep-b
+
+ makeLockfile := func(f func(*mockLockfile)) *mockLockfile {
+ l := mockLockfile{
+ globalChange: false,
+ versions: map[string]string{
+ "global": "2",
+ "app0-dep": "2",
+ "app0-util": "2",
+ "external": "1",
+ "external-dep-a": "1",
+ "external-dep-b": "1",
+ },
+ allDeps: map[string]map[string]string{
+ "global2": map[string]string{},
+ "app0-dep2": map[string]string{
+ "app0-util": "2",
+ },
+ "app0-util2": map[string]string{},
+ "external1": map[string]string{
+ "external-dep-a": "1",
+ "external-dep-b": "1",
+ },
+ "external-dep-a1": map[string]string{},
+ "external-dep-b1": map[string]string{},
+ },
+ }
+ if f != nil {
+ f(&l)
+ }
+ return &l
+ }
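The mock models resolution in two steps: versions maps a dependency name to its resolved version, and allDeps maps the resulting name+version key to that package's own dependencies. Walking app0's external dependency through the mock data chains together roughly as follows; this is a sketch that would sit inside TestResolvePackages, where makeLockfile is defined (the real traversal lives in scope.go and is not shown here, and the mock never returns errors, so they are ignored):

l := makeLockfile(nil)
pkg, _ := l.ResolvePackage(turbopath.AnchoredUnixPath("app/app0"), "app0-dep", "2")
// pkg.Key == "app0-dep2" (name + version), pkg.Version == "2", pkg.Found == true
deps, _ := l.AllDependencies(pkg.Key)
// deps == map[string]string{"app0-util": "2"}
next, _ := l.ResolvePackage(turbopath.AnchoredUnixPath("app/app0"), "app0-util", deps["app0-util"])
more, _ := l.AllDependencies(next.Key)
// next.Key == "app0-util2"; more is empty, so the chain terminates here
_ = more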
+
+ testCases := []struct {
+ name string
+ changed []string
+ expected []string
+ expectAllPackages bool
+ scope []string
+ since string
+ ignore string
+ globalDeps []string
+ includeDependencies bool
+ includeDependents bool
+ lockfile string
+ currLockfile *mockLockfile
+ prevLockfile *mockLockfile
+ inferPkgPath string
+ }{
+ {
+ name: "Just scope and dependencies",
+ changed: []string{},
+ includeDependencies: true,
+ scope: []string{"app2"},
+ expected: []string{"app2", "libB", "libC", "libD"},
+ },
+ {
+ name: "Only turbo.json changed",
+ changed: []string{"turbo.json"},
+ expected: []string{"//", "app0", "app1", "app2", "app2-a", "libA", "libB", "libC", "libD"},
+ since: "dummy",
+ includeDependencies: true,
+ },
+ {
+ name: "Only root package.json changed",
+ changed: []string{"package.json"},
+ expected: []string{"//", "app0", "app1", "app2", "app2-a", "libA", "libB", "libC", "libD"},
+ since: "dummy",
+ includeDependencies: true,
+ },
+ {
+ name: "Only package-lock.json changed",
+ changed: []string{"package-lock.json"},
+ expected: []string{"//", "app0", "app1", "app2", "app2-a", "libA", "libB", "libC", "libD"},
+ since: "dummy",
+ includeDependencies: true,
+ lockfile: "package-lock.json",
+ },
+ {
+ name: "Only yarn.lock changed",
+ changed: []string{"yarn.lock"},
+ expected: []string{"//", "app0", "app1", "app2", "app2-a", "libA", "libB", "libC", "libD"},
+ since: "dummy",
+ includeDependencies: true,
+ lockfile: "yarn.lock",
+ },
+ {
+ name: "Only pnpm-lock.yaml changed",
+ changed: []string{"pnpm-lock.yaml"},
+ expected: []string{"//", "app0", "app1", "app2", "app2-a", "libA", "libB", "libC", "libD"},
+ since: "dummy",
+ includeDependencies: true,
+ lockfile: "pnpm-lock.yaml",
+ },
+ {
+ name: "One package changed",
+ changed: []string{"libs/libB/src/index.ts"},
+ expected: []string{"libB"},
+ since: "dummy",
+ },
+ {
+ name: "One package manifest changed",
+ changed: []string{"libs/libB/package.json"},
+ expected: []string{"libB"},
+ since: "dummy",
+ },
+ {
+ name: "An ignored package changed",
+ changed: []string{"libs/libB/src/index.ts"},
+ expected: []string{},
+ since: "dummy",
+ ignore: "libs/libB/**/*.ts",
+ },
+ {
+ // nothing in scope depends on the change
+ name: "unrelated library changed",
+ changed: []string{"libs/libC/src/index.ts"},
+ expected: []string{},
+ since: "dummy",
+ scope: []string{"app1"},
+ includeDependencies: true, // scope implies include-dependencies
+ },
+ {
+ // a dependency of the scope changed; scope implies include-dependencies,
+ // so app1 and all of its dependencies get built
+ name: "dependency of scope changed",
+ changed: []string{"libs/libA/src/index.ts"},
+ expected: []string{"libA", "libB", "libD", "app1"},
+ since: "dummy",
+ scope: []string{"app1"},
+ includeDependencies: true, // scope implies include-dependencies
+ },
+ {
+ // a dependency of the scope changed, but the user explicitly asked not to build dependencies.
+ // Since the package matching the scope had a changed dependency, we run it.
+ // We don't include its dependencies because the user asked for no dependencies.
+ // note: this is not yet supported by the CLI, as you cannot specify --include-dependencies=false
+ name: "dependency of scope changed, user asked to not include depedencies",
+ changed: []string{"libs/libA/src/index.ts"},
+ expected: []string{"app1"},
+ since: "dummy",
+ scope: []string{"app1"},
+ includeDependencies: false,
+ },
+ {
+ // a transitive dependency of the scope changed, and the user explicitly asked not to build dependencies
+ // note: this is not yet supported by the CLI, as you cannot specify --include-dependencies=false
+ name: "nested dependency of scope changed, user asked to not include dependencies",
+ changed: []string{"libs/libB/src/index.ts"},
+ expected: []string{"app1"},
+ since: "dummy",
+ scope: []string{"app1"},
+ includeDependencies: false,
+ },
+ {
+ name: "global dependency changed, even though it was ignored, forcing a build of everything",
+ changed: []string{"libs/libB/src/index.ts"},
+ expected: []string{"//", "app0", "app1", "app2", "app2-a", "libA", "libB", "libC", "libD"},
+ since: "dummy",
+ ignore: "libs/libB/**/*.ts",
+ globalDeps: []string{"libs/**/*.ts"},
+ },
+ {
+ name: "an app changed, user asked for dependencies to build",
+ changed: []string{"app/app2/src/index.ts"},
+ since: "dummy",
+ includeDependencies: true,
+ expected: []string{"app2", "libB", "libC", "libD"},
+ },
+ {
+ name: "a library changed, user asked for dependents to be built",
+ changed: []string{"libs/libB"},
+ since: "dummy",
+ includeDependents: true,
+ expected: []string{"app0", "app1", "app2", "libA", "libB"},
+ },
+ {
+ // no changes, no base to compare against, defaults to everything
+ name: "no changes or scope specified, build everything",
+ since: "",
+ expected: []string{"//", "app0", "app1", "app2", "app2-a", "libA", "libB", "libC", "libD"},
+ expectAllPackages: true,
+ },
+ {
+ // a dependency below the scope changed; no dependents beyond the scope are built.
+ // "libB" is still built because it is a dependent (of libD) within the scope, but libB's
+ // own dependents are skipped
+ name: "a dependent library changed, build up to scope",
+ changed: []string{"libs/libD/src/index.ts"},
+ since: "dummy",
+ scope: []string{"libB"},
+ expected: []string{"libB", "libD"},
+ includeDependencies: true, // scope implies include-dependencies
+ },
+ {
+ name: "library change, no scope",
+ changed: []string{"libs/libA/src/index.ts"},
+ expected: []string{"libA", "app0", "app1"},
+ includeDependents: true,
+ since: "dummy",
+ },
+ {
+ // make sure apps whose directories share a common path prefix (app2 vs app2-a) are attributed separately.
+ // prevents this issue: https://github.com/vercel/turbo/issues/1528
+ name: "Two apps with an overlapping prefix changed",
+ changed: []string{"app/app2/src/index.js", "app/app2-a/src/index.js"},
+ expected: []string{"app2", "app2-a"},
+ since: "dummy",
+ },
+ {
+ name: "Global lockfile change invalidates all packages",
+ changed: []string{"dummy.lock"},
+ expected: []string{"//", "app0", "app1", "app2", "app2-a", "libA", "libB", "libC", "libD"},
+ lockfile: "dummy.lock",
+ currLockfile: makeLockfile(nil),
+ prevLockfile: makeLockfile(func(ml *mockLockfile) {
+ ml.globalChange = true
+ }),
+ since: "dummy",
+ },
+ {
+ name: "Dependency of workspace root change invalidates all packages",
+ changed: []string{"dummy.lock"},
+ expected: []string{"//", "app0", "app1", "app2", "app2-a", "libA", "libB", "libC", "libD"},
+ lockfile: "dummy.lock",
+ currLockfile: makeLockfile(nil),
+ prevLockfile: makeLockfile(func(ml *mockLockfile) {
+ ml.versions["global"] = "3"
+ ml.allDeps["global3"] = map[string]string{}
+ }),
+ since: "dummy",
+ },
+ {
+ name: "Version change invalidates package",
+ changed: []string{"dummy.lock"},
+ expected: []string{"//", "app0"},
+ lockfile: "dummy.lock",
+ currLockfile: makeLockfile(nil),
+ prevLockfile: makeLockfile(func(ml *mockLockfile) {
+ ml.versions["app0-util"] = "3"
+ ml.allDeps["app0-dep2"] = map[string]string{"app0-util": "3"}
+ ml.allDeps["app0-util3"] = map[string]string{}
+ }),
+ since: "dummy",
+ },
+ {
+ name: "Transitive dep invalidates package",
+ changed: []string{"dummy.lock"},
+ expected: []string{"//", "libB"},
+ lockfile: "dummy.lock",
+ currLockfile: makeLockfile(nil),
+ prevLockfile: makeLockfile(func(ml *mockLockfile) {
+ ml.versions["external-dep-a"] = "2"
+ ml.allDeps["external1"] = map[string]string{"external-dep-a": "2", "external-dep-b": "1"}
+ ml.allDeps["external-dep-a2"] = map[string]string{}
+ }),
+ since: "dummy",
+ },
+ {
+ name: "Transitive dep invalidates package and dependents",
+ changed: []string{"dummy.lock"},
+ expected: []string{"//", "app0", "app1", "app2", "libA", "libB"},
+ lockfile: "dummy.lock",
+ includeDependents: true,
+ currLockfile: makeLockfile(nil),
+ prevLockfile: makeLockfile(func(ml *mockLockfile) {
+ ml.versions["external-dep-a"] = "2"
+ ml.allDeps["external1"] = map[string]string{"external-dep-a": "2", "external-dep-b": "1"}
+ ml.allDeps["external-dep-a2"] = map[string]string{}
+ }),
+ since: "dummy",
+ },
+ {
+ name: "Infer app2 from directory",
+ inferPkgPath: "app/app2",
+ expected: []string{"app2"},
+ },
+ {
+ name: "Infer app2 from a subdirectory",
+ inferPkgPath: "app/app2/src",
+ expected: []string{"app2"},
+ },
+ {
+ name: "Infer from a directory with no packages",
+ inferPkgPath: "wrong",
+ expected: []string{},
+ },
+ {
+ name: "Infer from a parent directory",
+ inferPkgPath: "app",
+ expected: []string{"app0", "app1", "app2", "app2-a"},
+ },
+ {
+ name: "library change, no scope, inferred libs",
+ changed: []string{"libs/libA/src/index.ts"},
+ expected: []string{"libA"},
+ since: "dummy",
+ inferPkgPath: "libs",
+ },
+ {
+ name: "library change, no scope, inferred app",
+ changed: []string{"libs/libA/src/index.ts"},
+ expected: []string{},
+ since: "dummy",
+ inferPkgPath: "app",
+ },
+ }
+ for i, tc := range testCases {
+ t.Run(fmt.Sprintf("test #%v %v", i, tc.name), func(t *testing.T) {
+ // Convert test data to system separators.
+ systemSeparatorChanged := make([]string, len(tc.changed))
+ for index, path := range tc.changed {
+ systemSeparatorChanged[index] = filepath.FromSlash(path)
+ }
+ scm := &mockSCM{
+ changed: systemSeparatorChanged,
+ contents: make(map[string][]byte, len(systemSeparatorChanged)),
+ }
+ for _, path := range systemSeparatorChanged {
+ scm.contents[path] = nil
+ }
+ readLockfile := func(_rootPackageJSON *fs.PackageJSON, content []byte) (lockfile.Lockfile, error) {
+ return tc.prevLockfile, nil
+ }
+ pkgInferenceRoot, err := resolvePackageInferencePath(tc.inferPkgPath)
+ if err != nil {
+ t.Errorf("bad inference path (%v): %v", tc.inferPkgPath, err)
+ }
+ pkgs, isAllPackages, err := ResolvePackages(&Opts{
+ LegacyFilter: LegacyFilter{
+ Entrypoints: tc.scope,
+ Since: tc.since,
+ IncludeDependencies: tc.includeDependencies,
+ SkipDependents: !tc.includeDependents,
+ },
+ IgnorePatterns: []string{tc.ignore},
+ GlobalDepPatterns: tc.globalDeps,
+ PackageInferenceRoot: pkgInferenceRoot,
+ }, root, scm, &context.Context{
+ WorkspaceInfos: workspaceInfos,
+ WorkspaceNames: packageNames,
+ PackageManager: &packagemanager.PackageManager{Lockfile: tc.lockfile, UnmarshalLockfile: readLockfile},
+ WorkspaceGraph: graph,
+ RootNode: "root",
+ Lockfile: tc.currLockfile,
+ }, tui, logger)
+ if err != nil {
+ t.Errorf("expected no error, got %v", err)
+ }
+ expected := make(util.Set)
+ for _, pkg := range tc.expected {
+ expected.Add(pkg)
+ }
+ if !reflect.DeepEqual(pkgs, expected) {
+ t.Errorf("ResolvePackages got %v, want %v", pkgs, expected)
+ }
+ if isAllPackages != tc.expectAllPackages {
+ t.Errorf("isAllPackages got %v, want %v", isAllPackages, tc.expectAllPackages)
+ }
+ })
+ }
+}
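One detail worth noting: the FromSlash conversion at the top of the test loop is what keeps these cases portable, because fileInPackage in scope.go compares the byte after the directory prefix against os.PathSeparator, so on Windows the changed paths must arrive with backslashes or nothing would be attributed to a package. A trivial stdlib illustration (not turbo code):

package main

import (
    "fmt"
    "path/filepath"
)

func main() {
    // No-op on Unix-like systems; rewrites '/' to '\' on Windows.
    fmt.Println(filepath.FromSlash("app/app2/src/index.js"))
    // Unix:    app/app2/src/index.js
    // Windows: app\app2\src\index.js
}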