This repository has been archived by the owner on Sep 9, 2020. It is now read-only.

Vendor verification #1912

Merged: 26 commits into master from verify-vendor, Jul 11, 2018

Changes from 1 commit

Commits (26)
c7e220d  Remove InputsDigest, add InputImports (sdboyer, Jun 14, 2018)
db8b66b  dep: Introduce lock verification logic (sdboyer, Jun 15, 2018)
f23ef51  gps: Convert LockedProject to an interface (sdboyer, Jun 21, 2018)
81b3a3d  gps: Introduce verify subpackage (sdboyer, Jun 25, 2018)
485c74e  dep: Add foundation for verified, pruned vendor (sdboyer, Jun 25, 2018)
4cb57f5  dep: Make DeltaWriter use temp sibling vendor dir (sdboyer, Jun 26, 2018)
bce4a36  gps: Remove all reference to InputsDigest (sdboyer, Jun 26, 2018)
0b2482d  verify: Relocate lock diffing and tree hashing (sdboyer, Jun 26, 2018)
df2c26b  dep: Get DeltaWriter into a working state (sdboyer, Jun 28, 2018)
13ec211  gps: Diff->Delta, and bitfield for change checking (sdboyer, Jun 28, 2018)
6b47f58  dep: Tell the user why we're solving (sdboyer, Jul 1, 2018)
f00e828  dep: Update scads of tests (sdboyer, Jul 3, 2018)
9d4eca8  gps: Collapse LockWithImports into Lock (sdboyer, Jul 4, 2018)
3c60abc  dep: Make DeltaWriter sensitive to -vendor-only (sdboyer, Jul 4, 2018)
08a4349  dep: Fix linting issues, update CHANGELOG (sdboyer, Jul 4, 2018)
812b8c1  dep: Linting fixes (sdboyer, Jul 4, 2018)
d22fbb8  dep: Implement DeltaWriter.PrintPreparedActions() (sdboyer, Jul 7, 2018)
4b02ee0  dep: Use DeltaWriter on -no-vendor path, as well (sdboyer, Jul 8, 2018)
80eeec7  gps/verify: Add tests for LockSatisfaction (sdboyer, Jul 9, 2018)
0db8f69  gps/verify: Add LockDiff unit tests (sdboyer, Jul 9, 2018)
69991c7  dep: Fix -vendor-only path's invocation ordering (sdboyer, Jul 9, 2018)
5c8a1e3  Merge branch 'master' into verify-vendor (sdboyer, Jul 9, 2018)
d7a412f  dep: Update docs to reflect vendor verification (sdboyer, Jul 9, 2018)
fe299f7  dep: Switch back to SafeWriter for -vendor-only (sdboyer, Jul 9, 2018)
a34a48d  dep: Encapsulate vendor verification into method (sdboyer, Jul 10, 2018)
fc9484a  Add missing license header (sdboyer, Jul 10, 2018)
dep: Introduce lock verification logic
This mostly supplants the hash comparison-based checking, though it's
still in rough form.
sdboyer committed Jul 3, 2018
commit db8b66b3078a5ab11fa7020c8dacba54121f777e
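
For orientation, a minimal sketch of the shape of this change (assumed helper names, not code from the PR): the old path had to prepare a solver and hash its inputs just to decide whether Gopkg.lock was current, while the new path asks the Project directly.

    package sketch

    import (
        "bytes"

        "github.com/golang/dep"
        "github.com/golang/dep/gps"
    )

    // lockInSyncOld mirrors the pre-change check: prepare a solver, hash its
    // inputs, and compare that hash against the digest memoized in the lock.
    // It relies on the InputsDigest API that this PR removes, so it is shown
    // only for contrast.
    func lockInSyncOld(p *dep.Project, params gps.SolveParameters, sm gps.SourceManager) (bool, error) {
        solver, err := gps.Prepare(params, sm)
        if err != nil {
            return false, err
        }
        return p.Lock != nil && bytes.Equal(p.Lock.InputsDigest(), solver.HashInputs()), nil
    }

    // lockInSyncNew mirrors the post-change check: ask the Project whether the
    // lock satisfies the manifest and the project's imports; no solver needs
    // to be prepared up front.
    func lockInSyncNew(p *dep.Project, sm gps.SourceManager) (bool, error) {
        lsat, err := p.LockSatisfiesInputs(sm)
        if err != nil {
            return false, err
        }
        return lsat.Passed(), nil
    }
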
32 changes: 9 additions & 23 deletions cmd/dep/ensure.go
@@ -256,13 +256,9 @@ func (cmd *ensureCommand) runDefault(ctx *dep.Ctx, args []string, p *dep.Project
return err
}

solver, err := gps.Prepare(params, sm)
if err != nil {
return errors.Wrap(err, "prepare solver")
}

if p.Lock != nil && bytes.Equal(p.Lock.InputsDigest(), solver.HashInputs()) {
// Memo matches, so there's probably nothing to do.
if lsat, err := p.LockSatisfiesInputs(sm); err != nil {
return err
} else if lsat.Passed() {
if ctx.Verbose {
ctx.Out.Printf("%s was already in sync with imports and %s\n", dep.LockName, dep.ManifestName)
}
@@ -272,13 +268,7 @@ func (cmd *ensureCommand) runDefault(ctx *dep.Ctx, args []string, p *dep.Project
return nil
}

// TODO(sdboyer) The desired behavior at this point is to determine
// whether it's necessary to write out vendor, or if it's already
// consistent with the lock. However, we haven't yet determined what
// that "verification" is supposed to look like (#121); in the meantime,
// we unconditionally write out vendor/ so that `dep ensure`'s behavior
// is maximally compatible with what it will eventually become.
sw, err := dep.NewSafeWriter(nil, p.Lock, p.Lock, dep.VendorAlways, p.Manifest.PruneOptions)
sw, err := dep.NewSafeWriter(nil, p.Lock, p.Lock, dep.VendorOnChanged, p.Manifest.PruneOptions)
if err != nil {
return err
}
@@ -294,6 +284,11 @@ func (cmd *ensureCommand) runDefault(ctx *dep.Ctx, args []string, p *dep.Project
return errors.WithMessage(sw.Write(p.AbsRoot, sm, true, logger), "grouped write of manifest, lock and vendor")
}

solver, err := gps.Prepare(params, sm)
if err != nil {
return errors.Wrap(err, "prepare solver")
}

if cmd.noVendor && cmd.dryRun {
return errors.New("Gopkg.lock was not up to date")
}
@@ -361,15 +356,6 @@ func (cmd *ensureCommand) runUpdate(ctx *dep.Ctx, args []string, p *dep.Project,
return errors.Wrap(err, "fastpath solver prepare")
}

// Compare the hashes. If they're not equal, bail out and ask the user to
// run a straight `dep ensure` before updating. This is handholding the
// user a bit, but the extra effort required is minimal, and it ensures the
// user is isolating variables in the event of solve problems (was it the
// "pending" changes, or the -update that caused the problem?).
if !bytes.Equal(p.Lock.InputsDigest(), solver.HashInputs()) {
ctx.Out.Printf("Warning: %s is out of sync with %s or the project's imports.", dep.LockName, dep.ManifestName)
}

// When -update is specified without args, allow every dependency to change
// versions, regardless of the lock file.
if len(args) == 0 {
3 changes: 2 additions & 1 deletion lock.go
@@ -29,6 +29,7 @@ type SolveMeta struct {
AnalyzerVersion int
SolverName string
SolverVersion int
InputImports []string
}

type rawLock struct {
@@ -100,7 +101,7 @@ func fromRawLock(raw rawLock) (*Lock, error) {
ProjectRoot: gps.ProjectRoot(ld.Name),
Source: ld.Source,
}
l.P[i] = gps.NewLockedProject(id, v, ld.Packages, ld.Imports)
l.P[i] = gps.NewLockedProject(id, v, ld.Packages)
}

return l, nil
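
To make the new SolveMeta.InputImports field concrete, here is a small, self-contained sketch (illustrative only; the function and the import paths in main are just sample data) of the set comparison that LockSatisfiesInputs in project.go below performs: the imports reachable from the project, plus required packages, are diffed against the import list recorded in the lock.

    package main

    import "fmt"

    // diffImports reports which import paths are demanded by the current inputs
    // but absent from the lock's recorded input imports, and which recorded
    // imports are no longer demanded by the inputs.
    func diffImports(inputs, lockImports []string) (missingFromLock, excessInLock []string) {
        inLock := make(map[string]bool, len(lockImports))
        for _, imp := range lockImports {
            inLock[imp] = true
        }

        inInputs := make(map[string]bool, len(inputs))
        for _, imp := range inputs {
            inInputs[imp] = true
            if !inLock[imp] {
                missingFromLock = append(missingFromLock, imp)
            }
        }

        for _, imp := range lockImports {
            if !inInputs[imp] {
                excessInLock = append(excessInLock, imp)
            }
        }
        return missingFromLock, excessInLock
    }

    func main() {
        missing, excess := diffImports(
            []string{"github.com/pkg/errors", "github.com/boltdb/bolt"},
            []string{"github.com/pkg/errors", "github.com/armon/go-radix"},
        )
        fmt.Println("missing from lock:", missing) // [github.com/boltdb/bolt]
        fmt.Println("excess in lock:", excess)     // [github.com/armon/go-radix]
    }
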
142 changes: 142 additions & 0 deletions project.go
@@ -219,6 +219,148 @@ func (p *Project) GetDirectDependencyNames(sm gps.SourceManager) (pkgtree.Packag
return ptree, directDeps, nil
}

type lockUnsatisfy uint8

const (
missingFromLock lockUnsatisfy = iota
inAdditionToLock
)

type constraintMismatch struct {
c gps.Constraint
v gps.Version
}

type constraintMismatches map[gps.ProjectRoot]constraintMismatch

type LockSatisfaction struct {
nolock bool
missingPkgs, excessPkgs []string
pkgs map[string]lockUnsatisfy
badovr, badconstraint constraintMismatches
}

// Passed is a shortcut method to check if any problems with the evaluated lock
// were identified.
func (ls LockSatisfaction) Passed() bool {
if ls.nolock {
return false
}

if len(ls.pkgs) > 0 {
return false
}

if len(ls.badovr) > 0 {
return false
}

if len(ls.badconstraint) > 0 {
return false
}

return true
}

func (ls LockSatisfaction) MissingPackages() []string {
return ls.missingPkgs
}

func (ls LockSatisfaction) ExcessPackages() []string {
return ls.excessPkgs
}

func (ls LockSatisfaction) UnmatchedOverrides() map[gps.ProjectRoot]constraintMismatch {
return ls.badovr
}

func (ls LockSatisfaction) UnmatchedConstraints() map[gps.ProjectRoot]constraintMismatch {
return ls.badconstraint
}

// LockSatisfiesInputs determines whether the Project's lock satisfies all the
// requirements indicated by the inputs (Manifest and RootPackageTree).
func (p *Project) LockSatisfiesInputs(sm gps.SourceManager) (LockSatisfaction, error) {
if p.Lock == nil {
return LockSatisfaction{nolock: true}, nil
}

ptree, err := p.ParseRootPackageTree()
if err != nil {
return LockSatisfaction{}, err
}

var ig *pkgtree.IgnoredRuleset
var req map[string]bool
if p.Manifest != nil {
ig = p.Manifest.IgnoredPackages()
req = p.Manifest.RequiredPackages()
}

rm, _ := ptree.ToReachMap(true, true, false, ig)
reach := rm.FlattenFn(paths.IsStandardImportPath)

inlock := make(map[string]bool, len(p.Lock.SolveMeta.InputImports))
ininputs := make(map[string]bool, len(reach)+len(req))

for _, imp := range reach {
ininputs[imp] = true
}

for imp := range req {
ininputs[imp] = true
}

for _, imp := range p.Lock.SolveMeta.InputImports {
inlock[imp] = true
}

lsat := LockSatisfaction{
pkgs: make(map[string]lockUnsatisfy),
badovr: make(constraintMismatches),
badconstraint: make(constraintMismatches),
}

for ip := range ininputs {
if !inlock[ip] {
lsat.pkgs[ip] = missingFromLock
} else {
// So we don't have to revisit it below
delete(inlock, ip)
}
}

for ip := range inlock {
if !ininputs[ip] {
lsat.pkgs[ip] = inAdditionToLock
}
}

ineff := make(map[string]bool)
for _, pr := range p.FindIneffectualConstraints(sm) {
ineff[string(pr)] = true
}

for _, lp := range p.Lock.Projects() {
pr := lp.Ident().ProjectRoot

if pp, has := p.Manifest.Ovr[pr]; has && !pp.Constraint.Matches(lp.Version()) {
lsat.badovr[pr] = constraintMismatch{
c: pp.Constraint,
v: lp.Version(),
}
}

if pp, has := p.Manifest.Constraints[pr]; has && !ineff[string(pr)] && !pp.Constraint.Matches(lp.Version()) {
lsat.badconstraint[pr] = constraintMismatch{
c: pp.Constraint,
v: lp.Version(),
}
}
}

return lsat, nil
}

// FindIneffectualConstraints looks for constraint rules expressed in the
// manifest that will have no effect during solving, as they are specified for
// projects that are not direct dependencies of the Project.
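
Finally, a hypothetical helper (not part of this PR) showing how the LockSatisfaction accessors above could be turned into user-facing diagnostics. It is written as if it lived in package dep itself, since constraintMismatch and its fields are unexported:

    package dep

    import (
        "fmt"

        "github.com/golang/dep/gps"
    )

    // explainLockSatisfaction prints each way in which the lock fails to
    // satisfy the current manifest and project imports.
    func explainLockSatisfaction(p *Project, sm gps.SourceManager) error {
        lsat, err := p.LockSatisfiesInputs(sm)
        if err != nil {
            return err
        }
        if lsat.Passed() {
            fmt.Printf("%s is in sync with %s and the project's imports\n", LockName, ManifestName)
            return nil
        }
        for _, pkg := range lsat.MissingPackages() {
            fmt.Printf("%s is imported or required, but missing from %s\n", pkg, LockName)
        }
        for _, pkg := range lsat.ExcessPackages() {
            fmt.Printf("%s is in %s, but is no longer imported or required\n", pkg, LockName)
        }
        for pr, mismatch := range lsat.UnmatchedOverrides() {
            fmt.Printf("%s@%s does not match the override %s\n", pr, mismatch.v, mismatch.c)
        }
        for pr, mismatch := range lsat.UnmatchedConstraints() {
            fmt.Printf("%s@%s does not match the constraint %s\n", pr, mismatch.v, mismatch.c)
        }
        return nil
    }
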