/*
Copyright 2016 The Kubernetes Authors All rights reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

// Package generators has the generators for the import-boss utility.
package generators

import (
	"encoding/json"
	"fmt"
	"io"
	"io/ioutil"
	"os"
	"path/filepath"
	"regexp"
	"sort"
	"strings"

	"k8s.io/kubernetes/cmd/libs/go2idl/args"
	"k8s.io/kubernetes/cmd/libs/go2idl/generator"
	"k8s.io/kubernetes/cmd/libs/go2idl/namer"
	"k8s.io/kubernetes/cmd/libs/go2idl/types"

	//"github.com/golang/glog"
)

const (
	importBossFileType = "import-boss"
)

// NameSystems returns the name system used by the generators in this package.
func NameSystems() namer.NameSystems {
	return namer.NameSystems{
		"raw": namer.NewRawNamer("", nil),
	}
}

// DefaultNameSystem returns the default name system for ordering the types to be
// processed by the generators in this package.
func DefaultNameSystem() string {
	return "raw"
}

// Packages makes the import-boss package definition.
func Packages(c *generator.Context, arguments *args.GeneratorArgs) generator.Packages {
	pkgs := generator.Packages{}
	c.FileTypes = map[string]generator.FileType{
		importBossFileType: importRuleFile{},
	}

	for _, p := range c.Universe {
		if !arguments.InputIncludes(p) {
			// Don't run on e.g. third party dependencies.
			continue
		}
		savedPackage := p
		pkgs = append(pkgs, &generator.DefaultPackage{
			PackageName: p.Name,
			PackagePath: p.Path,
			// GeneratorFunc returns a list of generators. Each generator makes a
			// single file.
			GeneratorFunc: func(c *generator.Context) (generators []generator.Generator) {
				return []generator.Generator{&importRules{
					myPackage: savedPackage,
				}}
			},
			// This generator produces no per-type output, so filter out all types.
			FilterFunc: func(c *generator.Context, t *types.Type) bool {
				return false
			},
		})
	}

	return pkgs
}
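
// For orientation, a minimal sketch of how a main package might wire these
// generators into the usual go2idl args flow. This is an assumption for
// illustration, not part of this file; in particular the import path of the
// generators package below is hypothetical.
//
//	package main
//
//	import (
//		"k8s.io/kubernetes/cmd/libs/go2idl/args"
//		"k8s.io/kubernetes/cmd/libs/go2idl/import-boss/generators"
//	)
//
//	func main() {
//		arguments := args.Default()
//		if err := arguments.Execute(
//			generators.NameSystems(),
//			generators.DefaultNameSystem(),
//			generators.Packages,
//		); err != nil {
//			panic(err)
//		}
//	}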

// Rule is a single import restriction rule.
type Rule struct {
	// All import paths that match this regexp...
	SelectorRegexp string
	// ... must have one of these prefixes ...
	AllowedPrefixes []string
	// ... and must not have one of these prefixes.
	ForbiddenPrefixes []string
}

// fileFormat is the on-disk format of an .import-restrictions file.
type fileFormat struct {
	CurrentImports []string

	Rules []Rule
}
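
// To illustrate the format, a hypothetical .import-restrictions file might
// look like the following (the regexp and prefixes are made up for this
// example):
//
//	{
//		"Rules": [
//			{
//				"SelectorRegexp": "k8s[.]io",
//				"AllowedPrefixes": [
//					"k8s.io/kubernetes/pkg/util"
//				],
//				"ForbiddenPrefixes": [
//					"k8s.io/kubernetes/pkg/kubelet"
//				]
//			}
//		]
//	}
//
// CurrentImports is informational and is filled in by the generator (when
// write-back is enabled) rather than written by hand.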

func readFile(path string) (*fileFormat, error) {
	currentBytes, err := ioutil.ReadFile(path)
	if err != nil {
		return nil, fmt.Errorf("couldn't read %v: %v", path, err)
	}

	var current fileFormat
	err = json.Unmarshal(currentBytes, &current)
	if err != nil {
		return nil, fmt.Errorf("couldn't unmarshal %v: %v", path, err)
	}
	return &current, nil
}

func writeFile(path string, ff *fileFormat) error {
	raw, err := json.MarshalIndent(ff, "", "\t")
	if err != nil {
		return fmt.Errorf("couldn't format data for file %v: %v\n%#v", path, err, ff)
	}
	f, err := os.Create(path)
	if err != nil {
		return fmt.Errorf("couldn't open %v for writing: %v", path, err)
	}
	defer f.Close()
	_, err = f.Write(raw)
	return err
}

// importRuleFile does the actual checking, since it knows the literal destination file.
type importRuleFile struct{}

func (importRuleFile) AssembleFile(f *generator.File, path string) error {
	// Write-back of the package's current imports is disabled for now; the
	// early return below skips it. If enabled, it would populate the file's
	// CurrentImports, which is mostly to help humans figure out what they
	// need to fix.
	// TODO: add a command line flag to enable this? Or require that it always stay up-to-date?
	return nil

	if _, err := os.Stat(path); err != nil {
		// Ignore packages which haven't opted in by adding an .import-restrictions file.
		return nil
	}

	current, err := readFile(path)
	if err != nil {
		return err
	}

	current.CurrentImports = []string{}
	for v := range f.Imports {
		current.CurrentImports = append(current.CurrentImports, v)
	}
	sort.Strings(current.CurrentImports)

	return writeFile(path, current)
}

// removeLastDir removes the last directory, but leaves the file name
// unchanged. It returns the new path and the removed directory. So:
// "a/b/c/file" -> ("a/b/file", "c")
func removeLastDir(path string) (newPath, removedDir string) {
	dir, file := filepath.Split(path)
	dir = strings.TrimSuffix(dir, string(filepath.Separator))
	return filepath.Join(filepath.Dir(dir), file), filepath.Base(dir)
}

// recursiveRead keeps going up a directory until it finds an .import-restrictions
// file. It stops once it can't go up any further or would climb out of a "src"
// directory, and returns a nil *fileFormat (with no error) if no file was found.
func recursiveRead(path string) (*fileFormat, string, error) {
	for {
		if _, err := os.Stat(path); err == nil {
			ff, err := readFile(path)
			return ff, path, err
		}

		nextPath, removedDir := removeLastDir(path)
		if nextPath == path || removedDir == "src" {
			break
		}
		path = nextPath
	}
	return nil, "", nil
}
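
// As an illustration (the path below is hypothetical), verifying
// "pkg/api/.import-restrictions" would consult, in order:
//
//	pkg/api/.import-restrictions
//	pkg/.import-restrictions
//	.import-restrictions
//
// and use the first file that exists, with the search cut short if it would
// otherwise ascend past a "src" directory.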

func (importRuleFile) VerifyFile(f *generator.File, path string) error {
	rules, actualPath, err := recursiveRead(path)
	if err != nil {
		return fmt.Errorf("error finding rules file: %v", err)
	}

	if rules == nil {
		// No restrictions on this directory.
		return nil
	}

	for _, r := range rules.Rules {
		re, err := regexp.Compile(r.SelectorRegexp)
		if err != nil {
			return fmt.Errorf("regexp `%s` in file %q doesn't compile: %v", r.SelectorRegexp, actualPath, err)
		}
		for v := range f.Imports {
			// fmt.Printf("Checking %v matches %v: %v\n", r.SelectorRegexp, v, re.MatchString(v))
			if !re.MatchString(v) {
				continue
			}
			for _, forbidden := range r.ForbiddenPrefixes {
				// fmt.Printf("Checking %v against %v\n", v, forbidden)
				if strings.HasPrefix(v, forbidden) {
					return fmt.Errorf("import %v has forbidden prefix %v", v, forbidden)
				}
			}
			found := false
			for _, allowed := range r.AllowedPrefixes {
				// fmt.Printf("Checking %v against %v\n", v, allowed)
				if strings.HasPrefix(v, allowed) {
					found = true
					break
				}
			}
			if !found {
				return fmt.Errorf("import %v did not match any allowed prefix", v)
			}
		}
	}
	if len(rules.Rules) > 0 {
		fmt.Printf("%v passes rules found in %v\n", path, actualPath)
	}

	return nil
}
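
// To make the precedence concrete (using made-up names): with a rule whose
// SelectorRegexp is "k8s[.]io", an import like "k8s.io/kubernetes/pkg/kubelet/x"
// matches the selector, is rejected immediately if any ForbiddenPrefixes entry
// is a prefix of it, and otherwise must match at least one AllowedPrefixes
// entry to pass. Imports that don't match the selector are ignored by that rule.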

// importRules produces the .import-restrictions file for a single package.
type importRules struct {
	myPackage *types.Package
	imports   *generator.ImportTracker
}

var (
	_ = generator.Generator(&importRules{})
	_ = generator.FileType(importRuleFile{})
)

func (r *importRules) Name() string                                 { return "import rules" }
func (r *importRules) Filter(*generator.Context, *types.Type) bool  { return false }
func (r *importRules) Namers(*generator.Context) namer.NameSystems  { return nil }
func (r *importRules) PackageVars(*generator.Context) []string      { return []string{} }
func (r *importRules) PackageConsts(*generator.Context) []string    { return []string{} }
func (r *importRules) GenerateType(*generator.Context, *types.Type, io.Writer) error {
	return nil
}
func (r *importRules) Filename() string                             { return ".import-restrictions" }
func (r *importRules) FileType() string                             { return importBossFileType }
func (r *importRules) Init(c *generator.Context, w io.Writer) error { return nil }

// dfsImports does a depth-first walk of p's imports and appends the transitive
// closure of import paths to dest, using seen to avoid visiting a package twice.
func dfsImports(dest *[]string, seen map[string]bool, p *types.Package) {
	for _, p2 := range p.Imports {
		if seen[p2.Path] {
			continue
		}
		seen[p2.Path] = true
		dfsImports(dest, seen, p2)
		*dest = append(*dest, p2.Path)
	}
}

// Imports returns the transitive closure of r.myPackage's imports.
func (r *importRules) Imports(*generator.Context) []string {
	all := []string{}
	dfsImports(&all, map[string]bool{}, r.myPackage)
	return all
}