extracting config paths to flags
krzysied committed Jun 3, 2019
1 parent 981eed6 commit b7445ae
Showing 4 changed files with 40 additions and 30 deletions.
28 changes: 16 additions & 12 deletions perfdash/config.go
@@ -25,7 +25,6 @@ import (
"strings"

"github.com/ghodss/yaml"
"k8s.io/contrib/test-utils/utils"
)

// To add new e2e test support, you need to:
@@ -215,15 +214,10 @@ var (
"benchmark": benchmarkDescriptions,
"dnsBenchmark": dnsBenchmarkDescriptions,
}

// TestConfig contains all the test PerfDash supports now. Downloader will download and
// analyze build log from all these Jobs, and parse the data from all these Test.
// Notice that all the tests should have different name for now.
TestConfig = Buckets{utils.KubekinsBucket: getProwConfigOrDie()}
)

func getProwConfigOrDie() Jobs {
jobs, err := getProwConfig()
func getProwConfigOrDie(configPaths []string) Jobs {
jobs, err := getProwConfig(configPaths)
if err != nil {
panic(err)
}
@@ -239,12 +233,22 @@ type periodic struct {
Tags []string `json:"tags"`
}

func getProwConfig() (Jobs, error) {
func getProwConfig(configPaths []string) (Jobs, error) {
fmt.Fprintf(os.Stderr, "Fetching prow config from GitHub...\n")
jobs := Jobs{}
yamlLinks := []string{
"https://raw.githubusercontent.com/kubernetes/test-infra/master/config/jobs/kubernetes/sig-scalability/sig-scalability-periodic-jobs.yaml",
"https://raw.githubusercontent.com/kubernetes/test-infra/master/config/jobs/kubernetes/sig-scalability/sig-scalability-release-blocking-jobs.yaml",
yamlLinks := []string{}
for _, configPath := range configPaths {
// Perfdash supports only URLs.
if !strings.HasPrefix(configPath, "http") {
fmt.Fprintf(os.Stderr, "%s is not a URL!\n", configPath)
continue
}
// Perfdash supports only YAML files.
if !strings.HasSuffix(configPath, ".yaml") {
fmt.Fprintf(os.Stderr, "%s is not a YAML file!\n", configPath)
continue
}
yamlLinks = append(yamlLinks, configPath)
}

for _, yamlLink := range yamlLinks {
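For context, here is a minimal standalone sketch (not part of this commit) of the accept/skip rules the new loop applies to each --configPath value; isSupportedConfigPath and the example paths are illustrative only:

```go
package main

import (
	"fmt"
	"strings"
)

// isSupportedConfigPath mirrors the checks above: only http(s) URLs that
// point at .yaml files are kept, everything else is skipped with a warning.
func isSupportedConfigPath(p string) bool {
	return strings.HasPrefix(p, "http") && strings.HasSuffix(p, ".yaml")
}

func main() {
	for _, p := range []string{
		"https://example.com/sig-scalability-periodic-jobs.yaml", // accepted
		"/etc/perfdash/jobs.yaml",                                // skipped: not a URL
		"https://example.com/jobs.json",                          // skipped: not a YAML file
	} {
		fmt.Printf("%-60s supported=%v\n", p, isSupportedConfigPath(p))
	}
}
```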
2 changes: 2 additions & 0 deletions perfdash/deployment.yaml
@@ -24,6 +24,8 @@ spec:
- --dir=/www
- --address=0.0.0.0:8080
- --builds=100
- --configPath=https://raw.githubusercontent.com/kubernetes/test-infra/master/config/jobs/kubernetes/sig-scalability/sig-scalability-periodic-jobs.yaml
- --configPath=https://raw.githubusercontent.com/kubernetes/test-infra/master/config/jobs/kubernetes/sig-scalability/sig-scalability-release-blocking-jobs.yaml
imagePullPolicy: Always
ports:
- name: status
18 changes: 9 additions & 9 deletions perfdash/google-gcs-downloader.go
@@ -32,32 +32,32 @@ import (
type GoogleGCSDownloader struct {
DefaultBuildsCount int
GoogleGCSBucketUtils *utils.Utils
ConfigPaths []string
}

// NewGoogleGCSDownloader creates a new GoogleGCSDownloader
func NewGoogleGCSDownloader(defaultBuildsCount int) *GoogleGCSDownloader {
func NewGoogleGCSDownloader(configPaths []string, defaultBuildsCount int) *GoogleGCSDownloader {
return &GoogleGCSDownloader{
DefaultBuildsCount: defaultBuildsCount,
GoogleGCSBucketUtils: utils.NewUtils(utils.KubekinsBucket, utils.LogDir),
ConfigPaths: configPaths,
}
}

// TODO(random-liu): Only download and update new data each time.
func (g *GoogleGCSDownloader) getData() (JobToCategoryData, error) {
newJobs, err := getProwConfig()
if err == nil {
TestConfig[utils.KubekinsBucket] = newJobs
} else {
fmt.Fprintf(os.Stderr, "Failed to refresh config: %v\n", err)
newJobs, err := getProwConfig(g.ConfigPaths)
if err != nil {
return nil, fmt.Errorf("failed to refresh config: %v", err)
}
fmt.Print("Getting Data from GCS...\n")
result := make(JobToCategoryData)
var resultLock sync.Mutex
var wg sync.WaitGroup
wg.Add(len(TestConfig[utils.KubekinsBucket]))
for job, tests := range TestConfig[utils.KubekinsBucket] {
wg.Add(len(newJobs))
for job, tests := range newJobs {
if tests.Prefix == "" {
return result, fmt.Errorf("Invalid empty Prefix for job %s", job)
return nil, fmt.Errorf("Invalid empty Prefix for job %s", job)
}
for categoryLabel, categoryMap := range tests.Descriptions {
for testLabel := range categoryMap {
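A brief usage sketch, assuming it lives in the same perfdash main package as the code above; the URL and builds count are placeholders, not values from this commit:

```go
// Sketch only: the downloader now receives the config paths explicitly,
// and a failed config refresh is returned as an error instead of silently
// falling back to the previously hard-coded TestConfig.
paths := []string{"https://example.com/sig-scalability-periodic-jobs.yaml"} // placeholder
downloader := NewGoogleGCSDownloader(paths, 100)
if _, err := downloader.getData(); err != nil {
	fmt.Fprintf(os.Stderr, "%v\n", err)
}
```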
22 changes: 13 additions & 9 deletions perfdash/perfdash.go
@@ -18,11 +18,12 @@ package main

import (
"encoding/json"
"flag"
"fmt"
"net/http"
"os"
"time"

"github.com/spf13/pflag"
)

const (
@@ -32,10 +33,11 @@ const (
)

var (
addr = flag.String("address", ":8080", "The address to serve web data on")
www = flag.Bool("www", false, "If true, start a web server to serve performance data")
wwwDir = flag.String("dir", "www", "If non-empty, add a file server for this directory at the root of the web server")
builds = flag.Int("builds", maxBuilds, "Total builds number")
addr = pflag.String("address", ":8080", "The address to serve web data on")
www = pflag.Bool("www", false, "If true, start a web server to serve performance data")
wwwDir = pflag.String("dir", "www", "If non-empty, add a file server for this directory at the root of the web server")
builds = pflag.Int("builds", maxBuilds, "Total builds number")
configPaths = pflag.StringArray("configPath", []string{}, "Paths/URLs to the Prow config")
)

func main() {
@@ -47,16 +49,18 @@ func main() {
}

func run() error {
flag.Parse()
pflag.Parse()
fmt.Printf("config paths - %d\n", len(*configPaths))
for i := 0; i < len(*configPaths); i++ {
fmt.Printf("config path %d: %s\n", i+1, (*configPaths)[i])
}

if *builds > maxBuilds || *builds < 0 {
fmt.Printf("Invalid number of builds: %d, setting to %d\n", *builds, maxBuilds)
*builds = maxBuilds
}

// TODO(random-liu): Add a top layer downloader to download build log from different buckets when we support
// more buckets in the future.
downloader := NewGoogleGCSDownloader(*builds)
downloader := NewGoogleGCSDownloader(*configPaths, *builds)
result := make(JobToCategoryData)
var err error

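Worth noting: with the standard library's flag package a repeated flag simply overwrites the previous value, so the switch to pflag.StringArray is what lets --configPath be passed multiple times and collected into a slice. A small self-contained sketch (not from this commit; the URLs are placeholders):

```go
package main

import (
	"fmt"

	"github.com/spf13/pflag"
)

func main() {
	configPaths := pflag.StringArray("configPath", []string{}, "Paths/URLs to the Prow config")
	// Simulate a command line that repeats the flag; pflag.Parse() would
	// normally read os.Args[1:] instead.
	_ = pflag.CommandLine.Parse([]string{
		"--configPath=https://example.com/periodic-jobs.yaml",
		"--configPath=https://example.com/release-blocking-jobs.yaml",
	})
	for i, p := range *configPaths {
		fmt.Printf("config path %d: %s\n", i+1, p)
	}
}
```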
