diff --git a/cmd/files.go b/cmd/files.go index 9efacc6..9dec930 100644 --- a/cmd/files.go +++ b/cmd/files.go @@ -28,73 +28,86 @@ import ( "github.com/h2non/filetype" ) +type maxConcurrency int + +const ( + // avoid hitting default open file descriptor limits (1024) + maxDirectoryScans maxConcurrency = 32 + maxFileScans maxConcurrency = 256 +) + +type Concurrency struct { + directoryScans chan int + fileScans chan int +} + var ( ErrNoImagesFound = fmt.Errorf("no supported image formats found which match all criteria") extensions = [6]string{".jpg", ".jpeg", ".png", ".gif", ".webp", ".bmp"} ) type Dimensions struct { - Width int - Height int + width int + height int } type Files struct { - Mutex sync.Mutex - List map[string][]string + mutex sync.Mutex + list map[string][]string } func (f *Files) Append(directory, path string) { - f.Mutex.Lock() - f.List[directory] = append(f.List[directory], path) - f.Mutex.Unlock() + f.mutex.Lock() + f.list[directory] = append(f.list[directory], path) + f.mutex.Unlock() } type ScanStats struct { - FilesMatched uint64 - FilesSkipped uint64 - DirectoriesMatched uint64 + filesMatched uint64 + filesSkipped uint64 + directoriesMatched uint64 } -func (s *ScanStats) GetFilesTotal() uint64 { - return atomic.LoadUint64(&s.FilesMatched) + atomic.LoadUint64(&s.FilesSkipped) +func (s *ScanStats) FilesTotal() uint64 { + return atomic.LoadUint64(&s.filesMatched) + atomic.LoadUint64(&s.filesSkipped) } -func (s *ScanStats) IncrementFilesMatched() { - atomic.AddUint64(&s.FilesMatched, 1) +func (s *ScanStats) incrementFilesMatched() { + atomic.AddUint64(&s.filesMatched, 1) } -func (s *ScanStats) GetFilesMatched() uint64 { - return atomic.LoadUint64(&s.FilesMatched) +func (s *ScanStats) FilesMatched() uint64 { + return atomic.LoadUint64(&s.filesMatched) } -func (s *ScanStats) IncrementFilesSkipped() { - atomic.AddUint64(&s.FilesSkipped, 1) +func (s *ScanStats) incrementFilesSkipped() { + atomic.AddUint64(&s.filesSkipped, 1) } -func (s *ScanStats) 
GetFilesSkipped() uint64 { - return atomic.LoadUint64(&s.FilesSkipped) +func (s *ScanStats) FilesSkipped() uint64 { + return atomic.LoadUint64(&s.filesSkipped) } -func (s *ScanStats) IncrementDirectoriesMatched() { - atomic.AddUint64(&s.DirectoriesMatched, 1) +func (s *ScanStats) incrementDirectoriesMatched() { + atomic.AddUint64(&s.directoriesMatched, 1) } -func (s *ScanStats) GetDirectoriesMatched() uint64 { - return atomic.LoadUint64(&s.DirectoriesMatched) +func (s *ScanStats) DirectoriesMatched() uint64 { + return atomic.LoadUint64(&s.directoriesMatched) } type Path struct { - Base string - Number int - Extension string + base string + number int + extension string } func (p *Path) Increment() { - p.Number = p.Number + 1 + p.number = p.number + 1 } func (p *Path) Decrement() { - p.Number = p.Number - 1 + p.number = p.number - 1 } func contains(s []string, e string) bool { @@ -124,7 +137,7 @@ func humanReadableSize(bytes int) string { float64(bytes)/float64(div), "KMGTPE"[exp]) } -func getImageDimensions(path string) (*Dimensions, error) { +func imageDimensions(path string) (*Dimensions, error) { file, err := os.Open(path) switch { case errors.Is(err, os.ErrNotExist): @@ -137,12 +150,12 @@ func getImageDimensions(path string) (*Dimensions, error) { myImage, _, err := image.DecodeConfig(file) switch { case errors.Is(err, image.ErrFormat): - return &Dimensions{Width: 0, Height: 0}, nil + return &Dimensions{width: 0, height: 0}, nil case err != nil: return &Dimensions{}, err } - return &Dimensions{Width: myImage.Width, Height: myImage.Height}, nil + return &Dimensions{width: myImage.Width, height: myImage.Height}, nil } func preparePath(path string) string { @@ -154,7 +167,6 @@ func preparePath(path string) string { } func appendPath(directory, path string, files *Files, stats *ScanStats, shouldCache bool) error { - // If caching, only check image types once, during the initial scan, to speed up future pickFile() calls if shouldCache { image, err := isImage(path) 
if err != nil { @@ -168,7 +180,7 @@ func appendPath(directory, path string, files *Files, stats *ScanStats, shouldCa files.Append(directory, path) - stats.IncrementFilesMatched() + stats.incrementFilesMatched() return nil } @@ -186,12 +198,12 @@ func appendPaths(path string, files *Files, filters *Filters, stats *ScanStats) filename = strings.ToLower(filename) if filters.HasExcludes() { - for i := 0; i < len(filters.Excludes); i++ { + for i := 0; i < len(filters.excludes); i++ { if strings.Contains( filename, - filters.Excludes[i], + filters.excludes[i], ) { - stats.IncrementFilesSkipped() + stats.incrementFilesSkipped() return nil } @@ -199,10 +211,10 @@ } if filters.HasIncludes() { - for i := 0; i < len(filters.Includes); i++ { + for i := 0; i < len(filters.includes); i++ { if strings.Contains( filename, - filters.Includes[i], + filters.includes[i], ) { err := appendPath(directory, path, files, stats, shouldCache) if err != nil { @@ -213,7 +225,7 @@ } } - stats.IncrementFilesSkipped() + stats.incrementFilesSkipped() return nil } @@ -226,18 +238,18 @@ return nil } -func getNewFile(paths []string, filters *Filters, sortOrder string, regexes *Regexes, index *Index) (string, error) { +func newFile(paths []string, filters *Filters, sortOrder string, regexes *Regexes, index *Index) (string, error) { filePath, err := pickFile(paths, filters, sortOrder, index) if err != nil { return "", nil } - path, err := splitPath(filePath, regexes) + path, err := splitPath(filePath, regexes) if err != nil { return "", err } - path.Number = 1 + path.number = 1 switch { case sortOrder == "asc": @@ -270,8 +282,8 @@ func getNewFile(paths []string, filters *Filters, sortOrder string, regexes *Reg return filePath, nil } -func getNextFile(filePath,
sortOrder string, regexes *Regexes) (string, error) { - path, err := splitPath(filePath, regexes) +func nextFile(filePath, sortOrder string, regexes *Regexes) (string, error) { + path, err := splitPath(filePath, regexes) if err != nil { return "", err } @@ -293,25 +305,25 @@ func getNextFile(filePath, sortOrder string, regexes *Regexes) (string, error) { return fileName, err } -func splitPath(path string, regexes *Regexes) (*Path, error) { +func splitPath(path string, regexes *Regexes) (*Path, error) { p := Path{} var err error - split := regexes.Filename.FindAllStringSubmatch(path, -1) + split := regexes.filename.FindAllStringSubmatch(path, -1) if len(split) < 1 || len(split[0]) < 3 { return &Path{}, nil } - p.Base = split[0][1] + p.base = split[0][1] - p.Number, err = strconv.Atoi(split[0][2]) + p.number, err = strconv.Atoi(split[0][2]) if err != nil { return &Path{}, err } - p.Extension = split[0][3] + p.extension = split[0][3] return &p, nil } @@ -320,7 +332,7 @@ func tryExtensions(p *Path) (string, error) { var fileName string for _, extension := range extensions { - fileName = fmt.Sprintf("%s%.3d%s", p.Base, p.Number, extension) + fileName = fmt.Sprintf("%s%.3d%s", p.base, p.number, extension) exists, err := fileExists(fileName) if err != nil { @@ -387,7 +399,7 @@ func isImage(path string) (bool, error) { return filetype.IsImage(head), nil } -func getFiles(path string, files *Files, filters *Filters, stats *ScanStats, concurrency *Concurrency) error { +func scanPath(path string, files *Files, filters *Filters, stats *ScanStats, concurrency *Concurrency) error { var wg sync.WaitGroup err := filepath.WalkDir(path, func(p string, info os.DirEntry, err error) error { @@ -400,11 +412,11 @@ func getFiles(path string, files *Files, filters *Filters, stats *ScanStats, con return filepath.SkipDir case !info.IsDir(): wg.Add(1) - concurrency.FileScans <- 1 + concurrency.fileScans <- 1 go func() { defer func() { - <-concurrency.FileScans + <-concurrency.fileScans
wg.Done() }() @@ -414,7 +426,7 @@ func getFiles(path string, files *Files, filters *Filters, stats *ScanStats, con } }() case info.IsDir(): - stats.IncrementDirectoriesMatched() + stats.incrementDirectoriesMatched() } return err @@ -429,27 +441,27 @@ func getFiles(path string, files *Files, filters *Filters, stats *ScanStats, con return nil } -func getFileList(paths []string, filters *Filters, sort string, index *Index) ([]string, bool) { +func fileList(paths []string, filters *Filters, sort string, index *Index) ([]string, bool) { if Cache && filters.IsEmpty() && !index.IsEmpty() { - return index.Get(), true + return index.Index(), true } var fileList []string files := &Files{ - Mutex: sync.Mutex{}, - List: make(map[string][]string), + mutex: sync.Mutex{}, + list: make(map[string][]string), } stats := &ScanStats{ - FilesMatched: 0, - FilesSkipped: 0, - DirectoriesMatched: 0, + filesMatched: 0, + filesSkipped: 0, + directoriesMatched: 0, } concurrency := &Concurrency{ - DirectoryScans: make(chan int, maxDirectoryScans), - FileScans: make(chan int, maxFileScans), + directoryScans: make(chan int, maxDirectoryScans), + fileScans: make(chan int, maxFileScans), } var wg sync.WaitGroup @@ -458,15 +470,15 @@ func getFileList(paths []string, filters *Filters, sort string, index *Index) ([ for i := 0; i < len(paths); i++ { wg.Add(1) - concurrency.DirectoryScans <- 1 + concurrency.directoryScans <- 1 go func(i int) { defer func() { - <-concurrency.DirectoryScans + <-concurrency.directoryScans wg.Done() }() - err := getFiles(paths[i], files, filters, stats, concurrency) + err := scanPath(paths[i], files, filters, stats, concurrency) if err != nil { fmt.Println(err) } @@ -480,15 +492,15 @@ func getFileList(paths []string, filters *Filters, sort string, index *Index) ([ if Verbose { fmt.Printf("%s | Indexed %d/%d files across %d directories in %s\n", time.Now().Format(LogDate), - stats.GetFilesMatched(), - stats.GetFilesTotal(), - stats.GetDirectoriesMatched(), + 
stats.FilesMatched(), + stats.FilesTotal(), + stats.DirectoriesMatched(), time.Since(startTime), ) } if Cache && filters.IsEmpty() { - index.Set(fileList) + index.setIndex(fileList) } return fileList, false @@ -515,21 +527,21 @@ func prepareDirectory(directory []string) []string { func prepareDirectories(files *Files, sort string) []string { directories := []string{} - keys := make([]string, len(files.List)) + keys := make([]string, len(files.list)) i := 0 - for k := range files.List { + for k := range files.list { keys[i] = k i++ } if sort == "asc" || sort == "desc" { for i := 0; i < len(keys); i++ { - directories = append(directories, prepareDirectory(files.List[keys[i]])...) + directories = append(directories, prepareDirectory(files.list[keys[i]])...) } } else { for i := 0; i < len(keys); i++ { - directories = append(directories, files.List[keys[i]]...) + directories = append(directories, files.list[keys[i]]...) } } @@ -537,7 +549,7 @@ func prepareDirectories(files *Files, sort string) []string { } func pickFile(args []string, filters *Filters, sort string, index *Index) (string, error) { - fileList, fromCache := getFileList(args, filters, sort, index) + fileList, fromCache := fileList(args, filters, sort, index) fileCount := len(fileList) if fileCount == 0 { diff --git a/cmd/root.go b/cmd/root.go index 8fa71d4..a0cdda5 100644 --- a/cmd/root.go +++ b/cmd/root.go @@ -11,19 +11,6 @@ import ( "github.com/spf13/cobra" ) -type MaxConcurrency int - -const ( - // avoid hitting default open file descriptor limits (1024) - maxDirectoryScans MaxConcurrency = 32 - maxFileScans MaxConcurrency = 256 -) - -type Concurrency struct { - DirectoryScans chan int - FileScans chan int -} - var Cache bool var Debug bool var Filter bool diff --git a/cmd/version.go b/cmd/version.go index cccd132..096d8d1 100644 --- a/cmd/version.go +++ b/cmd/version.go @@ -10,7 +10,7 @@ import ( "github.com/spf13/cobra" ) -var Version = "0.33.4" +var Version = "0.34.0" func init() { 
rootCmd.AddCommand(versionCmd) diff --git a/cmd/web.go b/cmd/web.go index 418aa97..7903240 100644 --- a/cmd/web.go +++ b/cmd/web.go @@ -32,14 +32,14 @@ const ( ) type Regexes struct { - Alphanumeric *regexp.Regexp - Filename *regexp.Regexp - Units *regexp.Regexp + alphanumeric *regexp.Regexp + filename *regexp.Regexp + units *regexp.Regexp } type Filters struct { - Includes []string - Excludes []string + includes []string + excludes []string } func (f *Filters) IsEmpty() bool { @@ -47,99 +47,99 @@ func (f *Filters) IsEmpty() bool { } func (f *Filters) HasIncludes() bool { - return len(f.Includes) != 0 + return len(f.includes) != 0 } -func (f *Filters) GetIncludes() string { - return strings.Join(f.Includes, ",") +func (f *Filters) Includes() string { + return strings.Join(f.includes, ",") } func (f *Filters) HasExcludes() bool { - return len(f.Excludes) != 0 + return len(f.excludes) != 0 } -func (f *Filters) GetExcludes() string { - return strings.Join(f.Excludes, ",") +func (f *Filters) Excludes() string { + return strings.Join(f.excludes, ",") } type Index struct { - Mutex sync.RWMutex - List []string + mutex sync.RWMutex + list []string } -func (i *Index) Get() []string { - i.Mutex.RLock() - val := i.List - i.Mutex.RUnlock() +func (i *Index) Index() []string { + i.mutex.RLock() + val := i.list + i.mutex.RUnlock() return val } -func (i *Index) Set(val []string) { - i.Mutex.Lock() - i.List = val - i.Mutex.Unlock() +func (i *Index) setIndex(val []string) { + i.mutex.Lock() + i.list = val + i.mutex.Unlock() } -func (i *Index) GenerateCache(args []string) { - i.Mutex.Lock() - i.List = []string{} - i.Mutex.Unlock() +func (i *Index) generateCache(args []string) { + i.mutex.Lock() + i.list = []string{} + i.mutex.Unlock() - getFileList(args, &Filters{}, "", i) + fileList(args, &Filters{}, "", i) } func (i *Index) IsEmpty() bool { - i.Mutex.RLock() - length := len(i.List) - i.Mutex.RUnlock() + i.mutex.RLock() + length := len(i.list) + i.mutex.RUnlock() return length == 0 
} type ServeStats struct { - Mutex sync.RWMutex - List []string - Count map[string]uint64 - Size map[string]string - Times map[string][]string + mutex sync.RWMutex + list []string + count map[string]uint64 + size map[string]string + times map[string][]string } -func (s *ServeStats) IncrementCounter(image string, timestamp time.Time, filesize string) { - s.Mutex.Lock() +func (s *ServeStats) incrementCounter(image string, timestamp time.Time, filesize string) { + s.mutex.Lock() - s.Count[image]++ + s.count[image]++ - s.Times[image] = append(s.Times[image], timestamp.Format(LogDate)) + s.times[image] = append(s.times[image], timestamp.Format(LogDate)) - _, exists := s.Size[image] + _, exists := s.size[image] if !exists { - s.Size[image] = filesize + s.size[image] = filesize } - if !contains(s.List, image) { - s.List = append(s.List, image) + if !contains(s.list, image) { + s.list = append(s.list, image) } - s.Mutex.Unlock() + s.mutex.Unlock() } func (s *ServeStats) ListImages() ([]byte, error) { - s.Mutex.RLock() + s.mutex.RLock() - sortedList := s.List + sortedList := append([]string{}, s.list...) sort.SliceStable(sortedList, func(p, q int) bool { return sortedList[p] < sortedList[q] }) - a := []TimesServed{} + a := []timesServed{} - for _, image := range s.List { - a = append(a, TimesServed{image, s.Count[image], s.Size[image], s.Times[image]}) + for _, image := range sortedList { + a = append(a, timesServed{image, s.count[image], s.size[image], s.times[image]}) } - s.Mutex.RUnlock() + s.mutex.RUnlock() r, err := json.MarshalIndent(a, "", " ") if err != nil { @@ -149,7 +149,7 @@ func (s *ServeStats) ListImages() ([]byte, error) { return r, nil } -type TimesServed struct { +type timesServed struct { File string Served uint64 Size string @@ -184,10 +184,10 @@ func notFound(w http.ResponseWriter, r *http.Request, filePath string) error { return nil } -func getRefreshInterval(r *http.Request, regexes *Regexes) (int64, string) { +func refreshInterval(r *http.Request, Regexes *Regexes) (int64,
string) { refreshInterval := r.URL.Query().Get("refresh") - if !regexes.Units.MatchString(refreshInterval) { + if !Regexes.units.MatchString(refreshInterval) { return 0, "0ms" } @@ -201,7 +201,7 @@ func getRefreshInterval(r *http.Request, regexes *Regexes) (int64, string) { return durationInMs, refreshInterval } -func getSortOrder(r *http.Request) string { +func sortOrder(r *http.Request) string { sortOrder := r.URL.Query().Get("sort") if sortOrder == "asc" || sortOrder == "desc" { return sortOrder @@ -210,7 +210,7 @@ func getSortOrder(r *http.Request) string { return "" } -func splitQueryParams(query string, regexes *Regexes) []string { +func splitQueryParams(query string, Regexes *Regexes) []string { results := []string{} if query == "" { @@ -220,7 +220,7 @@ func splitQueryParams(query string, regexes *Regexes) []string { params := strings.Split(query, ",") for i := 0; i < len(params); i++ { - if regexes.Alphanumeric.MatchString(params[i]) { + if Regexes.alphanumeric.MatchString(params[i]) { results = append(results, strings.ToLower(params[i])) } } @@ -238,12 +238,12 @@ func generateQueryParams(filters *Filters, sortOrder, refreshInterval string) st if Filter { queryParams.WriteString("include=") if filters.HasIncludes() { - queryParams.WriteString(filters.GetIncludes()) + queryParams.WriteString(filters.Includes()) } queryParams.WriteString("&exclude=") if filters.HasExcludes() { - queryParams.WriteString(filters.GetExcludes()) + queryParams.WriteString(filters.Excludes()) } hasParams = true @@ -309,7 +309,7 @@ func refererToUri(referer string) string { return "/" + parts[3] } -func getRealIp(r *http.Request) string { +func realIP(r *http.Request) string { remoteAddr := strings.SplitAfter(r.RemoteAddr, ":") if len(remoteAddr) < 1 { @@ -331,14 +331,14 @@ func getRealIp(r *http.Request) string { } } -func serveHtml(w http.ResponseWriter, r *http.Request, filePath string, dimensions *Dimensions, filters *Filters, regexes *Regexes) error { +func serveHtml(w 
http.ResponseWriter, r *http.Request, filePath string, dimensions *Dimensions, filters *Filters, Regexes *Regexes) error { fileName := filepath.Base(filePath) w.Header().Add("Content-Type", "text/html") - sortOrder := getSortOrder(r) + sortOrder := sortOrder(r) - refreshTimer, refreshInterval := getRefreshInterval(r, regexes) + refreshTimer, refreshInterval := refreshInterval(r, Regexes) queryParams := generateQueryParams(filters, sortOrder, refreshInterval) @@ -350,8 +350,8 @@ func serveHtml(w http.ResponseWriter, r *http.Request, filePath string, dimensio htmlBody.WriteString(`position:absolute;top:50%;left:50%;transform:translate(-50%,-50%);}`) htmlBody.WriteString(fmt.Sprintf(`%s (%dx%d)`, fileName, - dimensions.Width, - dimensions.Height)) + dimensions.width, + dimensions.height)) htmlBody.WriteString(``) if refreshInterval != "0ms" { htmlBody.WriteString(fmt.Sprintf("", @@ -361,8 +361,8 @@ func serveHtml(w http.ResponseWriter, r *http.Request, filePath string, dimensio htmlBody.WriteString(fmt.Sprintf(`Roulette selected: %s`, queryParams, generateFilePath(filePath), - dimensions.Width, - dimensions.Height, + dimensions.width, + dimensions.height, fileName)) htmlBody.WriteString(``) @@ -418,13 +418,13 @@ func serveStaticFile(w http.ResponseWriter, r *http.Request, paths []string, sta startTime.Format(LogDate), filePath, fileSize, - getRealIp(r), + realIP(r), time.Since(startTime).Round(time.Microsecond), ) } if Debug { - stats.IncrementCounter(filePath, startTime, fileSize) + stats.incrementCounter(filePath, startTime, fileSize) } return nil @@ -432,7 +432,7 @@ func serveStaticFile(w http.ResponseWriter, r *http.Request, paths []string, sta func serveCacheClearHandler(args []string, index *Index) http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { - index.GenerateCache(args) + index.generateCache(args) w.WriteHeader(http.StatusOK) w.Header().Set("Content-Type", "text/plain") @@ -458,7 +458,7 @@ func serveStatsHandler(args []string, stats 
*ServeStats) http.HandlerFunc { fmt.Printf("%s | Served statistics page (%s) to %s in %s\n", startTime.Format(LogDate), humanReadableSize(len(response)), - getRealIp(r), + realIP(r), time.Since(startTime).Round(time.Microsecond), ) } @@ -474,7 +474,7 @@ func serveStaticFileHandler(paths []string, stats *ServeStats) http.HandlerFunc } } -func serveHtmlHandler(paths []string, regexes *Regexes, index *Index) http.HandlerFunc { +func serveHtmlHandler(paths []string, Regexes *Regexes, index *Index) http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { refererUri, err := stripQueryParams(refererToUri(r.Referer())) if err != nil { @@ -482,27 +482,27 @@ func serveHtmlHandler(paths []string, regexes *Regexes, index *Index) http.Handl } filters := &Filters{ - Includes: splitQueryParams(r.URL.Query().Get("include"), regexes), - Excludes: splitQueryParams(r.URL.Query().Get("exclude"), regexes), + includes: splitQueryParams(r.URL.Query().Get("include"), Regexes), + excludes: splitQueryParams(r.URL.Query().Get("exclude"), Regexes), } - sortOrder := getSortOrder(r) + sortOrder := sortOrder(r) - _, refreshInterval := getRefreshInterval(r, regexes) + _, refreshInterval := refreshInterval(r, Regexes) if r.URL.Path == "/" { var filePath string var err error if refererUri != "" { - filePath, err = getNextFile(refererUri, sortOrder, regexes) + filePath, err = nextFile(refererUri, sortOrder, Regexes) if err != nil { log.Fatal(err) } } if filePath == "" { - filePath, err = getNewFile(paths, filters, sortOrder, regexes, index) + filePath, err = newFile(paths, filters, sortOrder, Regexes, index) switch { case err != nil && err == ErrNoImagesFound: notFound(w, r, filePath) @@ -548,12 +548,12 @@ func serveHtmlHandler(paths []string, regexes *Regexes, index *Index) http.Handl return } - dimensions, err := getImageDimensions(filePath) + dimensions, err := imageDimensions(filePath) if err != nil { log.Fatal(err) } - err = serveHtml(w, r, filePath, dimensions, filters, 
regexes) + err = serveHtml(w, r, filePath, dimensions, filters, Regexes) if err != nil { log.Fatal(err) } @@ -571,34 +571,34 @@ func ServePage(args []string) error { return err } - regexes := &Regexes{ - Filename: regexp.MustCompile(`(.+)([0-9]{3})(\..+)`), - Alphanumeric: regexp.MustCompile(`^[a-zA-Z0-9]*$`), - Units: regexp.MustCompile(`^[0-9]+(ns|us|µs|ms|s|m|h)$`), + Regexes := &Regexes{ + filename: regexp.MustCompile(`(.+)([0-9]{3})(\..+)`), + alphanumeric: regexp.MustCompile(`^[a-zA-Z0-9]*$`), + units: regexp.MustCompile(`^[0-9]+(ns|us|µs|ms|s|m|h)$`), } rand.Seed(time.Now().UnixNano()) index := &Index{ - Mutex: sync.RWMutex{}, - List: []string{}, + mutex: sync.RWMutex{}, + list: []string{}, } if Cache { - index.GenerateCache(args) + index.generateCache(args) http.Handle("/_/clear_cache", serveCacheClearHandler(args, index)) } stats := &ServeStats{ - Mutex: sync.RWMutex{}, - List: []string{}, - Count: make(map[string]uint64), - Size: make(map[string]string), - Times: make(map[string][]string), + mutex: sync.RWMutex{}, + list: []string{}, + count: make(map[string]uint64), + size: make(map[string]string), + times: make(map[string][]string), } - http.Handle("/", serveHtmlHandler(paths, regexes, index)) + http.Handle("/", serveHtmlHandler(paths, Regexes, index)) http.Handle(Prefix+"/", http.StripPrefix(Prefix, serveStaticFileHandler(paths, stats))) http.HandleFunc("/favicon.ico", doNothing)