diff --git a/cmd/files.go b/cmd/files.go
index 11f00fb..9efacc6 100644
--- a/cmd/files.go
+++ b/cmd/files.go
@@ -29,75 +29,132 @@ import (
 )
 
 var (
-	errNoImagesFound = fmt.Errorf("no supported image formats found which match all criteria")
+	ErrNoImagesFound = fmt.Errorf("no supported image formats found which match all criteria")
 
 	extensions = [6]string{".jpg", ".jpeg", ".png", ".gif", ".webp", ".bmp"}
 )
 
 type Dimensions struct {
-	width  int
-	height int
+	Width  int
+	Height int
 }
 
 type Files struct {
-	mutex sync.Mutex
-	list  map[string][]string
+	Mutex sync.Mutex
+	List  map[string][]string
 }
 
-func (f *Files) append(directory, path string) {
-	f.mutex.Lock()
-	f.list[directory] = append(f.list[directory], path)
-	f.mutex.Unlock()
-}
-
-type Path struct {
-	base      string
-	number    int
-	extension string
-}
-
-func (p *Path) increment() {
-	p.number = p.number + 1
-}
-
-func (p *Path) decrement() {
-	p.number = p.number - 1
+func (f *Files) Append(directory, path string) {
+	f.Mutex.Lock()
+	f.List[directory] = append(f.List[directory], path)
+	f.Mutex.Unlock()
 }
 
 type ScanStats struct {
-	filesMatched       uint64
-	filesSkipped       uint64
-	directoriesMatched uint64
+	FilesMatched       uint64
+	FilesSkipped       uint64
+	DirectoriesMatched uint64
 }
 
-func (s *ScanStats) FilesTotal() uint64 {
-	return atomic.LoadUint64(&s.filesMatched) + atomic.LoadUint64(&s.filesSkipped)
+func (s *ScanStats) GetFilesTotal() uint64 {
+	return atomic.LoadUint64(&s.FilesMatched) + atomic.LoadUint64(&s.FilesSkipped)
 }
 
-func (s *ScanStats) incrementFilesMatched() {
-	atomic.AddUint64(&s.filesMatched, 1)
+func (s *ScanStats) IncrementFilesMatched() {
+	atomic.AddUint64(&s.FilesMatched, 1)
 }
 
-func (s *ScanStats) FilesMatched() uint64 {
-	return atomic.LoadUint64(&s.filesMatched)
+func (s *ScanStats) GetFilesMatched() uint64 {
+	return atomic.LoadUint64(&s.FilesMatched)
 }
 
-func (s *ScanStats) incrementFilesSkipped() {
-	atomic.AddUint64(&s.filesSkipped, 1)
+func (s *ScanStats) IncrementFilesSkipped() {
+	atomic.AddUint64(&s.FilesSkipped, 1)
 }
 
-func (s *ScanStats) FilesSkipped() uint64 {
-	return atomic.LoadUint64(&s.filesSkipped)
+func (s *ScanStats) GetFilesSkipped() uint64 {
+	return atomic.LoadUint64(&s.FilesSkipped)
 }
 
-func (s *ScanStats) incrementDirectoriesMatched() {
-	atomic.AddUint64(&s.directoriesMatched, 1)
+func (s *ScanStats) IncrementDirectoriesMatched() {
+	atomic.AddUint64(&s.DirectoriesMatched, 1)
 }
 
-func (s *ScanStats) DirectoriesMatched() uint64 {
-	return atomic.LoadUint64(&s.directoriesMatched)
+func (s *ScanStats) GetDirectoriesMatched() uint64 {
+	return atomic.LoadUint64(&s.DirectoriesMatched)
+}
+
+type Path struct {
+	Base      string
+	Number    int
+	Extension string
+}
+
+func (p *Path) Increment() {
+	p.Number = p.Number + 1
+}
+
+func (p *Path) Decrement() {
+	p.Number = p.Number - 1
+}
+
+func contains(s []string, e string) bool {
+	for _, a := range s {
+		if a == e {
+			return true
+		}
+	}
+
+	return false
+}
+
+func humanReadableSize(bytes int) string {
+	const unit = 1000
+
+	if bytes < unit {
+		return fmt.Sprintf("%d B", bytes)
+	}
+
+	div, exp := int64(unit), 0
+
+	for n := bytes / unit; n >= unit; n /= unit {
+		div *= unit
+		exp++
+	}
+
+	return fmt.Sprintf("%.1f %cB",
+		float64(bytes)/float64(div), "KMGTPE"[exp])
+}
+
+func getImageDimensions(path string) (*Dimensions, error) {
+	file, err := os.Open(path)
+	switch {
+	case errors.Is(err, os.ErrNotExist):
+		return &Dimensions{}, nil
+	case err != nil:
+		return &Dimensions{}, err
+	}
+	defer file.Close()
+
+	myImage, _, err := image.DecodeConfig(file)
+	switch {
+	case errors.Is(err, image.ErrFormat):
+		return &Dimensions{Width: 0, Height: 0}, nil
+	case err != nil:
+		return &Dimensions{}, err
+	}
+
+	return &Dimensions{Width: myImage.Width, Height: myImage.Height}, nil
+}
+
+func preparePath(path string) string {
+	if runtime.GOOS == "windows" {
+		path = fmt.Sprintf("/%s", filepath.ToSlash(path))
+	}
+
+	return path
 }
 
 func appendPath(directory, path string, files *Files, stats *ScanStats, shouldCache bool) error {
+	// If caching, only check image types once, during the initial scan, to speed up future pickFile() calls
 	if shouldCache {
 		image, err := isImage(path)
 		if err != nil {
@@ -109,9 +166,9 @@ func appendPath(directory, path string, files *Files, stats *ScanStats, shouldCa
 		}
 	}
 
-	files.append(directory, path)
+	files.Append(directory, path)
 
-	stats.incrementFilesMatched()
+	stats.IncrementFilesMatched()
 
 	return nil
 }
@@ -129,12 +186,12 @@ func appendPaths(path string, files *Files, filters *Filters, stats *ScanStats)
 	filename = strings.ToLower(filename)
 
 	if filters.HasExcludes() {
-		for i := 0; i < len(filters.excludes); i++ {
+		for i := 0; i < len(filters.Excludes); i++ {
 			if strings.Contains(
 				filename,
-				filters.excludes[i],
+				filters.Excludes[i],
 			) {
-				stats.incrementFilesSkipped()
+				stats.IncrementFilesSkipped()
 
 				return nil
 			}
@@ -142,10 +199,10 @@ func appendPaths(path string, files *Files, filters *Filters, stats *ScanStats)
 	}
 
 	if filters.HasIncludes() {
-		for i := 0; i < len(filters.includes); i++ {
+		for i := 0; i < len(filters.Includes); i++ {
 			if strings.Contains(
 				filename,
-				filters.includes[i],
+				filters.Includes[i],
 			) {
 				err := appendPath(directory, path, files, stats, shouldCache)
 				if err != nil {
@@ -156,7 +213,7 @@ func appendPaths(path string, files *Files, filters *Filters, stats *ScanStats)
 			}
 		}
 
-		stats.incrementFilesSkipped()
+		stats.IncrementFilesSkipped()
 
 		return nil
 	}
@@ -169,17 +226,113 @@ func appendPaths(path string, files *Files, filters *Filters, stats *ScanStats)
 	return nil
 }
 
-func cleanFilename(filename string) string {
-	return filename[:len(filename)-(len(filepath.Ext(filename))+3)]
-}
+func getNewFile(paths []string, filters *Filters, sortOrder string, regexes *Regexes, index *Index) (string, error) {
+	filePath, err := pickFile(paths, filters, sortOrder, index)
+	if err != nil {
+		return "", nil
+	}
 
-func contains(s []string, e string) bool {
-	for _, a := range s {
-		if a == e {
-			return true
+	path, err := splitPath(filePath, regexes)
+	if err != nil {
+		return "", err
+	}
+
+	path.Number = 1
+
+	switch {
+	case sortOrder == "asc":
+		filePath, err = tryExtensions(path)
+		if err != nil {
+			return "", err
+		}
+	case sortOrder == "desc":
+		for {
+			path.Increment()
+
+			filePath, err = tryExtensions(path)
+			if err != nil {
+				return "", err
+			}
+
+			if filePath == "" {
+				path.Decrement()
+
+				filePath, err = tryExtensions(path)
+				if err != nil {
+					return "", err
+				}
+
+				break
+			}
 		}
 	}
 
-	return false
+	return filePath, nil
+}
+
+func getNextFile(filePath, sortOrder string, regexes *Regexes) (string, error) {
+	path, err := splitPath(filePath, regexes)
+	if err != nil {
+		return "", err
+	}
+
+	switch {
+	case sortOrder == "asc":
+		path.Increment()
+	case sortOrder == "desc":
+		path.Decrement()
+	default:
+		return "", nil
+	}
+
+	fileName, err := tryExtensions(path)
+	if err != nil {
+		return "", err
+	}
+
+	return fileName, err
+}
+
+func splitPath(path string, regexes *Regexes) (*Path, error) {
+	p := Path{}
+	var err error
+
+	split := regexes.Filename.FindAllStringSubmatch(path, -1)
+
+	if len(split) < 1 || len(split[0]) < 3 {
+		return &Path{}, nil
+	}
+
+	p.Base = split[0][1]
+
+	p.Number, err = strconv.Atoi(split[0][2])
+
+	if err != nil {
+		return &Path{}, err
+	}
+
+	p.Extension = split[0][3]
+
+	return &p, nil
+}
+
+func tryExtensions(p *Path) (string, error) {
+	var fileName string
+
+	for _, extension := range extensions {
+		fileName = fmt.Sprintf("%s%.3d%s", p.Base, p.Number, extension)
+
+		exists, err := fileExists(fileName)
+		if err != nil {
+			return "", err
+		}
+
+		if exists {
+			return fileName, nil
+		}
+	}
+
+	return "", nil
 }
 
 func fileExists(path string) (bool, error) {
@@ -194,22 +347,104 @@ func fileExists(path string) (bool, error) {
 	}
 }
 
-func fileList(paths []string, filters *Filters, sort string, index *Index) ([]string, bool) {
+func pathIsValid(filePath string, paths []string) bool {
+	var matchesPrefix = false
+
+	for i := 0; i < len(paths); i++ {
+		if strings.HasPrefix(filePath, paths[i]) {
+			matchesPrefix = true
+		}
+	}
+
+	switch {
+	case Verbose && !matchesPrefix:
+		fmt.Printf("%s | Error: Failed to serve file outside specified path(s): %s\n",
+			time.Now().Format(LogDate),
+			filePath,
+		)
+
+		return false
+	case !matchesPrefix:
+		return false
+	default:
+		return true
+	}
+}
+
+func isImage(path string) (bool, error) {
+	file, err := os.Open(path)
+	switch {
+	case errors.Is(err, os.ErrNotExist):
+		return false, nil
+	case err != nil:
+		return false, err
+	}
+	defer file.Close()
+
+	head := make([]byte, 261)
+	file.Read(head)
+
+	return filetype.IsImage(head), nil
+}
+
+func getFiles(path string, files *Files, filters *Filters, stats *ScanStats, concurrency *Concurrency) error {
+	var wg sync.WaitGroup
+
+	err := filepath.WalkDir(path, func(p string, info os.DirEntry, err error) error {
+		if err != nil {
+			return err
+		}
+
+		switch {
+		case !Recursive && info.IsDir() && p != path:
+			return filepath.SkipDir
+		case !info.IsDir():
+			wg.Add(1)
+			concurrency.FileScans <- 1
+
+			go func() {
+				defer func() {
+					<-concurrency.FileScans
+					wg.Done()
+				}()
+
+				err = appendPaths(p, files, filters, stats)
+				if err != nil {
+					fmt.Println(err)
+				}
+			}()
+		case info.IsDir():
+			stats.IncrementDirectoriesMatched()
+		}
+
+		return err
+	})
+
+	wg.Wait()
+
+	if err != nil {
+		return err
+	}
+
+	return nil
+}
+
+func getFileList(paths []string, filters *Filters, sort string, index *Index) ([]string, bool) {
 	if Cache && filters.IsEmpty() && !index.IsEmpty() {
-		return index.Index(), true
+		return index.Get(), true
 	}
 
 	var fileList []string
 
 	files := &Files{
-		mutex: sync.Mutex{},
-		list:  make(map[string][]string),
+		Mutex: sync.Mutex{},
+		List:  make(map[string][]string),
 	}
 
 	stats := &ScanStats{
-		filesMatched:       0,
-		filesSkipped:       0,
-		directoriesMatched: 0,
+		FilesMatched:       0,
+		FilesSkipped:       0,
+		DirectoriesMatched: 0,
 	}
 
 	concurrency := &Concurrency{
@@ -231,7 +466,7 @@ func fileList(paths []string, filters *Filters, sort string, index *Index) ([]st
 				wg.Done()
 			}()
 
-			err := scanPath(paths[i], files, filters, stats, concurrency)
+			err := getFiles(paths[i], files, filters, stats, concurrency)
 			if err != nil {
 				fmt.Println(err)
 			}
@@ -244,141 +479,99 @@ func fileList(paths []string, filters *Filters, sort string, index *Index) ([]st
 
 	if Verbose {
 		fmt.Printf("%s | Indexed %d/%d files across %d directories in %s\n",
-			time.Now().Format(logDate),
-			stats.FilesMatched(),
-			stats.FilesTotal(),
-			stats.DirectoriesMatched(),
+			time.Now().Format(LogDate),
+			stats.GetFilesMatched(),
+			stats.GetFilesTotal(),
+			stats.GetDirectoriesMatched(),
 			time.Since(startTime),
 		)
 	}
 
 	if Cache && filters.IsEmpty() {
-		index.setIndex(fileList)
+		index.Set(fileList)
 	}
 
 	return fileList, false
 }
 
-func humanReadableSize(bytes int) string {
-	const unit = 1000
-
-	if bytes < unit {
-		return fmt.Sprintf("%d B", bytes)
-	}
-
-	div, exp := int64(unit), 0
-
-	for n := bytes / unit; n >= unit; n /= unit {
-		div *= unit
-		exp++
-	}
-
-	return fmt.Sprintf("%.1f %cB",
-		float64(bytes)/float64(div), "KMGTPE"[exp])
+func cleanFilename(filename string) string {
+	return filename[:len(filename)-(len(filepath.Ext(filename))+3)]
 }
 
-func imageDimensions(path string) (*Dimensions, error) {
-	file, err := os.Open(path)
-	switch {
-	case errors.Is(err, os.ErrNotExist):
-		return &Dimensions{}, nil
-	case err != nil:
-		return &Dimensions{}, err
-	}
-	defer file.Close()
+func prepareDirectory(directory []string) []string {
+	_, first := filepath.Split(directory[0])
+	first = cleanFilename(first)
 
-	myImage, _, err := image.DecodeConfig(file)
-	switch {
-	case errors.Is(err, image.ErrFormat):
-		return &Dimensions{width: 0, height: 0}, nil
-	case err != nil:
-		return &Dimensions{}, err
-	}
+	_, last := filepath.Split(directory[len(directory)-1])
+	last = cleanFilename(last)
 
-	return &Dimensions{width: myImage.Width, height: myImage.Height}, nil
+	if first == last {
+		return append([]string{}, directory[0])
+	} else {
+		return directory
+	}
 }
 
-func isImage(path string) (bool, error) {
-	file, err := os.Open(path)
-	switch {
-	case errors.Is(err, os.ErrNotExist):
-		return false, nil
-	case err != nil:
-		return false, err
-	}
-	defer file.Close()
+func prepareDirectories(files *Files, sort string) []string {
+	directories := []string{}
 
-	head := make([]byte, 261)
-	file.Read(head)
+	keys := make([]string, len(files.List))
 
-	return filetype.IsImage(head), nil
-}
-
-func newFile(paths []string, filters *Filters, sortOrder string, regexes *Regexes, index *Index) (string, error) {
-	filePath, err := pickFile(paths, filters, sortOrder, index)
-	if err != nil {
-		return "", nil
+	i := 0
+	for k := range files.List {
+		keys[i] = k
+		i++
 	}
 
-	path, err := splitPath(filePath, regexes)
-	if err != nil {
-		return "", err
-	}
-
-	path.number = 1
-
-	switch {
-	case sortOrder == "asc":
-		filePath, err = tryExtensions(path)
-		if err != nil {
-			return "", err
+	if sort == "asc" || sort == "desc" {
+		for i := 0; i < len(keys); i++ {
+			directories = append(directories, prepareDirectory(files.List[keys[i]])...)
 		}
-	case sortOrder == "desc":
-		for {
-			path.increment()
+	} else {
+		for i := 0; i < len(keys); i++ {
+			directories = append(directories, files.List[keys[i]]...)
+		}
+	}
 
-			filePath, err = tryExtensions(path)
+	return directories
+}
+
+func pickFile(args []string, filters *Filters, sort string, index *Index) (string, error) {
+	fileList, fromCache := getFileList(args, filters, sort, index)
+
+	fileCount := len(fileList)
+	if fileCount == 0 {
+		return "", ErrNoImagesFound
+	}
+
+	r := rand.Intn(fileCount - 1)
+
+	for i := 0; i < fileCount; i++ {
+		if r >= (fileCount - 1) {
+			r = 0
+		} else {
+			r++
+		}
+
+		filePath := fileList[r]
+
+		if !fromCache {
+			isImage, err := isImage(filePath)
 			if err != nil {
 				return "", err
 			}
 
-			if filePath == "" {
-				path.decrement()
-
-				filePath, err = tryExtensions(path)
-				if err != nil {
-					return "", err
-				}
-
-				break
+			if isImage {
+				return filePath, nil
 			}
+
+			continue
 		}
+
+		return filePath, nil
 	}
 
-	return filePath, nil
-}
-
-func nextFile(filePath, sortOrder string, regexes *Regexes) (string, error) {
-	path, err := splitPath(filePath, regexes)
-	if err != nil {
-		return "", err
-	}
-
-	switch {
-	case sortOrder == "asc":
-		path.increment()
-	case sortOrder == "desc":
-		path.decrement()
-	default:
-		return "", nil
-	}
-
-	fileName, err := tryExtensions(path)
-	if err != nil {
-		return "", err
-	}
-
-	return fileName, err
+	return "", ErrNoImagesFound
 }
 
 func normalizePaths(args []string) ([]string, error) {
@@ -410,195 +603,3 @@ func normalizePaths(args []string) ([]string, error) {
 
 	return paths, nil
 }
-
-func pathIsValid(filePath string, paths []string) bool {
-	var matchesPrefix = false
-
-	for i := 0; i < len(paths); i++ {
-		if strings.HasPrefix(filePath, paths[i]) {
-			matchesPrefix = true
-		}
-	}
-
-	switch {
-	case Verbose && !matchesPrefix:
-		fmt.Printf("%s | Error: Failed to serve file outside specified path(s): %s\n",
-			time.Now().Format(logDate),
-			filePath,
-		)
-
-		return false
-	case !matchesPrefix:
-		return false
-	default:
-		return true
-	}
-}
-
-func pickFile(args []string, filters *Filters, sortOrder string, index *Index) (string, error) {
-	fileList, fromCache := fileList(args, filters, sortOrder, index)
-
-	fileCount := len(fileList)
-	if fileCount == 0 {
-		return "", errNoImagesFound
-	}
-
-	r := rand.Intn(fileCount - 1)
-
-	for i := 0; i < fileCount; i++ {
-		if r >= (fileCount - 1) {
-			r = 0
-		} else {
-			r++
-		}
-
-		filePath := fileList[r]
-
-		if !fromCache {
-			isImage, err := isImage(filePath)
-			if err != nil {
-				return "", err
-			}
-
-			if isImage {
-				return filePath, nil
-			}
-
-			continue
-		}
-
-		return filePath, nil
-	}
-
-	return "", errNoImagesFound
-}
-
-func prepareDirectories(files *Files, sort string) []string {
-	directories := []string{}
-
-	keys := make([]string, len(files.list))
-
-	i := 0
-	for k := range files.list {
-		keys[i] = k
-		i++
-	}
-
-	if sort == "asc" || sort == "desc" {
-		for i := 0; i < len(keys); i++ {
-			directories = append(directories, prepareDirectory(files.list[keys[i]])...)
-		}
-	} else {
-		for i := 0; i < len(keys); i++ {
-			directories = append(directories, files.list[keys[i]]...)
-		}
-	}
-
-	return directories
-}
-
-func prepareDirectory(directory []string) []string {
-	_, first := filepath.Split(directory[0])
-	first = cleanFilename(first)
-
-	_, last := filepath.Split(directory[len(directory)-1])
-	last = cleanFilename(last)
-
-	if first == last {
-		return append([]string{}, directory[0])
-	} else {
-		return directory
-	}
-}
-
-func preparePath(path string) string {
-	if runtime.GOOS == "windows" {
-		path = fmt.Sprintf("/%s", filepath.ToSlash(path))
-	}
-
-	return path
-}
-
-func scanPath(path string, files *Files, filters *Filters, stats *ScanStats, concurrency *Concurrency) error {
-	var wg sync.WaitGroup
-
-	err := filepath.WalkDir(path, func(p string, info os.DirEntry, err error) error {
-		if err != nil {
-			return err
-		}
-
-		switch {
-		case !Recursive && info.IsDir() && p != path:
-			return filepath.SkipDir
-		case !info.IsDir():
-			wg.Add(1)
-			concurrency.FileScans <- 1
-
-			go func() {
-				defer func() {
-					<-concurrency.FileScans
-					wg.Done()
-				}()
-
-				err = appendPaths(p, files, filters, stats)
-				if err != nil {
-					fmt.Println(err)
-				}
-			}()
-		case info.IsDir():
-			stats.incrementDirectoriesMatched()
-		}
-
-		return err
-	})
-
-	wg.Wait()
-
-	if err != nil {
-		return err
-	}
-
-	return nil
-}
-
-func splitPath(path string, regexes *Regexes) (*Path, error) {
-	p := Path{}
-	var err error
-
-	split := regexes.filename.FindAllStringSubmatch(path, -1)
-
-	if len(split) < 1 || len(split[0]) < 3 {
-		return &Path{}, nil
-	}
-
-	p.base = split[0][1]
-
-	p.number, err = strconv.Atoi(split[0][2])
-
-	if err != nil {
-		return &Path{}, err
-	}
-
-	p.extension = split[0][3]
-
-	return &p, nil
-}
-
-func tryExtensions(p *Path) (string, error) {
-	var fileName string
-
-	for _, extension := range extensions {
-		fileName = fmt.Sprintf("%s%.3d%s", p.base, p.number, extension)
-
-		exists, err := fileExists(fileName)
-		if err != nil {
-			return "", err
-		}
-
-		if exists {
-			return fileName, nil
-		}
-	}
-
-	return "", nil
-}
diff --git a/cmd/root.go b/cmd/root.go
index 2c9166b..8fa71d4 100644
--- a/cmd/root.go
+++ b/cmd/root.go
@@ -5,7 +5,7 @@ Copyright © 2023 Seednode
 package cmd
 
 import (
-	"fmt"
+	"log"
 	"os"
 
 	"github.com/spf13/cobra"
 )
@@ -39,8 +39,7 @@ var rootCmd = &cobra.Command{
 	Run: func(cmd *cobra.Command, args []string) {
 		err := ServePage(args)
 		if err != nil {
-			fmt.Println(err)
-			os.Exit(1)
+			log.Fatal(err)
 		}
 	},
 }
@@ -48,7 +47,6 @@ var rootCmd = &cobra.Command{
 func Execute() {
 	err := rootCmd.Execute()
 	if err != nil {
-		fmt.Println(err)
 		os.Exit(1)
 	}
 }
diff --git a/cmd/version.go b/cmd/version.go
index 23cf362..1e704cd 100644
--- a/cmd/version.go
+++ b/cmd/version.go
@@ -10,7 +10,7 @@ import (
 	"github.com/spf13/cobra"
 )
 
-var Version = "0.35.0"
+var Version = "0.33.3"
 
 func init() {
 	rootCmd.AddCommand(versionCmd)
diff --git a/cmd/web.go b/cmd/web.go
index 5f3d8f8..418aa97 100644
--- a/cmd/web.go
+++ b/cmd/web.go
@@ -8,6 +8,7 @@ import (
 	"encoding/json"
 	"fmt"
 	"io"
+	"log"
 	"math/rand"
 	"net/http"
 	"net/url"
@@ -25,14 +26,20 @@ import (
 )
 
 const (
-	logDate            string = `2006-01-02T15:04:05.000-07:00`
-	prefix             string = `/src`
-	redirectStatusCode int    = http.StatusSeeOther
+	LogDate            string = `2006-01-02T15:04:05.000-07:00`
+	Prefix             string = `/src`
+	RedirectStatusCode int    = http.StatusSeeOther
 )
 
+type Regexes struct {
+	Alphanumeric *regexp.Regexp
+	Filename     *regexp.Regexp
+	Units        *regexp.Regexp
+}
+
 type Filters struct {
-	includes []string
-	excludes []string
+	Includes []string
+	Excludes []string
 }
 
 func (f *Filters) IsEmpty() bool {
@@ -40,105 +47,99 @@ func (f *Filters) IsEmpty() bool {
 }
 
 func (f *Filters) HasIncludes() bool {
-	return len(f.includes) != 0
+	return len(f.Includes) != 0
 }
 
-func (f *Filters) Includes() string {
-	return strings.Join(f.includes, ",")
+func (f *Filters) GetIncludes() string {
+	return strings.Join(f.Includes, ",")
 }
 
 func (f *Filters) HasExcludes() bool {
-	return len(f.excludes) != 0
+	return len(f.Excludes) != 0
 }
 
-func (f *Filters) Excludes() string {
-	return strings.Join(f.excludes, ",")
+func (f *Filters) GetExcludes() string {
+	return strings.Join(f.Excludes, ",")
 }
 
 type Index struct {
-	mutex sync.RWMutex
-	list  []string
+	Mutex sync.RWMutex
+	List  []string
 }
 
-func (i *Index) Index() []string {
-	i.mutex.RLock()
-	val := i.list
-	i.mutex.RUnlock()
+func (i *Index) Get() []string {
+	i.Mutex.RLock()
+	val := i.List
+	i.Mutex.RUnlock()
 
 	return val
 }
 
-func (i *Index) setIndex(val []string) {
-	i.mutex.Lock()
-	i.list = val
-	i.mutex.Unlock()
+func (i *Index) Set(val []string) {
+	i.Mutex.Lock()
+	i.List = val
+	i.Mutex.Unlock()
 }
 
-func (i *Index) generateCache(args []string) {
-	i.mutex.Lock()
-	i.list = []string{}
-	i.mutex.Unlock()
+func (i *Index) GenerateCache(args []string) {
+	i.Mutex.Lock()
+	i.List = []string{}
+	i.Mutex.Unlock()
 
-	fileList(args, &Filters{}, "", i)
+	getFileList(args, &Filters{}, "", i)
 }
 
 func (i *Index) IsEmpty() bool {
-	i.mutex.RLock()
-	length := len(i.list)
-	i.mutex.RUnlock()
+	i.Mutex.RLock()
+	length := len(i.List)
+	i.Mutex.RUnlock()
 
 	return length == 0
 }
 
-type Regexes struct {
-	alphanumeric *regexp.Regexp
-	filename     *regexp.Regexp
-	units        *regexp.Regexp
-}
-
 type ServeStats struct {
-	mutex sync.RWMutex
-	list  []string
-	count map[string]uint64
-	size  map[string]string
-	times map[string][]string
+	Mutex sync.RWMutex
+	List  []string
+	Count map[string]uint64
+	Size  map[string]string
+	Times map[string][]string
 }
 
-func (s *ServeStats) incrementCounter(image string, timestamp time.Time, filesize string) {
-	s.mutex.Lock()
+func (s *ServeStats) IncrementCounter(image string, timestamp time.Time, filesize string) {
+	s.Mutex.Lock()
 
-	s.count[image]++
+	s.Count[image]++
 
-	s.times[image] = append(s.times[image], timestamp.Format(logDate))
+	s.Times[image] = append(s.Times[image], timestamp.Format(LogDate))
 
-	_, exists := s.size[image]
+	_, exists := s.Size[image]
 	if !exists {
-		s.size[image] = filesize
+		s.Size[image] = filesize
 	}
 
-	if !contains(s.list, image) {
-		s.list = append(s.list, image)
+	if !contains(s.List, image) {
+		s.List = append(s.List, image)
 	}
 
-	s.mutex.Unlock()
+	s.Mutex.Unlock()
 }
 
 func (s *ServeStats) ListImages() ([]byte, error) {
-	s.mutex.RLock()
+	s.Mutex.RLock()
 
-	sortedList := s.list
+	sortedList := s.List
 
 	sort.SliceStable(sortedList, func(p, q int) bool {
 		return sortedList[p] < sortedList[q]
 	})
 
-	a := []timesServed{}
+	a := []TimesServed{}
 
-	for _, image := range s.list {
-		a = append(a, timesServed{image, s.count[image], s.size[image], s.times[image]})
+	for _, image := range s.List {
+		a = append(a, TimesServed{image, s.Count[image], s.Size[image], s.Times[image]})
 	}
 
-	s.mutex.RUnlock()
+	s.Mutex.RUnlock()
 
 	r, err := json.MarshalIndent(a, "", " ")
 	if err != nil {
@@ -148,109 +149,11 @@ func (s *ServeStats) ListImages() ([]byte, error) {
 	return r, nil
 }
 
-type timesServed struct {
-	file   string
-	served uint64
-	size   string
-	times  []string
-}
-
-func cacheHandler(args []string, index *Index) http.HandlerFunc {
-	return func(w http.ResponseWriter, r *http.Request) {
-		index.generateCache(args)
-
-		w.Header().Set("Content-Type", "text/plain")
-		w.Write([]byte("Ok"))
-	}
-}
-
-func doNothing(http.ResponseWriter, *http.Request) {}
-
-func htmlHandler(paths []string, regexes *Regexes, index *Index) http.HandlerFunc {
-	return func(w http.ResponseWriter, r *http.Request) {
-		refererUri, err := stripQueryParams(refererToUri(r.Referer()))
-		if err != nil {
-			fmt.Println(err)
-		}
-
-		filters := &Filters{
-			includes: splitQueryParams(r.URL.Query().Get("include"), regexes),
-			excludes: splitQueryParams(r.URL.Query().Get("exclude"), regexes),
-		}
-
-		sortOrder := sortOrder(r)
-
-		_, refreshInterval := refreshInterval(r, regexes)
-
-		if r.URL.Path == "/" {
-			var filePath string
-			var err error
-
-			if refererUri != "" {
-				filePath, err = nextFile(refererUri, sortOrder, regexes)
-				if err != nil {
-					fmt.Println(err)
-				}
-			}
-
-			if filePath == "" {
-				filePath, err = newFile(paths, filters, sortOrder, regexes, index)
-				switch {
-				case err != nil && err == errNoImagesFound:
-					notFound(w, r, filePath)
-
-					return
-				case err != nil:
-					fmt.Println(err)
-				}
-			}
-
-			queryParams := generateQueryParams(filters, sortOrder, refreshInterval)
-
-			newUrl := fmt.Sprintf("http://%s%s%s",
-				r.Host,
-				preparePath(filePath),
-				queryParams,
-			)
-			http.Redirect(w, r, newUrl, redirectStatusCode)
-		} else {
-			filePath := r.URL.Path
-
-			if runtime.GOOS == "windows" {
-				filePath = strings.TrimPrefix(filePath, "/")
-			}
-
-			exists, err := fileExists(filePath)
-			if err != nil {
-				fmt.Println(err)
-			}
-			if !exists {
-				notFound(w, r, filePath)
-
-				return
-			}
-
-			image, err := isImage(filePath)
-			if err != nil {
-				fmt.Println(err)
-			}
-			if !image {
-				notFound(w, r, filePath)
-
-				return
-			}
-
-			dimensions, err := imageDimensions(filePath)
-			if err != nil {
-				fmt.Println(err)
-			}
-
-			err = html(w, r, filePath, dimensions, filters, regexes)
-			if err != nil {
-				fmt.Println(err)
-			}
-		}
-	}
+type TimesServed struct {
+	File   string
+	Served uint64
+	Size   string
+	Times  []string
 }
 
 func notFound(w http.ResponseWriter, r *http.Request, filePath string) error {
@@ -258,7 +161,7 @@ func notFound(w http.ResponseWriter, r *http.Request, filePath string) error {
 
 	if Verbose {
 		fmt.Printf("%s | Unavailable file %s requested by %s\n",
-			startTime.Format(logDate),
+			startTime.Format(LogDate),
 			filePath,
 			r.RemoteAddr,
 		)
@@ -281,10 +184,10 @@ func notFound(w http.ResponseWriter, r *http.Request, filePath string) error {
 	return nil
 }
 
-func refreshInterval(r *http.Request, regexes *Regexes) (int64, string) {
+func getRefreshInterval(r *http.Request, regexes *Regexes) (int64, string) {
 	refreshInterval := r.URL.Query().Get("refresh")
 
-	if !regexes.units.MatchString(refreshInterval) {
+	if !regexes.Units.MatchString(refreshInterval) {
 		return 0, "0ms"
 	}
 
@@ -293,61 +196,12 @@ func refreshInterval(r *http.Request, regexes *Regexes) (int64, string) {
 		return 0, "0ms"
 	}
 
-	return duration.Milliseconds(), refreshInterval
+	durationInMs := duration.Milliseconds()
+
+	return durationInMs, refreshInterval
 }
 
-func ServePage(args []string) error {
-	fmt.Printf("roulette v%s\n\n", Version)
-
-	paths, err := normalizePaths(args)
-	if err != nil {
-		return err
-	}
-
-	regexes := &Regexes{
-		filename:     regexp.MustCompile(`(.+)([0-9]{3})(\..+)`),
-		alphanumeric: regexp.MustCompile(`^[a-zA-Z0-9]*$`),
-		units:        regexp.MustCompile(`^[0-9]+(ns|us|µs|ms|s|m|h)$`),
-	}
-
-	rand.Seed(time.Now().UnixNano())
-
-	index := &Index{
-		mutex: sync.RWMutex{},
-		list:  []string{},
-	}
-
-	if Cache {
-		index.generateCache(args)
-
-		http.Handle("/_/clear_cache", cacheHandler(args, index))
-	}
-
-	stats := &ServeStats{
-		mutex: sync.RWMutex{},
-		list:  []string{},
-		count: make(map[string]uint64),
-		size:  make(map[string]string),
-		times: make(map[string][]string),
-	}
-
-	http.Handle("/", htmlHandler(paths, regexes, index))
-	http.Handle(prefix+"/", http.StripPrefix(prefix, staticFileHandler(paths, stats)))
-	http.HandleFunc("/favicon.ico", doNothing)
-
-	if Debug {
-		http.Handle("/_/stats", statsHandler(args, stats))
-	}
-
-	err = http.ListenAndServe(":"+strconv.FormatInt(int64(Port), 10), nil)
-	if err != nil {
-		return err
-	}
-
-	return nil
-}
-
-func sortOrder(r *http.Request) string {
+func getSortOrder(r *http.Request) string {
 	sortOrder := r.URL.Query().Get("sort")
 	if sortOrder == "asc" || sortOrder == "desc" {
 		return sortOrder
@@ -366,7 +220,7 @@ func splitQueryParams(query string, regexes *Regexes) []string {
 	params := strings.Split(query, ",")
 
 	for i := 0; i < len(params); i++ {
-		if regexes.alphanumeric.MatchString(params[i]) {
+		if regexes.Alphanumeric.MatchString(params[i]) {
 			results = append(results, strings.ToLower(params[i]))
 		}
 	}
@@ -374,18 +228,6 @@ func splitQueryParams(query string, regexes *Regexes) []string {
 	return results
 }
 
-func generateFilePath(filePath string) string {
-	var htmlBody strings.Builder
-
-	htmlBody.WriteString(prefix)
-	if runtime.GOOS == "windows" {
-		htmlBody.WriteString(`/`)
-	}
-	htmlBody.WriteString(filePath)
-
-	return htmlBody.String()
-}
-
 func generateQueryParams(filters *Filters, sortOrder, refreshInterval string) string {
 	var hasParams bool
 
@@ -396,12 +238,12 @@ func generateQueryParams(filters *Filters, sortOrder, refreshInterval string) st
 	if Filter {
 		queryParams.WriteString("include=")
 		if filters.HasIncludes() {
-			queryParams.WriteString(filters.Includes())
+			queryParams.WriteString(filters.GetIncludes())
 		}
 
 		queryParams.WriteString("&exclude=")
 		if filters.HasExcludes() {
-			queryParams.WriteString(filters.Excludes())
+			queryParams.WriteString(filters.GetExcludes())
 		}
 
 		hasParams = true
@@ -425,69 +267,36 @@ func generateQueryParams(filters *Filters, sortOrder, refreshInterval string) st
 	return queryParams.String()
 }
 
-func html(w http.ResponseWriter, r *http.Request, filePath string, dimensions *Dimensions, filters *Filters, regexes *Regexes) error {
-	fileName := filepath.Base(filePath)
-
-	w.Header().Add("Content-Type", "text/html")
-
-	sortOrder := sortOrder(r)
-
-	refreshTimer, refreshInterval := refreshInterval(r, regexes)
-
-	queryParams := generateQueryParams(filters, sortOrder, refreshInterval)
-
-	var htmlBody strings.Builder
-	htmlBody.WriteString(``)
-	htmlBody.WriteString(``)
-	htmlBody.WriteString(fmt.Sprintf(`%s (%dx%d)`,
-		fileName,
-		dimensions.width,
-		dimensions.height))
-	htmlBody.WriteString(``)
-	if refreshInterval != "0ms" {
-		htmlBody.WriteString(fmt.Sprintf("",
-			queryParams,
-			refreshTimer))
-	}
-	htmlBody.WriteString(fmt.Sprintf(`Roulette selected: %s`,
-		queryParams,
-		generateFilePath(filePath),
-		dimensions.width,
-		dimensions.height,
-		fileName))
-	htmlBody.WriteString(``)
-
-	_, err := io.WriteString(w, gohtml.Format(htmlBody.String()))
+func stripQueryParams(u string) (string, error) {
+	uri, err := url.Parse(u)
 	if err != nil {
-		return err
+		return "", err
 	}
 
-	return nil
+	uri.RawQuery = ""
+
+	escapedUri, err := url.QueryUnescape(uri.String())
+	if err != nil {
+		return "", err
+	}
+
+	if runtime.GOOS == "windows" {
+		return strings.TrimPrefix(escapedUri, "/"), nil
+	}
+
+	return escapedUri, nil
 }
 
-func realIP(r *http.Request) string {
-	remoteAddr := strings.SplitAfter(r.RemoteAddr, ":")
+func generateFilePath(filePath string) string {
+	var htmlBody strings.Builder
 
-	if len(remoteAddr) < 1 {
-		return r.RemoteAddr
+	htmlBody.WriteString(Prefix)
+	if runtime.GOOS == "windows" {
+		htmlBody.WriteString(`/`)
 	}
+	htmlBody.WriteString(filePath)
 
-	remotePort := remoteAddr[len(remoteAddr)-1]
-
-	cfIP := r.Header.Get("Cf-Connecting-Ip")
-	xRealIP := r.Header.Get("X-Real-Ip")
-
-	switch {
-	case cfIP != "":
-		return cfIP + ":" + remotePort
-	case xRealIP != "":
-		return xRealIP + ":" + remotePort
-	default:
-		return r.RemoteAddr
-	}
+	return htmlBody.String()
 }
 
 func refererToUri(referer string) string {
@@ -500,13 +309,78 @@ func refererToUri(referer string) string {
 	return "/" + parts[3]
 }
 
-func staticFile(w http.ResponseWriter, r *http.Request, paths []string, stats *ServeStats) error {
+func getRealIp(r *http.Request) string {
+	remoteAddr := strings.SplitAfter(r.RemoteAddr, ":")
+
+	if len(remoteAddr) < 1 {
+		return r.RemoteAddr
+	}
+
+	remotePort := remoteAddr[len(remoteAddr)-1]
+
+	cfIP := r.Header.Get("Cf-Connecting-Ip")
+	xRealIp := r.Header.Get("X-Real-Ip")
+
+	switch {
+	case cfIP != "":
+		return cfIP + ":" + remotePort
+	case xRealIp != "":
+		return xRealIp + ":" + remotePort
+	default:
+		return r.RemoteAddr
+	}
+}
+
+func serveHtml(w http.ResponseWriter, r *http.Request, filePath string, dimensions *Dimensions, filters *Filters, regexes *Regexes) error {
+	fileName := filepath.Base(filePath)
+
+	w.Header().Add("Content-Type", "text/html")
+
+	sortOrder := getSortOrder(r)
+
+	refreshTimer, refreshInterval := getRefreshInterval(r, regexes)
+
+	queryParams := generateQueryParams(filters, sortOrder, refreshInterval)
+
+	var htmlBody strings.Builder
+	htmlBody.WriteString(``)
+	htmlBody.WriteString(``)
+	htmlBody.WriteString(fmt.Sprintf(`%s (%dx%d)`,
+		fileName,
+		dimensions.Width,
+		dimensions.Height))
+	htmlBody.WriteString(``)
+	if refreshInterval != "0ms" {
+		htmlBody.WriteString(fmt.Sprintf("",
+			queryParams,
+			refreshTimer))
+	}
+	htmlBody.WriteString(fmt.Sprintf(`Roulette selected: %s`,
+		queryParams,
+		generateFilePath(filePath),
+		dimensions.Width,
+		dimensions.Height,
+		fileName))
+	htmlBody.WriteString(``)
+
+	_, err := io.WriteString(w, gohtml.Format(htmlBody.String()))
+	if err != nil {
+		return err
+	}
+
+	return nil
+}
+
+func serveStaticFile(w http.ResponseWriter, r *http.Request, paths []string, stats *ServeStats) error {
 	prefixedFilePath, err := stripQueryParams(r.URL.Path)
 	if err != nil {
 		return err
 	}
 
-	filePath, err := filepath.EvalSymlinks(strings.TrimPrefix(prefixedFilePath, prefix))
+	filePath, err := filepath.EvalSymlinks(strings.TrimPrefix(prefixedFilePath, Prefix))
 	if err != nil {
 		return err
 	}
@@ -541,70 +415,201 @@ func staticFile(w http.ResponseWriter, r *http.Request, paths []string, stats *S
 
 	if Verbose {
 		fmt.Printf("%s | Served %s (%s) to %s in %s\n",
-			startTime.Format(logDate),
+			startTime.Format(LogDate),
 			filePath,
 			fileSize,
-			realIP(r),
+			getRealIp(r),
 			time.Since(startTime).Round(time.Microsecond),
 		)
 	}
 
 	if Debug {
-		stats.incrementCounter(filePath, startTime, fileSize)
+		stats.IncrementCounter(filePath, startTime, fileSize)
 	}
 
 	return nil
 }
 
-func staticFileHandler(paths []string, stats *ServeStats) http.HandlerFunc {
+func serveCacheClearHandler(args []string, index *Index) http.HandlerFunc {
 	return func(w http.ResponseWriter, r *http.Request) {
-		err := staticFile(w, r, paths, stats)
-		if err != nil {
-			fmt.Println(err)
-		}
+		index.GenerateCache(args)
+
+		w.WriteHeader(http.StatusOK)
+		w.Header().Set("Content-Type", "text/plain")
+		w.Write([]byte("Ok"))
 	}
 }
 
-func statsHandler(args []string, stats *ServeStats) http.HandlerFunc {
+func serveStatsHandler(args []string, stats *ServeStats) http.HandlerFunc {
 	return func(w http.ResponseWriter, r *http.Request) {
+		w.WriteHeader(http.StatusOK)
 		w.Header().Set("Content-Type", "application/json")
 
 		startTime := time.Now()
 
 		response, err := stats.ListImages()
 		if err != nil {
-			fmt.Println(err)
+			log.Fatal(err)
 		}
 
 		w.Write(response)
 
 		if Verbose {
 			fmt.Printf("%s | Served statistics page (%s) to %s in %s\n",
-				startTime.Format(logDate),
+				startTime.Format(LogDate),
 				humanReadableSize(len(response)),
-				realIP(r),
+				getRealIp(r),
 				time.Since(startTime).Round(time.Microsecond),
 			)
 		}
 	}
 }
 
-func stripQueryParams(u string) (string, error) {
-	uri, err := url.Parse(u)
-	if err != nil {
-		return "", err
+func serveStaticFileHandler(paths []string, stats *ServeStats) http.HandlerFunc {
+	return func(w http.ResponseWriter, r *http.Request) {
+		err := serveStaticFile(w, r, paths, stats)
+		if err != nil {
+			log.Fatal(err)
+		}
 	}
-
-	uri.RawQuery = ""
-
-	escapedUri, err := url.QueryUnescape(uri.String())
-	if err != nil {
-		return "", err
-	}
-
-	if runtime.GOOS == "windows" {
-		return strings.TrimPrefix(escapedUri, "/"), nil
-	}
-
-	return escapedUri, nil
+}
+
+func serveHtmlHandler(paths []string, regexes *Regexes, index *Index) http.HandlerFunc {
+	return func(w http.ResponseWriter, r *http.Request) {
+		refererUri, err := stripQueryParams(refererToUri(r.Referer()))
+		if err != nil {
+			log.Fatal(err)
+		}
+
+		filters := &Filters{
+			Includes: splitQueryParams(r.URL.Query().Get("include"), regexes),
+			Excludes: splitQueryParams(r.URL.Query().Get("exclude"), regexes),
+		}
+
+		sortOrder := getSortOrder(r)
+
+		_, refreshInterval := getRefreshInterval(r, regexes)
+
+		if r.URL.Path == "/" {
+			var filePath string
+			var err error
+
+			if refererUri != "" {
+				filePath, err = getNextFile(refererUri, sortOrder, regexes)
+				if err != nil {
+					log.Fatal(err)
+				}
+			}
+
+			if filePath == "" {
+				filePath, err = getNewFile(paths, filters, sortOrder, regexes, index)
+				switch {
+				case err != nil && err == ErrNoImagesFound:
+					notFound(w, r, filePath)
+
+					return
+				case err != nil:
+					log.Fatal(err)
+				}
+			}
+
+			queryParams := generateQueryParams(filters, sortOrder, refreshInterval)
+
+			newUrl := fmt.Sprintf("http://%s%s%s",
+				r.Host,
+				preparePath(filePath),
+				queryParams,
+			)
+			http.Redirect(w, r, newUrl, RedirectStatusCode)
+		} else {
+			filePath := r.URL.Path
+
+			if runtime.GOOS == "windows" {
+				filePath = strings.TrimPrefix(filePath, "/")
+			}
+
+			exists, err := fileExists(filePath)
+			if err != nil {
+				log.Fatal(err)
+			}
+			if !exists {
+				notFound(w, r, filePath)
+
+				return
+			}
+
+			image, err := isImage(filePath)
+			if err != nil {
+				log.Fatal(err)
+			}
+			if !image {
+				notFound(w, r, filePath)
+
+				return
+			}
+
+			dimensions, err := getImageDimensions(filePath)
+			if err != nil {
+				log.Fatal(err)
+			}
+
+			err = serveHtml(w, r, filePath, dimensions, filters, regexes)
+			if err != nil {
+				log.Fatal(err)
+			}
+		}
+	}
+}
+
+func doNothing(http.ResponseWriter, *http.Request) {}
+
+func ServePage(args []string) error {
+	fmt.Printf("roulette v%s\n\n", Version)
+
+	paths, err := normalizePaths(args)
+	if err != nil {
+		return err
+	}
+
+	regexes := &Regexes{
+		Filename:     regexp.MustCompile(`(.+)([0-9]{3})(\..+)`),
+		Alphanumeric: regexp.MustCompile(`^[a-zA-Z0-9]*$`),
+		Units:        regexp.MustCompile(`^[0-9]+(ns|us|µs|ms|s|m|h)$`),
+	}
+
+	rand.Seed(time.Now().UnixNano())
+
+	index := &Index{
+		Mutex: sync.RWMutex{},
+		List:  []string{},
+	}
+
+	if Cache {
+		index.GenerateCache(args)
+
+		http.Handle("/_/clear_cache", serveCacheClearHandler(args, index))
+	}
+
+	stats := &ServeStats{
+		Mutex: sync.RWMutex{},
+		List:  []string{},
+		Count: make(map[string]uint64),
+		Size:  make(map[string]string),
+		Times: make(map[string][]string),
+	}
+
+	http.Handle("/", serveHtmlHandler(paths, regexes, index))
+	http.Handle(Prefix+"/", http.StripPrefix(Prefix, serveStaticFileHandler(paths, stats)))
+	http.HandleFunc("/favicon.ico", doNothing)
+
+	if Debug {
+		http.Handle("/_/stats", serveStatsHandler(args, stats))
+	}
+
+	err = http.ListenAndServe(":"+strconv.FormatInt(int64(Port), 10), nil)
+	if err != nil {
+		return err
+	}
+
+	return nil
+}