Skip to content

Commit

Permalink
format
Browse files Browse the repository at this point in the history
  • Loading branch information
lucat1 committed Feb 2, 2022
1 parent 2dcc43d commit be5eb46
Showing 1 changed file with 165 additions and 163 deletions.
328 changes: 165 additions & 163 deletions statik.go
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
package main

import (
	"flag"
	"fmt"
	"io/fs"
	"io/ioutil"
	"log"
	"net/url"
	"os"
	"path"
	"path/filepath"
	"regexp"
	"sort"
	"strings"
	"time"
)

const (
	// formatLayout is the timestamp layout used everywhere in the
	// generated pages (modification times and the footer date).
	formatLayout = time.RFC822

	// Column layout of the <pre> listing: nameSpace is the manual pad
	// width for names (int), dateSpace and sizeSpace are %-<w>s format
	// widths (strings spliced into the fmt verb).
	nameSpace, dateSpace, sizeSpace = 50, "30", "8"
)

var (
	// baseDir and outDir are the absolute source and output roots,
	// resolved from the positional arguments in main.
	baseDir, outDir string
	// baseUrl is the parsed -b flag; every generated link is joined
	// onto its path.
	baseUrl *url.URL = nil

	// include and exclude filter directory entries by name (-i / -e).
	include, exclude *regexp.Regexp = nil, nil
	// empty, recursive and sortEntries mirror the -empty, -r and -sort flags.
	empty, recursive, sortEntries bool
)

// bytes renders a byte count with an SI unit suffix,
// e.g. 500 -> "500 B", 1500 -> "1.5 kB", 1000000 -> "1.0 MB".
func bytes(b int64) string {
	const unit = 1000
	if b < unit {
		return fmt.Sprintf("%d B", b)
	}
	// Find the largest power of 1000 not exceeding b; exp indexes the
	// unit-prefix table below.
	div, exp := int64(unit), 0
	for n := b / unit; n >= unit; n /= unit {
		div *= unit
		exp++
	}
	return fmt.Sprintf("%.1f %cB", float64(b)/float64(div), "kMGTPE"[exp])
}

// join joins the baseUrl path with the given relative path and returns the
// resulting URL as a string. baseUrl.Path is temporarily mutated and then
// restored, so the function is not safe for concurrent use.
func join(rel string) string {
	cpy := baseUrl.Path
	baseUrl.Path = path.Join(baseUrl.Path, rel)
	res := baseUrl.String()
	baseUrl.Path = cpy
	return res
}

// header renders the opening HTML of a directory listing for the relative
// path rel, adding a parent-directory ("..") link for everything but the root.
func header(rel string) string {
	// NOTE(review): path.Join is called with a single concatenated
	// argument; presumably intended as path.Join(baseUrl.Path, rel) —
	// behavior is equivalent for cleaning, but confirm.
	path := path.Join(baseUrl.Path + rel)
	str := "<html><head><title>Index of " + path + "</title></head><body><h1>Index of " + path + "</h1><hr><pre>"
	if rel != "/" {
		str += "<a href=\"" + join(rel+"/..") + "\">..</a>\n"
	}
	return str
}

// line renders one listing row: a link padded to the name column, followed by
// the modification time and human-readable size columns.
func line(name, path string, modTime time.Time, size int64) string {
	// Clamp the pad: strings.Repeat panics on a negative count, which
	// would crash on any name longer than nameSpace characters.
	pad := nameSpace - len(name)
	if pad < 0 {
		pad = 0
	}
	space := strings.Repeat(" ", pad)
	return fmt.Sprintf("<a href=\"%s\">%s</a>%s %-"+dateSpace+"s %-"+sizeSpace+"s\n", join(path), name, space, modTime.Format(formatLayout), bytes(size))
}

// footer renders the closing HTML of a listing page, stamping the generation
// date and a link back to the statik project.
func footer(date time.Time) string {
	return "</pre><hr>Generated by <a href=\"https://github.com/lucat1/statik\">statik</a> on " + date.Format(formatLayout) + "</body></html>"
}

// copy duplicates the regular file at src to dest (mode 0644), aborting the
// program on any I/O error. The whole file is buffered in memory, which is
// fine for the small static assets this tool serves.
func copy(src, dest string) {
	// os.ReadFile/os.WriteFile replace the deprecated ioutil equivalents
	// (Go 1.16+; the file already relies on 1.16 via io/fs).
	input, err := os.ReadFile(src)
	if err != nil {
		log.Fatalf("Could not open source file for copying: %s\n%s\n", src, err)
	}
	err = os.WriteFile(dest, input, 0644)
	if err != nil {
		log.Fatalf("Could not write to destination file for copying: %s\n%s\n", dest, err)
	}
}

// filter returns the entries whose names match the include pattern and do not
// match the exclude pattern, preserving their order.
func filter(entries []fs.FileInfo) []fs.FileInfo {
	// Preallocate for the common case where most entries survive.
	filtered := make([]fs.FileInfo, 0, len(entries))
	for _, entry := range entries {
		if include.MatchString(entry.Name()) && !exclude.MatchString(entry.Name()) {
			filtered = append(filtered, entry)
		}
	}
	return filtered
}

// generate walks dir, writes an index.html listing into the mirrored output
// subdirectory and copies regular files across. It returns whether the parent
// listing should show this directory: empty directories report the -empty
// flag, non-empty ones report its negation. Any I/O failure aborts the
// program via log.Fatalf.
func generate(dir string) bool {
	entries, err := ioutil.ReadDir(dir)
	if err != nil {
		log.Fatalf("Could not read input directory: %s\n%s\n", dir, err)
	}
	entries = filter(entries)
	if len(entries) == 0 {
		return empty
	}
	if sortEntries {
		// Directories first; ReadDir already yields names sorted A-z,
		// and sort.Slice's instability does not disturb that within
		// each group enough to matter for a listing.
		sort.Slice(entries, func(i, j int) bool {
			return entries[i].IsDir() && !entries[j].IsDir()
		})
	}

	// Normalize dir with a trailing slash so the baseDir prefix strip below
	// yields a clean relative path.
	if !strings.HasSuffix(dir, "/") {
		dir += "/"
	}
	rel := strings.Replace(dir, baseDir, "", 1)
	out := path.Join(outDir, rel)
	if err := os.Mkdir(out, os.ModePerm); err != nil {
		log.Fatalf("Could not create output *sub*directory: %s\n%s\n", out, err)
	}
	htmlPath := path.Join(out, "index.html")
	html, err := os.OpenFile(htmlPath, os.O_RDWR|os.O_CREATE, 0666)
	if err != nil {
		log.Fatalf("Could not create output index.html: %s\n%s\n", htmlPath, err)
	}

	content := header(rel)
	for _, entry := range entries {
		pth := path.Join(dir, entry.Name())
		// Never descend into the output tree itself: that would recurse
		// forever when dest lives inside src.
		if pth == outDir {
			continue
		}
		// Only list directories when recursing, and only those which are
		// not empty (generate reports that via its return value).
		if !entry.IsDir() || recursive && generate(pth) {
			content += line(entry.Name(), path.Join(rel, entry.Name()), entry.ModTime(), entry.Size())
		}

		// Copy all files over to the web root.
		if !entry.IsDir() {
			copy(pth, path.Join(out, entry.Name()))
		}
	}
	content += footer(time.Now())
	if n, err := html.Write([]byte(content)); err != nil || n != len(content) {
		log.Fatalf("Could not write to index.html: %s\n%s\n", htmlPath, err)
	}
	if err := html.Close(); err != nil {
		log.Fatalf("Could not close index.html: %s\n%s\n", htmlPath, err)
	}
	log.Printf("Generated data for directory: %s\n", dir)

	return !empty
}

// main parses the CLI flags and the positional [src] [dest] arguments,
// compiles the include/exclude filters, resolves both paths to absolute
// form, wipes any previous output directory and generates the static
// listing starting from baseDir.
func main() {
	i := flag.String("i", ".*", "A regex pattern to include files into the listing")
	e := flag.String("e", "\\.git(hub)?", "A regex pattern to exclude files from the listing")
	r := flag.Bool("r", true, "Recursively scan the file tree")
	emp := flag.Bool("empty", false, "Whether to list empty directories")
	s := flag.Bool("sort", true, "Sort files A-z and by type")
	b := flag.String("b", "http://localhost", "The base URL")
	flag.Parse()

	// Positional arguments: none (defaults), [dest], or [src dest].
	args := flag.Args()
	src, dest := ".", "site"
	if len(args) > 2 {
		log.Fatal("Invalid number of arguments, expected two at max")
	}
	if len(args) == 1 {
		dest = args[0]
	} else if len(args) == 2 {
		src = args[0]
		dest = args[1]
	}

	log.Println("Running with parameters:")
	log.Println("\tInclude:\t", *i)
	log.Println("\tExclude:\t", *e)
	log.Println("\tRecursive:\t", *r)
	log.Println("\tEmpty:\t\t", *emp)
	log.Println("\tSource:\t\t", src)
	log.Println("\tDestination:\t", dest)
	log.Println("\tBase URL:\t", *b)

	var err error
	if include, err = regexp.Compile(*i); err != nil {
		log.Fatal("Invalid regexp for include matching", err)
	}
	if exclude, err = regexp.Compile(*e); err != nil {
		log.Fatal("Invalid regexp for exclude matching", err)
	}
	recursive = *r
	empty = *emp
	sortEntries = *s

	// Resolve the working directory only when at least one path is relative.
	var wd string
	if !filepath.IsAbs(src) || !filepath.IsAbs(dest) {
		wd, err = os.Getwd()
		if err != nil {
			log.Fatal("Could not get currently working directory", err)
		}
	}
	if baseDir = src; !filepath.IsAbs(src) {
		baseDir = path.Join(wd, src)
	}
	if outDir = dest; !filepath.IsAbs(dest) {
		outDir = path.Join(wd, dest)
	}
	// Always start from a clean output directory.
	if _, err := os.Stat(outDir); err == nil {
		if err = os.RemoveAll(outDir); err != nil {
			log.Fatalf("Could not remove output directory previous contents: %s\n%s\n", outDir, err)
		}
	}
	if baseUrl, err = url.Parse(*b); err != nil {
		log.Fatalf("Could not parse base URL: %s\n%s\n", *b, err)
	}
	generate(baseDir)
}

0 comments on commit be5eb46

Please sign in to comment.