
Commit

rewrite in go
lucat1 committed Feb 2, 2022
1 parent ed6f88c commit 2dcc43d
Showing 5 changed files with 215 additions and 140 deletions.
2 changes: 0 additions & 2 deletions .gitignore

This file was deleted.

14 changes: 0 additions & 14 deletions Makefile

This file was deleted.

3 changes: 3 additions & 0 deletions go.mod
@@ -0,0 +1,3 @@
module github.com/lucat1/statik

go 1.17
124 changes: 0 additions & 124 deletions statik.c

This file was deleted.

212 changes: 212 additions & 0 deletions statik.go
@@ -0,0 +1,212 @@
package main

import (
	"flag"
	"fmt"
	"io/fs"
	"io/ioutil"
	"log"
	"net/url"
	"os"
	"path"
	"path/filepath"
	"regexp"
	"sort"
	"strings"
	"time"
)

const (
	formatLayout = time.RFC822
	nameSpace, dateSpace, sizeSpace = 50, "30", "8"
)

var (
	baseDir, outDir string
	baseUrl *url.URL = nil

	include, exclude *regexp.Regexp = nil, nil
	empty, recursive, sortEntries bool
)

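// bytes formats a byte count as a human-readable string (B, kB, MB, ...),
// using powers of 1000.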
func bytes(b int64) string {
	const unit = 1000
	if b < unit {
		return fmt.Sprintf("%d B", b)
	}
	div, exp := int64(unit), 0
	for n := b / unit; n >= unit; n /= unit {
		div *= unit
		exp++
	}
	return fmt.Sprintf("%.1f %cB", float64(b)/float64(div), "kMGTPE"[exp])
}

// joins the baseUrl path with the given relative path and returns the url as a string
func join(rel string) string {
	cpy := baseUrl.Path
	baseUrl.Path = path.Join(baseUrl.Path, rel)
	res := baseUrl.String()
	baseUrl.Path = cpy
	return res
}

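// header renders the opening HTML of a listing page, with an "Index of"
// title and a parent-directory link when not at the root.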
func header(rel string) string {
	p := path.Join(baseUrl.Path, rel)
	str := "<html><head><title>Index of " + p + "</title></head><body><h1>Index of " + p + "</h1><hr><pre>"
	if rel != "/" {
		str += "<a href=\"" + join(rel+"/..") + "\">..</a>\n"
	}
	return str
}

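// line renders a single listing row: a link to the entry, padding to align
// the columns, the modification time, and the human-readable size.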
func line(name, path string, modTime time.Time, size int64) string {
	space := strings.Repeat(" ", nameSpace-len(name))
	return fmt.Sprintf("<a href=\"%s\">%s</a>%s %-"+dateSpace+"s %-"+sizeSpace+"s\n", join(path), name, space, modTime.Format(formatLayout), bytes(size))
}

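// footer closes the listing page and records when it was generated.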
func footer(date time.Time) string {
	return "</pre><hr>Generated by <a href=\"https://github.com/lucat1/statik\">statik</a> on " + date.Format(formatLayout) + "</body></html>"
}

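// copy reads the file at src and writes its contents to dest with mode 0644,
// aborting on any error.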
func copy(src, dest string) {
	input, err := ioutil.ReadFile(src)
	if err != nil {
		log.Fatalf("Could not open source file for copying: %s\n%s\n", src, err)
	}
	err = ioutil.WriteFile(dest, input, 0644)
	if err != nil {
		log.Fatalf("Could not write to destination file for copying: %s\n%s\n", dest, err)
	}
}

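// filter keeps only the entries whose names match the include pattern and
// do not match the exclude pattern.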
func filter(entries []fs.FileInfo) []fs.FileInfo {
	filtered := []fs.FileInfo{}
	for _, entry := range entries {
		if include.MatchString(entry.Name()) && !exclude.MatchString(entry.Name()) {
			filtered = append(filtered, entry)
		}
	}
	return filtered
}

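// generate writes an index.html listing for dir into the corresponding output
// directory, copies regular files alongside it, recurses into subdirectories
// when enabled, and reports whether this directory should be linked from its
// parent's listing.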
func generate(dir string) bool {
	entries, err := ioutil.ReadDir(dir)
	if err != nil {
		log.Fatalf("Could not read input directory: %s\n%s\n", dir, err)
	}
	entries = filter(entries)
	if len(entries) == 0 {
		return empty
	}
	if sortEntries {
		sort.Slice(entries, func(i, j int) bool {
			return entries[i].IsDir() && !entries[j].IsDir()
		})
	}

	if !strings.HasSuffix(dir, "/") {
		dir += "/"
	}
	rel := strings.Replace(dir, baseDir, "", 1)
	out := path.Join(outDir, rel)
	if err := os.Mkdir(out, os.ModePerm); err != nil {
		log.Fatalf("Could not create output subdirectory: %s\n%s\n", out, err)
	}
	htmlPath := path.Join(out, "index.html")
	html, err := os.OpenFile(htmlPath, os.O_RDWR|os.O_CREATE, 0666)
	if err != nil {
		log.Fatalf("Could not create output index.html: %s\n%s\n", htmlPath, err)
	}

	content := header(rel)
	for _, entry := range entries {
		pth := path.Join(dir, entry.Name())
		// Avoid an infinite loop when the output directory lives inside the input tree
		if pth == outDir {
			continue
		}
		// Only list directories when recursing, and only those which are not empty
		if !entry.IsDir() || (recursive && generate(pth)) {
			content += line(entry.Name(), path.Join(rel, entry.Name()), entry.ModTime(), entry.Size())
		}

		// Copy all files over to the web root
		if !entry.IsDir() {
			copy(pth, path.Join(out, entry.Name()))
		}
	}
	content += footer(time.Now())
	if n, err := html.Write([]byte(content)); err != nil || n != len(content) {
		log.Fatalf("Could not write to index.html: %s\n%s\n", htmlPath, err)
	}
	if err := html.Close(); err != nil {
		log.Fatalf("Could not close index.html: %s\n%s\n", htmlPath, err)
	}
	log.Printf("Generated data for directory: %s\n", dir)

	return true
}

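// main parses the command-line flags and the optional positional arguments
// (one argument is used as the destination, two as source and destination,
// defaulting to "." and "site"), then generates the listing into the output
// directory. A hypothetical invocation: statik -b https://example.org ./public ./site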
func main() {
	i := flag.String("i", ".*", "A regex pattern to include files into the listing")
	e := flag.String("e", "\\.git(hub)?", "A regex pattern to exclude files from the listing")
	r := flag.Bool("r", true, "Recursively scan the file tree")
	emp := flag.Bool("empty", false, "Whether to list empty directories")
	s := flag.Bool("sort", true, "Sort files A-z and by type")
	b := flag.String("b", "http://localhost", "The base URL")
	flag.Parse()

	args := flag.Args()
	src, dest := ".", "site"
	if len(args) > 2 {
		log.Fatal("Invalid number of arguments, expected at most two")
	}
	if len(args) == 1 {
		dest = args[0]
	} else if len(args) == 2 {
		src = args[0]
		dest = args[1]
	}

	log.Println("Running with parameters:")
	log.Println("\tInclude:\t", *i)
	log.Println("\tExclude:\t", *e)
	log.Println("\tRecursive:\t", *r)
	log.Println("\tEmpty:\t\t", *emp)
	log.Println("\tSource:\t\t", src)
	log.Println("\tDestination:\t", dest)
	log.Println("\tBase URL:\t", *b)

	var err error
	if include, err = regexp.Compile(*i); err != nil {
		log.Fatal("Invalid regexp for include matching: ", err)
	}
	if exclude, err = regexp.Compile(*e); err != nil {
		log.Fatal("Invalid regexp for exclude matching: ", err)
	}
	recursive = *r
	empty = *emp
	sortEntries = *s

	var wd string
	if !filepath.IsAbs(src) || !filepath.IsAbs(dest) {
		wd, err = os.Getwd()
		if err != nil {
			log.Fatal("Could not get the current working directory: ", err)
		}
	}
	if baseDir = src; !filepath.IsAbs(src) {
		baseDir = path.Join(wd, src)
	}
	if outDir = dest; !filepath.IsAbs(dest) {
		outDir = path.Join(wd, dest)
	}
	if _, err := os.Stat(outDir); err == nil {
		if err = os.RemoveAll(outDir); err != nil {
			log.Fatalf("Could not remove the previous contents of the output directory: %s\n%s\n", outDir, err)
		}
	}
	if baseUrl, err = url.Parse(*b); err != nil {
		log.Fatalf("Could not parse base URL: %s\n%s\n", *b, err)
	}
	generate(baseDir)
}
