1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
|
package main
import (
"html/template"
"log"
"net/http"
"sort"
"strings"
)
// TemplateHandler wraps another http.Handler (presumably a file server —
// confirm against the caller) and rewrites its directory-listing
// responses through an HTML template; plain files pass through untouched.
type TemplateHandler struct {
// handler is the wrapped handler that actually serves files and
// produces the raw directory listings.
handler http.Handler
}
// ServeHTTP serves the request, intercepting directory listings so they
// can be rendered through the HTML page template instead of the standard
// library's bare "<pre>" index.
//
// Requests whose path does not end in "/" (plain files) are delegated
// straight to the wrapped handler. Directory requests are first replayed
// into a recorder so the response can be inspected and rewritten.
func (t *TemplateHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
	w.Header().Set("Server", "uhttpd")
	// We use our custom handler to prettify directories. Files are served via default.
	if !strings.HasSuffix(r.URL.Path, "/") {
		t.handler.ServeHTTP(w, r)
		return
	}
	rec := NewRecorder()
	defer rec.Body.Reset()
	// Record the response instead of writing it directly so we can decide
	// how to present it.
	t.handler.ServeHTTP(rec, r)
	// Redirects (e.g. canonicalizing "/dir" -> "/dir/") are replayed against
	// the real writer: let the standard lib handle all the caching.
	if rec.Code > 300 && rec.Code < 400 {
		log.Println("Code: ", rec.Code)
		t.handler.ServeHTTP(w, r)
		return
	}
	// Copy the recorded headers onto the real response first.
	for k, v := range rec.Header() {
		w.Header()[k] = v
	}
	// Not found: render the custom 404 page.
	if rec.Code == 404 {
		// Parse the template BEFORE committing the status line: once
		// WriteHeader has been called, a later WriteHeader(500) is a
		// silent no-op (the original code hit exactly that bug).
		tmpl, err := template.New("404").Parse(get404())
		if err != nil {
			log.Println(err.Error())
			w.WriteHeader(500)
			w.Write([]byte(err.Error()))
			return
		}
		w.Header().Set("Content-Type", "text/html")
		w.WriteHeader(404)
		err = tmpl.Execute(w, struct {
			URL     string
			Favicon template.HTML
		}{URL: r.URL.Path, Favicon: getFavicon()})
		if err != nil {
			// The 404 status and possibly part of the body are already
			// sent; all we can usefully do is log.
			log.Println(err.Error())
		}
		return
	}
	// Not HTML: we serve a file instead of a html page. Propagate the
	// recorded status code — previously a recorded non-200 (e.g. 403)
	// was silently replayed as an implicit 200.
	if !strings.Contains(w.Header().Get("Content-Type"), "text/html") {
		w.WriteHeader(rec.Code)
		w.Write(rec.Body.Bytes())
		return
	}
	data := rec.Body.String()
	// The stdlib directory index starts with "<pre>"; prettify it.
	// Anything else is passed through verbatim.
	if strings.HasPrefix(data, "<pre>") {
		execTemplate(w, r, data)
	} else {
		w.Write(rec.Body.Bytes())
	}
}
// execTemplate rewrites the standard library's plain "<pre>" directory
// listing (passed in as data) into the pretty HTML directory page.
// Links are split into directories and files, each group is sorted
// independently, and directories are listed first.
func execTemplate(w http.ResponseWriter, r *http.Request, data string) {
	// We get all links in the document.
	allLinks := getToken(data)
	// Split into directory links and file links. Pre-size both to the
	// upper bound so appends never reallocate.
	dirs := make([]Link, 0, len(allLinks))
	links := make([]Link, 0, len(allLinks))
	for _, l := range allLinks {
		// Use the range value directly instead of re-indexing the slice.
		if l.IsDir {
			dirs = append(dirs, l)
		} else {
			links = append(links, l)
		}
	}
	// Sort the two groups independently ...
	sort.Sort(LinksAsSlice(dirs))
	sort.Sort(LinksAsSlice(links))
	// ... and merge them back so directories are listed first.
	allLinks = append(dirs, links...)
	tmpl, err := template.New("page").Parse(getTemplate())
	if err != nil {
		log.Println(err.Error())
		w.WriteHeader(500)
		w.Write([]byte(err.Error()))
		return
	}
	err = tmpl.Execute(w, struct {
		Links       []Link
		URL         string
		Favicon     template.HTML
		AllowUpload bool
	}{Links: allLinks, URL: r.URL.Path, Favicon: getFavicon(), AllowUpload: _allow_upload})
	if err != nil {
		// Headers (and possibly part of the body) may already be written;
		// just log the failure.
		log.Println(err)
	}
}
|