Implement "global" state for modules, OnLoad and OnUnload callbacks

Tested for memory leaks and performance. Obviously the added locking and
global state are not awesome, but the alternative is a little uglier IMO:
we'd have to make some sort of "liaison" value that stores the state,
then pass it around to every module, which makes LoadModule a lot less
accessible, and each module would need to maintain a reference to it...
nope, just ugly. I think this is the cleaner solution: just make sure
only one Start() happens at a time, and keep global things global.

A very simple log middleware is included as an example.

Might need to reorder the operations in Start() and handle errors
differently, etc. Otherwise, I'm mostly happy with this solution...
Matthew Holt
2019-04-08 00:00:14 -06:00
parent 3eae6d43b6
commit 402f423693
3 changed files with 123 additions and 11 deletions
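
The message above accepts some locking and global state in exchange for keeping LoadModule simple. To make that concrete, here is a minimal sketch of what the core side of such a design could look like: per-module state lives in a package-level map keyed by module name, and a package-level mutex makes sure only one Start() (or unload) runs at a time. The Module fields and the OnLoad/OnUnload signatures below are the ones used in the example diff that follows; the maps, the mutex, and the Start/Stop bodies are illustrative assumptions, not the actual Caddy 2 implementation.

package caddy2

import "sync"

// Module describes a registered module. The OnLoad and OnUnload
// signatures match the ones used in the example diff; the rest of
// this sketch is hypothetical.
type Module struct {
	Name     string
	New      func() (interface{}, error)
	OnLoad   func(instances []interface{}, priorState interface{}) (interface{}, error)
	OnUnload func(state interface{}) error
}

var (
	loadMu      sync.Mutex                     // serializes Start/Stop so only one load happens at a time
	modules     = make(map[string]Module)      // registered modules, keyed by name
	moduleState = make(map[string]interface{}) // "global" per-module state, kept across config loads
)

// RegisterModule records a module so it can be loaded by name
// (normally called from a module's init function).
func RegisterModule(m Module) { modules[m.Name] = m }

// Start applies a new config; instancesByModule maps each module name
// to the instances of that module created for this load.
func Start(instancesByModule map[string][]interface{}) error {
	loadMu.Lock()
	defer loadMu.Unlock()

	for name, instances := range instancesByModule {
		mod := modules[name]
		if mod.OnLoad == nil {
			continue
		}
		// Hand the module its prior state (nil on the first load) and
		// keep whatever it returns for the next load.
		newState, err := mod.OnLoad(instances, moduleState[name])
		if err != nil {
			return err
		}
		moduleState[name] = newState
	}
	return nil
}

// Stop gives each module a chance to clean up its global state.
func Stop() error {
	loadMu.Lock()
	defer loadMu.Unlock()

	for name, state := range moduleState {
		if mod, ok := modules[name]; ok && mod.OnUnload != nil {
			if err := mod.OnUnload(state); err != nil {
				return err
			}
		}
		delete(moduleState, name)
	}
	return nil
}

Holding one lock around the whole load keeps the OnLoad calls race-free at the cost of serializing config loads, which is exactly the trade-off the message accepts.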


@@ -12,13 +12,31 @@ import (
 func init() {
 	caddy2.RegisterModule(caddy2.Module{
 		Name: "http.middleware.log",
-		New:  func() (interface{}, error) { return &Log{}, nil },
+		New:  func() (interface{}, error) { return new(Log), nil },
+		OnLoad: func(instances []interface{}, priorState interface{}) (interface{}, error) {
+			var counter int
+			if priorState != nil {
+				counter = priorState.(int)
+			}
+			counter++
+			for _, inst := range instances {
+				logInst := inst.(*Log)
+				logInst.counter = counter
+			}
+			log.Println("State is now:", counter)
+			return counter, nil
+		},
+		OnUnload: func(state interface{}) error {
+			log.Println("Closing log files, state:", state)
+			return nil
+		},
 	})
 }
 
 // Log implements a simple logging middleware.
 type Log struct {
 	Filename string
+	counter  int
 }
 
 func (l *Log) ServeHTTP(w http.ResponseWriter, r *http.Request, next caddyhttp.Handler) error {
@@ -28,7 +46,7 @@ func (l *Log) ServeHTTP(w http.ResponseWriter, r *http.Request, next caddyhttp.Handler) error {
 		return err
 	}
 
-	log.Println("latency:", time.Now().Sub(start))
+	log.Println("latency:", time.Now().Sub(start), l.counter)
 	return nil
 }
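
A usage note on the example: the counter only demonstrates that state survives config reloads. A more realistic use of the same hooks is to share a resource among all instances of the module and release it on unload, which is what the "Closing log files" message hints at. Below is a hedged sketch of that pattern; the shared file handle, the hardcoded filename, and the file field on Log are hypothetical additions for illustration, not part of this commit.

package caddylog

import "os"

// Log mirrors the middleware above, but with a hypothetical shared
// file handle in place of the demo counter.
type Log struct {
	Filename string
	file     *os.File // shared by all instances created in one config load
}

// onLoad and onUnload have the same signatures as the OnLoad/OnUnload
// fields in the registration above and would be plugged in there.
var onLoad = func(instances []interface{}, priorState interface{}) (interface{}, error) {
	// Reuse the file opened by a previous config load, if there was one;
	// otherwise open it once for all instances.
	f, ok := priorState.(*os.File)
	if !ok {
		var err error
		f, err = os.OpenFile("access.log", os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
		if err != nil {
			return nil, err
		}
	}
	for _, inst := range instances {
		inst.(*Log).file = f
	}
	return f, nil
}

var onUnload = func(state interface{}) error {
	// The last config using this module is gone; release the shared file.
	return state.(*os.File).Close()
}

Returning the *os.File as the module state is what lets the next config load keep appending to the same file instead of reopening it.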