summaryrefslogtreecommitdiff
path: root/imagestore
diff options
context:
space:
mode:
authorhorus_arch2015-04-19 22:09:52 +0200
committerhorus_arch2015-04-19 22:09:52 +0200
commit01e9a34952bd6ddd383680b0ca2312e476ad07a6 (patch)
tree00902575e5c271cc5d35ea65aa8795b8caeb97bc /imagestore
downloadmandible-01e9a34952bd6ddd383680b0ca2312e476ad07a6.tar.gz
Initial commit.
Diffstat (limited to 'imagestore')
-rw-r--r--imagestore/factory.go121
-rw-r--r--imagestore/gcsstore.go62
-rw-r--r--imagestore/hash.go76
-rw-r--r--imagestore/localstore.go92
-rw-r--r--imagestore/namepathmapper.go34
-rw-r--r--imagestore/s3store.go53
-rw-r--r--imagestore/store.go16
-rw-r--r--imagestore/storeobject.go8
-rw-r--r--imagestore/strippath.go13
9 files changed, 475 insertions, 0 deletions
diff --git a/imagestore/factory.go b/imagestore/factory.go
new file mode 100644
index 0000000..90bb32a
--- /dev/null
+++ b/imagestore/factory.go
@@ -0,0 +1,121 @@
+package imagestore
+
+import (
+ "io/ioutil"
+ "log"
+ "os"
+
+ "mandible/config"
+ "github.com/mitchellh/goamz/aws"
+ "github.com/mitchellh/goamz/s3"
+
+ "golang.org/x/oauth2"
+ "golang.org/x/oauth2/google"
+ gcloud "google.golang.org/cloud"
+ gcs "google.golang.org/cloud/storage"
+)
+
// Factory constructs ImageStore implementations and their supporting
// objects (StoreObjects, HashGenerators) from the application configuration.
type Factory struct {
	conf *config.Configuration // per-store settings plus HashLength
}
+
+func NewFactory(conf *config.Configuration) *Factory {
+ return &Factory{conf}
+}
+
+func (this *Factory) NewImageStores() []ImageStore {
+ stores := []ImageStore{}
+
+ for _, configWrapper := range this.conf.Stores {
+ switch configWrapper["Type"] {
+ case "s3":
+ store := this.NewS3ImageStore(configWrapper)
+ stores = append(stores, store)
+ case "gcs":
+ store := this.NewGCSImageStore(configWrapper)
+ stores = append(stores, store)
+ case "local":
+ store := this.NewLocalImageStore(configWrapper)
+ stores = append(stores, store)
+ default:
+ log.Fatal("Unsupported store %s", configWrapper["Type"])
+ }
+ }
+
+ return stores
+}
+
+func (this *Factory) NewS3ImageStore(conf map[string]string) ImageStore {
+ bucket := os.Getenv("S3_BUCKET")
+ if len(bucket) == 0 {
+ bucket = conf["BucketName"]
+ }
+
+ auth, err := aws.EnvAuth()
+ if err != nil {
+ log.Fatal(err)
+ }
+
+ client := s3.New(auth, aws.Regions[conf["Region"]])
+ mapper := NewNamePathMapper(conf["NamePathRegex"], conf["NamePathMap"])
+
+ return NewS3ImageStore(
+ bucket,
+ conf["StoreRoot"],
+ client,
+ mapper,
+ )
+}
+
+func (this *Factory) NewGCSImageStore(conf map[string]string) ImageStore {
+ jsonKey, err := ioutil.ReadFile(conf["KeyFile"])
+ if err != nil {
+ log.Fatal(err)
+ }
+ cloudConf, err := google.JWTConfigFromJSON(
+ jsonKey,
+ gcs.ScopeFullControl,
+ )
+ if err != nil {
+ log.Fatal(err)
+ }
+
+ bucket := os.Getenv("GCS_BUCKET")
+ if len(bucket) == 0 {
+ bucket = conf["BucketName"]
+ }
+
+ ctx := gcloud.NewContext(conf["AppID"], cloudConf.Client(oauth2.NoContext))
+ mapper := NewNamePathMapper(conf["NamePathRegex"], conf["NamePathMap"])
+
+ return NewGCSImageStore(
+ ctx,
+ bucket,
+ conf["StoreRoot"],
+ mapper,
+ )
+}
+
+func (this *Factory) NewLocalImageStore(conf map[string]string) ImageStore {
+ mapper := NewNamePathMapper(conf["NamePathRegex"], conf["NamePathMap"])
+ return NewLocalImageStore(conf["StoreRoot"], mapper)
+}
+
+func (this *Factory) NewStoreObject(name string, mime string, imgType string) *StoreObject {
+ return &StoreObject{
+ Name: name,
+ MimeType: mime,
+ Type: imgType,
+ }
+}
+
+func (this *Factory) NewHashGenerator(store ImageStore) *HashGenerator {
+ hashGen := &HashGenerator{
+ make(chan string),
+ this.conf.HashLength,
+ store,
+ }
+
+ hashGen.init()
+ return hashGen
+}
diff --git a/imagestore/gcsstore.go b/imagestore/gcsstore.go
new file mode 100644
index 0000000..798d4eb
--- /dev/null
+++ b/imagestore/gcsstore.go
@@ -0,0 +1,62 @@
+package imagestore
+
+import (
+ "io/ioutil"
+ "log"
+
+ "golang.org/x/net/context"
+ "google.golang.org/cloud/storage"
+)
+
// GCSImageStore persists images to a Google Cloud Storage bucket and
// exposes them via public storage.googleapis.com URLs.
type GCSImageStore struct {
	ctx            context.Context // authenticated gcloud context used for all API calls
	bucketName     string          // destination bucket
	storeRoot      string          // optional key prefix; "" stores at the bucket root
	namePathMapper *NamePathMapper // maps object names to storage paths
}
+
+func NewGCSImageStore(ctx context.Context, bucket string, root string, mapper *NamePathMapper) *GCSImageStore {
+ return &GCSImageStore{
+ ctx: ctx,
+ bucketName: bucket,
+ storeRoot: root,
+ namePathMapper: mapper,
+ }
+}
+
+func (this *GCSImageStore) Exists(obj *StoreObject) (bool, error) {
+ _, err := storage.StatObject(this.ctx, this.bucketName, this.toPath(obj))
+ if err != nil {
+ return false, err
+ }
+ return true, nil
+}
+
+func (this *GCSImageStore) Save(src string, obj *StoreObject) (*StoreObject, error) {
+ data, err := ioutil.ReadFile(src)
+ if err != nil {
+ log.Printf("error on read file: %s", err)
+ return nil, err
+ }
+
+ wc := storage.NewWriter(this.ctx, this.bucketName, this.toPath(obj))
+ wc.ContentType = obj.MimeType
+ if _, err := wc.Write(data); err != nil {
+ log.Printf("error on write data: %s", err)
+ return nil, err
+ }
+ if err := wc.Close(); err != nil {
+ log.Printf("error on close writer: %s", err)
+ return nil, err
+ }
+
+ obj.Url = "https://storage.googleapis.com/" + this.bucketName + "/" + this.toPath(obj)
+ return obj, nil
+}
+
+func (this *GCSImageStore) toPath(obj *StoreObject) string {
+ if this.storeRoot != "" {
+ return this.storeRoot + "/" + this.namePathMapper.mapToPath(obj)
+ }
+ return this.namePathMapper.mapToPath(obj)
+}
diff --git a/imagestore/hash.go b/imagestore/hash.go
new file mode 100644
index 0000000..d24e6fc
--- /dev/null
+++ b/imagestore/hash.go
@@ -0,0 +1,76 @@
+package imagestore
+
+import (
+ "crypto/rand"
+ "log"
+)
+
// HashGenerator hands out random alphanumeric names that have been
// checked for absence in the backing ImageStore. Names are produced by a
// background goroutine (see init) and consumed via Get.
type HashGenerator struct {
	hashGetter chan string // stream of pre-validated candidate names
	length     int         // number of characters per name
	store      ImageStore  // store consulted for collisions
}
+
+func (this *HashGenerator) init() {
+ go func() {
+ storeObj := &StoreObject{
+ "",
+ "",
+ "original",
+ "",
+ }
+
+ for {
+ str := ""
+
+ for len(str) < this.length {
+ c := 10
+ bArr := make([]byte, c)
+ _, err := rand.Read(bArr)
+ if err != nil {
+ log.Println("error:", err)
+ break
+ }
+
+ for _, b := range bArr {
+ if len(str) == this.length {
+ break
+ }
+
+ /**
+ * Each byte will be in [0, 256), but we only care about:
+ *
+ * [48, 57] 0-9
+ * [65, 90] A-Z
+ * [97, 122] a-z
+ *
+ * Which means that the highest bit will always be zero, since the last byte with high bit
+ * zero is 01111111 = 127 which is higher than 122. Lower our odds of having to re-roll a byte by
+ * dividing by two (right bit shift of 1).
+ */
+
+ b = b >> 1
+
+ // The byte is any of 0-9 A-Z a-z
+ byteIsAllowable := (b >= 48 && b <= 57) || (b >= 65 && b <= 90) || (b >= 97 && b <= 122)
+
+ if byteIsAllowable {
+ str += string(b)
+ }
+ }
+
+ }
+
+ storeObj.Name = str
+
+ exists, _ := this.store.Exists(storeObj)
+ if !exists {
+ this.hashGetter <- str
+ }
+ }
+ }()
+}
+
+func (this *HashGenerator) Get() string {
+ return <-this.hashGetter
+}
diff --git a/imagestore/localstore.go b/imagestore/localstore.go
new file mode 100644
index 0000000..4df2931
--- /dev/null
+++ b/imagestore/localstore.go
@@ -0,0 +1,92 @@
+package imagestore
+
+import (
+ "bufio"
+ "io"
+ "os"
+ "path"
+)
+
// LocalImageStore persists images on the local filesystem beneath a
// configured root directory.
type LocalImageStore struct {
	storeRoot      string          // base directory for all stored files
	namePathMapper *NamePathMapper // maps object names to relative paths
}
+
+func NewLocalImageStore(root string, mapper *NamePathMapper) *LocalImageStore {
+ return &LocalImageStore{
+ storeRoot: root,
+ namePathMapper: mapper,
+ }
+}
+
+func (this *LocalImageStore) Exists(obj *StoreObject) (bool, error) {
+ if _, err := os.Stat(this.toPath(obj)); os.IsNotExist(err) {
+ return false, err
+ }
+
+ return true, nil
+}
+
+func (this *LocalImageStore) Save(src string, obj *StoreObject) (*StoreObject, error) {
+ // open input file
+ fi, err := os.Open(src)
+ if err != nil {
+ return nil, err
+ }
+
+ defer fi.Close()
+
+ // make a read buffer
+ r := bufio.NewReader(fi)
+
+ // open output file
+ this.createParent(obj)
+ fo, err := os.Create(this.toPath(obj))
+ if err != nil {
+ return nil, err
+ }
+
+ defer fo.Close()
+
+ // make a write buffer
+ w := bufio.NewWriter(fo)
+
+ // make a buffer to keep chunks that are read
+ buf := make([]byte, 1024)
+ for {
+ // read a chunk
+ n, err := r.Read(buf)
+ if err != nil && err != io.EOF {
+ return nil, err
+ }
+
+ if n == 0 {
+ break
+ }
+
+ // write a chunk
+ if _, err := w.Write(buf[:n]); err != nil {
+ return nil, err
+ }
+ }
+
+ if err = w.Flush(); err != nil {
+ return nil, err
+ }
+
+ obj.Url = this.toPath(obj)
+ obj.Url = stripPath(obj.Url)
+ return obj, nil
+}
+
+func (this *LocalImageStore) createParent(obj *StoreObject) {
+ path := path.Dir(this.toPath(obj))
+
+ if _, err := os.Stat(path); os.IsNotExist(err) {
+ os.MkdirAll(path, 0777)
+ }
+}
+
+func (this *LocalImageStore) toPath(obj *StoreObject) string {
+ return this.storeRoot + "/" + this.namePathMapper.mapToPath(obj)
+}
diff --git a/imagestore/namepathmapper.go b/imagestore/namepathmapper.go
new file mode 100644
index 0000000..919007b
--- /dev/null
+++ b/imagestore/namepathmapper.go
@@ -0,0 +1,34 @@
+package imagestore
+
+import (
+ "regexp"
+ "strings"
+)
+
// NamePathMapper turns a StoreObject into a relative storage path using
// a replacement template and an optional regex applied to the name.
type NamePathMapper struct {
	regex   *regexp.Regexp // optional; nil disables regex rewriting of the name
	replace string         // template containing ${ImageName}/${ImageSize} placeholders
}
+
+func NewNamePathMapper(expr string, mapping string) *NamePathMapper {
+ var r *regexp.Regexp
+ if len(expr) > 0 {
+ r = regexp.MustCompile(expr)
+ }
+
+ return &NamePathMapper{
+ r,
+ mapping,
+ }
+}
+
+func (this *NamePathMapper) mapToPath(obj *StoreObject) string {
+ repl := strings.Replace(this.replace, "${ImageName}", obj.Name, -1)
+ repl = strings.Replace(repl, "${ImageSize}", obj.Type, -1)
+
+ if this.regex != nil {
+ return this.regex.ReplaceAllString(obj.Name, repl)
+ }
+
+ return repl
+}
diff --git a/imagestore/s3store.go b/imagestore/s3store.go
new file mode 100644
index 0000000..e023fcc
--- /dev/null
+++ b/imagestore/s3store.go
@@ -0,0 +1,53 @@
+package imagestore
+
+import (
+ "github.com/mitchellh/goamz/s3"
+ "io/ioutil"
+)
+
// S3ImageStore persists images to an Amazon S3 bucket and exposes them
// via public s3.amazonaws.com URLs.
type S3ImageStore struct {
	bucketName     string          // destination bucket
	storeRoot      string          // key prefix; always joined with "/" in toPath
	client         *s3.S3          // authenticated S3 client
	namePathMapper *NamePathMapper // maps object names to storage paths
}
+
+func NewS3ImageStore(bucket string, root string, client *s3.S3, mapper *NamePathMapper) *S3ImageStore {
+ return &S3ImageStore{
+ bucketName: bucket,
+ storeRoot: root,
+ client: client,
+ namePathMapper: mapper,
+ }
+}
+
+func (this *S3ImageStore) Exists(obj *StoreObject) (bool, error) {
+ bucket := this.client.Bucket(this.bucketName)
+ response, err := bucket.Head(this.toPath(obj))
+ if err != nil {
+ return false, err
+ }
+
+ return (response.StatusCode == 200), nil
+}
+
+func (this *S3ImageStore) Save(src string, obj *StoreObject) (*StoreObject, error) {
+ bucket := this.client.Bucket(this.bucketName)
+
+ data, err := ioutil.ReadFile(src)
+ if err != nil {
+ return nil, err
+ }
+
+ err = bucket.Put(this.toPath(obj), data, obj.MimeType, s3.PublicReadWrite)
+ if err != nil {
+ return nil, err
+ }
+
+ obj.Url = "https://s3.amazonaws.com/" + this.bucketName + this.toPath(obj)
+ return obj, nil
+}
+
+func (this *S3ImageStore) toPath(obj *StoreObject) string {
+ return this.storeRoot + "/" + this.namePathMapper.mapToPath(obj)
+}
diff --git a/imagestore/store.go b/imagestore/store.go
new file mode 100644
index 0000000..a58d1ff
--- /dev/null
+++ b/imagestore/store.go
@@ -0,0 +1,16 @@
+package imagestore
+
// ImageStore is the storage backend contract: persist a local file under
// an object's mapped name and check whether an object is already stored.
type ImageStore interface {
	// Save uploads the file at src and returns obj with its Url set.
	Save(src string, obj *StoreObject) (*StoreObject, error)
	// Exists reports whether obj is already present in the store.
	Exists(obj *StoreObject) (bool, error)
}
+
// ImageStores aggregates multiple backends behind the same method set.
// Both methods below are unimplemented stubs.
type ImageStores []ImageStore

// Save is a stub — presumably intended to fan the file out to every
// underlying store. TODO(review): implement before use.
func (this *ImageStores) Save(src string, obj *StoreObject) {
	// TODO
}

// Exists is a stub that always reports "not found" without consulting
// any underlying store. TODO(review): implement before relying on it.
func (this *ImageStores) Exists(obj *StoreObject) (bool, error) {
	return false, nil
}
diff --git a/imagestore/storeobject.go b/imagestore/storeobject.go
new file mode 100644
index 0000000..f834360
--- /dev/null
+++ b/imagestore/storeobject.go
@@ -0,0 +1,8 @@
+package imagestore
+
// StoreObject describes a single stored image variant and, after a
// successful Save, where it can be fetched from.
type StoreObject struct {
	Name     string // unique identifier (e.g. a generated hash)
	MimeType string // content type, e.g. "image/jpeg"
	Type     string // variant label, e.g. "thumb" or "original"
	Url      string // public URL; set by ImageStore.Save when available
}
diff --git a/imagestore/strippath.go b/imagestore/strippath.go
new file mode 100644
index 0000000..11ea12e
--- /dev/null
+++ b/imagestore/strippath.go
@@ -0,0 +1,13 @@
+package imagestore
+
+import (
+ "os"
+ "strings"
+)
+
+func stripPath(url string) string {
+ ABSPATH := os.Getenv("UPLOAD_DIR")
+ URL := os.Getenv("UPLOAD_URL")
+ return URL + strings.Replace(strings.TrimPrefix(url, ABSPATH), "/original/", "/i/", 1)
+ //return URL + strings.TrimPrefix(url, ABSPATH)
+}