package photosapi

import (
	"bytes"
	"errors"
	"fmt"
	"io"
	"mime"
	"net/http"
	"path/filepath"
	"strings"
	"time"

	"github.com/dsoprea/go-exif/v3"
	"github.com/gin-gonic/gin"
	"github.com/jackc/pgconn"
	"gorm.io/gorm"
)

// Errors
var (
	// Store
	ErrStoreBadChecksum        = errors.New("checksum should be sha1 in hex format")
	ErrStoreBadChunkSize       = errors.New("part file size should be 4MB max")
	ErrStoreMissingChunks      = errors.New("part checksum missing")
	ErrStoreWrongChecksum      = errors.New("wrong checksum")
	ErrStoreMismatchChecksum   = errors.New("part files don't match the original checksum")
	ErrStoreAlreadyExists      = errors.New("original file already exists")
	ErrStoreChunkAlreadyExists = errors.New("chunk file already exists")
	ErrStoreMissingName        = errors.New("name required")
)

// Model
type File struct {
	ID        uint32       `gorm:"primary_key" json:"id"`
	Name      string       `gorm:"not null" json:"name"`
	Checksum  string       `gorm:"unique;size:44;not null"`
	Size      uint64       `gorm:"not null"`
	Author    *Account     `gorm:"constraint:OnDelete:SET NULL,OnUpdate:CASCADE" json:"author"`
	AuthorId  *uint32      `json:"-"`
	Chunks    []*FileChunk `gorm:"constraint:OnDelete:CASCADE,OnUpdate:CASCADE"`
	CreatedAt time.Time    `json:"created_at"`
	UpdatedAt time.Time    `json:"updated_at"`
}

type FileChunk struct {
	FileId    uint32
	File      *File     `gorm:"constraint:OnDelete:CASCADE,OnUpdate:CASCADE"`
	Part      uint32
	Checksum  string    `gorm:"unique;size:44;not null"`
	CreatedAt time.Time `json:"created_at"`
	UpdatedAt time.Time `json:"updated_at"`
}

// Service

// CHUNK_SIZE is the maximum accepted size of a single uploaded chunk (4 MiB).
var CHUNK_SIZE int64 = 4 << 20

type FileRequest struct {
	Name     string   `json:"name"`
	Checksum string   `json:"checksum"`
	Chunks   []string `json:"chunks"`
}

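// FileCreate registers a previously uploaded set of chunks as a single file.
// It validates the request, reassembles the chunks through the store to check
// the overall checksum, then records the File and its FileChunk rows in one
// transaction. A unique-violation on the checksum is treated as success so the
// call stays idempotent.
//
// Illustrative request body (field values are hypothetical):
//
//	{
//	  "name": "holiday.jpg",
//	  "checksum": "<40-char sha1 hex of the whole file>",
//	  "chunks": ["<sha1 of part 1>", "<sha1 of part 2>"]
//	}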
func (s *Service) FileCreate(c *gin.Context) {
	file := &FileRequest{}
	if err := c.ShouldBindJSON(file); err != nil {
		s.Error(c, http.StatusBadRequest, err)
		return
	}

	if len(file.Name) < 1 {
		s.Error(c, http.StatusBadRequest, ErrStoreMissingName)
		return
	}

	if len(file.Checksum) != 40 {
		s.Error(c, http.StatusBadRequest, ErrStoreBadChecksum)
		return
	}

	if len(file.Chunks) == 0 {
		s.Error(c, http.StatusBadRequest, ErrStoreMissingChunks)
		return
	}

	for _, chunk := range file.Chunks {
		if len(chunk) != 40 {
			s.Error(c, http.StatusBadRequest, ErrStoreBadChecksum)
			return
		}
	}

	r, rs, err := s.Store.Combine(file.Chunks)
	if err != nil {
		// A missing chunk is the client's fault; the store signals it with an
		// error message starting with "chunk" and ending in "doesn't exists".
		if strings.HasPrefix(err.Error(), "chunk") && strings.HasSuffix(err.Error(), "doesn't exists") {
			s.Error(c, http.StatusBadRequest, err)
		} else {
			s.Error(c, http.StatusInternalServerError, err)
		}
		return
	}

	if r != file.Checksum {
		s.Error(c, http.StatusExpectationFailed, ErrStoreMismatchChecksum)
		return
	}

	sess := s.CurrentSession(c)

	f := &File{
		Name:     file.Name,
		Checksum: file.Checksum,
		Size:     rs,
		AuthorId: &sess.AccountId,
	}

	err = s.DB.Transaction(func(tx *gorm.DB) error {
		if err := tx.Create(f).Error; err != nil {
			return err
		}
		for i, chunk := range file.Chunks {
			fc := &FileChunk{
				FileId:   f.ID,
				Part:     uint32(i + 1),
				Checksum: chunk,
			}
			if err := tx.Create(fc).Error; err != nil {
				return err
			}
		}
		return nil
	})

	// A duplicate checksum means the file is already registered; treat it as
	// success instead of surfacing the unique-violation error.
	var pgErr *pgconn.PgError
	if errors.As(err, &pgErr) {
		if pgErr.Code == "23505" && pgErr.Detail == fmt.Sprintf("Key (checksum)=(%s) already exists.", file.Checksum) {
			err = nil
		}
	}

	if err != nil {
		s.Error(c, http.StatusInternalServerError, err)
		return
	}

	c.JSON(http.StatusOK, gin.H{
		"status":   "success",
		"sum":      file.Checksum,
		"nbChunks": len(file.Chunks),
		"size":     rs,
	})
}

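// FileCreateChunk stores the raw request body as a single chunk. The body is
// limited to CHUNK_SIZE bytes; re-uploading an already stored chunk is not an
// error and simply returns its checksum again.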
func (s *Service) FileCreateChunk(c *gin.Context) {
	if c.Request.ContentLength > CHUNK_SIZE {
		s.Error(c, http.StatusBadRequest, ErrStoreBadChunkSize)
		return
	}

	b := bytes.NewBuffer([]byte{})
	if _, err := io.Copy(b, c.Request.Body); err != nil {
		s.Error(c, http.StatusInternalServerError, err)
		return
	}
	c.Request.Body.Close()

	sess := s.CurrentSession(c)

	chunk := s.Store.NewChunk(b.Bytes())
	if err := chunk.Save(sess.Account.Login); err != nil {
		if errors.Is(err, ErrStoreChunkAlreadyExists) {
			c.JSON(http.StatusOK, gin.H{
				"status":   "success",
				"checksum": chunk.Sum,
			})
		} else {
			s.Error(c, http.StatusBadRequest, err)
		}
		return
	}

	c.JSON(http.StatusOK, gin.H{
		"status":   "success",
		"checksum": chunk.Sum,
	})
}

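// FileChunkExists reports, via the status code only, whether a chunk with the
// given checksum is already present in the store (200) or not (404).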
func (s *Service) FileChunkExists(c *gin.Context) {
	checksum := c.Param("checksum")
	if len(checksum) != 40 {
		s.Error(c, http.StatusBadRequest, ErrStoreBadChecksum)
		return
	}

	if s.Store.Chunk(checksum).FileExists() {
		c.Status(http.StatusOK)
	} else {
		c.Status(http.StatusNotFound)
	}
}

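// FileExists reports, via the status code only, whether a file with the given
// checksum has been registered in the database (200) or not (404).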
func (s *Service) FileExists(c *gin.Context) {
	checksum := c.Param("checksum")
	if len(checksum) != 40 {
		s.Error(c, http.StatusBadRequest, ErrStoreBadChecksum)
		return
	}

	var fileExists int64
	if err := s.DB.Model(&File{}).Where("checksum = ?", checksum).Count(&fileExists).Error; err != nil {
		s.Error(c, http.StatusInternalServerError, err)
		return
	}

	if fileExists > 0 {
		c.Status(http.StatusOK)
	} else {
		c.Status(http.StatusNotFound)
	}
}

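// FileGet streams a file back to the client by reassembling its chunks in
// order. The file checksum doubles as the ETag, so a matching If-None-Match
// header short-circuits with 304 Not Modified.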
func (s *Service) FileGet(c *gin.Context) {
	checksum := c.Param("checksum")
	if len(checksum) != 40 {
		s.Error(c, http.StatusBadRequest, ErrStoreBadChecksum)
		return
	}
	if checksum == c.GetHeader("If-None-Match") {
		c.Status(http.StatusNotModified)
		return
	}

	f := &File{}
	if err := s.DB.Preload("Chunks").Where("checksum = ?", checksum).First(&f).Error; err != nil {
		s.Error(c, http.StatusBadRequest, err)
		return
	}

	// Rebuild the ordered list of chunk checksums; Part is 1-based.
	chunks := make([]string, len(f.Chunks))
	for _, fc := range f.Chunks {
		chunks[fc.Part-1] = fc.Checksum
	}

	reader, err := s.Store.NewStoreReader(chunks)
	if err != nil {
		s.Error(c, http.StatusInternalServerError, err)
		return
	}
	defer reader.Close()

	c.DataFromReader(
		http.StatusOK,
		reader.Size,
		mime.TypeByExtension(filepath.Ext(f.Name)),
		reader,
		map[string]string{
			"Content-Disposition": fmt.Sprintf("inline; filename=\"%s\"", f.Name),
			"ETag":                f.Checksum,
		},
	)
}

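// FileAnalyze reassembles a file and returns its flattened EXIF entries as
// JSON.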
func (s *Service) FileAnalyze(c *gin.Context) {
	checksum := c.Param("checksum")
	if len(checksum) != 40 {
		s.Error(c, http.StatusBadRequest, ErrStoreBadChecksum)
		return
	}

	f := &File{}
	if err := s.DB.Preload("Chunks").Where("checksum = ?", checksum).First(&f).Error; err != nil {
		s.Error(c, http.StatusBadRequest, err)
		return
	}

	chunks := make([]string, len(f.Chunks))
	for _, fc := range f.Chunks {
		chunks[fc.Part-1] = fc.Checksum
	}

	reader, err := s.Store.NewStoreReader(chunks)
	if err != nil {
		s.Error(c, http.StatusInternalServerError, err)
		return
	}
	defer reader.Close()

	rawExif, err := exif.SearchAndExtractExifWithReader(reader)
	if err != nil {
		s.Error(c, http.StatusInternalServerError, err)
		return
	}
	entries, _, err := exif.GetFlatExifDataUniversalSearch(rawExif, nil, true)
	if err != nil {
		s.Error(c, http.StatusInternalServerError, err)
		return
	}

	c.JSON(http.StatusOK, gin.H{
		"exif": entries,
	})
}

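// FileInit mounts the /file routes; every route requires a session.
//
// A typical upload flow against these routes (illustrative only; chunking and
// checksumming happen client-side):
//
//	POST /file/chunk             raw chunk bytes (<= CHUNK_SIZE) -> chunk checksum
//	HEAD /file/chunk/:checksum   skip re-uploading chunks the server already has
//	POST /file                   {"name", "checksum", "chunks"} registers the file
//	GET  /file/:checksum         streams the reassembled file
//	GET  /file/analyze/:checksum EXIF data of the reassembled file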
func (s *Service) FileInit() {
	file := s.Gin.Group("/file")
	file.Use(s.RequireSession)
	file.POST("", s.FileCreate)
	file.HEAD("/:checksum", s.FileExists)
	file.GET("/:checksum", s.FileGet)
	file.POST("/chunk", s.FileCreateChunk)
	file.HEAD("/chunk/:checksum", s.FileChunkExists)
	file.GET("/analyze/:checksum", s.FileAnalyze)
}