Compare commits

...

4 Commits

Author     SHA1         Message                                  Date
celogeek   5dbd59fe8f   move request/response to internal api    2022-05-14 19:13:10 +02:00
celogeek   3e48bd14df   remove chunk                             2022-05-14 17:48:13 +02:00
celogeek   44f0cf68be   disable file                             2022-05-14 17:47:55 +02:00
celogeek   2d1fdc1314   factor error and cleanup                 2022-05-14 17:47:23 +02:00
13 changed files with 306 additions and 716 deletions
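
The thread running through these commits: each CLI command's private request/response/error structs are replaced by shared types from the internal photosapi package, and error handling moves from type-asserting a per-command error to checking resp.IsError() and returning the shared *photosapi.ErrorWithDetails. A minimal sketch of the resulting client pattern, assuming it is compiled inside this repository (photosapi is an internal package) and against a placeholder base URL:

```go
// Minimal sketch of the post-refactor client call. The photosapi types are
// the ones shown in the hunks below; the base URL is a placeholder.
package main

import (
	"fmt"
	"log"

	"github.com/go-resty/resty/v2"
	photosapi "gitlab.celogeek.com/photos/api/internal/photos/api"
)

func doLogin(cli *resty.Client, user, password string) (string, error) {
	resp, err := cli.R().
		SetBody(&photosapi.LoginRequest{Login: user, Password: password}).
		SetResult(&photosapi.LoginResponse{}).
		SetError(&photosapi.ErrorWithDetails{}).
		Post("/account/login")
	if err != nil {
		return "", err // transport-level failure
	}
	if resp.IsError() {
		// ErrorWithDetails implements error, so it can be returned directly.
		return "", resp.Error().(*photosapi.ErrorWithDetails)
	}
	return resp.Result().(*photosapi.LoginResponse).Token, nil
}

func main() {
	cli := resty.New().SetBaseURL("http://localhost:8080") // placeholder
	token, err := doLogin(cli, "demo", "demo-password")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(token)
}
```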

View File

@ -1,10 +1,10 @@
package main
import (
"errors"
"fmt"
"github.com/go-resty/resty/v2"
photosapi "gitlab.celogeek.com/photos/api/internal/photos/api"
)
type LoginCommand struct {
@ -13,19 +13,6 @@ type LoginCommand struct {
Password string `short:"p" long:"password" description:"Password" required:"true"`
}
type LoginRequest struct {
Login string `json:"login"`
Password string `json:"password"`
}
type LoginError struct {
Error string
}
type LoginResponse struct {
Token string
}
func (c *LoginCommand) Execute(args []string) error {
logger.Printf("Login on %s...\n", c.Url)
@ -33,22 +20,25 @@ func (c *LoginCommand) Execute(args []string) error {
resp, err := cli.
R().
SetBody(&LoginRequest{c.Login, c.Password}).
SetResult(&LoginResponse{}).
SetError(&LoginError{}).
SetBody(&photosapi.LoginRequest{
Login: c.Login,
Password: c.Password,
}).
SetResult(&photosapi.LoginResponse{}).
SetError(&photosapi.ErrorWithDetails{}).
Post("/account/login")
if err != nil {
return err
}
if err, ok := resp.Error().(*LoginError); ok {
if resp.IsError() {
logger.Printf("Login failed!")
return errors.New(err.Error)
return resp.Error().(*photosapi.ErrorWithDetails)
}
logger.Println("Login succeed!")
if result, ok := resp.Result().(*LoginResponse); ok {
if result, ok := resp.Result().(*photosapi.LoginResponse); ok {
fmt.Println(result.Token)
}

View File

@ -1,9 +1,8 @@
package main
import (
"errors"
"github.com/go-resty/resty/v2"
photosapi "gitlab.celogeek.com/photos/api/internal/photos/api"
)
type RegisterCommand struct {
@ -12,18 +11,6 @@ type RegisterCommand struct {
Password string `short:"p" long:"password" description:"Password" required:"true"`
}
type RegisterRequest struct {
Login string `json:"login"`
Password string `json:"password"`
}
type RegisterResponse struct {
}
type RegisterError struct {
Error string
}
func (c *RegisterCommand) Execute(args []string) error {
logger.Printf("Registering on %s...\n", c.Url)
@ -31,18 +18,20 @@ func (c *RegisterCommand) Execute(args []string) error {
resp, err := cli.
R().
SetBody(&RegisterRequest{c.Login, c.Password}).
SetResult(&RegisterResponse{}).
SetError(&RegisterError{}).
SetError(&photosapi.ErrorWithDetails{}).
SetBody(&photosapi.SignupRequest{
Login: c.Login,
Password: c.Password,
}).
Post("/account/signup")
if err != nil {
return err
}
if err, ok := resp.Error().(*RegisterError); ok {
if resp.IsError() {
logger.Println("Registering failed!")
return errors.New(err.Error)
return resp.Error().(*photosapi.ErrorWithDetails)
}
logger.Println("Registering succeed!")

View File

@ -1,16 +1,12 @@
package main
import (
"crypto/sha1"
"crypto/sha256"
"encoding/hex"
"errors"
"fmt"
"io"
"os"
"path/filepath"
"strings"
"sync"
"github.com/go-resty/resty/v2"
"github.com/schollz/progressbar/v3"
@ -24,197 +20,22 @@ type UploadCommand struct {
Workers uint32 `short:"w" long:"workers" description:"Number of workers for uploading chunks" default:"4"`
}
type UploadError struct {
Err string `json:"error"`
Details []string `json:"details"`
}
func (u *UploadError) Error() string {
if len(u.Details) == 0 {
return u.Err
}
return fmt.Sprintf("%s: \n - %s", u.Err, strings.Join(u.Details, "\n - "))
}
type UploadCreate struct {
UploadId string `json:"upload_id"`
}
type UploadPartResult struct {
UploadId string `json:"upload_id"`
Part uint `json:"part"`
Size uint `json:"size"`
PartSha256 string `json:"sha256"`
}
type UploadCompleteRequest struct {
Sha256 string `json:"sha256" binding:"required,sha256"`
Name string `json:"name" binding:"required"`
Parts uint `json:"parts" binding:"required"`
}
type UploadFileRequest struct {
Name string
Checksum string
Chunks []string
}
type UploadFileResponse struct {
Sum string
NbChunks uint32
Size uint64
}
func (c *UploadCommand) Cli() *resty.Client {
return resty.New().SetBaseURL(c.Url).SetAuthScheme("Private").SetAuthToken(c.Token)
}
func (c *UploadCommand) FileExists() (string, error) {
f, err := os.Open(c.File)
if err != nil {
return "", err
}
defer f.Close()
st, err := f.Stat()
if err != nil {
return "", err
}
progress := progressbar.DefaultBytes(st.Size(), fmt.Sprintf("Checking %s", filepath.Base(c.File)))
defer progress.Close()
tee := io.TeeReader(f, progress)
checksum := sha1.New()
io.Copy(checksum, tee)
sum := hex.EncodeToString(checksum.Sum(nil))
resp, err := c.Cli().R().Head(fmt.Sprintf("/file/%s", sum))
if err != nil {
return "", err
}
if resp.IsSuccess() {
return "", errors.New("file already exists")
}
return sum, nil
}
func (c *UploadCommand) FileUpload(sum string) error {
f, err := os.Open(c.File)
if err != nil {
return err
}
defer f.Close()
st, err := f.Stat()
if err != nil {
return err
}
nbChunks := st.Size() / photosapi.CHUNK_SIZE
if st.Size()%photosapi.CHUNK_SIZE > 0 {
nbChunks++
}
uploadFile := &UploadFileRequest{
Name: filepath.Base(c.File),
Chunks: make([]string, nbChunks),
Checksum: sum,
}
cli := c.Cli()
progress := progressbar.DefaultBytes(st.Size(), fmt.Sprintf("Uploading %s", uploadFile.Name))
defer progress.Close()
wg := sync.WaitGroup{}
mu := sync.Mutex{}
wg.Add(4)
wgErrors := make([]error, c.Workers)
i := int64(0)
for w := uint32(0); w < c.Workers; w++ {
go func(w uint32) {
defer wg.Done()
b := make([]byte, photosapi.CHUNK_SIZE)
for {
mu.Lock()
part := i
i++
n, err := f.Read(b)
mu.Unlock()
if n == 0 {
if err == io.EOF {
break
}
wgErrors[w] = err
return
}
checksum := sha1.New()
checksum.Write(b[0:n])
sum := hex.EncodeToString(checksum.Sum(nil))
resp, err := cli.R().Head(fmt.Sprintf("/file/chunk/%s", sum))
if err != nil {
wgErrors[w] = err
return
}
if resp.IsSuccess() {
uploadFile.Chunks[part] = sum
progress.Add(n)
continue
}
resp, err = cli.R().SetError(&UploadError{}).SetBody(b[0:n]).Post("/file/chunk")
if err != nil {
wgErrors[w] = err
return
}
if err, ok := resp.Error().(*UploadError); ok {
wgErrors[w] = err
return
}
uploadFile.Chunks[part] = sum
progress.Add(n)
}
}(w)
}
wg.Wait()
for _, err := range wgErrors {
if err != nil {
return err
}
}
resp, err := cli.R().SetBody(uploadFile).SetError(&UploadError{}).SetResult(&UploadFileResponse{}).Post("/file")
if err != nil {
return err
}
if err, ok := resp.Error().(*UploadError); ok {
logger.Println("Upload failed")
return err
}
if result, ok := resp.Result().(*UploadFileResponse); ok {
fmt.Printf("Upload succeed\nSum: %s\nNbChunks: %d\nSize: %d\n", result.Sum, result.NbChunks, result.Size)
}
return nil
}
func (c *UploadCommand) Execute(args []string) error {
cli := c.Cli()
resp, err := cli.R().SetError(&UploadError{}).SetResult(&UploadCreate{}).Post("/upload")
resp, err := cli.R().SetError(&photosapi.ErrorWithDetails{}).SetResult(&photosapi.Upload{}).Post("/upload")
if err != nil {
return err
}
if err, ok := resp.Error().(*UploadError); ok {
return err
if resp.IsError() {
return resp.Error().(*photosapi.ErrorWithDetails)
}
uploadId := resp.Result().(*UploadCreate).UploadId
uploadId := resp.Result().(*photosapi.Upload).Id
f, err := os.Open(c.File)
if err != nil {
@ -249,8 +70,7 @@ func (c *UploadCommand) Execute(args []string) error {
resp, err := cli.
R().
SetError(&UploadError{}).
SetResult(&UploadPartResult{}).
SetError(&photosapi.ErrorWithDetails{}).
SetQueryParam("part", fmt.Sprint(parts)).
SetQueryParam("sha256", hex.EncodeToString(partsha256.Sum(nil))).
SetBody(b[:n]).
@ -261,22 +81,22 @@ func (c *UploadCommand) Execute(args []string) error {
return err
}
if err, ok := resp.Error().(*UploadError); ok {
return err
if resp.IsError() {
return resp.Error().(*photosapi.ErrorWithDetails)
}
}
fmt.Printf(
"Upload: %s\nParts: %d\n",
"Result:\n - Upload ID: %s\n - Parts: %d\n",
uploadId,
parts,
)
resp, err = cli.
R().
SetError(&UploadError{}).
SetError(&photosapi.ErrorWithDetails{}).
SetPathParam("id", uploadId).
SetBody(&UploadCompleteRequest{
SetBody(&photosapi.UploadCompleteRequest{
Sha256: hex.EncodeToString(completesha256.Sum(nil)),
Parts: uint(parts),
Name: filepath.Base(c.File),
@ -287,12 +107,10 @@ func (c *UploadCommand) Execute(args []string) error {
return err
}
if err, ok := resp.Error().(*UploadError); ok {
return err
if resp.IsError() {
return resp.Error().(*photosapi.ErrorWithDetails)
}
fmt.Printf("Response: %s\n", resp.Body())
cli.R().SetPathParam("id", uploadId).Delete("/upload/{id}")
return nil
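
For the chunked upload itself, each part is posted with its index and sha256 as query parameters and the raw bytes as the body, with the same shared error type. A hedged sketch of one such request; the "/upload/{id}/part" path and the POST verb are placeholders since the actual part route is not visible in these hunks, and the code again assumes it lives inside this repository:

```go
// Hedged sketch of a single part upload mirroring the query parameters and
// error handling used in this diff. Path and verb are placeholders.
package photosclient

import (
	"crypto/sha256"
	"encoding/hex"
	"fmt"

	"github.com/go-resty/resty/v2"
	photosapi "gitlab.celogeek.com/photos/api/internal/photos/api"
)

func uploadPart(cli *resty.Client, uploadId string, part uint, chunk []byte) error {
	sum := sha256.Sum256(chunk) // per-part checksum, sent as a query parameter
	resp, err := cli.R().
		SetError(&photosapi.ErrorWithDetails{}).
		SetPathParam("id", uploadId).
		SetQueryParam("part", fmt.Sprint(part)).
		SetQueryParam("sha256", hex.EncodeToString(sum[:])).
		SetBody(chunk).
		Post("/upload/{id}/part") // placeholder path
	if err != nil {
		return err // transport-level failure
	}
	if resp.IsError() {
		return resp.Error().(*photosapi.ErrorWithDetails)
	}
	return nil // the server now answers 204 No Content on success
}
```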

View File

@ -50,13 +50,22 @@ func NewAccount(login string, password string) *Account {
}
// Service
type SignupOrLoginRequest struct {
Login string `binding:"min=3,max=40,alphanum"`
Password string `binding:"min=8,max=40"`
type SignupRequest struct {
Login string `json:"login" binding:"required,min=3,max=40,alphanum"`
Password string `json:"password" binding:"required,min=8,max=40"`
}
type LoginRequest struct {
Login string `json:"login" binding:"required"`
Password string `json:"password" binding:"required"`
}
type LoginResponse struct {
Token string `json:"token"`
}
func (s *Service) Signup(c *gin.Context) {
var account *SignupOrLoginRequest
var account *SignupRequest
if c.BindJSON(&account) != nil {
return
@ -80,7 +89,7 @@ func (s *Service) Signup(c *gin.Context) {
}
func (s *Service) Login(c *gin.Context) {
var account *SignupOrLoginRequest
var account *LoginRequest
if c.BindJSON(&account) != nil {
return
@ -96,9 +105,7 @@ func (s *Service) Login(c *gin.Context) {
return
}
c.JSON(http.StatusOK, gin.H{
"token": session.Token,
})
c.JSON(http.StatusOK, LoginResponse{session.Token})
}
func (s *Service) Logout(c *gin.Context) {

View File

@ -35,7 +35,6 @@ func (s *Service) Migrate() {
tx.AutoMigrate(&Account{})
tx.AutoMigrate(&Session{})
tx.AutoMigrate(&File{})
tx.AutoMigrate(&FileChunk{})
}
func (s *Service) DBConnect() {

View File

@ -2,6 +2,7 @@ package photosapi
import (
"errors"
"fmt"
"strings"
"github.com/gin-gonic/gin"
@ -11,6 +12,18 @@ var (
ErrReqMissingBody = errors.New("missing body")
)
type ErrorWithDetails struct {
Err string `json:"error"`
Details []string `json:"details"`
}
func (u *ErrorWithDetails) Error() string {
if len(u.Details) == 0 {
return u.Err
}
return fmt.Sprintf("%s: \n - %s", u.Err, strings.Join(u.Details, "\n - "))
}
func (s *Service) HandleError(c *gin.Context) {
c.Next()
err := c.Errors.Last()
@ -18,20 +31,15 @@ func (s *Service) HandleError(c *gin.Context) {
return
}
details := err.Error()
if details == "EOF" {
details = "missing body"
errWithDetails := &ErrorWithDetails{err.Error(), nil}
if errWithDetails.Err == "EOF" {
errWithDetails.Err = "missing body"
}
switch err.Type {
case gin.ErrorTypeBind:
c.JSON(-1, gin.H{
"error": "binding error",
"details": strings.Split(details, "\n"),
})
default:
c.JSON(-1, gin.H{
"error": details,
})
if err.Type == gin.ErrorTypeBind {
errWithDetails.Err, errWithDetails.Details = "binding error", strings.Split(errWithDetails.Err, "\n")
}
c.JSON(-1, errWithDetails)
}
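
With this change the server marshals the same ErrorWithDetails struct that the CLI decodes, so both sides share one wire format. An illustration of the JSON a binding failure now produces, using a local copy of the struct; the validator message in Details is only an example, not taken from this diff:

```go
// Illustration of the wire format HandleError now produces for a gin binding
// failure. ErrorWithDetails is copied locally to keep the example standalone.
package main

import (
	"encoding/json"
	"fmt"
)

type ErrorWithDetails struct {
	Err     string   `json:"error"`
	Details []string `json:"details"`
}

func main() {
	e := &ErrorWithDetails{
		Err:     "binding error",
		Details: []string{"Key: 'SignupRequest.Login' Error:Field validation for 'Login' failed on the 'alphanum' tag"},
	}
	b, _ := json.MarshalIndent(e, "", "  ")
	fmt.Println(string(b))
	// {
	//   "error": "binding error",
	//   "details": [
	//     "Key: 'SignupRequest.Login' Error:Field validation for 'Login' failed on the 'alphanum' tag"
	//   ]
	// }
}
```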

View File

@ -1,19 +1,7 @@
package photosapi
import (
"bytes"
"errors"
"fmt"
"io"
"mime"
"net/http"
"path/filepath"
"strings"
"github.com/dsoprea/go-exif/v3"
"github.com/gin-gonic/gin"
"github.com/jackc/pgconn"
"gorm.io/gorm"
)
// Error
@ -29,252 +17,252 @@ var (
ErrStoreMissingName = errors.New("name required")
)
// Service
var CHUNK_SIZE int64 = 4 << 20
// // Service
// var CHUNK_SIZE int64 = 4 << 20
type FileRequest struct {
Name string `json:"name"`
Checksum string `json:"checksum"`
Chunks []string `json:"chunks"`
}
// type FileRequest struct {
// Name string `json:"name"`
// Checksum string `json:"checksum"`
// Chunks []string `json:"chunks"`
// }
func (s *Service) FileCreate(c *gin.Context) {
file := &FileRequest{}
if c.BindJSON(file) != nil {
return
}
// func (s *Service) FileCreate(c *gin.Context) {
// file := &FileRequest{}
// if c.BindJSON(file) != nil {
// return
// }
if len(file.Name) < 1 {
c.AbortWithError(http.StatusBadRequest, ErrStoreMissingName)
return
}
// if len(file.Name) < 1 {
// c.AbortWithError(http.StatusBadRequest, ErrStoreMissingName)
// return
// }
if len(file.Checksum) != 40 {
c.AbortWithError(http.StatusBadRequest, ErrStoreBadChecksum)
return
}
if len(file.Chunks) == 0 {
c.AbortWithError(http.StatusBadRequest, ErrStoreMissingChunks)
return
}
// if len(file.Checksum) != 40 {
// c.AbortWithError(http.StatusBadRequest, ErrStoreBadChecksum)
// return
// }
// if len(file.Chunks) == 0 {
// c.AbortWithError(http.StatusBadRequest, ErrStoreMissingChunks)
// return
// }
for _, chunk := range file.Chunks {
if len(chunk) != 40 {
c.AbortWithError(http.StatusBadRequest, ErrStoreBadChecksum)
return
}
}
// for _, chunk := range file.Chunks {
// if len(chunk) != 40 {
// c.AbortWithError(http.StatusBadRequest, ErrStoreBadChecksum)
// return
// }
// }
r, rs, err := s.Store.Combine(file.Chunks)
if err != nil {
if strings.HasPrefix(err.Error(), "chunk") && strings.HasSuffix(err.Error(), "doesn't exists") {
c.AbortWithError(http.StatusBadRequest, err)
} else {
c.AbortWithError(http.StatusInternalServerError, err)
}
return
}
// r, rs, err := s.Store.Combine(file.Chunks)
// if err != nil {
// if strings.HasPrefix(err.Error(), "chunk") && strings.HasSuffix(err.Error(), "doesn't exists") {
// c.AbortWithError(http.StatusBadRequest, err)
// } else {
// c.AbortWithError(http.StatusInternalServerError, err)
// }
// return
// }
if r != file.Checksum {
c.AbortWithError(http.StatusExpectationFailed, ErrStoreMismatchChecksum)
return
}
// if r != file.Checksum {
// c.AbortWithError(http.StatusExpectationFailed, ErrStoreMismatchChecksum)
// return
// }
sess := s.CurrentSession(c)
// sess := s.CurrentSession(c)
f := &File{
Name: file.Name,
Checksum: file.Checksum,
Size: rs,
AuthorId: &sess.AccountId,
}
// f := &File{
// Name: file.Name,
// Checksum: file.Checksum,
// Size: rs,
// AuthorId: &sess.AccountId,
// }
err = s.DB.Transaction(func(tx *gorm.DB) error {
if err := tx.Create(f).Error; err != nil {
return err
}
for i, chunk := range file.Chunks {
fc := &FileChunk{
FileId: f.ID,
Part: uint32(i + 1),
Checksum: chunk,
}
if err := tx.Create(fc).Error; err != nil {
return err
}
}
return nil
})
// err = s.DB.Transaction(func(tx *gorm.DB) error {
// if err := tx.Create(f).Error; err != nil {
// return err
// }
// for i, chunk := range file.Chunks {
// fc := &FileChunk{
// FileId: f.ID,
// Part: uint32(i + 1),
// Checksum: chunk,
// }
// if err := tx.Create(fc).Error; err != nil {
// return err
// }
// }
// return nil
// })
if nerr, ok := err.(*pgconn.PgError); ok {
if nerr.Code == "23505" && nerr.Detail == fmt.Sprintf("Key (checksum)=(%s) already exists.", file.Checksum) {
err = nil
}
}
// if nerr, ok := err.(*pgconn.PgError); ok {
// if nerr.Code == "23505" && nerr.Detail == fmt.Sprintf("Key (checksum)=(%s) already exists.", file.Checksum) {
// err = nil
// }
// }
if err != nil {
c.AbortWithError(http.StatusInternalServerError, err)
return
}
// if err != nil {
// c.AbortWithError(http.StatusInternalServerError, err)
// return
// }
c.JSON(http.StatusOK, gin.H{
"sum": file.Checksum,
"nbChunks": len(file.Chunks),
"size": rs,
})
}
// c.JSON(http.StatusOK, gin.H{
// "sum": file.Checksum,
// "nbChunks": len(file.Chunks),
// "size": rs,
// })
// }
func (s *Service) FileCreateChunk(c *gin.Context) {
if c.Request.ContentLength > CHUNK_SIZE {
c.AbortWithError(http.StatusBadRequest, ErrStoreBadChunkSize)
return
}
// func (s *Service) FileCreateChunk(c *gin.Context) {
// if c.Request.ContentLength > CHUNK_SIZE {
// c.AbortWithError(http.StatusBadRequest, ErrStoreBadChunkSize)
// return
// }
b := bytes.NewBuffer([]byte{})
io.Copy(b, c.Request.Body)
// b := bytes.NewBuffer([]byte{})
// io.Copy(b, c.Request.Body)
sess := s.CurrentSession(c)
// sess := s.CurrentSession(c)
chunk := s.Store.NewChunk(b.Bytes())
if err := chunk.Save(sess.Account.Login); err != nil {
if errors.Is(err, ErrStoreChunkAlreadyExists) {
c.JSON(http.StatusOK, gin.H{
"checksum": chunk.Sum,
})
} else {
c.AbortWithError(http.StatusBadRequest, err)
}
return
}
// chunk := s.Store.NewChunk(b.Bytes())
// if err := chunk.Save(sess.Account.Login); err != nil {
// if errors.Is(err, ErrStoreChunkAlreadyExists) {
// c.JSON(http.StatusOK, gin.H{
// "checksum": chunk.Sum,
// })
// } else {
// c.AbortWithError(http.StatusBadRequest, err)
// }
// return
// }
c.JSON(http.StatusOK, gin.H{
"checksum": chunk.Sum,
})
}
// c.JSON(http.StatusOK, gin.H{
// "checksum": chunk.Sum,
// })
// }
func (s *Service) FileChunkExists(c *gin.Context) {
checksum := c.Param("checksum")
if len(checksum) != 40 {
c.AbortWithError(http.StatusBadRequest, ErrStoreBadChecksum)
return
}
// func (s *Service) FileChunkExists(c *gin.Context) {
// checksum := c.Param("checksum")
// if len(checksum) != 40 {
// c.AbortWithError(http.StatusBadRequest, ErrStoreBadChecksum)
// return
// }
if s.Store.Chunk(checksum).FileExists() {
c.Status(http.StatusOK)
} else {
c.Status(http.StatusNotFound)
}
}
// if s.Store.Chunk(checksum).FileExists() {
// c.Status(http.StatusOK)
// } else {
// c.Status(http.StatusNotFound)
// }
// }
func (s *Service) FileExists(c *gin.Context) {
checksum := c.Param("checksum")
if len(checksum) != 40 {
c.AbortWithError(http.StatusBadRequest, ErrStoreBadChecksum)
return
}
// func (s *Service) FileExists(c *gin.Context) {
// checksum := c.Param("checksum")
// if len(checksum) != 40 {
// c.AbortWithError(http.StatusBadRequest, ErrStoreBadChecksum)
// return
// }
var fileExists int64
if err := s.DB.Model(&File{}).Where("checksum = ?", checksum).Count(&fileExists).Error; err != nil {
c.AbortWithError(http.StatusInternalServerError, err)
return
}
// var fileExists int64
// if err := s.DB.Model(&File{}).Where("checksum = ?", checksum).Count(&fileExists).Error; err != nil {
// c.AbortWithError(http.StatusInternalServerError, err)
// return
// }
if fileExists > 0 {
c.Status(http.StatusOK)
} else {
c.Status(http.StatusNotFound)
}
}
// if fileExists > 0 {
// c.Status(http.StatusOK)
// } else {
// c.Status(http.StatusNotFound)
// }
// }
func (s *Service) FileGet(c *gin.Context) {
checksum := c.Param("checksum")
if len(checksum) != 40 {
c.AbortWithError(http.StatusBadRequest, ErrStoreBadChecksum)
return
}
if checksum == c.GetHeader("If-None-Match") {
c.Status(http.StatusNotModified)
return
}
// func (s *Service) FileGet(c *gin.Context) {
// checksum := c.Param("checksum")
// if len(checksum) != 40 {
// c.AbortWithError(http.StatusBadRequest, ErrStoreBadChecksum)
// return
// }
// if checksum == c.GetHeader("If-None-Match") {
// c.Status(http.StatusNotModified)
// return
// }
f := &File{}
if err := s.DB.Debug().Preload("Chunks").Where("checksum = ?", checksum).First(&f).Error; err != nil {
c.AbortWithError(http.StatusBadRequest, err)
return
}
// f := &File{}
// if err := s.DB.Debug().Preload("Chunks").Where("checksum = ?", checksum).First(&f).Error; err != nil {
// c.AbortWithError(http.StatusBadRequest, err)
// return
// }
chunks := make([]string, len(f.Chunks))
for _, fc := range f.Chunks {
chunks[fc.Part-1] = fc.Checksum
}
// chunks := make([]string, len(f.Chunks))
// for _, fc := range f.Chunks {
// chunks[fc.Part-1] = fc.Checksum
// }
reader, err := s.Store.NewStoreReader(chunks)
if err != nil {
c.AbortWithError(http.StatusInternalServerError, err)
return
}
defer reader.Close()
// reader, err := s.Store.NewStoreReader(chunks)
// if err != nil {
// c.AbortWithError(http.StatusInternalServerError, err)
// return
// }
// defer reader.Close()
c.DataFromReader(
http.StatusOK,
reader.Size,
mime.TypeByExtension(filepath.Ext(f.Name)),
reader,
map[string]string{
"Content-Disposition": fmt.Sprintf("inline; filename=\"%s\"", f.Name),
"ETag": f.Checksum,
},
)
}
// c.DataFromReader(
// http.StatusOK,
// reader.Size,
// mime.TypeByExtension(filepath.Ext(f.Name)),
// reader,
// map[string]string{
// "Content-Disposition": fmt.Sprintf("inline; filename=\"%s\"", f.Name),
// "ETag": f.Checksum,
// },
// )
// }
func (s *Service) FileAnalyze(c *gin.Context) {
checksum := c.Param("checksum")
if len(checksum) != 40 {
c.AbortWithError(http.StatusBadRequest, ErrStoreBadChecksum)
return
}
// func (s *Service) FileAnalyze(c *gin.Context) {
// checksum := c.Param("checksum")
// if len(checksum) != 40 {
// c.AbortWithError(http.StatusBadRequest, ErrStoreBadChecksum)
// return
// }
f := &File{}
if err := s.DB.Debug().Preload("Chunks").Where("checksum = ?", checksum).First(&f).Error; err != nil {
c.AbortWithError(http.StatusBadRequest, err)
return
}
// f := &File{}
// if err := s.DB.Debug().Preload("Chunks").Where("checksum = ?", checksum).First(&f).Error; err != nil {
// c.AbortWithError(http.StatusBadRequest, err)
// return
// }
chunks := make([]string, len(f.Chunks))
for _, fc := range f.Chunks {
chunks[fc.Part-1] = fc.Checksum
}
// chunks := make([]string, len(f.Chunks))
// for _, fc := range f.Chunks {
// chunks[fc.Part-1] = fc.Checksum
// }
reader, err := s.Store.NewStoreReader(chunks)
if err != nil {
c.AbortWithError(http.StatusInternalServerError, err)
return
}
defer reader.Close()
// reader, err := s.Store.NewStoreReader(chunks)
// if err != nil {
// c.AbortWithError(http.StatusInternalServerError, err)
// return
// }
// defer reader.Close()
rawExif, err := exif.SearchAndExtractExifWithReader(reader)
if err != nil {
c.AbortWithError(http.StatusInternalServerError, err)
return
}
entries, _, err := exif.GetFlatExifDataUniversalSearch(rawExif, nil, true)
if err != nil {
c.AbortWithError(http.StatusInternalServerError, err)
return
}
// rawExif, err := exif.SearchAndExtractExifWithReader(reader)
// if err != nil {
// c.AbortWithError(http.StatusInternalServerError, err)
// return
// }
// entries, _, err := exif.GetFlatExifDataUniversalSearch(rawExif, nil, true)
// if err != nil {
// c.AbortWithError(http.StatusInternalServerError, err)
// return
// }
c.JSON(http.StatusOK, gin.H{
"exif": entries,
})
}
// c.JSON(http.StatusOK, gin.H{
// "exif": entries,
// })
// }
func (s *Service) FileInit() {
file := s.Gin.Group("/file")
file.Use(s.RequireSession)
file.POST("", s.FileCreate)
file.HEAD("/:checksum", s.FileExists)
file.GET("/:checksum", s.FileGet)
file.POST("/chunk", s.FileCreateChunk)
file.HEAD("/chunk/:checksum", s.FileChunkExists)
file.GET("/analyze/:checksum", s.FileAnalyze)
// file.POST("", s.FileCreate)
// file.HEAD("/:checksum", s.FileExists)
// file.GET("/:checksum", s.FileGet)
// file.POST("/chunk", s.FileCreateChunk)
// file.HEAD("/chunk/:checksum", s.FileChunkExists)
// file.GET("/analyze/:checksum", s.FileAnalyze)
}

View File

@ -10,7 +10,6 @@ import (
"time"
"github.com/gin-gonic/gin"
photosstore "gitlab.celogeek.com/photos/api/internal/photos/store"
"gorm.io/gorm"
)
@ -23,7 +22,6 @@ type Service struct {
Gin *gin.Engine
DB *gorm.DB
Config *ServiceConfig
Store *photosstore.Store
StorageTmp *Storage
StorageUpload *Storage
LogOk *Logger
@ -40,7 +38,6 @@ func New(config *ServiceConfig) *Service {
return &Service{
Gin: gin.New(),
Config: config,
Store: &photosstore.Store{Path: config.StorePath},
StorageTmp: NewStorage(config.StorePath, "tmp"),
StorageUpload: NewStorage(config.StorePath, "upload"),
LogOk: &Logger{os.Stdout, "Photos"},
@ -67,7 +64,7 @@ func (s *Service) SetupRoutes() {
}
func (s *Service) PrepareStore() {
d, err := os.Stat(s.Store.Path)
d, err := os.Stat(s.Config.StorePath)
if err != nil {
s.LogErr.Fatal("Store", err)
}

View File

@ -6,10 +6,12 @@ import (
"github.com/gin-gonic/gin"
)
type Me struct {
User string `json:"user"`
}
func (s *Service) Me(c *gin.Context) {
c.JSON(http.StatusOK, gin.H{
"user": s.CurrentSession(c).Account.Login,
})
c.JSON(http.StatusOK, Me{s.CurrentSession(c).Account.Login})
}
func (s *Service) MeInit() {

View File

@ -2,6 +2,7 @@ package photosapi
import (
"errors"
"fmt"
"net/http"
"github.com/gin-gonic/gin"
@ -17,9 +18,9 @@ func (s *Service) Recovery(c *gin.Context) {
defer func() {
if err := recover(); err != nil {
s.LogErr.Print("PANIC", err)
c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{
"error": ErrUnexpected.Error(),
"details": err,
c.AbortWithStatusJSON(http.StatusInternalServerError, &ErrorWithDetails{
ErrUnexpected.Error(),
[]string{fmt.Sprint(err)},
})
}
}()

View File

@ -26,22 +26,12 @@ var (
// Model
type File struct {
ID uint32 `gorm:"primary_key" json:"id"`
Name string `gorm:"not null" json:"name"`
Checksum string `gorm:"unique;size:44;not null"`
Size uint64 `gorm:"not null"`
Author *Account `gorm:"constraint:OnDelete:SET NULL,OnUpdate:CASCADE" json:"author"`
AuthorId *uint32 `json:"-"`
Chunks []*FileChunk `gorm:"constraint:OnDelete:CASCADE,OnUpdate:CASCADE"`
CreatedAt time.Time `json:"created_at"`
UpdatedAt time.Time `json:"updated_at"`
}
type FileChunk struct {
FileId uint32
File *File `gorm:"constraint:OnDelete:CASCADE,OnUpdate:CASCADE"`
Part uint32
ID uint32 `gorm:"primary_key" json:"id"`
Name string `gorm:"not null" json:"name"`
Checksum string `gorm:"unique;size:44;not null"`
Size uint64 `gorm:"not null"`
Author *Account `gorm:"constraint:OnDelete:SET NULL,OnUpdate:CASCADE" json:"author"`
AuthorId *uint32 `json:"-"`
CreatedAt time.Time `json:"created_at"`
UpdatedAt time.Time `json:"updated_at"`
}
@ -53,18 +43,18 @@ func (s *Service) UploadCreate(c *gin.Context) {
return
}
if err := s.StorageTmp.Create(sha.String()); err != nil {
upload := &Upload{sha.String()}
if err := s.StorageTmp.Create(upload.Id); err != nil {
c.AbortWithError(http.StatusInternalServerError, err)
return
}
c.JSON(http.StatusCreated, gin.H{
"upload_id": sha.String(),
})
c.JSON(http.StatusCreated, upload)
}
type UploadUri struct {
Id string `uri:"upload_id" binding:"required,uuid"`
type Upload struct {
Id string `json:"upload_id" uri:"upload_id" binding:"required,uuid"`
}
type UploadPartQuery struct {
@ -74,7 +64,7 @@ type UploadPartQuery struct {
func (s *Service) UploadPart(c *gin.Context) {
var (
upload UploadUri
upload Upload
uploadPart UploadPartQuery
)
@ -103,7 +93,7 @@ func (s *Service) UploadPart(c *gin.Context) {
sha := sha256.New()
t := io.TeeReader(c.Request.Body, sha)
w, err := io.Copy(f, t)
_, err = io.Copy(f, t)
if err != nil {
f.Close()
os.Remove(tmp_file)
@ -119,22 +109,18 @@ func (s *Service) UploadPart(c *gin.Context) {
return
}
if err = os.Rename(tmp_file, file); err != nil {
err = os.Rename(tmp_file, file)
if err != nil {
os.Remove(tmp_file)
c.AbortWithError(http.StatusInternalServerError, err)
return
}
c.JSON(http.StatusCreated, gin.H{
"upload_id": upload.Id,
"part": uploadPart.Part,
"size": w,
"sha256": uploadPart.PartSha256,
})
c.Status(http.StatusNoContent)
}
func (s *Service) UploadCancel(c *gin.Context) {
var upload UploadUri
var upload Upload
if c.BindUri(&upload) != nil {
return
}
@ -155,7 +141,7 @@ type UploadCompleteRequest struct {
func (s *Service) UploadComplete(c *gin.Context) {
var (
upload UploadUri
upload Upload
uploadCompleteRequest UploadCompleteRequest
)
if c.BindUri(&upload) != nil || c.BindJSON(&uploadCompleteRequest) != nil {
@ -167,15 +153,12 @@ func (s *Service) UploadComplete(c *gin.Context) {
return
}
c.JSON(http.StatusOK, gin.H{
"sha256": uploadCompleteRequest.Sha256,
"parts": uploadCompleteRequest.Parts,
"name": uploadCompleteRequest.Name,
})
c.Status(http.StatusNoContent)
}
func (s *Service) UploadInit() {
upload := s.Gin.Group("/upload")
upload.Use(s.RequireSession)
// start
upload.POST("", s.UploadCreate)
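
Because Upload now carries both json and uri tags, the struct returned by UploadCreate is the same one the later handlers bind back from the route parameter, and those handlers answer 204 No Content on success. A hedged sketch of that dual use; the ":upload_id" route, the PUT verb and the google/uuid call are assumptions for illustration, since only POST /upload appears in these hunks:

```go
// Sketch of the dual json/uri binding of the Upload type from this diff.
package main

import (
	"net/http"

	"github.com/gin-gonic/gin"
	"github.com/google/uuid"
)

// Upload mirrors the type added in this diff: one struct serves both the
// JSON body of the create response and the :upload_id route parameter.
type Upload struct {
	Id string `json:"upload_id" uri:"upload_id" binding:"required,uuid"`
}

func main() {
	r := gin.New()
	// Create: answers {"upload_id":"<uuid>"}, as UploadCreate does.
	r.POST("/upload", func(c *gin.Context) {
		c.JSON(http.StatusCreated, &Upload{Id: uuid.NewString()})
	})
	// Part/complete/cancel-style handlers bind the same struct back from the
	// URI and, after this change, answer 204 No Content on success.
	r.PUT("/upload/:upload_id", func(c *gin.Context) {
		var upload Upload
		if c.BindUri(&upload) != nil {
			return // BindUri already aborted the request with a 400
		}
		c.Status(http.StatusNoContent)
	})
	_ = r.Run(":8080") // placeholder listen address
}
```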

View File

@ -1,122 +0,0 @@
package photosstore
import (
"crypto/sha1"
"encoding/hex"
"encoding/json"
"errors"
"fmt"
"os"
"path/filepath"
"time"
"github.com/gin-gonic/gin"
)
var (
ErrStoreChunkAlreadyExists = errors.New("chunk file already exists")
)
type Store struct {
Path string
}
type Chunk struct {
*Store
Sum string
Bytes []byte
}
func (s *Store) NewChunk(b []byte) *Chunk {
sum := sha1.New()
sum.Write(b)
sumString := hex.EncodeToString(sum.Sum(nil))
return &Chunk{s, sumString, b}
}
func (s *Store) LoadChunk(sum string) (*Chunk, error) {
c := s.Chunk(sum)
if !c.FileExists() {
return nil, fmt.Errorf("chunk %s doesn't exists", sum)
}
b, err := os.ReadFile(c.Filename())
if err != nil {
return nil, err
}
c.Bytes = b
return c, nil
}
func (s *Store) Chunk(sum string) *Chunk {
return &Chunk{s, sum, nil}
}
func (c *Chunk) Dir() string {
return filepath.Join(c.Path, "storage", c.Sum[0:1], c.Sum[1:2])
}
func (c *Chunk) Filename() string {
return filepath.Join(c.Dir(), c.Sum)
}
func (c *Chunk) Size() int64 {
st, err := os.Stat(c.Filename())
if err != nil {
return -1
}
return st.Size()
}
func (c *Chunk) FileExists() bool {
fs, err := os.Stat(c.Filename())
if errors.Is(err, os.ErrNotExist) {
return false
}
return !fs.IsDir()
}
func (c *Chunk) Mkdir() error {
return os.MkdirAll(c.Dir(), 0755)
}
func (c *Chunk) Save(login string) error {
if c.FileExists() {
return ErrStoreChunkAlreadyExists
}
if err := c.Mkdir(); err != nil {
return err
}
if err := os.WriteFile(c.Filename(), c.Bytes, 0666); err != nil {
return err
}
meta, err := os.Create(c.Filename() + ".json")
if err != nil {
return err
}
enc := json.NewEncoder(meta)
enc.SetIndent("", " ")
return enc.Encode(gin.H{
"author": login,
"date": time.Now().UTC().Format("2006-01-02 15:04:05"),
"checksum": c.Sum,
"size": len(c.Bytes),
})
}
func (s *Store) Combine(sumb []string) (string, uint64, error) {
sum := sha1.New()
size := uint64(0)
for _, sb := range sumb {
c, err := s.LoadChunk(sb)
if err != nil {
return "", 0, err
}
sum.Write(c.Bytes)
size += uint64(len(c.Bytes))
}
return hex.EncodeToString(sum.Sum(nil)), size, nil
}

View File

@ -1,70 +0,0 @@
package photosstore
import (
"errors"
"io"
"os"
)
var (
ErrStoreMissingChunks = errors.New("part checksum missing")
)
type StoreReaderChunk struct {
Filename string
Size int64
}
type StoreReader struct {
current *os.File
chunk int
chunks []StoreReaderChunk
Size int64
}
func (s *Store) NewStoreReader(chunks []string) (*StoreReader, error) {
sr := &StoreReader{nil, 0, make([]StoreReaderChunk, len(chunks)), 0}
for i, chunk := range chunks {
c := s.Chunk(chunk)
name := c.Filename()
size := c.Size()
if size < 0 {
return nil, ErrStoreMissingChunks
}
sr.chunks[i] = StoreReaderChunk{name, size}
sr.Size += size
}
return sr, nil
}
func (s *StoreReader) Read(p []byte) (n int, err error) {
if s.current == nil {
f, err := os.Open(s.chunks[s.chunk].Filename)
if err != nil {
return -1, err
}
s.current = f
}
n, err = s.current.Read(p)
if err == io.EOF {
s.chunk++
if s.chunk > len(s.chunks)-1 {
return
}
s.Close()
return s.Read(p)
}
return
}
func (s *StoreReader) Close() {
if s.current != nil {
s.current.Close()
s.current = nil
}
}
// func (s *StoreReader) Seek(offset int64, whence int) (int64, error) {
// }