Skip to content

Commit

Permalink
Merge pull request #16 from lomorage/dev
Browse files Browse the repository at this point in the history
refactor hashHex and hashBase64 variable names
  • Loading branch information
dwebfan authored May 19, 2024
2 parents 29bc165 + 6ecaa65 commit 86c127b
Show file tree
Hide file tree
Showing 17 changed files with 225 additions and 150 deletions.
4 changes: 2 additions & 2 deletions clients/upload.go
Original file line number Diff line number Diff line change
Expand Up @@ -104,7 +104,7 @@ func (ac *AWSClient) HeadObject(bucket, remotePath string) (*types.ISOInfo, erro
info.Size = int(*object.ContentLength)
}
if object.ChecksumSHA256 != nil {
info.HashBase64 = *object.ChecksumSHA256
info.HashRemote = *object.ChecksumSHA256
}
common.LogDebugObject("HeadObjectReply", info)
return info, nil
Expand Down Expand Up @@ -232,7 +232,7 @@ func (ac *AWSClient) CompleteMultipartUpload(request *UploadRequest, parts []*ty
completedParts[i] = &s3.CompletedPart{
PartNumber: aws.Int64(int64(p.PartNo)),
ETag: aws.String(p.Etag),
ChecksumSHA256: aws.String(p.HashBase64),
ChecksumSHA256: aws.String(p.HashRemote),
}
}

Expand Down
10 changes: 5 additions & 5 deletions cmd/lomob/crypt.go
Original file line number Diff line number Diff line change
Expand Up @@ -91,7 +91,7 @@ func encryptCmd(ctx *cli.Context) error {
defer dst.Close()

fmt.Printf("Start encrypt '%s', and save output to '%s'\n", ifilename, ofilename)
_, err = encryptLocalFile(src, dst, []byte(masterKey), salt, true)
_, _, err = encryptLocalFile(src, dst, []byte(masterKey), salt, true)
if err != nil {
return err
}
Expand All @@ -101,20 +101,20 @@ func encryptCmd(ctx *cli.Context) error {
return nil
}

func encryptLocalFile(src io.ReadSeeker, dst io.Writer, masterKey, iv []byte, hasHeader bool) ([]byte, error) {
func encryptLocalFile(src io.ReadSeeker, dst io.Writer, masterKey, iv []byte, hasHeader bool) ([]byte, []byte, error) {
// Derive key from passphrase using Argon2
// TODO: Using IV as salt for simplicity, change to different salt?
encryptKey := crypto.DeriveKeyFromMasterKey(masterKey, iv)
encryptor, err := crypto.NewEncryptor(src, encryptKey, iv, hasHeader)
if err != nil {
return nil, err
return nil, nil, err
}

_, err = io.Copy(dst, encryptor)
if err != nil {
return nil, err
return nil, nil, err
}
return encryptor.GetHash(), nil
return encryptor.GetHashOrig(), encryptor.GetHashEncrypt(), nil
}

func decryptLocalFile(ctx *cli.Context) error {
Expand Down
6 changes: 3 additions & 3 deletions cmd/lomob/iso.go
Original file line number Diff line number Diff line change
Expand Up @@ -220,7 +220,7 @@ func createIso(maxSize uint64, isoFilename string, scanRootDirs map[int]string,
if err != nil {
return 0, "", nil, err
}
isoInfo.HashHex = lomohash.CalculateHashHex(hash)
isoInfo.SetHashLocal(hash)
// create db entry and update file info
start := time.Now()
_, count, err := db.CreateIsoWithFileIDs(isoInfo,
Expand Down Expand Up @@ -249,15 +249,15 @@ func listISO(ctx *cli.Context) error {
writer := tabwriter.NewWriter(os.Stdout, 0, 0, 4, ' ', tabwriter.TabIndent)
defer writer.Flush()

fmt.Fprint(writer, "ID\tName\tSize\tStatus\tRegion\tBucket\tFiles Count\tCreate Time\tHash\n")
fmt.Fprint(writer, "ID\tName\tSize\tStatus\tRegion\tBucket\tFiles Count\tCreate Time\tLocal Hash\n")
for _, iso := range isos {
_, count, err := db.GetTotalFilesInIso(iso.ID)
if err != nil {
return err
}
fmt.Fprintf(writer, "%d\t%s\t%s\t%s\t%s\t%s\t%d\t%s\t%s\n", iso.ID, iso.Name,
datasize.ByteSize(iso.Size).HR(), iso.Status, iso.Region, iso.Bucket, count,
common.FormatTime(iso.CreateTime.Local()), iso.HashBase64)
common.FormatTime(iso.CreateTime.Local()), iso.HashLocal)
}
return nil
}
Expand Down
14 changes: 7 additions & 7 deletions cmd/lomob/list-cloud.go
Original file line number Diff line number Diff line change
Expand Up @@ -55,17 +55,17 @@ func listFileTreeInGDrive(client *gcloud.DriveClient, currNode treeprint.Tree, f
}
for _, file := range files {
t := file.ModTime
hashOrigin := file.Hash
if len(hashOrigin) > 6 {
hashOrigin = hashOrigin[:6]
hashLocal := file.HashLocal
if len(hashLocal) > 6 {
hashLocal = hashLocal[:6]
}

hashEncrypt := file.HashEncrypt
if len(hashEncrypt) > 6 {
hashEncrypt = hashEncrypt[:6]
hashRemote := file.HashRemote
if len(hashRemote) > 6 {
hashRemote = hashRemote[:6]
}
currNode.AddMetaNode(fmt.Sprintf("\t%12s\t%02d/%02d/%d\t%s\t%s", strconv.Itoa(file.Size),
t.Month(), t.Day(), t.Year(), hashOrigin, hashEncrypt), file.Name)
t.Month(), t.Day(), t.Year(), hashLocal, hashRemote), file.Name)
}
return nil
}
2 changes: 1 addition & 1 deletion cmd/lomob/scan.go
Original file line number Diff line number Diff line change
Expand Up @@ -157,8 +157,8 @@ func selectOrInsertFile(dirID int, path string, info os.FileInfo) error {
Name: info.Name(),
Size: int(info.Size()),
ModTime: info.ModTime(),
Hash: lomohash.CalculateHashHex(hash),
}
fi.SetHashLocal(hash)

_, err = db.InsertFile(fi)
return err
Expand Down
12 changes: 6 additions & 6 deletions cmd/lomob/upload-files.go
Original file line number Diff line number Diff line change
Expand Up @@ -172,15 +172,15 @@ func uploadFilesToGdrive(ctx *cli.Context) error {
logrus.Warnf("Close %s: %s", fullLocalPath, err)
}

hashEnc := hash.CalculateHashHex(encryptor.GetHash())
err = db.UpdateFileIsoIDAndEncHash(types.IsoIDCloud, f.ID, hashEnc)
hashEnc := hash.CalculateHashHex(encryptor.GetHashEncrypt())
err = db.UpdateFileIsoIDAndRemoteHash(types.IsoIDCloud, f.ID, hashEnc)
if err != nil {
return err
}

// add encrypt hash as part of the file's metadata
err = client.UpdateFileMetadata(fileID, map[string]string{
types.MetadataKeyHashOrig: f.Hash,
types.MetadataKeyHashOrig: f.HashLocal,
types.MetadataKeyHashEncrypt: hashEnc,
})
if err != nil {
Expand Down Expand Up @@ -263,9 +263,9 @@ func uploadFileToS3(cli *clients.AWSClient, bucket, storageClass, remoteFilename
remoteFilename, remoteInfo.Size, expectSize)
recreate = true
}
if remoteInfo.HashBase64 != expectHash {
if remoteInfo.HashRemote != expectHash {
logrus.Warnf("%s exists in cloud and its checksum is %s, but provided checksum is %s",
remoteFilename, remoteInfo.HashBase64, expectHash)
remoteFilename, remoteInfo.HashRemote, expectHash)
recreate = true
}
// no need upload, return nil upload request
Expand Down Expand Up @@ -327,7 +327,7 @@ func uploadEncryptFileToS3(cli *clients.AWSClient, bucket, storageClass, filenam
tmpFileName := tmpFile.Name()
defer tmpFile.Close()

hash, err := encryptLocalFile(src, tmpFile, []byte(masterKey), salt, true)
_, hash, err := encryptLocalFile(src, tmpFile, []byte(masterKey), salt, true)
if err != nil {
return "", err
}
Expand Down
52 changes: 26 additions & 26 deletions cmd/lomob/upload-iso.go
Original file line number Diff line number Diff line change
Expand Up @@ -58,8 +58,8 @@ func validateISO(isoFilename string) (*os.File, *types.ISOInfo, error) {
return nil, nil, err
}
hashHex := lomohash.CalculateHashHex(hash)
if hashHex != iso.HashHex {
return nil, nil, errors.Errorf("Hash in DB is %s, but got %s", iso.HashHex, hashHex)
if hashHex != iso.HashLocal {
return nil, nil, errors.Errorf("Hash in DB is %s, but got %s", iso.HashLocal, hashHex)
}
return f, iso, nil
}
Expand Down Expand Up @@ -106,8 +106,8 @@ func prepareUploadParts(isoFilename string, partSize int, calHash bool) (*os.Fil
Size: partLength,
}
if partsChecksum != nil {
parts[i].HashHex = lomohash.CalculateHashHex(partsChecksum[i])
parts[i].HashBase64 = lomohash.CalculateHashBase64(partsChecksum[i])
parts[i].HashLocal = lomohash.CalculateHashHex(partsChecksum[i])
parts[i].HashRemote = lomohash.CalculateHashBase64(partsChecksum[i])
}
remaining -= partLength
}
Expand All @@ -120,11 +120,11 @@ func prepareUploadParts(isoFilename string, partSize int, calHash bool) (*os.Fil
return isoFile, isoInfo, parts, nil
}

isoInfo.HashBase64, err = lomohash.ConcatAndCalculateBase64Hash(partsChecksum)
isoInfo.HashRemote, err = lomohash.ConcatAndCalculateBase64Hash(partsChecksum)
if err != nil {
return nil, nil, nil, err
}
return isoFile, isoInfo, parts, db.UpdateIsoBase64Hash(isoInfo.ID, isoInfo.HashBase64)
return isoFile, isoInfo, parts, db.UpdateIsoRemoteHash(isoInfo.ID, isoInfo.HashRemote)
}

func prepareUploadRequest(cli *clients.AWSClient, region, bucket, storageClass string,
Expand All @@ -139,11 +139,11 @@ func prepareUploadRequest(cli *clients.AWSClient, region, bucket, storageClass s
return nil, errors.Errorf("%s exists in cloud and its size is %d, but provided file size is %d",
isoFilename, remoteInfo.Size, isoInfo.Size)
}
if isoInfo.HashBase64 != "" {
remoteHash := strings.Split(remoteInfo.HashBase64, "-")[0]
if remoteHash != isoInfo.HashBase64 {
if isoInfo.HashRemote != "" {
remoteHash := strings.Split(remoteInfo.HashRemote, "-")[0]
if remoteHash != isoInfo.HashRemote {
return nil, errors.Errorf("%s exists in cloud and its checksum is %s, but provided checksum is %s",
isoFilename, remoteHash, isoInfo.HashBase64)
isoFilename, remoteHash, isoInfo.HashRemote)
}
}
// no need upload, return nil upload request
Expand Down Expand Up @@ -291,7 +291,7 @@ func uploadRawParts(cli *clients.AWSClient, region, bucket, storageClass, isoFil
readSeeker = prs
}

p.Etag, err = cli.Upload(int64(p.PartNo), int64(p.Size), request, readSeeker, p.HashBase64)
p.Etag, err = cli.Upload(int64(p.PartNo), int64(p.Size), request, readSeeker, p.HashRemote)
if err != nil {
failParts = append(failParts, p.PartNo)
logrus.Infof("Upload %s's part number %d:%s", isoFilename, p.PartNo, err)
Expand All @@ -313,7 +313,7 @@ func uploadRawParts(cli *clients.AWSClient, region, bucket, storageClass, isoFil
if len(failParts) != 0 {
return errors.Errorf("Parts %v failed to upload", failParts)
}
err = cli.CompleteMultipartUpload(request, parts, isoInfo.HashBase64)
err = cli.CompleteMultipartUpload(request, parts, isoInfo.HashRemote)
if err != nil {
logrus.Warnf("Upload %s fail: %s", isoFilename, err)
return err
Expand All @@ -332,7 +332,7 @@ func uploadEncryptParts(cli *clients.AWSClient, region, bucket, storageClass, is
}
defer isoFile.Close()

decoded, err := hex.DecodeString(isoInfo.HashHex)
decoded, err := hex.DecodeString(isoInfo.HashLocal)
if err != nil {
return err
}
Expand All @@ -341,9 +341,9 @@ func uploadEncryptParts(cli *clients.AWSClient, region, bucket, storageClass, is
}

salt := decoded[:crypto.SaltLen()]
// iso size need add salt block size
// iso size need add salt block size so as to compare with remote size
isoInfo.Size += crypto.SaltLen()
isoInfo.HashBase64 = ""
isoInfo.HashRemote = ""
request, err := prepareUploadRequest(cli, region, bucket, storageClass, isoInfo, force)
if err != nil {
return err
Expand Down Expand Up @@ -373,9 +373,9 @@ func uploadEncryptParts(cli *clients.AWSClient, region, bucket, storageClass, is

if p.Status == types.PartUploaded {
logrus.Infof("%s's part %d was uploaded successfully, skip new upload", isoFilename, p.PartNo)
h, err := lomohash.DecpdeHashBase64(p.HashBase64)
h, err := lomohash.DecodeHashBase64(p.HashRemote)
if err != nil {
return errors.Wrapf(err, "while decode part %d's base64 hash %s", i+1, p.HashBase64)
return errors.Wrapf(err, "while decode part %d's base64 hash %s", i+1, p.HashRemote)
}
partsHash = append(partsHash, h)
continue
Expand All @@ -393,18 +393,18 @@ func uploadEncryptParts(cli *clients.AWSClient, region, bucket, storageClass, is
defer tmpFile.Close()

prs := lomoio.NewFilePartReadSeeker(isoFile, start, end)
h, err := encryptLocalFile(prs, tmpFile, []byte(masterKey), salt, i == 0)
hl, hr, err := encryptLocalFile(prs, tmpFile, []byte(masterKey), salt, i == 0)
if err != nil {
return err
}
p.HashHex = lomohash.CalculateHashHex(h)
p.HashBase64 = lomohash.CalculateHashBase64(h)
p.SetHashLocal(hl)
p.SetHashRemote(hr)

_, err = tmpFile.Seek(0, io.SeekStart)
if err != nil {
return err
}
p.Etag, err = cli.Upload(int64(p.PartNo), int64(p.Size), request, tmpFile, p.HashBase64)
p.Etag, err = cli.Upload(int64(p.PartNo), int64(p.Size), request, tmpFile, p.HashRemote)
if err != nil {
failParts = append(failParts, p.PartNo)
logrus.Infof("Upload %s's part number %d:%s", isoFilename, p.PartNo, err)
Expand All @@ -415,8 +415,8 @@ func uploadEncryptParts(cli *clients.AWSClient, region, bucket, storageClass, is
}
continue
}
partsHash = append(partsHash, h)
err = db.UpdatePartEtagAndStatusHash(p.IsoID, p.PartNo, p.Etag, p.HashHex, p.HashBase64, types.PartUploaded)
partsHash = append(partsHash, hr)
err = db.UpdatePartEtagAndStatusHash(p.IsoID, p.PartNo, p.Etag, p.HashLocal, p.HashRemote, types.PartUploaded)
if err != nil {
logrus.Infof("Update %s's part number %d status %s:%s", isoFilename, p.PartNo,
types.PartUploaded, err)
Expand All @@ -438,19 +438,19 @@ func uploadEncryptParts(cli *clients.AWSClient, region, bucket, storageClass, is
return errors.Errorf("Parts %v failed to upload", failParts)
}

isoInfo.HashBase64, err = lomohash.ConcatAndCalculateBase64Hash(partsHash)
isoInfo.HashRemote, err = lomohash.ConcatAndCalculateBase64Hash(partsHash)
if err != nil {
return errors.Wrapf(err, "while encode iso base64 hash %v", partsHash)
}
err = cli.CompleteMultipartUpload(request, parts, isoInfo.HashBase64)
err = cli.CompleteMultipartUpload(request, parts, isoInfo.HashRemote)
if err != nil {
logrus.Warnf("Upload %s fail: %s", isoFilename, err)
return err
}
fmt.Printf("%s is uploaded to region %s, bucket %s successfully!\n",
isoFilename, region, bucket)

return db.UpdateIsoStatusHash(isoInfo.ID, isoInfo.HashBase64, types.IsoUploaded)
return db.UpdateIsoStatusRemoteHash(isoInfo.ID, isoInfo.HashRemote, types.IsoUploaded)
}

func uploadISO(accessKeyID, accessKey, region, bucket, storageClass, isoFilename, masterKey string,
Expand Down
8 changes: 6 additions & 2 deletions common/crypto/crypto.go
Original file line number Diff line number Diff line change
Expand Up @@ -51,8 +51,12 @@ func (e *Encryptor) Seek(offset int64, whence int) (int64, error) {
return e.sreader.Seek(offset, whence)
}

func (e *Encryptor) GetHash() []byte {
return e.sreader.GetHash()
func (e *Encryptor) GetHashOrig() []byte {
return e.sreader.GetHashOrig()
}

func (e *Encryptor) GetHashEncrypt() []byte {
return e.sreader.GetHashEncrypt()
}

type Decryptor struct {
Expand Down
9 changes: 6 additions & 3 deletions common/crypto/crypto_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@ func TestEncryptDecrypt(t *testing.T) {
}
}

func genExpectHash(t *testing.T, plaintext, key, iv []byte, inclIv bool) []byte {
func genExpectEncryptHash(t *testing.T, plaintext, key, iv []byte, inclIv bool) []byte {
stream, err := newCipherStream(key, iv)
require.Nil(t, err)

Expand All @@ -49,7 +49,7 @@ func testEncryptDecrypt(t *testing.T, plaintext []byte, hasHeader bool) {
_, err = io.ReadFull(rand.Reader, iv)
require.Nil(t, err)

expectHash := genExpectHash(t, plaintext, key, iv, hasHeader)
expectHashEncrypt := genExpectEncryptHash(t, plaintext, key, iv, hasHeader)

buf := bytes.NewReader(plaintext)
en, err := NewEncryptor(buf, key, iv, hasHeader)
Expand All @@ -68,7 +68,10 @@ func testEncryptDecrypt(t *testing.T, plaintext []byte, hasHeader bool) {
require.EqualValues(t, n, len(plaintext))

// verify hash
require.EqualValues(t, expectHash, en.GetHash())
h := sha256.New()
h.Write(plaintext)
require.EqualValues(t, h.Sum(nil), en.GetHashOrig(), string(plaintext))
require.EqualValues(t, expectHashEncrypt, en.GetHashEncrypt())

testBuffer = testBuffer[:n]

Expand Down
Loading

0 comments on commit 86c127b

Please sign in to comment.