Merge pull request #37 from yaxia/master
Storage Client Library - 0.4.2
slepox committed Dec 30, 2014
2 parents 52622a1 + cdaea14 commit ce2faa9
Showing 66 changed files with 25,435 additions and 1,457 deletions.
15 changes: 15 additions & 0 deletions .travis.yml
@@ -0,0 +1,15 @@
language: node_js
node_js:
- "0.11"
- "0.10"
- "0.8"

matrix:
allow_failures:
- node_js: "0.11"

install:
- npm install -g [email protected]
- npm --version
- npm install

13 changes: 13 additions & 0 deletions ChangeLog.txt
@@ -1,6 +1,19 @@
Note: This is an Azure Storage-only package. The all-up Azure node SDK still contains the old storage bits; in a future release those will be removed and replaced with an npm dependency on this storage SDK. This is a CTP v1 release, and the changes described below are relative to the Azure node SDK 0.9.8, available at https://github.com/Azure/azure-sdk-for-node.

2014.12 Version 0.4.2
ALL
* Fixed an issue where a batch operation could hang without firing its callback.
* Added the readable-stream module so that stream operations behave consistently on node 0.8 and node 0.10.
* Added nock support in tests.

BLOB
* The default HTTP or HTTPS port is now trimmed when getting the URL for the blob service (sketched below).
* Fixed an issue where metadata was not populated when getting a blob to text.

FILE
* The default HTTP or HTTPS port is now trimmed when getting the URL for the file service.

2014.11.28 Version 0.4.1

ALL
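To make the port-trimming entries above concrete, here is a minimal sketch of the behavior from the caller's side. The account name, container, blob, and credentials are placeholders, and the require name (azure-storage) and createBlobService arguments are assumptions about how the library is typically consumed; getUrl itself is the method changed later in this diff.

var azure = require('azure-storage'); // assumed package name for this library
var blobService = azure.createBlobService('myaccount', 'mykey'); // placeholder credentials

// Before 0.4.2 the default port could appear in the URL:
//   https://myaccount.blob.core.windows.net:443/mycontainer/myblob
// With 0.4.2 the default HTTPS port (443) is trimmed:
console.log(blobService.getUrl('mycontainer', 'myblob'));
// -> https://myaccount.blob.core.windows.net/mycontainer/myblob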
1 change: 1 addition & 0 deletions lib/common/common.js
@@ -55,6 +55,7 @@ exports.ChunkStream = require('./streams/chunkstream');
exports.ChunkStreamWithStream = require('./streams/chunkstreamwithstream');
exports.FileReadStream = require('./streams/filereadstream');
exports.SpeedSummary = require('./streams/speedsummary');
exports.rfs = require('./streams/readablefs');

// Utilities
exports.Constants = require('./util/constants');
22 changes: 12 additions & 10 deletions lib/common/services/storageserviceclient.js
@@ -137,9 +137,9 @@ StorageServiceClient.prototype.setHost = function (host) {

if (!parsedHost.port) {
if (parsedHost.protocol === Constants.HTTPS) {
parsedHost.port = 443;
parsedHost.port = Constants.DEFAULT_HTTPS_PORT;
} else {
parsedHost.port = 80;
parsedHost.port = Constants.DEFAULT_HTTP_PORT;
}
}

@@ -872,14 +872,16 @@ StorageServiceClient._normalizeError = function (error, response) {
// blob/queue errors should have error.Error, table errors should have error['odata.error']
var errorProperties = error.Error || error.error || error['odata.error'] || error;
for (var property in errorProperties) {
var key = property.toLowerCase();
normalizedError[key] = errorProperties[property];

// if this is a table error, message is an object - flatten it to normalize with blob/queue errors
// ex: "message":{"lang":"en-US","value":"The specified resource does not exist."} becomes message: "The specified resource does not exist."
if (key === 'message' && _.isObject(errorProperties[property])) {
if (errorProperties[property]['value']) {
normalizedError[key] = errorProperties[property]['value'];
if (errorProperties.hasOwnProperty(property)) {
var key = property.toLowerCase();
normalizedError[key] = errorProperties[property];

// if this is a table error, message is an object - flatten it to normalize with blob/queue errors
// ex: "message":{"lang":"en-US","value":"The specified resource does not exist."} becomes message: "The specified resource does not exist."
if (key === 'message' && _.isObject(errorProperties[property])) {
if (errorProperties[property]['value']) {
normalizedError[key] = errorProperties[property]['value'];
}
}
}
}
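As a standalone illustration of the flattening described in the comment above, the snippet below runs the same kind of loop over an invented table-style error payload; the payload shape is hypothetical but mirrors the 'odata.error' example in the comment.

// Illustration only: flatten a table error's nested message, as _normalizeError does.
var errorProperties = {
  code: 'ResourceNotFound',
  message: { lang: 'en-US', value: 'The specified resource does not exist.' }
};
var normalizedError = {};
for (var property in errorProperties) {
  if (errorProperties.hasOwnProperty(property)) {
    var key = property.toLowerCase();
    normalizedError[key] = errorProperties[property];
    if (key === 'message' && typeof errorProperties[property] === 'object' && errorProperties[property].value) {
      normalizedError[key] = errorProperties[property].value;
    }
  }
}
console.log(normalizedError);
// -> { code: 'ResourceNotFound', message: 'The specified resource does not exist.' }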
12 changes: 5 additions & 7 deletions lib/common/streams/batchoperation.js
@@ -97,7 +97,7 @@ BatchOperation.prototype.setConcurrency = function(concurrency) {
};

/**
* Is the workload heavy and It can used to determine whether we could queue operations
* Is the workload heavy and It can be used to determine whether we could queue operations
*/
BatchOperation.prototype.IsWorkloadHeavy = function() {
//Only support one batch operation for now.
@@ -129,7 +129,7 @@ BatchOperation.prototype.addOperation = function(operation) {
operation.status = OperationState.QUEUED;
operation.operationId = ++this._totalOperation;
this._queuedOperation++;
this.logger.debug(util.format('Add operation %d into batch operation %s.', operation.operationId, this.name));
this.logger.debug(util.format('Add operation %d into batch operation %s. Active: %s; Queued: %s', operation.operationId, this.name, this._activeOperation, this._queuedOperation));
//Immediately start the idle operation if workload isn't heavy
this._runOperation(operation);
return this.IsWorkloadHeavy();
@@ -211,12 +211,12 @@ BatchOperation.prototype.getBatchOperationCallback = function (operation) {
operation.status = OperationState.CALLBACK;
self.logger.debug(util.format('Batch operation paused and Operation %d wait for firing callback', operation.operationId));
} else if (self.callbackInOrder) {
operation.status = OperationState.CALLBACK;
if (self._currentOperationId === operation.operationId) {
self._fireOperationUserCallback(operation);
} else if (self._currentOperationId > operation.operationId) {
throw new Error('Debug error: current callback operation id cannot be larger than operation id');
} else {
operation.status = OperationState.CALLBACK;
self.logger.debug(util.format('Operation %d is waiting for firing callback %s', operation.operationId, self._currentOperationId));
}
} else {
@@ -233,11 +233,9 @@ BatchOperation.prototype.getBatchOperationCallback = function (operation) {
* Fire user's call back
*/
BatchOperation.prototype._fireOperationUserCallback = function (operation) {
if (!operation) {
var index = this._getCallbackOperationIndex();
if (index != -1) {
var index = this._getCallbackOperationIndex();
if (!operation && index != -1) {
operation = this._operations[index];
}
}

if (operation && !this._paused) {
4 changes: 2 additions & 2 deletions lib/common/streams/rangestream.js
@@ -56,10 +56,10 @@ RangeStream.prototype.list = function (options) {
var start = this._startOffset;
var singleRangeSize = Constants.BlobConstants.MAX_SINGLE_GET_PAGE_RANGE_SIZE;
var end = Math.min(this._startOffset + singleRangeSize - 1, this._endOffset);
options.rangeStart = start
options.rangeStart = start;
options.rangeEnd = end;

if (this._listFunc == undefined) {
if (this._listFunc === undefined) {
throw new Error(SR.NO_LIST_FUNC_PROVIDED);
}

32 changes: 32 additions & 0 deletions lib/common/streams/readablefs.js
@@ -0,0 +1,32 @@
//
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//

var rs = require('readable-stream').Readable;
var fs = require('fs');

/*
* As far as streams support goes, we can get the node 0.10 stream API in node 0.8.
* Use the readable-stream module (https://www.npmjs.org/package/readable-stream) which is
* essentially a copy of the stream modules from core node 0.10 and it just works on both 0.8 and 0.10.
*/

exports.createReadStream = function(path, options) {
var stream = fs.createReadStream(path, options);
if (/^v0\.8\./.test(process.version)) {
stream = rs().wrap(stream);
}
return stream;
};
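A short usage sketch of the helper above. The require path and file name are placeholders; the point is that the returned stream exposes the node 0.10 Readable interface on node 0.8 as well, because the fs stream is wrapped by readable-stream there.

var rfs = require('./readablefs'); // placeholder path, i.e. lib/common/streams/readablefs
var stream = rfs.createReadStream('/tmp/example.txt'); // placeholder file

// node 0.10-style consumption works on both runtimes.
stream.on('readable', function () {
  var chunk;
  while ((chunk = stream.read()) !== null) {
    console.log('read %d bytes', chunk.length);
  }
});
stream.on('end', function () {
  console.log('done');
});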
21 changes: 19 additions & 2 deletions lib/common/util/constants.js
@@ -31,7 +31,7 @@ var Constants = {
/*
* Specifies the value to use for UserAgent header.
*/
USER_AGENT_PRODUCT_VERSION: '0.4.1',
USER_AGENT_PRODUCT_VERSION: '0.4.2',

/**
* The number of default concurrent requests for parallel operation.
@@ -80,6 +80,22 @@ var Constants = {
* @type {string}
*/
HTTPS: 'https:',

/**
* Default HTTP port.
*
* @const
* @type {int}
*/
DEFAULT_HTTP_PORT: 80,

/**
* Default HTTPS port.
*
* @const
* @type {int}
*/
DEFAULT_HTTPS_PORT: 443,

/**
* Marker for atom metadata.
@@ -1998,7 +2014,8 @@ var Constants = {

FileErrorCodeStrings: {
SHARE_ALREADY_EXISTS: 'ShareAlreadyExists',
SHARE_NOT_FOUND: 'ShareNotFound'
SHARE_NOT_FOUND: 'ShareNotFound',
FILE_NOT_FOUND: 'FileNotFound'
},

QueueErrorCodeStrings: {
15 changes: 15 additions & 0 deletions lib/common/util/util.js
@@ -19,10 +19,25 @@ var path = require('path');
var crypto = require('crypto');
var _ = require('underscore');
var util = require('util');
var url = require('url');
var Constants = require('./constants');
var StorageUtilities = require('./storageutilities');
var SR = require('./sr');

/**
* Trim the default port in the url.
*
* @param {string} uri The URI whose default port should be trimmed.
* @return {string} The URI without the default port.
*/
exports.trimPortFromUri = function (uri) {
var uri = url.parse(uri);
if ((uri.protocol === Constants.HTTPS && uri.port == Constants.DEFAULT_HTTPS_PORT) || (uri.protocol === Constants.HTTP && uri.port == Constants.DEFAULT_HTTP_PORT)) {
uri.host = uri.hostname;
}
return url.format(uri);
};

/**
* Encodes an URI.
*
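A quick sketch of trimPortFromUri on explicit default and non-default ports; the host names and the require path are placeholders.

var azureutil = require('./util'); // placeholder path, i.e. lib/common/util/util

// The default HTTPS port is dropped from the formatted URI...
console.log(azureutil.trimPortFromUri('https://myaccount.blob.core.windows.net:443/container/blob'));
// -> https://myaccount.blob.core.windows.net/container/blob

// ...while explicit non-default ports (for example the storage emulator) are preserved.
console.log(azureutil.trimPortFromUri('http://127.0.0.1:10000/devstoreaccount1/container/blob'));
// -> http://127.0.0.1:10000/devstoreaccount1/container/blob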
8 changes: 5 additions & 3 deletions lib/common/util/validate.js
@@ -175,7 +175,7 @@ var getNameError = function(name) {
}

return null;
}
};

/**
* Validates a container name.
@@ -343,7 +343,7 @@ exports.pageRangesAreValid = function (rangeStart, rangeEnd, writeBlockSizeInByt

callback();
return true;
}
};

/**
* Validates a blob type.
@@ -355,7 +355,9 @@ exports.blobTypeIsValid = function (type, callback) {
var getEnumValues = function (obj) {
var values = [];
for (var prop in obj) {
values.push(obj[prop]);
if (obj.hasOwnProperty(prop)) {
values.push(obj[prop]);
}
}
return values;
};
28 changes: 10 additions & 18 deletions lib/services/blob/blobservice.js
@@ -32,6 +32,7 @@ var SR = azureCommon.SR;
var validate = azureCommon.validate;
var StorageServiceClient = azureCommon.StorageServiceClient;
var WebResource = azureCommon.WebResource;
var rfs = azureCommon.rfs;

// Constants
var Constants = azureCommon.Constants;
@@ -1785,6 +1786,7 @@ BlobService.prototype.getBlobToText = function (container, blob, optionsOrCallba

if (!responseObject.error) {
responseObject.blobResult = new BlobResult(container, blob);
responseObject.blobResult.metadata = self.parseMetadataHeaders(responseObject.response.headers);
responseObject.blobResult.getPropertiesFromHeaders(responseObject.response.headers);
responseObject.text = responseObject.response.body;

@@ -2264,7 +2266,8 @@ BlobService.prototype.getUrl = function (container, blob, sasToken, primary) {
}
else {
host = this.host.primaryHost;
}
}
host = azureutil.trimPortFromUri(host);

return url.resolve(host, url.format({ pathname: this._getPath('/' + createResourceName(container, blob)), query: qs.parse(sasToken) }));
};
@@ -3000,7 +3003,7 @@ BlobService.prototype.createBlockBlobFromLocalFile = function (container, blob,
self._createBlobFromChunkStream(container, blob, BlobConstants.BlobTypes.BLOCK, stream, stat.size, options, callback);
} else {
//Use putBlob to upload file
var stream = new fs.createReadStream(localFileName);
var stream = new rfs.createReadStream(localFileName);
if (azureutil.objectIsNull(options.contentMD5) && options.useTransactionalMD5) {
azureutil.calculateMD5(stream, self.singleBlobPutThresholdInBytes, options, function (internalBuff) {
self._putBlockBlob(container, blob, internalBuff, null /* stream */, internalBuff.length, options, callback);
@@ -3757,7 +3760,7 @@ BlobService.prototype._updatePageBlobPagesImpl = function (container, blob, rang
var webResource = WebResource.put(resourceName)
.withQueryOption(QueryStringConstants.COMP, 'page')
.withHeader(HeaderConstants.CONTENT_TYPE, 'application/octet-stream')
.withHeader(HeaderConstants.PAGE_WRITE, writeMethod)
.withHeader(HeaderConstants.PAGE_WRITE, writeMethod);

BlobResult.setHeadersFromBlob(webResource, options);

@@ -4216,7 +4219,7 @@ BlobService.prototype._getBlobToRangeStream = function (container, blob, blobTyp

var speedSummary = options.speedSummary;
var parallelOperationThreadCount = options.parallelOperationThreadCount || this.parallelOperationThreadCount;
var batchOperations = new BatchOperation('getblob', { callbackInOrder: true, logger : this.logger });
var batchOperations = new BatchOperation('getblobInRanges', { callbackInOrder: true, logger : this.logger });
batchOperations.setConcurrency(parallelOperationThreadCount);

var self = this;
@@ -4315,22 +4318,11 @@ BlobService.prototype._getBlobToRangeStream = function (container, blob, blobTyp
batchOperations.resume();
});

batchOperations.on('end', function (error, retValue) {
batchOperations.on('end', function (error) {
self.logger.debug('Download completed!');
if (error) {
if (azureutil.pathExistsSync(localFileName)) {
// make sure writeStream is closed / destroyed to avoid locking issues
if (writeStream.close) {
writeStream.close();
}

// If the download failed from the beginning, remove the file.
fs.unlink(localFileName, function () {
callback(error, retValue);
});

callback(error);
return;
}
} else {
writeStream.end(function () {
self.logger.debug('Write stream has ended');
@@ -4475,7 +4467,7 @@ BlobService.prototype._getBlobToLocalFile = function (container, blob, localFile
}
callback(error, responseBlob, response);
});
}
};

/**
* The callback for {BlobService~getBlobToText}.
3 changes: 1 addition & 2 deletions lib/services/blob/internal/blockrangestream.js
@@ -16,7 +16,6 @@

var Constants = require('./../../../common/util/constants');
var EventEmitter = require('events').EventEmitter;
var BlobConstants = Constants.BlobConstants;
var BlobUtilities = require('./../blobutilities');

/**
@@ -124,7 +123,7 @@ BlockRangeStream.prototype._getTypeList = function (callback) {
} finally {
this._isEmitting = false;
}
}
};

/**
* Get the block list
Expand Down