Skip to content

Commit

Permalink
make sure buckets get deleted after the job
Browse files Browse the repository at this point in the history
  • Loading branch information
michaeloffner committed Jul 12, 2024
1 parent e790b37 commit 1b9ba41
Show file tree
Hide file tree
Showing 2 changed files with 65 additions and 68 deletions.
22 changes: 11 additions & 11 deletions tests/functions/S3Download.cfc
Original file line number Diff line number Diff line change
@@ -1,22 +1,17 @@
component extends="org.lucee.cfml.test.LuceeTestCase" labels="s3" {

// TestBox lifecycle hook: runs once before the suite. Caches AWS credentials and a
// unique bucket name (prefix "download") in the variables scope for use by the tests.
function beforeAll() {
variables.cred=Util::getAWSCredentials();
variables.bucketName = Util::createBucketName("download");
}

// TestBox lifecycle hook: runs once after the suite. Deletes the test bucket so it
// does not outlive the job (the "EL" suffix presumably means errors are logged/ignored
// rather than thrown — TODO confirm against the helper's definition).
function afterAll() {
deleteBucketEL(variables.cred,variables.bucketName);
}


function run( testResults , testBox ) {
describe( title="Test suite for S3Download()",skip=Util::isAWSNotSupported(), body=function() {
var cred = variables.cred;
var bucketName = variables.bucketName;
var cred = Util::getAWSCredentials();
var bucketName = Util::createBucketName("download");
var objectName="sub/test.txt";
var content="Susi
Sorglos";

try {


if(!S3Exists(
bucketName=bucketName, objectName=objectName,
accessKeyId=cred.ACCESS_KEY_ID, secretAccessKey=cred.SECRET_KEY, host=(isNull(cred.HOST)?nullvalue():cred.HOST))) {
Expand Down Expand Up @@ -169,6 +164,11 @@ Sor:4;glos:4;");
assertTrue(isSimpleValue(data));
assertEquals(data, "before;4;4;4;after;");
});

}
finally {
deleteBucketEL(cred,bucketName);
}

});
}
Expand Down
111 changes: 54 additions & 57 deletions tests/general/BigBucket.cfc
Original file line number Diff line number Diff line change
@@ -1,75 +1,72 @@
component extends="org.lucee.cfml.test.LuceeTestCase" labels="s3" {

// TestBox lifecycle hook: runs once before the suite. Caches AWS credentials and a
// unique bucket name (prefix "big") in the variables scope for use by the tests.
function beforeAll() {
variables.cred=Util::getAWSCredentials();
variables.bucketName = Util::createBucketName("big");
}

// TestBox lifecycle hook: runs once after the suite. Deletes the big-bucket test
// bucket so it does not outlive the job (the "EL" suffix presumably means errors are
// logged/ignored rather than thrown — TODO confirm against the helper's definition).
function afterAll() {
deleteBucketEL(variables.cred,variables.bucketName);
}

function run( testResults , testBox ) {

describe( title="Test suite for big buckets",skip=Util::isAWSNotSupported(), body=function() {

// does the bucket exists?
if(!S3Exists(
bucketName:bucketName,
accessKeyId:cred.ACCESS_KEY_ID, secretAccessKey:cred.SECRET_KEY, host:(isNull(cred.HOST)?nullvalue():cred.HOST))) {

S3CreateBucket(
bucketName:bucketName,
accessKeyId:cred.ACCESS_KEY_ID, secretAccessKey:cred.SECRET_KEY, host:(isNull(cred.HOST)?nullvalue():cred.HOST));

}
var executionTime=getTickCount();
var qry=S3ListBucket(
bucketName:bucketName,
accessKeyId:cred.ACCESS_KEY_ID, secretAccessKey:cred.SECRET_KEY, host:(isNull(cred.HOST)?nullvalue():cred.HOST));
var executionTime=getTickCount()-executionTime;
var records=qry.recordcount;

if(records==0) {
loop from=1 to=100 index="i" {
S3Write(
value:i,
var cred=Util::getAWSCredentials()
var bucketName=Util::createBucketName("big");
try {
// does the bucket exists?
if(!S3Exists(
bucketName:bucketName,
accessKeyId:cred.ACCESS_KEY_ID, secretAccessKey:cred.SECRET_KEY, host:(isNull(cred.HOST)?nullvalue():cred.HOST))) {

S3CreateBucket(
bucketName:bucketName,
objectName:"file#i#.txt",
accessKeyId:cred.ACCESS_KEY_ID, secretAccessKey:cred.SECRET_KEY, host:(isNull(cred.HOST)?nullvalue():cred.HOST));

}
}

it(title="checking with S3ListBucket", skip=Util::isAWSNotSupported(), body = function( currentSpec ) {
var cred=Util::getAWSCredentials()

if(records==0){
var executionTime=getTickCount();
var qry=S3ListBucket(
var executionTime=getTickCount();
var qry=S3ListBucket(
bucketName:bucketName,
accessKeyId:cred.ACCESS_KEY_ID, secretAccessKey:cred.SECRET_KEY, host:(isNull(cred.HOST)?nullvalue():cred.HOST));
var executionTime=getTickCount()-executionTime;
var records=qry.recordcount;

if(records==0) {
loop from=1 to=100 index="i" {
S3Write(
value:i,
bucketName:bucketName,
objectName:"file#i#.txt",
accessKeyId:cred.ACCESS_KEY_ID, secretAccessKey:cred.SECRET_KEY, host:(isNull(cred.HOST)?nullvalue():cred.HOST));
var executionTime=getTickCount()-executionTime;
var records=qry.recordcount;
}
}

it(title="checking with S3ListBucket", skip=Util::isAWSNotSupported(), body = function( currentSpec ) {
var cred=Util::getAWSCredentials()

assertTrue( executionTime<3000 );
assertEquals(10000, records); // 10000] but received [861
});
if(records==0){
var executionTime=getTickCount();
var qry=S3ListBucket(
bucketName:bucketName,
accessKeyId:cred.ACCESS_KEY_ID, secretAccessKey:cred.SECRET_KEY, host:(isNull(cred.HOST)?nullvalue():cred.HOST));
var executionTime=getTickCount()-executionTime;
var records=qry.recordcount;
}

assertTrue( executionTime<3000 );
assertEquals(10000, records); // 10000] but received [861
});


it(title="checking with S3Exists", skip=Util::isAWSNotSupported(), body = function( currentSpec ) {
var cred=Util::getAWSCredentials()

var executionTime=getTickCount();
S3Exists(
bucketName:bucketName,
accessKeyId:cred.ACCESS_KEY_ID, secretAccessKey:cred.SECRET_KEY, host:(isNull(cred.HOST)?nullvalue():cred.HOST));
var executionTime=getTickCount()-executionTime;
it(title="checking with S3Exists", skip=Util::isAWSNotSupported(), body = function( currentSpec ) {
var cred=Util::getAWSCredentials()


assertTrue( executionTime<100 );
// assertEquals(10000, executionTime);
});
var executionTime=getTickCount();
S3Exists(
bucketName:bucketName,
accessKeyId:cred.ACCESS_KEY_ID, secretAccessKey:cred.SECRET_KEY, host:(isNull(cred.HOST)?nullvalue():cred.HOST));
var executionTime=getTickCount()-executionTime;


assertTrue( executionTime<100 );
// assertEquals(10000, executionTime);
});
}
finally {
deleteBucketEL(cred,bucketName);
}

});
}
Expand Down

0 comments on commit 1b9ba41

Please sign in to comment.