Commit

Merge pull request #2191 from zspitzer/cleanup-even-older-build-and-tests

cleanup old tests and update build
zspitzer authored Jul 21, 2023
2 parents e064bee + 099be24 commit 8123901
Showing 20 changed files with 355 additions and 356 deletions.
23 changes: 14 additions & 9 deletions .github/workflows/main.yml
@@ -1,4 +1,4 @@
name: Java CI
name: Build 5.3.8

on:
push:
@@ -27,7 +27,7 @@ jobs:
- 1433:1433
options: --health-cmd="/opt/mssql-tools/bin/sqlcmd -S localhost -U SA -P ${SA_PASSWORD} -Q 'SELECT 1' || exit 1" --health-interval 10s --health-timeout 5s --health-retries 5
steps:
- uses: szenius/set-timezone@v1.0
- uses: szenius/set-timezone@v1.1
with:
timezoneLinux: "UTC"
- uses: actions/checkout@v2
@@ -37,16 +37,16 @@ jobs:
java-version: '8'
distribution: 'adopt'
- name: Cache Lucee extensions
uses: actions/cache@v2
uses: actions/cache@v3
with:
path: ~/work/Lucee/Lucee/cache/
key: lucee-extensions
restore-keys: lucee-extensions
- name: Cache Maven packages
uses: actions/cache@v2
uses: actions/cache@v3
with:
path: |
~/.m2/repository
~/.m2/repository/*
!~/.m2/repository/org/lucee/lucee/
key: ${{ runner.os }}-maven-${{ hashFiles('loader/pom.xml','core/pom.xml') }}
restore-keys: |
@@ -92,8 +92,10 @@ jobs:
MSSQL_PORT: 1433
MSSQL_DATABASE: master
S3_ACCESS_KEY_ID: ${{ secrets.S3_ACCESS_ID_TEST }}
S3_SECRET_KEY: ${{ secrets.S3_SECRET_KEY_TEST }}
DO_DEPLOY: ${{ github.event_name == 'push' }}
# s3 tests disabled due to LDEV-4474
#S3_SECRET_KEY: ${{ secrets.S3_SECRET_KEY_TEST }}
S3_BUCKET_PREFIX: lucee-ldev-5-
# DO_DEPLOY: ${{ github.event_name == 'push' }}
# for uploading successful builds
S3_ACCESS_ID_DOWNLOAD: ${{ secrets.S3_ACCESS_ID_DOWNLOAD }}
S3_SECRET_KEY_DOWNLOAD: ${{ secrets.S3_SECRET_KEY_DOWNLOAD }}
@@ -104,15 +106,18 @@
LUCEE_DOCKER_FILES_PAT_TOKEN: ${{ secrets.LUCEE_DOCKER_FILES_PAT_TOKEN }}
#run: ant -noinput -buildfile loader/build.xml
run: |
if [ "${{ github.event_name == 'push' }}" == "true" ]; then
if [ "${{ env.DO_DEPLOY }}" == "true" ]; then
echo "------- Maven Deploy on ${{ github.event_name }} -------";
mvn -B -e -f loader/pom.xml clean deploy --settings travis-settings.xml;
else
echo "------- Maven Install on ${{ github.event_name }} ---------";
mvn -B -e -f loader/pom.xml clean install
fi
- name: Remove Lucee build artifacts from local maven cache (avoid growing cache)
run: |
rm -rfv ~/.m2/repository/org/lucee/lucee/
- name: Publish Test Results
uses: EnricoMi/publish-unit-test-result-action@v1
uses: EnricoMi/publish-unit-test-result-action@v2
if: always()
with:
files: test/reports/junit-test-results.xml
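
The environment block above is consumed on the CFML side: the build scripts read these values through server.system.environment, the same pattern ant/upload-to-s3.xml (below) uses for DO_DEPLOY and the download credentials. A minimal sketch of that pattern, not the repo's exact code:

    // Sketch: how the workflow env vars surface in the CFML build scripts.
    doDeploy = server.system.environment.DO_DEPLOY ?: false;
    accessId = server.system.environment.S3_ACCESS_ID_DOWNLOAD ?: "";
    secret   = server.system.environment.S3_SECRET_KEY_DOWNLOAD ?: "";
    if ( doDeploy eq false ) {
        systemOutput( "skip, DO_DEPLOY is false", true );
    } else if ( !len( accessId ) || !len( secret ) ) {
        systemOutput( "no S3 credentials defined to upload to S3", true );
    } else {
        systemOutput( "credentials present, S3 upload can proceed", true );
    }
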
1 change: 1 addition & 0 deletions ant/build-core.xml
@@ -887,6 +887,7 @@
<arg value="-f"/>
<arg value="${ant}/upload-to-s3.xml"/>

<jvmarg value="-Dantdir=${ant}"/>
<jvmarg value="-Dbasedir=${baseDir}"/>
<jvmarg value="-Dlucee.base.dir=${temp}/archive/base"/>
<jvmarg value="-Dlucee.web.dir=${temp}/archive/webroot"/>
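
The new -Dantdir=${ant} jvmarg hands the Ant script directory to the embedded Lucee engine as a JVM system property; the reworked ant/upload-to-s3.xml (below) maps /upload onto that directory via physical="#antdir#". A minimal sketch of reading such -D values from CFML, assuming the same server.system.properties convention the script already uses for luceejar and luceeCore:

    // Sketch only: -D JVM args appear as Java system properties in Lucee.
    antDir   = server.system.properties.antdir   ?: "";
    luceeJar = server.system.properties.luceejar ?: "";
    systemOutput( "ant dir: #antDir#, loader jar: #luceeJar#", true );
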
236 changes: 41 additions & 195 deletions ant/upload-to-s3.xml
@@ -2,213 +2,59 @@
<project default="run" basedir="." name="LuceeUploadToS3">

<macrodef name="echots">
<attribute name="message"/>
<sequential>
<local name="timestamp" />
<tstamp>
<format property="timestamp" pattern="yyyy-MM-dd HH:mm:ss" />
</tstamp>
<echo message="---------- ${timestamp} - @{message} ----------" />
</sequential>
<attribute name="message"/>
<sequential>
<local name="timestamp" />
<tstamp>
<format property="timestamp" pattern="yyyy-MM-dd HH:mm:ss" />
</tstamp>
<echo message="---------- ${timestamp} - @{message} ----------" />
</sequential>
</macrodef>

<target name="run">
<echots message="within run of upload s3"/>

<script language="CFML">
<![CDATA[
try{
NL = "
";
SystemOutput( "inside upload-to-s3.xml", 1, 1 );
SystemOutput( "Using S3 Extension: #extensionList().filter( function(row){ return row.name contains "s3" }).version#", true);
src.jar = server.system.properties.luceejar;
src.core = server.system.properties.luceeCore;
src.dir = getDirectoryFromPath( src.jar );
src.jarName = listLast( src.jar,"\/" );
src.coreName = listLast( src.core,"\/" );
src.version = mid( src.coreName,1,len( src.coreName )-4 );
if ( ! FileExists( src.jar ) || ! FileExists( src.core ) ){
SystemOutput( src.jar & " exists: " & FileExists( src.jar ), true );
SystemOutput( src.core & " exists: " & FileExists( src.core ), true );
throw "missing jar or .lco file";
}
// no S3 credentials?
if ( (server.system.environment.DO_DEPLOY?:false) eq false ){
SystemOutput( "skip, DO_DEPLOY is false", 1 ,1 );
return;
} else if ( isNull( server.system.environment.S3_ACCESS_ID_DOWNLOAD )
|| isNull( server.system.environment.S3_SECRET_KEY_DOWNLOAD ) ) {
SystemOutput( "no S3 credentials defined to upload to S3", 1, 1 );
return;
//throw "no S3 credentials defined to upload to S3";
//trg.dir = "";
} else {
s3_bucket = "lucee-downloads";
trg.dir = "s3://#server.system.environment.S3_ACCESS_ID_DOWNLOAD#:#server.system.environment.S3_SECRET_KEY_DOWNLOAD#@/#s3_bucket#/";
}
// test s3 access
SystemOutput( "Testing S3 Bucket Access", 1, 1 );
if (! DirectoryExists( trg.dir ) )
throw "DirectoryExists failed for s3 bucket [#s3_bucket#]"; // it usually will throw an error, rather than even reach this throw, if it fails
trg.jar = trg.dir & src.jarName;
trg.core = trg.dir & src.coreName;
// we only upload / publish artifacts once LDEV-3921
if ( fileExists( trg.jar ) && fileExists( trg.core ) ){
SystemOutput( "Build artifacts have already been uploaded for this version, nothing to do", 1, 1 );
return;
}
// copy jar
SystemOutput( "upload #src.jarName# to S3",1,1 );
if ( fileExists( trg.jar ) )
fileDelete( trg.jar );
fileCopy( src.jar, trg.jar );
// copy core
SystemOutput( "upload #src.coreName# to S3",1,1 );
if ( fileExists( trg.core ) )
fileDelete( trg.core );
fileCopy( src.core, trg.core );
// create war
src.warName = "lucee-" & src.version & ".war";
src.war = src.dir & src.warName;
trg.war = trg.dir & src.warName;
request.WEBADMINPASSWORD = "webweb";
request.SERVERADMINPASSWORD = "webweb";
/*
SystemOutput( "upload #src.warName# to S3",1,1 );
zip action = "zip" file = src.war overwrite = true {
// loader
zipparam source = src.jar entrypath = "WEB-INF/lib/lucee.jar";
// common files
// zipparam source = commonDir;
// website files
// zipparam source = webDir;
// war files
// zipparam source = warDir;
}
fileCopy( src.war,trg.war );
*/
// light
/*src.lightName = "lucee-light-" & src.version & ".jar";
src.light = src.dir & src.lightName;
trg.light = trg.dir & src.lightName;
createLight( src.jar,src.light,src.version );
fileCopy( src.light,trg.light );
*/
// update provider
systemOutput("Trigger builds", true);
http url="https://update.lucee.org/rest/update/provider/buildLatest" method="GET" timeout=90 result="buildLatest";
systemOutput(buildLatest.fileContent, true);
systemOutput("Update Extension Provider", true);
http url="https://extension.lucee.org/rest/extension/provider/reset" method="GET" timeout=90 result="extensionReset";
systemOutput(extensionReset.fileContent, true);
systemOutput("Update Downloads Page", true);
http url="https://download.lucee.org/?type=snapshots&reset=force" method="GET" timeout=90 result="downloadUpdate";
systemOutput("Server response status code: " & downloadUpdate.statusCode, true);
// forgebox
systemOutput("Trigger forgebox builds", true);
gha_pat_token = server.system.environment.LUCEE_DOCKER_FILES_PAT_TOKEN; // github person action token
body = {
"event_type": "forgebox_deploy"
};
http url="https://api.github.com/repos/Ortus-Lucee/forgebox-cfengine-publisher/dispatches" method="POST" result="result" timeout="90"{
httpparam type="header" name='authorization' value='Bearer #gha_pat_token#';
httpparam type="body" value='#body.toJson()#';
// set a password for the admin
try {
admin
action="updatePassword"
type="web"
oldPassword=""
newPassword="#request.WEBADMINPASSWORD#";
}
systemOutput("Forgebox build triggered, #result.statuscode# (always returns a 204 no content, see https://github.com/Ortus-Lucee/forgebox-cfengine-publisher/actions for output)", true);
// Lucee Docker builds
systemOutput("Trigger Lucee Docker builds", true);
gha_pat_token = server.system.environment.LUCEE_DOCKER_FILES_PAT_TOKEN; // github person action token
body = {
"event_type": "build-docker-images",
"client_payload": {
"LUCEE_VERSION": server.system.properties.luceeVersion
}
};
http url="https://api.github.com/repos/lucee/lucee-dockerfiles/dispatches" method="POST" result="result" timeout="90"{
httpparam type="header" name='authorization' value='Bearer #gha_pat_token#';
httpparam type="body" value='#body.toJson()#';
catch(e){} // may exist from previous execution
try {
admin
action="updatePassword"
type="server"
oldPassword=""
newPassword="#request.SERVERADMINPASSWORD#";
}
systemOutput("Lucee Docker builds triggered, #result.statuscode# (always returns a 204 no content, see https://github.com/lucee/lucee-dockerfiles/actions for output)", true);
// express
}
catch( e ){
SystemOutput( serialize( e ),1,1 );
rethrow;
}
private function createLight( string loader, string trg, version ) {
var sep = server.separator.file;
var tmpDir = getDirectoryFromPath( loader );
local.tmpLoader = tmpDir & "lucee-loader-" & createUniqueId( ); // the jar
if ( directoryExists( tmpLoader ) )
directoryDelete( tmpLoader,true );
directoryCreate( tmpLoader );
// unzip
zip action = "unzip" file = loader destination = tmpLoader;
// remove extensions
var extDir = tmpLoader&sep&"extensions";
if ( directoryExists( extDir ) )
directoryDelete( extDir, true ); // deletes directory with all files inside
directoryCreate( extDir ); // create empty dir again ( maybe Lucee expect this directory to exist )
// unzip core
var lcoFile = tmpLoader & sep & "core" & sep & "core.lco";
local.tmpCore = tmpDir & "lucee-core-" & createUniqueId( ); // the jar
directoryCreate( tmpCore );
zip action = "unzip" file = lcoFile destination = tmpCore;
// rewrite manifest
var manifest = tmpCore & sep & "META-INF" & sep & "MANIFEST.MF";
var content = fileRead( manifest );
var index = find( 'Require-Extension',content );
if ( index > 0 )
content = mid( content, 1, index - 1 ) & variables.NL;
fileWrite( manifest,content );
// zip core
fileDelete( lcoFile );
zip action = "zip" source = tmpCore file = lcoFile;
// zip loader
if ( fileExists( trg ) )
fileDelete( trg );
zip action = "zip" source = tmpLoader file = trg;
}
catch(e){} // may exist from previous execution
// create "/upload" mapping
admin
action="updateMapping"
type="web"
password="#request.WEBADMINPASSWORD#"
virtual="/upload"
physical="#antdir#"
toplevel="true"
archive=""
primary="physical"
trusted="no";
// do all the actual processing inside an include file so we get useful error messages
include ( template="/upload/upload_to_s3.cfm" );
]]>
</script>
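
The refactor above replaces roughly 200 lines of inline CDATA with a short bootstrap: set the web and server admin passwords, map /upload onto the ant directory, and include /upload/upload_to_s3.cfm, so that, per the in-code comment, failures produce useful error messages. The included file itself is not part of the hunk shown here; the outline below is a hypothetical sketch that simply restates the core of the removed inline logic (resolve the artifacts, address the bucket through Lucee's s3:// virtual filesystem, and upload each artifact only once per LDEV-3921):

    <!--- Hypothetical outline of upload_to_s3.cfm, reconstructed from the removed inline script. --->
    <cfscript>
        srcJar  = server.system.properties.luceejar;
        srcCore = server.system.properties.luceeCore;
        bucket  = "lucee-downloads";
        trgDir  = "s3://#server.system.environment.S3_ACCESS_ID_DOWNLOAD#:#server.system.environment.S3_SECRET_KEY_DOWNLOAD#@/#bucket#/";
        trgJar  = trgDir & listLast( srcJar, "\/" );
        trgCore = trgDir & listLast( srcCore, "\/" );
        // build artifacts are only published once per version (LDEV-3921)
        if ( fileExists( trgJar ) && fileExists( trgCore ) ) {
            systemOutput( "Build artifacts have already been uploaded for this version, nothing to do", true );
        } else {
            fileCopy( srcJar, trgJar );
            fileCopy( srcCore, trgCore );
        }
    </cfscript>
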
