Merge pull request #1181 from ORNL/1180-refactor-authz
1180 refactor foxx authz
JoshuaSBrown authored Jan 7, 2025
2 parents c517953 + eba25db commit f60d2f7
Showing 13 changed files with 860 additions and 82 deletions.
2 changes: 2 additions & 0 deletions .gitlab/build/build_foxx_image.yml
@@ -22,6 +22,7 @@ build-foxx:
- docker/**/*
- scripts/**/*
- core/database/**/*
- core/CMakeLists.txt
- common/proto/**/*
- .gitlab-ci.yml
- CMakeLists.txt
@@ -43,6 +44,7 @@ retag-image:
- docker/**/*
- scripts/**/*
- core/database/**/*
- core/CMakeLists.txt
- common/proto/**/*
- .gitlab-ci.yml
- CMakeLists.txt
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -28,6 +28,7 @@
16. [1149] - Docker container GCS Collection Mount Bug Fix
17. [1168] - Add authz unit testing to the CI
18. [1200] - Add JavaScript linter (eslint) and (prettier) formatter for JavaScript
19. [1180] - Refactor of authz foxx module, split into objects and added unit tests

# v2024.6.17.10.40

6 changes: 6 additions & 0 deletions core/database/CMakeLists.txt
@@ -13,6 +13,9 @@ configure_file(
if( ENABLE_FOXX_TESTS )
add_test(NAME foxx_setup COMMAND "${CMAKE_CURRENT_SOURCE_DIR}/tests/test_setup.sh")
add_test(NAME foxx_teardown COMMAND "${CMAKE_CURRENT_SOURCE_DIR}/tests/test_teardown.sh")
add_test(NAME foxx_authz COMMAND "${CMAKE_CURRENT_SOURCE_DIR}/tests/test_foxx.sh" -t "unit_authz")
add_test(NAME foxx_record COMMAND "${CMAKE_CURRENT_SOURCE_DIR}/tests/test_foxx.sh" -t "unit_record")
add_test(NAME foxx_path COMMAND "${CMAKE_CURRENT_SOURCE_DIR}/tests/test_foxx.sh" -t "unit_path")
add_test(NAME foxx_db_fixtures COMMAND "${CMAKE_CURRENT_SOURCE_DIR}/tests/test_fixture_setup.sh")
add_test(NAME foxx_version COMMAND "${CMAKE_CURRENT_SOURCE_DIR}/tests/test_foxx.sh" -t "unit_version")
add_test(NAME foxx_support COMMAND "${CMAKE_CURRENT_SOURCE_DIR}/tests/test_foxx.sh" -t "unit_support")
@@ -23,5 +26,8 @@ if( ENABLE_FOXX_TESTS )
set_tests_properties(foxx_db_fixtures PROPERTIES FIXTURES_SETUP FoxxDBFixtures FIXTURES_REQUIRED Foxx)
set_tests_properties(foxx_version PROPERTIES FIXTURES_REQUIRED Foxx)
set_tests_properties(foxx_support PROPERTIES FIXTURES_REQUIRED Foxx)
set_tests_properties(foxx_authz PROPERTIES FIXTURES_REQUIRED Foxx)
set_tests_properties(foxx_record PROPERTIES FIXTURES_REQUIRED Foxx)
set_tests_properties(foxx_path PROPERTIES FIXTURES_REQUIRED Foxx)
set_tests_properties(foxx_user_router PROPERTIES FIXTURES_REQUIRED "Foxx;FoxxDBFixtures")
endif()
44 changes: 44 additions & 0 deletions core/database/README.md
@@ -0,0 +1,44 @@
# WARNING - Adding Tests

Note: the granularity of the CMake tests depends on how they are defined in the
CMakeLists.txt file. The tests are specified in CMake by passing a string that is
matched against the chai test cases in the "it()" sections of the unit tests. Any
test case that matches the pattern will run when that CMake test is triggered.

For example, given the following CMakeLists.txt line:

```
add_test(NAME foxx_record COMMAND "${CMAKE_CURRENT_SOURCE_DIR}/tests/test_foxx.sh" -t "unit_record")
```

This will pass "unit_record" as the pattern to be matched to the test_foxx.sh
script. In turn, the test_foxx.sh script will call foxx test with "unit_record".
Tests are not matched by the name of the test file; they are matched by the test
case names.

Below is part of a test suite whose cases would be matched against the "unit_record" pattern.

```
describe('Record Class', () => {
it('unit_record: isPathConsistent should return false paths are inconsistent in new and old alloc.', () => {
:
:
});
it('unit_record: isPathConsistent a different test case.', () => {
:
:
});
});
```

Notice that 'unit_record' is explicitly mentioned in the test cases. In the above excerpt, both tests will run. If ctest were called explicitly, we could run all unit_record tests with the following:

```
ctest -R foxx_record
```
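
As a sketch of this convention, a new test suite picked up by the foxx_authz CMake test could look like the following. The file name, require path, and assertion are illustrative only; what matters for matching is the "unit_authz:" prefix in the it() description, which is what the -t "unit_authz" filter selects.

```
"use strict";

// Hypothetical test file (name and relative require path are assumptions).
// Assumes chai is available, as in ArangoDB's bundled Foxx test framework.
const { expect } = require("chai");
const authzModule = require("../api/authz");

describe("Authz Module", () => {
    it("unit_authz: isRecordActionAuthorized should be exported as a function.", () => {
        expect(authzModule.isRecordActionAuthorized).to.be.a("function");
    });
});
```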
59 changes: 59 additions & 0 deletions core/database/foxx/api/authz.js
@@ -0,0 +1,59 @@
"use strict";

const g_db = require("@arangodb").db;
const path = require("path");
const g_lib = require("./support");

module.exports = (function () {
let obj = {};

/**
* @brief Checks whether a client has the required permissions on a
* record.
*
* @param {object} a_client - A user document for the user whose permissions
* on the data record are being verified.
*
* e.g. a_client will contain the following information:
*
* {
* "_key" : "bob",
* "_id" : "u/bob",
* "name" : "bob junior",
* "name_first" : "bob",
* "name_last" : "jones",
* "is_admin" : true,
* "max_coll" : 50,
* "max_proj" : 10,
* "max_sav_qry" : 20,
* :
* "email" : "bobjones@gmail.com"
* }
*
* @param {string} a_data_key - A DataFed key associated with a record; it is not prepended with 'd/'.
* @param a_perm - The permission type that is being checked, e.g.
*
* PERM_CREATE
* PERM_WR_DATA
* PERM_RD_DATA
**/
obj.isRecordActionAuthorized = function (a_client, a_data_key, a_perm) {
const data_id = "d/" + a_data_key;
// If the user is an admin of the object, no further checks are needed.
if (g_lib.hasAdminPermObject(a_client, data_id)) {
return true;
}
// Otherwise, grab the data item and check whether the client has the required permission.
let data = g_db.d.document(data_id);
if (g_lib.hasPermissions(a_client, data, a_perm)) {
return true;
}
return false;
};

return obj;
})();
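
For illustration, a minimal usage sketch of the module above; the wrapper function and record key are hypothetical, while the permission and error constants come from the support module used throughout this PR.

```
"use strict";

const g_lib = require("./support");
const authzModule = require("./authz");

// Hypothetical helper: throws if the client may not read record d/<a_data_key>.
function ensureClientCanRead(a_client, a_data_key) {
    if (!authzModule.isRecordActionAuthorized(a_client, a_data_key, g_lib.PERM_RD_DATA)) {
        throw g_lib.ERR_PERM_DENIED;
    }
}
```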
178 changes: 97 additions & 81 deletions core/database/foxx/api/authz_router.js
@@ -5,6 +5,9 @@ const router = createRouter();
const joi = require("joi");
const g_db = require("@arangodb").db;
const g_lib = require("./support");
const pathModule = require("./posix_path"); // Replace with the actual file name
const Record = require("./record"); // Replace with the actual file name
const authzModule = require("./authz"); // Replace with the actual file name

module.exports = router;

@@ -22,44 +25,67 @@ router
req.queryParams.act,
);

// Client will contain the following information
// {
// "_key" : "bob",
// "_id" : "u/bob",
// "name" : "bob junior ",
// "name_first" : "bob",
// "name_last" : "jones",
// "is_admin" : true,
// "max_coll" : 50,
// "max_proj" : 10,
// "max_sav_qry" : 20,
// :
// "email" : "[email protected]"
// }
const client = g_lib.getUserFromClientID_noexcept(req.queryParams.client);

var idx = req.queryParams.file.lastIndexOf("/");
var data_key = req.queryParams.file.substr(idx + 1);
var data_id = "d/" + data_key;
// Will split a posix path into an array
// E.g.
// req.queryParams.file = "/usr/local/bin"
// const path_components = pathModule.splitPOSIXPath(req.queryParams.file);
//
// Path components will be
// ["usr", "local", "bin"]
const path_components = pathModule.splitPOSIXPath(req.queryParams.file);
const data_key = path_components.at(-1);
let record = new Record(data_key);

// Special case - allow unknown client to read a publicly accessible record
// if record exists and if it is a public record
if (!client) {
if (req.queryParams.act != "read" || !g_lib.hasPublicRead(data_id)) {
console.log("Permission to read denied!");
throw g_lib.ERR_PERM_DENIED;
if (record.exists()) {
if (req.queryParams.act != "read" || !g_lib.hasPublicRead(record.id())) {
console.log(
"AUTHZ act: " +
req.queryParams.act +
" client: " +
client._id +
" path " +
req.queryParams.file +
" FAILED",
);
throw g_lib.ERR_PERM_DENIED;
}
}
console.log("allow anon read of public record");
} else {
console.log("client:", client);

// Actions: read, write, create, delete, chdir, lookup
var req_perm = 0;
let req_perm = 0;
switch (req.queryParams.act) {
case "read":
console.log("Client: ", client, " read permissions?");
req_perm = g_lib.PERM_RD_DATA;
break;
case "write":
console.log("Client: ", client, " write permissions?");
break;
case "create":
console.log("Client: ", client, " create permissions?");
req_perm = g_lib.PERM_WR_DATA;
break;
case "delete":
console.log("Client: ", client, " delete permissions?");
throw g_lib.ERR_PERM_DENIED;
case "chdir":
console.log("Client: ", client, " chdir permissions?");
break;
case "lookup":
console.log("Client: ", client, " lookup permissions?");
// For TESTING, allow these actions
return;
default:
@@ -70,85 +96,75 @@
];
}

console.log("client: ", client, " data_id: ", data_id);
if (!g_lib.hasAdminPermObject(client, data_id)) {
var data = g_db.d.document(data_id);
if (!g_lib.hasPermissions(client, data, req_perm)) {
console.log("Client: ", client, " does not have permission!");
// This will tell us if the action on the record is authorized;
// we still do not know whether the path is correct.
if (record.exists()) {
if (!authzModule.isRecordActionAuthorized(client, data_key, req_perm)) {
console.log(
"AUTHZ act: " +
req.queryParams.act +
" client: " +
client._id +
" path " +
req.queryParams.file +
" FAILED",
);
throw g_lib.ERR_PERM_DENIED;
}
}
}

// Verify repo and path are correct for record
// Note: only managed records have allocations, and this GridFTP auth call is only made for managed records
//var path = req.queryParams.file.substr( req.queryParams.file.indexOf("/",8));
var path = req.queryParams.file;
console.log("data_id is, ", data_id);
var loc = g_db.loc.firstExample({
_from: data_id,
});
console.log("Loc is:");
console.log(loc);
if (!loc) {
if (record.exists()) {
if (!record.isPathConsistent(req.queryParams.file)) {
console.log(
"AUTHZ act: " +
req.queryParams.act +
" client: " +
client._id +
" path " +
req.queryParams.file +
" FAILED",
);
throw [record.error(), record.errorMessage()];
}
} else {
// If the record does not exist then the path would not be consistent.
console.log(
"Permission denied data is not managed by DataFed. This can happen if you try to do a transfer directly from Globus.",
"AUTHZ act: " +
req.queryParams.act +
" client: " +
client._id +
" path " +
req.queryParams.file +
" FAILED",
);
throw g_lib.ERR_PERM_DENIED;
}
var alloc = g_db.alloc.firstExample({
_from: loc.uid,
_to: loc._to,
});
console.log("path:", path, " alloc path:", alloc.path + data_key, " loc: ", loc);
if (!alloc) {
throw g_lib.ERR_PERM_DENIED;
}

// If path is missing the starting "/" add it back in
if (!path.startsWith("/") && alloc.path.startsWith("/")) {
path = "/" + path;
}

console.log("path:", path, " alloc path:", alloc.path + data_key, " loc: ", loc);
if (alloc.path + data_key != path) {
// This may be due to an alloc/owner change
// Allow if the new path matches
console.log("authz loc info:", loc);

if (!loc.new_repo) {
console.log("Throw a permission denied error");
throw g_lib.ERR_PERM_DENIED;
}

console.log("Creating alloc");
alloc = g_db.alloc.firstExample({
_from: loc.new_owner ? loc.new_owner : loc.uid,
_to: loc.new_repo,
});

console.log("alloc is ");
console.log(alloc);
if (!alloc || alloc.path + data_key != path) {
throw [
obj.ERR_PERM_DENIED,
"Permission denied, DataFed registered path is '" +
alloc.path +
data_key +
"' Globus path is '" +
path +
"'",
];
}
throw [g_lib.ERR_PERM_DENIED, "Invalid record specified: " + req.queryParams.file];
}
console.log(
"AUTHZ act: " +
req.queryParams.act +
" client: " +
client._id +
" path " +
req.queryParams.file +
" SUCCESS",
);
} catch (e) {
g_lib.handleException(e, res);
}
})
.queryParam("client", joi.string().required(), "Client ID")
.queryParam("repo", joi.string().required(), "Originating repo ID")
.queryParam(
"repo",
joi.string().required(),
"Originating repo ID, where the DataFed managed GridFTP server is running.",
)
.queryParam("file", joi.string().required(), "Data file name")
.queryParam("act", joi.string().required(), "Action")
.queryParam(
"act",
joi.string().required(),
"GridFTP action: 'lookup', 'chdir', 'read', 'write', 'create', 'delete'",
)
.summary("Checks authorization")
.description("Checks authorization");
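
For orientation, a rough sketch of how this route might be exercised from a Foxx test, following ArangoDB's usual Foxx testing pattern. The "/authz" path segment, the parameter values, and the mount point are assumptions; only the four query parameters come from the route definition above.

```
"use strict";

const request = require("@arangodb/request");

// module.context.baseUrl resolves to this service's mount point, e.g. "/_db/<db>/<mount>".
// The "/authz" route path below is an assumption for illustration.
const response = request.get(module.context.baseUrl + "/authz", {
    qs: {
        client: "u/bob", // Client ID (placeholder)
        repo: "repo/example", // Originating repo ID (placeholder)
        file: "/mnt/example/project/abc123", // Data file path (placeholder)
        act: "read", // GridFTP action
    },
});
// response.statusCode indicates whether the check passed; denials are reported
// through g_lib.handleException in the route handler above.
```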
