Skip to content

Commit

Permalink
Cleanup
Browse files Browse the repository at this point in the history
  • Loading branch information
JoshuaSBrown committed Apr 10, 2024
1 parent e8bf07e commit 3449bbf
Show file tree
Hide file tree
Showing 37 changed files with 187 additions and 182 deletions.
10 changes: 7 additions & 3 deletions CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -26,9 +26,13 @@ OPTION(BUILD_PYTHON_CLIENT "Build python client" TRUE)
OPTION(BUILD_TESTS "Build Tests" TRUE)
OPTION(BUILD_WEB_SERVER "Build DataFed Web Server" TRUE)
OPTION(ENABLE_UNIT_TESTS "Enable unit tests" TRUE)
OPTION(BUILD_SHARED_LIBS "Build using shared libraries. By default this is off.
With the exception of the libdatafed-authz which must be a shared library. This
includes also attempting to link with shared libraries instead of static ones" OFF)
OPTION(BUILD_SHARED_LIBS "By default DataFed tries to build static libraries
with the exception of libdatafed-authz which must always be a shared library,
it will also try to link with as many static libraries as possible. However,
building with static dependencies is not completely possible because some system
libraries must be shared libraries for DataFed to be interoperable. If this
setting is turned on DataFed will build its libraries as shared and try to
link to shared libraries." OFF)

set(INSTALL_REPO_SERVER ${BUILD_REPO_SERVER})
set(INSTALL_AUTHZ ${BUILD_AUTHZ})
Expand Down
5 changes: 5 additions & 0 deletions cmake/curl_version.cpp
Original file line number Diff line number Diff line change
@@ -1,6 +1,11 @@
#include <curl/curl.h>
#include <iostream>

/**
* This script is used to show what version of curl is being used with the
* rest of the build process and will print the version number of the curl
* library.
**/
int main() {
std::cout << curl_version() << std::endl;
return 0;
Expand Down
5 changes: 5 additions & 0 deletions cmake/zlib_version.cpp
Original file line number Diff line number Diff line change
@@ -1,6 +1,11 @@
#include <iostream>
#include <zlib.h>

/**
* This little file is used to make sure that we are compiling with the version
* of a library that we think we are. This one, when run, will print the zlib
* version numbers.
**/
int main() {
std::cout << zlibVersion() << std::endl;
return 0;
Expand Down
2 changes: 1 addition & 1 deletion common/include/common/CommunicatorFactory.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ class CommunicatorFactory {
LogContext m_log_context;

public:
CommunicatorFactory(LogContext log_context) : m_log_context(log_context){};
CommunicatorFactory(const LogContext & log_context) : m_log_context(log_context){};

std::unique_ptr<ICommunicator> create(const SocketOptions &socket_options,
const ICredentials &credentials,
Expand Down
11 changes: 11 additions & 0 deletions common/include/common/DynaLog.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -73,6 +73,17 @@

namespace SDMS {

/**
* Here unsigned int is used so that we can compare the numeric values when
* choosing whether to print a log message.
*
* CRITICAL = 0
* ERROR = 1
* WARNING = 2
* INFO = 3
* DEBUG = 4
* TRACE = 5
**/
enum class LogLevel : unsigned int {
CRITICAL,
ERROR,
Expand Down
6 changes: 3 additions & 3 deletions common/include/common/GSSAPI_Utils.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -21,21 +21,21 @@ class gssString {
m_gss_buf.length = 0;
}

gssString(const std::string &a_src) {
explicit gssString(const std::string &a_src) {
m_gss_buf.value = 0;
m_gss_buf.length = 0;

set(a_src);
}

gssString(const char *a_src) {
explicit gssString(const char *a_src) {
m_gss_buf.value = 0;
m_gss_buf.length = 0;

set(a_src);
}

gssString(gss_name_t a_src) {
explicit gssString(gss_name_t a_src) {
m_gss_buf.value = 0;
m_gss_buf.length = 0;

Expand Down
2 changes: 1 addition & 1 deletion common/include/common/ServerFactory.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ class ServerFactory {
LogContext m_log_context;

public:
ServerFactory(LogContext log_context) : m_log_context(log_context){};
ServerFactory(const LogContext & log_context) : m_log_context(log_context){};

std::unique_ptr<IServer> create(
ServerType server_type,
Expand Down
4 changes: 2 additions & 2 deletions common/include/common/SmartTokenizer.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ template <char delim = ' ', bool keep_empty = false> class SmartTokenizer {

SmartTokenizer() {}

SmartTokenizer(const std::string &a_input_str) {
explicit SmartTokenizer(const std::string &a_input_str) {
m_buffer.reserve(a_input_str.size());
_parse(a_input_str.c_str(), a_input_str.size());
}
Expand Down Expand Up @@ -133,4 +133,4 @@ template <char delim = ' ', bool keep_empty = false> class SmartTokenizer {
std::vector<const char *> m_tokens;
};

#endif
#endif
18 changes: 9 additions & 9 deletions common/include/common/libjson.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -389,17 +389,17 @@ class Value {

Value() : m_type(VT_NULL), m_value({0}) {}

Value(bool a_value) : m_type(VT_BOOL) { m_value.b = a_value; }
explicit Value(bool a_value) : m_type(VT_BOOL) { m_value.b = a_value; }

Value(double a_value) : m_type(VT_NUMBER) { m_value.n = a_value; }
explicit Value(double a_value) : m_type(VT_NUMBER) { m_value.n = a_value; }

Value(int a_value) : m_type(VT_NUMBER) { m_value.n = a_value; }
explicit Value(int a_value) : m_type(VT_NUMBER) { m_value.n = a_value; }

Value(const std::string &a_value) : m_type(VT_STRING) {
explicit Value(const std::string &a_value) : m_type(VT_STRING) {
m_value.s = new String(a_value);
}

Value(const char *a_value) : m_type(VT_STRING) {
explicit Value(const char *a_value) : m_type(VT_STRING) {
m_value.s = new String(a_value);
}

Expand All @@ -410,7 +410,7 @@ class Value {
a_source.m_value.o = 0;
}

Value(ValueType a_type) : m_type(a_type) {
explicit Value(ValueType a_type) : m_type(a_type) {
if (m_type == VT_OBJECT) {
m_value.o = new Object();
} else if (m_type == VT_ARRAY) {
Expand Down Expand Up @@ -837,7 +837,7 @@ class Value {
switch (m_type) {
case VT_OBJECT:
a_buffer.append("{");
for (ObjectIter i = m_value.o->begin(); i != m_value.o->end(); i++) {
for (ObjectIter i = m_value.o->begin(); i != m_value.o->end(); ++i) {
if (i != m_value.o->begin())
a_buffer.append(",\"");
else
Expand All @@ -851,7 +851,7 @@ class Value {
break;
case VT_ARRAY:
a_buffer.append("[");
for (ArrayIter i = m_value.a->begin(); i != m_value.a->end(); i++) {
for (ArrayIter i = m_value.a->begin(); i != m_value.a->end(); ++i) {
if (i != m_value.a->begin())
a_buffer.append(",");
i->toStringRecurse(a_buffer);
Expand Down Expand Up @@ -883,7 +883,7 @@ class Value {

a_buffer.append("\"");

for (c = a_value.begin(); c != a_value.end(); c++) {
for (c = a_value.begin(); c != a_value.end(); ++c) {
if (*c < 0x20) {
a_buffer.append(a, c);
a = c + 1;
Expand Down
6 changes: 3 additions & 3 deletions common/source/Util.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -79,11 +79,11 @@ void hexDump(const char *a_buffer, const char *a_buffer_end, ostream &a_out) {
const unsigned char *e = (unsigned char *)a_buffer_end;
bool done = false;

int l = 0, i = 0;
int l = 0;
while (!done) {
a_out << setw(4) << dec << l << ": ";

for (i = 0; i < 16; ++i) {
for (int i = 0; i < 16; ++i) {
if (i == 8)
a_out << " ";

Expand All @@ -102,7 +102,7 @@ void hexDump(const char *a_buffer, const char *a_buffer_end, ostream &a_out) {

a_out << " ";

for (i = 0; i < 16; ++i) {
for (int i = 0; i < 16; ++i) {
if (p + i != e) {
if (isprint(*(p + i)))
a_out << *(p + i);
Expand Down
2 changes: 1 addition & 1 deletion common/source/communicators/ZeroMQCommunicator.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@ class ZeroMQCommunicator : public ICommunicator {

public:
/** To be used by children*/
ZeroMQCommunicator(const LogContext &log_context)
explicit ZeroMQCommunicator(const LogContext &log_context)
: m_log_context(log_context){};

ZeroMQCommunicator(const SocketOptions &socket_options,
Expand Down
6 changes: 3 additions & 3 deletions common/source/servers/ProxyBasicZMQ.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -19,8 +19,8 @@

namespace SDMS {

std::string sanitize(std::string val, const std::string pattern,
const std::string replacement) {
std::string sanitize(std::string val, const std::string & pattern,
const std::string & replacement) {
for (auto at = val.find(pattern, 0); at != std::string::npos;
at = val.find(pattern, at + replacement.length())) {

Expand Down Expand Up @@ -186,7 +186,7 @@ void ProxyBasicZMQ::run() {
* loop.
**/
auto terminate_call = [](std::chrono::duration<double> duration,
const std::string address, int thread_id,
const std::string & address, int thread_id,
LogContext log_context) {
log_context.thread_name += "-terminate_after_timeout";
log_context.thread_id = thread_id;
Expand Down
2 changes: 1 addition & 1 deletion common/tests/security/tcp_secure/test_tcp_secure.sh
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ echo "TIMEOUT: ${TIMEOUT_CMD}"
echo "MAX_TEST_TIME: ${MAX_TEST_TIME_SEC}"

# Grab the first 30 packets sent on the loop back interface (127.0.0.1) and port 7515
match=$( ${TIMEOUT_CMD} ${MAX_TEST_TIME_SEC} ${TCPDUMP_CMD} -vvv -A port 7515 -i lo | grep token)
match=$( "${TIMEOUT_CMD}" "${MAX_TEST_TIME_SEC}" "${TCPDUMP_CMD}" -vvv -A port 7515 -i lo | grep token)

echo "Content of grep ${match}"
# If '.magic_token' is returned from the network sniffer then we know that
Expand Down
9 changes: 9 additions & 0 deletions common/tests/unit/test_DynaLog.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,15 @@ using namespace SDMS;

BOOST_AUTO_TEST_SUITE(LogTest)

BOOST_AUTO_TEST_CASE(testing_Enum) {
BOOST_CHECK(static_cast<unsigned int>(SDMS::LogLevel::TRACE) == 5);
BOOST_CHECK(static_cast<unsigned int>(SDMS::LogLevel::DEBUG) == 4);
BOOST_CHECK(static_cast<unsigned int>(SDMS::LogLevel::INFO) == 3);
BOOST_CHECK(static_cast<unsigned int>(SDMS::LogLevel::WARNING) == 2);
BOOST_CHECK(static_cast<unsigned int>(SDMS::LogLevel::ERROR) == 1);
BOOST_CHECK(static_cast<unsigned int>(SDMS::LogLevel::CRITICAL) == 0);
}

BOOST_AUTO_TEST_CASE(testing_LogOutput) {

std::string file_name = "./log_output_test1.txt";
Expand Down
2 changes: 2 additions & 0 deletions common/tests/unit/test_ProtoBufMap.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,9 @@ BOOST_AUTO_TEST_CASE(testing_ProtoBufFactory_ProtocolID) {

uint8_t proto_id =
proto_map.getProtocolID(MessageProtocol::GOOGLE_ANONONYMOUS);
BOOST_CHECK(proto_id == 1);
proto_id = proto_map.getProtocolID(MessageProtocol::GOOGLE_AUTHORIZED);
BOOST_CHECK(proto_id == 2);
}

BOOST_AUTO_TEST_CASE(testing_ProtoBufFactory) {
Expand Down
2 changes: 1 addition & 1 deletion common/tests/unit/test_SocketFactory.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ BOOST_AUTO_TEST_SUITE(SocketFactoryTest)

class DummyCredential : public ICredentials {
public:
DummyCredential(const std::string &pub_key) : m_pub_key(pub_key){};
explicit DummyCredential(const std::string &pub_key) : m_pub_key(pub_key){};

private:
std::string m_pub_key = "";
Expand Down
13 changes: 8 additions & 5 deletions compose/README.md
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
# Compose Dev environment

The compos Dev environment is split into two different compose files. The
The compose Dev environment is split into two different compose files. The
"core metadata services" which comprise the web server, core server and database
and the "repo services" which comprise Globus Connect Server running with the
authz library and the DataFed repo service.
Expand Down Expand Up @@ -102,7 +102,8 @@ and teardown step.
4. Building the images
5. Running the compose file
6. Bringing down the compose file.
7. Running the cleanup_globus_files.sh if you
7. Running the cleanup_globus_files.sh if you want to remove the deployment key
and start completely from scratch.

### 1. Generating .env configuration variables for the Repo Services

Expand All @@ -118,7 +119,7 @@ The .env file will be created in the DataFed/compose folder and will be hidden.
The .env file variables can be changed at this point to your configuration.

NOTE the .env file will be read verbatim by compose including any spaces or
"#" comments so do not includ anything but the exact text that needs to be
"#" comments so do not include anything but the exact text that needs to be
included in the variables.

### 3. Globus configuration
Expand All @@ -145,10 +146,12 @@ source ./unset_env.sh
docker compose -f ./compose_repo.yml up
```

Be aware, the 'source' is to apply changes to the environment of your current
terminal session.

NOTE The unset_env.sh script is to make sure you are not accidentially
overwriting what is in the .env with your local shell env. You can check the
configuration before hand by running. Be aware, the 'source' is to apply
changes to your current terminal session.
configuration before hand by running.

```bash
docker compose -f compose_repo.yml config
Expand Down
2 changes: 0 additions & 2 deletions compose/cleanup_globus_files.sh
Original file line number Diff line number Diff line change
Expand Up @@ -33,5 +33,3 @@ sudo globus-connect-server node cleanup

DATAFED_GCS_ROOT_NAME="$DATAFED_GCS_ROOT_NAME" \
python3 "${PROJECT_ROOT}/scripts/globus/globus_cleanup.py"

#rm -rf "${PROJECT_ROOT}/compose/globus"
11 changes: 3 additions & 8 deletions compose/compose_repo.yml
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,6 @@ services:
DATAFED_GCS_COLLECTION_ROOT_PATH: "${DATAFED_GCS_COLLECTION_ROOT_PATH}"
UID: "${DATAFED_UID}"
HOST_HOSTNAME: "localhost"
network_mode: host
image: datafed-repo:latest
volumes:
- ./keys:/opt/datafed/keys
Expand All @@ -26,7 +25,9 @@ services:
ports:
- 9000:9000 # Communication core server

# Needs host port 80
# Needs host port 80 for apache
# Needs ports 50000 - 51000 for GridFTP
# Needs port 443 for control port
datafed-gcs:
environment:
DATAFED_ZEROMQ_SESSION_SECRET: "${DATAFED_ZEROMQ_SESSION_SECRET}"
Expand All @@ -51,9 +52,3 @@ services:
- ./globus:/opt/datafed/globus
- ./logs:${DATAFED_CONTAINER_LOG_PATH}
- ${DATAFED_HOST_COLLECTION_MOUNT}:${DATAFED_GCS_COLLECTION_ROOT_PATH}

# External true indicates that the network is created by a
# separate compose instance
#networks:
# datafed-core-secure-api:
# external: true
2 changes: 1 addition & 1 deletion compose/generate_env.sh
Original file line number Diff line number Diff line change
Expand Up @@ -199,7 +199,7 @@ while IFS='=' read -r key value; do
# Check if the line contains the '=' sign
if [ -n "$value" ]; then
# Print the content before the '=' sign
echo "unset $key" >> "${unset_env_file_name}"
echo "unset $key" >> "${unset_env_file_name}"
fi
done < ".env"

Expand Down
4 changes: 2 additions & 2 deletions core/database/backup/datafed-backup.sh
Original file line number Diff line number Diff line change
Expand Up @@ -12,10 +12,10 @@ systemctl stop arangodb3.service
backup_file=DataFed_DB_Backup_$(date +"%Y_%m_%d").tar.gz

# Tar contents of arangodb directory without full path
tar -C /var/lib/arangodb3 -cvzf ${backup_file} .
tar -C /var/lib/arangodb3 -cvzf "${backup_file}" .

# Move backup file to storage location
mv ${backup_file} /data/backups
mv "${backup_file}" /data/backups

# Restart DataFed services
systemctl start arangodb3.service
Expand Down
3 changes: 3 additions & 0 deletions core/database/foxx/api/user_router.js
Original file line number Diff line number Diff line change
Expand Up @@ -86,6 +86,9 @@ router.get('/create', function(req, res) {
fname = name.substr(0, idx).trim();

var is_admin = req.queryParams.is_admin;
// It is assumed that if this is the first user to ever log
// into the database they are by default made the admin.
// This will simplify the setup process.
if ( g_db.u.count() === 0 ) {
is_admin = true;
}
Expand Down
Loading

0 comments on commit 3449bbf

Please sign in to comment.