Update fink bin
JulienPeloton committed May 23, 2024
1 parent 0861c89 commit 79d876a
Showing 1 changed file with 68 additions and 2 deletions.
bin/fink: 70 changes (68 additions & 2 deletions)
@@ -66,6 +66,7 @@ while [ "$#" -gt 0 ]; do
echo -e "$nservice Fink service(s) running: "
ps aux | head -1
ps aux | grep -i 'fink start' | grep -v grep
ps aux | grep -i 'gcn_stream start' | grep -v grep
echo "Use <fink stop service_name> to stop a service."
exit
;;
@@ -93,6 +94,10 @@ while [ "$#" -gt 0 ]; do
echo "$1 requires an argument" >&2
exit 1
;;
-conf_distribution)
conf_distribution="$2"
shift 2
;;
--simulator)
SIM_ONLY=true
shift 1
@@ -281,14 +286,29 @@ elif [[ $service == "raw2science" ]]; then
-night ${NIGHT} \
-mmconfigpath ${FINK_MM_CONFIG} \
-log_level ${LOG_LEVEL} ${EXIT_AFTER}
elif [[ $service == "raw2science_elasticc_paper" ]]; then
# Store the stream of alerts
spark-submit --master ${SPARK_MASTER} \
--packages ${FINK_PACKAGES} \
--jars ${FINK_JARS} \
${PYTHON_EXTRA_FILE} \
${SECURED_KAFKA_CONFIG} ${EXTRA_SPARK_CONFIG} \
${FINK_HOME}/bin/raw2science_elasticc_batch.py -night ${NIGHT} ${HELP_ON_SERVICE}
elif [[ $service == "distribution" ]]; then
if [[ $ELASTICC == true ]]; then
SCRIPT=${FINK_HOME}/bin/distribute_elasticc.py
else
SCRIPT=${FINK_HOME}/bin/distribute.py
fi
# Read configuration for redistribution
source ${FINK_HOME}/conf/fink.conf.distribution

# Check if the conf file exists
if [[ -f $conf_distribution ]]; then
echo "Reading custom Fink distribution configuration file from " $conf_distribution
source $conf_distribution
else
echo "Reading default Fink distribution conf from " ${FINK_HOME}/conf/fink.conf.distribution
source ${FINK_HOME}/conf/fink.conf.distribution
fi
# Start the Spark Producer
spark-submit --master ${SPARK_MASTER} \
--packages ${FINK_PACKAGES} \
@@ -297,6 +317,12 @@ elif [[ $service == "distribution" ]]; then
--driver-java-options "-Djava.security.auth.login.config=${FINK_PRODUCER_JAAS}" \
--conf "spark.driver.extraJavaOptions=-Djava.security.auth.login.config=${FINK_PRODUCER_JAAS}" \
--conf "spark.executor.extraJavaOptions=-Djava.security.auth.login.config=${FINK_PRODUCER_JAAS}" \
--conf spark.executorEnv.KNWEBHOOK=${KNWEBHOOK} \
--conf spark.executorEnv.KNWEBHOOK_FINK=${KNWEBHOOK_FINK} \
--conf spark.executorEnv.KNWEBHOOK_AMA_CL=${KNWEBHOOK_AMA_CL} \
--conf spark.executorEnv.KNWEBHOOK_AMA_GALAXIES=${KNWEBHOOK_AMA_GALAXIES} \
--conf spark.executorEnv.KNWEBHOOK_DWF=${KNWEBHOOK_DWF} \
--conf spark.executorEnv.FINK_TG_TOKEN=${FINK_TG_TOKEN} \
$SCRIPT ${HELP_ON_SERVICE} \
-producer ${PRODUCER} \
-online_data_prefix ${ONLINE_DATA_PREFIX} \
@@ -311,6 +337,31 @@ elif [[ $service == "distribution" ]]; then
-mmconfigpath ${FINK_MM_CONFIG} \
-night ${NIGHT} \
-log_level ${LOG_LEVEL} ${EXIT_AFTER}
elif [[ $service == "distribution_replayed" ]]; then
# Check if the conf file exists
if [[ -f $conf_distribution ]]; then
echo "Reading custom Fink distribution configuration file from " $conf_distribution
source $conf_distribution
else
echo "Reading default Fink distribution conf from " ${FINK_HOME}/conf/fink.conf.distribution
source ${FINK_HOME}/conf/fink.conf.distribution
fi
# Start the Spark Producer
spark-submit --master ${SPARK_MASTER} \
--packages ${FINK_PACKAGES} \
--jars ${FINK_JARS} ${PYTHON_EXTRA_FILE} ${EXTRA_SPARK_CONFIG} \
--files ${FINK_HOME}/conf/fink_kafka_producer_jaas.conf \
--driver-java-options "-Djava.security.auth.login.config=${FINK_PRODUCER_JAAS}" \
--conf "spark.driver.extraJavaOptions=-Djava.security.auth.login.config=${FINK_PRODUCER_JAAS}" \
--conf "spark.executor.extraJavaOptions=-Djava.security.auth.login.config=${FINK_PRODUCER_JAAS}" \
${FINK_HOME}/bin/distribute.py ${HELP_ON_SERVICE} \
-online_data_prefix ${ONLINE_DATA_PREFIX} \
-distribution_servers ${DISTRIBUTION_SERVERS} \
-distribution_schema ${DISTRIBUTION_SCHEMA} \
-substream_prefix ${SUBSTREAM_PREFIX} \
-tinterval ${FINK_TRIGGER_UPDATE} \
-night ${NIGHT} \
-log_level ${LOG_LEVEL} ${EXIT_AFTER}
elif [[ $service == "merge" ]]; then
spark-submit --master ${SPARK_MASTER} \
--packages ${FINK_PACKAGES} \
@@ -320,6 +371,21 @@ elif [[ $service == "merge" ]]; then
-agg_data_prefix ${AGG_DATA_PREFIX} \
-night ${NIGHT} \
-log_level ${LOG_LEVEL} ${EXIT_AFTER}
elif [[ $service == "sanitize" ]]; then
spark-submit --master ${SPARK_MASTER} \
--packages ${FINK_PACKAGES} \
--jars ${FINK_JARS} ${PYTHON_EXTRA_FILE} ${EXTRA_SPARK_CONFIG} \
${FINK_HOME}/bin/sanitize.py ${HELP_ON_SERVICE} \
-agg_data_prefix ${AGG_DATA_PREFIX} \
-night ${NIGHT} \
-log_level ${LOG_LEVEL} ${EXIT_AFTER}
elif [[ $service == "cast" ]]; then
spark-submit --master ${SPARK_MASTER} \
--packages ${FINK_PACKAGES} \
--jars ${FINK_JARS} ${PYTHON_EXTRA_FILE} ${EXTRA_SPARK_CONFIG} \
${FINK_HOME}/bin/cast_df.py ${HELP_ON_SERVICE} \
-night ${NIGHT} \
-log_level ${LOG_LEVEL} ${EXIT_AFTER}
elif [[ $service == "stats" ]]; then
spark-submit --master ${SPARK_MASTER} \
--packages ${FINK_PACKAGES} \
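
For reference, a minimal usage sketch of the pieces this commit adds. It assumes the existing launcher conventions visible above (fink start <service_name> plus a -night argument); the night value and the configuration path are hypothetical placeholders, not values taken from the repository.

# Run the distribution service with a custom configuration file;
# if the file does not exist, the script falls back to
# ${FINK_HOME}/conf/fink.conf.distribution.
fink start distribution -night 20240523 -conf_distribution /path/to/custom.conf.distribution

# New batch services introduced by this commit
fink start sanitize -night 20240523
fink start cast -night 20240523

# Re-distribution of a replayed stream, which honours the same -conf_distribution fallback
fink start distribution_replayed -night 20240523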

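Since both distribution services now source an optional user-supplied file before calling spark-submit, here is a sketch of what such a file could contain, under the assumption that it simply defines the shell variables read by the script above; all values are illustrative placeholders.

# custom.conf.distribution (hypothetical), passed via -conf_distribution
DISTRIBUTION_SERVERS="kafka1.example.org:9092,kafka2.example.org:9092"
DISTRIBUTION_SCHEMA="${FINK_HOME}/schemas/distribution_schema.avsc"
SUBSTREAM_PREFIX="fink_"
FINK_TRIGGER_UPDATE=2
# Webhook tokens forwarded to Spark executors through spark.executorEnv
KNWEBHOOK="https://hooks.example.org/kilonova"
FINK_TG_TOKEN="123456:placeholder"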