forked from twelvelabs-io/tl-jockey
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathexample.env
16 lines (16 loc) · 1.08 KB
/
example.env
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
# --- LangSmith ---
# API key for LangSmith (LangChain tracing/observability) — presumably used for
# tracing Jockey's LangGraph runs; confirm against the project setup docs.
LANGSMITH_API_KEY=<YOUR_LANGSMITH_API_KEY>
# --- Azure OpenAI (fill these in only when LLM_PROVIDER=AZURE) ---
AZURE_OPENAI_ENDPOINT=<IF USING AZURE GPTs>
AZURE_OPENAI_API_KEY=<IF USING AZURE GPTs>
OPENAI_API_VERSION=<IF USING AZURE GPTs>
# --- OpenAI (fill this in only when LLM_PROVIDER=OPENAI) ---
OPENAI_API_KEY=<IF USING OPEN AI GPTs>
# Determines which Langchain classes are used to construct Jockey LLM instances.
# Must be exactly one of the two values below; the matching credential block
# above must be filled in.
LLM_PROVIDER=<MUST BE ONE OF [AZURE, OPENAI]>
# --- Twelve Labs ---
# API key for the Twelve Labs video understanding API.
TWELVE_LABS_API_KEY=<YOUR TWELVE LABS API KEY>
# This variable is used to persist and make rendered video servable from within the LangGraph API container.
# Please make sure this directory exists on the host machine.
# Please make sure this directory is available as a File Sharing resource in Docker for Mac.
HOST_PUBLIC_DIR=<VOLUME MOUNTED TO LANGGRAPH API SERVER CONTAINER WHERE RENDERED VIDEOS GO>
# This variable is a placeholder that will be used by an upcoming Jockey core worker; it currently doesn't impact anything.
# Please make sure this directory exists on the host machine.
# Please make sure this directory is available as a File Sharing resource in Docker for Mac.
HOST_VECTOR_DB_DIR=<VOLUME MOUNTED TO LANGGRAPH API SERVER CONTAINER WHERE VECTOR DB GOES>