[CLEANUP]
Kye committed Mar 24, 2024
1 parent 4ba8239 commit 061dc1d
Showing 4 changed files with 4 additions and 11 deletions.
2 changes: 1 addition & 1 deletion send_local_request_to_cogvlm.py
@@ -49,7 +49,7 @@ def image_to_base64(image_path):
 }

 # Specify the URL of your FastAPI application
-url = f"http://localhost:8100/v1/chat/completions"
+url = "http://localhost:8100/v1/chat/completions"

 # Start the timer
 start_time = time.time()
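For orientation, a minimal sketch of how a script like this might call that endpoint, assuming the widely used requests library; the model name and message payload below are illustrative placeholders, not values taken from the repository.

import time

import requests  # assumed HTTP client; the actual script may use another

url = "http://localhost:8100/v1/chat/completions"

# Illustrative chat-completions body; the real script builds its own payload,
# including the base64-encoded image produced by image_to_base64().
payload = {
    "model": "cogvlm-chat-17b",  # placeholder model name
    "messages": [{"role": "user", "content": "Describe this image."}],
}

# Start the timer, send the request, and report latency plus the response
start_time = time.time()
response = requests.post(url, json=payload, timeout=120)
print(response.status_code, f"{time.time() - start_time:.2f}s")
print(response.json())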
1 change: 0 additions & 1 deletion servers/cogvlm/cogvlm.py
@@ -38,7 +38,6 @@
 from swarms_cloud.calculate_pricing import calculate_pricing, count_tokens
 from swarms_cloud.auth_with_swarms_cloud import fetch_api_key_info
 from swarms_cloud.log_api_request_to_supabase import log_to_supabase, ModelAPILogEntry
-from swarms_cloud import verify_token

 # from exa import calculate_workers
 # import torch.distributed as dist
8 changes: 2 additions & 6 deletions swarms_cloud/auth_with_swarms_cloud.py
@@ -102,16 +102,12 @@ def authenticate_user(
     return token


-
 def verify_token(req: Request):
     token = req.headers["Authorization"]
     # Here your code for verifying the token or whatever you use
     token = token.split("Bearer ")[1]
     token = token.strip()
     valid = is_token_valid(token)
     if valid is False:
-        raise HTTPException(
-            status_code=401,
-            detail="Unauthorized"
-        )
-    return True
+        raise HTTPException(status_code=401, detail="Unauthorized")
+    return True
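As a rough sketch of how the tidied verify_token could be wired into a route, assuming FastAPI's dependency injection; the route path and the is_token_valid stub below are hypothetical, for illustration only.

from fastapi import Depends, FastAPI, HTTPException, Request

app = FastAPI()


def is_token_valid(token: str) -> bool:
    # Stub for illustration; the real check lives elsewhere in swarms_cloud.
    return bool(token)


def verify_token(req: Request):
    # Same shape as the function in this diff: strip the Bearer prefix,
    # validate, and raise 401 on failure.
    token = req.headers["Authorization"]
    token = token.split("Bearer ")[1].strip()
    if is_token_valid(token) is False:
        raise HTTPException(status_code=401, detail="Unauthorized")
    return True


@app.post("/v1/chat/completions")  # hypothetical route, for illustration
def chat_completions(authenticated: bool = Depends(verify_token)):
    return {"authenticated": authenticated}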
4 changes: 1 addition & 3 deletions swarms_cloud/calculate_pricing.py
@@ -4,9 +4,7 @@
 from transformers import AutoTokenizer, PreTrainedTokenizer


-def count_tokens(
-    texts: List[str], tokenizer: PreTrainedTokenizer, model: str
-) -> int:
+def count_tokens(texts: List[str], tokenizer: PreTrainedTokenizer, model: str) -> int:
     """
     Counts the total number of tokens in a list of texts using a tokenizer.
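For context, a sketch of what a helper with this signature might do and how it could be called; the body is an assumption rather than the repository's implementation, and "gpt2" is only an example tokenizer.

from typing import List

from transformers import AutoTokenizer, PreTrainedTokenizer


def count_tokens(texts: List[str], tokenizer: PreTrainedTokenizer, model: str) -> int:
    # Assumed behavior: sum the encoded token counts over all texts.
    return sum(len(tokenizer.encode(text)) for text in texts)


tokenizer = AutoTokenizer.from_pretrained("gpt2")  # example only
total = count_tokens(["Hello world", "How many tokens is this?"], tokenizer, model="gpt2")
print(total)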
