Commit e9a3a1d: first commit
benbaptist committed Dec 3, 2024 (0 parents)

Showing 29 changed files with 1,292 additions and 0 deletions.
165 changes: 165 additions & 0 deletions .gitignore
@@ -0,0 +1,165 @@
chat.json
.git.old/

# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
.pybuilder/
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version

# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock

# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock

# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/latest/usage/project/#working-with-version-control
.pdm.toml
.pdm-python
.pdm-build/

# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/

# pytype static type analyzer
.pytype/

# Cython debug symbols
cython_debug/

# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/
53 changes: 53 additions & 0 deletions README.md
@@ -0,0 +1,53 @@
# Ploppie

A high-level, stupid-simple Pythonic LiteLLM abstraction layer for implementing simple chat workflows, with tools. Supports vision and audio models. Includes facilities for easy (de)serialization of chat histories.

So stupid that I couldn't come up with a better name.

## Installation

```bash
pip install ploppie
```

## Usage

### Simple chat
```python
from ploppie import Chat

chat = Chat()

response = chat.system("You are a helpful assistant.") \
    .user("What is the capital of France?") \
    .ready()

print(response)
```

### Chat with tools
```python
from ploppie import Chat

chat = Chat()

@chat.tool("Perform mathematical calculations")
def calculate(expression: "str: The expression to calculate"):
    return eval(expression)

print(chat.send("What is 2502 * 2502, and 2858 - 28592? Please tell me the results."))
```

### Chat with vision
```python
from ploppie import Chat
from ploppie.messages import Image

chat = Chat()

response = chat.system("You are a helpful assistant.") \
    .user(Image(file_handle=open("beautiful_landscape.png", "rb"))) \
    .ready()

print(response)
```
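
### Saving and loading a chat

Chat histories can be serialized to plain dictionaries and restored later. A minimal sketch based on the `to_dict()` / `from_dict()` calls used in `examples/save_test.py` (the round-trip through `chat.json` is illustrative):

```python
import json
from ploppie import Chat

chat = Chat(model="gpt-4o-mini")

response = chat.system("You are a helpful assistant.") \
    .user("What is the capital of France?") \
    .ready()

# Persist the conversation to disk as JSON
with open("chat.json", "w") as f:
    json.dump(chat.to_dict(), f)

# Later, restore the history into a fresh Chat object
restored = Chat(model="gpt-4o-mini")
with open("chat.json", "r") as f:
    restored.from_dict(json.load(f))
```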
83 changes: 83 additions & 0 deletions SCOPE.md
@@ -0,0 +1,83 @@
Ploppie is a high-level, stupid-simple Pythonic LiteLLM abstraction layer for implementing simple chat workflows, with tools. No more messing around with dictionaries and JSON, no more OpenAI-specific APIs. Just plain Python.

Supports vision, audio, and any other LiteLLM features.

# Vanilla LiteLLM Example

```python
from litellm import completion

# Define a system prompt
system_prompt = "You are a helpful assistant that can perform calculations."

# Define a tool for basic math
def calculate(expression):
    try:
        return str(eval(expression))
    except:
        return "Invalid expression"

# Example conversation
messages = [
    {"role": "system", "content": system_prompt},
    {"role": "user", "content": "What is 25 * 4?"}
]

# Get completion from LiteLLM
response = completion(
    model="gpt-4o-mini",
    messages=messages,
    functions=[{
        "name": "calculate",
        "description": "Perform basic mathematical calculations",
        "parameters": {
            "type": "object",
            "properties": {
                "expression": {
                    "type": "string",
                    "description": "The mathematical expression to evaluate"
                }
            },
            "required": ["expression"]
        }
    }]
)

# Print the response
print(response.choices[0].message)
```

# Ploppie Example
```python
from ploppie import Chat

chat = Chat(model="gpt-4o-mini")

@chat.tool(description="Perform basic mathematical calculations.")
def calculate(expression: "str: The mathematical expression to evaluate") -> str:
    try:
        return str(eval(expression))
    except:
        return "Invalid expression"

response = chat.system("You are a helpful assistant that can perform calculations.") \
    .user("What is 25 * 4?") \
    .ready()

print(response)
```

# Vision Example

```python
from ploppie import Chat
from ploppie.messages import Image

chat = Chat(model="gpt-4o-mini")

a = chat.system("Identify the objects in the image.") \
    .user(Image(content=open("path/to/image.jpg", "rb"))) \
    .ready() # Signals chat is ready for interaction

print(a)
```
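
# Dynamic Message Example

A sketch of per-turn dynamic messages, based on the `@chat.dynamic()` decorator used in `examples/chat_test.py`; the decorated function is assumed to be re-evaluated on each send, so the injected system message stays current:

```python
from datetime import datetime

from ploppie import Chat
from ploppie.messages import System

chat = Chat(model="gpt-4o-mini")

@chat.dynamic()
def dynamic_message():
    # Assumed to be re-evaluated on every send, keeping the timestamp fresh
    return System("The current time is " + datetime.now().strftime("%H:%M:%S"))

print(chat.send("What time is it right now?"))
```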
3 changes: 3 additions & 0 deletions TODO.md
@@ -0,0 +1,3 @@
- [ ] Streaming support
- [ ] JSON mode
- [x] Image and file support
47 changes: 47 additions & 0 deletions examples/chat_test.py
@@ -0,0 +1,47 @@
from ploppie import Chat
from ploppie.messages import System
from ploppie.messages.files import Image

import os
import re
from datetime import datetime

if __name__ == "__main__":
    chat = Chat(model="gpt-4o-mini")

    @chat.dynamic()
    def dynamic_message():
        return System("The current time is " + datetime.now().strftime("%H:%M:%S"))

    @chat.tool("Perform mathematical calculations")
    def calculate(expression: "str: The expression to calculate"):
        print(f"Calculating {expression}")

        try:
            return eval(expression)
        except Exception as e:
            return f"I'm sorry, I can't calculate that. ({e})"

    while True:
        input_ = input("<You> ")

        # Parse the input for a file path
        inputs = [input_]

        file_path_match = re.search(r'(?:^|\s)([\'"]?)([a-zA-Z0-9_\-./\\]+\.(png|jpg|jpeg|gif|webp))\1(?:\s|$)', input_)
        if file_path_match:
            file_path = file_path_match.group(2)

            if os.path.exists(file_path):
                print(f"* Found image: {file_path}")
                image = Image(open(file_path, 'rb'))
                inputs.append(image)
            else:
                chat.system(f"File not found: {file_path} - please inform the user")
                print(f"* File not found: {file_path}")

        responses = chat.send(inputs)

        for response in responses:
            print(f"<Ploppie> {response}")
12 changes: 12 additions & 0 deletions examples/json_test.py
@@ -0,0 +1,12 @@
from ploppie import Chat
import random

if __name__ == "__main__":
    # You can pass any standard LiteLLM parameters to the Chat object
    chat = Chat(model="gpt-4o-mini", response_format={"type": "json_object"})

    response = chat.system("Take any input and convert it to a sensible JSON object.") \
        .user("Make a JSON object that represents a cat.") \
        .ready()

    print(response)
24 changes: 24 additions & 0 deletions examples/save_test.py
@@ -0,0 +1,24 @@
import json
import os
from ploppie import Chat

if __name__ == "__main__":
    chat = Chat(model="gpt-4o-mini")

    @chat.tool("Perform mathematical calculations")
    def calculate(expression: "str: The expression to calculate"):
        return eval(expression)

    if os.path.exists("chat.json"):
        with open("chat.json", "r") as f:
            chat.from_dict(json.load(f))

    while True:
        input_ = input("<You> ")
        responses = chat.send(input_)

        for response in responses:
            print(f"<Ploppie> {response}")

        with open("chat.json", "w") as f:
            json.dump(chat.to_dict(), f)