Skip to content

Commit 519c2a5

Browse files
committed
Initial commit
0 parents  commit 519c2a5

File tree

8 files changed

+375
-0
lines changed

8 files changed

+375
-0
lines changed

.gitignore

+160
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,160 @@
1+
# Byte-compiled / optimized / DLL files
2+
__pycache__/
3+
*.py[cod]
4+
*$py.class
5+
6+
# C extensions
7+
*.so
8+
9+
# Distribution / packaging
10+
.Python
11+
build/
12+
develop-eggs/
13+
dist/
14+
downloads/
15+
eggs/
16+
.eggs/
17+
lib/
18+
lib64/
19+
parts/
20+
sdist/
21+
var/
22+
wheels/
23+
share/python-wheels/
24+
*.egg-info/
25+
.installed.cfg
26+
*.egg
27+
MANIFEST
28+
29+
# PyInstaller
30+
# Usually these files are written by a python script from a template
31+
# before PyInstaller builds the exe, so as to inject date/other infos into it.
32+
*.manifest
33+
*.spec
34+
35+
# Installer logs
36+
pip-log.txt
37+
pip-delete-this-directory.txt
38+
39+
# Unit test / coverage reports
40+
htmlcov/
41+
.tox/
42+
.nox/
43+
.coverage
44+
.coverage.*
45+
.cache
46+
nosetests.xml
47+
coverage.xml
48+
*.cover
49+
*.py,cover
50+
.hypothesis/
51+
.pytest_cache/
52+
cover/
53+
54+
# Translations
55+
*.mo
56+
*.pot
57+
58+
# Django stuff:
59+
*.log
60+
local_settings.py
61+
db.sqlite3
62+
db.sqlite3-journal
63+
64+
# Flask stuff:
65+
instance/
66+
.webassets-cache
67+
68+
# Scrapy stuff:
69+
.scrapy
70+
71+
# Sphinx documentation
72+
docs/_build/
73+
74+
# PyBuilder
75+
.pybuilder/
76+
target/
77+
78+
# Jupyter Notebook
79+
.ipynb_checkpoints
80+
81+
# IPython
82+
profile_default/
83+
ipython_config.py
84+
85+
# pyenv
86+
# For a library or package, you might want to ignore these files since the code is
87+
# intended to run in multiple environments; otherwise, check them in:
88+
# .python-version
89+
90+
# pipenv
91+
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
92+
# However, in case of collaboration, if having platform-specific dependencies or dependencies
93+
# having no cross-platform support, pipenv may install dependencies that don't work, or not
94+
# install all needed dependencies.
95+
#Pipfile.lock
96+
97+
# poetry
98+
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
99+
# This is especially recommended for binary packages to ensure reproducibility, and is more
100+
# commonly ignored for libraries.
101+
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
102+
#poetry.lock
103+
104+
# pdm
105+
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
106+
#pdm.lock
107+
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
108+
# in version control.
109+
# https://pdm.fming.dev/#use-with-ide
110+
.pdm.toml
111+
112+
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
113+
__pypackages__/
114+
115+
# Celery stuff
116+
celerybeat-schedule
117+
celerybeat.pid
118+
119+
# SageMath parsed files
120+
*.sage.py
121+
122+
# Environments
123+
.env
124+
.venv
125+
env/
126+
venv/
127+
ENV/
128+
env.bak/
129+
venv.bak/
130+
131+
# Spyder project settings
132+
.spyderproject
133+
.spyproject
134+
135+
# Rope project settings
136+
.ropeproject
137+
138+
# mkdocs documentation
139+
/site
140+
141+
# mypy
142+
.mypy_cache/
143+
.dmypy.json
144+
dmypy.json
145+
146+
# Pyre type checker
147+
.pyre/
148+
149+
# pytype static type analyzer
150+
.pytype/
151+
152+
# Cython debug symbols
153+
cython_debug/
154+
155+
# PyCharm
156+
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
157+
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
158+
# and can be added to the global gitignore or merged into this file. For a more nuclear
159+
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
160+
#.idea/

README.md

+57
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,57 @@
1+
# Assert LLM
2+
3+
A tool for lazy people who don't want to spend time writing tests.
4+
5+
(Just a project that I wrote for fun)
6+
7+
## How to run
8+
9+
Install the required packages with `pip install .`
10+
11+
Load any model on [LM Studio](https://lmstudio.ai) and start the "Local Inference Server".
12+
Assuming it's configured to expose the port `1234` on localhost, you can use this tool
13+
on your unit tests like this:
14+
15+
```
16+
>>> from src.main import llm_assert
17+
>>> from src.engines.lmstudio import LMStudioEngine
18+
>>> engine = LMStudioEngine("http://localhost:1234/v1")
19+
>>> llm_assert(engine, "5", "this is a number")
20+
>>> llm_assert(engine, "5", "this is not a number")
21+
Traceback (most recent call last):
22+
File "<stdin>", line 1, in <module>
23+
File "/Users/jdiazsua/Documents/Projects/PoCs/assert-llm/src/main.py", line 49, in llm_assert
24+
assert answer == "True", f"'{sut}' doesn't match the prompt '{prompt}'"
25+
^^^^^^^^^^^^^^^^
26+
AssertionError: '5' doesn't match the prompt 'this is not a number'
27+
```
28+
29+
This may look like overkill, but it can be useful for lazy checks like:
30+
31+
```
32+
>>> input_text = """
33+
Lorem ipsum dolor sit amet, consectetur adipiscing elit.
34+
123 Elm Street, Springfield, IL 62704.
35+
Vivamus aliquet, augue id semper varius, ex tellus luctus justo, nec viverra metus lectus nec magna.
36+
"""
37+
>>> llm_assert(engine, input_text, "it contains an address")
38+
>>> llm_assert(engine, input_text, "it contains a telephone number")
39+
Traceback (most recent call last):
40+
File "<stdin>", line 1, in <module>
41+
File "/Users/jdiazsua/Documents/Projects/PoCs/assert-llm/src/main.py", line 49, in llm_assert
42+
assert answer == "True", f"'{sut}' doesn't match the prompt '{prompt}'"
43+
^^^^^^^^^^^^^^^^
44+
AssertionError: '
45+
Lorem ipsum dolor sit amet, consectetur adipiscing elit.
46+
123 Elm Street, Springfield, IL 62704.
47+
Vivamus aliquet, augue id semper varius, ex tellus luctus justo, nec viverra metus lectus nec magna.
48+
' doesn't match the prompt 'it contains a telephone number'
49+
```
50+
51+
## Contributing
52+
53+
Feel free to add new engines to the [engines](src/engines/) package. The only requirement is that
54+
they must implement the `Engine` interface, as it will be used by the `llm_assert` function.
55+
56+
You can also contribute by adding new functions with different `system_message` and `prompt`
57+
or calling different APIs.

pyproject.toml

+24
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,24 @@
1+
[build-system]
2+
requires = ["setuptools>=61.0"]
3+
build-backend = "setuptools.build_meta"
4+
5+
[project]
6+
name = "llm-assert"
7+
version = "0.1.3"
8+
description = "Assertify using LLMs"
9+
authors = [{name = "Juan D.", email = "[email protected]"}]
10+
readme = "README.md"
11+
requires-python = ">=3.7.1"
12+
classifiers = [
13+
"Programming Language :: Python :: 3",
14+
"License :: OSI Approved :: Apache Software License",
15+
"Operating System :: OS Independent",
16+
]
17+
dependencies = [
18+
"openai",
19+
"pytest"
20+
]
21+
22+
[project.urls]
23+
Homepage = "https://github.com/juandspy/llm-assert"
24+
Issues = "https://github.com/juandspy/llm-assert/issues"

src/engines/__init__.py

+7
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,7 @@
1+
from abc import ABC, abstractmethod
2+
3+
class Engine(ABC):
    """Interface that every LLM engine implementation must satisfy.

    `llm_assert` depends only on this contract, so new back-ends can be
    added by subclassing and implementing `ask_chat`.
    """

    @abstractmethod
    def ask_chat(self, system_message: str, user_message: str, temperature=0.7) -> str:
        """Send a system/user message pair to the model and return its reply text."""

src/engines/lmstudio.py

+36
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,36 @@
1+
"""Logic to make use of LM Studio models."""
2+
from openai import OpenAI
3+
4+
5+
class BaseEngine:
    """Shared implementation for engines that speak the OpenAI chat API.

    Works for any OpenAI-compatible endpoint (LM Studio, OpenAI itself, ...).
    """

    def __init__(self, base_url: str, api_key: str = "not-needed",
                 model: str = "local-model"):
        """Create a chat client for `base_url`.

        Args:
            base_url: Root URL of the OpenAI-compatible API.
            api_key: Credential for the API. LM Studio ignores it, hence the
                placeholder default.
            model: Model identifier sent with each request. Previously
                hard-coded; parameterized here (same default) so engines can
                target a specific model. LM Studio serves whatever model is
                loaded regardless of this value.
        """
        self.client = OpenAI(base_url=base_url, api_key=api_key)
        self.model = model

    def ask_chat(self, system_message: str, user_message: str, temperature=0.7) -> str:
        """Run one chat completion and return the assistant's reply text.

        Args:
            system_message: Instructions placed in the `system` role.
            user_message: The actual question, placed in the `user` role.
            temperature: Sampling temperature forwarded to the API.
        """
        completion = self.client.chat.completions.create(
            model=self.model,
            messages=[
                {"role": "system", "content": system_message},
                {"role": "user", "content": user_message},
            ],
            temperature=temperature,
        )
        return completion.choices[0].message.content
21+
class LMStudioEngine(BaseEngine):
    """Engine for a local LM Studio inference server.

    LM Studio exposes an OpenAI-compatible API, so the base implementation
    is reused unchanged.
    """
23+
24+
class OpenAIEngine(BaseEngine):
    """Engine for the real OpenAI API, which requires a non-empty key."""

    def __init__(self, base_url: str, api_key: str):
        # Fail fast: unlike LM Studio, the real API rejects keyless requests.
        if not api_key:
            raise ValueError("API key cannot be empty")
        super().__init__(base_url, api_key)
29+
30+
31+
if __name__ == "__main__":
    # Manual smoke test against a locally running LM Studio server.
    demo_engine = LMStudioEngine(base_url="http://localhost:1234/v1")
    reply = demo_engine.ask_chat(
        system_message="Always answer in rhymes.",
        user_message="Introduce yourself.",
    )
    print(reply)

src/main.py

+49
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,49 @@
1+
"""Main functions of the package."""
2+
from typing import Any
3+
from src.engines import Engine
4+
5+
6+
class UnexpectedEngineResponseError(Exception):
    """The LLM engine replied with something other than 'True' or 'False'."""
8+
9+
# Characters we want to include on the answer: ASCII letters only, so that
# quotes/punctuation/whitespace around the engine's 'True'/'False' are dropped.
whitelist = set(map(chr, range(ord("a"), ord("z") + 1))) \
    | set(map(chr, range(ord("A"), ord("Z") + 1)))
11+
12+
def llm_assert(engine: Engine, sut: Any, prompt: str):
    """
    Assert, using an LLM Engine, whether an input matches a prompt.

    This function queries an LLM engine using the prompt parameter,
    informing it that it can only respond with 'True' or 'False'.
    It filters the engine's response, removing any characters that are not
    ASCII letters. If the filtered response is not 'True' or 'False', it
    raises an UnexpectedEngineResponseError. Otherwise it asserts that the
    response was 'True' (the function itself returns None).

    Example:
        >>> engine = LMStudioEngine("http://localhost:1234/v1")
        >>> llm_assert(engine, "5", "this is a number")
        >>> llm_assert(engine, "apple", "this is a number")
        Traceback (most recent call last):
        File "<stdin>", line 1, in <module>
        AssertionError

    Args:
        engine (Engine): The LLM Engine to ask the question.
        sut (Any): The item under test.
        prompt (str): The question to ask the engine.

    Raises:
        UnexpectedEngineResponseError: If the engine's response is not 'True' or 'False'.
        AssertionError: If the engine's response is 'False'
    """
    answer = engine.ask_chat(
        system_message="""
        You can only answer 'True' or 'False', no matter what the input is.
        Don't provide any explanation.
        """,
        user_message=f"{sut}: {prompt}")
    # Local models often wrap the verdict in quotes, punctuation or
    # whitespace, so keep only the letters before comparing.
    answer = ''.join(filter(whitelist.__contains__, answer))
    if answer not in ["True", "False"]:
        raise UnexpectedEngineResponseError(
            f"Got unexpected answer from the LLM Engine: '{answer}'.")
    assert answer == "True", f"'{sut}' doesn't match the prompt '{prompt}'"

tests/__init__.py

Whitespace-only changes.

tests/test_assert.py

+42
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,42 @@
1+
"""Test the main functions of the package."""
2+
import pytest
3+
4+
from src.main import llm_assert
5+
from src.engines.lmstudio import LMStudioEngine
6+
7+
8+
# Each case is (sut, prompt, should_pass): the third field is True when
# `llm_assert` is expected to succeed and False when it should raise
# AssertionError.
INPUTS = [
    ("5", "this is a number", True),
    ("twenty", "this is a number", True),
    ("apple", "this is a number", False),
    ("5", "this is not a number", False),
    ("twenty", "this is not a number", False),
    ("apple", "this is not a number", True)
]
16+
17+
class MockEngine:
    """Engine stub that ignores its inputs and returns a canned answer."""

    def __init__(self, mocked_answer: str):
        self.mocked_answer = mocked_answer

    def ask_chat(self, system_message: str, user_message: str, temperature=0.7) -> str:
        """Return the canned answer regardless of the messages sent."""
        return self.mocked_answer
22+
23+
@pytest.mark.parametrize("sut,prompt,expected_exception", INPUTS)
def test_llm_assert__mock(sut, prompt, expected_exception):
    """Check llm_assert against a canned engine answer for every case."""
    engine = MockEngine(str(expected_exception))
    if expected_exception:
        llm_assert(engine, sut, prompt)
    else:
        with pytest.raises(AssertionError):
            llm_assert(engine, sut, prompt)
32+
33+
34+
@pytest.mark.parametrize("sut,prompt,expected_exception", INPUTS)
def test_llm_assert__lmstudio(sut, prompt, expected_exception):
    """Integration check of llm_assert against a live LM Studio server.

    NOTE(review): this assumes LM Studio's local inference server is
    listening on localhost:1234 — confirm before running in CI.
    """
    engine = LMStudioEngine("http://localhost:1234/v1")
    if expected_exception:
        llm_assert(engine, sut, prompt)
    else:
        with pytest.raises(AssertionError):
            llm_assert(engine, sut, prompt)

0 commit comments

Comments
 (0)