#!/usr/bin/env run.sh
"""
Test that llama.cpp can build and exec in the omni repo.
Note that this does not test if llama-cpp can actually execute any models. I
(currently) use ollama for running and managing models, but I'd like to make
sure llama-cpp still works in case I need/want to switch at some point.
"""
# : out llamacpp-test
# : run llama-cpp
import Omni.App as App
import Omni.Log as Log
import Omni.Test as Test
import os
import sys
import unittest
class TestLlamaCpp(unittest.TestCase):
"""Test that llama.cpp is available."""
def test_in_path(self) -> None:
"""Test that llama.cpp is in $PATH."""
self.assertIn("llama-cpp", os.environ.get("PATH", ""))
def main() -> None:
    """Entrypoint.

    Runs the unittest suite when invoked with the ``test`` subcommand;
    any other invocation (including no arguments) exits successfully so
    the script is a no-op outside of test runs.
    """
    # Guard the argv access: the original indexed sys.argv[1] directly,
    # which raised IndexError when the script was run with no arguments.
    if len(sys.argv) > 1 and sys.argv[1] == "test":
        Log.setup()
        Test.run(App.Area.Test, [TestLlamaCpp])
    else:
        sys.exit(0)