%global pypi_name llama-cpp-python
%global pypi_version 0.2.60
# it's all Python code; the native llama.cpp library is unbundled
%global debug_package %{nil}

Name: python-%{pypi_name}
Version: %{pypi_version}
Release: %autorelease
License: MIT
Summary: Simple Python bindings for @ggerganov's llama.cpp library
URL: https://github.com/abetlen/llama-cpp-python
Source: %{url}/archive/v%{version}/%{pypi_name}-%{version}.tar.gz
Patch1: 0001-don-t-build-llama.cpp-and-llava.patch
Patch2: 0002-search-for-libllama-so-in-usr-lib64.patch
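# Patch1 skips building the vendored llama.cpp/llava sources; we link against
# the system llama-cpp instead (see the llama-cpp-devel dependencies below).
# Patch2 points the ctypes loader at the system libllama.so under /usr/lib64.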

%bcond_with test
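# tests are disabled by default; rebuild with --with test (e.g. rpmbuild --with test)
# to run the subset selected in %%check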

# these are the architectures llama-cpp is built for, and this library is
# installed under /usr/lib64/python3.12/site-packages/llama_cpp/ by default
ExclusiveArch: x86_64 aarch64

BuildRequires: git-core
BuildRequires: gcc
BuildRequires: gcc-c++
BuildRequires: ninja-build
BuildRequires: python3-devel
BuildRequires: llama-cpp-devel
%if %{with test}
BuildRequires: python3-pytest
BuildRequires: python3-scipy
%endif

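# the remaining Python build dependencies are generated at build time from the
# upstream build-system metadata (pyproject.toml)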
%generate_buildrequires
%pyproject_buildrequires

%description
%{pypi_name} provides:
- low-level access to the llama.cpp C API via ctypes
- a high-level Python API for text completion
- an OpenAI-compatible web server

%package -n python3-%{pypi_name}
Summary: %{summary}
# the llama-cpp-devel subpackage has the unversioned libllama.so that the ctypes bindings load
Requires: llama-cpp-devel

%description -n python3-%{pypi_name}
%{pypi_name} provides:
- low-level access to the llama.cpp C API via ctypes
- a high-level Python API for text completion
- an OpenAI-compatible web server

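# Illustrative use of the high-level API (the model path is a placeholder):
#   from llama_cpp import Llama
#   llm = Llama(model_path="/path/to/model.gguf")
#   print(llm("Q: Name the planets. A:", max_tokens=32)["choices"][0]["text"])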
%prep
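# -Sgit applies the patches with git, which is why git-core is in BuildRequires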
%autosetup -p1 -n %{pypi_name}-%{version} -Sgit

%build
%pyproject_wheel
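# with Patch1 the vendored llama.cpp is not compiled, so the wheel is pure Python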

%if %{with test}
%check
# most test_llama.py tests need the ggml-vocab-llama.gguf model from the vendored
# llama.cpp, so run only the tests that work without it
%pytest -vs \
    tests/test_llama.py::test_llama_cpp_version \
    tests/test_llama.py::test_logits_to_logprobs \
    tests/test_llama_speculative.py \
    tests/test_llama_chat_format.py \
    tests/test_llama_grammar.py
%endif

%install
%pyproject_install
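# -l asserts that a license file was recorded in the dist-info metadata; the
# generated %%{pyproject_files} lists the installed llama_cpp module for %%files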
%pyproject_save_files -l llama_cpp

%files -n python3-%{pypi_name} -f %{pyproject_files}
%license LICENSE.md
%doc README.md

%changelog
%autochangelog