clean up and jank windows wheel build

commit 23e748d1c2
parent 5e873a060c
Author: Richard Guo
Date:   2023-05-10 15:58:27 -04:00

5 changed files with 30 additions and 65 deletions

View File

@@ -66,46 +66,6 @@ jobs:
           paths:
             - "*.whl"
-  # build-linux:
-  #   docker:
-  #     - image: circleci/python:3.8
-  #   steps:
-  #     - checkout
-  #     - run:
-  #         name: Install dependencies
-  #         command: |
-  #           sudo apt-get update
-  #           sudo apt-get install -y cmake build-essential
-  #           pip install setuptools wheel cmake
-  #     - run:
-  #         name: Install MingW
-  #         command: sudo apt-get install -y mingw-w64
-  #     - run:
-  #         name: Build C library
-  #         command: |
-  #           git submodule init
-  #           git submodule update
-  #           cd llmodel
-  #           mkdir build
-  #           cd build
-  #           cmake \
-  #             -DCMAKE_SYSTEM_NAME=Windows \
-  #             -DCMAKE_C_COMPILER=x86_64-w64-mingw32-gcc \
-  #             -DCMAKE_CXX_COMPILER=x86_64-w64-mingw32-g++ \
-  #             -DCMAKE_EXECUTABLE_SUFFIX=".exe" \
-  #             -DCMAKE_SHARED_LIBRARY_SUFFIX=".dll" \
-  #             ..
-  #           cmake --build . --parallel
-  #     - run:
-  #         name: Build wheel
-  #         command: |
-  #           cd bindings/python/
-  #           python setup.py bdist_wheel --plat-name=manylinux1_x86_64
-  #     - persist_to_workspace:
-  #         root: bindings/python/dist
-  #         paths:
-  #           - "*.whl"
   build-windows:
     executor:
       name: win/default
@@ -135,8 +95,15 @@ jobs:
             cmake --build . --parallel
       - run:
           name: Build wheel
+          # TODO: As part of this task, we need to move mingw64 binaries into package.
+          # This is terrible and needs a more robust solution eventually.
          command: |
             cd gpt4all-bindings/python
+            cd gpt4all
+            mkdir llmodel_DO_NOT_MODIFY
+            mkdir llmodel_DO_NOT_MODIFY/build/
+            cp 'C:\ProgramData\chocolatey\lib\mingw\tools\install\mingw64\bin\*dll' 'llmodel_DO_NOT_MODIFY/build/'
+            cd ..
             python setup.py bdist_wheel --plat-name=win_amd64
       - persist_to_workspace:
           root: gpt4all-bindings/python/dist
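
The cp step above is presumably there so that the MinGW-built llmodel/llama DLLs can find their
runtime dependencies (e.g. libstdc++-6.dll, libwinpthread-1.dll) once the wheel is installed and
loaded via ctypes. A rough sketch of the "more robust solution" the TODO asks for could locate the
compiler on PATH instead of hard-coding the Chocolatey install path; the helper below is an
illustration under that assumption, not code from this commit.

# Hypothetical helper: copy the DLLs sitting next to the MinGW g++ on PATH
# into the bundled llmodel_DO_NOT_MODIFY/build directory.
import glob
import os
import shutil

def copy_mingw_runtime_dlls(dest_dir):
    gxx = shutil.which("x86_64-w64-mingw32-g++") or shutil.which("g++")
    if gxx is None:
        raise RuntimeError("MinGW g++ not found on PATH")
    bin_dir = os.path.dirname(gxx)
    os.makedirs(dest_dir, exist_ok=True)
    copied = 0
    for dll in glob.glob(os.path.join(bin_dir, "*.dll")):
        shutil.copy2(dll, dest_dir)  # place each runtime DLL next to the bindings' libraries
        copied += 1
    return copied

copy_mingw_runtime_dlls(os.path.join("gpt4all", "llmodel_DO_NOT_MODIFY", "build"))
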
@@ -156,10 +123,10 @@ jobs:
             sudo apt-get update
             sudo apt-get install -y cmake build-essential
             pip install setuptools wheel twine
-      # - run:
-      #     name: Upload Python package
-      #     command: |
-      #       twine upload /tmp/workspace/*.whl --username __token__ --password $PYPI_CRED
+      - run:
+          name: Upload Python package
+          command: |
+            twine upload /tmp/workspace/*.whl --username __token__ --password $PYPI_CRED
       - store_artifacts:
           path: /tmp/workspace
@@ -167,11 +134,11 @@ workflows:
   version: 2
   build-deploy:
     jobs:
-      #- build-linux
-      #- build-macos
+      - build-linux
+      - build-macos
       - build-windows
       - store-and-upload-wheels:
           requires:
             - build-windows
-            # - build-linux
-            # - build-macos
+            - build-linux
+            - build-macos

View File

@@ -73,7 +73,7 @@ class GPT4All():
         Returns:
             Model file destination.
         """
-        model_path = model_path.replace("\\", "\\\\")
         model_filename = model_name
         if ".bin" not in model_filename:
             model_filename += ".bin"
@@ -87,6 +87,8 @@ class GPT4All():
             except:
                 raise ValueError("Failed to create model download directory at ~/.cache/gpt4all/. \
                     Please specify download_dir.")
+        else:
+            model_path = model_path.replace("\\", "\\\\")
 
         if os.path.exists(model_path):
             model_dest = os.path.join(model_path, model_filename).replace("\\", "\\\\")
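
Taken together with the earlier hunk that deletes the unconditional replace() at the top of the
method, this moves the backslash escaping into the branch where the caller actually supplied a
model_path, so a default (None) path no longer hits str.replace before the cache directory is set
up. A minimal sketch of the resulting control flow, with the default cache location assumed for
illustration rather than taken from the bindings:

import os

DEFAULT_MODEL_DIRECTORY = os.path.join(os.path.expanduser("~"), ".cache", "gpt4all")

def resolve_model_dir(model_path=None):
    if model_path is None:
        # Fall back to the default cache directory, creating it if needed.
        os.makedirs(DEFAULT_MODEL_DIRECTORY, exist_ok=True)
        model_path = DEFAULT_MODEL_DIRECTORY
    else:
        # Escape backslashes only for caller-supplied Windows paths.
        model_path = model_path.replace("\\", "\\\\")
    return model_path
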
@@ -178,7 +180,6 @@ class GPT4All():
         full_prompt = self._build_prompt(messages,
                                          default_prompt_header=default_prompt_header,
                                          default_prompt_footer=default_prompt_footer)
-
         if verbose:
             print(full_prompt)

View File

@@ -31,9 +31,7 @@ def load_llmodel_library():
     # For windows
     llama_dir = llama_dir.replace("\\", "\\\\")
-    print(llama_dir)
     llmodel_dir = llmodel_dir.replace("\\", "\\\\")
-    print(llmodel_dir)
 
     llama_lib = ctypes.CDLL(llama_dir, mode=ctypes.RTLD_GLOBAL)
     llmodel_lib = ctypes.CDLL(llmodel_dir)
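
With the debug prints gone, the load sequence is just the two CDLL calls shown above: the llama
library is opened first with RTLD_GLOBAL, presumably so its symbols are visible when the llmodel
library is loaded right after it. A minimal sketch of that pattern, with placeholder file names
rather than the bindings' real ones:

import ctypes
import os

def load_libraries(build_dir, ext=".so"):
    # llama is loaded with RTLD_GLOBAL so the subsequently loaded llmodel
    # library can resolve symbols from it.
    llama_lib = ctypes.CDLL(os.path.join(build_dir, "libllama" + ext),
                            mode=ctypes.RTLD_GLOBAL)
    llmodel_lib = ctypes.CDLL(os.path.join(build_dir, "libllmodel" + ext))
    return llama_lib, llmodel_lib
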
@@ -80,7 +78,6 @@ llmodel.llmodel_prompt.argtypes = [ctypes.c_void_p,
                                    RecalculateCallback,
                                    ctypes.POINTER(LLModelPromptContext)]
-
 class LLModel:
     """
     Base class and universal wrapper for GPT4All language models

View File

@@ -36,19 +36,19 @@ def copy_prebuilt_C_lib(src_dir, dest_dir, dest_build_dir):
         os.mkdir(dest_dir)
         os.mkdir(dest_build_dir)
 
     for dirpath, _, filenames in os.walk(src_dir):
         for item in filenames:
             # copy over header files to dest dir
-            s = os.path.join(dirpath, item)
-            if item.endswith(".h"):
-                d = os.path.join(dest_dir, item)
-                shutil.copy2(s, d)
-                files_copied += 1
-            if item.endswith(lib_ext):
-                s = os.path.join(dirpath, item)
-                d = os.path.join(dest_build_dir, item)
-                shutil.copy2(s, d)
-                files_copied += 1
+            s = os.path.join(dirpath, item)
+            if item.endswith(".h"):
+                d = os.path.join(dest_dir, item)
+                shutil.copy2(s, d)
+                files_copied += 1
+            if item.endswith(lib_ext):
+                d = os.path.join(dest_build_dir, item)
+                shutil.copy2(s, d)
+                files_copied += 1
 
     return files_copied
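
The restructured loop walks the prebuilt C library tree once, copying header files into the package
directory and shared libraries into the bundled build directory. The lib_ext it filters on is
presumably chosen per platform; one way that could look, as an assumption for illustration rather
than this setup.py's exact code:

import platform

def shared_lib_extension():
    # Pick the shared-library suffix for the current platform.
    system = platform.system()
    if system == "Windows":
        return ".dll"
    if system == "Darwin":
        return ".dylib"
    return ".so"

lib_ext = shared_lib_extension()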