clean up and jank windows wheel build

This commit is contained in:
parent 38f5c28b73
commit 3668cf00cf
@@ -66,46 +66,6 @@ jobs:
          paths:
            - "*.whl"

#  build-linux:
#    docker:
#      - image: circleci/python:3.8
#    steps:
#      - checkout
#      - run:
#          name: Install dependencies
#          command: |
#            sudo apt-get update
#            sudo apt-get install -y cmake build-essential
#            pip install setuptools wheel cmake
#      - run:
#          name: Install MingW
#          command: sudo apt-get install -y mingw-w64
#      - run:
#          name: Build C library
#          command: |
#            git submodule init
#            git submodule update
#            cd llmodel
#            mkdir build
#            cd build
#            cmake \
#              -DCMAKE_SYSTEM_NAME=Windows \
#              -DCMAKE_C_COMPILER=x86_64-w64-mingw32-gcc \
#              -DCMAKE_CXX_COMPILER=x86_64-w64-mingw32-g++ \
#              -DCMAKE_EXECUTABLE_SUFFIX=".exe" \
#              -DCMAKE_SHARED_LIBRARY_SUFFIX=".dll" \
#              ..
#            cmake --build . --parallel
#      - run:
#          name: Build wheel
#          command: |
#            cd bindings/python/
#            python setup.py bdist_wheel --plat-name=manylinux1_x86_64
#      - persist_to_workspace:
#          root: bindings/python/dist
#          paths:
#            - "*.whl"

  build-windows:
    executor:
      name: win/default
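The removed job above cross-compiled the C library for Windows with MinGW inside a Linux container. For reference, a minimal Python sketch of driving the same build locally, reusing the cmake flags from that commented-out job (the llmodel directory layout and toolchain names are taken from it, not re-verified):

# Sketch only: replays the MinGW cross-compile from the removed CI job.
# Assumes cmake and mingw-w64 are installed and submodules are checked out.
import os
import subprocess

def cross_compile_llmodel(source_dir: str = "llmodel") -> None:
    build_dir = os.path.join(source_dir, "build")
    os.makedirs(build_dir, exist_ok=True)
    subprocess.run([
        "cmake",
        "-DCMAKE_SYSTEM_NAME=Windows",
        "-DCMAKE_C_COMPILER=x86_64-w64-mingw32-gcc",
        "-DCMAKE_CXX_COMPILER=x86_64-w64-mingw32-g++",
        "-DCMAKE_EXECUTABLE_SUFFIX=.exe",
        "-DCMAKE_SHARED_LIBRARY_SUFFIX=.dll",
        "..",
    ], cwd=build_dir, check=True)
    subprocess.run(["cmake", "--build", ".", "--parallel"], cwd=build_dir, check=True)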
@@ -135,8 +95,15 @@ jobs:
            cmake --build . --parallel
      - run:
          name: Build wheel
          # TODO: As part of this task, we need to move mingw64 binaries into package.
          # This is terrible and needs a more robust solution eventually.
          command: |
            cd gpt4all-bindings/python
            cd gpt4all
            mkdir llmodel_DO_NOT_MODIFY
            mkdir llmodel_DO_NOT_MODIFY/build/
            cp 'C:\ProgramData\chocolatey\lib\mingw\tools\install\mingw64\bin\*dll' 'llmodel_DO_NOT_MODIFY/build/'
            cd ..
            python setup.py bdist_wheel --plat-name=win_amd64
      - persist_to_workspace:
          root: gpt4all-bindings/python/dist
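The DLL copy in the Build wheel step is the "jank" part of the commit title: the MinGW runtime DLLs are dropped into llmodel_DO_NOT_MODIFY/build/ so the resulting wheel is self-contained on Windows. A rough Python equivalent of that shell step (the chocolatey install path is the one from the command above and may differ on other machines):

# Sketch of the DLL-bundling step above; not part of the repository.
import glob
import os
import shutil

MINGW_BIN = r"C:\ProgramData\chocolatey\lib\mingw\tools\install\mingw64\bin"
DEST = os.path.join("gpt4all", "llmodel_DO_NOT_MODIFY", "build")

def bundle_mingw_dlls(src_dir: str = MINGW_BIN, dest_dir: str = DEST) -> int:
    """Copy every *.dll from the MinGW bin directory into the package tree."""
    os.makedirs(dest_dir, exist_ok=True)
    copied = 0
    for dll in glob.glob(os.path.join(src_dir, "*.dll")):
        shutil.copy2(dll, dest_dir)
        copied += 1
    return copied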
@@ -156,10 +123,10 @@ jobs:
            sudo apt-get update
            sudo apt-get install -y cmake build-essential
            pip install setuptools wheel twine
#      - run:
#          name: Upload Python package
#          command: |
#            twine upload /tmp/workspace/*.whl --username __token__ --password $PYPI_CRED
      - run:
          name: Upload Python package
          command: |
            twine upload /tmp/workspace/*.whl --username __token__ --password $PYPI_CRED
      - store_artifacts:
          path: /tmp/workspace

@@ -167,11 +134,11 @@ workflows:
  version: 2
  build-deploy:
    jobs:
      #- build-linux
      #- build-macos
      - build-linux
      - build-macos
      - build-windows
      - store-and-upload-wheels:
          requires:
            - build-windows
            # - build-linux
            # - build-macos
            - build-linux
            - build-macos

@@ -73,7 +73,7 @@ class GPT4All():
        Returns:
            Model file destination.
        """
        model_path = model_path.replace("\\", "\\\\")

        model_filename = model_name
        if ".bin" not in model_filename:
            model_filename += ".bin"
@@ -87,6 +87,8 @@ class GPT4All():
            except:
                raise ValueError("Failed to create model download directory at ~/.cache/gpt4all/. \
                    Please specify download_dir.")
        else:
            model_path = model_path.replace("\\", "\\\\")

        if os.path.exists(model_path):
            model_dest = os.path.join(model_path, model_filename).replace("\\", "\\\\")
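Both hunks above double the backslashes in Windows model paths before they are used further down, presumably so the separators survive when the string is later handed to the native library. A minimal illustration with a hypothetical path:

# Illustration only: each single backslash in the path becomes two.
model_path = "C:\\Users\\alice\\.cache\\gpt4all"   # plain Windows path
escaped = model_path.replace("\\", "\\\\")
print(escaped)   # prints C:\\Users\\alice\\.cache\\gpt4all (every backslash doubled)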
@@ -178,7 +180,6 @@ class GPT4All():
        full_prompt = self._build_prompt(messages,
                                         default_prompt_header=default_prompt_header,
                                         default_prompt_footer=default_prompt_footer)

        if verbose:
            print(full_prompt)

@@ -31,9 +31,7 @@ def load_llmodel_library():

    # For windows
    llama_dir = llama_dir.replace("\\", "\\\\")
    print(llama_dir)
    llmodel_dir = llmodel_dir.replace("\\", "\\\\")
    print(llmodel_dir)

    llama_lib = ctypes.CDLL(llama_dir, mode=ctypes.RTLD_GLOBAL)
    llmodel_lib = ctypes.CDLL(llmodel_dir)
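load_llmodel_library opens the llama library with RTLD_GLOBAL so its symbols are visible when the dependent llmodel library is loaded next. A self-contained sketch of that two-step loading pattern (the file names and directory are assumptions for illustration):

# Sketch of the ctypes load order used above; library names are assumed.
import ctypes
import os
import platform

def load_native_libs(build_dir: str):
    ext = {"Windows": ".dll", "Darwin": ".dylib"}.get(platform.system(), ".so")
    llama_path = os.path.join(build_dir, "libllama" + ext)
    llmodel_path = os.path.join(build_dir, "libllmodel" + ext)
    # RTLD_GLOBAL exports llama's symbols so llmodel can resolve them.
    llama_lib = ctypes.CDLL(llama_path, mode=ctypes.RTLD_GLOBAL)
    llmodel_lib = ctypes.CDLL(llmodel_path)
    return llama_lib, llmodel_lib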
@@ -80,7 +78,6 @@ llmodel.llmodel_prompt.argtypes = [ctypes.c_void_p,
                                   RecalculateCallback,
                                   ctypes.POINTER(LLModelPromptContext)]


class LLModel:
    """
    Base class and universal wrapper for GPT4All language models

@@ -36,19 +36,19 @@ def copy_prebuilt_C_lib(src_dir, dest_dir, dest_build_dir):
os.mkdir(dest_dir)
os.mkdir(dest_build_dir)

for dirpath, _, filenames in os.walk(src_dir):
for item in filenames:
# copy over header files to dest dir
for dirpath, _, filenames in os.walk(src_dir):
for item in filenames:
# copy over header files to dest dir
s = os.path.join(dirpath, item)
if item.endswith(".h"):
d = os.path.join(dest_dir, item)
shutil.copy2(s, d)
files_copied += 1
if item.endswith(lib_ext):
s = os.path.join(dirpath, item)
if item.endswith(".h"):
d = os.path.join(dest_dir, item)
shutil.copy2(s, d)
files_copied += 1
if item.endswith(lib_ext):
s = os.path.join(dirpath, item)
d = os.path.join(dest_build_dir, item)
shutil.copy2(s, d)
files_copied += 1
d = os.path.join(dest_build_dir, item)
shutil.copy2(s, d)
files_copied += 1

return files_copied
