|
|
|
@@ -138,31 +138,23 @@ $(info I CC: $(CCV))
|
|
|
|
|
$(info I CXX: $(CXXV))
|
|
|
|
|
$(info )
|
|
|
|
|
|
|
|
|
|
# Build llama.o out-of-tree from the shared gpt4all llama.cpp backend.
# NOTE(review): the recipe also leaves CMake build state in buildllama/
# (removed by `make clean`), so it produces more than just $@.
llama.o:
	mkdir -p buildllama
	cd buildllama && cmake ../../../gpt4all-backend/llama.cpp $(CMAKEFLAGS) && $(MAKE) VERBOSE=1 llama.o && cp -rf CMakeFiles/llama.dir/llama.cpp.o ../llama.o
|
|
|
|
|
|
|
|
|
|
# Build llmodel.o plus its companion objects (llmodel_c.o and the three
# ggml-*.o variants) out-of-tree in buildllm/.
# NOTE(review): this rule writes several files besides $@, so make cannot
# track the siblings individually; they are only refreshed when llmodel.o
# itself is rebuilt — confirm downstream rules tolerate that.
llmodel.o:
	mkdir -p buildllm
	cd buildllm && cmake ../../../gpt4all-backend/ $(CMAKEFLAGS) && $(MAKE) VERBOSE=1 llmodel ggml-mainline-default ggml-230511-default ggml-230519-default
	cd buildllm && cmake ../../../gpt4all-backend/ $(CMAKEFLAGS) && $(MAKE)
	cd buildllm && cp -rf CMakeFiles/llmodel.dir/llmodel_c.cpp.o ../llmodel_c.o
	cd buildllm && cp -rf CMakeFiles/llmodel.dir/llmodel.cpp.o ../llmodel.o
	cd buildllm && cp -rf CMakeFiles/ggml-230519-default.dir/llama.cpp-230519/ggml.c.o ../ggml-230519.o
	cd buildllm && cp -rf CMakeFiles/ggml-230511-default.dir/llama.cpp-230511/ggml.c.o ../ggml-230511.o
	cd buildllm && cp -rf CMakeFiles/ggml-mainline-default.dir/llama.cpp-mainline/ggml.c.o ../ggml-mainline.o
|
|
|
|
|
|
|
|
|
|
# Remove every build product this Makefile creates.
# Declared .PHONY so a stray file named "clean" cannot disable the target.
.PHONY: clean
clean:
	rm -f *.o
	rm -f *.a
	rm -rf buildllm
	rm -rf buildllama
	rm -rf example/main
|
|
|
|
|
|
|
|
|
|
# Compile the C++ binding glue. Depends on binding.cpp so edits trigger a
# rebuild (the original rule had no prerequisite and so never re-ran once
# binding.o existed).
# NOTE(review): $(LDFLAGS) on a compile-only (-c) step is normally inert;
# kept in case callers smuggle include paths through it — confirm and drop.
binding.o: binding.cpp
	$(CXX) $(CXXFLAGS) $< -o $@ -c $(LDFLAGS)
|
|
|
|
|
|
|
|
|
|
# Archive the static library consumed by the Go (cgo) bindings.
# NOTE(review): the original file defined libgpt4all.a TWICE; GNU make
# merges the prerequisite lists and keeps only the LAST recipe, emitting an
# "overriding recipe ... ignoring old recipe" warning. This single rule
# reproduces that effective behavior exactly: all three objects are still
# built as prerequisites, but only llmodel.o and binding.o are archived.
# Confirm whether llama.o and the ggml-*.o objects were meant to be
# included (the overridden first recipe archived them).
libgpt4all.a: binding.o llmodel.o llama.o
	ar src libgpt4all.a llmodel.o binding.o
|
|
|
|
|
|
|
|
|
|
# Run the Go test suite against the freshly built static library.
# Declared .PHONY so a file named "test" cannot mask the target.
.PHONY: test
test: libgpt4all.a
	@C_INCLUDE_PATH=${INCLUDE_PATH} LIBRARY_PATH=${LIBRARY_PATH} go test -v ./...
|
|
|
|
|