// 2023-05-31 21:04:01 +00:00
# ifndef LLAMAMODEL_H_I_KNOW_WHAT_I_AM_DOING_WHEN_INCLUDING_THIS_FILE
# error This file is NOT meant to be included outside of llamamodel.cpp. Doing so is DANGEROUS. Be sure to know what you are doing before proceeding to #define LLAMAMODEL_H_I_KNOW_WHAT_I_AM_DOING_WHEN_INCLUDING_THIS_FILE
# endif
// 2023-04-15 19:57:32 +00:00
# ifndef LLAMAMODEL_H
# define LLAMAMODEL_H
# include <string>
# include <functional>
# include <vector>
# include "llmodel.h"
// 2023-05-31 21:04:01 +00:00
struct LLamaPrivate ;
// 2023-04-15 19:57:32 +00:00
// Concrete LLModel implementation backed by a llama-family model
// (presumably the llama.cpp backend — this header is only meant to be
// included from llamamodel.cpp, per the sentinel at the top of the file).
// All virtual members below override the LLModel interface.
class LLamaModel : public LLModel {
public:
    LLamaModel();
    // NOTE(review): destructor is non-virtual here; deletion through an
    // LLModel* is only safe if the LLModel base declares a virtual
    // destructor — confirm in llmodel.h.
    ~LLamaModel();

    // Loads the model weights from the given file path.
    // Returns true on success; isModelLoaded() reports the current state.
    bool loadModel(const std::string &modelPath) override;
    bool isModelLoaded() const override;

    // Serializable inference state: stateSize() reports the number of
    // bytes needed, saveState()/restoreState() copy it to/from a caller
    // supplied buffer and return the number of bytes processed.
    // (Buffer must be at least stateSize() bytes — caller's responsibility.)
    size_t stateSize() const override;
    size_t saveState(uint8_t *dest) const override;
    size_t restoreState(const uint8_t *src) override;

    // Runs generation for `prompt`, streaming results through callbacks:
    //  - promptCallback: invoked per consumed prompt token id; return
    //    false to cancel.
    //  - responseCallback: invoked per generated token id and its text;
    //    return false to cancel.
    //  - recalculateCallback: invoked around context recalculation
    //    (the bool presumably flags begin/end — verify in llmodel.h).
    //  - ctx: sampling parameters and rolling context, updated in place.
    void prompt(const std::string &prompt,
        std::function<bool(int32_t)> promptCallback,
        std::function<bool(int32_t, const std::string&)> responseCallback,
        std::function<bool(bool)> recalculateCallback,
        PromptContext &ctx) override;

    // Evaluates `tokens` against the model, advancing the state in `ctx`.
    // Returns false on failure.
    bool evalTokens(PromptContext &ctx, const std::vector<int32_t> &tokens) override;

    // Number of CPU threads used for inference.
    void setThreadCount(int32_t n_threads) override;
    int32_t threadCount() const override;

private:
    // Pimpl: hides llama.cpp types from this header. Raw pointer because
    // the .cpp owns allocation/deletion (a unique_ptr would require
    // changing the out-of-view implementation file).
    LLamaPrivate *d_ptr;
};
// 2023-05-31 21:04:01 +00:00
# endif // LLAMAMODEL_H