grammar experiments that did not work
spirobel committed Oct 4, 2023
1 parent 8e88a3d commit a0f3406
Showing 2 changed files with 62 additions and 9 deletions.
9 changes: 5 additions & 4 deletions bunny/example_init_functions/global_llama.ts
@@ -16,7 +16,7 @@ export function global_llama(path: string) {
returns: FFIType.ptr,
},
prompt: {
args: [FFIType.cstring, FFIType.ptr, FFIType.cstring, FFIType.function],
args: [FFIType.cstring, FFIType.ptr, FFIType.cstring, FFIType.function, FFIType.cstring],
},
});
async function loadModel(params: string) {
@@ -37,7 +37,7 @@ export function global_llama(path: string) {
}
global.llama.load_model = loadModel;

function promptLLama(p: string, prompt_callback?: PromptCallback, params?: string) {
function promptLLama(p: string, grammmar?: string, prompt_callback?: PromptCallback, params?: string) {
if (!prompt_callback) {
prompt_callback = (response: string) => { console.log(response); return true }
}
@@ -56,12 +56,13 @@ export function global_llama(path: string) {
}
const params_cstr = Buffer.from(`${params}\0`, "utf8");
const prompt_cstr = Buffer.from(`${p}\0`, "utf8");

const grammar_string = grammmar ? ptr(Buffer.from(`${grammmar}\0`, "utf8")) : 0;
lib.symbols.prompt(
ptr(prompt_cstr),
global.llama._model.llama_parts,
ptr(params_cstr),
promptCallback,
grammar_string
);
}
global.llama.prompt = promptLLama;
@@ -76,7 +77,7 @@ export interface LLama {
params: string,

) => Promise<Model>;
prompt: (p: string, prompt_callback?: PromptCallback, params?: string) => void;
prompt: (p: string, grammmar?: string, prompt_callback?: PromptCallback, params?: string) => void;
_model: Model;
}
export interface Model {
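For reference, after this change the TypeScript wrapper takes the optional grammar string as the second argument, ahead of the callback and the params string. A minimal usage sketch under that assumption (the prompt text and grammar below are illustrative, not from the commit; it assumes global_llama() has already run and a model is loaded):

// Hedged sketch: calling the updated wrapper with a grammar.
// Assumes `llama` is the global installed by global_llama() and load_model() has completed.
const tinyGrammar = String.raw`root ::= [a-z]+`; // placeholder GBNF grammar
llama.prompt(
  "Write one lowercase word:",
  tinyGrammar,                                           // new optional second argument; omit to pass no grammar
  (response) => { console.log(response); return true; }  // mirrors the default callback in global_llama.ts
);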
62 changes: 57 additions & 5 deletions dev.ts
@@ -4,14 +4,66 @@ import { global_llama } from "./bunny/example_init_functions/global_llama";
function init(path: string) {
global_llama(path);
if (!llama._model) {
llama.load_model("llama --log-disable --model models/model.gguf --ctx-size 512")
llama.load_model(`llama --log-disable --model models/model.gguf --ctx-size 2048 `)
}

const grammar = String.raw`root ::= (declaration)*
declaration ::= dataType identifier "(" parameter? ")" "{" statement* "}"
dataType ::= "int" ws | "float" ws | "char" ws
identifier ::= [a-zA-Z_] [a-zA-Z_0-9]*
parameter ::= dataType identifier
statement ::=
( dataType identifier ws "=" ws expression ";" ) |
( identifier ws "=" ws expression ";" ) |
( identifier ws "(" argList? ")" ";" ) |
( "return" ws expression ";" ) |
( "while" "(" condition ")" "{" statement* "}" ) |
( "for" "(" forInit ";" ws condition ";" ws forUpdate ")" "{" statement* "}" ) |
( "if" "(" condition ")" "{" statement* "}" ("else" "{" statement* "}")? ) |
( singleLineComment ) |
( multiLineComment )
forInit ::= dataType identifier ws "=" ws expression | identifier ws "=" ws expression
forUpdate ::= identifier ws "=" ws expression
condition ::= expression relationOperator expression
relationOperator ::= ("<=" | "<" | "==" | "!=" | ">=" | ">")
expression ::= term (("+" | "-") term)*
term ::= factor(("*" | "/") factor)*
factor ::= identifier | number | unaryTerm | funcCall | parenExpression
unaryTerm ::= "-" factor
funcCall ::= identifier "(" argList? ")"
parenExpression ::= "(" ws expression ws ")"
argList ::= expression ("," ws expression)*
number ::= [0-9]+
singleLineComment ::= "//" [^\n]* "\n"
multiLineComment ::= "/*" ( [^*] | ("*" [^/]) )* "*/"
ws ::= ([ \t\n]+)
`
console.log(llama._model)
llama.prompt("Hello this is a test")
console.log(llama.prompt("Hello this is a test"))
llama.prompt("Hello this is a test2")
llama.prompt(String.raw`### System Prompt
You are an intelligent programming assistant.
### User Message
Implement a linked list in C
### Assistant
${grammar}
...blablabla here is the code:
hello_world(`, grammar)
}
bunny_hmr(init, "../llama.cpp/api-llama.so");
bunny_hmr(init, "../llama.cpp/api-llama.so");

// This is the answer:
//- (District name: [a - z] +.$property value: [a - z 0 - 9,] +\n) +
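The trailing comment above sketches the output shape the author was after (repeated "District name: … property value: …" lines). A hedged sketch of how that pattern could be written as a small GBNF grammar and passed through the new second argument (grammar and prompt are illustrative, not from the commit):

// Hedged sketch: constrain output to "District name: … property value: …" lines via a tiny GBNF grammar.
const listGrammar = String.raw`root ::= line+
line ::= "District name: " [a-z]+ ". property value: " [a-z0-9,]+ "\n"
`;
llama.prompt(
  "List three districts and their property values:",
  listGrammar,
  (response) => { console.log(response); return true; }
);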
