Here's the base component. It assumes that there's one conversation
per page, and it keeps the query and response as attributes. When a
new one is created, it passes the previous completions to the
backing model to give it some memory.
generate-response.js
:
import { Ollama } from "@langchain/community/llms/ollama";
import { ChatOpenAI } from "@langchain/openai";
import {
ChatPromptTemplate,
} from "@langchain/core/prompts";
import { StringOutputParser } from "@langchain/core/output_parsers"
import { marked } from 'marked';
/**
 * <generate-response> — one query/response exchange per element.
 *
 * The query, model, backend ("llm"), and API key are read from attributes.
 * When the element connects it rebuilds the conversation history from every
 * other <generate-response> already on the page, sends it plus this
 * element's query to the model, and streams the answer into the DOM.
 */
class GenerateResponse extends HTMLElement {
	connectedCallback() {
		// All state is seeded from attributes so the host page can
		// configure the component declaratively.
		this.state = {
			llm: this.getAttribute( 'llm' ) || 'ollama',
			api_key: this.getAttribute( 'api_key' ),
			status: "",
			model: this.getAttribute( "model" ) || "mistral",
			response: "",
			query: this.getAttribute( "query" )
		}
		this.doQuery();
		this.render();
	}
	// ChatPromptTemplate treats "{" and "}" as template-variable
	// delimiters, so braces in user text must be doubled or
	// fromMessages() will throw (or substitute unexpected values)
	// whenever a query or prior response contains a brace.
	escapeBraces( text ) {
		return text.replaceAll( "{", "{{" ).replaceAll( "}", "}}" );
	}
	// Minimal HTML escaping for text interpolated into innerHTML.
	escapeHtml( text ) {
		return text
			.replaceAll( "&", "&amp;" )
			.replaceAll( "<", "&lt;" )
			.replaceAll( ">", "&gt;" );
	}
	async doQuery() {
		const model = this.state.model;
		this.state.status = `generating response from ${model}`;
		this.render();
		// Default to a local Ollama model; switch to OpenAI on request.
		let chatModel = new Ollama( { model } );
		if( this.state.llm === 'openAI' ) {
			chatModel = new ChatOpenAI( { apiKey: this.state.api_key } );
		}
		const messages = [
			[
				"system",
				"You are a helpful AI assistant"
			]
		];
		// Rebuild the conversation from every exchange on the page.
		// This element is included in the query, so its own query becomes
		// the final "user" message. Guard with ?. because an element's
		// connectedCallback may not have run yet, leaving state undefined.
		const exchange = document.querySelectorAll( "generate-response" );
		for( const m of exchange ) {
			if( m.state?.query ) {
				messages.push( [ "user", this.escapeBraces( m.state.query ) ] );
			}
			if( m.state?.response ) {
				messages.push( [ "ai", this.escapeBraces( m.state.response ) ] );
			}
		}
		const prompt = ChatPromptTemplate.fromMessages( messages );
		const outputParser = new StringOutputParser();
		const llmChain = prompt.pipe( chatModel ).pipe( outputParser );
		try {
			// NOTE: the template has no {input} placeholder; the query
			// reaches the model via the history loop above. The variable
			// is passed anyway for backward compatibility.
			const answer = await llmChain.stream( {
				input: this.state.query
			} );
			// Append each streamed chunk and re-render incrementally.
			for await ( const chunk of answer ) {
				this.state.response = this.state.response + chunk;
				this.render();
			}
			this.state.status = "";
		} catch( err ) {
			// Surface the failure instead of leaving the indeterminate
			// progress bar spinning forever.
			this.state.status = `error: ${err.message}`;
		}
		this.render();
	}
	render() {
		// The query comes straight from an attribute — escape it before
		// interpolating into innerHTML so markup in it can't execute.
		const safeQuery = this.state.query ? this.escapeHtml( this.state.query ) : "";
		let h = "";
		// Utility classes belong in class="…"; as bare attributes
		// (the original form) Tailwind never applies them.
		h += `<h2 class="font-header text-2xl">${safeQuery}</h2>`;
		if( this.state.response === "" ) {
			h += `<sl-progress-bar indeterminate class="py-2"></sl-progress-bar>`;
		}
		if( this.state.status !== "" ) {
			h += `<p>${this.state.status}</p>`;
		}
		if( this.state.response !== "" ) {
			h += `<div>`;
			// Responses are Markdown; marked renders them to HTML.
			h += marked.parse( this.state.response );
			h += `</div>`;
		}
		this.innerHTML = h;
	}
}
customElements.define("generate-response", GenerateResponse );