Skip to content

Commit 7bb9207

Browse files
authored
Merge pull request #175 from Cloud-Code-AI/174-change-the-default-output-format-to-be-compatible-with-openai
Made the default response OpenAI-compatible
2 parents e5d2a2e + 8706279 commit 7bb9207

File tree

10 files changed

+24
-19
lines changed

10 files changed

+24
-19
lines changed

README.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -79,7 +79,7 @@ await browserAI.loadModel('llama-3.2-1b-instruct', {
7979

8080
// Generate text
8181
const response = await browserAI.generateText('Hello, how are you?');
82-
console.log(response);
82+
console.log(response.choices[0].message.content);
8383
```
8484

8585

docs/pages/advanced/structured_generation.mdx

Lines changed: 11 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -34,12 +34,17 @@ const response = await browserAI.generateText('List 3 popular colors', {
3434
});
3535

3636
// Returns:
37-
// {
38-
// "colors": [
39-
// { "name": "blue", "hex": "#0000FF" },
40-
// { "name": "red", "hex": "#FF0000" },
41-
// { "name": "green", "hex": "#00FF00" }
42-
// ]
37+
// { "choices": [
38+
// { "message": {
39+
// "content": {
40+
// "colors": [
41+
// { "name": "blue", "hex": "#0000FF" },
42+
// { "name": "red", "hex": "#FF0000" },
43+
// { "name": "green", "hex": "#00FF00" }
44+
// ]
45+
// }
46+
// }
// }
47+
// ]
4348
// }
4449
```
4550

docs/pages/advanced/text_generation_options.mdx

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -53,7 +53,7 @@ const response = await browserAI.generateText('Write a story about AI', {
5353
system_prompt: "You are a creative storyteller specialized in science fiction.",
5454
});
5555

56-
console.log('Generated Story:', response); // "In the year 2045..."
56+
console.log('Generated Story:', response.choices[0].message.content); // "In the year 2045..."
5757
```
5858

5959
## Structured Output Generation

docs/pages/getting_started/core_features_and_usage.mdx

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -22,7 +22,7 @@ async function generateText() {
2222

2323
// Generate a response
2424
const response = await browserAI.generateText('What is machine learning?');
25-
console.log(response); // "Machine learning is a branch of AI..."
25+
console.log(response.choices[0].message.content); // "Machine learning is a branch of AI..."
2626
}
2727
```
2828

docs/pages/getting_started/installation.mdx

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -42,7 +42,7 @@ async function quickStart() {
4242

4343
// Generate your first AI response!
4444
const response = await browserAI.generateText('Hello, BrowserAI!');
45-
console.log(response);
45+
console.log(response.choices[0].message.content);
4646
}
4747

4848
quickStart();

examples/audio-demo/src/components/ChatInterface.tsx

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -604,13 +604,13 @@ export default function ChatInterface() {
604604

605605
captureAnalytics('chat_response_generated', {
606606
inputLength: transcribedText?.length,
607-
responseLength: response?.toString().length,
607+
responseLength: (response as { choices: { message: { content: string } }[] }).choices[0]?.message?.content?.length,
608608
processingTimeMs: chatProcessingTime,
609609
memoryUsage: stats.memoryUsage,
610610
peakMemoryUsage: stats.peakMemoryUsage,
611611
});
612612

613-
const responseText = response?.toString() || 'No response';
613+
const responseText = (response as { choices: { message: { content: string } }[] }).choices[0]?.message?.content || 'No response';
614614
setMessages(prev => [...prev, { text: responseText, isUser: false }]);
615615
} catch (error) {
616616
console.error('Error generating response:', error);

examples/realtime-chat-demo/src/App.tsx

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -90,13 +90,13 @@ function App() {
9090
}
9191

9292
// Generate speech before showing the response
93-
const audioBuffer = await ttsAI.textToSpeech(response as string, {
93+
const audioBuffer = await ttsAI.textToSpeech((response as { choices: { message: { content: string } }[] }).choices[0]?.message?.content as string, {
9494
voice: voices[language as keyof typeof voices]
9595
});
9696

9797
// Update message with response but keep it hidden until speaking starts
9898
setMessages(prev => prev.map((msg, idx) =>
99-
idx === prev.length - 1 ? { ...msg, text: response as string, status: 'speaking' } : msg
99+
idx === prev.length - 1 ? { ...msg, text: (response as { choices: { message: { content: string } }[] }).choices[0]?.message?.content as string, status: 'speaking' } : msg
100100
));
101101

102102
// Play the audio
@@ -110,7 +110,7 @@ function App() {
110110
// Reset status when audio ends
111111
source.onended = () => {
112112
setMessages(prev => prev.map((msg, idx) =>
113-
idx === prev.length - 1 ? { ...msg, status: null } : msg
113+
idx === prev.length - 1 ? { ...msg, status: undefined } : msg
114114
));
115115
};
116116
});

examples/schema-llm/src/App.tsx

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -44,10 +44,10 @@ function App() {
4444

4545
// Format and display the output
4646
try {
47-
const jsonResult = JSON.parse(result as string)
47+
const jsonResult = JSON.parse((result as { choices: { message: { content: string } }[] }).choices[0]?.message?.content as string)
4848
setOutput(JSON.stringify(jsonResult, null, 2))
4949
} catch (e) {
50-
setOutput(result as string)
50+
setOutput((result as { choices: { message: { content: string } }[] }).choices[0]?.message?.content as string)
5151
}
5252
} catch (error) {
5353
setOutput(`Error: ${error instanceof Error ? error.message : 'An unknown error occurred'}`)

package.json

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
{
22
"name": "@browserai/browserai",
3-
"version": "1.0.37",
3+
"version": "2.0.0",
44
"private": false,
55
"description": "A library for running AI models directly in the browser",
66
"main": "dist/index.js",

src/engines/mlc-engine-wrapper.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -764,7 +764,7 @@ export class MLCEngineWrapper {
764764
}
765765

766766
const result = await this.mlcEngine.chat.completions.create(finalOptions);
767-
return result.choices[0].message.content;
767+
return result;
768768
}
769769

770770
async embed(input: string, options: any = {}) {

0 commit comments

Comments
 (0)