chore(examples): tighten up the openai chat example a bit (#519)
Use an atypical (for this repo) 2-space indent to help avoid fighting
80-column reflow, for use in a blog post where appearance matters
a little bit more.
trentm authored Jan 10, 2025
1 parent a82250e commit 6b0bfda
Showing 4 changed files with 61 additions and 51 deletions.
4 changes: 4 additions & 0 deletions examples/openai/.editorconfig
@@ -0,0 +1,4 @@
+# Use 2-space indent for these examples to help tighten them
+# up in a blog post. Typically this repo is using 4-space.
+[*.js]
+indent_size = 2
10 changes: 10 additions & 0 deletions examples/openai/.prettierrc
@@ -0,0 +1,10 @@
+{
+  "printWidth": 80,
+  "useTabs": false,
+  "semi": true,
+  "trailingComma": "es5",
+
+  "bracketSpacing": false,
+  "singleQuote": true,
+  "tabWidth": 2
+}
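A standard Prettier config like this is picked up automatically when formatting; assuming Prettier is available in the repo, something like `npx prettier --write examples/openai` would apply it. The exact invocation is not part of this commit.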
26 changes: 11 additions & 15 deletions examples/openai/chat.js
@@ -22,21 +22,17 @@ const {OpenAI} = require('openai');
 let chatModel = process.env.CHAT_MODEL ?? 'gpt-4o-mini';
 
 async function main() {
-    const client = new OpenAI();
-
-    const messages = [
-        {
-            role: 'user',
-            content:
-                'Answer in up to 3 words: Which ocean contains Bouvet Island?',
-        },
-    ];
-
-    const chatCompletion = await client.chat.completions.create({
-        model: chatModel,
-        messages: messages,
-    });
-    console.log(chatCompletion.choices[0].message.content);
+  const client = new OpenAI();
+  const completion = await client.chat.completions.create({
+    model: chatModel,
+    messages: [
+      {
+        role: 'user',
+        content: 'Answer in up to 3 words: Which ocean contains Bouvet Island?',
+      },
+    ],
+  });
+  console.log(completion.choices[0].message.content);
 }
 
 main();
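For reference, this example is an ordinary Node.js script; assuming `OPENAI_API_KEY` is exported, it would be run with `node chat.js`, and `CHAT_MODEL` can override the default `gpt-4o-mini`. The run instructions themselves are not part of this diff.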
72 changes: 36 additions & 36 deletions examples/openai/embeddings.js
@@ -23,51 +23,51 @@ const {dot, norm} = require('mathjs');
 let embeddingsModel = process.env.EMBEDDINGS_MODEL ?? 'text-embedding-3-small';
 
 async function main() {
-    const client = new OpenAI();
+  const client = new OpenAI();
 
-    const products = [
-        "Search: Ingest your data, and explore Elastic's machine learning and retrieval augmented generation (RAG) capabilities.",
-        'Observability: Unify your logs, metrics, traces, and profiling at scale in a single platform.',
-        'Security: Protect, investigate, and respond to cyber threats with AI-driven security analytics.',
-        'Elasticsearch: Distributed, RESTful search and analytics.',
-        'Kibana: Visualize your data. Navigate the Stack.',
-        'Beats: Collect, parse, and ship in a lightweight fashion.',
-        'Connectors: Connect popular databases, file systems, collaboration tools, and more.',
-        'Logstash: Ingest, transform, enrich, and output.',
-    ];
+  const products = [
+    "Search: Ingest your data, and explore Elastic's machine learning and retrieval augmented generation (RAG) capabilities.",
+    'Observability: Unify your logs, metrics, traces, and profiling at scale in a single platform.',
+    'Security: Protect, investigate, and respond to cyber threats with AI-driven security analytics.',
+    'Elasticsearch: Distributed, RESTful search and analytics.',
+    'Kibana: Visualize your data. Navigate the Stack.',
+    'Beats: Collect, parse, and ship in a lightweight fashion.',
+    'Connectors: Connect popular databases, file systems, collaboration tools, and more.',
+    'Logstash: Ingest, transform, enrich, and output.',
+  ];
 
-    // Generate embeddings for each product. Keep them in an array instead of a vector DB.
-    const productEmbeddings = [];
-    for (const product of products) {
-        productEmbeddings.push(await createEmbedding(client, product));
-    }
+  // Generate embeddings for each product. Keep them in an array instead of a vector DB.
+  const productEmbeddings = [];
+  for (const product of products) {
+    productEmbeddings.push(await createEmbedding(client, product));
+  }
 
-    const queryEmbedding = await createEmbedding(
-        client,
-        'What can help me connect to a database?'
-    );
+  const queryEmbedding = await createEmbedding(
+    client,
+    'What can help me connect to a database?'
+  );
 
-    // Calculate cosine similarity between the query and document embeddings
-    const similarities = productEmbeddings.map((productEmbedding) => {
-        return (
-            dot(queryEmbedding, productEmbedding) /
-            (norm(queryEmbedding) * norm(productEmbedding))
-        );
-    });
+  // Calculate cosine similarity between the query and document embeddings
+  const similarities = productEmbeddings.map((productEmbedding) => {
+    return (
+      dot(queryEmbedding, productEmbedding) /
+      (norm(queryEmbedding) * norm(productEmbedding))
+    );
+  });
 
-    // Get the index of the most similar document
-    const mostSimilarIndex = similarities.indexOf(Math.max(...similarities));
+  // Get the index of the most similar document
+  const mostSimilarIndex = similarities.indexOf(Math.max(...similarities));
 
-    console.log(products[mostSimilarIndex]);
+  console.log(products[mostSimilarIndex]);
 }
 
 async function createEmbedding(client, text) {
-    const response = await client.embeddings.create({
-        input: [text],
-        model: embeddingsModel,
-        encoding_format: 'float',
-    });
-    return response.data[0].embedding;
+  const response = await client.embeddings.create({
+    input: [text],
+    model: embeddingsModel,
+    encoding_format: 'float',
+  });
+  return response.data[0].embedding;
 }
 
 main();
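The similarity step in main() above relies on mathjs's `dot` and `norm`. As an aside, the same cosine similarity (dot(a, b) / (|a| * |b|)) can be computed in plain JavaScript; the sketch below is a hypothetical, dependency-free equivalent and is not part of this commit:

// Hypothetical helper equivalent to the mathjs-based expression above:
// cosine similarity = dot(a, b) / (norm(a) * norm(b)).
function cosineSimilarity(a, b) {
  let dotProduct = 0;
  let normA = 0;
  let normB = 0;
  for (let i = 0; i < a.length; i++) {
    dotProduct += a[i] * b[i];
    normA += a[i] * a[i];
    normB += b[i] * b[i];
  }
  return dotProduct / (Math.sqrt(normA) * Math.sqrt(normB));
}

Usage would mirror the mathjs version, e.g. `cosineSimilarity(queryEmbedding, productEmbedding)`.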
