// 1. Set up your environment variables
const LAVA_FORWARD_TOKEN = process.env.LAVA_FORWARD_TOKEN;
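// (Optional sketch) Fail fast if the token is missing rather than sending an
// unauthenticated request. Assumes a Node.js runtime with this env var configured.
if (!LAVA_FORWARD_TOKEN) {
  throw new Error('LAVA_FORWARD_TOKEN environment variable is not set');
}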
// 2. Define the Together AI endpoint
const TOGETHER_ENDPOINT = 'https://api.together.xyz/v1/chat/completions';
// 3. Make the request through Lava
const response = await fetch(
  `https://api.lavapayments.com/v1/forward?u=${encodeURIComponent(TOGETHER_ENDPOINT)}`,
  {
    method: 'POST',
    headers: {
      'Authorization': `Bearer ${LAVA_FORWARD_TOKEN}`,
      'Content-Type': 'application/json'
    },
    body: JSON.stringify({
      model: 'meta-llama/Meta-Llama-3.3-70B-Instruct-Turbo',
      messages: [
        {
          role: 'user',
          content: 'Explain the benefits of open-source AI.'
        }
      ],
      temperature: 0.7,
      max_tokens: 512
    })
  }
);
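// (Optional sketch) Surface HTTP-level failures before parsing the body.
// The error message format here is illustrative, not Lava's documented shape.
if (!response.ok) {
  const errorText = await response.text();
  throw new Error(`Forward request failed (${response.status}): ${errorText}`);
}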
// 4. Parse response and extract usage
const data = await response.json();
console.log('Response:', data.choices[0].message.content);
// 5. Track usage (from response body)
const usage = data.usage;
console.log('Tokens used:', usage.total_tokens);
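// The usage object follows the OpenAI-compatible shape, so a prompt/completion
// breakdown should also be available (field names assumed, not Lava-specific).
console.log('Prompt tokens:', usage.prompt_tokens);
console.log('Completion tokens:', usage.completion_tokens);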
// 6. Get Lava request ID (from headers)
const requestId = response.headers.get('x-lava-request-id');
console.log('Lava Request ID:', requestId);