Integration
Seamless Integration of User Feedback & Voting
In your GPTBoost Request Log, you'll instantly spot the requests for which feedback was collected: a 👍 👎 ❤️ symbol shows the rating, and a 🗨️ bubble appears if the user left a comment.
All of the feedback information is also present in the JSON object of the logged request.
Additionally, you can filter and export requests based on their feedback rating.
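For illustration only, a logged request with feedback attached might surface extra fields on the exported JSON entry, roughly like the sketch below. The keys shown here are placeholders, not the exact GPTBoost log schema:

{
  "id": "chatcmpl-...",
  "model": "gpt-3.5-turbo",
  "feedback": {
    "rating": "positive",
    "comment": "Great answer",
    "tags": ["movies"]
  }
}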
POST https://api.gptboost.io/v1/feedback/
Submits user feedback for a specific completion or message to the GPTBoost API.

Request URL (String, required): https://api.gptboost.io/v1/feedback/, the GPTBoost endpoint to submit feedback

Headers
Content-Type (String, required): application/json

Request body
message_id (String, required): The completion id of the OpenAI API response
rating (String, required): "positive", "negative", or "amazing" are the supported values
comment (String, optional): Additional comment that the user has added to the vote
tags (Array, optional): Any meaningful categories for the query or rating
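Only message_id and rating are required; comment and tags can be omitted. A minimal request body therefore looks like the following, where the message_id value is just a placeholder for the id returned by the OpenAI API:

{
  "message_id": "chatcmpl-abc123",
  "rating": "positive"
}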
# This example is for v1+ of the openai package: https://pypi.org/project/openai/
import os

import requests
from openai import OpenAI

client = OpenAI(
    base_url="https://turbo.gptboost.io/v1",
    api_key=os.getenv("OPENAI_API_KEY"),
)

# Make the request to the OpenAI API through the GPTBoost proxy
response = client.chat.completions.create(
    model="gpt-3.5-turbo",
    messages=[
        {"role": "user", "content": "Tell me an interesting fact about the Big Apple"},
    ],
)

# The completion id links the feedback to this specific request
completion_id = response.id

# Logic to collect the user feedback
def collect_feedback(completion_id: str, rating: str, comment="", tags=[]):
    feedback_url = "https://api.gptboost.io/v1/feedback/"
    data = {
        "message_id": completion_id,
        "rating": rating,
        "tags": tags,
        "comment": comment,
    }
    # Make the POST request to GPTBoost
    response = requests.post(feedback_url, json=data)
    if response.status_code == 200:
        print("Feedback submitted successfully.")
    else:
        print(f"Failed to submit feedback. Status code: {response.status_code}")
        print(response.text)

# Call the collect_feedback function
collect_feedback(completion_id=completion_id, rating="positive")
curl --request POST --url 'https://api.gptboost.io/v1/feedback/' \
  --header 'Accept: application/json' \
  --header 'Content-Type: application/json' \
  --data '{
    "message_id": "response_id",
    "rating": "positive",
    "tags": [
      "string"
    ],
    "comment": "test"
  }'
// This code is for v4+ of the openai package: npmjs.com/package/openai
import fetch from 'node-fetch';
import OpenAI from 'openai';

const openai = new OpenAI({
  apiKey: process.env.OPENAI_API_KEY,
  baseURL: "https://turbo.gptboost.io/v1",
});

async function ask_gpt() {
  const response = await openai.chat.completions.create({
    model: "gpt-3.5-turbo-16k",
    messages: [{ role: "user", content: "Tell me an interesting action movie" }]
  });
  // The completion id links the feedback to this specific request
  let completion_id = response.id;
  await collect_feedback(completion_id, "positive", ["movies"], "Awesome");
}

async function collect_feedback(completion_id, rating, tags = [], comment = "") {
  const feedbackUrl = 'https://api.gptboost.io/v1/feedback/';
  const data = {
    message_id: completion_id,
    rating: rating,
    tags: tags,
    comment: comment,
  };
  try {
    // Make the POST request to GPTBoost
    const response = await fetch(feedbackUrl, {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json'
      },
      body: JSON.stringify(data),
    });
    if (response.ok) {
      const result = await response.json();
      console.log('Feedback submitted successfully:', result);
    } else {
      console.error('Failed to submit feedback', response.status);
      console.error(response.statusText);
    }
  } catch (error) {
    console.error('Failed to submit feedback:', error);
  }
}

ask_gpt();
// This code is for v4+ of the openai package: npmjs.com/package/openai
import OpenAI from 'openai';
import { OpenAIStream, StreamingTextResponse } from 'ai';

// Can be 'nodejs', but Vercel recommends using 'edge'
export const runtime = 'edge';

const openai = new OpenAI({
  apiKey: process.env.OPENAI_API_KEY,
  baseURL: "https://turbo.gptboost.io/v1",
});

// This method must be named GET
export async function GET() {
  let completionId = null;

  // Make a streaming request to OpenAI's API
  const response = await openai.chat.completions.create({
    model: 'gpt-3.5-turbo',
    stream: true,
    messages: [{ role: 'user', content: 'Say this is a test.' }],
  });

  // Tee the stream so reading the completion id does not exhaust the stream sent to the client
  const [logStream, responseStream] = response.tee();

  // Get the completionId from the first chunk that carries one
  for await (const part of logStream) {
    if (part.id) {
      completionId = part.id;
      break;
    }
  }

  // Expose the completionId in a response header so the client can submit feedback later
  const headers = {
    headers: { 'completionId': completionId?.toString() || '' }
  };

  // Convert the teed response into a friendly text-stream
  const stream = OpenAIStream(responseStream);

  // Respond with the stream and headers
  return new StreamingTextResponse(stream, headers);
}
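The route above only exposes the completionId in a response header; the feedback call itself happens on the client once the user votes. Below is a minimal client-side sketch, assuming the GET handler is served at /api/chat (a hypothetical path, adjust it to your routing):

// Minimal client-side sketch; /api/chat is a hypothetical path for the GET handler above
async function askAndVote() {
  const response = await fetch('/api/chat');

  // Read the completion id exposed by the streaming route
  const completionId = response.headers.get('completionId');

  // Consume the streamed text (render it to the user as it arrives in a real app)
  const text = await response.text();
  console.log(text);

  // Later, when the user votes, submit the feedback to GPTBoost
  await fetch('https://api.gptboost.io/v1/feedback/', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      message_id: completionId,
      rating: 'positive',
      comment: 'Great answer',
      tags: ['streaming'],
    }),
  });
}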
# This example is for v1+ of the openai package: https://pypi.org/project/openai/
# Collecting feedback on a streamed response.
import os

import requests
from openai import OpenAI

client = OpenAI(
    base_url="https://turbo.gptboost.io/v1",
    api_key=os.getenv("OPENAI_API_KEY"),
)

completion_id = None

# Make a streaming request to the OpenAI API
for chunk in client.chat.completions.create(
    model="gpt-3.5-turbo",
    messages=[{
        "role": "user",
        "content": "Tell me a joke",
    }],
    stream=True,
):
    content = chunk.choices[0].delta.content or ""
    print(content)
    # Every chunk carries the same completion id; capture it once
    if completion_id is None:
        completion_id = chunk.id

def collect_feedback(completion_id: str, rating: str, comment="", tags=[]):
    feedback_url = "https://api.gptboost.io/v1/feedback/"
    data = {
        "message_id": completion_id,
        "rating": rating,
        "tags": tags,
        "comment": comment,
    }
    response = requests.post(feedback_url, json=data)
    if response.status_code == 200:
        print("Feedback submitted successfully.")
    else:
        print(f"Failed to submit feedback. Status code: {response.status_code}")
        print(response.text)

# Call the collect_feedback function
collect_feedback(completion_id=completion_id, rating="positive")
// Collecting feedback on a streamed response.
// This code is for v4+ of the openai package: npmjs.com/package/openai
import fetch from 'node-fetch';
import OpenAI from 'openai';

const openai = new OpenAI({
  apiKey: process.env.OPENAI_API_KEY,
  baseURL: "https://turbo.gptboost.io/v1",
});

let completion_id = null;

async function generateStream(prompt) {
  const stream = await openai.chat.completions.create({
    model: "gpt-3.5-turbo",
    messages: [{ "role": "user", "content": prompt }],
    stream: true,
  });
  for await (const chunk of stream) {
    console.log(chunk.choices[0].delta.content);
    // Every chunk carries the same completion id; capture it once
    if (completion_id == null) {
      completion_id = chunk.id;
    }
  }
  await collect_feedback(completion_id, "positive", ["movies"], "Awesome");
}

async function collect_feedback(completion_id, rating, tags = [], comment = "") {
  const feedbackUrl = 'https://api.gptboost.io/v1/feedback/';
  const data = {
    message_id: completion_id,
    rating: rating,
    tags: tags,
    comment: comment,
  };
  try {
    // Make the POST request to GPTBoost
    const response = await fetch(feedbackUrl, {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json'
      },
      body: JSON.stringify(data),
    });
    if (response.ok) {
      const result = await response.json();
      console.log('Feedback submitted successfully:', result);
    } else {
      console.error('Failed to submit feedback', response.status);
      console.error(response.statusText);
    }
  } catch (error) {
    console.error('Failed to submit feedback:', error);
  }
}

generateStream("Recommend three good thriller movies");