added Stream to AI requests
nk-o committed Dec 9, 2024
1 parent 3995877 commit bb7b0b6
Showing 9 changed files with 619 additions and 128 deletions.
185 changes: 129 additions & 56 deletions classes/class-rest.php
@@ -108,28 +108,12 @@ public function update_settings( WP_REST_Request $req ) {
}

/**
* Send request to OpenAI.
*
* @param WP_REST_Request $req request object.
* Prepare messages for request.
*
* @return mixed
* @param string $request user request.
* @param string $context context.
*/
public function request_ai( WP_REST_Request $req ) {
$settings = get_option( 'mind_settings', array() );
$openai_key = $settings['openai_api_key'] ?? '';

$request = $req->get_param( 'request' ) ?? '';
$context = $req->get_param( 'context' ) ?? '';

if ( ! $openai_key ) {
return $this->error( 'no_openai_key_found', __( 'Provide OpenAI key in the plugin settings.', 'mind' ) );
}

if ( ! $request ) {
return $this->error( 'no_request', __( 'Provide request to receive AI response.', 'mind' ) );
}

// Messages.
public function prepare_messages( $request, $context ) {
$messages = [];

$messages[] = [
@@ -189,52 +173,75 @@ public function request_ai( WP_REST_Request $req ) {
),
];

$body = [
'model' => 'gpt-4o-mini',
'stream' => false,
'temperature' => 0.7,
'messages' => $messages,
];
return $messages;
}

// Make Request to OpenAI API.
$ai_request = wp_remote_post(
'https://api.openai.com/v1/chat/completions',
[
'headers' => [
'Authorization' => 'Bearer ' . $openai_key,
'Content-Type' => 'application/json',
],
'timeout' => 30,
'sslverify' => false,
'body' => wp_json_encode( $body ),
]
);
/**
* Send request to OpenAI.
*
* @param WP_REST_Request $req request object.
*
* @return mixed
*/
public function request_ai( WP_REST_Request $req ) {
// Set headers for streaming.
header( 'Content-Type: text/event-stream' );
header( 'Cache-Control: no-cache' );
header( 'Connection: keep-alive' );
// For Nginx.
header( 'X-Accel-Buffering: no' );

// Error.
if ( is_wp_error( $ai_request ) ) {
$response = $ai_request->get_error_message();
$settings = get_option( 'mind_settings', array() );
$openai_key = $settings['openai_api_key'] ?? '';

return $this->error( 'openai_request_error', $response );
} elseif ( wp_remote_retrieve_response_code( $ai_request ) !== 200 ) {
$response = json_decode( wp_remote_retrieve_body( $ai_request ), true );
$request = $req->get_param( 'request' ) ?? '';
$context = $req->get_param( 'context' ) ?? '';

if ( isset( $response['error']['message'] ) ) {
return $this->error( 'openai_request_error', $response['error']['message'] );
}
if ( ! $openai_key ) {
$this->send_stream_error( 'no_openai_key_found', __( 'Provide OpenAI key in the plugin settings.', 'mind' ) );
exit;
}

return $this->error( 'openai_request_error', __( 'OpenAI data failed to load.', 'mind' ) );
if ( ! $request ) {
$this->send_stream_error( 'no_request', __( 'Provide request to receive AI response.', 'mind' ) );
exit;
}

// Success.
$result = '';
$response = json_decode( wp_remote_retrieve_body( $ai_request ), true );
// Messages.
$messages = $this->prepare_messages( $request, $context );

$body = [
'model' => 'gpt-4o-mini',
'stream' => true,
'temperature' => 0.7,
'messages' => $messages,
];

// TODO: this is a limited part, which should be reworked.
if ( isset( $response['choices'][0]['message']['content'] ) ) {
$result = $response['choices'][0]['message']['content'];
// Initialize cURL.
// phpcs:disable
$ch = curl_init( 'https://api.openai.com/v1/chat/completions' );
curl_setopt( $ch, CURLOPT_POST, 1 );
curl_setopt( $ch, CURLOPT_RETURNTRANSFER, true );
curl_setopt( $ch, CURLOPT_HTTPHEADER, [
'Content-Type: application/json',
'Authorization: Bearer ' . $openai_key,
] );
curl_setopt( $ch, CURLOPT_POSTFIELDS, json_encode( $body ) );
curl_setopt( $ch, CURLOPT_WRITEFUNCTION, function ( $curl, $data ) {
$this->process_stream_chunk( $data );
return strlen( $data );
});

// Execute the request.
curl_exec( $ch );

if ( curl_errno( $ch ) ) {
$this->send_stream_error( 'curl_error', curl_error( $ch ) );
}

return $this->success( $result );
curl_close( $ch );
// phpcs:enable
exit;
}

/**
@@ -255,6 +262,72 @@ private function build_base_string( $base_uri, $method, $params ) {
return $method . '&' . rawurlencode( $base_uri ) . '&' . rawurlencode( implode( '&', $r ) );
}

/**
* Process streaming chunk from OpenAI
*
* @param string $chunk - chunk of data.
*/
private function process_stream_chunk( $chunk ) {
$lines = explode( "\n", $chunk );

foreach ( $lines as $line ) {
if ( strlen( trim( $line ) ) === 0 ) {
continue;
}

if ( strpos( $line, 'data: ' ) === 0 ) {
$json_data = trim( substr( $line, 6 ) );

if ( '[DONE]' === $json_data ) {
$this->send_stream_chunk( [ 'done' => true ] );
return;
}

try {
$data = json_decode( $json_data, true );

if ( isset( $data['choices'][0]['delta']['content'] ) ) {
// Send smaller chunks immediately.
$this->send_stream_chunk(
[
'content' => $data['choices'][0]['delta']['content'],
]
);
flush();
}
} catch ( Exception $e ) {
$this->send_stream_error( 'json_error', $e->getMessage() );
}
}
}
}

/**
* Send stream chunk
*
* @param array $data - data to send.
*/
private function send_stream_chunk( $data ) {
echo 'data: ' . wp_json_encode( $data ) . "\n\n";
flush();
}

/**
* Send stream error
*
* @param string $code - error code.
* @param string $message - error message.
*/
private function send_stream_error( $code, $message ) {
$this->send_stream_chunk(
[
'error' => true,
'code' => $code,
'message' => $message,
]
);
}

/**
* Success rest.
*
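For context on the wire format: request_ai() now responds with Server-Sent-Events-style chunks, each one a line of the form data: {"content":"..."} followed by a blank line, with {"done":true} at the end and {"error":true,"code":"...","message":"..."} on failure. Below is a minimal client-side sketch for consuming that stream. The REST route path and the nonce header are assumptions for illustration and are not part of this diff; the request/context parameters match the ones read by the endpoint.

	// Sketch: read the SSE-style stream emitted by request_ai() and pass each
	// content chunk to a callback. Route path and nonce handling are assumed.
	async function streamAIRequest(request, context, onChunk) {
		const res = await fetch('/wp-json/mind/v1/request_ai', {
			method: 'POST',
			headers: {
				'Content-Type': 'application/json',
				// 'X-WP-Nonce': window.wpApiSettings?.nonce, // If the route requires auth.
			},
			body: JSON.stringify({ request, context }),
		});

		const reader = res.body.getReader();
		const decoder = new TextDecoder();
		let buffer = '';

		for (;;) {
			const { done, value } = await reader.read();
			if (done) {
				break;
			}

			buffer += decoder.decode(value, { stream: true });

			// Events are separated by a blank line: "data: {...}\n\n".
			const events = buffer.split('\n\n');
			buffer = events.pop(); // Keep the incomplete tail for the next read.

			for (const event of events) {
				if (!event.startsWith('data: ')) {
					continue;
				}

				const payload = JSON.parse(event.slice(6));

				if (payload.error) {
					throw new Error(payload.message);
				}
				if (payload.done) {
					return;
				}
				if (payload.content) {
					onChunk(payload.content);
				}
			}
		}
	}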
70 changes: 70 additions & 0 deletions src/editor/popup/components/ai-response/index.js
@@ -0,0 +1,70 @@
/**
* Styles
*/
import './style.scss';

/**
* WordPress dependencies
*/
import { useRef, useEffect, RawHTML, memo } from '@wordpress/element';

const AIResponse = memo(
function AIResponse({ response, loading }) {
const responseRef = useRef();

useEffect(() => {
if (!responseRef.current) {
return;
}

const popupContent = responseRef.current.closest(
'.mind-popup-content'
);

if (!popupContent) {
return;
}

// Smooth scroll to bottom of response.
const { scrollHeight, clientHeight } = popupContent;

// Only auto-scroll for shorter contents.
const shouldScroll = scrollHeight - clientHeight < 1000;

if (shouldScroll) {
popupContent.scrollTo({
top: scrollHeight,
behavior: 'smooth',
});
}
}, [response]);

if (!response && !loading) {
return null;
}

return (
<div
ref={responseRef}
className="mind-popup-response"
style={{
opacity: loading ? 0.85 : 1,
}}
>
<RawHTML>{response}</RawHTML>
{loading && <div className="mind-popup-cursor" />}
</div>
);
},
(prevProps, nextProps) => {
// Custom memoization to prevent unnecessary rerenders.
return (
prevProps.response === nextProps.response &&
prevProps.loading === nextProps.loading
);
}
);

export default AIResponse;
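As a hypothetical usage sketch (not part of this commit), a parent component in the popup could accumulate streamed chunks into the response prop while loading stays true until the stream finishes. The import path, the component name, and the streamAIRequest() helper from the earlier sketch are assumptions.

	// Hypothetical parent: accumulate streamed chunks into state and render them.
	import { useState } from '@wordpress/element';
	import AIResponse from './components/ai-response'; // Path assumed.

	function PopupAIResult({ request, context }) {
		const [response, setResponse] = useState('');
		const [loading, setLoading] = useState(false);

		async function run() {
			setLoading(true);
			setResponse('');

			try {
				// streamAIRequest() is the consumption helper sketched earlier.
				await streamAIRequest(request, context, (chunk) => {
					setResponse((prev) => prev + chunk);
				});
			} finally {
				setLoading(false);
			}
		}

		return (
			<>
				<button type="button" onClick={run}>
					Ask AI
				</button>
				<AIResponse response={response} loading={loading} />
			</>
		);
	}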
36 changes: 36 additions & 0 deletions src/editor/popup/components/ai-response/style.scss
@@ -0,0 +1,36 @@
.mind-popup-response {
/* GPU acceleration */
transform: translateZ(0);
will-change: transform;

/* Optimize repaints */
contain: content;

/* Smooth typing cursor */
.mind-popup-cursor {
display: inline-block;
width: 1.5px;
height: 1em;
background: currentColor;
margin-left: 2px;
animation: mind-cursor-blink 1s step-end infinite;
}
}

@keyframes mind-cursor-blink {
0%,
100% {
opacity: 1;
}
50% {
opacity: 0;
}
}

/* Optimize for mobile */
@media (max-width: 768px) {
.mind-popup-response {
contain: strict;
height: 100%;
}
}