Skip to content

Commit

Permalink
feat(law,openai): AI요약 결과를 stream으로 전달받을 수 있도록 함
Browse files Browse the repository at this point in the history
  • Loading branch information
IsthisLee committed Aug 15, 2023
1 parent 475dc94 commit 979c66e
Show file tree
Hide file tree
Showing 3 changed files with 94 additions and 4 deletions.
40 changes: 36 additions & 4 deletions src/apis/laws/laws.controller.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
import { Controller, Param, Query, Get, ParseEnumPipe, Post, Body, ParseIntPipe } from '@nestjs/common';
import { Controller, Param, Query, Get, ParseEnumPipe, Post, Body, ParseIntPipe, Res } from '@nestjs/common';
import { LawsService } from './laws.service';
import { ApiOperation, ApiTags, ApiParam, ApiBody, ApiResponse, ApiTooManyRequestsResponse } from '@nestjs/swagger';
import { getLawListDto } from './dtos/get-law.dto';
Expand All @@ -11,11 +11,13 @@ import {
LawSummaryResponseData,
} from 'src/common/types';
import { Throttle } from '@nestjs/throttler';
import { Response } from 'express';
import { OpenaiService } from 'src/shared/services/openai.service';

@Controller('laws')
@ApiTags('Laws')
export class LawsController {
constructor(private readonly lawsService: LawsService) {}
constructor(private readonly lawsService: LawsService, private readonly openaiService: OpenaiService) {}

@Get(':type')
@ApiOperation({ summary: '판례/법령 목록 조회' })
Expand Down Expand Up @@ -57,8 +59,7 @@ export class LawsController {
summary: '판례/법령 요약 조회',
description: `
'더 쉽게 해석'을 위한 요청을 보내는 경우, 마지막에 제공받았던 요약문을 body의 recentAssistMsg에 담아서 요청합니다.\n
'더 쉽게 해석' 요청인 경우, summary만 제공됩니다.
`,
'더 쉽게 해석' 요청인 경우, summary만 제공됩니다.`,
})
@Throttle(4, 60)
@ApiParam({
Expand Down Expand Up @@ -188,4 +189,35 @@ export class LawsController {
): Promise<LawSummaryResponseData> {
return this.lawsService.createLawSummary(type, id, requestSummaryDto.recentSummaryMsg);
}

@Post(':type/:id/summary-stream')
@ApiOperation({
summary: '판례/법령 요약 조회 - stream version',
description: `
'더 쉽게 해석'을 위한 요청을 보내는 경우, 마지막에 제공받았던 요약문을 body의 recentAssistMsg에 담아서 요청합니다.\n\n
stream 버전은 요청에 대한 응답이 ReadableStream으로 제공됩니다.(요약 제목, 키워드 제외한 본문 요약 내용만 제공됩니다.)`,
})
@ApiParam({
name: 'type',
enum: SearchTabEnum,
description: 'prec: 판례, statute: 법령',
})
@ApiParam({
name: 'id',
description: '판례 또는 법령의 ID(판례일련번호/법령ID)',
})
async createLawStreamSummary(
@Res() res: Response,
@Param('type', new ParseEnumPipe(SearchTabEnum))
type: SearchTabEnum,
@Param('id', new ParseIntPipe()) id: number,
@Body() requestSummaryDto?: RequestSummaryDto,
) {
const lawSummaryReadableStream: ReadableStream<Uint8Array> = await this.lawsService.createLawStreamSummary(
type,
id,
requestSummaryDto.recentSummaryMsg,
);
return this.openaiService.sendResWithReadableStream(res, lawSummaryReadableStream);
}
}
15 changes: 15 additions & 0 deletions src/apis/laws/laws.service.ts
Original file line number Diff line number Diff line change
Expand Up @@ -272,6 +272,21 @@ export class LawsService {
};
}

async createLawStreamSummary(
type: SearchTabEnum,
id: number,
recentSummaryMsg: string,
): Promise<ReadableStream<any>> {
const lawDetail = await this.getLawDetail(type, id);

const summaryReqMsgs = await this.generateSummaryReqMessasges(lawDetail, recentSummaryMsg, {
onlySummary: true,
});
const summaryReadableStream = await this.openAiService.createAIStramChatCompletion(summaryReqMsgs);

return summaryReadableStream;
}

private async fetchTitleAndKeywords(
summaryContent: StatuteDetailData | PrecDetailData,
retryCount = 2,
Expand Down
43 changes: 43 additions & 0 deletions src/shared/services/openai.service.ts
Original file line number Diff line number Diff line change
@@ -1,8 +1,10 @@
import { BadRequestException, Injectable } from '@nestjs/common';
import { OpenAI } from 'openai';
import { Stream } from 'openai/streaming';
import { ConfigService } from '@nestjs/config';
import { encoding_for_model } from '@dqbd/tiktoken';
import { TiktokenModel } from '@dqbd/tiktoken';
import { Response } from 'express';

@Injectable()
export class OpenaiService {
Expand All @@ -27,6 +29,47 @@ export class OpenaiService {
return chatCompletion;
}

/**
 * Requests a streaming chat completion from OpenAI and exposes it as a
 * ReadableStream of UTF-8 encoded bytes (each chunk is the JSON of one
 * ChatCompletionChunk).
 *
 * @param promptMessages - chat messages forming the prompt
 * @returns byte stream that closes when the OpenAI stream is exhausted
 */
async createAIStramChatCompletion(
  promptMessages: Array<OpenAI.Chat.Completions.ChatCompletionMessage>,
): Promise<ReadableStream<Uint8Array>> {
  const requestData = this.generateOpenAIChatRequestData(promptMessages);
  const stream: Stream<OpenAI.Chat.Completions.ChatCompletionChunk> = await this.openai.chat.completions.create({
    ...requestData,
    stream: true,
  });

  const encoder = new TextEncoder();

  return new ReadableStream<Uint8Array>({
    async start(controller) {
      try {
        for await (const chunk of stream) {
          const str = typeof chunk === 'string' ? chunk : JSON.stringify(chunk);
          controller.enqueue(encoder.encode(str));
        }
        // BUG FIX: without close() the consumer's reader.read() never
        // resolves with done=true, so the HTTP response never ends.
        controller.close();
      } catch (err) {
        // Propagate upstream failures instead of leaving the stream hanging.
        controller.error(err);
      }
    },
  });
}

async sendResWithReadableStream(res: Response, readableStream: ReadableStream<Uint8Array>) {
const reader = readableStream.getReader();

const readAllChunks = async () => {
const { value, done } = await reader.read();
if (done) {
res.end();
return;
}

res.write(value); // 클라이언트에 chunk data를 write
readAllChunks(); // 스트림이 소진될 때까지 재귀 호출
};

readAllChunks();
}

private generateOpenAIChatRequestData(promptMessages: Array<OpenAI.Chat.Completions.ChatCompletionMessage>): {
model: TiktokenModel | 'gpt-3.5-turbo-16k';
messages: OpenAI.Chat.Completions.ChatCompletionMessage[];
Expand Down

0 comments on commit 979c66e

Please sign in to comment.