import { Dispatch, SetStateAction, useCallback, useEffect, useState } from 'react';
import { useAsync } from 'react-use';
import { Subscription } from 'rxjs';

import { llm } from '@grafana/llm';
import { createMonitoringLogger } from '@grafana/runtime';

import { useAppNotification } from 'app/core/copy/appNotification';

import { isLLMPluginEnabled, DEFAULT_LLM_MODEL } from './utils';

// Declared instead of imported from utils to make this hook modular
// Ideally we will want to move the hook itself to a different scope later.
type Message = llm.Message;

const genAILogger = createMonitoringLogger('features.dashboards.genai');

export enum StreamStatus {
  IDLE = 'idle',
  GENERATING = 'generating',
  COMPLETED = 'completed',
}

export const TIMEOUT = 10000;

interface Options {
  model: string;
  temperature: number;
  onResponse?: (response: string) => void;
}

const defaultOptions = {
  model: DEFAULT_LLM_MODEL,
  temperature: 1,
};

interface UseLLMStreamResponse {
  setMessages: Dispatch<SetStateAction<Message[]>>;
  stopGeneration: () => void;
  messages: Message[];
  reply: string;
  streamStatus: StreamStatus;
  error?: Error;
  value?: {
    enabled?: boolean | undefined;
    stream?: Subscription;
  };
}

// TODO: Add tests
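/**
 * Hook that streams chat completions from the configured LLM.
 *
 * Generation starts when `setMessages` is called with a non-empty array; the accumulated
 * response is exposed through `reply` once the stream completes, and `streamStatus`
 * tracks the idle/generating/completed lifecycle.
 */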
export function useLLMStream({ model, temperature, onResponse }: Options = defaultOptions): UseLLMStreamResponse {
  // The messages array to send to the LLM, updated when the button is clicked.
  const [messages, setMessages] = useState<Message[]>([]);

  // The latest reply from the LLM.
  const [reply, setReply] = useState('');
  const [streamStatus, setStreamStatus] = useState<StreamStatus>(StreamStatus.IDLE);
  const [error, setError] = useState<Error>();
  const { error: notifyError } = useAppNotification();
  // Accumulate the response and only update the attached component's state when the stream is completed.
  let partialReply = '';

  const onError = useCallback(
    (e: Error) => {
      setStreamStatus(StreamStatus.IDLE);
      setMessages([]);
      setError(e);
      notifyError(
        'Failed to generate content using LLM',
        'Please try again or, if the problem persists, contact your organization admin.'
      );
      console.error(e);
      genAILogger.logError(e, { messages: JSON.stringify(messages), model, temperature: String(temperature) });
    },
    [messages, model, temperature, notifyError]
  );

  useEffect(() => {
    if (messages.length > 0) {
      setReply('');
    }
  }, [messages]);

  const { error: enabledError, value: enabled } = useAsync(
    async () => await isLLMPluginEnabled(),
    [isLLMPluginEnabled]
  );

  const { error: asyncError, value } = useAsync(async () => {
    if (!enabled || !messages.length) {
      return { enabled };
    }

    setStreamStatus(StreamStatus.GENERATING);
    setError(undefined);
    // Stream the completions. Each element is the next stream chunk.
    const stream = llm
      .streamChatCompletions({
        model,
        temperature,
        messages,
      })
      .pipe(
        // Accumulate the stream content into a stream of strings, where each
        // element contains the accumulated message so far.
        llm.accumulateContent()
        // The stream is just a regular Observable, so we can use standard rxjs
        // functionality to update state, e.g. recording when the stream
        // has completed.
        // The operator decision tree on the rxjs website is a useful resource:
        // https://rxjs.dev/operator-decision-tree
      );
    // Subscribe to the stream and update the state for each returned value.
    return {
      enabled,
      stream: stream.subscribe({
        next: (reply) => {
          partialReply = reply;
        },
        error: onError,
        complete: () => {
          setReply(partialReply);
          setStreamStatus(StreamStatus.COMPLETED);
          onResponse?.(partialReply);
          setMessages([]);
          setError(undefined);
        },
      }),
    };
  }, [messages, enabled]);

  // Unsubscribe from the stream when the component unmounts.
  useEffect(() => {
    return () => {
      value?.stream?.unsubscribe();
    };
  }, [value]);

  // Unsubscribe from the stream when the user stops the generation.
  const stopGeneration = useCallback(() => {
    value?.stream?.unsubscribe();
    setStreamStatus(StreamStatus.IDLE);
    setError(undefined);
    setMessages([]);
  }, [value]);

  // If the stream is generating and we haven't received a reply, it times out.
  useEffect(() => {
    let timeout: NodeJS.Timeout | undefined;
    if (streamStatus === StreamStatus.GENERATING && reply === '') {
      timeout = setTimeout(() => {
        onError(new Error(`LLM stream timed out after ${TIMEOUT}ms`));
      }, TIMEOUT);
    }

    return () => {
      clearTimeout(timeout);
    };
  }, [streamStatus, reply, onError]);

  if (asyncError || enabledError) {
    setError(asyncError || enabledError);
  }

  return {
    setMessages,
    stopGeneration,
    messages,
    reply,
    streamStatus,
    error,
    value,
  };
}
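
// Illustrative usage sketch (an assumption, not part of this module: the calling component,
// its setGeneratedTitle handler, and the prompt content below are hypothetical):
//
//   const { setMessages, reply, streamStatus, stopGeneration, error } = useLLMStream({
//     model: DEFAULT_LLM_MODEL,
//     temperature: 1,
//     onResponse: (response) => setGeneratedTitle(response),
//   });
//
//   // Kick off a generation by providing the messages to send to the LLM:
//   setMessages([
//     { role: 'system', content: 'You are an expert in writing Grafana dashboard titles.' },
//     { role: 'user', content: 'Suggest a title for a dashboard showing CPU usage.' },
//   ]);
//
//   // While streamStatus === StreamStatus.GENERATING the caller can offer stopGeneration();
//   // once it is COMPLETED, `reply` holds the full accumulated response.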