mirror of https://github.com/grafana/grafana.git
Remove @grafana/experimental in the last various places (#99875)
* Remove @grafana/experimental in the last various places
* Remove experimental from yarn.lock
* Fix import order
parent d699f023c2
commit 7c15d33304
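The changes below fall into three buckets: layout components such as Stack now come from @grafana/ui, query-editor building blocks (RawQuery, SQLEditor, OperationsEditorRow, ConnectionSettings, DataSourceDescription, convertLegacyAuthProps) now come from @grafana/plugin-ui, and the LLM client moves from llms in @grafana/experimental to openai in @grafana/llm. An import-only sketch of the pattern, using only names that appear in the hunks below:

// Before: everything was pulled from the deprecated package
// import { Stack, RawQuery, SQLEditor, OperationsEditorRow, llms } from '@grafana/experimental';

// After: the same names come from their new homes
import { Stack } from '@grafana/ui';
import { RawQuery, SQLEditor, OperationsEditorRow } from '@grafana/plugin-ui';
import { openai } from '@grafana/llm'; // replaces llms.openai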
@@ -267,6 +267,7 @@
     "@grafana/flamegraph": "workspace:*",
     "@grafana/google-sdk": "0.1.2",
     "@grafana/lezer-logql": "0.2.7",
+    "@grafana/llm": "0.12.0",
     "@grafana/monaco-logql": "^0.0.8",
     "@grafana/o11y-ds-frontend": "workspace:*",
     "@grafana/plugin-ui": "0.10.1",
@@ -2,8 +2,7 @@ import { css } from '@emotion/css';
 import { useCallback, useState } from 'react';
 import { useDebounce } from 'react-use';
 
-import { Stack } from '@grafana/experimental';
-import { Button, Field, Icon, Input, useStyles2 } from '@grafana/ui';
+import { Button, Field, Icon, Input, Stack, useStyles2 } from '@grafana/ui';
 
 import { useURLSearchParams } from '../../../hooks/useURLSearchParams';
 
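Stack is a drop-in replacement here; only the import source changes. A minimal usage sketch, assuming the @grafana/ui Stack accepts the usual direction and gap props; the component and field names are made up for illustration:

import { Button, Field, Input, Stack } from '@grafana/ui';

// Hypothetical search row: the layout primitive now ships with @grafana/ui.
export const SearchRow = () => (
  <Stack direction="row" gap={1}>
    <Field label="Search">
      <Input placeholder="Filter results" />
    </Field>
    <Button variant="secondary">Clear</Button>
  </Stack>
);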
@@ -1,4 +1,4 @@
-import { RawQuery } from '@grafana/experimental';
+import { RawQuery } from '@grafana/plugin-ui';
 import lokiGrammar from 'app/plugins/datasource/loki/syntax';
 
 interface Props {
@@ -1,4 +1,4 @@
-import { RawQuery } from '@grafana/experimental';
+import { RawQuery } from '@grafana/plugin-ui';
 import { promqlGrammar } from '@grafana/prometheus';
 
 interface Props {
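RawQuery keeps its name and only changes package. A hedged sketch of how a preview component might use it with the Prometheus grammar; the query/lang prop shape is an assumption based on common usage, not something these hunks show:

import { RawQuery } from '@grafana/plugin-ui';
import { promqlGrammar } from '@grafana/prometheus';

interface Props {
  expr: string;
}

// Assumed prop shape: the raw query string plus a Prism grammar descriptor.
export const PromQueryPreview = ({ expr }: Props) => (
  <RawQuery query={expr} lang={{ grammar: promqlGrammar, name: 'promql' }} />
);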
@@ -2,7 +2,7 @@ import { Dispatch, SetStateAction, useCallback, useEffect, useState } from 'react';
 import { useAsync } from 'react-use';
 import { Subscription } from 'rxjs';
 
-import { llms } from '@grafana/experimental';
+import { openai } from '@grafana/llm';
 import { createMonitoringLogger } from '@grafana/runtime';
 import { useAppNotification } from 'app/core/copy/appNotification';
 
@@ -10,7 +10,7 @@ import { isLLMPluginEnabled, DEFAULT_OAI_MODEL } from './utils';
 
 // Declared instead of imported from utils to make this hook modular
 // Ideally we will want to move the hook itself to a different scope later.
-type Message = llms.openai.Message;
+type Message = openai.Message;
 
 const genAILogger = createMonitoringLogger('features.dashboards.genai');
 
@@ -93,7 +93,7 @@ export function useOpenAIStream({ model, temperature, onResponse }: Options = de
     setStreamStatus(StreamStatus.GENERATING);
     setError(undefined);
     // Stream the completions. Each element is the next stream chunk.
-    const stream = llms.openai
+    const stream = openai
       .streamChatCompletions({
         model,
         temperature,
@@ -102,7 +102,7 @@ export function useOpenAIStream({ model, temperature, onResponse }: Options = de
       .pipe(
         // Accumulate the stream content into a stream of strings, where each
         // element contains the accumulated message so far.
-        llms.openai.accumulateContent()
+        openai.accumulateContent()
         // The stream is just a regular Observable, so we can use standard rxjs
         // functionality to update state, e.g. recording when the stream
         // has completed.
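For callers the change is purely a namespace swap: llms.openai.* becomes openai.*. A minimal sketch of the migrated streaming call, assuming @grafana/llm exposes the same streamChatCompletions and accumulateContent pair these hunks rely on; the message payload and subscribe handlers are illustrative:

import { openai } from '@grafana/llm';

const stream = openai
  .streamChatCompletions({
    model: 'base', // illustrative model id
    messages: [{ role: 'user', content: 'Suggest a dashboard title' }],
  })
  // Each emission is the accumulated reply so far rather than a raw chunk.
  .pipe(openai.accumulateContent());

const subscription = stream.subscribe({
  next: (reply) => console.log(reply),
  error: (err) => console.error(err),
  complete: () => subscription.unsubscribe(),
});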
@@ -1,4 +1,4 @@
-import { llms } from '@grafana/experimental';
+import { openai } from '@grafana/llm';
 
 import { DASHBOARD_SCHEMA_VERSION } from '../../state/DashboardMigrator';
 import { createDashboardModelFixture, createPanelSaveModel } from '../../state/__fixtures__/dashboardFixtures';
@@ -6,15 +6,13 @@ import { NEW_PANEL_TITLE } from '../../utils/dashboard';
 
 import { getDashboardChanges, getPanelStrings, isLLMPluginEnabled, sanitizeReply } from './utils';
 
-// Mock the llms.openai module
-jest.mock('@grafana/experimental', () => ({
-  ...jest.requireActual('@grafana/experimental'),
-  llms: {
-    openai: {
-      streamChatCompletions: jest.fn(),
-      accumulateContent: jest.fn(),
-      health: jest.fn(),
-    },
+// Mock the openai module
+jest.mock('@grafana/llm', () => ({
+  ...jest.requireActual('@grafana/llm'),
+  openai: {
+    streamChatCompletions: jest.fn(),
+    accumulateContent: jest.fn(),
+    health: jest.fn(),
   },
 }));
 
@@ -101,8 +99,8 @@ describe('getDashboardChanges', () => {
 
 describe('isLLMPluginEnabled', () => {
   it('should return false if LLM plugin is not enabled', async () => {
-    // Mock llms.openai.health to return false
-    jest.mocked(llms.openai.health).mockResolvedValue({ ok: false, configured: false });
+    // Mock openai.health to return false
+    jest.mocked(openai.health).mockResolvedValue({ ok: false, configured: false });
 
     const enabled = await isLLMPluginEnabled();
 
@@ -110,8 +108,8 @@ describe('isLLMPluginEnabled', () => {
   });
 
   it('should return true if LLM plugin is enabled', async () => {
-    // Mock llms.openai.health to return true
-    jest.mocked(llms.openai.health).mockResolvedValue({ ok: true, configured: false });
+    // Mock openai.health to return true
+    jest.mocked(openai.health).mockResolvedValue({ ok: true, configured: false });
 
     const enabled = await isLLMPluginEnabled();
 
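The test setup keeps the same shape: mock the module once, then drive individual functions per case with jest.mocked. A condensed sketch combining the pieces from the hunks above; the final assertion mirrors the test title rather than a line visible in the diff:

import { openai } from '@grafana/llm';

import { isLLMPluginEnabled } from './utils';

jest.mock('@grafana/llm', () => ({
  ...jest.requireActual('@grafana/llm'),
  openai: {
    streamChatCompletions: jest.fn(),
    accumulateContent: jest.fn(),
    health: jest.fn(),
  },
}));

it('treats a healthy LLM plugin as enabled', async () => {
  // health() is a jest.fn() supplied by the module mock above.
  jest.mocked(openai.health).mockResolvedValue({ ok: true, configured: false });

  await expect(isLLMPluginEnabled()).resolves.toBe(true);
});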
@@ -1,6 +1,6 @@
 import { pick } from 'lodash';
 
-import { llms } from '@grafana/experimental';
+import { openai } from '@grafana/llm';
 import { config } from '@grafana/runtime';
 import { Panel } from '@grafana/schema';
 
@@ -18,7 +18,7 @@ export enum Role {
   'user' = 'user',
 }
 
-export type Message = llms.openai.Message;
+export type Message = openai.Message;
 
 export enum QuickFeedbackType {
   Shorter = 'Even shorter',
@@ -80,7 +80,7 @@ export async function isLLMPluginEnabled(): Promise<boolean> {
   // Check if the LLM plugin is enabled.
   // If not, we won't be able to make requests, so return early.
   llmHealthCheck = new Promise((resolve) => {
-    llms.openai.health().then((response) => {
+    openai.health().then((response) => {
       if (!response.ok) {
         // Health check fail clear cached promise so we can try again later
         llmHealthCheck = undefined;
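isLLMPluginEnabled caches the health check in a module-level promise and clears it on failure so a later call can retry. A minimal sketch of that pattern against openai.health; everything outside the lines visible in the hunk (the cache declaration, the resolved value) is an assumption:

import { openai } from '@grafana/llm';

let llmHealthCheck: Promise<boolean> | undefined;

export async function isLLMPluginEnabled(): Promise<boolean> {
  // Reuse an in-flight or completed check instead of hitting the backend again.
  if (llmHealthCheck) {
    return llmHealthCheck;
  }

  llmHealthCheck = new Promise((resolve) => {
    openai.health().then((response) => {
      if (!response.ok) {
        // Health check failed: clear the cached promise so we can try again later.
        llmHealthCheck = undefined;
      }
      resolve(response.ok);
    });
  });

  return llmHealthCheck;
}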
@@ -1,7 +1,7 @@
 import { useMemo } from 'react';
 
 import { SelectableValue } from '@grafana/data';
-import { SQLEditor } from '@grafana/experimental';
+import { SQLEditor } from '@grafana/plugin-ui';
 
 import { ExpressionQuery } from '../types';
 
@@ -5,8 +5,7 @@ import * as React from 'react';
 import { FormEvent, useEffect, useReducer } from 'react';
 
 import { GrafanaTheme2, SelectableValue } from '@grafana/data';
-import { Stack } from '@grafana/experimental';
-import { InlineField, InlineFieldRow, InlineSwitch, Input, Select, useStyles2 } from '@grafana/ui';
+import { InlineField, InlineFieldRow, InlineSwitch, Input, Select, useStyles2, Stack } from '@grafana/ui';
 import { config } from 'app/core/config';
 import { EvalFunction } from 'app/features/alerting/state/alertDef';
 
@@ -5,7 +5,7 @@ import { useCallback, useMemo } from 'react';
 import { useAsync } from 'react-use';
 
 import { DataQuery, GrafanaTheme2, SelectableValue, DataTopic, QueryEditorProps } from '@grafana/data';
-import { OperationsEditorRow } from '@grafana/experimental';
+import { OperationsEditorRow } from '@grafana/plugin-ui';
 import { Field, Select, useStyles2, Spinner, RadioButtonGroup, Stack, InlineSwitch } from '@grafana/ui';
 import config from 'app/core/config';
 import { getDashboardSrv } from 'app/features/dashboard/services/DashboardSrv';
@@ -6,7 +6,6 @@
   "dependencies": {
     "@emotion/css": "11.13.5",
     "@grafana/data": "11.5.0-pre",
-    "@grafana/experimental": "2.1.6",
     "@grafana/runtime": "11.5.0-pre",
     "@grafana/schema": "11.5.0-pre",
     "@grafana/ui": "11.5.0-pre",
@@ -8,7 +8,7 @@ import {
   ConnectionSettings,
   DataSourceDescription,
   convertLegacyAuthProps,
-} from '@grafana/experimental';
+} from '@grafana/plugin-ui';
 import { config } from '@grafana/runtime';
 import { Divider, SecureSocksProxySettings, Stack, useStyles2 } from '@grafana/ui';
 
yarn.lock
@@ -2821,7 +2821,6 @@ __metadata:
    "@emotion/css": "npm:11.13.5"
    "@grafana/data": "npm:11.5.0-pre"
    "@grafana/e2e-selectors": "npm:11.5.0-pre"
-   "@grafana/experimental": "npm:2.1.6"
    "@grafana/plugin-configs": "npm:11.5.0-pre"
    "@grafana/runtime": "npm:11.5.0-pre"
    "@grafana/schema": "npm:11.5.0-pre"
@@ -3312,32 +3311,6 @@
   languageName: unknown
   linkType: soft
 
-"@grafana/experimental@npm:2.1.6":
-  version: 2.1.6
-  resolution: "@grafana/experimental@npm:2.1.6"
-  dependencies:
-    "@hello-pangea/dnd": "npm:^16.6.0"
-    "@types/uuid": "npm:^8.3.3"
-    lodash: "npm:^4.17.21"
-    prismjs: "npm:^1.29.0"
-    react-popper-tooltip: "npm:^4.4.2"
-    react-use: "npm:^17.4.2"
-    semver: "npm:^7.5.4"
-    uuid: "npm:^8.3.2"
-  peerDependencies:
-    "@emotion/css": ^11.11.2
-    "@grafana/data": ^10.4.0 || ^11.0.0
-    "@grafana/e2e-selectors": ^10.0.0 || ^11.0.0
-    "@grafana/runtime": ^10.4.0 || ^11.0.0
-    "@grafana/ui": ^10.4.0 || ^11.0.0
-    react: ^18.2.0
-    react-dom: ^18.2.0
-    react-select: ^5.8.0
-    rxjs: ^7.8.1
-  checksum: 10/3df81fef944e6dfdf843bf5954a586fe3eb90a1a2671088ae526b29fb9118793c05be96a3edd3d3babfdcbfa801fdfb328e87e6cbb1fd0de7f8f2d2e5c32f66f
-  languageName: node
-  linkType: hard
-
 "@grafana/faro-core@npm:^1.12.3, @grafana/faro-core@npm:^1.3.6":
   version: 1.12.3
   resolution: "@grafana/faro-core@npm:1.12.3"
@@ -4179,7 +4152,7 @@ __metadata:
   languageName: node
   linkType: hard
 
-"@hello-pangea/dnd@npm:16.6.0, @hello-pangea/dnd@npm:^16.6.0":
+"@hello-pangea/dnd@npm:16.6.0":
   version: 16.6.0
   resolution: "@hello-pangea/dnd@npm:16.6.0"
   dependencies:
@@ -10320,13 +10293,6 @@
   languageName: node
   linkType: hard
 
-"@types/uuid@npm:^8.3.3":
-  version: 8.3.4
-  resolution: "@types/uuid@npm:8.3.4"
-  checksum: 10/6f11f3ff70f30210edaa8071422d405e9c1d4e53abbe50fdce365150d3c698fe7bbff65c1e71ae080cbfb8fded860dbb5e174da96fdbbdfcaa3fb3daa474d20f
-  languageName: node
-  linkType: hard
-
 "@types/webpack-assets-manifest@npm:^5":
   version: 5.1.4
   resolution: "@types/webpack-assets-manifest@npm:5.1.4"
@@ -17815,6 +17781,7 @@ __metadata:
    "@grafana/flamegraph": "workspace:*"
    "@grafana/google-sdk": "npm:0.1.2"
    "@grafana/lezer-logql": "npm:0.2.7"
+   "@grafana/llm": "npm:0.12.0"
    "@grafana/monaco-logql": "npm:^0.0.8"
    "@grafana/o11y-ds-frontend": "workspace:*"
    "@grafana/plugin-e2e": "npm:1.17.1"
@@ -26614,7 +26581,7 @@ __metadata:
   languageName: node
   linkType: hard
 
-"react-use@npm:17.6.0, react-use@npm:^17.3.1, react-use@npm:^17.4.0, react-use@npm:^17.4.2, react-use@npm:^17.5.0":
+"react-use@npm:17.6.0, react-use@npm:^17.3.1, react-use@npm:^17.4.0, react-use@npm:^17.5.0":
   version: 17.6.0
   resolution: "react-use@npm:17.6.0"
   dependencies: