diff --git a/frontend/src/component/App.tsx b/frontend/src/component/App.tsx
index 505dea5adf..5a2f5c8a38 100644
--- a/frontend/src/component/App.tsx
+++ b/frontend/src/component/App.tsx
@@ -21,6 +21,7 @@ import { InternalBanners } from './banners/internalBanners/InternalBanners';
import { ExternalBanners } from './banners/externalBanners/ExternalBanners';
import { LicenseBanner } from './banners/internalBanners/LicenseBanner';
import { Demo } from './demo/Demo';
+import { AIChat } from './common/AI/AIChat';
const StyledContainer = styled('div')(() => ({
'& ul': {
@@ -98,6 +99,8 @@ export const App = () => {
+                    <AIChat />
+
>
diff --git a/frontend/src/component/changeRequest/ChangeRequest/ChangeRequest.tsx b/frontend/src/component/changeRequest/ChangeRequest/ChangeRequest.tsx
index 766273422d..ec3b43c5a4 100644
--- a/frontend/src/component/changeRequest/ChangeRequest/ChangeRequest.tsx
+++ b/frontend/src/component/changeRequest/ChangeRequest/ChangeRequest.tsx
@@ -6,6 +6,7 @@ import { FeatureChange } from './Changes/Change/FeatureChange';
import { ChangeActions } from './Changes/Change/ChangeActions';
import { ConditionallyRender } from 'component/common/ConditionallyRender/ConditionallyRender';
import { SegmentChange } from './Changes/Change/SegmentChange';
+import { AIChangeRequestDescription } from './Changes/Change/AIChangeRequestDescription';
interface IChangeRequestProps {
changeRequest: ChangeRequestType;
@@ -61,6 +62,7 @@ export const ChangeRequest: VFC<IChangeRequestProps> = ({
onNavigate={onNavigate}
conflict={feature.conflict}
>
+                    <AIChangeRequestDescription changes={feature.changes} />
{feature.changes.map((change, index) => (
diff --git a/frontend/src/component/changeRequest/ChangeRequest/Changes/Change/AIChangeRequestDescription.tsx b/frontend/src/component/changeRequest/ChangeRequest/Changes/Change/AIChangeRequestDescription.tsx
new file mode 100644
--- /dev/null
+++ b/frontend/src/component/changeRequest/ChangeRequest/Changes/Change/AIChangeRequestDescription.tsx
+import { useEffect, useState } from 'react';
+import { styled } from '@mui/material';
+import { useAI } from 'hooks/api/actions/useAI/useAI';
+import { Markdown } from 'component/common/Markdown/Markdown';
+import type { IFeatureChange } from 'component/changeRequest/changeRequest.types';
+
+const StyledDescription = styled('div')(({ theme }) => ({
+ background: theme.palette.secondary.light,
+ color: theme.palette.secondary.contrastText,
+ border: `1px solid ${theme.palette.secondary.border}`,
+ padding: theme.spacing(2),
+}));
+
+export const AIChangeRequestDescription = ({
+ changes,
+}: { changes: IFeatureChange[] }) => {
+ const { prompt } = useAI();
+    const [response, setResponse] = useState<string>();
+
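+    // Embed the raw change objects as a JSON code block so the model can turn them into a readable summary.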
+ const changesBlock = `\`\`\`\n${JSON.stringify(changes)}\n\`\`\``;
+ const message = `Please parse these changes into a concise, easy-to-understand, human-readable description:\n\n${changesBlock}\nWe support markdown and don't care about profile pictures. You should handle weight by dividing it by 10 and assuming it's a percentage. Don't mention weight otherwise. Only include the changes, without any auxiliary text in the response.`;
+
+ const load = async () => {
+ const response = await prompt(message); // TODO: Might be broken after recent changes
+ setResponse(response);
+ };
+
+ useEffect(() => {
+ load();
+ }, []);
+
+ if (!response) return null;
+
+ return (
+        <StyledDescription>
+            <Markdown>{response}</Markdown>
+        </StyledDescription>
+ );
+};
diff --git a/frontend/src/component/commandBar/CommandBar.tsx b/frontend/src/component/commandBar/CommandBar.tsx
index 9701022191..907c0ceb46 100644
--- a/frontend/src/component/commandBar/CommandBar.tsx
+++ b/frontend/src/component/commandBar/CommandBar.tsx
@@ -155,7 +155,8 @@ export const CommandBar = () => {
query.length !== 0 &&
mappedProjects.length === 0 &&
mappedPages.length === 0 &&
- searchedFlagCount === 0;
+ searchedFlagCount === 0 &&
+ !query.startsWith('#');
if (noResultsFound) {
trackEvent('command-bar', {
props: {
@@ -287,6 +288,10 @@ export const CommandBar = () => {
}
};
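+    // Queries prefixed with '#' are extracted as a prompt for the Unleash AI and excluded from the normal search flow.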
+ const AIPrompt = (searchString || '').startsWith('#')
+ ? (searchString as unknown as string).split('#')[1].trim()
+ : undefined;
+
return (
diff --git a/frontend/src/component/common/AI/AIChat.tsx b/frontend/src/component/common/AI/AIChat.tsx
new file mode 100644
index 0000000000..5ad46bc46c
--- /dev/null
+++ b/frontend/src/component/common/AI/AIChat.tsx
@@ -0,0 +1,255 @@
+import SmartToyIcon from '@mui/icons-material/SmartToy';
+import CloseIcon from '@mui/icons-material/Close';
+import {
+ Avatar,
+ IconButton,
+ styled,
+ TextField,
+ Typography,
+} from '@mui/material';
+import { useEffect, useRef, useState } from 'react';
+import useToast from 'hooks/useToast';
+import { formatUnknownError } from 'utils/formatUnknownError';
+import { useAI } from 'hooks/api/actions/useAI/useAI';
+import { Markdown } from '../Markdown/Markdown';
+import { useAuthUser } from 'hooks/api/getters/useAuth/useAuthUser';
+
+const StyledContainer = styled('div')(({ theme }) => ({
+ position: 'fixed',
+ bottom: 10,
+ right: 10,
+ zIndex: theme.zIndex.fab,
+}));
+
+const StyledIconButton = styled(IconButton)(({ theme }) => ({
+ background: theme.palette.primary.main,
+ color: theme.palette.primary.contrastText,
+ '&:hover': {
+ background: theme.palette.primary.dark,
+ },
+}));
+
+const StyledChat = styled('div')(({ theme }) => ({
+ border: `1px solid ${theme.palette.primary.border}`,
+ borderRadius: theme.shape.borderRadius,
+}));
+
+const StyledHeader = styled('div')(({ theme }) => ({
+ background: theme.palette.primary.light,
+ color: theme.palette.primary.contrastText,
+ display: 'flex',
+ alignItems: 'center',
+ justifyContent: 'space-between',
+ padding: theme.spacing(1, 2),
+}));
+
+const StyledChatContent = styled('div')(({ theme }) => ({
+ display: 'flex',
+ flexDirection: 'column',
+ padding: theme.spacing(2),
+ background: theme.palette.background.paper,
+ width: theme.spacing(40),
+ height: theme.spacing(50),
+ overflow: 'auto',
+}));
+
+const StyledMessageContainer = styled('div')(({ theme }) => ({
+ display: 'flex',
+ justifyContent: 'flex-start',
+ gap: theme.spacing(1),
+ marginTop: theme.spacing(1),
+ marginBottom: theme.spacing(1),
+ '&:first-child': {
+ marginTop: 0,
+ },
+ '&:last-child': {
+ marginBottom: 0,
+ },
+}));
+
+const StyledMessage = styled('div')(({ theme }) => ({
+ background: theme.palette.secondary.light,
+ color: theme.palette.secondary.contrastText,
+ border: `1px solid ${theme.palette.secondary.border}`,
+ padding: theme.spacing(0.75),
+}));
+
+const StyledAIMessage = styled(StyledMessage)(({ theme }) => ({
+ background: theme.palette.secondary.light,
+ color: theme.palette.secondary.contrastText,
+ border: `1px solid ${theme.palette.secondary.border}`,
+ borderRadius: theme.shape.borderRadius,
+ display: 'inline-block',
+ wordWrap: 'break-word',
+}));
+
+const StyledUserMessage = styled(StyledMessage)(({ theme }) => ({
+ background: theme.palette.primary.light,
+ color: theme.palette.primary.contrastText,
+ border: `1px solid ${theme.palette.primary.border}`,
+ borderRadius: theme.shape.borderRadius,
+ display: 'inline-block',
+ wordWrap: 'break-word',
+}));
+
+const StyledAvatar = styled(Avatar)(({ theme }) => ({
+ width: theme.spacing(4),
+ height: theme.spacing(4),
+}));
+
+const StyledForm = styled('form')(({ theme }) => ({
+ background: theme.palette.background.paper,
+ display: 'flex',
+ alignItems: 'center',
+ padding: theme.spacing(1),
+}));
+
+const StyledInput = styled(TextField)(({ theme }) => ({
+ margin: theme.spacing(0.5),
+}));
+
+export const AIChat = () => {
+ const { user } = useAuthUser();
+ const [open, setOpen] = useState(false);
+ const [prompt, setPrompt] = useState('');
+ const [loading, setLoading] = useState(false);
+ const { setToastApiError } = useToast();
+ const { promptWithTools } = useAI();
+
+ const [messages, setMessages] = useState<
+ { role: 'system' | 'assistant' | 'user'; content: string }[]
+ >([
+ {
+ role: 'system',
+ content: `You are an assistant that helps users interact with Unleash. You should ask the user in case you're missing any required information. Unless I say otherwise, assume every flag belongs to the "default" project.`,
+ },
+ ]);
+
+ const chatEndRef = useRef(null);
+
+ useEffect(() => {
+ if (chatEndRef.current) {
+ chatEndRef.current.scrollIntoView({ behavior: 'smooth' });
+ }
+ }, [messages]);
+
+ if (!open) {
+ return (
+            <StyledContainer>
+                <StyledIconButton onClick={() => setOpen(!open)}>
+                    <SmartToyIcon />
+                </StyledIconButton>
+            </StyledContainer>
+ );
+ }
+
+ const onSubmit = async (event: React.SyntheticEvent) => {
+ event.preventDefault();
+
+ try {
+ setLoading(true);
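+            // Optimistically show the user's message, then append the assistant's reply once it arrives.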
+ let tempMessages = [
+ ...messages,
+ { role: 'user' as const, content: prompt },
+ ];
+ setMessages(tempMessages);
+ setPrompt('');
+ const content = await promptWithTools(tempMessages);
+ if (content) {
+ tempMessages = [
+ ...tempMessages,
+ { role: 'assistant', content },
+ ];
+ }
+ setMessages(tempMessages);
+ setLoading(false);
+ } catch (error: unknown) {
+ setToastApiError(formatUnknownError(error));
+ }
+ };
+
+ return (
+        <StyledContainer>
+            <StyledChat>
+                <StyledHeader>
+                    <Typography>Unleash AI</Typography>
+                    <IconButton onClick={() => setOpen(!open)}>
+                        <CloseIcon />
+                    </IconButton>
+                </StyledHeader>
+                <StyledChatContent>
+                    <StyledMessageContainer>
+                        <StyledAvatar
+                            sx={(theme) => ({
+                                backgroundColor: theme.palette.primary.main,
+                            })}
+                        >
+                            <SmartToyIcon />
+                        </StyledAvatar>
+                        <StyledAIMessage>
+                            Hello, how can I assist you?
+                        </StyledAIMessage>
+                    </StyledMessageContainer>
+                    {messages.map(({ role, content }, index) => {
+                        if (role === 'assistant') {
+                            return (
+                                <StyledMessageContainer key={index}>
+                                    <StyledAvatar
+                                        sx={(theme) => ({
+                                            backgroundColor:
+                                                theme.palette.primary.main,
+                                        })}
+                                    >
+                                        <SmartToyIcon />
+                                    </StyledAvatar>
+                                    <StyledAIMessage>
+                                        <Markdown>{content}</Markdown>
+                                    </StyledAIMessage>
+                                </StyledMessageContainer>
+                            );
+                        }
+
+                        if (role === 'user') {
+                            return (
+                                <StyledMessageContainer key={index}>
+                                    <StyledUserMessage>
+                                        <Markdown>{content}</Markdown>
+                                    </StyledUserMessage>
+                                    <StyledAvatar src={user?.imageUrl} />
+                                </StyledMessageContainer>
+                            );
+                        }
+                    })}
+                    {loading && (
+                        <StyledMessageContainer>
+                            <StyledAvatar
+                                sx={(theme) => ({
+                                    backgroundColor: theme.palette.primary.main,
+                                })}
+                            >
+                                <SmartToyIcon />
+                            </StyledAvatar>
+                            <StyledAIMessage>
+                                <Markdown>_Unleash AI is typing..._</Markdown>
+                            </StyledAIMessage>
+                        </StyledMessageContainer>
+                    )}
+                    <div ref={chatEndRef} />
+                </StyledChatContent>
+                <StyledForm onSubmit={onSubmit}>
+                    <StyledInput
+                        value={prompt}
+                        onChange={(e) => setPrompt(e.target.value)}
+                    />
+                </StyledForm>
+            </StyledChat>
+        </StyledContainer>
+ );
+};
diff --git a/frontend/src/component/common/AI/AIMessage.tsx b/frontend/src/component/common/AI/AIMessage.tsx
new file mode 100644
index 0000000000..047dabcecc
--- /dev/null
+++ b/frontend/src/component/common/AI/AIMessage.tsx
@@ -0,0 +1,5 @@
+import { Markdown } from 'component/common/Markdown/Markdown';
+
+export const AIMessage = ({ children }: { children: string }) => (
+    <Markdown>{children}</Markdown>
+);
diff --git a/frontend/src/component/project/Project/PaginatedProjectFeatureToggles/ProjectFeatureToggles.tsx b/frontend/src/component/project/Project/PaginatedProjectFeatureToggles/ProjectFeatureToggles.tsx
index 606d707504..2e476f7788 100644
--- a/frontend/src/component/project/Project/PaginatedProjectFeatureToggles/ProjectFeatureToggles.tsx
+++ b/frontend/src/component/project/Project/PaginatedProjectFeatureToggles/ProjectFeatureToggles.tsx
@@ -77,7 +77,7 @@ export const ProjectFeatureToggles = ({
initialLoad,
tableState,
setTableState,
- } = useProjectFeatureSearch(projectId);
+ } = useProjectFeatureSearch(projectId, undefined, 1000);
const { onFlagTypeClick, onTagClick, onAvatarClick } =
useProjectFeatureSearchActions(tableState, setTableState);
diff --git a/frontend/src/hooks/api/actions/useAI/useAI.ts b/frontend/src/hooks/api/actions/useAI/useAI.ts
new file mode 100644
index 0000000000..545112f71b
--- /dev/null
+++ b/frontend/src/hooks/api/actions/useAI/useAI.ts
@@ -0,0 +1,123 @@
+import { useState } from 'react';
+import useAPI from '../useApi/useApi';
+import { useUiFlag } from 'hooks/useUiFlag';
+
+const ENDPOINT = 'api/admin/ai';
+
+type ChatMessage = {
+ role: 'system' | 'user' | 'assistant';
+ content: string;
+};
+
+export const useAI = () => {
+ const {
+ makeStreamingRequest,
+ makeRequest,
+ createRequest,
+ errors,
+ loading,
+ } = useAPI({
+ propagateErrors: true,
+ });
+
+ const unleashAI = useUiFlag('unleashAI');
+
+    const [messages, setMessages] = useState<ChatMessage[]>([]);
+ const [streamingComplete, setStreamingComplete] = useState(true);
+
+    const prompt = async (content: string): Promise<string | undefined> => {
+ if (!unleashAI) return;
+
+ const requestId = 'prompt';
+
+ setMessages((prevMessages) => [
+ ...prevMessages,
+ { role: 'user', content },
+ ]);
+
+ const req = createRequest(ENDPOINT, {
+ method: 'POST',
+ body: JSON.stringify({
+ messages: [...messages, { role: 'user', content }],
+ }),
+ requestId,
+ });
+
+ const res = await makeRequest(req.caller, req.id);
+ const { response } = await res.json();
+ return response;
+ };
+
+ const promptWithTools = async (
+ messages: ChatMessage[],
+    ): Promise<string | undefined> => {
+ if (!unleashAI) return;
+
+ const requestId = 'promptWithTools';
+
+ const req = createRequest(`${ENDPOINT}`, {
+ method: 'POST',
+ body: JSON.stringify({
+ messages,
+ }),
+ requestId,
+ });
+
+ const res = await makeRequest(req.caller, req.id);
+ const { response } = await res.json();
+ return response;
+ };
+
+ const promptStream = async (content: string) => {
+ setMessages((prevMessages) => [
+ ...prevMessages,
+ { role: 'user', content },
+ ]);
+
+ const req = createRequest(`${ENDPOINT}/stream`, {
+ method: 'POST',
+ body: JSON.stringify({
+ messages: [...messages, { role: 'user', content }],
+ }),
+ });
+
+ setStreamingComplete(false);
+
+ await makeStreamingRequest(
+ req.caller,
+ (chunk: string) => {
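+                // Append each chunk to the last assistant message, or start a new assistant message on the first chunk.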
+ setMessages((prevMessages) => {
+ const lastMessage = prevMessages[prevMessages.length - 1];
+
+ if (lastMessage && lastMessage.role === 'assistant') {
+ return [
+ ...prevMessages.slice(0, -1),
+ {
+ role: 'assistant',
+ content: lastMessage.content + chunk,
+ },
+ ];
+ } else {
+ return [
+ ...prevMessages,
+ { role: 'assistant', content: chunk },
+ ];
+ }
+ });
+ },
+ 'prompt',
+ );
+
+ setStreamingComplete(true);
+ };
+
+ return {
+ prompt,
+ promptWithTools,
+ promptStream,
+ messages,
+ errors,
+ loading,
+ streamingComplete,
+ };
+};
diff --git a/frontend/src/hooks/api/actions/useApi/useApi.ts b/frontend/src/hooks/api/actions/useApi/useApi.ts
index a5c6c6a414..e8a7ee71d3 100644
--- a/frontend/src/hooks/api/actions/useApi/useApi.ts
+++ b/frontend/src/hooks/api/actions/useApi/useApi.ts
@@ -245,6 +245,61 @@ const useAPI = ({
[handleResponses],
);
+ const makeStreamingRequest = useCallback(
+ async (
+            apiCaller: () => Promise<Response>,
+ onData: (chunk: string) => void,
+ requestId: string,
+ ) => {
+ setLoading(true);
+ try {
+ const res = await apiCaller();
+ if (!res.body) {
+ throw new Error(
+ 'Streaming request failed: No body returned',
+ );
+ }
+
+ const reader = res.body.getReader();
+ const decoder = new TextDecoder('utf-8');
+ let done = false;
+ let buffer = '';
+
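+                // A read may end mid-line, so keep the trailing partial line in the buffer until the next chunk completes it.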
+ while (!done) {
+ const { value, done: readerDone } = await reader.read();
+ done = readerDone;
+
+ buffer += decoder.decode(value, { stream: true });
+
+ const lines = buffer.split('\n');
+
+ buffer = lines.pop() || '';
+
+ for (const line of lines) {
+ if (line.startsWith('data:')) {
+ const cleanChunk = line.replace('data: ', '');
+ onData(cleanChunk);
+ }
+ }
+ }
+
+ if (buffer.trim() !== '') {
+ if (buffer.startsWith('data:')) {
+ const cleanChunk = buffer.replace('data: ', '');
+ onData(cleanChunk);
+ }
+ }
+
+ setLoading(false);
+ return res;
+ } catch (e) {
+ setLoading(false);
+ throw e;
+ }
+ },
+ [],
+ );
+
const makeLightRequest = useCallback(
async (
apiCaller: () => Promise<Response>,
@@ -294,6 +349,7 @@ const useAPI = ({
return {
loading,
makeRequest: isDevelopment ? makeRequestWithTimer : makeRequest,
+ makeStreamingRequest,
makeLightRequest: isDevelopment
? makeLightRequestWithTimer
: makeLightRequest,
diff --git a/frontend/src/interfaces/uiConfig.ts b/frontend/src/interfaces/uiConfig.ts
index 65070583ec..33e4681930 100644
--- a/frontend/src/interfaces/uiConfig.ts
+++ b/frontend/src/interfaces/uiConfig.ts
@@ -90,6 +90,7 @@ export type UiFlags = {
archiveProjects?: boolean;
projectListImprovements?: boolean;
onboardingUI?: boolean;
+ unleashAI?: boolean;
};
export interface IVersionInfo {
diff --git a/package.json b/package.json
index 8e00168084..2dec389ffe 100644
--- a/package.json
+++ b/package.json
@@ -151,6 +151,7 @@
"murmurhash3js": "^3.0.1",
"mustache": "^4.1.0",
"nodemailer": "^6.9.9",
+ "openai": "^4.58.1",
"openapi-types": "^12.1.3",
"owasp-password-strength-test": "^1.3.0",
"parse-database-url": "^0.3.0",
@@ -160,13 +161,13 @@
"prom-client": "^14.0.0",
"response-time": "^2.3.2",
"sanitize-filename": "^1.6.3",
- "semver": "^7.6.2",
+ "semver": "^7.6.3",
"serve-favicon": "^2.5.0",
"slug": "^9.0.0",
"stoppable": "^1.1.0",
"ts-toolbelt": "^9.6.0",
"type-is": "^1.6.18",
- "unleash-client": "5.6.1",
+ "unleash-client": "6.1.1",
"uuid": "^9.0.0"
},
"devDependencies": {
diff --git a/src/lib/create-config.ts b/src/lib/create-config.ts
index e6d49d5f34..c3550efd9b 100644
--- a/src/lib/create-config.ts
+++ b/src/lib/create-config.ts
@@ -711,6 +711,8 @@ export function createConfig(options: IUnleashOptions): IUnleashConfig {
),
};
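+    // Optional: the Unleash AI service (behind the unleashAI flag) uses this key to talk to OpenAI.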
+ const openAIAPIKey = process.env.OPENAI_API_KEY;
+
return {
db,
session,
@@ -749,6 +751,7 @@ export function createConfig(options: IUnleashOptions): IUnleashConfig {
rateLimiting,
feedbackUriPath,
dailyMetricsStorageDays,
+ openAIAPIKey,
};
}
diff --git a/src/lib/features/ai/ai-controller.ts b/src/lib/features/ai/ai-controller.ts
new file mode 100644
index 0000000000..1a99219613
--- /dev/null
+++ b/src/lib/features/ai/ai-controller.ts
@@ -0,0 +1,159 @@
+import type { Response } from 'express';
+import Controller from '../../routes/controller';
+
+import { NONE } from '../../types/permissions';
+import type { IUnleashConfig } from '../../types/option';
+import type { IUnleashServices } from '../../types/services';
+import type { Logger } from '../../logger';
+
+import { getStandardResponses } from '../../openapi/util/standard-responses';
+import { createRequestSchema, createResponseSchema } from '../../openapi';
+import type { IAuthRequest } from '../../server-impl';
+import type { OpenApiService } from '../../services';
+import { type AIPromptSchema, aiPromptSchema } from '../../openapi';
+import type { AIService } from './ai-service';
+
+export class AIController extends Controller {
+ private logger: Logger;
+
+ // private openApiService: OpenApiService;
+
+ aiService: AIService;
+
+ constructor(
+ config: IUnleashConfig,
+ {
+ openApiService,
+ aiService,
+        }: Pick<IUnleashServices, 'openApiService' | 'aiService'>,
+ ) {
+ super(config);
+ this.logger = config.getLogger('features/ai/ai-controller.ts');
+ // this.openApiService = openApiService;
+ this.aiService = aiService;
+
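+        // Both the root path and the 'tools' path currently run the tool-enabled completion; only 'stream' behaves differently.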
+ this.route({
+ method: 'post',
+ path: '',
+ handler: this.promptWithTools,
+ permission: NONE,
+ middleware: [
+ openApiService.validPath({
+ tags: ['Unstable'],
+ operationId: 'prompt',
+ summary: 'Prompts Unleash AI',
+ description: 'This endpoint is used to prompt Unleash AI.',
+ requestBody: createRequestSchema(aiPromptSchema.$id),
+ responses: {
+ // 200: createResponseSchema(aiPromptResponseSchema.$id),
+ ...getStandardResponses(401, 403),
+ },
+ }),
+ ],
+ });
+
+ this.route({
+ method: 'post',
+ path: 'tools',
+ handler: this.promptWithTools,
+ permission: NONE,
+ middleware: [
+ openApiService.validPath({
+ tags: ['Unstable'],
+ operationId: 'promptWithTools',
+ summary: 'Prompts Unleash AI',
+ description: 'This endpoint is used to prompt Unleash AI.',
+ requestBody: createRequestSchema(aiPromptSchema.$id),
+ responses: {
+ // 200: createResponseSchema(aiPromptResponseSchema.$id),
+ ...getStandardResponses(401, 403),
+ },
+ }),
+ ],
+ });
+
+ this.route({
+ method: 'post',
+ path: 'stream',
+ handler: this.promptStream,
+ permission: NONE,
+ middleware: [
+ openApiService.validPath({
+ tags: ['Unstable'],
+ operationId: 'prompt',
+ summary: 'Prompts Unleash AI',
+ description: 'This endpoint is used to prompt Unleash AI.',
+ requestBody: createRequestSchema(aiPromptSchema.$id),
+ responses: {
+ // 200: createResponseSchema(aiPromptResponseSchema.$id),
+ ...getStandardResponses(401, 403),
+ },
+ }),
+ ],
+ });
+ }
+
+ async prompt(
+ req: IAuthRequest,
+ res: Response,
+    ): Promise<void> {
+ const { messages } = req.body;
+
+ try {
+ const responseMessages =
+ await this.aiService.createChatCompletion(messages);
+
+ const response = responseMessages.choices[0].message.content || '';
+
+ res.json({ response });
+ } catch (error) {
+ console.error('Error', error);
+ res.status(500).send('Error');
+ }
+ }
+
+ async promptWithTools(
+ req: IAuthRequest,
+ res: Response,
+    ): Promise<void> {
+ const { messages } = req.body;
+
+ try {
+ const runner =
+ this.aiService.createChatCompletionWithTools(messages);
+
+ const response = await runner.finalContent();
+
+ res.json({ response });
+ } catch (error) {
+ console.error('Error', error);
+ throw new Error('Error');
+ }
+ }
+
+ async promptStream(
+ req: IAuthRequest,
+ res: Response,
+    ): Promise<void> {
+ res.setHeader('Content-Type', 'text/event-stream');
+ res.setHeader('Cache-Control', 'no-cache');
+ res.setHeader('Connection', 'keep-alive');
+
+ const { messages } = req.body;
+
+ try {
+ const stream = this.aiService.createChatCompletionStream(messages);
+
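+            // Forward each streamed completion delta to the client as it is produced.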
+ for await (const part of stream) {
+ const text = part.choices[0].delta?.content || '';
+ res.write(text);
+ }
+
+ res.write('event: end\n\n');
+ res.end();
+ } catch (error) {
+ console.error('Error during streaming:', error);
+ res.status(500).send('Error during streaming');
+ }
+ }
+}
diff --git a/src/lib/features/ai/ai-service.ts b/src/lib/features/ai/ai-service.ts
new file mode 100644
index 0000000000..4746bcf0ef
--- /dev/null
+++ b/src/lib/features/ai/ai-service.ts
@@ -0,0 +1,259 @@
+import type {
+ ChatCompletion,
+ ChatCompletionMessageParam,
+} from 'openai/resources/chat/completions';
+import type {
+ ChatCompletionRunner,
+ ChatCompletionStream,
+} from 'openai/resources/beta/chat/completions';
+import OpenAI from 'openai';
+import type {
+ IUnleashConfig,
+ IUnleashServices,
+ Logger,
+} from '../../server-impl';
+import type { APIPromise } from 'openai/core';
+import { ADMIN_TOKEN_USER, SYSTEM_USER, SYSTEM_USER_AUDIT } from '../../types';
+import type FeatureToggleService from '../feature-toggle/feature-toggle-service';
+
+export class AIService {
+ private config: IUnleashConfig;
+
+ private logger: Logger;
+
+ private client: OpenAI | undefined;
+
+ private featureService: FeatureToggleService;
+
+ constructor(
+ config: IUnleashConfig,
+ {
+ featureToggleService,
+        }: Pick<IUnleashServices, 'featureToggleService'>,
+ ) {
+ this.config = config;
+ this.logger = config.getLogger('features/ai/ai-service.ts');
+ this.featureService = featureToggleService;
+ }
+
+ getClient(): OpenAI {
+ if (this.client) {
+ return this.client;
+ }
+
+ const apiKey = this.config.openAIAPIKey;
+ if (!apiKey) {
+ throw new Error('Missing OpenAI API key');
+ }
+
+ this.client = new OpenAI({ apiKey });
+ return this.client;
+ }
+
+ createChatCompletion(
+ messages: ChatCompletionMessageParam[],
+    ): APIPromise<ChatCompletion> {
+ const client = this.getClient();
+
+ return client.chat.completions.create({
+ model: 'gpt-4o-mini',
+ messages,
+ });
+ }
+
+ createFlag = async ({
+ project,
+ flag,
+ description,
+ }: {
+ project: string;
+ flag: string;
+ description?: string;
+ }) => {
+ try {
+ const flagData = await this.featureService.createFeatureToggle(
+ project,
+ { name: flag, description },
+ SYSTEM_USER_AUDIT,
+ );
+
+ return flagData;
+ } catch (error) {
+ return error;
+ }
+ };
+
+ getFlag = async ({
+ project,
+ flag,
+ }: {
+ project: string;
+ flag: string;
+ }) => {
+ try {
+ const flagData = await this.featureService.getFeature({
+ featureName: flag,
+ archived: false,
+ projectId: project,
+ environmentVariants: false,
+ userId: SYSTEM_USER.id,
+ });
+
+ return flagData;
+ } catch (error) {
+ return error;
+ }
+ };
+
+ toggleFlag = async ({
+ project,
+ flag,
+ environment,
+ enabled,
+ }: {
+ project: string;
+ flag: string;
+ environment: string;
+ enabled: boolean;
+ }) => {
+ try {
+ const data = await this.featureService.updateEnabled(
+ project,
+ flag,
+ environment,
+ enabled,
+ SYSTEM_USER_AUDIT,
+ ADMIN_TOKEN_USER,
+ false,
+ );
+
+ return data;
+ } catch (error) {
+ return error;
+ }
+ };
+
+ archiveFlag = async ({
+ project,
+ flag,
+ }: {
+ project: string;
+ flag: string;
+ }) => {
+ try {
+ const flagData = await this.featureService.archiveToggle(
+ flag,
+ ADMIN_TOKEN_USER,
+ SYSTEM_USER_AUDIT,
+ project,
+ );
+
+ return flagData;
+ } catch (error) {
+ return error;
+ }
+ };
+
+ createChatCompletionWithTools(
+ messages: ChatCompletionMessageParam[],
+ ): ChatCompletionRunner {
+ const client = this.getClient();
+
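+        // runTools lets the model call the registered flag helpers; the returned runner resolves its final content once the tool loop finishes.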
+ return client.beta.chat.completions.runTools({
+ model: 'gpt-4o-mini',
+ messages,
+ tools: [
+ {
+ type: 'function',
+ function: {
+ function: this.createFlag,
+ name: 'createFlag',
+ description:
+ 'Create a feature flag by name and project. Optionally supply a description',
+ parse: JSON.parse,
+ parameters: {
+ type: 'object',
+ properties: {
+ project: { type: 'string' },
+ flag: { type: 'string' },
+ description: { type: 'string' },
+ },
+ required: ['project', 'flag'],
+ },
+ },
+ },
+ {
+ type: 'function',
+ function: {
+ function: this.getFlag,
+ name: 'getFlag',
+ description: 'Get a feature flag by name and project',
+ parse: JSON.parse,
+ parameters: {
+ type: 'object',
+ properties: {
+ project: { type: 'string' },
+ flag: { type: 'string' },
+ },
+ required: ['project', 'flag'],
+ },
+ },
+ },
+ {
+ type: 'function',
+ function: {
+ function: this.toggleFlag,
+ name: 'toggleFlag',
+ description:
+ 'Toggle a feature flag by name, project, environment, and enabled status',
+ parse: JSON.parse,
+ parameters: {
+ type: 'object',
+ properties: {
+ project: { type: 'string' },
+ flag: { type: 'string' },
+ environment: { type: 'string' },
+ enabled: { type: 'boolean' },
+ },
+ required: [
+ 'project',
+ 'flag',
+ 'environment',
+ 'enabled',
+ ],
+ },
+ },
+ },
+ {
+ type: 'function',
+ function: {
+ function: this.archiveFlag,
+ name: 'archiveFlag',
+ description:
+ 'Archive a feature flag by name and project',
+ parse: JSON.parse,
+ parameters: {
+ type: 'object',
+ properties: {
+ project: { type: 'string' },
+ flag: { type: 'string' },
+ },
+ required: ['project', 'flag'],
+ },
+ },
+ },
+ ],
+ });
+ }
+
+ createChatCompletionStream(
+ messages: ChatCompletionMessageParam[],
+ ): ChatCompletionStream {
+ const client = this.getClient();
+
+ return client.beta.chat.completions.stream({
+ model: 'gpt-4o-mini',
+ messages,
+ });
+ }
+}
diff --git a/src/lib/openapi/spec/ai-prompt-schema.ts b/src/lib/openapi/spec/ai-prompt-schema.ts
new file mode 100644
index 0000000000..c3fbdad1a4
--- /dev/null
+++ b/src/lib/openapi/spec/ai-prompt-schema.ts
@@ -0,0 +1,37 @@
+import type { FromSchema } from 'json-schema-to-ts';
+
+export const aiPromptSchema = {
+ $id: '#/components/schemas/aiPromptSchema',
+ type: 'object',
+ description: 'Describes an Unleash AI prompt.',
+ required: ['messages'],
+ properties: {
+ messages: {
+ type: 'array',
+ description:
+ 'The messages exchanged between the user and the Unleash AI.',
+ items: {
+ type: 'object',
+ required: ['role', 'content'],
+ properties: {
+ role: {
+ type: 'string',
+ enum: ['system', 'user', 'assistant'],
+ description: 'The role of the message sender.',
+ example: 'user',
+ },
+ content: {
+ type: 'string',
+ description: 'The message content.',
+ example: 'What is your purpose?',
+ },
+ },
+ },
+ },
+ },
+ components: {
+ schemas: {},
+ },
+} as const;
+
+export type AIPromptSchema = FromSchema<typeof aiPromptSchema>;
diff --git a/src/lib/openapi/spec/index.ts b/src/lib/openapi/spec/index.ts
index a18706ab4e..248e3f8287 100644
--- a/src/lib/openapi/spec/index.ts
+++ b/src/lib/openapi/spec/index.ts
@@ -14,6 +14,7 @@ export * from './advanced-playground-environment-feature-schema';
export * from './advanced-playground-feature-schema';
export * from './advanced-playground-request-schema';
export * from './advanced-playground-response-schema';
+export * from './ai-prompt-schema';
export * from './api-token-schema';
export * from './api-tokens-schema';
export * from './application-environment-instances-schema';
diff --git a/src/lib/routes/admin-api/index.ts b/src/lib/routes/admin-api/index.ts
index f645465eeb..3444fe9623 100644
--- a/src/lib/routes/admin-api/index.ts
+++ b/src/lib/routes/admin-api/index.ts
@@ -35,6 +35,8 @@ import { SegmentsController } from '../../features/segment/segment-controller';
import { InactiveUsersController } from '../../users/inactive/inactive-users-controller';
import { UiObservabilityController } from '../../features/ui-observability-controller/ui-observability-controller';
import { SearchApi } from './search';
+import { conditionalMiddleware } from '../../middleware';
+import { AIController } from '../../features/ai/ai-controller';
export class AdminApi extends Controller {
constructor(config: IUnleashConfig, services: IUnleashServices, db: Db) {
@@ -164,5 +166,13 @@ export class AdminApi extends Controller {
'/record-ui-error',
new UiObservabilityController(config, services).router,
);
+
+ this.app.use(
+ '/ai',
+ conditionalMiddleware(
+ () => config.flagResolver.isEnabled('unleashAI'),
+ new AIController(config, services).router,
+ ),
+ );
}
}
diff --git a/src/lib/services/index.ts b/src/lib/services/index.ts
index ff98fc293c..da8168c594 100644
--- a/src/lib/services/index.ts
+++ b/src/lib/services/index.ts
@@ -146,6 +146,7 @@ import {
createOnboardingService,
} from '../features/onboarding/createOnboardingService';
import { OnboardingService } from '../features/onboarding/onboarding-service';
+import { AIService } from '../features/ai/ai-service';
export const createServices = (
stores: IUnleashStores,
@@ -401,6 +402,10 @@ export const createServices = (
: createFakeOnboardingService(config).onboardingService;
onboardingService.listen();
+ const aiService = new AIService(config, {
+ featureToggleService: featureToggleServiceV2,
+ });
+
return {
accessService,
accountService,
@@ -464,6 +469,7 @@ export const createServices = (
transactionalFeatureLifecycleService,
integrationEventsService,
onboardingService,
+ aiService,
};
};
@@ -514,4 +520,5 @@ export {
FeatureLifecycleService,
IntegrationEventsService,
OnboardingService,
+ AIService,
};
diff --git a/src/lib/types/core.ts b/src/lib/types/core.ts
index 297eaf822a..a83694c31d 100644
--- a/src/lib/types/core.ts
+++ b/src/lib/types/core.ts
@@ -37,7 +37,7 @@ export const ADMIN_TOKEN_USER: Omit = {
imageUrl: '',
isAPI: true,
name: 'Unleash Admin Token',
- permissions: [],
+ permissions: ['ADMIN'],
username: 'unleash_admin_token',
};
diff --git a/src/lib/types/experimental.ts b/src/lib/types/experimental.ts
index 09e56e2bc3..c948e3010d 100644
--- a/src/lib/types/experimental.ts
+++ b/src/lib/types/experimental.ts
@@ -63,7 +63,8 @@ export type IFlagKey =
| 'addonUsageMetrics'
| 'onboardingMetrics'
| 'onboardingUI'
- | 'projectRoleAssignment';
+ | 'projectRoleAssignment'
+ | 'unleashAI';
export type IFlags = Partial<{ [key in IFlagKey]: boolean | Variant }>;
@@ -312,6 +313,10 @@ const flags: IFlags = {
process.env.UNLEASH_EXPERIMENTAL_PROJECT_ROLE_ASSIGNMENT,
false,
),
+ unleashAI: parseEnvVarBoolean(
+ process.env.UNLEASH_EXPERIMENTAL_UNLEASH_AI,
+ false,
+ ),
};
export const defaultExperimentalOptions: IExperimentalOptions = {
diff --git a/src/lib/types/option.ts b/src/lib/types/option.ts
index 0812756060..2f265eb121 100644
--- a/src/lib/types/option.ts
+++ b/src/lib/types/option.ts
@@ -273,4 +273,5 @@ export interface IUnleashConfig {
isEnterprise: boolean;
rateLimiting: IRateLimiting;
feedbackUriPath?: string;
+ openAIAPIKey?: string;
}
diff --git a/src/lib/types/services.ts b/src/lib/types/services.ts
index 6386b686f6..96f11aa4ce 100644
--- a/src/lib/types/services.ts
+++ b/src/lib/types/services.ts
@@ -56,6 +56,7 @@ import type { JobService } from '../features/scheduler/job-service';
import type { FeatureLifecycleService } from '../features/feature-lifecycle/feature-lifecycle-service';
import type { IntegrationEventsService } from '../features/integration-events/integration-events-service';
import type { OnboardingService } from '../features/onboarding/onboarding-service';
+import type { AIService } from '../features/ai/ai-service';
export interface IUnleashServices {
accessService: AccessService;
@@ -123,4 +124,5 @@ export interface IUnleashServices {
transactionalFeatureLifecycleService: WithTransactional;
integrationEventsService: IntegrationEventsService;
onboardingService: OnboardingService;
+ aiService: AIService;
}
diff --git a/src/server-dev.ts b/src/server-dev.ts
index be7e586391..0904abd8ce 100644
--- a/src/server-dev.ts
+++ b/src/server-dev.ts
@@ -57,6 +57,7 @@ process.nextTick(async () => {
addonUsageMetrics: true,
onboardingMetrics: true,
onboardingUI: true,
+ unleashAI: true,
},
},
authentication: {
diff --git a/yarn.lock b/yarn.lock
index ed1f08696a..b4016d1466 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -2175,6 +2175,16 @@ __metadata:
languageName: node
linkType: hard
+"@types/node-fetch@npm:^2.6.4":
+ version: 2.6.11
+ resolution: "@types/node-fetch@npm:2.6.11"
+ dependencies:
+ "@types/node": "npm:*"
+ form-data: "npm:^4.0.0"
+ checksum: 10c0/5283d4e0bcc37a5b6d8e629aee880a4ffcfb33e089f4b903b2981b19c623972d1e64af7c3f9540ab990f0f5c89b9b5dda19c5bcb37a8e177079e93683bfd2f49
+ languageName: node
+ linkType: hard
+
"@types/node@npm:*, @types/node@npm:>=12.0.0":
version: 20.11.17
resolution: "@types/node@npm:20.11.17"
@@ -2200,6 +2210,15 @@ __metadata:
languageName: node
linkType: hard
+"@types/node@npm:^18.11.18":
+ version: 18.19.50
+ resolution: "@types/node@npm:18.19.50"
+ dependencies:
+ undici-types: "npm:~5.26.4"
+ checksum: 10c0/36e6bc9eb47213ce94a868dad9504465ad89fba6af9f7954e22bb27fb17a32ac495f263d0cf4fdaee74becd7b2629609a446ec8c2b59b7a07bd587567c8a4782
+ languageName: node
+ linkType: hard
+
"@types/nodemailer@npm:6.4.15":
version: 6.4.15
resolution: "@types/nodemailer@npm:6.4.15"
@@ -2241,6 +2260,13 @@ __metadata:
languageName: node
linkType: hard
+"@types/qs@npm:^6.9.15":
+ version: 6.9.15
+ resolution: "@types/qs@npm:6.9.15"
+ checksum: 10c0/49c5ff75ca3adb18a1939310042d273c9fc55920861bd8e5100c8a923b3cda90d759e1a95e18334092da1c8f7b820084687770c83a1ccef04fb2c6908117c823
+ languageName: node
+ linkType: hard
+
"@types/range-parser@npm:*":
version: 1.2.4
resolution: "@types/range-parser@npm:1.2.4"
@@ -2390,6 +2416,15 @@ __metadata:
languageName: node
linkType: hard
+"abort-controller@npm:^3.0.0":
+ version: 3.0.0
+ resolution: "abort-controller@npm:3.0.0"
+ dependencies:
+ event-target-shim: "npm:^5.0.0"
+ checksum: 10c0/90ccc50f010250152509a344eb2e71977fbf8db0ab8f1061197e3275ddf6c61a41a6edfd7b9409c664513131dd96e962065415325ef23efa5db931b382d24ca5
+ languageName: node
+ linkType: hard
+
"accepts@npm:~1.3.5, accepts@npm:~1.3.7, accepts@npm:~1.3.8":
version: 1.3.8
resolution: "accepts@npm:1.3.8"
@@ -2425,6 +2460,15 @@ __metadata:
languageName: node
linkType: hard
+"agentkeepalive@npm:^4.2.1":
+ version: 4.5.0
+ resolution: "agentkeepalive@npm:4.5.0"
+ dependencies:
+ humanize-ms: "npm:^1.2.1"
+ checksum: 10c0/394ea19f9710f230722996e156607f48fdf3a345133b0b1823244b7989426c16019a428b56c82d3eabef616e938812981d9009f4792ecc66bd6a59e991c62612
+ languageName: node
+ linkType: hard
+
"aggregate-error@npm:^3.0.0":
version: 3.1.0
resolution: "aggregate-error@npm:3.1.0"
@@ -4178,6 +4222,13 @@ __metadata:
languageName: node
linkType: hard
+"event-target-shim@npm:^5.0.0":
+ version: 5.0.1
+ resolution: "event-target-shim@npm:5.0.1"
+ checksum: 10c0/0255d9f936215fd206156fd4caa9e8d35e62075d720dc7d847e89b417e5e62cf1ce6c9b4e0a1633a9256de0efefaf9f8d26924b1f3c8620cffb9db78e7d3076b
+ languageName: node
+ linkType: hard
+
"eventemitter3@npm:^3.1.0":
version: 3.1.2
resolution: "eventemitter3@npm:3.1.2"
@@ -4607,6 +4658,13 @@ __metadata:
languageName: node
linkType: hard
+"form-data-encoder@npm:1.7.2":
+ version: 1.7.2
+ resolution: "form-data-encoder@npm:1.7.2"
+ checksum: 10c0/56553768037b6d55d9de524f97fe70555f0e415e781cb56fc457a68263de3d40fadea2304d4beef2d40b1a851269bd7854e42c362107071892cb5238debe9464
+ languageName: node
+ linkType: hard
+
"form-data@npm:^2.5.0":
version: 2.5.1
resolution: "form-data@npm:2.5.1"
@@ -4651,6 +4709,16 @@ __metadata:
languageName: node
linkType: hard
+"formdata-node@npm:^4.3.2":
+ version: 4.4.1
+ resolution: "formdata-node@npm:4.4.1"
+ dependencies:
+ node-domexception: "npm:1.0.0"
+ web-streams-polyfill: "npm:4.0.0-beta.3"
+ checksum: 10c0/74151e7b228ffb33b565cec69182694ad07cc3fdd9126a8240468bb70a8ba66e97e097072b60bcb08729b24c7ce3fd3e0bd7f1f80df6f9f662b9656786e76f6a
+ languageName: node
+ linkType: hard
+
"formidable@npm:^3.5.1":
version: 3.5.1
resolution: "formidable@npm:3.5.1"
@@ -5130,6 +5198,15 @@ __metadata:
languageName: node
linkType: hard
+"humanize-ms@npm:^1.2.1":
+ version: 1.2.1
+ resolution: "humanize-ms@npm:1.2.1"
+ dependencies:
+ ms: "npm:^2.0.0"
+ checksum: 10c0/f34a2c20161d02303c2807badec2f3b49cbfbbb409abd4f95a07377ae01cfe6b59e3d15ac609cffcd8f2521f0eb37b7e1091acf65da99aa2a4f1ad63c21e7e7a
+ languageName: node
+ linkType: hard
+
"husky@npm:^9.0.11":
version: 9.1.5
resolution: "husky@npm:9.1.5"
@@ -7103,7 +7180,7 @@ __metadata:
languageName: node
linkType: hard
-"ms@npm:2.1.3, ms@npm:^2.1.1":
+"ms@npm:2.1.3, ms@npm:^2.0.0, ms@npm:^2.1.1":
version: 2.1.3
resolution: "ms@npm:2.1.3"
checksum: 10c0/d924b57e7312b3b63ad21fc5b3dc0af5e78d61a1fc7cfb5457edaf26326bf62be5307cc87ffb6862ef1c2b33b0233cdb5d4f01c4c958cc0d660948b65a287a48
@@ -7181,6 +7258,13 @@ __metadata:
languageName: node
linkType: hard
+"node-domexception@npm:1.0.0":
+ version: 1.0.0
+ resolution: "node-domexception@npm:1.0.0"
+ checksum: 10c0/5e5d63cda29856402df9472335af4bb13875e1927ad3be861dc5ebde38917aecbf9ae337923777af52a48c426b70148815e890a5d72760f1b4d758cc671b1a2b
+ languageName: node
+ linkType: hard
+
"node-fetch-h2@npm:^2.3.0":
version: 2.3.0
resolution: "node-fetch-h2@npm:2.3.0"
@@ -7190,7 +7274,7 @@ __metadata:
languageName: node
linkType: hard
-"node-fetch@npm:^2.6.1":
+"node-fetch@npm:^2.6.1, node-fetch@npm:^2.6.7":
version: 2.7.0
resolution: "node-fetch@npm:2.7.0"
dependencies:
@@ -7465,6 +7549,30 @@ __metadata:
languageName: node
linkType: hard
+"openai@npm:^4.58.1":
+ version: 4.58.1
+ resolution: "openai@npm:4.58.1"
+ dependencies:
+ "@types/node": "npm:^18.11.18"
+ "@types/node-fetch": "npm:^2.6.4"
+ "@types/qs": "npm:^6.9.15"
+ abort-controller: "npm:^3.0.0"
+ agentkeepalive: "npm:^4.2.1"
+ form-data-encoder: "npm:1.7.2"
+ formdata-node: "npm:^4.3.2"
+ node-fetch: "npm:^2.6.7"
+ qs: "npm:^6.10.3"
+ peerDependencies:
+ zod: ^3.23.8
+ peerDependenciesMeta:
+ zod:
+ optional: true
+ bin:
+ openai: bin/cli
+ checksum: 10c0/d63c3cec14c47c8e6a3656d51ae99eeeff5e754c31d119da7fd74ef4b403bdc82587db62c9aba940cbf539df2f31b07b3ac56bd6c69581009ef0ddefc2ab44fd
+ languageName: node
+ linkType: hard
+
"openapi-enforcer@npm:1.23.0":
version: 1.23.0
resolution: "openapi-enforcer@npm:1.23.0"
@@ -8230,6 +8338,15 @@ __metadata:
languageName: node
linkType: hard
+"qs@npm:^6.10.3":
+ version: 6.13.0
+ resolution: "qs@npm:6.13.0"
+ dependencies:
+ side-channel: "npm:^1.0.6"
+ checksum: 10c0/62372cdeec24dc83a9fb240b7533c0fdcf0c5f7e0b83343edd7310f0ab4c8205a5e7c56406531f2e47e1b4878a3821d652be4192c841de5b032ca83619d8f860
+ languageName: node
+ linkType: hard
+
"qs@npm:~6.5.2":
version: 6.5.3
resolution: "qs@npm:6.5.3"
@@ -8941,7 +9058,7 @@ __metadata:
languageName: node
linkType: hard
-"side-channel@npm:^1.0.4":
+"side-channel@npm:^1.0.4, side-channel@npm:^1.0.6":
version: 1.0.6
resolution: "side-channel@npm:1.0.6"
dependencies:
@@ -9885,9 +10002,9 @@ __metadata:
languageName: node
linkType: hard
-"unleash-client@npm:5.6.1":
- version: 5.6.1
- resolution: "unleash-client@npm:5.6.1"
+"unleash-client@npm:6.1.1":
+ version: 6.1.1
+ resolution: "unleash-client@npm:6.1.1"
dependencies:
http-proxy-agent: "npm:^7.0.2"
https-proxy-agent: "npm:^7.0.5"
@@ -9895,7 +10012,7 @@ __metadata:
make-fetch-happen: "npm:^13.0.1"
murmurhash3js: "npm:^3.0.1"
semver: "npm:^7.6.2"
- checksum: 10c0/5a1bda38ebb03ed7cc13981d400bab23442703e01be6ae05bf30925491948d264ab0f368ae11103c6bbdcf3e494a26ab215bb23c8aa2fdf66345393e7444cb69
+ checksum: 10c0/ff1a5d5d047f05de3581320fbe3af2c796a9bd1578ea5546730883217f35f1462bd89ebf558038b69fdb6a1594031d08c720f7a4c29bf985532bd035f334d989
languageName: node
linkType: hard
@@ -9987,6 +10104,7 @@ __metadata:
mustache: "npm:^4.1.0"
nock: "npm:13.5.5"
nodemailer: "npm:^6.9.9"
+ openai: "npm:^4.58.1"
openapi-enforcer: "npm:1.23.0"
openapi-types: "npm:^12.1.3"
owasp-password-strength-test: "npm:^1.3.0"
@@ -9998,7 +10116,7 @@ __metadata:
proxyquire: "npm:2.1.3"
response-time: "npm:^2.3.2"
sanitize-filename: "npm:^1.6.3"
- semver: "npm:^7.6.2"
+ semver: "npm:^7.6.3"
serve-favicon: "npm:^2.5.0"
slug: "npm:^9.0.0"
source-map-support: "npm:0.5.21"
@@ -10010,7 +10128,7 @@ __metadata:
tsc-watch: "npm:6.2.0"
type-is: "npm:^1.6.18"
typescript: "npm:5.4.5"
- unleash-client: "npm:5.6.1"
+ unleash-client: "npm:6.1.1"
uuid: "npm:^9.0.0"
wait-on: "npm:^7.2.0"
languageName: unknown
@@ -10209,6 +10327,13 @@ __metadata:
languageName: node
linkType: hard
+"web-streams-polyfill@npm:4.0.0-beta.3":
+ version: 4.0.0-beta.3
+ resolution: "web-streams-polyfill@npm:4.0.0-beta.3"
+ checksum: 10c0/a9596779db2766990117ed3a158e0b0e9f69b887a6d6ba0779940259e95f99dc3922e534acc3e5a117b5f5905300f527d6fbf8a9f0957faf1d8e585ce3452e8e
+ languageName: node
+ linkType: hard
+
"webidl-conversions@npm:^3.0.0":
version: 3.0.1
resolution: "webidl-conversions@npm:3.0.1"