add chat and completion page under lab
commit 811db71bf5
parent 3ae84ce548
@@ -33,6 +33,20 @@ export default {
      summary_text: 'Translate languages leveraging AI power',
      icon_picture: '',
      cover_picture: cover_picture
    },
    {
      path: 'task-completion',
      title_text: 'Task Completion',
      summary_text: 'Response for a user prompt',
      icon_picture: '',
      cover_picture: cover_picture
    },
    {
      path: 'multiturn-chat',
      title_text: 'Multi-turn Chat',
      summary_text: 'Response based on multi-turn messages',
      icon_picture: '',
      cover_picture: cover_picture
    }
  ]
}
frontend/src/pages/lab/openai/MultiturnChat.vue (new file, 59 lines)
@@ -0,0 +1,59 @@
<template>
  <div class="input_container">
    <input class="input_text" type="text" v-model="input_text" @keyup.enter="lets_chat($event)" />
    <p class="responded_text">{{ responded_text }}</p>
  </div>
</template>
<script>
import { LabApi } from '@/utils/index'
export default {
  name: 'MultiturnChat',
  components: {},
  computed: {},
  mounted() { },
  methods: {
    lets_chat($event) {
      // Append the user's input to the running conversation history.
      this.messages.push({
        "role": "user",
        "content": this.input_text
      })
      LabApi.multiturn_chat(this.messages)
        .then((response) => {
          this.responded_text = response.data
          // Keep the assistant reply in the history so the next turn has full context.
          this.messages.push({
            "role": "assistant",
            "content": this.responded_text
          })
        })
        .catch((error) => {
          this.mnx_backendErrorHandler(error)
        })
    }
  },
  data() {
    return {
      input_text: null,
      messages: [
        { "role": "system", "content": "You are a helpful assistant." }
      ],
      responded_text: null
    }
  }
}
</script>

<style scoped lang="scss">
.input_container {
  @extend .container;
  @extend .m-3;
}

.input_text {
  @extend .w-100;
}

.responded_text {
  @extend .w-100;
}
</style>
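For reference, the component above grows a single OpenAI-style message list across turns: the system prompt stays first, and each round appends one user entry and one assistant entry. A minimal sketch of what the array might hold after one exchange (the user text and assistant reply are made-up examples, assuming the backend returns the reply as plain text in response.data; this snippet is not part of the commit):

// Sketch only: the messages array after one exchange.
const messages = [
  { "role": "system", "content": "You are a helpful assistant." },
  { "role": "user", "content": "What is the capital of France?" },      // pushed by lets_chat()
  { "role": "assistant", "content": "The capital of France is Paris." } // pushed from the response
]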
frontend/src/pages/lab/openai/TaskCompletion.vue (new file, 48 lines)
@@ -0,0 +1,48 @@
<template>
  <div class="input_container">
    <input class="input_text" type="text" v-model="input_text" @keyup.enter="task_completion($event)" />
    <p class="responded_text">{{ responded_text }}</p>
  </div>
</template>
<script>
import { LabApi } from '@/utils/index'
export default {
  name: 'TaskCompletion',
  components: {},
  computed: {},
  mounted() { },
  methods: {
    task_completion($event) {
      // Send the current prompt to the backend and show the returned completion.
      LabApi.task_completion(this.input_text)
        .then((response) => {
          this.responded_text = response.data
        })
        .catch((error) => {
          this.mnx_backendErrorHandler(error)
        })
    }
  },
  data() {
    return {
      input_text: null,
      responded_text: null
    }
  }
}
</script>

<style scoped lang="scss">
.input_container {
  @extend .container;
  @extend .m-3;
}

.input_text {
  @extend .w-100;
}

.responded_text {
  @extend .w-100;
}
</style>
@@ -66,7 +66,8 @@ import HeaderUser from '@/headers/HeaderUser.vue'
//Lab
import LabHome from '@/pages/lab/Home.vue'
import TranslationHome from '@/pages/lab/translation/Home.vue'

import TaskCompletion from '@/pages/lab/openai/TaskCompletion.vue'
import MultiturnChat from '@/pages/lab/openai/MultiturnChat.vue'
const router = createRouter({
  history: createWebHistory(),
  routes: [
@@ -389,8 +390,19 @@ const router = createRouter({
        path: '/machine-translation',
        meta: { requiredRoles: [userRoleEnum.PERSONAL] },
        components: { default: TranslationHome, footer: FooterUser, header: HeaderUser }
      }
    ],
  },
  {
    name: 'task-completion',
    path: '/task-completion',
    meta: { requiredRoles: [userRoleEnum.PERSONAL] },
    components: { default: TaskCompletion, footer: FooterUser, header: HeaderUser }
  },
  {
    name: 'multiturn-chat',
    path: '/multiturn-chat',
    meta: { requiredRoles: [userRoleEnum.PERSONAL] },
    components: { default: MultiturnChat, footer: FooterUser, header: HeaderUser }
  }],

  linkActiveClass: 'active'
  /* scrollBehavior(_, _2, savedPosition) {
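The two new routes are registered by name, so other parts of the app can link to them without hard-coding the paths. A small sketch of standard Vue Router navigation (the calling component is hypothetical and not part of this commit):

// Sketch only: navigating to the new pages by route name from any component.
this.$router.push({ name: 'task-completion' })
this.$router.push({ name: 'multiturn-chat' })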
@@ -14,5 +14,33 @@ class LabApi {
    )
    return request
  }

  // POST a single prompt to the completion endpoint; returns the Axios promise.
  static task_completion(prompt) {
    let jwt = userUtils.getJwtToken()
    const request = backendAxios.post(
      '/api/lab/openai-completion',
      {
        user_prompt: prompt
      },
      {
        headers: { Authorization: `Bearer ${jwt}` }
      }
    )
    return request
  }

  // POST the full conversation history to the chat endpoint; returns the Axios promise.
  static multiturn_chat(messages) {
    let jwt = userUtils.getJwtToken()
    const request = backendAxios.post(
      '/api/lab/openai-chat',
      {
        messages: messages
      },
      {
        headers: { Authorization: `Bearer ${jwt}` }
      }
    )
    return request
  }
}

export { LabApi }
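Both helpers simply return the Axios promise, and the components above read the model output from response.data. A quick usage sketch from any component (the prompts are made-up examples and not part of this commit):

// Sketch only: calling the new LabApi helpers.
import { LabApi } from '@/utils/index'

// Single-shot completion
LabApi.task_completion('Summarize this paragraph in one sentence.')
  .then((response) => console.log(response.data))

// Multi-turn chat: the caller owns and grows the message history
LabApi.multiturn_chat([
  { "role": "system", "content": "You are a helpful assistant." },
  { "role": "user", "content": "Hello!" }
]).then((response) => console.log(response.data))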