Merge branch 'main' of http://27.128.240.72:3000/zhuhao/AIx_Smarttalk_WS into zdg_dev
commit e7990dc175
@@ -73,7 +73,6 @@ export class PPTApi {
    // Process the activity list
    const workList = (res.rows || []).map(o => o.activityContent)
    const workItem = [...res.rows]
    // After joining an activity, refresh the PPT data without switching back to the first slide
    // slidesStore.updateSlideIndex(0) // index 0 is the first slide
    slidesStore.setSlides(slides) // write the slide data
    // Write the homework list data
@@ -110,11 +109,9 @@ export class PPTApi {
    const rid = await API_entpcoursefile.addEntpcoursefileReturnId(params)
    if (!!rid) {
      data.id = rid
      params.id = rid
      params.activityContent = null
      slidesStore.addWorkItem(params)
      slidesStore.updateSlide(data)
      // msgUtils.msgSuccess('新增成功')
      PPTApi.getSlideList(resource.id)
      this.isUpdate = false // adding triggers the watcher, so skip the next data update
      resolve(true)
    } else { msgUtils.msgError('新增失败'); resolve(false) }
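The add flow above resolves its surrounding Promise with a boolean: true when the backend returns a new id, false otherwise. A minimal sketch of that control flow, assuming the `new Promise(resolve => { ... })` wrapper implied by the hunk (store updates and refresh calls elided):

    // Sketch only: the wrapper, stores, and msgUtils come from the hunk above.
    if (rid) {
      // ...write the new id into data/params, update the stores, refresh the slide list...
      resolve(true)
    } else {
      msgUtils.msgError('新增失败')
      resolve(false) // only reached when no id came back
    }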
@@ -73,3 +73,54 @@ export const svg2File = (svg: string): File => {
  const blob = new Blob([svg], { type: 'image/svg+xml' })
  return new File([blob], `${Date.now()}.svg`, { type: 'image/svg+xml' })
}

/**
 * Get the current time as a formatted string
 * @returns formatted timestamp (yyyy-MM-dd_HH:mm:ss)
 */
export const getTime = () => {
  const now = new Date();
  const year = now.getFullYear();
  const month = ('0' + (now.getMonth() + 1)).slice(-2);
  const day = ('0' + now.getDate()).slice(-2);
  const hours = ('0' + now.getHours()).slice(-2);
  const minutes = ('0' + now.getMinutes()).slice(-2);
  const seconds = ('0' + now.getSeconds()).slice(-2);
  return `${year}-${month}-${day}_${hours}:${minutes}:${seconds}`;
};

/**
 * Convert a base64 image to a File
 * @param {String} base64 image data as a base64 data URL
 * @param {String} fileName image name | default → '试题图片'
 * @returns File the converted File object
 */
export const base64ToFile = (base64: string, fileName = '试题图片') => {
  // Split the base64 string on ',' to separate the data-URL prefix from the encoded payload
  let data = base64.split(','),
    // Use a regex to read the MIME type (image/png, image/jpeg, image/webp, etc.) from the prefix
    type = data[0].match(/:(.*?);/)[1],
    // Take the concrete file extension (png, jpeg, webp) from the MIME type
    suffix = type.split('/')[1],
    // Decode the base64 payload with atob(); the result is a binary string
    bstr = window.atob(data[1]),
    // Length of the decoded string
    n = bstr.length,
    // Create an integer array of the same length;
    // every element starts out as 0
    u8arr = new Uint8Array(n)

  // Fill each array element with the code unit of the character at the same position in the decoded string
  while (n--) {
    // charCodeAt(): returns the UTF-16 code unit of the character at the given index
    u8arr[n] = bstr.charCodeAt(n)
  }
  const filename = fileName + getTime()
  // Build the File object with its constructor
  // new File(bits, name, options)
  const file = new File([u8arr], `${filename}.${suffix}`, {
    type: type
  })
  // Return the file
  return file
}
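With getTime and base64ToFile exported from utils/image, other components can reuse them instead of keeping local copies. A hypothetical usage sketch (the canvas element, import path, and file name below are illustrative assumptions, not part of this commit):

    import { base64ToFile } from '../../../utils/image' // relative path is an assumption

    // Hypothetical: turn a canvas snapshot into a named File for upload
    const canvas = document.querySelector('canvas') as HTMLCanvasElement
    const dataUrl = canvas.toDataURL('image/png')  // base64 data URL
    const file = base64ToFile(dataUrl, '课堂截图')  // the helper appends getTime() to the name
    console.log(file.name, file.type)              // e.g. 课堂截图<timestamp>.png image/png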
@@ -141,7 +141,7 @@
import { ref } from 'vue'
import { storeToRefs } from 'pinia'
import { useMainStore, useSnapshotStore } from '../../../store'
import { getImageDataURL } from '../../../utils/image'
import { getImageDataURL, base64ToFile } from '../../../utils/image'
import type { ShapePoolItem } from '../../../configs/shapes'
import type { LinePoolItem } from '../../../configs/lines'
import useScaleCanvas from '../../../hooks/useScaleCanvas'
@@ -208,57 +208,9 @@ const insertImageElement = (files: FileList) => {
  // })
}

// Get the current time
const getTime = () => {
  const now = new Date();
  const year = now.getFullYear();
  const month = ('0' + (now.getMonth() + 1)).slice(-2);
  const day = ('0' + now.getDate()).slice(-2);
  const hours = ('0' + now.getHours()).slice(-2);
  const minutes = ('0' + now.getMinutes()).slice(-2);
  const seconds = ('0' + now.getSeconds()).slice(-2);
  return `${year}-${month}-${day}_${hours}:${minutes}:${seconds}`;
};

/**
 * Convert a base64 image to a File
 * @param {String} base64 image data as a base64 data URL
 * @param {String} fileName image name | default → '试题图片'
 * @returns File the converted File object
 */
const base64ToFile = (base64: string, fileName = '试题图片') => {
  // Split the base64 string on ',' to separate the data-URL prefix from the encoded payload
  let data = base64.split(','),
    // Use a regex to read the MIME type (image/png, image/jpeg, image/webp, etc.) from the prefix
    type = data[0].match(/:(.*?);/)[1],
    // Take the concrete file extension (png, jpeg, webp) from the MIME type
    suffix = type.split('/')[1],
    // Decode the base64 payload with atob(); the result is a binary string
    bstr = window.atob(data[1]),
    // Length of the decoded string
    n = bstr.length,
    // Create an integer array of the same length;
    // every element starts out as 0
    u8arr = new Uint8Array(n)

  // Fill each array element with the code unit of the character at the same position in the decoded string
  while (n--) {
    // charCodeAt(): returns the UTF-16 code unit of the character at the given index
    u8arr[n] = bstr.charCodeAt(n)
  }
  const filename = fileName + getTime()
  // Build the File object with its constructor
  // new File(bits, name, options)
  const file = new File([u8arr], `${filename}.${suffix}`, {
    type: type
  })
  // Return the file
  return file
}


const onhtml2canvas = async (html: HTMLElement) => {
  const base64Dta = await toPng(html);
  // Convert the base64 string to an image File
  const toFile = base64ToFile(base64Dta)
  // Upload the image and exchange it for an online URL
  PPTApi.toRousrceUrl(toFile).then(data=>{
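For reference, the flow in onhtml2canvas is: render the DOM node to a base64 PNG with toPng, convert it to a File with the shared base64ToFile, then pass it to PPTApi.toRousrceUrl for upload. The hunk is cut off before the .then body, so the shape of the resolved value in this condensed sketch is an assumption:

    // Condensed sketch of the flow above; `url` being the resolved value is an assumption.
    const onhtml2canvasSketch = async (html: HTMLElement) => {
      const base64Dta = await toPng(html)           // DOM node -> base64 PNG data URL
      const toFile = base64ToFile(base64Dta)        // base64 -> File, named with getTime()
      const url = await PPTApi.toRousrceUrl(toFile) // upload; assumed to resolve with the online address
      return url
    }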
@@ -305,10 +305,10 @@ const upDateData = async () => {
  // await PPTApi.updateSlide(paramData.value)
  loadingActive.value = true
  await updateEntpcoursefileNew(paramData.value)
  const res = await homeworklist({ ids: paramData.value.activityContent, pageSize: 100 })
  await formatClassWorkFile(res.rows)
  const resource = sessionStore.get('curr.resource')
  await PPTApi.getSlideList(resource.id)
  const res = await homeworklist({ ids: paramData.value.activityContent, pageSize: 100 })
  await formatClassWorkFile(res.rows)
}
// Check whether any edit was actually made
const objectsAreEqual = (obj1: Record<string, any>, obj2: Record<string, any>) => {
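The two homeworklist/formatClassWorkFile pairs in this hunk are the old and new positions of the same calls. Assuming the pair after PPTApi.getSlideList is the new position, the post-change sequence in upDateData looks like this sketch (error handling elided):

    loadingActive.value = true
    await updateEntpcoursefileNew(paramData.value)   // persist the edited course file
    const resource = sessionStore.get('curr.resource')
    await PPTApi.getSlideList(resource.id)           // refresh the slide list first
    const res = await homeworklist({ ids: paramData.value.activityContent, pageSize: 100 })
    await formatClassWorkFile(res.rows)              // then rebuild the homework list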