diff --git a/.ops/.gitlab-ci.yml b/.ops/.gitlab-ci.yml
index 0e8978c4..269485b9 100644
--- a/.ops/.gitlab-ci.yml
+++ b/.ops/.gitlab-ci.yml
@@ -5,7 +5,7 @@ stages:
variables:
APP_NAME: "xtreme1"
- APP_VERSION: "0.6.1"
+ APP_VERSION: "0.7.0"
FRONTEND_PACKAGE_DIR: "dist"
BACKEND_PACKAGE_NAME: "${APP_NAME}-backend-${APP_VERSION}-SNAPSHOT.jar"
@@ -65,6 +65,9 @@ build-frontend-package:
- cd ../pc-tool
- npm install
- npm run build
+ - cd ../text-tool
+ - npm install
+ - npm run build
artifacts:
paths:
- frontend/$FRONTEND_PACKAGE_DIR
@@ -73,6 +76,7 @@ build-frontend-package:
- frontend/main/node_modules
- frontend/image-tool/node_modules
- frontend/pc-tool/node_modules
+ - frontend/text-tool/node_modules
build-release-package:
stage: build-package
diff --git a/.ops/alidev/frontend-deployment.yml b/.ops/alidev/frontend-deployment.yml
index 36097923..c2b181b6 100644
--- a/.ops/alidev/frontend-deployment.yml
+++ b/.ops/alidev/frontend-deployment.yml
@@ -119,3 +119,27 @@ spec:
- hosts:
- xtreme1.alidev.beisai.com
secretName: alidev.beisai.com
+---
+apiVersion: networking.k8s.io/v1
+kind: Ingress
+metadata:
+ annotations:
+ nginx.ingress.kubernetes.io/rewrite-target: /text-tool/$2
+ name: frontend-text-tool
+spec:
+ ingressClassName: nginx
+ rules:
+ - host: xtreme1.alidev.beisai.com
+ http:
+ paths:
+ - backend:
+ service:
+ name: frontend
+ port:
+ number: 80
+ path: /tool/text(/|$)(.*)
+ pathType: Prefix
+ tls:
+ - hosts:
+ - xtreme1.alidev.beisai.com
+ secretName: alidev.beisai.com
diff --git a/.ops/alitest/frontend-deployment.yml b/.ops/alitest/frontend-deployment.yml
index 8e217791..d17a8a30 100644
--- a/.ops/alitest/frontend-deployment.yml
+++ b/.ops/alitest/frontend-deployment.yml
@@ -119,3 +119,27 @@ spec:
- hosts:
- xtreme1.alitest.beisai.com
secretName: alitest.beisai.com
+---
+apiVersion: networking.k8s.io/v1
+kind: Ingress
+metadata:
+ annotations:
+ nginx.ingress.kubernetes.io/rewrite-target: /text-tool/$2
+ name: frontend-text-tool
+spec:
+ ingressClassName: nginx
+ rules:
+ - host: xtreme1.alitest.beisai.com
+ http:
+ paths:
+ - backend:
+ service:
+ name: frontend
+ port:
+ number: 80
+ path: /tool/text(/|$)(.*)
+ pathType: Prefix
+ tls:
+ - hosts:
+ - xtreme1.alitest.beisai.com
+ secretName: alitest.beisai.com
diff --git a/.ops/backend.dockerfile b/.ops/backend.dockerfile
index 77aca33a..06dd9b42 100644
--- a/.ops/backend.dockerfile
+++ b/.ops/backend.dockerfile
@@ -2,7 +2,7 @@ FROM openjdk:11
RUN apt update && \
apt install -y iputils-ping curl wget netcat python3 python3-pip
-RUN pip3 install --upgrade --force-reinstall git+https://github.com/xtreme1-io/xtreme1-sdk.git@bddaa8d
+RUN pip3 install --upgrade --force-reinstall git+https://github.com/xtreme1-io/xtreme1-sdk.git@d0cf4cc
WORKDIR /app
COPY target/$BACKEND_PACKAGE_NAME ./app.jar
RUN mkdir -p config
diff --git a/README.md b/README.md
index 23b6620a..dc67a0d9 100644
--- a/README.md
+++ b/README.md
@@ -1,7 +1,7 @@
-![](https://img.shields.io/badge/Release-v0.6.1-green)
+![](https://img.shields.io/badge/Release-v0.7.0-green)
![](https://img.shields.io/badge/License-Apache%202.0-blueviolet)
[![Slack](https://img.shields.io/badge/Join-Slack-orange.svg?logo=slack)](https://join.slack.com/t/xtreme1group/shared_invite/zt-1jhk36uzr-NpdpYXeQAEHN6rYJy5_6pg)
[![Twitter](https://img.shields.io/badge/Follow-Twitter-blue)](https://twitter.com/Xtreme1io)
@@ -58,8 +58,8 @@ Image Data Curation (Visualizing & Debug) - [MobileNetV3](https://github.com/xi
Download the latest release package and unzip it.
```bash
-wget https://github.com/xtreme1-io/xtreme1/releases/download/v0.6.1/xtreme1-v0.6.1.zip
-unzip -d xtreme1-v0.6.1 xtreme1-v0.6.1.zip
+wget https://github.com/xtreme1-io/xtreme1/releases/download/v0.7.0/xtreme1-v0.7.0.zip
+unzip -d xtreme1-v0.7.0 xtreme1-v0.7.0.zip
```
## Start all services
diff --git a/backend/Dockerfile b/backend/Dockerfile
index ac2db101..e31f5121 100644
--- a/backend/Dockerfile
+++ b/backend/Dockerfile
@@ -7,9 +7,9 @@ RUN --mount=type=cache,target=/root/.m2 mvn package
FROM openjdk:11-jre
RUN apt update && \
apt install -y iputils-ping curl wget netcat python3 python3-pip git
-RUN pip3 install --upgrade --force-reinstall git+https://github.com/xtreme1-io/xtreme1-sdk.git@bddaa8d
+RUN pip3 install --upgrade --force-reinstall git+https://github.com/xtreme1-io/xtreme1-sdk.git@d0cf4cc
WORKDIR /app
-COPY --from=build /build/target/xtreme1-backend-0.6.1-SNAPSHOT.jar ./app.jar
+COPY --from=build /build/target/xtreme1-backend-0.7.0-SNAPSHOT.jar ./app.jar
RUN mkdir -p config
RUN wget 'https://basicai-asset.s3.us-west-2.amazonaws.com/xtreme1/xtreme1-lidar-fusion-trial.zip' -O xtreme1-lidar-fusion-trial.zip
RUN wget 'https://basicai-asset.s3.us-west-2.amazonaws.com/xtreme1/xtreme1-image-trial.zip' -O xtreme1-image-trial.zip
diff --git a/backend/README.md b/backend/README.md
index 8e438de9..404099be 100644
--- a/backend/README.md
+++ b/backend/README.md
@@ -92,7 +92,7 @@ cd backend
mvn package
# Using local configuration to start application.
-java -Dspring.profiles.active=local -jar target/xtreme1-backend-0.6.1-SNAPSHOT.jar
+java -Dspring.profiles.active=local -jar target/xtreme1-backend-0.7.0-SNAPSHOT.jar
```
Now you can access the backend service at `http://localhost:8080/`.
diff --git a/backend/pom.xml b/backend/pom.xml
index 39c0801f..4a415144 100644
--- a/backend/pom.xml
+++ b/backend/pom.xml
@@ -13,7 +13,7 @@
ai.basic
xtreme1-backend
-
0.6.1-SNAPSHOT
+
0.7.0-SNAPSHOT
Xtreme1 Backend
diff --git a/backend/src/main/java/ai/basic/x1/adapter/dto/request/DatasetRequestDTO.java b/backend/src/main/java/ai/basic/x1/adapter/dto/request/DatasetRequestDTO.java
index 585db8de..46d31a48 100644
--- a/backend/src/main/java/ai/basic/x1/adapter/dto/request/DatasetRequestDTO.java
+++ b/backend/src/main/java/ai/basic/x1/adapter/dto/request/DatasetRequestDTO.java
@@ -34,7 +34,7 @@ public class DatasetRequestDTO {
* Dataset type LIDAR_FUSION, LIDAR_BASIC, IMAGE
*/
@NotEmpty(message = "dataset type cannot be null", groups = GroupInsert.class)
- @ValidStringEnum(message = "dataset type must be one of LIDAR_FUSION, LIDAR_BASIC, IMAGE", enumClass = DatasetTypeEnum.class)
+ @ValidStringEnum(message = "dataset type must be one of LIDAR_FUSION, LIDAR_BASIC, IMAGE,TEXT", enumClass = DatasetTypeEnum.class)
private String type;
/**
diff --git a/backend/src/main/java/ai/basic/x1/entity/TextDataContentBO.java b/backend/src/main/java/ai/basic/x1/entity/TextDataContentBO.java
new file mode 100644
index 00000000..cfbd50b2
--- /dev/null
+++ b/backend/src/main/java/ai/basic/x1/entity/TextDataContentBO.java
@@ -0,0 +1,21 @@
+package ai.basic.x1.entity;
+
+import lombok.AllArgsConstructor;
+import lombok.Builder;
+import lombok.Data;
+import lombok.NoArgsConstructor;
+
+@Data
+@Builder
+@NoArgsConstructor
+@AllArgsConstructor
+public class TextDataContentBO {
+
+ private String id;
+
+ private String parentId;
+
+ private String text;
+
+ private String role;
+}
diff --git a/backend/src/main/java/ai/basic/x1/entity/TextDataExportBO.java b/backend/src/main/java/ai/basic/x1/entity/TextDataExportBO.java
new file mode 100644
index 00000000..ad993d1b
--- /dev/null
+++ b/backend/src/main/java/ai/basic/x1/entity/TextDataExportBO.java
@@ -0,0 +1,24 @@
+package ai.basic.x1.entity;
+
+import lombok.AllArgsConstructor;
+import lombok.Builder;
+import lombok.Data;
+import lombok.NoArgsConstructor;
+
+@Data
+@Builder
+@NoArgsConstructor
+@AllArgsConstructor
+public class TextDataExportBO extends DataExportBaseBO {
+
+ /**
+ * Text url
+ */
+ private String textUrl;
+
+ /**
+ * The path in the compressed package
+ */
+ private String textZipPath;
+
+}
diff --git a/backend/src/main/java/ai/basic/x1/entity/enums/DatasetTypeEnum.java b/backend/src/main/java/ai/basic/x1/entity/enums/DatasetTypeEnum.java
index 09395786..baf31021 100644
--- a/backend/src/main/java/ai/basic/x1/entity/enums/DatasetTypeEnum.java
+++ b/backend/src/main/java/ai/basic/x1/entity/enums/DatasetTypeEnum.java
@@ -17,5 +17,9 @@ public enum DatasetTypeEnum {
/**
* IMAGE
*/
- IMAGE
+ IMAGE,
+ /**
+ * TEXT
+ */
+ TEXT
}
diff --git a/backend/src/main/java/ai/basic/x1/entity/enums/InputTypeEnum.java b/backend/src/main/java/ai/basic/x1/entity/enums/InputTypeEnum.java
index 15eb26bb..f233c93a 100644
--- a/backend/src/main/java/ai/basic/x1/entity/enums/InputTypeEnum.java
+++ b/backend/src/main/java/ai/basic/x1/entity/enums/InputTypeEnum.java
@@ -22,6 +22,8 @@ public enum InputTypeEnum {
/**
* TEXT
*/
- TEXT
+ TEXT,
+
+ LONG_TEXT
}
diff --git a/backend/src/main/java/ai/basic/x1/usecase/DataInfoUseCase.java b/backend/src/main/java/ai/basic/x1/usecase/DataInfoUseCase.java
index b8ee45bf..a665d4cb 100644
--- a/backend/src/main/java/ai/basic/x1/usecase/DataInfoUseCase.java
+++ b/backend/src/main/java/ai/basic/x1/usecase/DataInfoUseCase.java
@@ -3,7 +3,6 @@
import ai.basic.x1.adapter.api.config.DatasetInitialInfo;
import ai.basic.x1.adapter.api.context.RequestContextHolder;
import ai.basic.x1.adapter.dto.ApiResult;
-import ai.basic.x1.adapter.dto.DatasetClassDTO;
import ai.basic.x1.adapter.port.dao.*;
import ai.basic.x1.adapter.port.dao.mybatis.extension.ExtendLambdaQueryWrapper;
import ai.basic.x1.adapter.port.dao.mybatis.model.DataInfo;
@@ -18,7 +17,6 @@
import ai.basic.x1.usecase.exception.UsecaseCode;
import ai.basic.x1.usecase.exception.UsecaseException;
import ai.basic.x1.util.*;
-import cn.hutool.core.bean.BeanUtil;
import cn.hutool.core.collection.CollUtil;
import cn.hutool.core.collection.CollectionUtil;
import cn.hutool.core.collection.ListUtil;
@@ -29,6 +27,8 @@
import cn.hutool.core.io.FileUtil;
import cn.hutool.core.io.StreamProgress;
import cn.hutool.core.lang.UUID;
+import cn.hutool.core.lang.tree.Tree;
+import cn.hutool.core.lang.tree.TreeUtil;
import cn.hutool.core.thread.ThreadUtil;
import cn.hutool.core.util.*;
import cn.hutool.crypto.SecureUtil;
@@ -47,15 +47,18 @@
import org.springframework.dao.DuplicateKeyException;
import org.springframework.transaction.annotation.Transactional;
-import java.io.*;
import java.io.File;
+import java.io.FileFilter;
+import java.io.IOException;
import java.math.BigDecimal;
import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
import java.nio.file.Paths;
import java.time.OffsetDateTime;
import java.util.*;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
+import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Consumer;
import java.util.stream.Collectors;
@@ -63,6 +66,7 @@
import static ai.basic.x1.entity.enums.DataUploadSourceEnum.LOCAL;
import static ai.basic.x1.entity.enums.DatasetTypeEnum.IMAGE;
import static ai.basic.x1.entity.enums.DatasetTypeEnum.*;
+import static ai.basic.x1.entity.enums.DatasetTypeEnum.TEXT;
import static ai.basic.x1.entity.enums.RelationEnum.*;
import static ai.basic.x1.entity.enums.SplitTypeEnum.NOT_SPLIT;
import static ai.basic.x1.entity.enums.UploadStatusEnum.*;
@@ -185,11 +189,17 @@ public class DataInfoUseCase {
/**
* Filter out files whose file suffix is not image, and discard the file when it returns false
*/
- private final FileFilter filefilter = file -> {
+ private final FileFilter imageFileFilter = file -> {
//if the file extension is image return true, else false
- return IMAGE_DATA_TYPE.contains(FileUtil.getMimeType(file.getAbsolutePath())) && Constants.IMAGE.equals(FileUtil.getName(file.getParentFile()));
+ return IMAGE_DATA_TYPE.contains(FileUtil.getMimeType(file.getAbsolutePath())) && Constants.IMAGE.equalsIgnoreCase(FileUtil.getName(file.getParentFile()));
};
+ private final FileFilter textFileFilter = file -> {
+ //if the file extension is json return true, else false
+ return file.getAbsolutePath().toUpperCase().endsWith(JSON_SUFFIX) && Constants.TEXT.equalsIgnoreCase(FileUtil.getName(file.getParentFile()));
+ };
+
+
/**
* Data split
*
@@ -485,7 +495,9 @@ public List
insertBatch(List dataInfoBOList, Long datase
if (CollUtil.isNotEmpty(existDataInfoList)) {
var existNames = existDataInfoList.stream().map(DataInfoBO::getName).collect(Collectors.toList());
dataInfoBOList = dataInfoBOList.stream().filter(dataInfoBO -> !existNames.contains(dataInfoBO.getName())).collect(Collectors.toList());
- errorBuilder.append("Duplicate data names;");
+ if (!errorBuilder.toString().contains("Duplicate")) {
+ errorBuilder.append("Duplicate data names;");
+ }
}
if (CollUtil.isEmpty(dataInfoBOList)) {
return List.of();
@@ -535,11 +547,13 @@ public Long upload(DataInfoUploadBO dataInfoUploadBO) {
executorService.execute(Objects.requireNonNull(TtlRunnable.get(() -> {
try {
if (IMAGE.equals(dataset.getType()) && IMAGE_DATA_TYPE.contains(mimeType)) {
- downloadAndDecompressionFile(dataInfoUploadBO, this::parseImageUploadFile);
+ this.downloadAndDecompressionFile(dataInfoUploadBO, this::parseImageUploadFile);
} else if (IMAGE.equals(dataset.getType()) && COMPRESSED_DATA_TYPE.contains(mimeType)) {
- downloadAndDecompressionFile(dataInfoUploadBO, this::parseImageCompressedUploadFile);
+ this.downloadAndDecompressionFile(dataInfoUploadBO, this::parseImageCompressedUploadFile);
+ } else if (TEXT.equals(dataset.getType())) {
+ this.downloadAndDecompressionFile(dataInfoUploadBO, this::parseTextUploadFile);
} else {
- downloadAndDecompressionFile(dataInfoUploadBO, this::parsePointCloudUploadFile);
+ this.downloadAndDecompressionFile(dataInfoUploadBO, this::parsePointCloudUploadFile);
}
} catch (IOException e) {
log.error("Download decompression file error", e);
@@ -746,6 +760,83 @@ private void parseImageUploadFile(DataInfoUploadBO dataInfoUploadBO) {
datasetSimilarityJobUseCase.submitJob(datasetId);
}
+
+ public void parseTextUploadFile(DataInfoUploadBO dataInfoUploadBO) {
+ var userId = dataInfoUploadBO.getUserId();
+ var datasetId = dataInfoUploadBO.getDatasetId();
+ var files = FileUtil.loopFiles(Paths.get(dataInfoUploadBO.getBaseSavePath()), 10, textFileFilter);
+ var rootPath = String.format("%s/%s", userId, datasetId);
+ var errorBuilder = new StringBuilder();
+ var dataInfoBOBuilder = DataInfoBO.builder().datasetId(datasetId).status(DataStatusEnum.VALID)
+ .annotationStatus(DataAnnotationStatusEnum.NOT_ANNOTATED)
+ .createdAt(OffsetDateTime.now())
+ .updatedAt(OffsetDateTime.now())
+ .createdBy(userId)
+ .isDeleted(false);
+ var totalDataNum = Long.valueOf(files.size());
+ AtomicReference parsedDataNum = new AtomicReference<>(0L);
+ var uploadRecordBOBuilder = UploadRecordBO.builder()
+ .id(dataInfoUploadBO.getUploadRecordId()).totalDataNum(totalDataNum).parsedDataNum(parsedDataNum.get()).status(PARSING);
+ if (CollectionUtil.isNotEmpty(files)) {
+ CountDownLatch countDownLatch = new CountDownLatch(files.size());
+ files.forEach(f -> parseExecutorService.submit(Objects.requireNonNull(TtlRunnable.get(() -> {
+ try {
+ var dataInfoBOList = new ArrayList();
+ var textJson = JSONUtil.readJSONArray(f, StandardCharsets.UTF_8);
+ var list = JSONUtil.toList(textJson.toString(), TextDataContentBO.class);
+ var pathList = this.getTreeAllPath(list);
+ if (CollUtil.isEmpty(pathList)) {
+ return;
+ }
+ var newTextFileList = new ArrayList();
+ AtomicInteger i = new AtomicInteger(1);
+ pathList.forEach(path -> {
+ ListUtil.reverse(path);
+ var suffix = FileUtil.getSuffix(f);
+ var originalPath = f.getAbsolutePath();
+ var newPath = String.format("%s_%s.%s", StrUtil.removeSuffix(originalPath, String.format(".%s", suffix)), i.get(), suffix);
+ var file = FileUtil.writeString(JSONUtil.toJsonStr(path), newPath, StandardCharsets.UTF_8);
+ newTextFileList.add(file);
+ i.getAndIncrement();
+ });
+ var fileBOS = uploadFileList(rootPath, newTextFileList, dataInfoUploadBO);
+ createUploadThumbnail(userId, fileBOS, rootPath);
+ fileBOS.forEach(fileBO -> {
+ var tempDataId = ByteUtil.bytesToLong(SecureUtil.md5().digest(UUID.randomUUID().toString()));
+ var file = FileUtil.file(tempPath + fileBO.getPath().replace(rootPath, ""));
+ var fileNodeBO = DataInfoBO.FileNodeBO.builder().name(fileBO.getName())
+ .fileId(fileBO.getId()).type(FILE).build();
+ var dataInfoBO = dataInfoBOBuilder.name(getFileName(file)).content(Collections.singletonList(fileNodeBO)).splitType(NOT_SPLIT).tempDataId(tempDataId).build();
+ dataInfoBOList.add(dataInfoBO);
+ });
+ if (CollectionUtil.isNotEmpty(dataInfoBOList)) {
+ insertBatch(dataInfoBOList, datasetId, errorBuilder);
+ }
+ } catch (Exception e) {
+ log.error("Handle data error", e);
+ } finally {
+ parsedDataNum.set(parsedDataNum.get() + 1);
+ var uploadRecordBO = uploadRecordBOBuilder.parsedDataNum(parsedDataNum.get()).build();
+ uploadRecordDAO.updateById(DefaultConverter.convert(uploadRecordBO, UploadRecord.class));
+ countDownLatch.countDown();
+ }
+
+ }))));
+ try {
+ countDownLatch.await();
+ } catch (InterruptedException e) {
+ log.error("Parse text count down latch error", e);
+ }
+ var uploadRecordBO = uploadRecordBOBuilder.parsedDataNum(totalDataNum).errorMessage(errorBuilder.toString()).status(PARSE_COMPLETED).build();
+ uploadRecordDAO.updateById(DefaultConverter.convert(uploadRecordBO, UploadRecord.class));
+ datasetSimilarityJobUseCase.submitJob(datasetId);
+ } else {
+ var uploadRecordBO = uploadRecordBOBuilder.status(FAILED).errorMessage(COMPRESSED_PACKAGE_EMPTY.getMessage()).build();
+ uploadRecordDAO.updateById(DefaultConverter.convert(uploadRecordBO, UploadRecord.class));
+ log.error("Text compressed package is empty,dataset id:{},filePath:{}", datasetId, dataInfoUploadBO.getFileUrl());
+ }
+ }
+
public void parseImageCompressedUploadFile(DataInfoUploadBO dataInfoUploadBO) {
if (DataFormatEnum.COCO.equals(dataInfoUploadBO.getDataFormat())) {
var respPath = cocoConvertToX1(dataInfoUploadBO);
@@ -762,7 +853,7 @@ public void parseImageCompressedUploadFile(DataInfoUploadBO dataInfoUploadBO) {
}
var userId = dataInfoUploadBO.getUserId();
var datasetId = dataInfoUploadBO.getDatasetId();
- var files = FileUtil.loopFiles(Paths.get(dataInfoUploadBO.getBaseSavePath()), 3, filefilter);
+ var files = FileUtil.loopFiles(Paths.get(dataInfoUploadBO.getBaseSavePath()), 3, imageFileFilter);
var rootPath = String.format("%s/%s", userId, datasetId);
var dataAnnotationObjectBOBuilder = DataAnnotationObjectBO.builder()
.datasetId(datasetId).createdBy(userId).createdAt(OffsetDateTime.now());
@@ -1660,6 +1751,8 @@ private DataExportBaseBO assembleExportDataContent(DataInfoBO dataInfoBO, Datase
String pointCloudZipPath = null;
String cameraConfigUrl = null;
String cameraConfigZipPath = null;
+ String textUrl = null;
+ String textZipPath = null;
var images = new ArrayList();
for (DataInfoBO.FileNodeBO f : dataInfoBO.getContent()) {
var relationFileBO = FILE.equals(f.getType()) ? f.getFile() : CollectionUtil.getFirst(f.getFiles()).getFile();
@@ -1669,6 +1762,9 @@ private DataExportBaseBO assembleExportDataContent(DataInfoBO dataInfoBO, Datase
} else if (f.getName().equals(StrUtil.toCamelCase(CAMERA_CONFIG))) {
cameraConfigUrl = relationFileBO.getUrl();
cameraConfigZipPath = relationFileBO.getZipPath();
+ } else if (f.getName().toUpperCase().endsWith(JSON_SUFFIX) && TEXT.equals(datasetType)) {
+ textUrl = relationFileBO.getUrl();
+ textZipPath = relationFileBO.getZipPath();
} else {
var url = relationFileBO.getUrl();
var zipPath = relationFileBO.getZipPath();
@@ -1703,6 +1799,11 @@ private DataExportBaseBO assembleExportDataContent(DataInfoBO dataInfoBO, Datase
((ImageDataExportBO) dataExportBaseBO).setHeight(image.getHeight());
((ImageDataExportBO) dataExportBaseBO).setFilePath(image.getFilePath());
break;
+ case TEXT:
+ dataExportBaseBO = DefaultConverter.convert(dataExportBaseBO, TextDataExportBO.class);
+ ((TextDataExportBO) dataExportBaseBO).setTextUrl(textUrl);
+ ((TextDataExportBO) dataExportBaseBO).setTextZipPath(textZipPath);
+ break;
default:
break;
}
@@ -2038,4 +2139,60 @@ public DataInfoBO getInitDataInfoBO(DatasetInitialInfo datasetInitialInfo) {
return dataInfoBO;
}
+ /**
+ * Get all paths in the tree list
+ *
+ * @param list tree list
+ * @return
+ */
+ private List> getTreeAllPath(List list) {
+ list = list.stream().filter(t -> StrUtil.isNotEmpty(t.getId()) && StrUtil.isNotEmpty(t.getRole()) && StrUtil.isNotEmpty(t.getText())).collect(Collectors.toList());
+ if (CollUtil.isEmpty(list)) {
+ return List.of();
+ }
+ // convert to tree
+ List> treeNodes = TreeUtil.build(list, null,
+ (treeNode, tree) -> {
+ tree.setId(treeNode.getId());
+ tree.setParentId(treeNode.getParentId());
+ tree.setName(treeNode.getId());
+ // Extended properties ...
+ tree.putExtra("text", treeNode.getText());
+ tree.putExtra("role", treeNode.getRole());
+ });
+
+ var leafNodeList = new ArrayList>();
+ getLeafNodeList(treeNodes, leafNodeList);
+ // get all links
+ List> paths = new ArrayList<>();
+ for (Tree treeNode : leafNodeList) {
+ List path = new ArrayList<>();
+ path.add(DefaultConverter.convert(treeNode, TextDataContentBO.class));
+ Tree parent = treeNode.getParent();
+ while (parent != null) {
+ if (ObjectUtil.isNotNull(parent.getId())) {
+ path.add(DefaultConverter.convert(parent, TextDataContentBO.class));
+ }
+ parent = parent.getParent();
+ }
+ paths.add(path);
+ }
+ return paths;
+ }
+
+ /**
+ * Get all leaf nodes under the tree
+ *
+ * @param treeNodes tree node
+ * @param leafNodeList collection of leaf nodes
+ */
+ private void getLeafNodeList(List> treeNodes, List> leafNodeList) {
+ treeNodes.forEach(tree -> {
+ if (CollUtil.isNotEmpty(tree.getChildren())) {
+ getLeafNodeList(tree.getChildren(), leafNodeList);
+ } else {
+ leafNodeList.add(tree);
+ }
+ });
+ }
}
diff --git a/backend/src/main/java/ai/basic/x1/usecase/ExportUseCase.java b/backend/src/main/java/ai/basic/x1/usecase/ExportUseCase.java
index 93ba98ba..ca34b0bb 100644
--- a/backend/src/main/java/ai/basic/x1/usecase/ExportUseCase.java
+++ b/backend/src/main/java/ai/basic/x1/usecase/ExportUseCase.java
@@ -183,11 +183,11 @@ private void writeFile(List list, String zipPath,
logger.error("Download object error", e);
}
}
- var dataPath = String.format("%s/%s/%s-%s%s", zipPath, Constants.DATA, dataExportBaseBO.getName(), dataExportBaseBO.getId(), ".json");
+ var dataPath = String.format("%s/%s/%s%s", zipPath, Constants.DATA, dataExportBaseBO.getName(), ".json");
FileUtil.writeString(JSONUtil.toJsonStr(dataExportBaseBO, jsonConfig), dataPath, StandardCharsets.UTF_8);
if (ObjectUtil.isNotNull(dataExportBO.getResult())) {
- var resultPath = String.format("%s/%s/%s-%s%s", zipPath, Constants.RESULT,
- dataExportBaseBO.getName(), dataExportBaseBO.getId(), ".json");
+ var resultPath = String.format("%s/%s/%s%s", zipPath, Constants.RESULT,
+ dataExportBaseBO.getName(), ".json");
FileUtil.writeString(JSONUtil.toJsonStr(dataExportBO.getResult(), jsonConfig), resultPath, StandardCharsets.UTF_8);
}
});
diff --git a/backend/src/main/java/ai/basic/x1/util/ClassificationUtils.java b/backend/src/main/java/ai/basic/x1/util/ClassificationUtils.java
index 7d847471..3f9a6b38 100644
--- a/backend/src/main/java/ai/basic/x1/util/ClassificationUtils.java
+++ b/backend/src/main/java/ai/basic/x1/util/ClassificationUtils.java
@@ -41,10 +41,10 @@ private static List parse(DataAnnotationClassification
var classificationNodes = JSONUtil.toList(values, ClassificationNode.class);
var classificationNodeMap =
classificationNodes.stream().collect(Collectors.toUnmodifiableMap(ClassificationNode::getId,
- t -> t));
+ t -> t));
return classificationNodes.stream()
- .filter(e -> e.isLeaf && !Objects.equals(e.getType(), InputTypeEnum.TEXT))
+ .filter(e -> e.isLeaf && !Objects.equals(e.getType(), InputTypeEnum.TEXT) && !Objects.equals(e.getType(), InputTypeEnum.LONG_TEXT))
.map(leafNode -> convert(leafNode, classificationNodeMap, dataAnnotation))
.flatMap(Collection::stream)
.collect(Collectors.toList());
@@ -54,7 +54,7 @@ private static List parse(DataAnnotationClassification
}
private static List convert(ClassificationNode leafNode, Map classificationNodeMap, DataAnnotationClassificationBO dataAnnotation) {
+ ClassificationNode> classificationNodeMap, DataAnnotationClassificationBO dataAnnotation) {
var results = new ArrayList();
if (leafNode.value instanceof Collection) {
Collection optionNames = (Collection) leafNode.value;
diff --git a/backend/src/main/java/ai/basic/x1/util/Constants.java b/backend/src/main/java/ai/basic/x1/util/Constants.java
index a3533550..2de27070 100644
--- a/backend/src/main/java/ai/basic/x1/util/Constants.java
+++ b/backend/src/main/java/ai/basic/x1/util/Constants.java
@@ -79,6 +79,11 @@ public interface Constants {
*/
String IMAGE = "image";
+ /**
+ * text
+ */
+ String TEXT = "text";
+
String RESULT = "result";
String SLANTING_BAR = "/";
diff --git a/deploy/mysql/migration/V1__Create_tables.sql b/deploy/mysql/migration/V1__Create_tables.sql
index 1b539da6..cc009ffc 100644
--- a/deploy/mysql/migration/V1__Create_tables.sql
+++ b/deploy/mysql/migration/V1__Create_tables.sql
@@ -33,7 +33,7 @@ CREATE TABLE `classification`
`ontology_id` bigint(20) NOT NULL,
`name` varchar(256) NOT NULL,
`is_required` bit(1) NOT NULL DEFAULT b'0',
- `input_type` enum('RADIO','TEXT','MULTI_SELECTION','DROPDOWN') CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NOT NULL,
+ `input_type` enum('RADIO','TEXT','MULTI_SELECTION','DROPDOWN','LONG_TEXT') CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NOT NULL,
`attribute` json DEFAULT NULL,
`created_at` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,
`created_by` bigint(20) NOT NULL,
@@ -169,7 +169,7 @@ CREATE TABLE `dataset`
(
`id` bigint(20) NOT NULL AUTO_INCREMENT COMMENT 'Primary key',
`name` varchar(255) NOT NULL COMMENT 'Dataset name',
- `type` enum('LIDAR_FUSION','LIDAR_BASIC','IMAGE') NOT NULL DEFAULT 'LIDAR_FUSION' COMMENT 'Dataset type LIDAR_FUSION,LIDAR_BASIC,IMAGE',
+ `type` enum('LIDAR_FUSION','LIDAR_BASIC','IMAGE','TEXT') NOT NULL DEFAULT 'LIDAR_FUSION' COMMENT 'Dataset type LIDAR_FUSION,LIDAR_BASIC,IMAGE,TEXT',
`description` text COMMENT 'Dataset description',
`is_deleted` bit(1) NOT NULL DEFAULT b'0' COMMENT 'Is deleted',
`del_unique_key` bigint(20) NOT NULL DEFAULT '0' COMMENT 'Delete unique flag, 0 when writing, set as primary key id after tombstone',
@@ -230,7 +230,7 @@ CREATE TABLE `dataset_classification`
`dataset_id` bigint(20) NOT NULL,
`name` varchar(256) NOT NULL,
`is_required` bit(1) NOT NULL DEFAULT b'0',
- `input_type` enum('RADIO','TEXT','MULTI_SELECTION','DROPDOWN') CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NOT NULL,
+ `input_type` enum('RADIO','TEXT','MULTI_SELECTION','DROPDOWN','LONG_TEXT') CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NOT NULL,
`attribute` json DEFAULT NULL,
`created_at` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,
`created_by` bigint(20) DEFAULT NULL,
@@ -424,7 +424,7 @@ CREATE TABLE `model_run_record`
(
`id` bigint(20) NOT NULL AUTO_INCREMENT COMMENT 'Primary key',
`model_id` bigint(20) NOT NULL COMMENT 'Model id',
- `model_version` varchar(255) NOT NULL COMMENT 'Model version',
+ `model_version` varchar(255) DEFAULT NULL COMMENT 'Model version',
`run_no` varchar(20) NOT NULL COMMENT 'Serial number(For interface display)',
`dataset_id` bigint(20) NOT NULL COMMENT 'Dataset id',
`status` enum('STARTED','RUNNING','SUCCESS','FAILURE','SUCCESS_WITH_ERROR') CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NOT NULL COMMENT 'Model running status',
diff --git a/deploy/nginx/conf.d/default.conf b/deploy/nginx/conf.d/default.conf
index 85741c61..7c4b2517 100644
--- a/deploy/nginx/conf.d/default.conf
+++ b/deploy/nginx/conf.d/default.conf
@@ -55,6 +55,22 @@ server {
proxy_pass http://frontend:80/pc-tool/;
}
+ location /tool/text {
+ set $no_cache 0;
+ if ($uri ~* ^/tool/text[/]?$) {
+ set $no_cache 1;
+ }
+ if ($uri ~* \.(?:html|json)$) {
+ set $no_cache 1;
+ }
+ if ($no_cache = 1) {
+ add_header Cache-Control "no-store,no-cache";
+ add_header Pragma "no-cache";
+ }
+
+ proxy_pass http://frontend:80/text-tool/;
+ }
+
location /api/ {
proxy_set_header Host $http_host;
proxy_set_header X-Forwarded-Proto $scheme;
diff --git a/docker-compose.yml b/docker-compose.yml
index b3c8b985..fedb7f0a 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -65,7 +65,7 @@ services:
retries: 10
backend:
# By default, Compose will pull image from Docker Hub when no local image found.
- image: basicai/xtreme1-backend:v0.6.1
+ image: basicai/xtreme1-backend:v0.7.0
# Uncomment this line and comment previous line to build image locally, not pull from Docker Hub.
# build: ./backend
ports:
@@ -95,7 +95,7 @@ services:
condition: service_healthy
frontend:
# By default, Compose will pull image from Docker Hub when no local image found.
- image: basicai/xtreme1-frontend:v0.6.1
+ image: basicai/xtreme1-frontend:v0.7.0
# Uncomment this line and comment previous line to build image locally, not pull from Docker Hub.
# build: ./frontend
ports:
diff --git a/frontend/.dockerignore b/frontend/.dockerignore
index 314420ef..422f29e0 100644
--- a/frontend/.dockerignore
+++ b/frontend/.dockerignore
@@ -2,4 +2,5 @@ dist/
main/node_modules/
pc-tool/node_modules/
-image-tool/node_modules/
\ No newline at end of file
+image-tool/node_modules/
+text-tool/node_modules/
\ No newline at end of file
diff --git a/frontend/Dockerfile b/frontend/Dockerfile
index 697d23bb..6a5379d7 100644
--- a/frontend/Dockerfile
+++ b/frontend/Dockerfile
@@ -8,6 +8,8 @@ RUN --mount=type=cache,target=image-tool/node_modules \
cd image-tool && npm install && npm run build
RUN --mount=type=cache,target=pc-tool/node_modules \
cd pc-tool && npm install && npm run build
+RUN --mount=type=cache,target=text-tool/node_modules \
+ cd text-tool && npm install && npm run build
FROM nginx:1.22
COPY --from=build /build/dist /usr/share/nginx/html
diff --git a/frontend/README.md b/frontend/README.md
index 41510667..4d1ed54c 100644
--- a/frontend/README.md
+++ b/frontend/README.md
@@ -6,15 +6,17 @@ Xtreme1 frontend mainly includes three parts, each part is a separate project an
- [main](./main/README.md) - Main web application
- [pc-tool](./pc-tool/README.md) - Point cloud annotation tool
- [image-tool](./image-tool/README.md) - Image annotation tool
+- [text-tool](./text-tool/README.md) - Text annotation tool
## Deploy
Each part is developed independently, and finally the static files are put together to `frontend/dist`.
-`main` build to `/main` -- nginx: `/`
-`pc-tool` build to `/pc-tool` -- nginx: `/tool/pc`
-`image-tool` build to `/image-tool` -- nginx: `/tool/image`
+`main` build to `/main` -- nginx: `/`
+`pc-tool` build to `/pc-tool` -- nginx: `/tool/pc`
+`image-tool` build to `/image-tool` -- nginx: `/tool/image`
+`text-tool` build to `/text-tool` -- nginx: `/tool/text`
For more information, refer to `.ops/**/frontend-deployment.yml`
diff --git a/frontend/main/src/api/business/model/classesModel.ts b/frontend/main/src/api/business/model/classesModel.ts
index f9728b69..97f3c049 100644
--- a/frontend/main/src/api/business/model/classesModel.ts
+++ b/frontend/main/src/api/business/model/classesModel.ts
@@ -28,6 +28,7 @@ export enum inputTypeEnum {
MULTI_SELECTION = 'MULTI_SELECTION',
DROPDOWN = 'DROPDOWN',
TEXT = 'TEXT',
+ LONG_TEXT = 'LONG_TEXT',
}
/** searchForm params */
diff --git a/frontend/main/src/api/business/model/datasetModel.ts b/frontend/main/src/api/business/model/datasetModel.ts
index 6eaf4ca6..5ed88572 100644
--- a/frontend/main/src/api/business/model/datasetModel.ts
+++ b/frontend/main/src/api/business/model/datasetModel.ts
@@ -99,6 +99,7 @@ export enum datasetTypeEnum {
LIDAR_BASIC = 'LIDAR_BASIC',
IMAGE = 'IMAGE',
LIDAR = 'LIDAR',
+ TEXT = 'TEXT',
}
export interface fileItem {
diff --git a/frontend/main/src/assets/images/dataset/textEmpty.png b/frontend/main/src/assets/images/dataset/textEmpty.png
new file mode 100644
index 00000000..f9c95198
Binary files /dev/null and b/frontend/main/src/assets/images/dataset/textEmpty.png differ
diff --git a/frontend/main/src/assets/images/dataset/textImg.png b/frontend/main/src/assets/images/dataset/textImg.png
new file mode 100644
index 00000000..2365e798
Binary files /dev/null and b/frontend/main/src/assets/images/dataset/textImg.png differ
diff --git a/frontend/main/src/assets/images/dataset/text_type.png b/frontend/main/src/assets/images/dataset/text_type.png
new file mode 100644
index 00000000..b49b82f8
Binary files /dev/null and b/frontend/main/src/assets/images/dataset/text_type.png differ
diff --git a/frontend/main/src/components/BasicCustom/CustomRadio/index.vue b/frontend/main/src/components/BasicCustom/CustomRadio/index.vue
index 0156a54d..2bb25969 100644
--- a/frontend/main/src/components/BasicCustom/CustomRadio/index.vue
+++ b/frontend/main/src/components/BasicCustom/CustomRadio/index.vue
@@ -1,7 +1,7 @@
-
-
+ {
@@ -10,10 +10,8 @@
"
>
All
-
-
-
@@ -53,20 +51,21 @@
// display: flex;
margin-top: 6px;
font-size: 12px;
- // .options {
- .item {
- cursor: pointer;
- margin-right: 10px;
+ .options {
+ .item {
+ display: inline-flex;
+ cursor: pointer;
+ margin-right: 5px;
- &:last-child {
- margin-right: 0;
- }
+ &:last-child {
+ margin-right: 0;
+ }
- &.active {
- color: @primary-color;
+ &.active {
+ color: @primary-color;
+ }
}
}
- // }
// .box {
// text-align: center;
// }
diff --git a/frontend/main/src/utils/business/index.ts b/frontend/main/src/utils/business/index.ts
index 969ed817..216e0b74 100644
--- a/frontend/main/src/utils/business/index.ts
+++ b/frontend/main/src/utils/business/index.ts
@@ -3,12 +3,27 @@ import qs from 'qs';
import { datasetTypeEnum } from '/@/api/business/model/datasetModel';
export const goToTool = (query: any, type?: datasetTypeEnum) => {
- let toolPath = type === datasetTypeEnum.IMAGE ? '/tool/image' : '/tool/pc';
+ let toolPath;
+ console.log(type === datasetTypeEnum.TEXT);
+ switch (type) {
+ case datasetTypeEnum.IMAGE:
+ toolPath = '/tool/image';
+ break;
+ case datasetTypeEnum.TEXT:
+ toolPath = '/tool/text';
+ break;
+ default:
+ toolPath = '/tool/pc';
+ break;
+ }
if (import.meta.env.DEV) {
switch (type) {
case datasetTypeEnum.IMAGE:
toolPath = 'http://localhost:3300/tool/image';
break;
+ case datasetTypeEnum.TEXT:
+ toolPath = 'http://localhost:3300/tool/text';
+ break;
default:
toolPath = 'http://localhost:3200/tool/pc';
break;
diff --git a/frontend/main/src/views/datasets/datasetClass/components/formSchemas.tsx b/frontend/main/src/views/datasets/datasetClass/components/formSchemas.tsx
index 338f353a..e82211f3 100644
--- a/frontend/main/src/views/datasets/datasetClass/components/formSchemas.tsx
+++ b/frontend/main/src/views/datasets/datasetClass/components/formSchemas.tsx
@@ -83,6 +83,7 @@ export const inputItemImg = {
[inputType.MultiSelection]: checkIcon,
[inputType.Dropdown]: dropdownIcon,
[inputType.Text]: textIcon,
+ [inputType.LongText]: textIcon,
};
function getOption() {
diff --git a/frontend/main/src/views/datasets/datasetClass/components/typing.ts b/frontend/main/src/views/datasets/datasetClass/components/typing.ts
index e6b30a7c..6bc986fc 100644
--- a/frontend/main/src/views/datasets/datasetClass/components/typing.ts
+++ b/frontend/main/src/views/datasets/datasetClass/components/typing.ts
@@ -5,6 +5,7 @@ export enum inputType {
MultiSelection = 'MULTI_SELECTION',
Dropdown = 'DROPDOWN',
Text = 'TEXT',
+ LongText = 'LONG_TEXT',
}
export enum formType {
diff --git a/frontend/main/src/views/datasets/datasetContent/components/ExportModal.vue b/frontend/main/src/views/datasets/datasetContent/components/ExportModal.vue
index 0419b7c4..0bf22790 100644
--- a/frontend/main/src/views/datasets/datasetContent/components/ExportModal.vue
+++ b/frontend/main/src/views/datasets/datasetContent/components/ExportModal.vue
@@ -88,7 +88,7 @@
const dataFormat = ref
('XTREME1');
let dataFormatOption = computed(() => {
- return props.datasetType?.includes('LIDAR')
+ return props.datasetType?.includes('LIDAR') || props.datasetType?.includes('TEXT')
? [
{
value: 'XTREME1',
diff --git a/frontend/main/src/views/datasets/datasetContent/components/ImgCard.vue b/frontend/main/src/views/datasets/datasetContent/components/ImgCard.vue
index 2c758eb9..be342971 100644
--- a/frontend/main/src/views/datasets/datasetContent/components/ImgCard.vue
+++ b/frontend/main/src/views/datasets/datasetContent/components/ImgCard.vue
@@ -66,50 +66,54 @@