Merge pull request #15 from nameczz/dev
Fix binary vector handling when querying data, and disable multiple vector fields when creating a collection
shanghaikid authored Jan 20, 2022
2 parents 41e0d45 + 2ffffd8 commit 4bae96f
Showing 8 changed files with 79 additions and 27 deletions.
3 changes: 3 additions & 0 deletions .github/workflows/release.yml
@@ -27,5 +27,8 @@ jobs:
- name: Docker tag
run: docker tag zilliz/attu:${GITHUB_REF#refs/tags/} zilliz/attu:latest

- name: Docker Push version
run: docker push zilliz/attu:${GITHUB_REF#refs/tags/}

- name: Docker Push latest
run: docker push zilliz/attu
2 changes: 1 addition & 1 deletion .gitignore
@@ -9,7 +9,7 @@ node_modules

# testing
/client/coverage
/client/vectors.csv
**/vectors.csv

# production

2 changes: 1 addition & 1 deletion client/src/pages/collections/Constants.ts
@@ -13,7 +13,7 @@ export const VECTOR_FIELDS_OPTIONS: KeyValuePair[] = [
];

export const ALL_OPTIONS: KeyValuePair[] = [
...VECTOR_FIELDS_OPTIONS,
// ...VECTOR_FIELDS_OPTIONS,
{
label: 'Int8',
value: DataTypeEnum.Int8,
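The net effect of this hunk: vector types are dropped from ALL_OPTIONS, so the create-collection form only offers scalar types for additional fields and a collection keeps the single vector field defined up front. A minimal sketch of that effect, assuming the label/value option shape shown above; the enum values and the option lists are illustrative, not the real Constants.ts contents:

```ts
// Illustrative sketch only; real values live in client/src/pages/collections/Constants.ts.
enum DataTypeEnum {
  Int8 = 2,
  Int64 = 5,
  BinaryVector = 100,
  FloatVector = 101,
}

interface KeyValuePair {
  label: string;
  value: DataTypeEnum;
}

const VECTOR_FIELDS_OPTIONS: KeyValuePair[] = [
  { label: 'Binary Vector', value: DataTypeEnum.BinaryVector },
  { label: 'Float Vector', value: DataTypeEnum.FloatVector },
];

// Before: [...VECTOR_FIELDS_OPTIONS, ...scalar options] — extra vector fields were offered.
// After: scalar options only — additional fields cannot be vectors.
const ALL_OPTIONS: KeyValuePair[] = [
  { label: 'Int8', value: DataTypeEnum.Int8 },
  { label: 'Int64', value: DataTypeEnum.Int64 },
];
```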
44 changes: 25 additions & 19 deletions client/src/plugins/search/VectorSearch.tsx
@@ -123,21 +123,20 @@ const VectorSearch = () => {
*/
return searchResult && searchResult.length > 0
? Object.keys(searchResult[0])
.filter(item => {
// if primary key field name is id, don't filter it
const invalidItems =
primaryKeyField === 'id' ? ['score'] : ['id', 'score'];
return !invalidItems.includes(item);
})
.map(key => ({
id: key,
align: 'left',
disablePadding: false,
label: key,
}))
.filter(item => {
// if primary key field name is id, don't filter it
const invalidItems =
primaryKeyField === 'id' ? ['score'] : ['id', 'score'];
return !invalidItems.includes(item);
})
.map(key => ({
id: key,
align: 'left',
disablePadding: false,
label: key,
}))
: [];
}, [searchResult, primaryKeyField]);

const {
metricType,
indexType,
@@ -157,7 +156,6 @@
index?._metricType || DEFAULT_METRIC_VALUE_MAP[embeddingType];
const indexParams = index?._indexParameterPairs || [];
const dim = selectedFieldInfo?.dimension || 0;

return {
metricType: metric,
indexType: index?._indexType || getDefaultIndexType(embeddingType),
@@ -187,10 +185,15 @@
if (vectors === '' || selectedFieldDimension === 0) {
return true;
}
const dim =
fieldType === DataTypeEnum.BinaryVector
? selectedFieldDimension / 8
: selectedFieldDimension;
console.log(fieldType);
const value = parseValue(vectors);
const isArray = Array.isArray(value);
return isArray && value.length === selectedFieldDimension;
}, [vectors, selectedFieldDimension]);
return isArray && value.length === dim;
}, [vectors, selectedFieldDimension, fieldType]);

const searchDisabled = useMemo(() => {
/**
@@ -374,7 +377,10 @@
{!vectorValueValid && (
<Typography variant="caption" className={classes.error}>
{searchTrans('vectorValueWarning', {
dimension: selectedFieldDimension,
dimension:
fieldType === DataTypeEnum.BinaryVector
? selectedFieldDimension / 8
: selectedFieldDimension,
})}
</Typography>
)}
@@ -421,8 +427,8 @@
metricType={metricType!}
embeddingType={
embeddingType as
| DataTypeEnum.BinaryVector
| DataTypeEnum.FloatVector
| DataTypeEnum.BinaryVector
| DataTypeEnum.FloatVector
}
indexType={indexType}
indexParams={indexParams!}
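The substance of the binary-vector fix above: Milvus stores a binary vector of dimension d as d / 8 bytes, so when the selected field is a BinaryVector the entered array must contain selectedFieldDimension / 8 numbers, and the warning message now reports that divided dimension as well. A self-contained sketch of the validation under that assumption (parseValue is assumed to be a thin JSON.parse wrapper; only DataTypeEnum.BinaryVector comes from the diff itself):

```ts
// Standalone sketch of the validation added in VectorSearch.tsx above.
enum DataTypeEnum {
  BinaryVector = 100,
  FloatVector = 101,
}

// Assumed helper: parse the textarea input as JSON, returning undefined on failure.
const parseValue = (input: string): unknown => {
  try {
    return JSON.parse(input);
  } catch {
    return undefined;
  }
};

// A binary vector of dimension d is packed 8 bits per byte,
// so the user-supplied array must contain d / 8 numbers.
const isVectorValueValid = (
  vectors: string,
  fieldType: DataTypeEnum,
  selectedFieldDimension: number
): boolean => {
  if (vectors === '' || selectedFieldDimension === 0) {
    return true;
  }
  const dim =
    fieldType === DataTypeEnum.BinaryVector
      ? selectedFieldDimension / 8
      : selectedFieldDimension;
  const value = parseValue(vectors);
  return Array.isArray(value) && value.length === dim;
};

// e.g. a 16-dimensional binary vector is entered as 2 bytes:
// isVectorValueValid('[255, 0]', DataTypeEnum.BinaryVector, 16) === true
```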
30 changes: 30 additions & 0 deletions express/generate-csv.ts
@@ -0,0 +1,30 @@
import { createObjectCsvWriter as createCsvWriter } from 'csv-writer';

// used to generate test data for vector insert
const csvWriter = createCsvWriter({
path: './vectors.csv',
header: [{ id: 'vector', title: 'vector' }],
});

const records = [];

const generateVector = (dimension: number) => {
let index = 0;
const vectors = [];
while (index < dimension) {
vectors.push(1 + Math.random());
index++;
}
return JSON.stringify(vectors);
};

while (records.length < 50000) {
const value = generateVector(8);
records.push({ vector: value });
}

csvWriter
.writeRecords(records) // returns a promise
.then(() => {
console.log('...Done');
});
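generate-csv.ts above writes 50,000 8-dimensional float vectors to ./vectors.csv for insert testing (the new **/vectors.csv ignore rule keeps that output out of git). For completeness, a hypothetical binary-vector variant under the same dim / 8 convention used in the search fix; the function name and the example call are assumptions, not part of this commit:

```ts
// Hypothetical counterpart to generateVector for binary vector fields:
// a binary vector of `dimension` bits is written as dimension / 8 bytes (0–255).
const generateBinaryVector = (dimension: number): string => {
  const bytes: number[] = [];
  for (let i = 0; i < dimension / 8; i++) {
    bytes.push(Math.floor(Math.random() * 256));
  }
  return JSON.stringify(bytes);
};

// e.g. generateBinaryVector(16) might return "[137,42]"
```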
5 changes: 3 additions & 2 deletions express/package.json
@@ -12,13 +12,14 @@
"url": "https://github.com/zilliztech/attu"
},
"dependencies": {
"@zilliz/milvus2-sdk-node": "^1.0.19",
"@zilliz/milvus2-sdk-node": "^1.1.0",
"chalk": "^4.1.2",
"class-sanitizer": "^1.0.1",
"class-transformer": "^0.4.0",
"class-validator": "^0.13.1",
"cors": "^2.8.5",
"cross-env": "^7.0.3",
"csv-writer": "^1.6.0",
"express": "^4.17.1",
"glob": "^7.2.0",
"helmet": "^4.6.0",
@@ -137,4 +138,4 @@
]
}
}
}
}
1 change: 1 addition & 0 deletions express/src/middlewares/index.ts
@@ -14,6 +14,7 @@ export const ReqHeaderMiddleware = (
// all api requests need to set the milvus address in the header.
// the server will set the active address in the milvus service.
const milvusAddress = (req.headers[MILVUS_ADDRESS] as string) || '';

// only api requests carry MILVUS_ADDRESS.
// When the client runs inside express, static files (e.g. xx.js) should not run this logic;
// otherwise it will cause a 401 error.
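For context on the comments above: the middleware only trusts the MILVUS_ADDRESS header on api routes, while static-asset requests (which never carry it) skip the logic so they don't trip a 401. A minimal sketch of that shape, assuming standard Express types; the header name, the /api path check, and the service hand-off are assumptions rather than the file's actual contents:

```ts
import { NextFunction, Request, Response } from 'express';

// Assumed header name; the real constant is imported in express/src/middlewares/index.ts.
const MILVUS_ADDRESS = 'milvus-address';

export const reqHeaderMiddlewareSketch = (
  req: Request,
  res: Response,
  next: NextFunction
) => {
  // api requests are expected to carry the milvus address in a header.
  const milvusAddress = (req.headers[MILVUS_ADDRESS] as string) || '';

  // only apply the logic to api routes; static files (e.g. xx.js) have no
  // MILVUS_ADDRESS header and would otherwise end up with a 401.
  if (milvusAddress && req.path.startsWith('/api')) {
    // hand the active address to the milvus service here (hypothetical step).
  }

  next();
};
```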
19 changes: 15 additions & 4 deletions express/yarn.lock
@@ -1137,15 +1137,16 @@
dependencies:
"@types/yargs-parser" "*"

"@zilliz/milvus2-sdk-node@^1.0.19":
version "1.0.19"
resolved "https://registry.yarnpkg.com/@zilliz/milvus2-sdk-node/-/milvus2-sdk-node-1.0.19.tgz#966e68a2d88e62ba475a138b382fb71209670a2e"
integrity sha512-LMRRM+vU+AiB3ETUAzIgiFxVZJJ63vyuEP7cZevz4tQPXUW1tN63MVCWFVikQKx3QNE+ikArnoQIGJc0I9+kEA==
"@zilliz/milvus2-sdk-node@^1.1.0":
version "1.1.0"
resolved "https://registry.yarnpkg.com/@zilliz/milvus2-sdk-node/-/milvus2-sdk-node-1.1.0.tgz#0fc8c0b630bb29056363f86a86e6cc470e5ac63a"
integrity sha512-yVfGbJ+qOttUAb/KOxFmj8Pn4tGoSgWcR7xaN3P2LGX8czo1gzywJVqYgE0H6J8xA3B48hUYQ3dA8ie5LbTtgQ==
dependencies:
"@grpc/grpc-js" "^1.2.12"
"@grpc/proto-loader" "^0.6.0"
"@microsoft/api-documenter" "^7.13.39"
"@microsoft/api-extractor" "^7.18.5"
json-schema "^0.4.0"
protobufjs "^6.11.2"

abab@^2.0.3, abab@^2.0.5:
@@ -2005,6 +2006,11 @@ cssstyle@^2.3.0:
dependencies:
cssom "~0.3.6"

csv-writer@^1.6.0:
version "1.6.0"
resolved "https://registry.yarnpkg.com/csv-writer/-/csv-writer-1.6.0.tgz#d0cea44b6b4d7d3baa2ecc6f3f7209233514bcf9"
integrity sha512-NOx7YDFWEsM/fTRAJjRpPp8t+MKRVvniAg9wQlUKx20MFrPs73WLJhFf5iteqrxNYnsy924K3Iroh3yNHeYd2g==

data-urls@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/data-urls/-/data-urls-2.0.0.tgz#156485a72963a970f5d5821aaf642bef2bf2db9b"
@@ -3637,6 +3643,11 @@ json-schema-traverse@^0.4.1:
resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660"
integrity sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==

json-schema@^0.4.0:
version "0.4.0"
resolved "https://registry.yarnpkg.com/json-schema/-/json-schema-0.4.0.tgz#f7de4cf6efab838ebaeb3236474cbba5a1930ab5"
integrity sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA==

json-stringify-safe@^5.0.1:
version "5.0.1"
resolved "https://registry.yarnpkg.com/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz#1296a2d58fd45f19a0f6ce01d65701e2c735b6eb"