未验证 提交 cc057a7a 编写于 作者: K Kirill Lakhov 提交者: GitHub

Large files uploads (#3692)

上级 e39a17b0
......@@ -36,6 +36,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
- Points of invisible shapes are visible in autobordering (<https://github.com/openvinotoolkit/cvat/pull/3931>)
- Order of the label attributes in the object item details(<https://github.com/openvinotoolkit/cvat/pull/3945>)
- Order of labels in tasks and projects (<https://github.com/openvinotoolkit/cvat/pull/3987>)
- Fixed task creation with large files via webpage (<https://github.com/openvinotoolkit/cvat/pull/3692>)
- Added information to export CVAT_HOST when performing local installation for accessing over network (<https://github.com/openvinotoolkit/cvat/pull/4014>)
### Security
......
{
"name": "cvat-core",
"version": "3.21.0",
"version": "3.21.1",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "cvat-core",
"version": "3.21.0",
"version": "3.21.1",
"license": "MIT",
"dependencies": {
"axios": "^0.21.4",
"browser-or-node": "^1.2.1",
"cvat-data": "../cvat-data",
"detect-browser": "^5.2.0",
"detect-browser": "^5.2.1",
"error-stack-parser": "^2.0.2",
"form-data": "^2.5.0",
"jest-config": "^26.6.3",
......@@ -20,7 +20,8 @@
"json-logic-js": "^2.0.1",
"platform": "^1.3.5",
"quickhull": "^1.0.3",
"store": "^2.0.12"
"store": "^2.0.12",
"tus-js-client": "^2.3.0"
},
"devDependencies": {
"coveralls": "^3.0.5",
......@@ -38,6 +39,12 @@
},
"devDependencies": {}
},
"detect-browser": {
"version": "5.2.1",
"resolved": "https://registry.npmjs.org/detect-browser/-/detect-browser-5.2.1.tgz",
"integrity": "sha512-eAcRiEPTs7utXWPaAgu/OX1HRJpxW7xSHpw4LTDrGFaeWnJ37HRlqpUkKsDm0AoTbtrvHQhH+5U2Cd87EGhJTg==",
"extraneous": true
},
"node_modules/@babel/code-frame": {
"version": "7.15.8",
"resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.15.8.tgz",
......@@ -1617,6 +1624,15 @@
"resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz",
"integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA=="
},
"node_modules/combine-errors": {
"version": "3.0.3",
"resolved": "https://registry.npmjs.org/combine-errors/-/combine-errors-3.0.3.tgz",
"integrity": "sha1-9N9nQAg+VwOjGBEQwrEFUfAD2oY=",
"dependencies": {
"custom-error-instance": "2.1.1",
"lodash.uniqby": "4.5.0"
}
},
"node_modules/combined-stream": {
"version": "1.0.8",
"resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz",
......@@ -1714,6 +1730,11 @@
"resolved": "https://registry.npmjs.org/cssom/-/cssom-0.3.8.tgz",
"integrity": "sha512-b0tGHbfegbhPJpxpiBPU2sCkigAqtM9O121le6bbOlgyV+NyGyCmVfJ6QW9eRjz8CpNfWEOYBIMIGRYkLwsIYg=="
},
"node_modules/custom-error-instance": {
"version": "2.1.1",
"resolved": "https://registry.npmjs.org/custom-error-instance/-/custom-error-instance-2.1.1.tgz",
"integrity": "sha1-PPY5FIemYppiR+sMoM4ACBt+Nho="
},
"node_modules/cvat-data": {
"resolved": "../cvat-data",
"link": true
......@@ -2814,7 +2835,6 @@
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz",
"integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==",
"dev": true,
"engines": {
"node": ">=8"
},
......@@ -3669,6 +3689,11 @@
"node": ">= 10.13.0"
}
},
"node_modules/js-base64": {
"version": "2.6.4",
"resolved": "https://registry.npmjs.org/js-base64/-/js-base64-2.6.4.tgz",
"integrity": "sha512-pZe//GGmwJndub7ZghVHz7vjb2LgC1m8B07Au3eYqeqv9emhESByMXxaEgkUkEqJe87oBbSniGYoQNIBklc7IQ=="
},
"node_modules/js-cookie": {
"version": "2.2.1",
"resolved": "https://registry.npmjs.org/js-cookie/-/js-cookie-2.2.1.tgz",
......@@ -3957,6 +3982,60 @@
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
"integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg=="
},
"node_modules/lodash._baseiteratee": {
"version": "4.7.0",
"resolved": "https://registry.npmjs.org/lodash._baseiteratee/-/lodash._baseiteratee-4.7.0.tgz",
"integrity": "sha1-NKm1VDVycnw9sueO2uPA6eZr0QI=",
"dependencies": {
"lodash._stringtopath": "~4.8.0"
}
},
"node_modules/lodash._basetostring": {
"version": "4.12.0",
"resolved": "https://registry.npmjs.org/lodash._basetostring/-/lodash._basetostring-4.12.0.tgz",
"integrity": "sha1-kyfJ3FFYhmt/pLnUL0Y45XZt2d8="
},
"node_modules/lodash._baseuniq": {
"version": "4.6.0",
"resolved": "https://registry.npmjs.org/lodash._baseuniq/-/lodash._baseuniq-4.6.0.tgz",
"integrity": "sha1-DrtE5FaBSveQXGIS+iybLVG4Qeg=",
"dependencies": {
"lodash._createset": "~4.0.0",
"lodash._root": "~3.0.0"
}
},
"node_modules/lodash._createset": {
"version": "4.0.3",
"resolved": "https://registry.npmjs.org/lodash._createset/-/lodash._createset-4.0.3.tgz",
"integrity": "sha1-D0ZZ+7CddRlPqeK4imZE02PJ/iY="
},
"node_modules/lodash._root": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/lodash._root/-/lodash._root-3.0.1.tgz",
"integrity": "sha1-+6HEUkwZ7ppfgTa0YJ8BfPTe1pI="
},
"node_modules/lodash._stringtopath": {
"version": "4.8.0",
"resolved": "https://registry.npmjs.org/lodash._stringtopath/-/lodash._stringtopath-4.8.0.tgz",
"integrity": "sha1-lBvPDmQmbl/B1m/tCmlZVExXaCQ=",
"dependencies": {
"lodash._basetostring": "~4.12.0"
}
},
"node_modules/lodash.throttle": {
"version": "4.1.1",
"resolved": "https://registry.npmjs.org/lodash.throttle/-/lodash.throttle-4.1.1.tgz",
"integrity": "sha1-wj6RtxAkKscMN/HhzaknTMOb8vQ="
},
"node_modules/lodash.uniqby": {
"version": "4.5.0",
"resolved": "https://registry.npmjs.org/lodash.uniqby/-/lodash.uniqby-4.5.0.tgz",
"integrity": "sha1-o6F7v2LutiQPSRhG6XwcTipeHiE=",
"dependencies": {
"lodash._baseiteratee": "~4.7.0",
"lodash._baseuniq": "~4.6.0"
}
},
"node_modules/log-driver": {
"version": "1.2.7",
"resolved": "https://registry.npmjs.org/log-driver/-/log-driver-1.2.7.tgz",
......@@ -4630,6 +4709,18 @@
"node": ">= 6"
}
},
"node_modules/proper-lockfile": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/proper-lockfile/-/proper-lockfile-2.0.1.tgz",
"integrity": "sha1-FZ+wYZPTIAP0s2kd0uwaY0qoDR0=",
"dependencies": {
"graceful-fs": "^4.1.2",
"retry": "^0.10.0"
},
"engines": {
"node": ">=4.0.0"
}
},
"node_modules/psl": {
"version": "1.8.0",
"resolved": "https://registry.npmjs.org/psl/-/psl-1.8.0.tgz",
......@@ -4661,6 +4752,11 @@
"node": ">=0.6"
}
},
"node_modules/querystringify": {
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/querystringify/-/querystringify-2.2.0.tgz",
"integrity": "sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ=="
},
"node_modules/quickhull": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/quickhull/-/quickhull-1.0.3.tgz",
......@@ -4832,6 +4928,11 @@
"resolved": "https://registry.npmjs.org/require-main-filename/-/require-main-filename-2.0.0.tgz",
"integrity": "sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg=="
},
"node_modules/requires-port": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz",
"integrity": "sha1-kl0mAdOaxIXgkc8NpcbmlNw9yv8="
},
"node_modules/requizzle": {
"version": "0.2.3",
"resolved": "https://registry.npmjs.org/requizzle/-/requizzle-0.2.3.tgz",
......@@ -4887,6 +4988,14 @@
"node": ">=0.12"
}
},
"node_modules/retry": {
"version": "0.10.1",
"resolved": "https://registry.npmjs.org/retry/-/retry-0.10.1.tgz",
"integrity": "sha1-52OI0heZLCUnUCQdPTlW/tmNj/Q=",
"engines": {
"node": "*"
}
},
"node_modules/rimraf": {
"version": "3.0.2",
"resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz",
......@@ -5189,11 +5298,6 @@
"node": ">=0.10.0"
}
},
"detect-browser": {
"version": "5.2.1",
"resolved": "https://registry.npmjs.org/detect-browser/-/detect-browser-5.2.1.tgz",
"integrity": "sha512-eAcRiEPTs7utXWPaAgu/OX1HRJpxW7xSHpw4LTDrGFaeWnJ37HRlqpUkKsDm0AoTbtrvHQhH+5U2Cd87EGhJTg=="
},
"node_modules/saxes": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/saxes/-/saxes-5.0.1.tgz",
......@@ -5932,6 +6036,25 @@
"node": "*"
}
},
"node_modules/tus-js-client": {
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/tus-js-client/-/tus-js-client-2.3.0.tgz",
"integrity": "sha512-I4cSwm6N5qxqCmBqenvutwSHe9ntf81lLrtf6BmLpG2v4wTl89atCQKqGgqvkodE6Lx+iKIjMbaXmfvStTg01g==",
"dependencies": {
"buffer-from": "^0.1.1",
"combine-errors": "^3.0.3",
"is-stream": "^2.0.0",
"js-base64": "^2.6.1",
"lodash.throttle": "^4.1.1",
"proper-lockfile": "^2.0.1",
"url-parse": "^1.4.3"
}
},
"node_modules/tus-js-client/node_modules/buffer-from": {
"version": "0.1.2",
"resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-0.1.2.tgz",
"integrity": "sha512-RiWIenusJsmI2KcvqQABB83tLxCByE3upSP8QU3rJDMVFGPWLvPQJt/O1Su9moRWeH7d+Q2HYb68f6+v+tw2vg=="
},
"node_modules/tweetnacl": {
"version": "0.14.5",
"resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz",
......@@ -6078,6 +6201,15 @@
"integrity": "sha1-2pN/emLiH+wf0Y1Js1wpNQZ6bHI=",
"deprecated": "Please see https://github.com/lydell/urix#deprecated"
},
"node_modules/url-parse": {
"version": "1.5.3",
"resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.5.3.tgz",
"integrity": "sha512-IIORyIQD9rvj0A4CLWsHkBBJuNqWpFQe224b6j9t/ABmquIS0qDU2pY6kl6AuOrL5OkCXHMCFNe1jBcuAggjvQ==",
"dependencies": {
"querystringify": "^2.1.1",
"requires-port": "^1.0.0"
}
},
"node_modules/use": {
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/use/-/use-3.1.1.tgz",
......@@ -6207,7 +6339,6 @@
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz",
"integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==",
"dev": true,
"dependencies": {
"isexe": "^2.0.0"
},
......@@ -7581,6 +7712,15 @@
"resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz",
"integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA=="
},
"combine-errors": {
"version": "3.0.3",
"resolved": "https://registry.npmjs.org/combine-errors/-/combine-errors-3.0.3.tgz",
"integrity": "sha1-9N9nQAg+VwOjGBEQwrEFUfAD2oY=",
"requires": {
"custom-error-instance": "2.1.1",
"lodash.uniqby": "4.5.0"
}
},
"combined-stream": {
"version": "1.0.8",
"resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz",
......@@ -7662,6 +7802,11 @@
}
}
},
"custom-error-instance": {
"version": "2.1.1",
"resolved": "https://registry.npmjs.org/custom-error-instance/-/custom-error-instance-2.1.1.tgz",
"integrity": "sha1-PPY5FIemYppiR+sMoM4ACBt+Nho="
},
"cvat-data": {
"version": "file:../cvat-data",
"requires": {
......@@ -8482,8 +8627,7 @@
"is-stream": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz",
"integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==",
"dev": true
"integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg=="
},
"is-typedarray": {
"version": "1.0.0",
......@@ -9155,6 +9299,11 @@
"supports-color": "^7.0.0"
}
},
"js-base64": {
"version": "2.6.4",
"resolved": "https://registry.npmjs.org/js-base64/-/js-base64-2.6.4.tgz",
"integrity": "sha512-pZe//GGmwJndub7ZghVHz7vjb2LgC1m8B07Au3eYqeqv9emhESByMXxaEgkUkEqJe87oBbSniGYoQNIBklc7IQ=="
},
"js-cookie": {
"version": "2.2.1",
"resolved": "https://registry.npmjs.org/js-cookie/-/js-cookie-2.2.1.tgz",
......@@ -9385,6 +9534,60 @@
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
"integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg=="
},
"lodash._baseiteratee": {
"version": "4.7.0",
"resolved": "https://registry.npmjs.org/lodash._baseiteratee/-/lodash._baseiteratee-4.7.0.tgz",
"integrity": "sha1-NKm1VDVycnw9sueO2uPA6eZr0QI=",
"requires": {
"lodash._stringtopath": "~4.8.0"
}
},
"lodash._basetostring": {
"version": "4.12.0",
"resolved": "https://registry.npmjs.org/lodash._basetostring/-/lodash._basetostring-4.12.0.tgz",
"integrity": "sha1-kyfJ3FFYhmt/pLnUL0Y45XZt2d8="
},
"lodash._baseuniq": {
"version": "4.6.0",
"resolved": "https://registry.npmjs.org/lodash._baseuniq/-/lodash._baseuniq-4.6.0.tgz",
"integrity": "sha1-DrtE5FaBSveQXGIS+iybLVG4Qeg=",
"requires": {
"lodash._createset": "~4.0.0",
"lodash._root": "~3.0.0"
}
},
"lodash._createset": {
"version": "4.0.3",
"resolved": "https://registry.npmjs.org/lodash._createset/-/lodash._createset-4.0.3.tgz",
"integrity": "sha1-D0ZZ+7CddRlPqeK4imZE02PJ/iY="
},
"lodash._root": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/lodash._root/-/lodash._root-3.0.1.tgz",
"integrity": "sha1-+6HEUkwZ7ppfgTa0YJ8BfPTe1pI="
},
"lodash._stringtopath": {
"version": "4.8.0",
"resolved": "https://registry.npmjs.org/lodash._stringtopath/-/lodash._stringtopath-4.8.0.tgz",
"integrity": "sha1-lBvPDmQmbl/B1m/tCmlZVExXaCQ=",
"requires": {
"lodash._basetostring": "~4.12.0"
}
},
"lodash.throttle": {
"version": "4.1.1",
"resolved": "https://registry.npmjs.org/lodash.throttle/-/lodash.throttle-4.1.1.tgz",
"integrity": "sha1-wj6RtxAkKscMN/HhzaknTMOb8vQ="
},
"lodash.uniqby": {
"version": "4.5.0",
"resolved": "https://registry.npmjs.org/lodash.uniqby/-/lodash.uniqby-4.5.0.tgz",
"integrity": "sha1-o6F7v2LutiQPSRhG6XwcTipeHiE=",
"requires": {
"lodash._baseiteratee": "~4.7.0",
"lodash._baseuniq": "~4.6.0"
}
},
"log-driver": {
"version": "1.2.7",
"resolved": "https://registry.npmjs.org/log-driver/-/log-driver-1.2.7.tgz",
......@@ -9893,6 +10096,15 @@
"sisteransi": "^1.0.5"
}
},
"proper-lockfile": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/proper-lockfile/-/proper-lockfile-2.0.1.tgz",
"integrity": "sha1-FZ+wYZPTIAP0s2kd0uwaY0qoDR0=",
"requires": {
"graceful-fs": "^4.1.2",
"retry": "^0.10.0"
}
},
"psl": {
"version": "1.8.0",
"resolved": "https://registry.npmjs.org/psl/-/psl-1.8.0.tgz",
......@@ -9918,6 +10130,11 @@
"integrity": "sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA==",
"dev": true
},
"querystringify": {
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/querystringify/-/querystringify-2.2.0.tgz",
"integrity": "sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ=="
},
"quickhull": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/quickhull/-/quickhull-1.0.3.tgz",
......@@ -10054,6 +10271,11 @@
"resolved": "https://registry.npmjs.org/require-main-filename/-/require-main-filename-2.0.0.tgz",
"integrity": "sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg=="
},
"requires-port": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz",
"integrity": "sha1-kl0mAdOaxIXgkc8NpcbmlNw9yv8="
},
"requizzle": {
"version": "0.2.3",
"resolved": "https://registry.npmjs.org/requizzle/-/requizzle-0.2.3.tgz",
......@@ -10096,6 +10318,11 @@
"resolved": "https://registry.npmjs.org/ret/-/ret-0.1.15.tgz",
"integrity": "sha512-TTlYpa+OL+vMMNG24xSlQGEJ3B/RzEfUlLct7b5G/ytav+wPrplCpVMFuwzXbkecJrb6IYo1iFb0S9v37754mg=="
},
"retry": {
"version": "0.10.1",
"resolved": "https://registry.npmjs.org/retry/-/retry-0.10.1.tgz",
"integrity": "sha1-52OI0heZLCUnUCQdPTlW/tmNj/Q="
},
"rimraf": {
"version": "3.0.2",
"resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz",
......@@ -10330,14 +10557,6 @@
"is-number": "^3.0.0",
"repeat-string": "^1.6.1"
}
},
"which": {
"version": "1.3.1",
"resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz",
"integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==",
"requires": {
"isexe": "^2.0.0"
}
}
}
},
......@@ -10923,6 +11142,27 @@
"safe-buffer": "^5.0.1"
}
},
"tus-js-client": {
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/tus-js-client/-/tus-js-client-2.3.0.tgz",
"integrity": "sha512-I4cSwm6N5qxqCmBqenvutwSHe9ntf81lLrtf6BmLpG2v4wTl89atCQKqGgqvkodE6Lx+iKIjMbaXmfvStTg01g==",
"requires": {
"buffer-from": "^0.1.1",
"combine-errors": "^3.0.3",
"is-stream": "^2.0.0",
"js-base64": "^2.6.1",
"lodash.throttle": "^4.1.1",
"proper-lockfile": "^2.0.1",
"url-parse": "^1.4.3"
},
"dependencies": {
"buffer-from": {
"version": "0.1.2",
"resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-0.1.2.tgz",
"integrity": "sha512-RiWIenusJsmI2KcvqQABB83tLxCByE3upSP8QU3rJDMVFGPWLvPQJt/O1Su9moRWeH7d+Q2HYb68f6+v+tw2vg=="
}
}
},
"tweetnacl": {
"version": "0.14.5",
"resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz",
......@@ -11041,6 +11281,15 @@
"resolved": "https://registry.npmjs.org/urix/-/urix-0.1.0.tgz",
"integrity": "sha1-2pN/emLiH+wf0Y1Js1wpNQZ6bHI="
},
"url-parse": {
"version": "1.5.3",
"resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.5.3.tgz",
"integrity": "sha512-IIORyIQD9rvj0A4CLWsHkBBJuNqWpFQe224b6j9t/ABmquIS0qDU2pY6kl6AuOrL5OkCXHMCFNe1jBcuAggjvQ==",
"requires": {
"querystringify": "^2.1.1",
"requires-port": "^1.0.0"
}
},
"use": {
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/use/-/use-3.1.1.tgz",
......@@ -11148,7 +11397,6 @@
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz",
"integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==",
"dev": true,
"requires": {
"isexe": "^2.0.0"
}
......
{
"name": "cvat-core",
"version": "3.21.0",
"version": "3.21.1",
"description": "Part of Computer Vision Tool which presents an interface for client-side integration",
"main": "babel.config.js",
"scripts": {
......@@ -31,10 +31,11 @@
"error-stack-parser": "^2.0.2",
"form-data": "^2.5.0",
"jest-config": "^26.6.3",
"json-logic-js": "^2.0.1",
"js-cookie": "^2.2.0",
"json-logic-js": "^2.0.1",
"platform": "^1.3.5",
"quickhull": "^1.0.3",
"store": "^2.0.12"
"store": "^2.0.12",
"tus-js-client": "^2.3.0"
}
}
......@@ -697,6 +697,8 @@ function build() {
* @property {string} proxy Axios proxy settings.
* For more details please read <a href="https://github.com/axios/axios"> here </a>
* @memberof module:API.cvat.config
* @property {string} origin UI URL origin
* @memberof module:API.cvat.config
*/
get backendAPI() {
......@@ -711,6 +713,12 @@ function build() {
set proxy(value) {
config.proxy = value;
},
get origin() {
return config.origin;
},
set origin(value) {
config.origin = value;
},
},
/**
* Namespace contains some library information e.g. api version
......
......@@ -5,4 +5,5 @@
module.exports = {
backendAPI: '/api/v1',
proxy: false,
origin: '',
};
......@@ -8,6 +8,7 @@
const store = require('store');
const config = require('./config');
const DownloadWorker = require('./download.worker');
const tus = require('tus-js-client');
function waitFor(frequencyHz, predicate) {
return new Promise((resolve, reject) => {
......@@ -567,7 +568,7 @@
}
async function createTask(taskSpec, taskDataSpec, onUpdate) {
const { backendAPI } = config;
const { backendAPI, origin } = config;
async function wait(id) {
return new Promise((resolve, reject) => {
......@@ -607,6 +608,22 @@
});
}
const chunkSize = 1024 * 1024 * 100; // 100 mb
const clientFiles = taskDataSpec.client_files;
const chunkFiles = [];
const bulkFiles = [];
let totalSize = 0;
let totalSentSize = 0;
for (const file of clientFiles) {
if (file.size > chunkSize) {
chunkFiles.push(file);
} else {
bulkFiles.push(file);
}
totalSize += file.size;
}
delete taskDataSpec.client_files;
const taskData = new FormData();
for (const [key, value] of Object.entries(taskDataSpec)) {
if (Array.isArray(value)) {
......@@ -632,18 +649,96 @@
throw generateError(errorData);
}
onUpdate('The data are being uploaded to the server..');
try {
await Axios.post(`${backendAPI}/tasks/${response.data.id}/data`, taskData, {
proxy: config.proxy,
onUpdate('The data are being uploaded to the server 0%');
// Uploads one large file through the resumable TUS protocol.
// Progress is reported via onUpdate() as a percentage of the whole task
// payload (totalSentSize/totalSize are shared with the bulk uploads).
async function chunkUpload(taskId, file) {
    return new Promise((resolve, reject) => {
        const upload = new tus.Upload(file, {
            // backendAPI already begins with '/', so join it to the origin
            // directly; `${origin}/${backendAPI}` produced a '//' in the URL
            endpoint: `${origin}${backendAPI}/tasks/${taskId}/data/`,
            metadata: {
                filename: file.name,
                filetype: file.type,
            },
            headers: {
                Authorization: `Token ${store.get('token')}`,
            },
            chunkSize,
            retryDelays: null, // fail fast; createTask deletes the task on error
            onError(error) {
                reject(error);
            },
            onBeforeRequest(req) {
                // send session cookies along with the token header
                const xhr = req.getUnderlyingObject();
                xhr.withCredentials = true;
            },
            onProgress(bytesUploaded) {
                const currentUploadedSize = totalSentSize + bytesUploaded;
                const percentage = ((currentUploadedSize / totalSize) * 100).toFixed(2);
                onUpdate(`The data are being uploaded to the server ${percentage}%`);
            },
            onSuccess() {
                totalSentSize += file.size;
                resolve();
            },
        });
        upload.start();
    });
}
// Groups small files into bulks of at most chunkSize bytes each and POSTs
// every bulk as a single multipart request with the Upload-Multiple header.
async function bulkUpload(taskId, files) {
    const fileBulks = files.reduce((fileGroups, file) => {
        const lastBulk = fileGroups[fileGroups.length - 1];
        if (chunkSize - lastBulk.size >= file.size) {
            lastBulk.files.push(file);
            lastBulk.size += file.size;
        } else {
            fileGroups.push({ files: [file], size: file.size });
        }
        return fileGroups;
    }, [{ files: [], size: 0 }]);
    const totalBulks = fileBulks.length;
    let currentChunkNumber = 0;
    while (currentChunkNumber < totalBulks) {
        for (const [idx, element] of fileBulks[currentChunkNumber].files.entries()) {
            taskData.append(`client_files[${idx}]`, element);
        }
        // keep the progress message on one line (the original template
        // literal embedded a newline and indentation into the message)
        const percentage = ((totalSentSize / totalSize) * 100).toFixed(2);
        onUpdate(`The data are being uploaded to the server ${percentage}%`);
        await Axios.post(`${backendAPI}/tasks/${taskId}/data`, taskData, {
            proxy: config.proxy,
            headers: { 'Upload-Multiple': true },
        });
        // reuse the same FormData for the next bulk
        for (let i = 0; i < fileBulks[currentChunkNumber].files.length; i++) {
            taskData.delete(`client_files[${i}]`);
        }
        totalSentSize += fileBulks[currentChunkNumber].size;
        currentChunkNumber++;
    }
}
try {
await Axios.post(`${backendAPI}/tasks/${response.data.id}/data`,
taskData, {
proxy: config.proxy,
headers: { 'Upload-Start': true },
});
for (const file of chunkFiles) {
await chunkUpload(response.data.id, file);
}
if (bulkFiles.length > 0) {
await bulkUpload(response.data.id, bulkFiles);
}
await Axios.post(`${backendAPI}/tasks/${response.data.id}/data`,
taskData, {
proxy: config.proxy,
headers: { 'Upload-Finish': true },
});
} catch (errorData) {
try {
await deleteTask(response.data.id);
} catch (_) {
// ignore
}
throw generateError(errorData);
}
......
......@@ -7,6 +7,7 @@ import _cvat from 'cvat-core/src/api';
const cvat: any = _cvat;
cvat.config.backendAPI = '/api/v1';
cvat.config.origin = window.location.origin;
export default function getCore(): any {
return cvat;
......
# Copyright (C) 2021 Intel Corporation
#
# SPDX-License-Identifier: MIT
import os
import base64
import uuid
from django.conf import settings
from django.core.cache import cache
from rest_framework import status
from rest_framework.decorators import action
from rest_framework.response import Response
from cvat.apps.engine.serializers import DataSerializer
class TusFile:
    """One file being uploaded through the TUS resumable-upload protocol.

    Upload state (filename, size, offset, metadata) is tracked in the Django
    cache under "tus-uploads/<file_id>/..." keys; the bytes are written to a
    file named after the random file_id inside upload_dir and renamed to the
    real filename once the upload completes.
    """

    # Cache entries expire after an hour; an abandoned upload can then no
    # longer be resumed.
    _tus_cache_timeout = 3600

    def __init__(self, file_id, upload_dir):
        self.file_id = file_id
        self.upload_dir = upload_dir
        self.file_path = os.path.join(self.upload_dir, self.file_id)
        self.filename = cache.get("tus-uploads/{}/filename".format(file_id))
        self.file_size = int(cache.get("tus-uploads/{}/file_size".format(file_id)))
        self.metadata = cache.get("tus-uploads/{}/metadata".format(file_id))
        self.offset = cache.get("tus-uploads/{}/offset".format(file_id))

    def init_file(self):
        # Pre-allocate the target file at its final size so chunks can be
        # written at arbitrary offsets. truncate() also handles a zero-byte
        # file, which the previous seek(file_size - 1) + write(b'\0')
        # implementation raised on (negative seek).
        with open(self.file_path, 'wb') as file:
            file.truncate(self.file_size)

    def write_chunk(self, chunk):
        # Write the chunk in place, then atomically advance the shared
        # offset counter in the cache.
        with open(self.file_path, 'r+b') as file:
            file.seek(chunk.offset)
            file.write(chunk.content)
        self.offset = cache.incr("tus-uploads/{}/offset".format(self.file_id), chunk.size)

    def is_complete(self):
        # All bytes received when the write offset has reached the declared size.
        return self.offset == self.file_size

    def rename(self):
        """Move the finished upload from its file_id name to its real filename."""
        file_id_path = os.path.join(self.upload_dir, self.file_id)
        file_path = os.path.join(self.upload_dir, self.filename)
        file_exists = os.path.lexists(file_path)
        if file_exists:
            raise FileExistsError("File {} is already uploaded".format(self.filename))
        os.rename(file_id_path, file_path)

    def clean(self):
        """Drop all cache entries describing this upload."""
        cache.delete_many([
            "tus-uploads/{}/file_size".format(self.file_id),
            "tus-uploads/{}/filename".format(self.file_id),
            "tus-uploads/{}/offset".format(self.file_id),
            "tus-uploads/{}/metadata".format(self.file_id),
        ])

    @staticmethod
    def get_tusfile(file_id, upload_dir):
        """Return the TusFile for file_id, or None if it is unknown/expired."""
        file_exists = cache.get("tus-uploads/{}/filename".format(file_id), None) is not None
        if file_exists:
            return TusFile(file_id, upload_dir)
        return None

    @staticmethod
    def create_file(metadata, file_size, upload_dir):
        """Register a brand-new upload in the cache and pre-allocate its file."""
        file_id = str(uuid.uuid4())
        cache.add("tus-uploads/{}/filename".format(file_id), "{}".format(metadata.get("filename")), TusFile._tus_cache_timeout)
        cache.add("tus-uploads/{}/file_size".format(file_id), file_size, TusFile._tus_cache_timeout)
        cache.add("tus-uploads/{}/offset".format(file_id), 0, TusFile._tus_cache_timeout)
        cache.add("tus-uploads/{}/metadata".format(file_id), metadata, TusFile._tus_cache_timeout)
        tus_file = TusFile(file_id, upload_dir)
        tus_file.init_file()
        return tus_file
class TusChunk:
def __init__(self, request):
self.META = request.META
self.offset = int(request.META.get("HTTP_UPLOAD_OFFSET", 0))
self.size = int(request.META.get("CONTENT_LENGTH", settings.TUS_DEFAULT_CHUNK_SIZE))
self.content = request.body
# This upload mixin is implemented using tus
# tus is open protocol for file uploads (see more https://tus.io/)
class UploadMixin(object):
    """Viewset mixin implementing file uploads via the TUS protocol.

    tus is an open protocol for resumable file uploads (https://tus.io/).
    Besides the TUS flow (init_tus_upload / append_tus_chunk), the mixin also
    understands the custom Upload-Start / Upload-Multiple / Upload-Finish
    headers the CVAT client uses to bracket bulk multipart uploads.
    Subclasses must implement upload_finished() and be DRF viewsets
    (get_object() is used to reach the related Data model).
    """
    _tus_api_version = '1.0.0'
    _tus_api_version_supported = ['1.0.0']
    _tus_api_extensions = []
    _tus_max_file_size = str(settings.TUS_MAX_FILE_SIZE)
    _base_tus_headers = {
        'Tus-Resumable': _tus_api_version,
        'Tus-Version': ",".join(_tus_api_version_supported),
        'Tus-Extension': ",".join(_tus_api_extensions),
        'Tus-Max-Size': _tus_max_file_size,
        'Access-Control-Allow-Origin': "*",
        'Access-Control-Allow-Methods': "PATCH,HEAD,GET,POST,OPTIONS",
        'Access-Control-Expose-Headers': "Tus-Resumable,upload-length,upload-metadata,Location,Upload-Offset",
        'Access-Control-Allow-Headers': "Tus-Resumable,upload-length,upload-metadata,Location,Upload-Offset,content-type",
        'Cache-Control': 'no-store'
    }
    # UUID4 pattern used to route chunk PATCH/HEAD requests.
    _file_id_regex = r'(?P<file_id>\b[0-9a-f]{8}\b-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-\b[0-9a-f]{12}\b)'

    def _tus_response(self, status, data=None, extra_headers=None):
        # Every TUS response must carry the protocol headers.
        response = Response(data, status)
        for key, value in self._base_tus_headers.items():
            response.__setitem__(key, value)
        if extra_headers:
            for key, value in extra_headers.items():
                response.__setitem__(key, value)
        return response

    def _get_metadata(self, request):
        # Upload-Metadata is a comma-separated list of "key base64(value)"
        # pairs; a key without a value maps to an empty string.
        metadata = {}
        if request.META.get("HTTP_UPLOAD_METADATA"):
            for kv in request.META.get("HTTP_UPLOAD_METADATA").split(","):
                splited_metadata = kv.split(" ")
                if len(splited_metadata) == 2:
                    key, value = splited_metadata
                    value = base64.b64decode(value)
                    if isinstance(value, bytes):
                        value = value.decode()
                    metadata[key] = value
                else:
                    metadata[splited_metadata[0]] = ""
        return metadata

    def upload_data(self, request):
        """Dispatch an upload request according to its Upload-* headers."""
        tus_request = request.headers.get('Upload-Length', None) is not None or request.method == 'OPTIONS'
        bulk_file_upload = request.headers.get('Upload-Multiple', None) is not None
        start_upload = request.headers.get('Upload-Start', None) is not None
        finish_upload = request.headers.get('Upload-Finish', None) is not None
        one_request_upload = start_upload and finish_upload
        if one_request_upload or finish_upload:
            return self.upload_finished(request)
        elif start_upload:
            return Response(status=status.HTTP_202_ACCEPTED)
        elif tus_request:
            return self.init_tus_upload(request)
        elif bulk_file_upload:
            return self.append(request)
        else: # backward compatibility case - no upload headers were found
            return self.upload_finished(request)

    def init_tus_upload(self, request):
        """Create a new TUS upload (POST) or answer a CORS preflight (OPTIONS)."""
        if request.method == 'OPTIONS':
            # Fix: was status.HTTP_204, which does not exist in
            # rest_framework.status and raised AttributeError.
            return self._tus_response(status=status.HTTP_204_NO_CONTENT)
        else:
            if not self.can_upload():
                return self._tus_response(data='Adding more data is not allowed',
                    status=status.HTTP_400_BAD_REQUEST)
            metadata = self._get_metadata(request)
            filename = metadata.get('filename', '')
            if not self.validate_filename(filename):
                return self._tus_response(status=status.HTTP_400_BAD_REQUEST, data="File name {} is not allowed".format(filename))
            message_id = request.META.get("HTTP_MESSAGE_ID")
            if message_id:
                metadata["message_id"] = base64.b64decode(message_id)
            file_exists = os.path.lexists(os.path.join(self.get_upload_dir(), filename))
            if file_exists:
                return self._tus_response(status=status.HTTP_409_CONFLICT, data="File with same name already exists")
            file_size = int(request.META.get("HTTP_UPLOAD_LENGTH", "0"))
            if file_size > int(self._tus_max_file_size):
                return self._tus_response(status=status.HTTP_413_REQUEST_ENTITY_TOO_LARGE,
                    data="File size exceeds max limit of {} bytes".format(self._tus_max_file_size))
            tus_file = TusFile.create_file(metadata, file_size, self.get_upload_dir())
            # The Location header tells the client where to PATCH the chunks.
            return self._tus_response(
                status=status.HTTP_201_CREATED,
                extra_headers={'Location': '{}{}'.format(request.build_absolute_uri(), tus_file.file_id)})

    @action(detail=True, methods=['HEAD', 'PATCH'], url_path=r'data/'+_file_id_regex)
    def append_tus_chunk(self, request, pk, file_id):
        """HEAD: report the current offset; PATCH: append a chunk at its offset."""
        tus_file = TusFile.get_tusfile(str(file_id), self.get_upload_dir())
        if request.method == 'HEAD':
            if tus_file:
                return self._tus_response(status=status.HTTP_200_OK, extra_headers={
                    'Upload-Offset': tus_file.offset,
                    'Upload-Length': tus_file.file_size})
            return self._tus_response(status=status.HTTP_404_NOT_FOUND)
        else:
            if tus_file is None:
                # Fix: an unknown/expired upload id previously fell through
                # and crashed with AttributeError on tus_file.offset.
                return self._tus_response(status=status.HTTP_404_NOT_FOUND)
            chunk = TusChunk(request)
            if chunk.offset != tus_file.offset:
                return self._tus_response(status=status.HTTP_409_CONFLICT)
            if chunk.offset > tus_file.file_size:
                return self._tus_response(status=status.HTTP_413_REQUEST_ENTITY_TOO_LARGE)
            tus_file.write_chunk(chunk)
            if tus_file.is_complete():
                tus_file.rename()
                tus_file.clean()
            return self._tus_response(status=status.HTTP_204_NO_CONTENT,
                extra_headers={'Upload-Offset': tus_file.offset})

    def validate_filename(self, filename):
        # Reject names that resolve outside the upload directory
        # (path-traversal protection).
        upload_dir = self.get_upload_dir()
        file_path = os.path.join(upload_dir, filename)
        return os.path.commonprefix((os.path.realpath(file_path), upload_dir)) == upload_dir

    def can_upload(self):
        # Uploading is only allowed while the task has no data attached yet.
        db_model = self.get_object()
        model_data = db_model.data
        return model_data.size == 0

    def get_upload_dir(self):
        db_model = self.get_object()
        return db_model.data.get_upload_dirname()

    def get_request_client_files(self, request):
        # Validate the multipart payload and return its client_files list
        # (or None when the request carried no files).
        db_model = self.get_object()
        serializer = DataSerializer(db_model, data=request.data)
        serializer.is_valid(raise_exception=True)
        data = {k: v for k, v in serializer.validated_data.items()}
        return data.get('client_files', None)

    def append(self, request):
        """Append the files of one bulk multipart request to the upload dir."""
        if not self.can_upload():
            return Response(data='Adding more data is not allowed',
                status=status.HTTP_400_BAD_REQUEST)
        client_files = self.get_request_client_files(request)
        if client_files:
            upload_dir = self.get_upload_dir()
            for client_file in client_files:
                with open(os.path.join(upload_dir, client_file['file'].name), 'ab+') as destination:
                    destination.write(client_file['file'].read())
        return Response(status=status.HTTP_200_OK)

    # override this to do stuff after upload
    def upload_finished(self, request):
        raise NotImplementedError('You need to implement upload_finished in UploadMixin')
......@@ -4,6 +4,7 @@
import os
import re
import shutil
from enum import Enum
from django.conf import settings
......@@ -12,7 +13,6 @@ from django.core.files.storage import FileSystemStorage
from django.db import models
from django.db.models.fields import FloatField
from django.utils.translation import gettext_lazy as _
from cvat.apps.engine.utils import parse_specific_attributes
class SafeCharField(models.CharField):
......@@ -159,9 +159,24 @@ class Data(models.Model):
def get_manifest_path(self):
    """Return the path of the dataset manifest inside the upload directory."""
    return os.path.join(self.get_upload_dirname(), 'manifest.jsonl')
def get_index_path(self):
    """Return the path of the manifest index file inside the upload directory."""
    return os.path.join(self.get_upload_dirname(), 'index.json')
def make_dirs(self):
    """Recreate this Data instance's directory tree from scratch."""
    data_path = self.get_data_dirname()
    # Drop any stale directory left over from a previous attempt.
    if os.path.isdir(data_path):
        shutil.rmtree(data_path)
    for dirname in (
        self.get_compressed_cache_dirname(),
        self.get_original_cache_dirname(),
        self.get_upload_dirname(),
    ):
        os.makedirs(dirname)
def get_uploaded_files(self):
    """List files currently present in the upload directory.

    Returns a list of ``{'file': <absolute path>}`` dicts, the shape the
    serializer expects for client files. Subdirectories are skipped.
    """
    upload_dir = self.get_upload_dirname()
    # os.scandir stats each entry once; the original listdir + isfile
    # combination re-joined and re-stat'ed every name.
    return [
        {'file': entry.path}
        for entry in os.scandir(upload_dir)
        if entry.is_file()
    ]
class Video(models.Model):
data = models.OneToOneField(Data, on_delete=models.CASCADE, related_name="video", null=True)
path = models.CharField(max_length=1024, default='')
......
......@@ -303,38 +303,55 @@ class DataSerializer(serializers.ModelSerializer):
raise serializers.ValidationError('Stop frame must be more or equal start frame')
return data
# pylint: disable=no-self-use
def create(self, validated_data):
    """Create a Data row, prepare its directory tree and attach its files."""
    file_groups = self._pop_data(validated_data)
    instance = models.Data.objects.create(**validated_data)
    instance.make_dirs()
    self._create_files(instance, file_groups)
    instance.save()
    return instance
def update(self, instance, validated_data):
    """Apply validated fields to an existing Data row and attach its files."""
    file_groups = self._pop_data(validated_data)
    for field, value in validated_data.items():
        setattr(instance, field, value)
    self._create_files(instance, file_groups)
    instance.save()
    return instance
# pylint: disable=no-self-use
def _pop_data(self, validated_data):
client_files = validated_data.pop('client_files')
server_files = validated_data.pop('server_files')
remote_files = validated_data.pop('remote_files')
for extra_key in { 'use_zip_chunks', 'use_cache', 'copy_data' }:
validated_data.pop(extra_key)
db_data = models.Data.objects.create(**validated_data)
data_path = db_data.get_data_dirname()
if os.path.isdir(data_path):
shutil.rmtree(data_path)
os.makedirs(db_data.get_compressed_cache_dirname())
os.makedirs(db_data.get_original_cache_dirname())
os.makedirs(db_data.get_upload_dirname())
files = {'client_files': client_files, 'server_files': server_files, 'remote_files': remote_files}
return files
for f in client_files:
client_file = models.ClientFile(data=db_data, **f)
client_file.save()
for f in server_files:
server_file = models.ServerFile(data=db_data, **f)
server_file.save()
for f in remote_files:
remote_file = models.RemoteFile(data=db_data, **f)
remote_file.save()
db_data.save()
return db_data
# pylint: disable=no-self-use
def _create_files(self, instance, files):
    """Persist the grouped file records against *instance*.

    Client files are inserted with a single bulk_create; server and
    remote files are saved one by one.
    """
    if 'client_files' in files:
        models.ClientFile.objects.bulk_create(
            models.ClientFile(data=instance, **f)
            for f in files['client_files']
        )
    if 'server_files' in files:
        for f in files['server_files']:
            models.ServerFile(data=instance, **f).save()
    if 'remote_files' in files:
        for f in files['remote_files']:
            models.RemoteFile(data=instance, **f).save()
class TaskSerializer(WriteOnceMixin, serializers.ModelSerializer):
labels = LabelSerializer(many=True, source='label_set', partial=True, required=False)
......
......@@ -48,7 +48,7 @@ from cvat.apps.engine.frame_provider import FrameProvider
from cvat.apps.engine.media_extractors import ImageListReader
from cvat.apps.engine.mime_types import mimetypes
from cvat.apps.engine.models import (
Job, StatusChoice, Task, Project, Review, Issue,
Job, StatusChoice, Task, Data, Project, Review, Issue,
Comment, StorageMethodChoice, ReviewStatus, StorageChoice, Image,
CredentialsTypeChoice, CloudProviderChoice
)
......@@ -64,6 +64,7 @@ from cvat.apps.engine.serializers import (
from utils.dataset_manifest import ImageManifestManager
from cvat.apps.engine.utils import av_scan_paths
from cvat.apps.engine.backup import import_task
from cvat.apps.engine.mixins import UploadMixin
from . import models, task
from .log import clogger, slogger
......@@ -102,7 +103,6 @@ class ServerViewSet(viewsets.ViewSet):
def exception(request):
"""
Saves an exception from a client on the server
Sends logs to the ELK if it is connected
"""
serializer = ExceptionSerializer(data=request.data)
......@@ -129,7 +129,6 @@ class ServerViewSet(viewsets.ViewSet):
def logs(request):
"""
Saves logs from a client on the server
Sends logs to the ELK if it is connected
"""
serializer = LogEventSerializer(many=True, data=request.data)
......@@ -415,7 +414,7 @@ class DjangoFilterInspector(CoreAPICompatInspector):
@method_decorator(name='update', decorator=swagger_auto_schema(operation_summary='Method updates a task by id'))
@method_decorator(name='destroy', decorator=swagger_auto_schema(operation_summary='Method deletes a specific task, all attached jobs, annotations, and data'))
@method_decorator(name='partial_update', decorator=swagger_auto_schema(operation_summary='Methods does a partial update of chosen fields in a task'))
class TaskViewSet(auth.TaskGetQuerySetMixin, viewsets.ModelViewSet):
class TaskViewSet(UploadMixin, auth.TaskGetQuerySetMixin, viewsets.ModelViewSet):
queryset = Task.objects.prefetch_related(
Prefetch('label_set', queryset=models.Label.objects.order_by('id')),
"label_set__attributespec_set",
......@@ -609,6 +608,40 @@ class TaskViewSet(auth.TaskGetQuerySetMixin, viewsets.ModelViewSet):
return Response(serializer.data)
def upload_finished(self, request):
    """Finalize a resumable (TUS) upload and kick off task creation.

    Invoked by UploadMixin once the client signals the upload is done:
    validates the finishing request, merges files already written to the
    upload directory with any client_files in this request, persists the
    Data record, adjusts its storage settings and enqueues task.create.
    """
    db_task = self.get_object() # call check_object_permissions as well
    task_data = db_task.data
    serializer = DataSerializer(task_data, data=request.data)
    serializer.is_valid(raise_exception=True)
    data = dict(serializer.validated_data.items())
    # Files streamed to the upload dir during the TUS session are joined
    # with client_files carried by this finishing request.
    uploaded_files = task_data.get_uploaded_files()
    uploaded_files.extend(data.get('client_files'))
    serializer.validated_data.update({'client_files': uploaded_files})
    db_data = serializer.save()
    db_task.data = db_data
    db_task.save()
    # serializer.data does not echo the write-only control flags back,
    # so re-attach them from validated_data for task.create.
    data = {k: v for k, v in serializer.data.items()}
    data['use_zip_chunks'] = serializer.validated_data['use_zip_chunks']
    data['use_cache'] = serializer.validated_data['use_cache']
    data['copy_data'] = serializer.validated_data['copy_data']
    if data['use_cache']:
        db_task.data.storage_method = StorageMethodChoice.CACHE
        db_task.data.save(update_fields=['storage_method'])
    if data['server_files'] and not data.get('copy_data'):
        db_task.data.storage = StorageChoice.SHARE
        db_task.data.save(update_fields=['storage'])
    if db_data.cloud_storage:
        db_task.data.storage = StorageChoice.CLOUD_STORAGE
        db_task.data.save(update_fields=['storage'])
    # if the value of stop_frame is 0, then inside the function we cannot know
    # the value specified by the user or it's default value from the database
    if 'stop_frame' not in serializer.validated_data:
        data['stop_frame'] = None
    task.create(db_task.id, data)
    return Response(serializer.data, status=status.HTTP_202_ACCEPTED)
@swagger_auto_schema(method='post', operation_summary='Method permanently attaches images or video to a task',
request_body=DataSerializer,
)
......@@ -624,36 +657,21 @@ class TaskViewSet(auth.TaskGetQuerySetMixin, viewsets.ModelViewSet):
description="A unique number value identifying chunk or frame, doesn't matter for 'preview' type"),
]
)
@action(detail=True, methods=['POST', 'GET'])
@action(detail=True, methods=['OPTIONS', 'POST', 'GET'], url_path=r'data/?$')
def data(self, request, pk):
db_task = self.get_object() # call check_object_permissions as well
if request.method == 'POST':
if db_task.data:
if request.method == 'POST' or request.method == 'OPTIONS':
task_data = db_task.data
if not task_data:
task_data = Data.objects.create()
task_data.make_dirs()
db_task.data = task_data
db_task.save()
elif task_data.size != 0:
return Response(data='Adding more data is not supported',
status=status.HTTP_400_BAD_REQUEST)
serializer = DataSerializer(data=request.data)
serializer.is_valid(raise_exception=True)
db_data = serializer.save()
db_task.data = db_data
db_task.save()
data = {k:v for k, v in serializer.data.items()}
for extra_key in { 'use_zip_chunks', 'use_cache', 'copy_data' }:
data[extra_key] = serializer.validated_data[extra_key]
if data['use_cache']:
db_task.data.storage_method = StorageMethodChoice.CACHE
db_task.data.save(update_fields=['storage_method'])
if data['server_files'] and not data.get('copy_data'):
db_task.data.storage = StorageChoice.SHARE
db_task.data.save(update_fields=['storage'])
if db_data.cloud_storage:
db_task.data.storage = StorageChoice.CLOUD_STORAGE
db_task.data.save(update_fields=['storage'])
# if the value of stop_frame is 0, then inside the function we cannot know
# the value specified by the user or it's default value from the database
if 'stop_frame' not in serializer.validated_data:
data['stop_frame'] = None
task.create(db_task.id, data)
return Response(serializer.data, status=status.HTTP_202_ACCEPTED)
return self.upload_data(request)
else:
data_type = request.query_params.get('type', None)
data_id = request.query_params.get('number', None)
......@@ -997,6 +1015,7 @@ class JobViewSet(viewsets.GenericViewSet,
serializer = CombinedIssueSerializer(queryset, context={'request': request}, many=True)
return Response(serializer.data)
@method_decorator(name='create', decorator=swagger_auto_schema(operation_summary='Submit a review for a job'))
@method_decorator(name='destroy', decorator=swagger_auto_schema(operation_summary='Method removes a review from a job'))
class ReviewViewSet(viewsets.GenericViewSet, mixins.DestroyModelMixin, mixins.CreateModelMixin):
......
......@@ -21,6 +21,7 @@ import shutil
import subprocess
import mimetypes
from distutils.util import strtobool
from corsheaders.defaults import default_headers
mimetypes.add_type("application/wasm", ".wasm", True)
......@@ -482,3 +483,18 @@ CACHES = {
USE_CACHE = True

# Extra request headers browsers may send on cross-origin requests; required
# for the TUS resumable-upload protocol used for large task data uploads.
CORS_ALLOW_HEADERS = list(default_headers) + [
    # tus upload protocol headers
    'upload-offset',
    'upload-length',
    'tus-version',
    'tus-resumable',
    # extended upload protocol headers
    'upload-start',
    'upload-finish',
    'upload-multiple'
]

TUS_MAX_FILE_SIZE = 26843545600  # 25 GiB (25 * 2**30 bytes)
TUS_DEFAULT_CHUNK_SIZE = 104857600  # 100 MiB (100 * 2**20 bytes)
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册