Mirror of https://github.com/wekan/wekan.git (synced 2025-04-23 13:37:09 -04:00)

Remove 1.8.x files because Sandstorm now uses newest Meteor.
Thanks to xet7 !

Parent: 079b3da6ef
Commit: 1a836969e1

17 changed files with 0 additions and 7816 deletions
@@ -1,20 +0,0 @@
# This file contains information which helps Meteor properly upgrade your
# app when you run 'meteor update'. You should check it into version control
# with your project.

notices-for-0.9.0
notices-for-0.9.1
0.9.4-platform-file
notices-for-facebook-graph-api-2
1.2.0-standard-minifiers-package
1.2.0-meteor-platform-split
1.2.0-cordova-changes
1.2.0-breaking-changes
1.3.0-split-minifiers-package
1.3.5-remove-old-dev-bundle-link
1.4.0-remove-old-dev-bundle-link
1.4.1-add-shell-server-package
1.4.3-split-account-service-packages
1.5-add-dynamic-import-package
1.7-split-underscore-from-meteor-base
1.8.3-split-jquery-from-blaze
.sandstorm-meteor-1.8/.meteor/.gitignore (vendored)
@@ -1,2 +0,0 @@
dev_bundle
local
@@ -1,7 +0,0 @@
# This file contains a token that is unique to your project.
# Check it into your repository along with the rest of this directory.
# It can be used for purposes such as:
# - ensuring you don't accidentally deploy one app on top of another
# - providing package authors with aggregated statistics

dvyihgykyzec6y1dpg
@@ -1,100 +0,0 @@
# Meteor packages used by this project, one per line.
#
# 'meteor add' and 'meteor remove' will edit this file for you,
# but you can also edit it by hand.

meteor-base@1.4.0

# Build system
ecmascript@0.13.2
standard-minifier-css@1.5.4
standard-minifier-js@2.5.2
mquandalle:jade

# Polyfills
es5-shim@4.8.0

# Collections
aldeed:collection2
cfs:standard-packages
cottz:publish-relations
dburles:collection-helpers
idmontie:migrations
matb33:collection-hooks
matteodem:easy-search
mongo@1.7.0
mquandalle:collection-mutations

# Account system
kenton:accounts-sandstorm
service-configuration@1.0.11
useraccounts:unstyled
useraccounts:flow-routing
wekan-ldap
wekan-accounts-cas
wekan-accounts-oidc

# Utilities
check@1.3.1
jquery@1.11.10
random@1.1.0
reactive-dict@1.3.0
session@1.2.0
tracker@1.2.0
underscore@1.0.10
3stack:presence
alethes:pages
arillo:flow-router-helpers
audit-argument-checks@1.0.7
kadira:blaze-layout
kadira:dochead
mquandalle:autofocus
ongoworks:speakingurl
raix:handlebar-helpers
tap:i18n
http@1.4.2

# UI components
blaze
reactive-var@1.0.11
fortawesome:fontawesome
mousetrap:mousetrap
mquandalle:jquery-textcomplete
mquandalle:jquery-ui-drag-drop-sort
mquandalle:mousetrap-bindglobal
peerlibrary:blaze-components@=0.15.1
templates:tabs
verron:autosize
simple:json-routes
rajit:bootstrap3-datepicker
shell-server@0.4.0
simple:rest-accounts-password
useraccounts:core
email@1.2.3
horka:swipebox
dynamic-import@0.5.1
staringatlights:fast-render

accounts-password@1.5.2
cfs:gridfs
rzymek:fullcalendar
momentjs:moment@2.22.2
browser-policy-framing@1.1.0
mquandalle:moment
msavin:usercache
wekan-scrollbar
mquandalle:perfect-scrollbar
mdg:meteor-apm-agent@3.2.0-rc.0!
# Keep stylus in 1.1.0, because building v2 takes extra 52 minutes.
coagmano:stylus@1.1.0!
lucasantoniassi:accounts-lockout
meteorhacks:subs-manager
meteorhacks:picker
lamhieu:unblock
meteorhacks:aggregate@1.3.0
wekan-markdown
konecty:mongo-counter
percolate:synced-cron
easylogic:summernote
cfs:filesystem
ostrio:cookies
@@ -1,2 +0,0 @@
server
browser
@@ -1 +0,0 @@
METEOR@1.8.3
@@ -1,198 +0,0 @@
3stack:presence@1.1.2
accounts-base@1.4.5
accounts-oauth@1.1.16
accounts-password@1.5.2
aldeed:collection2@2.10.0
aldeed:collection2-core@1.2.0
aldeed:schema-deny@1.1.0
aldeed:schema-index@1.1.1
aldeed:simple-schema@1.5.4
alethes:pages@1.8.6
allow-deny@1.1.0
arillo:flow-router-helpers@0.5.2
audit-argument-checks@1.0.7
autoupdate@1.6.0
babel-compiler@7.4.2
babel-runtime@1.4.0
base64@1.0.12
binary-heap@1.0.11
blaze@2.3.4
blaze-tools@1.0.10
boilerplate-generator@1.6.0
browser-policy-common@1.0.11
browser-policy-framing@1.1.0
caching-compiler@1.2.1
caching-html-compiler@1.1.3
callback-hook@1.2.0
cfs:access-point@0.1.49
cfs:base-package@0.0.30
cfs:collection@0.5.5
cfs:collection-filters@0.2.4
cfs:data-man@0.0.6
cfs:file@0.1.17
cfs:filesystem@0.1.2
cfs:gridfs@0.0.34
cfs:http-methods@0.0.32
cfs:http-publish@0.0.13
cfs:power-queue@0.9.11
cfs:reactive-list@0.0.9
cfs:reactive-property@0.0.4
cfs:standard-packages@0.5.10
cfs:storage-adapter@0.2.4
cfs:tempstore@0.1.6
cfs:upload-http@0.0.20
cfs:worker@0.1.5
check@1.3.1
chuangbo:cookie@1.1.0
coagmano:stylus@1.1.0
coffeescript@1.0.17
cottz:publish-relations@2.0.8
dburles:collection-helpers@1.1.0
ddp@1.4.0
ddp-client@2.3.3
ddp-common@1.4.0
ddp-rate-limiter@1.0.7
ddp-server@2.3.0
deps@1.0.12
diff-sequence@1.1.1
dynamic-import@0.5.1
easylogic:summernote@0.8.8
ecmascript@0.13.2
ecmascript-runtime@0.7.0
ecmascript-runtime-client@0.9.0
ecmascript-runtime-server@0.8.0
ejson@1.1.1
email@1.2.3
es5-shim@4.8.0
fastclick@1.0.13
fetch@0.1.1
fortawesome:fontawesome@4.7.0
geojson-utils@1.0.10
horka:swipebox@1.0.2
hot-code-push@1.0.4
html-tools@1.0.11
htmljs@1.0.11
http@1.4.2
id-map@1.1.0
idmontie:migrations@1.0.3
inter-process-messaging@0.1.0
jquery@1.11.11
kadira:blaze-layout@2.3.0
kadira:dochead@1.5.0
kadira:flow-router@2.12.1
kenton:accounts-sandstorm@0.7.0
konecty:mongo-counter@0.0.5_3
lamhieu:meteorx@2.1.1
lamhieu:unblock@1.0.0
launch-screen@1.1.1
livedata@1.0.18
localstorage@1.2.0
logging@1.1.20
lucasantoniassi:accounts-lockout@1.0.0
matb33:collection-hooks@0.9.1
matteodem:easy-search@1.6.4
mdg:meteor-apm-agent@3.2.5
mdg:validation-error@0.5.1
meteor@1.9.3
meteor-base@1.4.0
meteor-platform@1.2.6
meteorhacks:aggregate@1.3.0
meteorhacks:collection-utils@1.2.0
meteorhacks:picker@1.0.3
meteorhacks:subs-manager@1.6.4
meteorspark:util@0.2.0
minifier-css@1.4.3
minifier-js@2.5.1
minifiers@1.1.8-faster-rebuild.0
minimongo@1.4.5
mobile-status-bar@1.0.14
modern-browsers@0.1.4
modules@0.14.0
modules-runtime@0.11.0
momentjs:moment@2.24.0
mongo@1.7.0
mongo-decimal@0.1.1
mongo-dev-server@1.1.0
mongo-id@1.0.7
mongo-livedata@1.0.12
mousetrap:mousetrap@1.4.6_1
mquandalle:autofocus@1.0.0
mquandalle:collection-mutations@0.1.0
mquandalle:jade@0.4.9
mquandalle:jade-compiler@0.4.5
mquandalle:jquery-textcomplete@0.8.0_1
mquandalle:jquery-ui-drag-drop-sort@0.2.0
mquandalle:moment@1.0.1
mquandalle:mousetrap-bindglobal@0.0.1
mquandalle:perfect-scrollbar@0.6.5_2
msavin:usercache@1.8.0
npm-bcrypt@0.9.3
npm-mongo@3.2.0
oauth@1.2.8
oauth2@1.2.1
observe-sequence@1.0.16
ongoworks:speakingurl@1.1.0
ordered-dict@1.1.0
ostrio:cookies@2.5.0
peerlibrary:assert@0.3.0
peerlibrary:base-component@0.16.0
peerlibrary:blaze-components@0.15.1
peerlibrary:computed-field@0.10.0
peerlibrary:reactive-field@0.6.0
percolate:synced-cron@1.3.2
promise@0.11.2
raix:eventemitter@0.1.3
raix:handlebar-helpers@0.2.5
rajit:bootstrap3-datepicker@1.7.1_1
random@1.1.0
rate-limit@1.0.9
reactive-dict@1.3.0
reactive-var@1.0.11
reload@1.3.0
retry@1.1.0
routepolicy@1.1.0
rzymek:fullcalendar@3.8.0
server-render@0.3.1
service-configuration@1.0.11
session@1.2.0
sha@1.0.9
shell-server@0.4.0
simple:authenticate-user-by-token@1.0.1
simple:json-routes@2.1.0
simple:rest-accounts-password@1.1.2
simple:rest-bearer-token-parser@1.0.1
simple:rest-json-error-handler@1.0.1
socket-stream-client@0.2.2
softwarerero:accounts-t9n@1.3.11
spacebars@1.0.15
spacebars-compiler@1.1.3
srp@1.0.12
standard-minifier-css@1.5.4
standard-minifier-js@2.5.2
staringatlights:fast-render@3.2.0
staringatlights:inject-data@2.3.0
tap:i18n@1.8.2
templates:tabs@2.3.0
templating@1.3.2
templating-compiler@1.3.3
templating-runtime@1.3.2
templating-tools@1.1.2
tracker@1.2.0
twbs:bootstrap@3.3.6
ui@1.0.13
underscore@1.0.10
url@1.2.0
useraccounts:core@1.14.2
useraccounts:flow-routing@1.14.2
useraccounts:unstyled@1.14.2
verron:autosize@3.0.8
webapp@1.7.5
webapp-hashing@1.0.9
wekan-accounts-cas@0.1.0
wekan-accounts-oidc@1.0.10
wekan-ldap@0.0.2
wekan-markdown@1.0.7
wekan-oidc@1.0.12
wekan-scrollbar@3.1.3
yasaricli:slugify@0.0.7
zimme:active-route@2.3.2
@ -1,914 +0,0 @@
|
|||
(function () {
|
||||
|
||||
/* Imports */
|
||||
var Meteor = Package.meteor.Meteor;
|
||||
var global = Package.meteor.global;
|
||||
var meteorEnv = Package.meteor.meteorEnv;
|
||||
var FS = Package['cfs:base-package'].FS;
|
||||
var check = Package.check.check;
|
||||
var Match = Package.check.Match;
|
||||
var EJSON = Package.ejson.EJSON;
|
||||
var HTTP = Package['cfs:http-methods'].HTTP;
|
||||
|
||||
/* Package-scope variables */
|
||||
var rootUrlPathPrefix, baseUrl, getHeaders, getHeadersByCollection, _existingMountPoints, mountUrls;
|
||||
|
||||
(function(){
|
||||
|
||||
///////////////////////////////////////////////////////////////////////
|
||||
// //
|
||||
// packages/cfs_access-point/packages/cfs_access-point.js //
|
||||
// //
|
||||
///////////////////////////////////////////////////////////////////////
|
||||
//
|
||||
(function () {
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
// //
|
||||
// packages/cfs:access-point/access-point-common.js //
|
||||
// //
|
||||
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
//
|
||||
rootUrlPathPrefix = __meteor_runtime_config__.ROOT_URL_PATH_PREFIX || ""; // 1
|
||||
// Adjust the rootUrlPathPrefix if necessary // 2
|
||||
if (rootUrlPathPrefix.length > 0) { // 3
|
||||
if (rootUrlPathPrefix.slice(0, 1) !== '/') { // 4
|
||||
rootUrlPathPrefix = '/' + rootUrlPathPrefix; // 5
|
||||
} // 6
|
||||
if (rootUrlPathPrefix.slice(-1) === '/') { // 7
|
||||
rootUrlPathPrefix = rootUrlPathPrefix.slice(0, -1); // 8
|
||||
} // 9
|
||||
} // 10
|
||||
// 11
|
||||
// prepend ROOT_URL when isCordova // 12
|
||||
if (Meteor.isCordova) { // 13
|
||||
rootUrlPathPrefix = Meteor.absoluteUrl(rootUrlPathPrefix.replace(/^\/+/, '')).replace(/\/+$/, ''); // 14
|
||||
} // 15
|
||||
// 16
|
||||
baseUrl = '/cfs'; // 17
|
||||
FS.HTTP = FS.HTTP || {}; // 18
|
||||
// 19
|
||||
// Note the upload URL so that client uploader packages know what it is // 20
|
||||
FS.HTTP.uploadUrl = rootUrlPathPrefix + baseUrl + '/files'; // 21
|
||||
// 22
|
||||
/** // 23
|
||||
* @method FS.HTTP.setBaseUrl // 24
|
||||
* @public // 25
|
||||
* @param {String} newBaseUrl - Change the base URL for the HTTP GET and DELETE endpoints. // 26
|
||||
* @returns {undefined} // 27
|
||||
*/ // 28
|
||||
FS.HTTP.setBaseUrl = function setBaseUrl(newBaseUrl) { // 29
|
||||
// 30
|
||||
// Adjust the baseUrl if necessary // 31
|
||||
if (newBaseUrl.slice(0, 1) !== '/') { // 32
|
||||
newBaseUrl = '/' + newBaseUrl; // 33
|
||||
} // 34
|
||||
if (newBaseUrl.slice(-1) === '/') { // 35
|
||||
newBaseUrl = newBaseUrl.slice(0, -1); // 36
|
||||
} // 37
|
||||
// 38
|
||||
// Update the base URL // 39
|
||||
baseUrl = newBaseUrl; // 40
|
||||
// 41
|
||||
// Change the upload URL so that client uploader packages know what it is // 42
|
||||
FS.HTTP.uploadUrl = rootUrlPathPrefix + baseUrl + '/files'; // 43
|
||||
// 44
|
||||
// Remount URLs with the new baseUrl, unmounting the old, on the server only. // 45
|
||||
// If existingMountPoints is empty, then we haven't run the server startup // 46
|
||||
// code yet, so this new URL will be used at that point for the initial mount. // 47
|
||||
if (Meteor.isServer && !FS.Utility.isEmpty(_existingMountPoints)) { // 48
|
||||
mountUrls(); // 49
|
||||
} // 50
|
||||
}; // 51
|
||||
// 52
|
||||
/* // 53
|
||||
* FS.File extensions // 54
|
||||
*/ // 55
|
||||
// 56
|
||||
/** // 57
|
||||
* @method FS.File.prototype.url Construct the file url // 58
|
||||
* @public // 59
|
||||
* @param {Object} [options] // 60
|
||||
* @param {String} [options.store] Name of the store to get from. If not defined, the first store defined in `options.stores` for the collection on the client is used.
|
||||
* @param {Boolean} [options.auth=null] Add authentication token to the URL query string? By default, a token for the current logged in user is added on the client. Set this to `false` to omit the token. Set this to a string to provide your own token. Set this to a number to specify an expiration time for the token in seconds.
|
||||
* @param {Boolean} [options.download=false] Should headers be set to force a download? Typically this means that clicking the link with this URL will download the file to the user's Downloads folder instead of displaying the file in the browser.
|
||||
* @param {Boolean} [options.brokenIsFine=false] Return the URL even if we know it's currently a broken link because the file hasn't been saved in the requested store yet.
|
||||
* @param {Boolean} [options.metadata=false] Return the URL for the file metadata access point rather than the file itself.
|
||||
* @param {String} [options.uploading=null] A URL to return while the file is being uploaded. // 66
|
||||
* @param {String} [options.storing=null] A URL to return while the file is being stored. // 67
|
||||
* @param {String} [options.filename=null] Override the filename that should appear at the end of the URL. By default it is the name of the file in the requested store.
|
||||
* // 69
|
||||
* Returns the HTTP URL for getting the file or its metadata. // 70
|
||||
*/ // 71
|
||||
FS.File.prototype.url = function(options) { // 72
|
||||
var self = this; // 73
|
||||
options = options || {}; // 74
|
||||
options = FS.Utility.extend({ // 75
|
||||
store: null, // 76
|
||||
auth: null, // 77
|
||||
download: false, // 78
|
||||
metadata: false, // 79
|
||||
brokenIsFine: false, // 80
|
||||
uploading: null, // return this URL while uploading // 81
|
||||
storing: null, // return this URL while storing // 82
|
||||
filename: null // override the filename that is shown to the user // 83
|
||||
}, options.hash || options); // check for "hash" prop if called as helper // 84
|
||||
// 85
|
||||
// Primarily useful for displaying a temporary image while uploading an image // 86
|
||||
if (options.uploading && !self.isUploaded()) { // 87
|
||||
return options.uploading; // 88
|
||||
} // 89
|
||||
// 90
|
||||
if (self.isMounted()) { // 91
|
||||
// See if we've stored in the requested store yet // 92
|
||||
var storeName = options.store || self.collection.primaryStore.name; // 93
|
||||
if (!self.hasStored(storeName)) { // 94
|
||||
if (options.storing) { // 95
|
||||
return options.storing; // 96
|
||||
} else if (!options.brokenIsFine) { // 97
|
||||
// We want to return null if we know the URL will be a broken // 98
|
||||
// link because then we can avoid rendering broken links, broken // 99
|
||||
// images, etc. // 100
|
||||
return null; // 101
|
||||
} // 102
|
||||
} // 103
|
||||
// 104
|
||||
// Add filename to end of URL if we can determine one // 105
|
||||
var filename = options.filename || self.name({store: storeName}); // 106
|
||||
if (typeof filename === "string" && filename.length) { // 107
|
||||
filename = '/' + filename; // 108
|
||||
} else { // 109
|
||||
filename = ''; // 110
|
||||
} // 111
|
||||
// 112
|
||||
// TODO: Could we somehow figure out if the collection requires login? // 113
|
||||
var authToken = ''; // 114
|
||||
if (Meteor.isClient && typeof Accounts !== "undefined" && typeof Accounts._storedLoginToken === "function") { // 115
|
||||
if (options.auth !== false) { // 116
|
||||
// Add reactive deps on the user // 117
|
||||
Meteor.userId(); // 118
|
||||
// 119
|
||||
var authObject = { // 120
|
||||
authToken: Accounts._storedLoginToken() || '' // 121
|
||||
}; // 122
|
||||
// 123
|
||||
// If it's a number, we use that as the expiration time (in seconds) // 124
|
||||
if (options.auth === +options.auth) { // 125
|
||||
authObject.expiration = FS.HTTP.now() + options.auth * 1000; // 126
|
||||
} // 127
|
||||
// 128
|
||||
// Set the authToken // 129
|
||||
var authString = JSON.stringify(authObject); // 130
|
||||
authToken = FS.Utility.btoa(authString); // 131
|
||||
} // 132
|
||||
} else if (typeof options.auth === "string") { // 133
|
||||
// If the user supplies auth token the user will be responsible for // 134
|
||||
// updating // 135
|
||||
authToken = options.auth; // 136
|
||||
} // 137
|
||||
// 138
|
||||
// Construct query string // 139
|
||||
var params = {}; // 140
|
||||
if (authToken !== '') { // 141
|
||||
params.token = authToken; // 142
|
||||
} // 143
|
||||
if (options.download) { // 144
|
||||
params.download = true; // 145
|
||||
} // 146
|
||||
if (options.store) { // 147
|
||||
// We use options.store here instead of storeName because we want to omit the queryString // 148
|
||||
// whenever possible, allowing users to have "clean" URLs if they want. The server will // 149
|
||||
// assume the first store defined on the server, which means that we are assuming that // 150
|
||||
// the first on the client is also the first on the server. If that's not the case, the // 151
|
||||
// store option should be supplied. // 152
|
||||
params.store = options.store; // 153
|
||||
} // 154
|
||||
var queryString = FS.Utility.encodeParams(params); // 155
|
||||
if (queryString.length) { // 156
|
||||
queryString = '?' + queryString; // 157
|
||||
} // 158
|
||||
// 159
|
||||
// Determine which URL to use // 160
|
||||
var area; // 161
|
||||
if (options.metadata) { // 162
|
||||
area = '/record'; // 163
|
||||
} else { // 164
|
||||
area = '/files'; // 165
|
||||
} // 166
|
||||
// 167
|
||||
// Construct and return the http method url // 168
|
||||
return rootUrlPathPrefix + baseUrl + area + '/' + self.collection.name + '/' + self._id + filename + queryString; // 169
|
||||
} // 170
|
||||
// 171
|
||||
}; // 172
|
||||
// 173
|
||||
// 174
|
||||
// 175
|
||||
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
}).call(this);
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
(function () {
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
// //
|
||||
// packages/cfs:access-point/access-point-handlers.js //
|
||||
// //
|
||||
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
//
|
||||
getHeaders = []; // 1
|
||||
getHeadersByCollection = {}; // 2
|
||||
// 3
|
||||
FS.HTTP.Handlers = {}; // 4
|
||||
// 5
|
||||
/** // 6
|
||||
* @method FS.HTTP.Handlers.Del // 7
|
||||
* @public // 8
|
||||
* @returns {any} response // 9
|
||||
* // 10
|
||||
* HTTP DEL request handler // 11
|
||||
*/ // 12
|
||||
FS.HTTP.Handlers.Del = function httpDelHandler(ref) { // 13
|
||||
var self = this; // 14
|
||||
var opts = FS.Utility.extend({}, self.query || {}, self.params || {}); // 15
|
||||
// 16
|
||||
// If DELETE request, validate with 'remove' allow/deny, delete the file, and return // 17
|
||||
FS.Utility.validateAction(ref.collection.files._validators['remove'], ref.file, self.userId); // 18
|
||||
// 19
|
||||
/* // 20
|
||||
* From the DELETE spec: // 21
|
||||
* A successful response SHOULD be 200 (OK) if the response includes an // 22
|
||||
* entity describing the status, 202 (Accepted) if the action has not // 23
|
||||
* yet been enacted, or 204 (No Content) if the action has been enacted // 24
|
||||
* but the response does not include an entity. // 25
|
||||
*/ // 26
|
||||
self.setStatusCode(200); // 27
|
||||
// 28
|
||||
return { // 29
|
||||
deleted: !!ref.file.remove() // 30
|
||||
}; // 31
|
||||
}; // 32
|
||||
// 33
|
||||
/** // 34
|
||||
* @method FS.HTTP.Handlers.GetList // 35
|
||||
* @public // 36
|
||||
* @returns {Object} response // 37
|
||||
* // 38
|
||||
* HTTP GET file list request handler // 39
|
||||
*/ // 40
|
||||
FS.HTTP.Handlers.GetList = function httpGetListHandler() { // 41
|
||||
// Not Yet Implemented // 42
|
||||
// Need to check publications and return file list based on // 43
|
||||
// what user is allowed to see // 44
|
||||
}; // 45
|
||||
// 46
|
||||
/* // 47
|
||||
requestRange will parse the range set in request header - if not possible it // 48
|
||||
will throw fitting errors and autofill range for both partial and full ranges // 49
|
||||
// 50
|
||||
throws error or returns the object: // 51
|
||||
{ // 52
|
||||
start // 53
|
||||
end // 54
|
||||
length // 55
|
||||
unit // 56
|
||||
partial // 57
|
||||
} // 58
|
||||
*/ // 59
|
||||
var requestRange = function(req, fileSize) { // 60
|
||||
if (req) { // 61
|
||||
if (req.headers) { // 62
|
||||
var rangeString = req.headers.range; // 63
|
||||
// 64
|
||||
// Make sure range is a string // 65
|
||||
if (rangeString === ''+rangeString) { // 66
|
||||
// 67
|
||||
// range will be in the format "bytes=0-32767" // 68
|
||||
var parts = rangeString.split('='); // 69
|
||||
var unit = parts[0]; // 70
|
||||
// 71
|
||||
// Make sure parts consists of two strings and range is of type "byte" // 72
|
||||
if (parts.length == 2 && unit == 'bytes') { // 73
|
||||
// Parse the range // 74
|
||||
var range = parts[1].split('-'); // 75
|
||||
var start = Number(range[0]); // 76
|
||||
var end = Number(range[1]); // 77
|
||||
// 78
|
||||
// Fix invalid ranges? // 79
|
||||
if (range[0] != start) start = 0; // 80
|
||||
if (range[1] != end || !end) end = fileSize - 1; // 81
|
||||
// 82
|
||||
// Make sure range consists of a start and end point of numbers and start is less than end // 83
|
||||
if (start < end) { // 84
|
||||
// 85
|
||||
var partSize = 0 - start + end + 1; // 86
|
||||
// 87
|
||||
// Return the parsed range // 88
|
||||
return { // 89
|
||||
start: start, // 90
|
||||
end: end, // 91
|
||||
length: partSize, // 92
|
||||
size: fileSize, // 93
|
||||
unit: unit, // 94
|
||||
partial: (partSize < fileSize) // 95
|
||||
}; // 96
|
||||
// 97
|
||||
} else { // 98
|
||||
throw new Meteor.Error(416, "Requested Range Not Satisfiable"); // 99
|
||||
} // 100
|
||||
// 101
|
||||
} else { // 102
|
||||
// The first part should be bytes // 103
|
||||
throw new Meteor.Error(416, "Requested Range Unit Not Satisfiable"); // 104
|
||||
} // 105
|
||||
// 106
|
||||
} else { // 107
|
||||
// No range found // 108
|
||||
} // 109
|
||||
// 110
|
||||
} else { // 111
|
||||
// throw new Error('No request headers set for _parseRange function'); // 112
|
||||
} // 113
|
||||
} else { // 114
|
||||
throw new Error('No request object passed to _parseRange function'); // 115
|
||||
} // 116
|
||||
// 117
|
||||
return { // 118
|
||||
start: 0, // 119
|
||||
end: fileSize - 1, // 120
|
||||
length: fileSize, // 121
|
||||
size: fileSize, // 122
|
||||
unit: 'bytes', // 123
|
||||
partial: false // 124
|
||||
}; // 125
|
||||
}; // 126
|
||||
// 127
|
||||
/** // 128
|
||||
* @method FS.HTTP.Handlers.Get // 129
|
||||
* @public // 130
|
||||
* @returns {any} response // 131
|
||||
* // 132
|
||||
* HTTP GET request handler // 133
|
||||
*/ // 134
|
||||
FS.HTTP.Handlers.Get = function httpGetHandler(ref) { // 135
|
||||
var self = this; // 136
|
||||
// Once we have the file, we can test allow/deny validators // 137
|
||||
// XXX: pass on the "share" query eg. ?share=342hkjh23ggj for shared url access? // 138
|
||||
FS.Utility.validateAction(ref.collection._validators['download'], ref.file, self.userId /*, self.query.shareId*/); // 139
|
||||
// 140
|
||||
var storeName = ref.storeName; // 141
|
||||
// 142
|
||||
// If no storeName was specified, use the first defined storeName // 143
|
||||
if (typeof storeName !== "string") { // 144
|
||||
// No store handed, we default to primary store // 145
|
||||
storeName = ref.collection.primaryStore.name; // 146
|
||||
} // 147
|
||||
// 148
|
||||
// Get the storage reference // 149
|
||||
var storage = ref.collection.storesLookup[storeName]; // 150
|
||||
// 151
|
||||
if (!storage) { // 152
|
||||
throw new Meteor.Error(404, "Not Found", 'There is no store "' + storeName + '"'); // 153
|
||||
} // 154
|
||||
// 155
|
||||
// Get the file // 156
|
||||
var copyInfo = ref.file.copies[storeName]; // 157
|
||||
// 158
|
||||
if (!copyInfo) { // 159
|
||||
throw new Meteor.Error(404, "Not Found", 'This file was not stored in the ' + storeName + ' store'); // 160
|
||||
} // 161
|
||||
// 162
|
||||
// Set the content type for file // 163
|
||||
if (typeof copyInfo.type === "string") { // 164
|
||||
self.setContentType(copyInfo.type); // 165
|
||||
} else { // 166
|
||||
self.setContentType('application/octet-stream'); // 167
|
||||
} // 168
|
||||
// 169
|
||||
// Add 'Content-Disposition' header if requested a download/attachment URL // 170
|
||||
if (typeof ref.download !== "undefined") { // 171
|
||||
var filename = ref.filename || copyInfo.name; // 172
|
||||
self.addHeader('Content-Disposition', 'attachment; filename="' + filename + '"'); // 173
|
||||
} else { // 174
|
||||
self.addHeader('Content-Disposition', 'inline'); // 175
|
||||
} // 176
|
||||
// 177
|
||||
// Get the contents range from request // 178
|
||||
var range = requestRange(self.request, copyInfo.size); // 179
|
||||
// 180
|
||||
// Some browsers cope better if the content-range header is // 181
|
||||
// still included even for the full file being returned. // 182
|
||||
self.addHeader('Content-Range', range.unit + ' ' + range.start + '-' + range.end + '/' + range.size); // 183
|
||||
// 184
|
||||
// If a chunk/range was requested instead of the whole file, serve that' // 185
|
||||
if (range.partial) { // 186
|
||||
self.setStatusCode(206, 'Partial Content'); // 187
|
||||
} else { // 188
|
||||
self.setStatusCode(200, 'OK'); // 189
|
||||
} // 190
|
||||
// 191
|
||||
// Add any other global custom headers and collection-specific custom headers // 192
|
||||
FS.Utility.each(getHeaders.concat(getHeadersByCollection[ref.collection.name] || []), function(header) { // 193
|
||||
self.addHeader(header[0], header[1]); // 194
|
||||
}); // 195
|
||||
// 196
|
||||
// Inform clients about length (or chunk length in case of ranges) // 197
|
||||
self.addHeader('Content-Length', range.length); // 198
|
||||
// 199
|
||||
// Last modified header (updatedAt from file info) // 200
|
||||
self.addHeader('Last-Modified', copyInfo.updatedAt.toUTCString()); // 201
|
||||
// 202
|
||||
// Inform clients that we accept ranges for resumable chunked downloads // 203
|
||||
self.addHeader('Accept-Ranges', range.unit); // 204
|
||||
// 205
|
||||
if (FS.debug) console.log('Read file "' + (ref.filename || copyInfo.name) + '" ' + range.unit + ' ' + range.start + '-' + range.end + '/' + range.size);
|
||||
// 207
|
||||
var readStream = storage.adapter.createReadStream(ref.file, {start: range.start, end: range.end}); // 208
|
||||
// 209
|
||||
readStream.on('error', function(err) { // 210
|
||||
// Send proper error message on get error // 211
|
||||
if (err.message && err.statusCode) { // 212
|
||||
self.Error(new Meteor.Error(err.statusCode, err.message)); // 213
|
||||
} else { // 214
|
||||
self.Error(new Meteor.Error(503, 'Service unavailable')); // 215
|
||||
} // 216
|
||||
}); // 217
|
||||
// 218
|
||||
readStream.pipe(self.createWriteStream()); // 219
|
||||
}; // 220
|
||||
|
||||
const originalHandler = FS.HTTP.Handlers.Get;
|
||||
FS.HTTP.Handlers.Get = function (ref) {
|
||||
//console.log(ref.filename);
|
||||
try {
|
||||
var userAgent = (this.requestHeaders['user-agent']||'').toLowerCase();
|
||||
|
||||
if(userAgent.indexOf('msie') >= 0 || userAgent.indexOf('trident') >= 0 || userAgent.indexOf('chrome') >= 0) {
|
||||
ref.filename = encodeURIComponent(ref.filename);
|
||||
} else if(userAgent.indexOf('firefox') >= 0) {
|
||||
ref.filename = new Buffer(ref.filename).toString('binary');
|
||||
} else {
|
||||
/* safari*/
|
||||
ref.filename = new Buffer(ref.filename).toString('binary');
|
||||
}
|
||||
} catch (ex){
|
||||
ref.filename = 'tempfix';
|
||||
}
|
||||
return originalHandler.call(this, ref);
|
||||
};
|
||||
// 221
|
||||
/** // 222
|
||||
* @method FS.HTTP.Handlers.PutInsert // 223
|
||||
* @public // 224
|
||||
* @returns {Object} response object with _id property // 225
|
||||
* // 226
|
||||
* HTTP PUT file insert request handler // 227
|
||||
*/ // 228
|
||||
FS.HTTP.Handlers.PutInsert = function httpPutInsertHandler(ref) { // 229
|
||||
var self = this; // 230
|
||||
var opts = FS.Utility.extend({}, self.query || {}, self.params || {}); // 231
|
||||
// 232
|
||||
FS.debug && console.log("HTTP PUT (insert) handler"); // 233
|
||||
// 234
|
||||
// Create the nice FS.File // 235
|
||||
var fileObj = new FS.File(); // 236
|
||||
// 237
|
||||
// Set its name // 238
|
||||
fileObj.name(opts.filename || null); // 239
|
||||
// 240
|
||||
// Attach the readstream as the file's data // 241
|
||||
fileObj.attachData(self.createReadStream(), {type: self.requestHeaders['content-type'] || 'application/octet-stream'});
|
||||
// 243
|
||||
// Validate with insert allow/deny // 244
|
||||
FS.Utility.validateAction(ref.collection.files._validators['insert'], fileObj, self.userId); // 245
|
||||
// 246
|
||||
// Insert file into collection, triggering readStream storage // 247
|
||||
ref.collection.insert(fileObj); // 248
|
||||
// 249
|
||||
// Send response // 250
|
||||
self.setStatusCode(200); // 251
|
||||
// 252
|
||||
// Return the new file id // 253
|
||||
return {_id: fileObj._id}; // 254
|
||||
}; // 255
|
||||
// 256
|
||||
/** // 257
|
||||
* @method FS.HTTP.Handlers.PutUpdate // 258
|
||||
* @public // 259
|
||||
* @returns {Object} response object with _id and chunk properties // 260
|
||||
* // 261
|
||||
* HTTP PUT file update chunk request handler // 262
|
||||
*/ // 263
|
||||
FS.HTTP.Handlers.PutUpdate = function httpPutUpdateHandler(ref) { // 264
|
||||
var self = this; // 265
|
||||
var opts = FS.Utility.extend({}, self.query || {}, self.params || {}); // 266
|
||||
// 267
|
||||
var chunk = parseInt(opts.chunk, 10); // 268
|
||||
if (isNaN(chunk)) chunk = 0; // 269
|
||||
// 270
|
||||
FS.debug && console.log("HTTP PUT (update) handler received chunk: ", chunk); // 271
|
||||
// 272
|
||||
// Validate with insert allow/deny; also mounts and retrieves the file // 273
|
||||
FS.Utility.validateAction(ref.collection.files._validators['insert'], ref.file, self.userId); // 274
|
||||
// 275
|
||||
self.createReadStream().pipe( FS.TempStore.createWriteStream(ref.file, chunk) ); // 276
|
||||
// 277
|
||||
// Send response // 278
|
||||
self.setStatusCode(200); // 279
|
||||
// 280
|
||||
return { _id: ref.file._id, chunk: chunk }; // 281
|
||||
}; // 282
|
||||
// 283
|
||||
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
}).call(this);
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
(function () {
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
// //
|
||||
// packages/cfs:access-point/access-point-server.js //
|
||||
// //
|
||||
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
//
|
||||
var path = Npm.require("path"); // 1
|
||||
// 2
|
||||
HTTP.publishFormats({ // 3
|
||||
fileRecordFormat: function (input) { // 4
|
||||
// Set the method scope content type to json // 5
|
||||
this.setContentType('application/json'); // 6
|
||||
if (FS.Utility.isArray(input)) { // 7
|
||||
return EJSON.stringify(FS.Utility.map(input, function (obj) { // 8
|
||||
return FS.Utility.cloneFileRecord(obj); // 9
|
||||
})); // 10
|
||||
} else { // 11
|
||||
return EJSON.stringify(FS.Utility.cloneFileRecord(input)); // 12
|
||||
} // 13
|
||||
} // 14
|
||||
}); // 15
|
||||
// 16
|
||||
/** // 17
|
||||
* @method FS.HTTP.setHeadersForGet // 18
|
||||
* @public // 19
|
||||
* @param {Array} headers - List of headers, where each is a two-item array in which item 1 is the header name and item 2 is the header value.
|
||||
* @param {Array|String} [collections] - Which collections the headers should be added for. Omit this argument to add the header for all collections.
|
||||
* @returns {undefined} // 22
|
||||
*/ // 23
|
||||
FS.HTTP.setHeadersForGet = function setHeadersForGet(headers, collections) { // 24
|
||||
if (typeof collections === "string") { // 25
|
||||
collections = [collections]; // 26
|
||||
} // 27
|
||||
if (collections) { // 28
|
||||
FS.Utility.each(collections, function(collectionName) { // 29
|
||||
getHeadersByCollection[collectionName] = headers || []; // 30
|
||||
}); // 31
|
||||
} else { // 32
|
||||
getHeaders = headers || []; // 33
|
||||
} // 34
|
||||
}; // 35
|
||||
// 36
|
||||
/** // 37
|
||||
* @method FS.HTTP.publish // 38
|
||||
* @public // 39
|
||||
* @param {FS.Collection} collection // 40
|
||||
* @param {Function} func - Publish function that returns a cursor. // 41
|
||||
* @returns {undefined} // 42
|
||||
* // 43
|
||||
* Publishes all documents returned by the cursor at a GET URL // 44
|
||||
* with the format baseUrl/record/collectionName. The publish // 45
|
||||
* function `this` is similar to normal `Meteor.publish`. // 46
|
||||
*/ // 47
|
||||
FS.HTTP.publish = function fsHttpPublish(collection, func) { // 48
|
||||
var name = baseUrl + '/record/' + collection.name; // 49
|
||||
// Mount collection listing URL using http-publish package // 50
|
||||
HTTP.publish({ // 51
|
||||
name: name, // 52
|
||||
defaultFormat: 'fileRecordFormat', // 53
|
||||
collection: collection, // 54
|
||||
collectionGet: true, // 55
|
||||
collectionPost: false, // 56
|
||||
documentGet: true, // 57
|
||||
documentPut: false, // 58
|
||||
documentDelete: false // 59
|
||||
}, func); // 60
|
||||
// 61
|
||||
FS.debug && console.log("Registered HTTP method GET URLs:\n\n" + name + '\n' + name + '/:id\n'); // 62
|
||||
}; // 63
|
||||
// 64
|
||||
/** // 65
|
||||
* @method FS.HTTP.unpublish // 66
|
||||
* @public // 67
|
||||
* @param {FS.Collection} collection // 68
|
||||
* @returns {undefined} // 69
|
||||
* // 70
|
||||
* Unpublishes a restpoint created by a call to `FS.HTTP.publish` // 71
|
||||
*/ // 72
|
||||
FS.HTTP.unpublish = function fsHttpUnpublish(collection) { // 73
|
||||
// Mount collection listing URL using http-publish package // 74
|
||||
HTTP.unpublish(baseUrl + '/record/' + collection.name); // 75
|
||||
}; // 76
|
||||
// 77
|
||||
_existingMountPoints = {}; // 78
|
||||
// 79
|
||||
/** // 80
|
||||
* @method defaultSelectorFunction // 81
|
||||
* @private // 82
|
||||
* @returns { collection, file } // 83
|
||||
* // 84
|
||||
* This is the default selector function // 85
|
||||
*/ // 86
|
||||
var defaultSelectorFunction = function() { // 87
|
||||
var self = this; // 88
|
||||
// Selector function // 89
|
||||
// // 90
|
||||
// This function will have to return the collection and the // 91
|
||||
// file. If file not found undefined is returned - if null is returned the // 92
|
||||
// search was not possible // 93
|
||||
var opts = FS.Utility.extend({}, self.query || {}, self.params || {}); // 94
|
||||
// 95
|
||||
// Get the collection name from the url // 96
|
||||
var collectionName = opts.collectionName; // 97
|
||||
// 98
|
||||
// Get the id from the url // 99
|
||||
var id = opts.id; // 100
|
||||
// 101
|
||||
// Get the collection // 102
|
||||
var collection = FS._collections[collectionName]; // 103
|
||||
// 104
|
||||
// Get the file if possible else return null // 105
|
||||
var file = (id && collection)? collection.findOne({ _id: id }): null; // 106
|
||||
// 107
|
||||
// Return the collection and the file // 108
|
||||
return { // 109
|
||||
collection: collection, // 110
|
||||
file: file, // 111
|
||||
storeName: opts.store, // 112
|
||||
download: opts.download, // 113
|
||||
filename: opts.filename // 114
|
||||
}; // 115
|
||||
}; // 116
|
||||
// 117
|
||||
/* // 118
|
||||
* @method FS.HTTP.mount // 119
|
||||
* @public // 120
|
||||
* @param {array of string} mountPoints mount points to map rest functinality on // 121
|
||||
* @param {function} selector_f [selector] function returns `{ collection, file }` for mount points to work with // 122
|
||||
* // 123
|
||||
*/ // 124
|
||||
FS.HTTP.mount = function(mountPoints, selector_f) { // 125
|
||||
// We take mount points as an array and we get a selector function // 126
|
||||
var selectorFunction = selector_f || defaultSelectorFunction; // 127
|
||||
// 128
|
||||
var accessPoint = { // 129
|
||||
'stream': true, // 130
|
||||
'auth': expirationAuth, // 131
|
||||
'post': function(data) { // 132
|
||||
// Use the selector for finding the collection and file reference // 133
|
||||
var ref = selectorFunction.call(this); // 134
|
||||
// 135
|
||||
// We dont support post - this would be normal insert eg. of filerecord? // 136
|
||||
throw new Meteor.Error(501, "Not implemented", "Post is not supported"); // 137
|
||||
}, // 138
|
||||
'put': function(data) { // 139
|
||||
// Use the selector for finding the collection and file reference // 140
|
||||
var ref = selectorFunction.call(this); // 141
|
||||
// 142
|
||||
// Make sure we have a collection reference // 143
|
||||
if (!ref.collection) // 144
|
||||
throw new Meteor.Error(404, "Not Found", "No collection found"); // 145
|
||||
// 146
|
||||
// Make sure we have a file reference // 147
|
||||
if (ref.file === null) { // 148
|
||||
// No id supplied so we will create a new FS.File instance and // 149
|
||||
// insert the supplied data. // 150
|
||||
return FS.HTTP.Handlers.PutInsert.apply(this, [ref]); // 151
|
||||
} else { // 152
|
||||
if (ref.file) { // 153
|
||||
return FS.HTTP.Handlers.PutUpdate.apply(this, [ref]); // 154
|
||||
} else { // 155
|
||||
throw new Meteor.Error(404, "Not Found", 'No file found'); // 156
|
||||
} // 157
|
||||
} // 158
|
||||
}, // 159
|
||||
'get': function(data) { // 160
|
||||
// Use the selector for finding the collection and file reference // 161
|
||||
var ref = selectorFunction.call(this); // 162
|
||||
// 163
|
||||
// Make sure we have a collection reference // 164
|
||||
if (!ref.collection) // 165
|
||||
throw new Meteor.Error(404, "Not Found", "No collection found"); // 166
|
||||
// 167
|
||||
// Make sure we have a file reference // 168
|
||||
if (ref.file === null) { // 169
|
||||
// No id supplied so we will return the published list of files ala // 170
|
||||
// http.publish in json format // 171
|
||||
return FS.HTTP.Handlers.GetList.apply(this, [ref]); // 172
|
||||
} else { // 173
|
||||
if (ref.file) { // 174
|
||||
return FS.HTTP.Handlers.Get.apply(this, [ref]); // 175
|
||||
} else { // 176
|
||||
throw new Meteor.Error(404, "Not Found", 'No file found'); // 177
|
||||
} // 178
|
||||
} // 179
|
||||
}, // 180
|
||||
'delete': function(data) { // 181
|
||||
// Use the selector for finding the collection and file reference // 182
|
||||
var ref = selectorFunction.call(this); // 183
|
||||
// 184
|
||||
// Make sure we have a collection reference // 185
|
||||
if (!ref.collection) // 186
|
||||
throw new Meteor.Error(404, "Not Found", "No collection found"); // 187
|
||||
// 188
|
||||
// Make sure we have a file reference // 189
|
||||
if (ref.file) { // 190
|
||||
return FS.HTTP.Handlers.Del.apply(this, [ref]); // 191
|
||||
} else { // 192
|
||||
throw new Meteor.Error(404, "Not Found", 'No file found'); // 193
|
||||
} // 194
|
||||
} // 195
|
||||
}; // 196
|
||||
// 197
|
||||
var accessPoints = {}; // 198
|
||||
// 199
|
||||
// Add debug message // 200
|
||||
FS.debug && console.log('Registered HTTP method URLs:'); // 201
|
||||
// 202
|
||||
FS.Utility.each(mountPoints, function(mountPoint) { // 203
|
||||
// Couple mountpoint and accesspoint // 204
|
||||
accessPoints[mountPoint] = accessPoint; // 205
|
||||
// Remember our mountpoints // 206
|
||||
_existingMountPoints[mountPoint] = mountPoint; // 207
|
||||
// Add debug message // 208
|
||||
FS.debug && console.log(mountPoint); // 209
|
||||
}); // 210
|
||||
// 211
|
||||
// XXX: HTTP:methods should unmount existing mounts in case of overwriting? // 212
|
||||
HTTP.methods(accessPoints); // 213
|
||||
// 214
|
||||
}; // 215
|
||||
// 216
|
||||
/** // 217
|
||||
* @method FS.HTTP.unmount // 218
|
||||
* @public // 219
|
||||
* @param {string | array of string} [mountPoints] Optional, if not specified all mountpoints are unmounted // 220
|
||||
* // 221
|
||||
*/ // 222
|
||||
FS.HTTP.unmount = function(mountPoints) { // 223
|
||||
// The mountPoints is optional, can be string or array if undefined then // 224
|
||||
// _existingMountPoints will be used // 225
|
||||
var unmountList; // 226
|
||||
// Container for the mount points to unmount // 227
|
||||
var unmountPoints = {}; // 228
|
||||
// 229
|
||||
if (typeof mountPoints === 'undefined') { // 230
|
||||
// Use existing mount points - unmount all // 231
|
||||
unmountList = _existingMountPoints; // 232
|
||||
} else if (mountPoints === ''+mountPoints) { // 233
|
||||
// Got a string // 234
|
||||
unmountList = [mountPoints]; // 235
|
||||
} else if (mountPoints.length) { // 236
|
||||
// Got an array // 237
|
||||
unmountList = mountPoints; // 238
|
||||
} // 239
|
||||
// 240
|
||||
// If we have a list to unmount // 241
|
||||
if (unmountList) { // 242
|
||||
// Iterate over each item // 243
|
||||
FS.Utility.each(unmountList, function(mountPoint) { // 244
|
||||
// Check _existingMountPoints to make sure the mount point exists in our // 245
|
||||
// context / was created by the FS.HTTP.mount // 246
|
||||
if (_existingMountPoints[mountPoint]) { // 247
|
||||
// Mark as unmount // 248
|
||||
unmountPoints[mountPoint] = false; // 249
|
||||
// Release // 250
|
||||
delete _existingMountPoints[mountPoint]; // 251
|
||||
} // 252
|
||||
}); // 253
|
||||
FS.debug && console.log('FS.HTTP.unmount:'); // 254
|
||||
FS.debug && console.log(unmountPoints); // 255
|
||||
// Complete unmount // 256
|
||||
HTTP.methods(unmountPoints); // 257
|
||||
} // 258
|
||||
}; // 259
|
||||
// 260
|
||||
// ### FS.Collection maps on HTTP pr. default on the following restpoints: // 261
|
||||
// * // 262
|
||||
// baseUrl + '/files/:collectionName/:id/:filename', // 263
|
||||
// baseUrl + '/files/:collectionName/:id', // 264
|
||||
// baseUrl + '/files/:collectionName' // 265
|
||||
// // 266
|
||||
// Change/ replace the existing mount point by: // 267
|
||||
// ```js // 268
|
||||
// // unmount all existing // 269
|
||||
// FS.HTTP.unmount(); // 270
|
||||
// // Create new mount point // 271
|
||||
// FS.HTTP.mount([ // 272
|
||||
// '/cfs/files/:collectionName/:id/:filename', // 273
|
||||
// '/cfs/files/:collectionName/:id', // 274
|
||||
// '/cfs/files/:collectionName' // 275
|
||||
// ]); // 276
|
||||
// ``` // 277
|
||||
// // 278
|
||||
mountUrls = function mountUrls() { // 279
|
||||
// We unmount first in case we are calling this a second time // 280
|
||||
FS.HTTP.unmount(); // 281
|
||||
// 282
|
||||
FS.HTTP.mount([ // 283
|
||||
baseUrl + '/files/:collectionName/:id/:filename', // 284
|
||||
baseUrl + '/files/:collectionName/:id', // 285
|
||||
baseUrl + '/files/:collectionName' // 286
|
||||
]); // 287
|
||||
}; // 288
|
||||
// 289
|
||||
// Returns the userId from URL token // 290
|
||||
var expirationAuth = function expirationAuth() { // 291
|
||||
var self = this; // 292
|
||||
// 293
|
||||
// Read the token from '/hello?token=base64' // 294
|
||||
var encodedToken = self.query.token; // 295
|
||||
// 296
|
||||
FS.debug && console.log("token: "+encodedToken); // 297
|
||||
// 298
|
||||
if (!encodedToken || !Meteor.users) return false; // 299
|
||||
// 300
|
||||
// Check the userToken before adding it to the db query // 301
|
||||
// Set the this.userId // 302
|
||||
var tokenString = FS.Utility.atob(encodedToken); // 303
|
||||
// 304
|
||||
var tokenObject; // 305
|
||||
try { // 306
|
||||
tokenObject = JSON.parse(tokenString); // 307
|
||||
} catch(err) { // 308
|
||||
throw new Meteor.Error(400, 'Bad Request'); // 309
|
||||
} // 310
|
||||
// 311
|
||||
// XXX: Do some check here of the object // 312
|
||||
var userToken = tokenObject.authToken; // 313
|
||||
if (userToken !== ''+userToken) { // 314
|
||||
throw new Meteor.Error(400, 'Bad Request'); // 315
|
||||
} // 316
|
||||
// 317
|
||||
// If we have an expiration token we should check that it's still valid // 318
|
||||
if (tokenObject.expiration != null) { // 319
|
||||
// check if its too old // 320
|
||||
var now = Date.now(); // 321
|
||||
if (tokenObject.expiration < now) { // 322
|
||||
FS.debug && console.log('Expired token: ' + tokenObject.expiration + ' is less than ' + now); // 323
|
||||
throw new Meteor.Error(500, 'Expired token'); // 324
|
||||
} // 325
|
||||
} // 326
|
||||
// 327
|
||||
// We are not on a secure line - so we have to look up the user... // 328
|
||||
var user = Meteor.users.findOne({ // 329
|
||||
$or: [ // 330
|
||||
{'services.resume.loginTokens.hashedToken': Accounts._hashLoginToken(userToken)}, // 331
|
||||
{'services.resume.loginTokens.token': userToken} // 332
|
||||
] // 333
|
||||
}); // 334
|
||||
// 335
|
||||
// Set the userId in the scope // 336
|
||||
return user && user._id; // 337
|
||||
}; // 338
|
||||
// 339
|
||||
HTTP.methods( // 340
|
||||
{'/cfs/servertime': { // 341
|
||||
get: function(data) { // 342
|
||||
return Date.now().toString(); // 343
|
||||
} // 344
|
||||
} // 345
|
||||
}); // 346
|
||||
// 347
|
||||
// Unify client / server api // 348
|
||||
FS.HTTP.now = function() { // 349
|
||||
return Date.now(); // 350
|
||||
}; // 351
|
||||
// 352
|
||||
// Start up the basic mount points // 353
|
||||
Meteor.startup(function () { // 354
|
||||
mountUrls(); // 355
|
||||
}); // 356
|
||||
// 357
|
||||
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
}).call(this);
|
||||
|
||||
///////////////////////////////////////////////////////////////////////
|
||||
|
||||
}).call(this);
|
||||
|
||||
|
||||
/* Exports */
|
||||
if (typeof Package === 'undefined') Package = {};
|
||||
Package['cfs:access-point'] = {};
|
||||
|
||||
})();
|
|
@@ -1,238 +0,0 @@
/* global JsonRoutes */
if (Meteor.isServer) {
  // todo XXX once we have a real API in place, move that route there
  // todo XXX also share the route definition between the client and the server
  // so that we could use something like
  // `ApiRoutes.path('boards/export', boardId)``
  // on the client instead of copy/pasting the route path manually between the
  // client and the server.
  /**
   * @operation export
   * @tag Boards
   *
   * @summary This route is used to export the board.
   *
   * @description If user is already logged-in, pass loginToken as param
   * "authToken": '/api/boards/:boardId/export?authToken=:token'
   *
   * See https://blog.kayla.com.au/server-side-route-authentication-in-meteor/
   * for detailed explanations
   *
   * @param {string} boardId the ID of the board we are exporting
   * @param {string} authToken the loginToken
   */
  JsonRoutes.add('get', '/api/boards/:boardId/export', function(req, res) {
    const boardId = req.params.boardId;
    let user = null;

    const loginToken = req.query.authToken;
    if (loginToken) {
      const hashToken = Accounts._hashLoginToken(loginToken);
      user = Meteor.users.findOne({
        'services.resume.loginTokens.hashedToken': hashToken,
      });
    } else if (!Meteor.settings.public.sandstorm) {
      Authentication.checkUserId(req.userId);
      user = Users.findOne({ _id: req.userId, isAdmin: true });
    }

    const exporter = new Exporter(boardId);
    if (exporter.canExport(user)) {
      JsonRoutes.sendResult(res, {
        code: 200,
        data: exporter.build(),
      });
    } else {
      // we could send an explicit error message, but on the other hand the only
      // way to get there is by hacking the UI so let's keep it raw.
      JsonRoutes.sendResult(res, 403);
    }
  });
}

// exporter maybe is broken since Gridfs introduced, add fs and path

export class Exporter {
  constructor(boardId) {
    this._boardId = boardId;
  }

  build() {
    const fs = Npm.require('fs');
    const os = Npm.require('os');
    const path = Npm.require('path');

    const byBoard = { boardId: this._boardId };
    const byBoardNoLinked = {
      boardId: this._boardId,
      linkedId: { $in: ['', null] },
    };
    // we do not want to retrieve boardId in related elements
    const noBoardId = {
      fields: {
        boardId: 0,
      },
    };
    const result = {
      _format: 'wekan-board-1.0.0',
    };
    _.extend(
      result,
      Boards.findOne(this._boardId, {
        fields: {
          stars: 0,
        },
      }),
    );
    result.lists = Lists.find(byBoard, noBoardId).fetch();
    result.cards = Cards.find(byBoardNoLinked, noBoardId).fetch();
    result.swimlanes = Swimlanes.find(byBoard, noBoardId).fetch();
    result.customFields = CustomFields.find(
      { boardIds: { $in: [this.boardId] } },
      { fields: { boardId: 0 } },
    ).fetch();
    result.comments = CardComments.find(byBoard, noBoardId).fetch();
    result.activities = Activities.find(byBoard, noBoardId).fetch();
    result.rules = Rules.find(byBoard, noBoardId).fetch();
    result.checklists = [];
    result.checklistItems = [];
    result.subtaskItems = [];
    result.triggers = [];
    result.actions = [];
    result.cards.forEach(card => {
      result.checklists.push(
        ...Checklists.find({
          cardId: card._id,
        }).fetch(),
      );
      result.checklistItems.push(
        ...ChecklistItems.find({
          cardId: card._id,
        }).fetch(),
      );
      result.subtaskItems.push(
        ...Cards.find({
          parentId: card._id,
        }).fetch(),
      );
    });
    result.rules.forEach(rule => {
      result.triggers.push(
        ...Triggers.find(
          {
            _id: rule.triggerId,
          },
          noBoardId,
        ).fetch(),
      );
      result.actions.push(
        ...Actions.find(
          {
            _id: rule.actionId,
          },
          noBoardId,
        ).fetch(),
      );
    });

    // [Old] for attachments we only export IDs and absolute url to original doc
    // [New] Encode attachment to base64
    const getBase64Data = function(doc, callback) {
      let buffer = new Buffer(0);
      // callback has the form function (err, res) {}
      const tmpFile = path.join(
        os.tmpdir(),
        `tmpexport${process.pid}${Math.random()}`,
      );
      const tmpWriteable = fs.createWriteStream(tmpFile);
      const readStream = doc.createReadStream();
      readStream.on('data', function(chunk) {
        buffer = Buffer.concat([buffer, chunk]);
      });
      readStream.on('error', function(err) {
        callback(err, null);
      });
      readStream.on('end', function() {
        // done
        fs.unlink(tmpFile, () => {
          //ignored
        });
        callback(null, buffer.toString('base64'));
      });
      readStream.pipe(tmpWriteable);
    };
    const getBase64DataSync = Meteor.wrapAsync(getBase64Data);
    result.attachments = Attachments.find(byBoard)
      .fetch()
      .map(attachment => {
        return {
          _id: attachment._id,
          cardId: attachment.cardId,
          // url: FlowRouter.url(attachment.url()),
          file: getBase64DataSync(attachment),
          name: attachment.original.name,
          type: attachment.original.type,
        };
      });

    // we also have to export some user data - as the other elements only
    // include id but we have to be careful:
    // 1- only exports users that are linked somehow to that board
    // 2- do not export any sensitive information
    const users = {};
    result.members.forEach(member => {
      users[member.userId] = true;
    });
    result.lists.forEach(list => {
      users[list.userId] = true;
    });
    result.cards.forEach(card => {
      users[card.userId] = true;
      if (card.members) {
        card.members.forEach(memberId => {
          users[memberId] = true;
        });
      }
    });
    result.comments.forEach(comment => {
      users[comment.userId] = true;
    });
    result.activities.forEach(activity => {
      users[activity.userId] = true;
    });
    result.checklists.forEach(checklist => {
      users[checklist.userId] = true;
    });
    const byUserIds = {
      _id: {
        $in: Object.getOwnPropertyNames(users),
      },
    };
    // we use whitelist to be sure we do not expose inadvertently
    // some secret fields that gets added to User later.
    const userFields = {
      fields: {
        _id: 1,
        username: 1,
        'profile.fullname': 1,
        'profile.initials': 1,
        'profile.avatarUrl': 1,
      },
    };
    result.users = Users.find(byUserIds, userFields)
      .fetch()
      .map(user => {
        // user avatar is stored as a relative url, we export absolute
        if ((user.profile || {}).avatarUrl) {
          user.profile.avatarUrl = FlowRouter.url(user.profile.avatarUrl);
        }
        return user;
      });
    return result;
  }

  canExport(user) {
    const board = Boards.findOne(this._boardId);
    return board && board.isVisibleBy(user);
  }
}
@@ -1,640 +0,0 @@
import ldapjs from 'ldapjs';
import util from 'util';
import Bunyan from 'bunyan';
import { log_debug, log_info, log_warn, log_error } from './logger';

export default class LDAP {
  constructor() {
    this.ldapjs = ldapjs;

    this.connected = false;

    this.options = {
      host: this.constructor.settings_get('LDAP_HOST'),
      port: this.constructor.settings_get('LDAP_PORT'),
      Reconnect: this.constructor.settings_get('LDAP_RECONNECT'),
      timeout: this.constructor.settings_get('LDAP_TIMEOUT'),
      connect_timeout: this.constructor.settings_get('LDAP_CONNECT_TIMEOUT'),
      idle_timeout: this.constructor.settings_get('LDAP_IDLE_TIMEOUT'),
      encryption: this.constructor.settings_get('LDAP_ENCRYPTION'),
      ca_cert: this.constructor.settings_get('LDAP_CA_CERT'),
      reject_unauthorized:
        this.constructor.settings_get('LDAP_REJECT_UNAUTHORIZED') || false,
      Authentication: this.constructor.settings_get('LDAP_AUTHENTIFICATION'),
      Authentication_UserDN: this.constructor.settings_get(
        'LDAP_AUTHENTIFICATION_USERDN',
      ),
      Authentication_Password: this.constructor.settings_get(
        'LDAP_AUTHENTIFICATION_PASSWORD',
      ),
      Authentication_Fallback: this.constructor.settings_get(
        'LDAP_LOGIN_FALLBACK',
      ),
      BaseDN: this.constructor.settings_get('LDAP_BASEDN'),
      Internal_Log_Level: this.constructor.settings_get('INTERNAL_LOG_LEVEL'),
      User_Authentication: this.constructor.settings_get(
        'LDAP_USER_AUTHENTICATION',
      ),
      User_Authentication_Field: this.constructor.settings_get(
        'LDAP_USER_AUTHENTICATION_FIELD',
      ),
      User_Attributes: this.constructor.settings_get('LDAP_USER_ATTRIBUTES'),
      User_Search_Filter: this.constructor.settings_get(
        'LDAP_USER_SEARCH_FILTER',
      ),
      User_Search_Scope: this.constructor.settings_get(
        'LDAP_USER_SEARCH_SCOPE',
      ),
      User_Search_Field: this.constructor.settings_get(
        'LDAP_USER_SEARCH_FIELD',
      ),
      Search_Page_Size: this.constructor.settings_get('LDAP_SEARCH_PAGE_SIZE'),
      Search_Size_Limit: this.constructor.settings_get(
        'LDAP_SEARCH_SIZE_LIMIT',
      ),
      group_filter_enabled: this.constructor.settings_get(
        'LDAP_GROUP_FILTER_ENABLE',
      ),
      group_filter_object_class: this.constructor.settings_get(
        'LDAP_GROUP_FILTER_OBJECTCLASS',
      ),
      group_filter_group_id_attribute: this.constructor.settings_get(
        'LDAP_GROUP_FILTER_GROUP_ID_ATTRIBUTE',
      ),
      group_filter_group_member_attribute: this.constructor.settings_get(
        'LDAP_GROUP_FILTER_GROUP_MEMBER_ATTRIBUTE',
      ),
      group_filter_group_member_format: this.constructor.settings_get(
        'LDAP_GROUP_FILTER_GROUP_MEMBER_FORMAT',
      ),
      group_filter_group_name: this.constructor.settings_get(
        'LDAP_GROUP_FILTER_GROUP_NAME',
      ),
    };
  }

  static settings_get(name, ...args) {
    let value = process.env[name];
    if (value !== undefined) {
      if (value === 'true' || value === 'false') {
        value = JSON.parse(value);
      } else if (value !== '' && !isNaN(value)) {
        value = Number(value);
      }
      return value;
    } else {
      log_warn(`Lookup for unset variable: ${name}`);
    }
  }

  connectSync(...args) {
    if (!this._connectSync) {
      this._connectSync = Meteor.wrapAsync(this.connectAsync, this);
    }
    return this._connectSync(...args);
  }

  searchAllSync(...args) {
    if (!this._searchAllSync) {
      this._searchAllSync = Meteor.wrapAsync(this.searchAllAsync, this);
    }
    return this._searchAllSync(...args);
  }

  connectAsync(callback) {
    log_info('Init setup');

    let replied = false;

    const connectionOptions = {
      url: `${this.options.host}:${this.options.port}`,
      timeout: this.options.timeout,
      connectTimeout: this.options.connect_timeout,
      idleTimeout: this.options.idle_timeout,
      reconnect: this.options.Reconnect,
    };

    if (this.options.Internal_Log_Level !== 'disabled') {
      connectionOptions.log = new Bunyan({
        name: 'ldapjs',
        component: 'client',
        stream: process.stderr,
|
||||
level: this.options.Internal_Log_Level,
|
||||
});
|
||||
}
|
||||
|
||||
const tlsOptions = {
|
||||
rejectUnauthorized: this.options.reject_unauthorized,
|
||||
};
|
||||
|
||||
if (this.options.ca_cert && this.options.ca_cert !== '') {
|
||||
// Split CA cert into array of strings
|
||||
const chainLines = this.constructor
|
||||
.settings_get('LDAP_CA_CERT')
|
||||
.split('\n');
|
||||
let cert = [];
|
||||
const ca = [];
|
||||
chainLines.forEach(line => {
|
||||
cert.push(line);
|
||||
if (line.match(/-END CERTIFICATE-/)) {
|
||||
ca.push(cert.join('\n'));
|
||||
cert = [];
|
||||
}
|
||||
});
|
||||
tlsOptions.ca = ca;
|
||||
}
|
||||
|
||||
if (this.options.encryption === 'ssl') {
|
||||
connectionOptions.url = `ldaps://${connectionOptions.url}`;
|
||||
connectionOptions.tlsOptions = tlsOptions;
|
||||
} else {
|
||||
connectionOptions.url = `ldap://${connectionOptions.url}`;
|
||||
}
|
||||
|
||||
log_info('Connecting', connectionOptions.url);
|
||||
log_debug(`connectionOptions${util.inspect(connectionOptions)}`);
|
||||
|
||||
this.client = ldapjs.createClient(connectionOptions);
|
||||
|
||||
this.bindSync = Meteor.wrapAsync(this.client.bind, this.client);
|
||||
|
||||
this.client.on('error', error => {
|
||||
log_error('connection', error);
|
||||
if (replied === false) {
|
||||
replied = true;
|
||||
callback(error, null);
|
||||
}
|
||||
});
|
||||
|
||||
this.client.on('idle', () => {
|
||||
log_info('Idle');
|
||||
this.disconnect();
|
||||
});
|
||||
|
||||
this.client.on('close', () => {
|
||||
log_info('Closed');
|
||||
});
|
||||
|
||||
if (this.options.encryption === 'tls') {
|
||||
// Set the host parameter for tls.connect, which is used by ldapjs starttls. This shouldn't be needed in newer Node.js versions (e.g. v5.6.0).
|
||||
// https://github.com/RocketChat/Rocket.Chat/issues/2035
|
||||
// https://github.com/mcavage/node-ldapjs/issues/349
|
||||
tlsOptions.host = this.options.host;
|
||||
|
||||
log_info('Starting TLS');
|
||||
log_debug('tlsOptions', tlsOptions);
|
||||
|
||||
this.client.starttls(tlsOptions, null, (error, response) => {
|
||||
if (error) {
|
||||
log_error('TLS connection', error);
|
||||
if (replied === false) {
|
||||
replied = true;
|
||||
callback(error, null);
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
log_info('TLS connected');
|
||||
this.connected = true;
|
||||
if (replied === false) {
|
||||
replied = true;
|
||||
callback(null, response);
|
||||
}
|
||||
});
|
||||
} else {
|
||||
this.client.on('connect', response => {
|
||||
log_info('LDAP connected');
|
||||
this.connected = true;
|
||||
if (replied === false) {
|
||||
replied = true;
|
||||
callback(null, response);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
setTimeout(() => {
|
||||
if (replied === false) {
|
||||
log_error('connection time out', connectionOptions.connectTimeout);
|
||||
replied = true;
|
||||
callback(new Error('Timeout'));
|
||||
}
|
||||
}, connectionOptions.connectTimeout);
|
||||
}
|
||||
|
||||
getUserFilter(username) {
|
||||
const filter = [];
|
||||
|
||||
if (this.options.User_Search_Filter !== '') {
|
||||
if (this.options.User_Search_Filter[0] === '(') {
|
||||
filter.push(`${this.options.User_Search_Filter}`);
|
||||
} else {
|
||||
filter.push(`(${this.options.User_Search_Filter})`);
|
||||
}
|
||||
}
|
||||
|
||||
const usernameFilter = this.options.User_Search_Field.split(',').map(
|
||||
item => `(${item}=${username})`,
|
||||
);
|
||||
|
||||
if (usernameFilter.length === 0) {
|
||||
log_error('LDAP_USER_SEARCH_FIELD not defined');
|
||||
} else if (usernameFilter.length === 1) {
|
||||
filter.push(`${usernameFilter[0]}`);
|
||||
} else {
|
||||
filter.push(`(|${usernameFilter.join('')})`);
|
||||
}
|
||||
|
||||
return `(&${filter.join('')})`;
|
||||
}
|
||||
|
||||
bindUserIfNecessary(username, password) {
|
||||
if (this.domainBinded === true) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (!this.options.User_Authentication) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (!this.options.BaseDN) throw new Error('BaseDN is not provided');
|
||||
|
||||
const userDn = `${this.options.User_Authentication_Field}=${username},${this.options.BaseDN}`;
|
||||
|
||||
this.bindSync(userDn, password);
|
||||
this.domainBinded = true;
|
||||
}
|
||||
|
||||
bindIfNecessary() {
|
||||
if (this.domainBinded === true) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (this.options.Authentication !== true) {
|
||||
return;
|
||||
}
|
||||
|
||||
log_info('Binding UserDN', this.options.Authentication_UserDN);
|
||||
|
||||
this.bindSync(
|
||||
this.options.Authentication_UserDN,
|
||||
this.options.Authentication_Password,
|
||||
);
|
||||
this.domainBinded = true;
|
||||
}
|
||||
|
||||
searchUsersSync(username, page) {
|
||||
this.bindIfNecessary();
|
||||
const searchOptions = {
|
||||
filter: this.getUserFilter(username),
|
||||
scope: this.options.User_Search_Scope || 'sub',
|
||||
sizeLimit: this.options.Search_Size_Limit,
|
||||
};
|
||||
|
||||
if (this.options.User_Attributes)
|
||||
searchOptions.attributes = this.options.User_Attributes.split(',');
|
||||
|
||||
if (this.options.Search_Page_Size > 0) {
|
||||
searchOptions.paged = {
|
||||
pageSize: this.options.Search_Page_Size,
|
||||
pagePause: !!page,
|
||||
};
|
||||
}
|
||||
|
||||
log_info('Searching user', username);
|
||||
log_debug('searchOptions', searchOptions);
|
||||
log_debug('BaseDN', this.options.BaseDN);
|
||||
|
||||
if (page) {
|
||||
return this.searchAllPaged(this.options.BaseDN, searchOptions, page);
|
||||
}
|
||||
|
||||
return this.searchAllSync(this.options.BaseDN, searchOptions);
|
||||
}
|
||||
|
||||
getUserByIdSync(id, attribute) {
|
||||
this.bindIfNecessary();
|
||||
|
||||
const Unique_Identifier_Field = this.constructor
|
||||
.settings_get('LDAP_UNIQUE_IDENTIFIER_FIELD')
|
||||
.split(',');
|
||||
|
||||
let filter;
|
||||
|
||||
if (attribute) {
|
||||
filter = new this.ldapjs.filters.EqualityFilter({
|
||||
attribute,
|
||||
value: Buffer.from(id, 'hex'),
|
||||
});
|
||||
} else {
|
||||
const filters = [];
|
||||
Unique_Identifier_Field.forEach(item => {
|
||||
filters.push(
|
||||
new this.ldapjs.filters.EqualityFilter({
|
||||
attribute: item,
|
||||
value: Buffer.from(id, 'hex'),
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
filter = new this.ldapjs.filters.OrFilter({ filters });
|
||||
}
|
||||
|
||||
const searchOptions = {
|
||||
filter,
|
||||
scope: 'sub',
|
||||
};
|
||||
|
||||
log_info('Searching by id', id);
|
||||
log_debug('search filter', searchOptions.filter.toString());
|
||||
log_debug('BaseDN', this.options.BaseDN);
|
||||
|
||||
const result = this.searchAllSync(this.options.BaseDN, searchOptions);
|
||||
|
||||
if (!Array.isArray(result) || result.length === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (result.length > 1) {
|
||||
log_error('Search by id', id, 'returned', result.length, 'records');
|
||||
}
|
||||
|
||||
return result[0];
|
||||
}
|
||||
|
||||
getUserByUsernameSync(username) {
|
||||
this.bindIfNecessary();
|
||||
|
||||
const searchOptions = {
|
||||
filter: this.getUserFilter(username),
|
||||
scope: this.options.User_Search_Scope || 'sub',
|
||||
};
|
||||
|
||||
log_info('Searching user', username);
|
||||
log_debug('searchOptions', searchOptions);
|
||||
log_debug('BaseDN', this.options.BaseDN);
|
||||
|
||||
const result = this.searchAllSync(this.options.BaseDN, searchOptions);
|
||||
|
||||
if (!Array.isArray(result) || result.length === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (result.length > 1) {
|
||||
log_error(
|
||||
'Search by username',
|
||||
username,
|
||||
'returned',
|
||||
result.length,
|
||||
'records',
|
||||
);
|
||||
}
|
||||
|
||||
return result[0];
|
||||
}
|
||||
|
||||
getUserGroups(username, ldapUser) {
|
||||
if (!this.options.group_filter_enabled) {
|
||||
return true;
|
||||
}
|
||||
|
||||
const filter = ['(&'];
|
||||
|
||||
if (this.options.group_filter_object_class !== '') {
|
||||
filter.push(`(objectclass=${this.options.group_filter_object_class})`);
|
||||
}
|
||||
|
||||
if (this.options.group_filter_group_member_attribute !== '') {
|
||||
const format_value =
|
||||
ldapUser[this.options.group_filter_group_member_format];
|
||||
if (format_value) {
|
||||
filter.push(
|
||||
`(${this.options.group_filter_group_member_attribute}=${format_value})`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
filter.push(')');
|
||||
|
||||
const searchOptions = {
|
||||
filter: filter.join('').replace(/#{username}/g, username),
|
||||
scope: 'sub',
|
||||
};
|
||||
|
||||
log_debug('Group list filter LDAP:', searchOptions.filter);
|
||||
|
||||
const result = this.searchAllSync(this.options.BaseDN, searchOptions);
|
||||
|
||||
if (!Array.isArray(result) || result.length === 0) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const grp_identifier = this.options.group_filter_group_id_attribute || 'cn';
|
||||
const groups = [];
|
||||
result.forEach(item => {
|
||||
groups.push(item[grp_identifier]);
|
||||
});
|
||||
log_debug(`Groups: ${groups.join(', ')}`);
|
||||
return groups;
|
||||
}
|
||||
|
||||
isUserInGroup(username, ldapUser) {
|
||||
if (!this.options.group_filter_enabled) {
|
||||
return true;
|
||||
}
|
||||
|
||||
const grps = this.getUserGroups(username, ldapUser);
|
||||
|
||||
const filter = ['(&'];
|
||||
|
||||
if (this.options.group_filter_object_class !== '') {
|
||||
filter.push(`(objectclass=${this.options.group_filter_object_class})`);
|
||||
}
|
||||
|
||||
if (this.options.group_filter_group_member_attribute !== '') {
|
||||
const format_value =
|
||||
ldapUser[this.options.group_filter_group_member_format];
|
||||
if (format_value) {
|
||||
filter.push(
|
||||
`(${this.options.group_filter_group_member_attribute}=${format_value})`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
if (this.options.group_filter_group_id_attribute !== '') {
|
||||
filter.push(
|
||||
`(${this.options.group_filter_group_id_attribute}=${this.options.group_filter_group_name})`,
|
||||
);
|
||||
}
|
||||
filter.push(')');
|
||||
|
||||
const searchOptions = {
|
||||
filter: filter.join('').replace(/#{username}/g, username),
|
||||
scope: 'sub',
|
||||
};
|
||||
|
||||
log_debug('Group filter LDAP:', searchOptions.filter);
|
||||
|
||||
const result = this.searchAllSync(this.options.BaseDN, searchOptions);
|
||||
|
||||
if (!Array.isArray(result) || result.length === 0) {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
extractLdapEntryData(entry) {
|
||||
const values = {
|
||||
_raw: entry.raw,
|
||||
};
|
||||
|
||||
Object.keys(values._raw).forEach(key => {
|
||||
const value = values._raw[key];
|
||||
|
||||
if (!['thumbnailPhoto', 'jpegPhoto'].includes(key)) {
|
||||
if (value instanceof Buffer) {
|
||||
values[key] = value.toString();
|
||||
} else {
|
||||
values[key] = value;
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return values;
|
||||
}
|
||||
|
||||
searchAllPaged(BaseDN, options, page) {
|
||||
this.bindIfNecessary();
|
||||
|
||||
const processPage = ({ entries, title, end, next }) => {
|
||||
log_info(title);
|
||||
// Force the LDAP idle timer to wait for record processing
|
||||
this.client._updateIdle(true);
|
||||
page(null, entries, {
|
||||
end,
|
||||
next: () => {
|
||||
// Reset idle timer
|
||||
this.client._updateIdle();
|
||||
next && next();
|
||||
},
|
||||
});
|
||||
};
|
||||
|
||||
this.client.search(BaseDN, options, (error, res) => {
|
||||
if (error) {
|
||||
log_error(error);
|
||||
page(error);
|
||||
return;
|
||||
}
|
||||
|
||||
res.on('error', error => {
|
||||
log_error(error);
|
||||
page(error);
|
||||
return;
|
||||
});
|
||||
|
||||
let entries = [];
|
||||
|
||||
const internalPageSize =
|
||||
options.paged && options.paged.pageSize > 0
|
||||
? options.paged.pageSize * 2
|
||||
: 500;
|
||||
|
||||
res.on('searchEntry', entry => {
|
||||
entries.push(this.extractLdapEntryData(entry));
|
||||
|
||||
if (entries.length >= internalPageSize) {
|
||||
processPage({
|
||||
entries,
|
||||
title: 'Internal Page',
|
||||
end: false,
|
||||
});
|
||||
entries = [];
|
||||
}
|
||||
});
|
||||
|
||||
res.on('page', (result, next) => {
|
||||
if (!next) {
|
||||
this.client._updateIdle(true);
|
||||
processPage({
|
||||
entries,
|
||||
title: 'Final Page',
|
||||
end: true,
|
||||
});
|
||||
} else if (entries.length) {
|
||||
log_info('Page');
|
||||
processPage({
|
||||
entries,
|
||||
title: 'Page',
|
||||
end: false,
|
||||
next,
|
||||
});
|
||||
entries = [];
|
||||
}
|
||||
});
|
||||
|
||||
res.on('end', () => {
|
||||
if (entries.length) {
|
||||
processPage({
|
||||
entries,
|
||||
title: 'Final Page',
|
||||
end: true,
|
||||
});
|
||||
entries = [];
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
searchAllAsync(BaseDN, options, callback) {
|
||||
this.bindIfNecessary();
|
||||
|
||||
this.client.search(BaseDN, options, (error, res) => {
|
||||
if (error) {
|
||||
log_error(error);
|
||||
callback(error);
|
||||
return;
|
||||
}
|
||||
|
||||
res.on('error', error => {
|
||||
log_error(error);
|
||||
callback(error);
|
||||
return;
|
||||
});
|
||||
|
||||
const entries = [];
|
||||
|
||||
res.on('searchEntry', entry => {
|
||||
entries.push(this.extractLdapEntryData(entry));
|
||||
});
|
||||
|
||||
res.on('end', () => {
|
||||
log_info('Search result count', entries.length);
|
||||
callback(null, entries);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
authSync(dn, password) {
|
||||
log_info('Authenticating', dn);
|
||||
|
||||
try {
|
||||
if (password === '') {
|
||||
throw new Error('Password is not provided');
|
||||
}
|
||||
this.bindSync(dn, password);
|
||||
log_info('Authenticated', dn);
|
||||
return true;
|
||||
} catch (error) {
|
||||
log_info('Not authenticated', dn);
|
||||
log_debug('error', error);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
disconnect() {
|
||||
this.connected = false;
|
||||
this.domainBinded = false;
|
||||
log_info('Disconnecting');
|
||||
this.client.unbind();
|
||||
}
|
||||
}
|
|
@ -1,163 +0,0 @@
|
|||
Oidc = {};
|
||||
|
||||
OAuth.registerService('oidc', 2, null, function(query) {
|
||||
var debug = process.env.DEBUG || false;
|
||||
var token = getToken(query);
|
||||
if (debug) console.log('XXX: register token:', token);
|
||||
|
||||
var accessToken = token.access_token || token.id_token;
|
||||
var expiresAt = +new Date() + 1000 * parseInt(token.expires_in, 10);
|
||||
|
||||
var userinfo = getUserInfo(accessToken);
|
||||
if (debug) console.log('XXX: userinfo:', userinfo);
|
||||
|
||||
var serviceData = {};
|
||||
serviceData.id = userinfo[process.env.OAUTH2_ID_MAP]; // || userinfo["id"];
|
||||
serviceData.username = userinfo[process.env.OAUTH2_USERNAME_MAP]; // || userinfo["uid"];
|
||||
serviceData.fullname = userinfo[process.env.OAUTH2_FULLNAME_MAP]; // || userinfo["displayName"];
|
||||
serviceData.accessToken = accessToken;
|
||||
serviceData.expiresAt = expiresAt;
|
||||
serviceData.email = userinfo[process.env.OAUTH2_EMAIL_MAP]; // || userinfo["email"];
|
||||
|
||||
if (accessToken) {
|
||||
var tokenContent = getTokenContent(accessToken);
|
||||
var fields = _.pick(
|
||||
tokenContent,
|
||||
getConfiguration().idTokenWhitelistFields,
|
||||
);
|
||||
_.extend(serviceData, fields);
|
||||
}
|
||||
|
||||
if (token.refresh_token) serviceData.refreshToken = token.refresh_token;
|
||||
if (debug) console.log('XXX: serviceData:', serviceData);
|
||||
|
||||
var profile = {};
|
||||
profile.name = userinfo[process.env.OAUTH2_FULLNAME_MAP]; // || userinfo["displayName"];
|
||||
profile.email = userinfo[process.env.OAUTH2_EMAIL_MAP]; // || userinfo["email"];
|
||||
if (debug) console.log('XXX: profile:', profile);
|
||||
|
||||
return {
|
||||
serviceData: serviceData,
|
||||
options: { profile: profile },
|
||||
};
|
||||
});
|
||||
|
||||
var userAgent = 'Meteor';
|
||||
if (Meteor.release) {
|
||||
userAgent += '/' + Meteor.release;
|
||||
}
|
||||
|
||||
var getToken = function(query) {
|
||||
var debug = process.env.DEBUG || false;
|
||||
var config = getConfiguration();
|
||||
if (config.tokenEndpoint.includes('https://')) {
|
||||
var serverTokenEndpoint = config.tokenEndpoint;
|
||||
} else {
|
||||
var serverTokenEndpoint = config.serverUrl + config.tokenEndpoint;
|
||||
}
|
||||
var requestPermissions = config.requestPermissions;
|
||||
var response;
|
||||
|
||||
try {
|
||||
response = HTTP.post(serverTokenEndpoint, {
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
'User-Agent': userAgent,
|
||||
},
|
||||
params: {
|
||||
code: query.code,
|
||||
client_id: config.clientId,
|
||||
client_secret: OAuth.openSecret(config.secret),
|
||||
redirect_uri: OAuth._redirectUri('oidc', config),
|
||||
grant_type: 'authorization_code',
|
||||
scope: requestPermissions,
|
||||
state: query.state,
|
||||
},
|
||||
});
|
||||
} catch (err) {
|
||||
throw _.extend(
|
||||
new Error(
|
||||
'Failed to get token from OIDC ' +
|
||||
serverTokenEndpoint +
|
||||
': ' +
|
||||
err.message,
|
||||
),
|
||||
{ response: err.response },
|
||||
);
|
||||
}
|
||||
if (response.data.error) {
|
||||
// if the http response was a json object with an error attribute
|
||||
throw new Error(
|
||||
'Failed to complete handshake with OIDC ' +
|
||||
serverTokenEndpoint +
|
||||
': ' +
|
||||
response.data.error,
|
||||
);
|
||||
} else {
|
||||
if (debug) console.log('XXX: getToken response: ', response.data);
|
||||
return response.data;
|
||||
}
|
||||
};
|
||||
|
||||
var getUserInfo = function(accessToken) {
|
||||
var debug = process.env.DEBUG || false;
|
||||
var config = getConfiguration();
|
||||
// Some userinfo endpoints use a different base URL than the authorization or token endpoints.
|
||||
// This logic allows the end user to override the setting by providing the full URL to userinfo in their config.
|
||||
if (config.userinfoEndpoint.includes('https://')) {
|
||||
var serverUserinfoEndpoint = config.userinfoEndpoint;
|
||||
} else {
|
||||
var serverUserinfoEndpoint = config.serverUrl + config.userinfoEndpoint;
|
||||
}
|
||||
var response;
|
||||
try {
|
||||
response = HTTP.get(serverUserinfoEndpoint, {
|
||||
headers: {
|
||||
'User-Agent': userAgent,
|
||||
Authorization: 'Bearer ' + accessToken,
|
||||
},
|
||||
});
|
||||
} catch (err) {
|
||||
throw _.extend(
|
||||
new Error(
|
||||
'Failed to fetch userinfo from OIDC ' +
|
||||
serverUserinfoEndpoint +
|
||||
': ' +
|
||||
err.message,
|
||||
),
|
||||
{ response: err.response },
|
||||
);
|
||||
}
|
||||
if (debug) console.log('XXX: getUserInfo response: ', response.data);
|
||||
return response.data;
|
||||
};
|
||||
|
||||
var getConfiguration = function() {
|
||||
var config = ServiceConfiguration.configurations.findOne({ service: 'oidc' });
|
||||
if (!config) {
|
||||
throw new ServiceConfiguration.ConfigError('Service oidc not configured.');
|
||||
}
|
||||
return config;
|
||||
};
|
||||
|
||||
var getTokenContent = function(token) {
|
||||
var content = null;
|
||||
if (token) {
|
||||
try {
|
||||
var parts = token.split('.');
|
||||
var header = JSON.parse(Buffer.from(parts[0], 'base64').toString());
|
||||
content = JSON.parse(Buffer.from(parts[1], 'base64').toString());
|
||||
var signature = Buffer.from(parts[2], 'base64');
|
||||
var signed = parts[0] + '.' + parts[1];
|
||||
} catch (err) {
|
||||
this.content = {
|
||||
exp: 0,
|
||||
};
|
||||
}
|
||||
}
|
||||
return content;
|
||||
};
|
||||
|
||||
Oidc.retrieveCredential = function(credentialToken, credentialSecret) {
|
||||
return OAuth.retrieveCredential(credentialToken, credentialSecret);
|
||||
};
|
4361
.sandstorm-meteor-1.8/package-lock.json
generated
File diff suppressed because it is too large
|
@ -1,73 +0,0 @@
|
|||
{
|
||||
"name": "wekan",
|
||||
"version": "v3.95.0",
|
||||
"description": "Open-Source kanban",
|
||||
"private": true,
|
||||
"scripts": {
|
||||
"lint": "eslint --cache --ext .js --ignore-path .eslintignore .",
|
||||
"lint:eslint:fix": "eslint --ext .js --ignore-path .eslintignore --fix .",
|
||||
"lint:staged": "lint-staged",
|
||||
"prettify": "prettier --write '**/*.js' '**/*.jsx'",
|
||||
"test": "npm run lint"
|
||||
},
|
||||
"lint-staged": {
|
||||
"*.js": [
|
||||
"meteor npm run prettify",
|
||||
"meteor npm run lint:eslint:fix",
|
||||
"git add --force"
|
||||
],
|
||||
"*.jsx": [
|
||||
"meteor npm run prettify",
|
||||
"meteor npm run lint:eslint:fix",
|
||||
"git add --force"
|
||||
],
|
||||
"*.json": [
|
||||
"prettier --write",
|
||||
"git add --force"
|
||||
]
|
||||
},
|
||||
"pre-commit": "lint:staged",
|
||||
"eslintConfig": {
|
||||
"extends": "@meteorjs/eslint-config-meteor"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/wekan/wekan.git"
|
||||
},
|
||||
"license": "MIT",
|
||||
"bugs": {
|
||||
"url": "https://github.com/wekan/wekan/issues"
|
||||
},
|
||||
"homepage": "https://wekan.github.io",
|
||||
"devDependencies": {
|
||||
"eslint": "^6.8.0",
|
||||
"eslint-config-meteor": "^0.1.1",
|
||||
"eslint-config-prettier": "^6.10.0",
|
||||
"eslint-import-resolver-meteor": "^0.4.0",
|
||||
"eslint-plugin-import": "^2.20.1",
|
||||
"eslint-plugin-meteor": "^6.0.0",
|
||||
"eslint-plugin-prettier": "^3.1.2",
|
||||
"lint-staged": "^10.0.8",
|
||||
"pre-commit": "^1.2.2",
|
||||
"prettier": "^1.19.1",
|
||||
"prettier-eslint": "^9.0.1"
|
||||
},
|
||||
"dependencies": {
|
||||
"@babel/runtime": "^7.8.7",
|
||||
"ajv": "^6.12.0",
|
||||
"babel-runtime": "^6.26.0",
|
||||
"bcrypt": "^4.0.1",
|
||||
"bson": "^4.0.3",
|
||||
"bunyan": "^1.8.12",
|
||||
"es6-promise": "^4.2.8",
|
||||
"gridfs-stream": "^1.1.1",
|
||||
"ldapjs": "^1.0.2",
|
||||
"meteor-node-stubs": "^1.0.0",
|
||||
"mongodb": "^3.5.5",
|
||||
"os": "^0.1.1",
|
||||
"page": "^1.11.5",
|
||||
"qs": "^6.9.1",
|
||||
"source-map-support": "^0.5.16",
|
||||
"xss": "^1.0.6"
|
||||
}
|
||||
}
|
|
@ -1,244 +0,0 @@
|
|||
name: wekan
|
||||
version: 0
|
||||
version-script: git describe --tags | cut -c 2-
|
||||
summary: The open-source kanban
|
||||
description: |
|
||||
Wekan is an open-source and collaborative kanban board application.
|
||||
|
||||
Whether you’re maintaining a personal todo list, planning your holidays with some friends, or working in a team on your next revolutionary idea, Kanban boards are an unbeatable tool to keep your things organized. They give you a visual overview of the current state of your project, and make you productive by allowing you to focus on the few items that matter the most.
|
||||
Depending on target environment, some configuration settings might need to be adjusted.
|
||||
For a full list of configuration options, call:
|
||||
$ wekan.help
|
||||
|
||||
confinement: strict
|
||||
grade: stable
|
||||
|
||||
architectures:
|
||||
- amd64
|
||||
|
||||
plugs:
|
||||
mongodb-plug:
|
||||
interface: content
|
||||
target: $SNAP_DATA/shared
|
||||
|
||||
hooks:
|
||||
configure:
|
||||
plugs:
|
||||
- network
|
||||
- network-bind
|
||||
|
||||
slots:
|
||||
mongodb-slot:
|
||||
interface: content
|
||||
write:
|
||||
- $SNAP_DATA/share
|
||||
|
||||
apps:
|
||||
wekan:
|
||||
command: wekan-control
|
||||
daemon: simple
|
||||
plugs: [network, network-bind]
|
||||
|
||||
mongodb:
|
||||
command: mongodb-control
|
||||
daemon: simple
|
||||
plugs: [network, network-bind]
|
||||
|
||||
caddy:
|
||||
command: caddy-control
|
||||
daemon: simple
|
||||
plugs: [network, network-bind]
|
||||
|
||||
help:
|
||||
command: wekan-help
|
||||
|
||||
database-backup:
|
||||
command: mongodb-backup
|
||||
plugs: [network, network-bind]
|
||||
|
||||
database-list-backups:
|
||||
command: ls -al $SNAP_COMMON/db-backups/
|
||||
|
||||
database-restore:
|
||||
command: mongodb-restore
|
||||
plugs: [network, network-bind]
|
||||
|
||||
parts:
|
||||
mongodb:
|
||||
source: https://fastdl.mongodb.org/linux/mongodb-linux-x86_64-ubuntu1604-3.2.22.tgz
|
||||
plugin: dump
|
||||
stage-packages: [libssl1.0.0]
|
||||
filesets:
|
||||
mongo:
|
||||
- usr
|
||||
- bin
|
||||
- lib
|
||||
stage:
|
||||
- $mongo
|
||||
prime:
|
||||
- $mongo
|
||||
|
||||
wekan:
|
||||
source: .
|
||||
plugin: nodejs
|
||||
node-engine: 8.17.0
|
||||
node-packages:
|
||||
- node-gyp
|
||||
- node-pre-gyp
|
||||
- fibers@2.0.0
|
||||
build-packages:
|
||||
- ca-certificates
|
||||
- apt-utils
|
||||
- python
|
||||
# - python3
|
||||
- g++
|
||||
- capnproto
|
||||
- curl
|
||||
- execstack
|
||||
- nodejs
|
||||
- npm
|
||||
stage-packages:
|
||||
- libfontconfig1
|
||||
override-build: |
|
||||
echo "Cleaning environment first"
|
||||
rm -rf ~/.meteor ~/.npm /usr/local/lib/node_modules
|
||||
# Create the OpenAPI specification
|
||||
rm -rf .build
|
||||
#mkdir -p .build/python
|
||||
#cd .build/python
|
||||
#git clone --depth 1 -b master https://github.com/Kronuz/esprima-python
|
||||
#cd esprima-python
|
||||
#python3 setup.py install
|
||||
#cd ../../..
|
||||
#mkdir -p ./public/api
|
||||
#python3 ./openapi/generate_openapi.py --release $(git describe --tags --abbrev=0) > ./public/api/wekan.yml
|
||||
# we temporarily need api2html and mkdirp
|
||||
#npm install -g api2html@0.3.0
|
||||
#npm install -g mkdirp
|
||||
#api2html -c ./public/logo-header.png -o ./public/api/wekan.html ./public/api/wekan.yml
|
||||
#npm uninstall -g mkdirp
|
||||
#npm uninstall -g api2html
|
||||
# Node Fibers 100% CPU usage issue:
|
||||
# https://github.com/wekan/wekan-mongodb/issues/2#issuecomment-381453161
|
||||
# https://github.com/meteor/meteor/issues/9796#issuecomment-381676326
|
||||
# https://github.com/sandstorm-io/sandstorm/blob/0f1fec013fe7208ed0fd97eb88b31b77e3c61f42/shell/server/00-startup.js#L99-L129
|
||||
# Also see beginning of wekan/server/authentication.js
|
||||
# import Fiber from "fibers";
|
||||
# Fiber.poolSize = 1e9;
|
||||
# OLD: Download node version 8.12.0 prerelease build => Official node 8.12.0 has been released
|
||||
# Description at https://releases.wekan.team/node.txt
|
||||
##echo "375bd8db50b9c692c0bbba6e96d4114cd29bee3770f901c1ff2249d1038f1348 node" >> node-SHASUMS256.txt.asc
|
||||
##curl https://releases.wekan.team/node -o node
|
||||
# Verify Fibers patched node authenticity
|
||||
##echo "Fibers 100% CPU issue patched node authenticity:"
|
||||
##grep node node-SHASUMS256.txt.asc | shasum -a 256 -c -
|
||||
##rm -f node-SHASUMS256.txt.asc
|
||||
##chmod +x node
|
||||
##mv node `which node`
|
||||
# DOES NOT WORK: paxctl fix.
|
||||
# Removed from build-packages: - paxctl
|
||||
#echo "Applying paxctl fix for alpine linux: https://github.com/wekan/wekan/issues/1303"
|
||||
#paxctl -mC `which node`
|
||||
#echo "Installing npm"
|
||||
#curl -L https://www.npmjs.com/install.sh | sh
|
||||
echo "Installing meteor"
|
||||
curl https://install.meteor.com/ -o install_meteor.sh
|
||||
#sed -i "s|RELEASE=.*|RELEASE=\"1.8.1-beta.0\"|g" install_meteor.sh
|
||||
chmod +x install_meteor.sh
|
||||
sh install_meteor.sh
|
||||
rm install_meteor.sh
|
||||
# REPOS BELOW ARE INCLUDED IN THE WEKAN REPO
|
||||
#if [ ! -d "packages" ]; then
|
||||
# mkdir packages
|
||||
#fi
|
||||
#if [ ! -d "packages/kadira-flow-router" ]; then
|
||||
# cd packages
|
||||
# git clone --depth 1 -b master https://github.com/wekan/flow-router.git kadira-flow-router
|
||||
# cd ..
|
||||
#fi
|
||||
#if [ ! -d "packages/meteor-useraccounts-core" ]; then
|
||||
# cd packages
|
||||
# git clone --depth 1 -b master https://github.com/meteor-useraccounts/core.git meteor-useraccounts-core
|
||||
# sed -i 's/api\.versionsFrom/\/\/api.versionsFrom/' meteor-useraccounts-core/package.js
|
||||
# cd ..
|
||||
#fi
|
||||
#if [ ! -d "packages/meteor-accounts-cas" ]; then
|
||||
# cd packages
|
||||
# git clone --depth 1 -b master https://github.com/wekan/meteor-accounts-cas.git meteor-accounts-cas
|
||||
# cd ..
|
||||
#fi
|
||||
#if [ ! -d "packages/wekan-ldap" ]; then
|
||||
# cd packages
|
||||
# git clone --depth 1 -b master https://github.com/wekan/wekan-ldap.git
|
||||
# cd ..
|
||||
#fi
|
||||
#if [ ! -d "packages/wekan-scrollbar" ]; then
|
||||
# cd packages
|
||||
# git clone --depth 1 -b master https://github.com/wekan/wekan-scrollbar.git
|
||||
# cd ..
|
||||
#fi
|
||||
#if [ ! -d "packages/wekan_accounts-oidc" ]; then
|
||||
# cd packages
|
||||
# git clone --depth 1 -b master https://github.com/wekan/meteor-accounts-oidc.git
|
||||
# mv meteor-accounts-oidc/packages/switch_accounts-oidc wekan-accounts-oidc
|
||||
# mv meteor-accounts-oidc/packages/switch_oidc wekan-oidc
|
||||
# rm -rf meteor-accounts-oidc
|
||||
# cd ..
|
||||
#fi
|
||||
#if [ ! -d "packages/markdown" ]; then
|
||||
# cd packages
|
||||
# git clone --depth 1 -b master --recurse-submodules https://github.com/wekan/markdown.git
|
||||
# cd ..
|
||||
#fi
|
||||
rm -rf .build
|
||||
meteor add standard-minifier-js --allow-superuser
|
||||
meteor npm install --allow-superuser
|
||||
meteor npm install --allow-superuser --save babel-runtime
|
||||
meteor build .build --directory --allow-superuser
|
||||
cp -f fix-download-unicode/cfs_access-point.txt .build/bundle/programs/server/packages/cfs_access-point.js
|
||||
#Removed binary version of bcrypt because of security vulnerability that is not fixed yet.
|
||||
#https://github.com/wekan/wekan/commit/4b2010213907c61b0e0482ab55abb06f6a668eac
|
||||
#https://github.com/wekan/wekan/commit/7eeabf14be3c63fae2226e561ef8a0c1390c8d3c
|
||||
#cd .build/bundle/programs/server/npm/node_modules/meteor/npm-bcrypt
|
||||
#rm -rf node_modules/bcrypt
|
||||
#meteor npm install --save bcrypt
|
||||
# Change from npm-bcrypt directory back to .build/bundle/programs/server directory.
|
||||
#cd ../../../../
|
||||
# Change to directory .build/bundle/programs/server
|
||||
cd .build/bundle/programs/server
|
||||
npm install
|
||||
npm install --allow-superuser --save babel-runtime
|
||||
#meteor npm install --save bcrypt
|
||||
# Change back to Wekan source directory
|
||||
cd ../../../..
|
||||
cp -r .build/bundle/* $SNAPCRAFT_PART_INSTALL/
|
||||
cp .build/bundle/.node_version.txt $SNAPCRAFT_PART_INSTALL/
|
||||
rm -f $SNAPCRAFT_PART_INSTALL/lib/node_modules/wekan
|
||||
rm -f $SNAPCRAFT_PART_INSTALL/programs/server/npm/node_modules/meteor/rajit_bootstrap3-datepicker/lib/bootstrap-datepicker/node_modules/phantomjs-prebuilt/lib/phantom/bin/phantomjs
|
||||
rm -f $SNAPCRAFT_PART_INSTALL/programs/server/npm/node_modules/tar/lib/.mkdir.js.swp
|
||||
rm -f $SNAPCRAFT_PART_INSTALL/lib/node_modules/node-pre-gyp/node_modules/tar/lib/.mkdir.js.swp
|
||||
rm -f $SNAPCRAFT_PART_INSTALL/lib/node_modules/node-gyp/node_modules/tar/lib/.mkdir.js.swp
|
||||
# Meteor 1.8.x: remove additional .swp files
|
||||
rm -f $SNAPCRAFT_PART_INSTALL/programs/server/node_modules/node-pre-gyp/node_modules/tar/lib/.mkdir.js.swp
|
||||
|
||||
organize:
|
||||
README: README.wekan
|
||||
prime:
|
||||
- -lib/node_modules/node-pre-gyp/node_modules/tar/lib/.unpack.js.swp
|
||||
|
||||
helpers:
|
||||
source: snap-src
|
||||
plugin: dump
|
||||
|
||||
caddy:
|
||||
plugin: dump
|
||||
source: https://caddyserver.com/download/linux/amd64?license=personal&telemetry=off
|
||||
source-type: tar
|
||||
organize:
|
||||
caddy: bin/caddy
|
||||
CHANGES.txt: CADDY_CHANGES.txt
|
||||
EULA.txt: CADDY_EULA.txt
|
||||
LICENSES.txt: CADDY_LICENSES.txt
|
||||
README.txt: CADDY_README.txt
|
||||
stage:
|
||||
- -init
|
|
@ -1,853 +0,0 @@
|
|||
const DateString = Match.Where(function(dateAsString) {
|
||||
check(dateAsString, String);
|
||||
return moment(dateAsString, moment.ISO_8601).isValid();
|
||||
});
|
||||
|
||||
export class WekanCreator {
|
||||
constructor(data) {
|
||||
// we log the current date, to use the same timestamp for all our actions.
|
||||
// this helps to retrieve all elements created by the same import.
|
||||
this._nowDate = new Date();
|
||||
// The object creation dates, indexed by Wekan id
|
||||
// (so we only parse actions once!)
|
||||
this.createdAt = {
|
||||
board: null,
|
||||
cards: {},
|
||||
lists: {},
|
||||
swimlanes: {},
|
||||
};
|
||||
// The object creator Wekan Id, indexed by the object Wekan id
|
||||
// (so we only parse actions once!)
|
||||
this.createdBy = {
|
||||
cards: {}, // only cards have a field for that
|
||||
};
|
||||
|
||||
// Map of labels Wekan ID => Wekan ID
|
||||
this.labels = {};
|
||||
// Map of swimlanes Wekan ID => Wekan ID
|
||||
this.swimlanes = {};
|
||||
// Map of lists Wekan ID => Wekan ID
|
||||
this.lists = {};
|
||||
// Map of cards Wekan ID => Wekan ID
|
||||
this.cards = {};
|
||||
// Map of comments Wekan ID => Wekan ID
|
||||
this.commentIds = {};
|
||||
// Map of attachments Wekan ID => Wekan ID
|
||||
this.attachmentIds = {};
|
||||
// Map of checklists Wekan ID => Wekan ID
|
||||
this.checklists = {};
|
||||
// Map of checklistItems Wekan ID => Wekan ID
|
||||
this.checklistItems = {};
|
||||
// The comments, indexed by Wekan card id (to map when importing cards)
|
||||
this.comments = {};
|
||||
// Map of rules Wekan ID => Wekan ID
|
||||
this.rules = {};
|
||||
// the members, indexed by Wekan member id => Wekan user ID
|
||||
this.members = data.membersMapping ? data.membersMapping : {};
|
||||
// Map of triggers Wekan ID => Wekan ID
|
||||
this.triggers = {};
|
||||
// Map of actions Wekan ID => Wekan ID
|
||||
this.actions = {};
|
||||
|
||||
// maps a wekanCardId to an array of wekanAttachments
|
||||
this.attachments = {};
|
||||
}
|
||||
|
||||
/**
|
||||
* If dateString is provided,
|
||||
* return the Date it represents.
|
||||
* If not, it returns the date when this method was first called.
|
||||
* This is useful for us, as we want all import operations to
|
||||
* have the exact same date for easier later retrieval.
|
||||
*
|
||||
* @param {String} dateString a properly formatted Date
|
||||
*/
|
||||
_now(dateString) {
|
||||
if (dateString) {
|
||||
return new Date(dateString);
|
||||
}
|
||||
if (!this._nowDate) {
|
||||
this._nowDate = new Date();
|
||||
}
|
||||
return this._nowDate;
|
||||
}
|
||||
|
||||
/**
|
||||
* if wekanUserId is provided and we have a mapping,
|
||||
* return it.
|
||||
* Otherwise return the currently logged-in user.
|
||||
* @param wekanUserId
|
||||
* @private
|
||||
*/
|
||||
_user(wekanUserId) {
|
||||
if (wekanUserId && this.members[wekanUserId]) {
|
||||
return this.members[wekanUserId];
|
||||
}
|
||||
return Meteor.userId();
|
||||
}
|
||||
|
||||
checkActivities(wekanActivities) {
|
||||
check(wekanActivities, [
|
||||
Match.ObjectIncluding({
|
||||
activityType: String,
|
||||
createdAt: DateString,
|
||||
}),
|
||||
]);
|
||||
// XXX we could perform more thorough checks based on action type
|
||||
}
|
||||
|
||||
checkBoard(wekanBoard) {
|
||||
check(
|
||||
wekanBoard,
|
||||
Match.ObjectIncluding({
|
||||
archived: Boolean,
|
||||
title: String,
|
||||
// XXX refine control by validating 'color' against a list of
|
||||
// allowed values (is it worth the maintenance?)
|
||||
color: String,
|
||||
permission: Match.Where(value => {
|
||||
return ['private', 'public'].indexOf(value) >= 0;
|
||||
}),
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
checkCards(wekanCards) {
|
||||
check(wekanCards, [
|
||||
Match.ObjectIncluding({
|
||||
archived: Boolean,
|
||||
dateLastActivity: DateString,
|
||||
labelIds: [String],
|
||||
title: String,
|
||||
sort: Number,
|
||||
}),
|
||||
]);
|
||||
}
|
||||
|
||||
checkLabels(wekanLabels) {
|
||||
check(wekanLabels, [
|
||||
Match.ObjectIncluding({
|
||||
// XXX refine control by validating 'color' against a list of allowed
|
||||
// values (is it worth the maintenance?)
|
||||
color: String,
|
||||
}),
|
||||
]);
|
||||
}
|
||||
|
||||
checkLists(wekanLists) {
|
||||
check(wekanLists, [
|
||||
Match.ObjectIncluding({
|
||||
archived: Boolean,
|
||||
title: String,
|
||||
}),
|
||||
]);
|
||||
}
|
||||
|
||||
checkSwimlanes(wekanSwimlanes) {
|
||||
check(wekanSwimlanes, [
|
||||
Match.ObjectIncluding({
|
||||
archived: Boolean,
|
||||
title: String,
|
||||
}),
|
||||
]);
|
||||
}
|
||||
|
||||
checkChecklists(wekanChecklists) {
|
||||
check(wekanChecklists, [
|
||||
Match.ObjectIncluding({
|
||||
cardId: String,
|
||||
title: String,
|
||||
}),
|
||||
]);
|
||||
}
|
||||
|
||||
checkChecklistItems(wekanChecklistItems) {
|
||||
check(wekanChecklistItems, [
|
||||
Match.ObjectIncluding({
|
||||
cardId: String,
|
||||
title: String,
|
||||
}),
|
||||
]);
|
||||
}
|
||||
|
||||
checkRules(wekanRules) {
|
||||
check(wekanRules, [
|
||||
Match.ObjectIncluding({
|
||||
triggerId: String,
|
||||
actionId: String,
|
||||
title: String,
|
||||
}),
|
||||
]);
|
||||
}
|
||||
|
||||
checkTriggers(wekanTriggers) {
|
||||
// XXX More check based on trigger type
|
||||
check(wekanTriggers, [
|
||||
Match.ObjectIncluding({
|
||||
activityType: String,
|
||||
desc: String,
|
||||
}),
|
||||
]);
|
||||
}
|
||||
|
||||
getMembersToMap(data) {
|
||||
// we will work on the list itself (an ordered array of objects); when a
|
||||
// mapping is done, we add a 'wekan' field to the object representing the
|
||||
// imported member
|
||||
const membersToMap = data.members;
|
||||
const users = data.users;
|
||||
// auto-map based on username
|
||||
membersToMap.forEach(importedMember => {
|
||||
importedMember.id = importedMember.userId;
|
||||
delete importedMember.userId;
|
||||
const user = users.filter(user => {
|
||||
return user._id === importedMember.id;
|
||||
})[0];
|
||||
if (user.profile && user.profile.fullname) {
|
||||
importedMember.fullName = user.profile.fullname;
|
||||
}
|
||||
importedMember.username = user.username;
|
||||
const wekanUser = Users.findOne({ username: importedMember.username });
|
||||
if (wekanUser) {
|
||||
importedMember.wekanId = wekanUser._id;
|
||||
}
|
||||
});
|
||||
return membersToMap;
|
||||
}
|
||||
|
||||
checkActions(wekanActions) {
|
||||
// XXX More check based on action type
|
||||
check(wekanActions, [
|
||||
Match.ObjectIncluding({
|
||||
actionType: String,
|
||||
desc: String,
|
||||
}),
|
||||
]);
|
||||
}
|
||||
|
||||
// You must call parseActivities before calling this one.
|
||||
createBoardAndLabels(boardToImport) {
|
||||
const boardToCreate = {
|
||||
archived: boardToImport.archived,
|
||||
color: boardToImport.color,
|
||||
// very old boards won't have a creation activity so no creation date
|
||||
createdAt: this._now(boardToImport.createdAt),
|
||||
labels: [],
|
||||
members: [
|
||||
{
|
||||
userId: Meteor.userId(),
|
||||
wekanId: Meteor.userId(),
|
||||
isActive: true,
|
||||
isAdmin: true,
|
||||
isNoComments: false,
|
||||
isCommentOnly: false,
|
||||
swimlaneId: false,
|
||||
},
|
||||
],
|
||||
// Standalone export may be missing modifiedAt; add it here to fix that
|
||||
modifiedAt: this._now(boardToImport.modifiedAt),
|
||||
permission: boardToImport.permission,
|
||||
slug: getSlug(boardToImport.title) || 'board',
|
||||
stars: 0,
|
||||
title: boardToImport.title,
|
||||
};
|
||||
// now add other members
|
||||
if (boardToImport.members) {
|
||||
boardToImport.members.forEach(wekanMember => {
|
||||
// do we already have it in our list?
|
||||
if (
|
||||
!boardToCreate.members.some(
|
||||
member => member.wekanId === wekanMember.wekanId,
|
||||
)
|
||||
)
|
||||
boardToCreate.members.push({
|
||||
...wekanMember,
|
||||
userId: wekanMember.wekanId,
|
||||
});
|
||||
});
|
||||
}
|
||||
boardToImport.labels.forEach(label => {
|
||||
const labelToCreate = {
|
||||
_id: Random.id(6),
|
||||
color: label.color,
|
||||
name: label.name,
|
||||
};
|
||||
// We need to remember them by Wekan ID, as this is the only ref we have
|
||||
// when importing cards.
|
||||
this.labels[label._id] = labelToCreate._id;
|
||||
boardToCreate.labels.push(labelToCreate);
|
||||
});
|
||||
const boardId = Boards.direct.insert(boardToCreate);
|
||||
Boards.direct.update(boardId, {
|
||||
$set: {
|
||||
modifiedAt: this._now(),
|
||||
},
|
||||
});
|
||||
// log activity
|
||||
Activities.direct.insert({
|
||||
activityType: 'importBoard',
|
||||
boardId,
|
||||
createdAt: this._now(),
|
||||
source: {
|
||||
id: boardToImport.id,
|
||||
system: 'Wekan',
|
||||
},
|
||||
// We attribute the import to current user,
|
||||
// not the author from the original object.
|
||||
userId: this._user(),
|
||||
});
|
||||
return boardId;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create the Wekan cards corresponding to the supplied Wekan cards,
|
||||
* as well as all linked data: activities, comments, and attachments
|
||||
* @param wekanCards
|
||||
* @param boardId
|
||||
* @returns {Array}
|
||||
*/
|
||||
createCards(wekanCards, boardId) {
|
||||
const result = [];
|
||||
wekanCards.forEach(card => {
|
||||
const cardToCreate = {
|
||||
archived: card.archived,
|
||||
boardId,
|
||||
// very old boards won't have a creation activity so no creation date
|
||||
createdAt: this._now(this.createdAt.cards[card._id]),
|
||||
dateLastActivity: this._now(),
|
||||
description: card.description,
|
||||
listId: this.lists[card.listId],
|
||||
swimlaneId: this.swimlanes[card.swimlaneId],
|
||||
sort: card.sort,
|
||||
title: card.title,
|
||||
// we attribute the card to its creator if available
|
||||
userId: this._user(this.createdBy.cards[card._id]),
|
||||
isOvertime: card.isOvertime || false,
|
||||
startAt: card.startAt ? this._now(card.startAt) : null,
|
||||
dueAt: card.dueAt ? this._now(card.dueAt) : null,
|
||||
spentTime: card.spentTime || null,
|
||||
};
|
||||
// add labels
|
||||
if (card.labelIds) {
|
||||
cardToCreate.labelIds = card.labelIds.map(wekanId => {
|
||||
return this.labels[wekanId];
|
||||
});
|
||||
}
|
||||
// add members {
|
||||
if (card.members) {
|
||||
const wekanMembers = [];
|
||||
// we can't just map, as some members may not have been mapped
|
||||
card.members.forEach(sourceMemberId => {
|
||||
if (this.members[sourceMemberId]) {
|
||||
const wekanId = this.members[sourceMemberId];
|
||||
// we may map multiple Wekan members to the same wekan user
|
||||
// in which case we risk adding the same user multiple times
|
||||
if (!wekanMembers.find(wId => wId === wekanId)) {
|
||||
wekanMembers.push(wekanId);
|
||||
}
|
||||
}
|
||||
return true;
|
||||
});
|
||||
if (wekanMembers.length > 0) {
|
||||
cardToCreate.members = wekanMembers;
|
||||
}
|
||||
}
|
||||
// set color
|
||||
if (card.color) {
|
||||
cardToCreate.color = card.color;
|
||||
}
|
||||
// insert card
|
||||
const cardId = Cards.direct.insert(cardToCreate);
|
||||
// keep track of Wekan id => Wekan id
|
||||
this.cards[card._id] = cardId;
|
||||
// // log activity
|
||||
// Activities.direct.insert({
|
||||
// activityType: 'importCard',
|
||||
// boardId,
|
||||
// cardId,
|
||||
// createdAt: this._now(),
|
||||
// listId: cardToCreate.listId,
|
||||
// source: {
|
||||
// id: card._id,
|
||||
// system: 'Wekan',
|
||||
// },
|
||||
// // we attribute the import to current user,
|
||||
// // not the author of the original card
|
||||
// userId: this._user(),
|
||||
// });
|
||||
// add comments
|
||||
const comments = this.comments[card._id];
|
||||
if (comments) {
|
||||
comments.forEach(comment => {
|
||||
const commentToCreate = {
|
||||
boardId,
|
||||
cardId,
|
||||
createdAt: this._now(comment.createdAt),
|
||||
text: comment.text,
|
||||
// we attribute the comment to the original author, default to current user
|
||||
userId: this._user(comment.userId),
|
||||
};
|
||||
// dateLastActivity will be set from activity insert, no need to
|
||||
// update it ourselves
|
||||
const commentId = CardComments.direct.insert(commentToCreate);
|
||||
this.commentIds[comment._id] = commentId;
|
||||
// Activities.direct.insert({
|
||||
// activityType: 'addComment',
|
||||
// boardId: commentToCreate.boardId,
|
||||
// cardId: commentToCreate.cardId,
|
||||
// commentId,
|
||||
// createdAt: this._now(commentToCreate.createdAt),
|
||||
// // we attribute the addComment (not the import)
|
||||
// // to the original author - it is needed by some UI elements.
|
||||
// userId: commentToCreate.userId,
|
||||
// });
|
||||
});
|
||||
}
|
||||
const attachments = this.attachments[card._id];
|
||||
const wekanCoverId = card.coverId;
|
||||
if (attachments) {
|
||||
attachments.forEach(att => {
|
||||
const file = new FS.File();
|
||||
// Simulating file.attachData on the client generates multiple errors
|
||||
// - HEAD returns null, which causes an exception down the line
|
||||
// - the template then tries to display the url to the attachment which causes other errors
|
||||
// so we make it server-only and let the UI catch up once it is done; we forgo latency compensation.
|
||||
const self = this;
|
||||
if (Meteor.isServer) {
|
||||
if (att.url) {
|
||||
file.attachData(att.url, function(error) {
|
||||
file.boardId = boardId;
|
||||
file.cardId = cardId;
|
||||
file.userId = self._user(att.userId);
|
||||
// The field source will only be used to prevent adding
|
||||
// attachments' related activities automatically
|
||||
file.source = 'import';
|
||||
if (error) {
|
||||
throw error;
|
||||
} else {
|
||||
const wekanAtt = Attachments.insert(file, () => {
|
||||
// we do nothing
|
||||
});
|
||||
self.attachmentIds[att._id] = wekanAtt._id;
|
||||
//
|
||||
if (wekanCoverId === att._id) {
|
||||
Cards.direct.update(cardId, {
|
||||
$set: {
|
||||
coverId: wekanAtt._id,
|
||||
},
|
||||
});
|
||||
}
|
||||
}
|
||||
});
|
||||
} else if (att.file) {
|
||||
file.attachData(
|
||||
Buffer.from(att.file, 'base64'),
|
||||
{
|
||||
type: att.type,
|
||||
},
|
||||
error => {
|
||||
file.name(att.name);
|
||||
file.boardId = boardId;
|
||||
file.cardId = cardId;
|
||||
file.userId = self._user(att.userId);
|
||||
// The field source will only be used to prevent adding
|
||||
// attachments' related activities automatically
|
||||
file.source = 'import';
|
||||
if (error) {
|
||||
throw error;
|
||||
} else {
|
||||
const wekanAtt = Attachments.insert(file, () => {
|
||||
// we do nothing
|
||||
});
|
||||
this.attachmentIds[att._id] = wekanAtt._id;
|
||||
//
|
||||
if (wekanCoverId === att._id) {
|
||||
Cards.direct.update(cardId, {
|
||||
$set: {
|
||||
coverId: wekanAtt._id,
|
||||
},
|
||||
});
|
||||
}
|
||||
}
|
||||
},
|
||||
);
|
||||
}
|
||||
}
|
||||
// todo XXX set cover - if need be
|
||||
});
|
||||
}
|
||||
result.push(cardId);
|
||||
});
|
||||
return result;
|
||||
}
|
||||
|
||||
// Create labels if they do not exist and load this.labels.
|
||||
createLabels(wekanLabels, board) {
|
||||
wekanLabels.forEach(label => {
|
||||
const color = label.color;
|
||||
const name = label.name;
|
||||
const existingLabel = board.getLabel(name, color);
|
||||
if (existingLabel) {
|
||||
this.labels[label.id] = existingLabel._id;
|
||||
} else {
|
||||
const idLabelCreated = board.pushLabel(name, color);
|
||||
this.labels[label.id] = idLabelCreated;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
createLists(wekanLists, boardId) {
|
||||
wekanLists.forEach((list, listIndex) => {
|
||||
const listToCreate = {
|
||||
archived: list.archived,
|
||||
boardId,
|
||||
// We are being defensive here by providing a default date (now) if the
|
||||
// creation date wasn't found in the action log. This happens on old
|
||||
// Wekan boards (e.g. from 2013) that didn't log the 'createList' action
|
||||
// we require.
|
||||
createdAt: this._now(this.createdAt.lists[list.id]),
|
||||
title: list.title,
|
||||
sort: list.sort ? list.sort : listIndex,
|
||||
};
|
||||
const listId = Lists.direct.insert(listToCreate);
|
||||
Lists.direct.update(listId, {
|
||||
$set: {
|
||||
updatedAt: this._now(),
|
||||
},
|
||||
});
|
||||
this.lists[list._id] = listId;
|
||||
// // log activity
|
||||
// Activities.direct.insert({
|
||||
// activityType: 'importList',
|
||||
// boardId,
|
||||
// createdAt: this._now(),
|
||||
// listId,
|
||||
// source: {
|
||||
// id: list._id,
|
||||
// system: 'Wekan',
|
||||
// },
|
||||
// // We attribute the import to current user,
|
||||
// // not the creator of the original object
|
||||
// userId: this._user(),
|
||||
// });
|
||||
});
|
||||
}
|
||||
|
||||
createSwimlanes(wekanSwimlanes, boardId) {
|
||||
wekanSwimlanes.forEach((swimlane, swimlaneIndex) => {
|
||||
const swimlaneToCreate = {
|
||||
archived: swimlane.archived,
|
||||
boardId,
|
||||
// We are being defensive here by providing a default date (now) if the
|
||||
// creation date wasn't found in the action log. This happens on old
|
||||
// Wekan boards (e.g. from 2013) that didn't log the 'createList' action
|
||||
// we require.
|
||||
createdAt: this._now(this.createdAt.swimlanes[swimlane._id]),
|
||||
title: swimlane.title,
|
||||
sort: swimlane.sort ? swimlane.sort : swimlaneIndex,
|
||||
};
|
||||
// set color
|
||||
if (swimlane.color) {
|
||||
swimlaneToCreate.color = swimlane.color;
|
||||
}
|
||||
const swimlaneId = Swimlanes.direct.insert(swimlaneToCreate);
|
||||
Swimlanes.direct.update(swimlaneId, {
|
||||
$set: {
|
||||
updatedAt: this._now(),
|
||||
},
|
||||
});
|
||||
this.swimlanes[swimlane._id] = swimlaneId;
|
||||
});
|
||||
}
|
||||
|
||||
createChecklists(wekanChecklists) {
|
||||
const result = [];
|
||||
wekanChecklists.forEach((checklist, checklistIndex) => {
|
||||
// Create the checklist
|
||||
const checklistToCreate = {
|
||||
cardId: this.cards[checklist.cardId],
|
||||
title: checklist.title,
|
||||
createdAt: checklist.createdAt,
|
||||
sort: checklist.sort ? checklist.sort : checklistIndex,
|
||||
};
|
||||
const checklistId = Checklists.direct.insert(checklistToCreate);
|
||||
this.checklists[checklist._id] = checklistId;
|
||||
result.push(checklistId);
|
||||
});
|
||||
return result;
|
||||
}
|
||||
|
||||
createTriggers(wekanTriggers, boardId) {
|
||||
wekanTriggers.forEach(trigger => {
|
||||
if (trigger.hasOwnProperty('labelId')) {
|
||||
trigger.labelId = this.labels[trigger.labelId];
|
||||
}
|
||||
if (trigger.hasOwnProperty('memberId')) {
|
||||
trigger.memberId = this.members[trigger.memberId];
|
||||
}
|
||||
trigger.boardId = boardId;
|
||||
const oldId = trigger._id;
|
||||
delete trigger._id;
|
||||
this.triggers[oldId] = Triggers.direct.insert(trigger);
|
||||
});
|
||||
}
|
||||
|
||||
createActions(wekanActions, boardId) {
|
||||
wekanActions.forEach(action => {
|
||||
if (action.hasOwnProperty('labelId')) {
|
||||
action.labelId = this.labels[action.labelId];
|
||||
}
|
||||
if (action.hasOwnProperty('memberId')) {
|
||||
action.memberId = this.members[action.memberId];
|
||||
}
|
||||
action.boardId = boardId;
|
||||
const oldId = action._id;
|
||||
delete action._id;
|
||||
this.actions[oldId] = Actions.direct.insert(action);
|
||||
});
|
||||
}
|
||||
|
||||
createRules(wekanRules, boardId) {
|
||||
wekanRules.forEach(rule => {
|
||||
// Create the rule
|
||||
rule.boardId = boardId;
|
||||
rule.triggerId = this.triggers[rule.triggerId];
|
||||
rule.actionId = this.actions[rule.actionId];
|
||||
delete rule._id;
|
||||
Rules.direct.insert(rule);
|
||||
});
|
||||
}
|
||||
|
||||
createChecklistItems(wekanChecklistItems) {
|
||||
wekanChecklistItems.forEach((checklistitem, checklistitemIndex) => {
|
||||
// Create the checklistItem
|
||||
const checklistItemTocreate = {
|
||||
title: checklistitem.title,
|
||||
checklistId: this.checklists[checklistitem.checklistId],
|
||||
cardId: this.cards[checklistitem.cardId],
|
||||
sort: checklistitem.sort ? checklistitem.sort : checklistitemIndex,
|
||||
isFinished: checklistitem.isFinished,
|
||||
};
|
||||
const checklistItemId = ChecklistItems.direct.insert(
|
||||
checklistItemTocreate,
|
||||
);
|
||||
this.checklistItems[checklistitem._id] = checklistItemId;
|
||||
});
|
||||
}
|
||||
|
||||
  parseActivities(wekanBoard) {
    wekanBoard.activities.forEach(activity => {
      switch (activity.activityType) {
        case 'addAttachment': {
          // We have to be cautious, because the attachment could have been removed later.
          // In that case Wekan still reports its addition, but removes its 'url' field.
          // So we test for that
          const wekanAttachment = wekanBoard.attachments.filter(attachment => {
            return attachment._id === activity.attachmentId;
          })[0];

          if (typeof wekanAttachment !== 'undefined' && wekanAttachment) {
            if (wekanAttachment.url || wekanAttachment.file) {
              // we cannot actually create the Wekan attachment, because we don't yet
              // have the cards to attach it to, so we store it in the instance variable.
              const wekanCardId = activity.cardId;
              if (!this.attachments[wekanCardId]) {
                this.attachments[wekanCardId] = [];
              }
              this.attachments[wekanCardId].push(wekanAttachment);
            }
          }
          break;
        }
        case 'addComment': {
          const wekanComment = wekanBoard.comments.filter(comment => {
            return comment._id === activity.commentId;
          })[0];
          const id = activity.cardId;
          if (!this.comments[id]) {
            this.comments[id] = [];
          }
          this.comments[id].push(wekanComment);
          break;
        }
        case 'createBoard': {
          this.createdAt.board = activity.createdAt;
          break;
        }
        case 'createCard': {
          const cardId = activity.cardId;
          this.createdAt.cards[cardId] = activity.createdAt;
          this.createdBy.cards[cardId] = activity.userId;
          break;
        }
        case 'createList': {
          const listId = activity.listId;
          this.createdAt.lists[listId] = activity.createdAt;
          break;
        }
        case 'createSwimlane': {
          const swimlaneId = activity.swimlaneId;
          this.createdAt.swimlanes[swimlaneId] = activity.createdAt;
          break;
        }
      }
    });
  }

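  // Note (added commentary, not part of the original file): parseActivities
  // only fills in-memory caches; nothing is written to the database at this
  // point. After it runs, the instance holds roughly the following shape
  // (keys and values are illustrative placeholders):
  //
  //   this.attachments = { exportedCardId: [{ _id, url or file, ... }] };
  //   this.comments    = { exportedCardId: [{ _id, text, ... }] };
  //   this.createdAt   = { board, cards: {}, lists: {}, swimlanes: {} };
  //   this.createdBy   = { cards: { exportedCardId: exportedUserId } };
  //
  // Later creation steps look these caches up by the exported (old) card id.
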
  importActivities(activities, boardId) {
    activities.forEach(activity => {
      switch (activity.activityType) {
        // Board related activities
        // TODO: addBoardMember, removeBoardMember
        case 'createBoard': {
          Activities.direct.insert({
            userId: this._user(activity.userId),
            type: 'board',
            activityTypeId: boardId,
            activityType: activity.activityType,
            boardId,
            createdAt: this._now(activity.createdAt),
          });
          break;
        }
        // List related activities
        // TODO: removeList, archivedList
        case 'createList': {
          Activities.direct.insert({
            userId: this._user(activity.userId),
            type: 'list',
            activityType: activity.activityType,
            listId: this.lists[activity.listId],
            boardId,
            createdAt: this._now(activity.createdAt),
          });
          break;
        }
        // Card related activities
        // TODO: archivedCard, restoredCard, joinMember, unjoinMember
        case 'createCard': {
          Activities.direct.insert({
            userId: this._user(activity.userId),
            activityType: activity.activityType,
            listId: this.lists[activity.listId],
            cardId: this.cards[activity.cardId],
            boardId,
            createdAt: this._now(activity.createdAt),
          });
          break;
        }
        case 'moveCard': {
          Activities.direct.insert({
            userId: this._user(activity.userId),
            oldListId: this.lists[activity.oldListId],
            activityType: activity.activityType,
            listId: this.lists[activity.listId],
            cardId: this.cards[activity.cardId],
            boardId,
            createdAt: this._now(activity.createdAt),
          });
          break;
        }
        // Comment related activities
        case 'addComment': {
          Activities.direct.insert({
            userId: this._user(activity.userId),
            activityType: activity.activityType,
            cardId: this.cards[activity.cardId],
            commentId: this.commentIds[activity.commentId],
            boardId,
            createdAt: this._now(activity.createdAt),
          });
          break;
        }
        // Attachment related activities
        case 'addAttachment': {
          Activities.direct.insert({
            userId: this._user(activity.userId),
            type: 'card',
            activityType: activity.activityType,
            attachmentId: this.attachmentIds[activity.attachmentId],
            cardId: this.cards[activity.cardId],
            boardId,
            createdAt: this._now(activity.createdAt),
          });
          break;
        }
        // Checklist related activities
        case 'addChecklist': {
          Activities.direct.insert({
            userId: this._user(activity.userId),
            activityType: activity.activityType,
            cardId: this.cards[activity.cardId],
            checklistId: this.checklists[activity.checklistId],
            boardId,
            createdAt: this._now(activity.createdAt),
          });
          break;
        }
        case 'addChecklistItem': {
          Activities.direct.insert({
            userId: this._user(activity.userId),
            activityType: activity.activityType,
            cardId: this.cards[activity.cardId],
            checklistId: this.checklists[activity.checklistId],
            checklistItemId: activity.checklistItemId.replace(
              activity.checklistId,
              this.checklists[activity.checklistId],
            ),
            boardId,
            createdAt: this._now(activity.createdAt),
          });
          break;
        }
      }
    });
  }

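  // Note (added commentary, not part of the original file): importActivities
  // re-creates only the activity types handled above (the TODO comments list
  // the ones that are skipped), translating every exported id through the
  // instance maps (this.lists, this.cards, this.checklists, ...). For
  // 'addChecklistItem' the exported checklistItemId appears to embed the
  // checklist id, so String.prototype.replace swaps the old checklist id for
  // the new one, e.g. (illustrative ids only):
  //
  //   'oldChecklistId-item1'.replace('oldChecklistId', 'newChecklistId')
  //   // -> 'newChecklistId-item1'
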
  //check(board) {
  check() {
    //try {
    // check(data, {
    //   membersMapping: Match.Optional(Object),
    // });
    // this.checkActivities(board.activities);
    // this.checkBoard(board);
    // this.checkLabels(board.labels);
    // this.checkLists(board.lists);
    // this.checkSwimlanes(board.swimlanes);
    // this.checkCards(board.cards);
    //this.checkChecklists(board.checklists);
    // this.checkRules(board.rules);
    // this.checkActions(board.actions);
    //this.checkTriggers(board.triggers);
    //this.checkChecklistItems(board.checklistItems);
    //} catch (e) {
    //  throw new Meteor.Error('error-json-schema');
    // }
  }

  create(board, currentBoardId) {
    // TODO : Make isSandstorm variable global
    const isSandstorm =
      Meteor.settings &&
      Meteor.settings.public &&
      Meteor.settings.public.sandstorm;
    if (isSandstorm && currentBoardId) {
      const currentBoard = Boards.findOne(currentBoardId);
      currentBoard.archive();
    }
    this.parseActivities(board);
    const boardId = this.createBoardAndLabels(board);
    this.createLists(board.lists, boardId);
    this.createSwimlanes(board.swimlanes, boardId);
    this.createCards(board.cards, boardId);
    this.createChecklists(board.checklists);
    this.createChecklistItems(board.checklistItems);
    this.importActivities(board.activities, boardId);
    this.createTriggers(board.triggers, boardId);
    this.createActions(board.actions, boardId);
    this.createRules(board.rules, boardId);
    // XXX add members
    return boardId;
  }
}
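
// Usage sketch (added commentary, not part of the original file; the class
// name and variable names below are assumptions shown for illustration only):
//
//   const creator = new WekanCreator(/* importer options, e.g. a members mapping */);
//   creator.check(board);                      // currently a stub, see above
//   const newBoardId = creator.create(board, currentBoardId);
//
// where `board` is a Wekan board export parsed from JSON and `currentBoardId`
// is the board that gets archived first when running under Sandstorm.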