diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..4c49bd7
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1 @@
+.env
diff --git a/README.md b/README.md
index c01320c..9f8e597 100755
--- a/README.md
+++ b/README.md
@@ -21,32 +21,49 @@
 Configuration for the application is at `config/default.js` and `config/production.js`. The following parameters can be set in config files or in env variables:
 
-- LOG_LEVEL: the log level
-- PORT: the server port
-- AUTH_SECRET: TC Authentication secret
-- VALID_ISSUERS: valid issuers for TC authentication
-- PAGE_SIZE: the default pagination limit
-- MAX_PAGE_SIZE: the maximum pagination size
-- API_VERSION: the API version
-- DB_NAME: the database name
-- DB_USERNAME: the database username
-- DB_PASSWORD: the database password
-- DB_HOST: the database host
-- DB_PORT: the database port
-- ES_HOST: Elasticsearch host
-- ES_REFRESH: Should elastic search refresh. Default is 'true'. Values can be 'true', 'wait_for', 'false'
-- ELASTICCLOUD_ID: The elastic cloud id, if your elasticsearch instance is hosted on elastic cloud. DO NOT provide a value for ES_HOST if you are using this
-- ELASTICCLOUD_USERNAME: The elastic cloud username for basic authentication. Provide this only if your elasticsearch instance is hosted on elastic cloud
-- ELASTICCLOUD_PASSWORD: The elastic cloud password for basic authentication. Provide this only if your elasticsearch instance is hosted on elastic cloud
-- ES.DOCUMENTS: Elasticsearch index, type and id mapping for resources.
-- SKILL_INDEX: The Elastic search index for skill. Default is `skill`
-- SKILL_ENRICH_POLICYNAME: The enrich policy for skill. Default is `skill-policy`
-- TAXONOMY_INDEX: The Elastic search index for taxonomy. Default is `taxonomy`
-- TAXONOMY_PIPELINE_ID: The pipeline id for enrichment with taxonomy. Default is `taxonomy-pipeline`
-- TAXONOMY_ENRICH_POLICYNAME: The enrich policy for taxonomy. Default is `taxonomy-policy`
-- MAX_BATCH_SIZE: Restrict number of records in memory during bulk insert (Used by the db to es migration script)
-- MAX_BULK_SIZE: The Bulk Indexing Maximum Limits. Default is `100` (Used by the db to es migration script)
+- `LOG_LEVEL`: the log level
+- `PORT`: the server port
+- `AUTH_SECRET`: TC Authentication secret
+- `VALID_ISSUERS`: valid issuers for TC authentication
+- `PAGE_SIZE`: the default pagination limit
+- `MAX_PAGE_SIZE`: the maximum pagination size
+- `API_VERSION`: the API version
+- `DB_NAME`: the database name
+- `DB_USERNAME`: the database username
+- `DB_PASSWORD`: the database password
+- `DB_HOST`: the database host
+- `DB_PORT`: the database port
+- `ES_HOST`: Elasticsearch host
+- `ES_REFRESH`: whether Elasticsearch should refresh after operations. Default is 'true'. Valid values are 'true', 'wait_for' and 'false'
+- `ELASTICCLOUD_ID`: the Elastic Cloud id, if your Elasticsearch instance is hosted on Elastic Cloud. DO NOT provide a value for `ES_HOST` if you are using this
+- `ELASTICCLOUD_USERNAME`: the Elastic Cloud username for basic authentication. Provide this only if your Elasticsearch instance is hosted on Elastic Cloud
+- `ELASTICCLOUD_PASSWORD`: the Elastic Cloud password for basic authentication. Provide this only if your Elasticsearch instance is hosted on Elastic Cloud
+- `ES.DOCUMENTS`: Elasticsearch index, type and id mapping for resources
+- `SKILL_INDEX`: the Elasticsearch index for skills. Default is `skill`
+- `TAXONOMY_INDEX`: the Elasticsearch index for taxonomies. Default is `taxonomy`
+- `MAX_BATCH_SIZE`: restricts the number of records held in memory during bulk insert (used by the DB-to-ES migration script)
+- `MAX_BULK_SIZE`: the maximum number of records per bulk indexing request. Default is `100` (used by the DB-to-ES migration script)
+
+- `AUTH0_URL`: Auth0 URL, used to get the TC M2M token
+- `AUTH0_AUDIENCE`: Auth0 audience, used to get the TC M2M token
+- `TOKEN_CACHE_TIME`: Auth0 token cache time, used to get the TC M2M token
+- `AUTH0_CLIENT_ID`: Auth0 client id, used to get the TC M2M token
+- `AUTH0_CLIENT_SECRET`: Auth0 client secret, used to get the TC M2M token
+- `AUTH0_PROXY_SERVER_URL`: proxy Auth0 URL, used to get the TC M2M token
+
+- `BUSAPI_URL`: Topcoder Bus API URL
+- `KAFKA_ERROR_TOPIC`: the error topic to which the Bus API publishes any errors
+- `KAFKA_MESSAGE_ORIGINATOR`: the originator value for the Kafka messages
+- `SKILLS_ERROR_TOPIC`: the Kafka topic to which this API reports operation errors
+
+**NOTE**: AUTH0-related configuration is normally shared on the challenge forum.
+
+## DB and Elasticsearch In Docker
+- Navigate to the `docker-pgsql-es` folder. Rename `sample.env` to `.env` and change any values if required.
+- Run `docker-compose up -d` to start Docker instances of PostgreSQL and Elasticsearch for use with the API.
+**NOTE**: To completely restart the services, run `docker-compose down --volumes` and then `docker-compose up`.
+Notice that the `--volumes` argument is passed to the `docker-compose down` command to remove the volume that stores the DB data. Without the `--volumes` argument, the DB data will persist after the services are brought down.
 
 ## Local deployment
 
@@ -58,17 +75,16 @@ Setup your Postgresql DB and Elasticsearch instance and ensure that they are up
 - Run the migrations - `npm run migrations up`. This will create the tables.
 - Then run `npm run insert-data` and insert mock data into the database.
 - Run `npm run migrate-db-to-es` to sync data with ES.
-- Startup server `npm run start`
+- Startup server `npm run start:dev`
 
 ## Migrations
 
 Migrations are located under the `./scripts/db/` folder. Run `npm run migrations up` and `npm run migrations down` to execute the migrations or remove the earlier ones
 
 ## Local Deployment with Docker
+Set up your PostgreSQL DB and Elasticsearch instance and ensure that they are up and running.
 
-- Navigate to the directory `docker-pgsql-es` folder. Rename `sample.env` to `.env` and change any values if required.
-- Run `docker-compose up -d` to have docker instances of pgsql and elasticsearch to use with the api
-
+- Configure AUTH0-related parameters via environment variables. Note that normally you don't need to change any other configuration.
 - Create database using `npm run create-db`.
 - Run the migrations - `npm run migrations up`. This will create the tables.
 - Then run `npm run insert-data` and insert mock data into the database.
@@ -153,5 +169,5 @@ These tokens have been signed with the secret `CLIENT_SECRET`. This secret shoul
 ## Documentation
 
-- [permissions.html](docs/permissions.html) - the list of all permissions in Skills API.
+- [permissions.html](https://htmlpreview.github.io/?https://github.com/topcoder-platform/skills-api/blob/develop/docs/permissions.html) - the list of all permissions in Skills API.
 - [swagger.yaml](docs/swagger.yaml) - the Swagger API Definition.
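For clarity on how the Bus API settings above are used: messages published to `SKILLS_ERROR_TOPIC` follow the standard Topcoder bus event shape built by the new `publishError` helper in `src/common/helper.js` (see that file's hunk later in this diff). A sketch of such an event, with illustrative field values:

```js
// Sketch of an error event as assembled by publishError(); values are illustrative.
const message = {
  topic: 'skills.action.error',          // config.SKILLS_ERROR_TOPIC
  originator: 'skills-api',              // config.KAFKA_MESSAGE_ORIGINATOR
  timestamp: '2021-08-01T12:00:00.000Z', // new Date().toISOString()
  'mime-type': 'application/json',
  payload: {
    id: '00000000-0000-0000-0000-000000000000', // the entity that failed to sync (illustrative uuid)
    apiAction: 'skill.create'                   // one of the constants.API_ACTION values
  }
}
```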
diff --git a/config/default.js b/config/default.js index 1554127..42c4df4 100755 --- a/config/default.js +++ b/config/default.js @@ -20,6 +20,20 @@ module.exports = { DB_HOST: process.env.DB_HOST || 'localhost', DB_PORT: process.env.DB_PORT || 5432, + AUTH0_URL: process.env.AUTH0_URL, + AUTH0_AUDIENCE: process.env.AUTH0_AUDIENCE, + TOKEN_CACHE_TIME: process.env.TOKEN_CACHE_TIME, + AUTH0_CLIENT_ID: process.env.AUTH0_CLIENT_ID, + AUTH0_CLIENT_SECRET: process.env.AUTH0_CLIENT_SECRET, + AUTH0_PROXY_SERVER_URL: process.env.AUTH0_PROXY_SERVER_URL, + + BUSAPI_URL: process.env.BUSAPI_URL || 'https://api.topcoder-dev.com/v5', + + KAFKA_ERROR_TOPIC: process.env.KAFKA_ERROR_TOPIC || 'common.error.reporting', + KAFKA_MESSAGE_ORIGINATOR: process.env.KAFKA_MESSAGE_ORIGINATOR || 'skills-api', + + SKILLS_ERROR_TOPIC: process.env.SKILLS_ERROR_TOPIC || 'skills.action.error', + // ElasticSearch ES: { HOST: process.env.ES_HOST || 'http://localhost:9200', @@ -36,14 +50,11 @@ module.exports = { DOCUMENTS: { skill: { index: process.env.SKILL_INDEX || 'skill', - type: '_doc', - enrichPolicyName: process.env.SKILL_ENRICH_POLICYNAME || 'skill-policy' + type: '_doc' }, taxonomy: { index: process.env.TAXONOMY_INDEX || 'taxonomy', - type: '_doc', - pipelineId: process.env.TAXONOMY_PIPELINE_ID || 'taxonomy-pipeline', - enrichPolicyName: process.env.TAXONOMY_ENRICH_POLICYNAME || 'taxonomy-policy' + type: '_doc' } }, MAX_BATCH_SIZE: parseInt(process.env.MAX_BATCH_SIZE, 10) || 10000, diff --git a/docker/sample.env b/docker/sample.env index 6cb7442..37522b8 100644 --- a/docker/sample.env +++ b/docker/sample.env @@ -6,3 +6,8 @@ DB_PORT=5432 ES_HOST=http://host.docker.internal:9200 PORT=3001 + +AUTH0_CLIENT_ID= +AUTH0_CLIENT_SECRET= +AUTH0_URL= +AUTH0_AUDIENCE= diff --git a/docs/permissions.html b/docs/permissions.html index eaf4289..c9658e2 100644 --- a/docs/permissions.html +++ b/docs/permissions.html @@ -263,7 +263,7 @@

- Taxonomy Metadata
+ Taxonomy

@@ -360,7 +360,7 @@

- Taxonomy
+ Taxonomy Metadata

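The `package-lock.json` changes below pin the new runtime dependencies: `@topcoder-platform/topcoder-bus-api-wrapper` (with its `superagent`, `joi` and `tc-core-library-js` transitive dependencies) and `cls-hooked`. The wrapper is initialized once from config, exactly as `src/common/helper.js` does later in this diff:

```js
const _ = require('lodash')
const config = require('config')
const busApi = require('@topcoder-platform/topcoder-bus-api-wrapper')

// One Bus API client per process, configured with the Auth0/M2M settings
// documented in the README above.
const busApiClient = busApi(_.pick(config, [
  'AUTH0_URL', 'AUTH0_AUDIENCE', 'TOKEN_CACHE_TIME', 'AUTH0_CLIENT_ID',
  'AUTH0_CLIENT_SECRET', 'BUSAPI_URL', 'KAFKA_ERROR_TOPIC', 'AUTH0_PROXY_SERVER_URL'
]))

// Events are then posted with: await busApiClient.postEvent(message)
```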
diff --git a/package-lock.json b/package-lock.json index d5ef323..e67d604 100644 --- a/package-lock.json +++ b/package-lock.json @@ -111,6 +111,16 @@ "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-1.1.2.tgz", "integrity": "sha512-RbzJvlNzmRq5c3O09UipeuXno4tA1FE6ikOjxZK0tuxVv3412l64l5t1W5pj4+rJq9vpkm/kwiR07aZXnsKPxw==" }, + "@topcoder-platform/topcoder-bus-api-wrapper": { + "version": "github:topcoder-platform/tc-bus-api-wrapper#f8cbd335a0e0b4d6edd7cae859473593271fd97f", + "from": "github:topcoder-platform/tc-bus-api-wrapper", + "requires": { + "joi": "^13.4.0", + "lodash": "^4.17.15", + "superagent": "^3.8.3", + "tc-core-library-js": "github:appirio-tech/tc-core-library-js#v2.6.4" + } + }, "@types/body-parser": { "version": "1.19.1", "resolved": "https://registry.npmjs.org/@types/body-parser/-/body-parser-1.19.1.tgz", @@ -394,6 +404,14 @@ "resolved": "https://registry.npmjs.org/async/-/async-3.2.0.tgz", "integrity": "sha512-TR2mEZFVOj2pLStYxLht7TyfuRzaydfpxr3k9RpHIzMgw7A64dzsdqCxH1WJyQdoe8T10nDXd9wnEigmiuHIZw==" }, + "async-hook-jl": { + "version": "1.7.6", + "resolved": "https://registry.npmjs.org/async-hook-jl/-/async-hook-jl-1.7.6.tgz", + "integrity": "sha512-gFaHkFfSxTjvoxDMYqDuGHlcRyUuamF8s+ZTtJdDzqjws4mCt7v0vuV79/E2Wr2/riMQgtG4/yUtXWs1gZ7JMg==", + "requires": { + "stack-chain": "^1.3.7" + } + }, "asynckit": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", @@ -813,6 +831,16 @@ "mimic-response": "^1.0.0" } }, + "cls-hooked": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/cls-hooked/-/cls-hooked-4.2.2.tgz", + "integrity": "sha512-J4Xj5f5wq/4jAvcdgoGsL3G103BtWpZrMo8NEinRltN+xpTZdI+M38pyQqhuFU/P792xkMFvnKSf+Lm81U1bxw==", + "requires": { + "async-hook-jl": "^1.7.6", + "emitter-listener": "^1.0.1", + "semver": "^5.4.1" + } + }, "code-point-at": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/code-point-at/-/code-point-at-1.1.0.tgz", @@ -886,6 +914,11 @@ "delayed-stream": "~1.0.0" } }, + "component-emitter": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/component-emitter/-/component-emitter-1.3.0.tgz", + "integrity": "sha512-Rd3se6QB+sO1TwqZjscQrurpEPIfO0/yYnSin6Q/rD3mOutHvUrCAhJub3r90uNb+SESBuE0QYoB90YdfatsRg==" + }, "concat-map": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", @@ -942,6 +975,11 @@ "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz", "integrity": "sha1-4wOogrNCzD7oylE6eZmXNNqzriw=" }, + "cookiejar": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/cookiejar/-/cookiejar-2.1.2.tgz", + "integrity": "sha512-Mw+adcfzPxcPeI+0WlvRrr/3lGVO0bD75SxX6811cxSh1Wbxx7xZBGK1eVtDf6si8rg2lhnUjsVLMFMfbRIuwA==" + }, "core-js": { "version": "2.6.12", "resolved": "https://registry.npmjs.org/core-js/-/core-js-2.6.12.tgz", @@ -1161,38 +1199,12 @@ "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", "integrity": "sha1-WQxhFWsK4vTwJVcyoViyZrxWsh0=" }, - "elasticsearch": { - "version": "16.7.2", - "resolved": "https://registry.npmjs.org/elasticsearch/-/elasticsearch-16.7.2.tgz", - "integrity": "sha512-1ZLKZlG2ABfYVBX2d7/JgxOsKJrM5Yu62GvshWu7ZSvhxPomCN4Gas90DS51yYI56JolY0XGhyiRlUhLhIL05Q==", + "emitter-listener": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/emitter-listener/-/emitter-listener-1.1.2.tgz", + "integrity": "sha512-Bt1sBAGFHY9DKY+4/2cV6izcKJUf5T7/gkdmkxzX/qv9CcGH8xSwVRW5mtX03SWJtRTWSOpzCuWN9rBFYZepZQ==", "requires": { - 
"agentkeepalive": "^3.4.1", - "chalk": "^1.0.0", - "lodash": "^4.17.10" - }, - "dependencies": { - "ansi-styles": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-2.2.1.tgz", - "integrity": "sha1-tDLdM1i2NM914eRmQ2gkBTPB3b4=" - }, - "chalk": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-1.1.3.tgz", - "integrity": "sha1-qBFcVeSnAv5NFQq9OHKCKn4J/Jg=", - "requires": { - "ansi-styles": "^2.2.1", - "escape-string-regexp": "^1.0.2", - "has-ansi": "^2.0.0", - "strip-ansi": "^3.0.0", - "supports-color": "^2.0.0" - } - }, - "supports-color": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-2.0.0.tgz", - "integrity": "sha1-U10EXOa2Nj+kARcIRimZXp3zJMc=" - } + "shimmer": "^1.2.0" } }, "emoji-regex": { @@ -1948,6 +1960,11 @@ "mime-types": "^2.1.12" } }, + "formidable": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/formidable/-/formidable-1.2.2.tgz", + "integrity": "sha512-V8gLm+41I/8kguQ4/o1D3RIHRmhYFG4pnNyonvua+40rqcEmT4+V71yaZ3B457xbbgCsCfjSPi65u/W6vK1U5Q==" + }, "forwarded": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz", @@ -2156,6 +2173,11 @@ "integrity": "sha512-UqBRqi4ju7T+TqGNdqAO0PaSVGsDGJUBQvk9eUWNGRY1CFGDzYhLWoM7JQEemnlvVcv/YEmc2wNW8BC24EnUsw==", "dev": true }, + "hoek": { + "version": "5.0.4", + "resolved": "https://registry.npmjs.org/hoek/-/hoek-5.0.4.tgz", + "integrity": "sha512-Alr4ZQgoMlnere5FZJsIyfIjORBqZll5POhDsF4q64dPuJR6rNxXdDxtHSQq8OXRurhmx+PWYEE8bXRROY8h0w==" + }, "hosted-git-info": { "version": "2.8.9", "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.9.tgz", @@ -2590,6 +2612,14 @@ "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", "integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=" }, + "isemail": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/isemail/-/isemail-3.2.0.tgz", + "integrity": "sha512-zKqkK+O+dGqevc93KNsbZ/TqTUFd46MwWjYOoMrjIMZ51eU7DtQG3Wmd9SQQT7i7RVnuTPEiYEWHU3MSbxC1Tg==", + "requires": { + "punycode": "2.x.x" + } + }, "isexe": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", @@ -2601,10 +2631,15 @@ "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz", "integrity": "sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo=" }, - "jmespath": { - "version": "0.15.0", - "resolved": "https://registry.npmjs.org/jmespath/-/jmespath-0.15.0.tgz", - "integrity": "sha1-o/Iiqarp+Wb10nx5ZRDigJF2Qhc=" + "joi": { + "version": "13.7.0", + "resolved": "https://registry.npmjs.org/joi/-/joi-13.7.0.tgz", + "integrity": "sha512-xuY5VkHfeOYK3Hdi91ulocfuFopwgbSORmIwzcwHKESQhC7w1kD5jaVSPnqDxS2I8t3RZ9omCKAxNwXN5zG1/Q==", + "requires": { + "hoek": "5.x.x", + "isemail": "3.x.x", + "topo": "3.x.x" + } }, "js-tokens": { "version": "4.0.0", @@ -4191,6 +4226,11 @@ "integrity": "sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM=", "dev": true }, + "shimmer": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/shimmer/-/shimmer-1.2.1.tgz", + "integrity": "sha512-sQTKC1Re/rM6XyFM6fIAGHRPVGvyXfgzIDvzoq608vM+jeyVD0Tu1E6Np0Kc2zAIFWIj963V2800iF/9LPieQw==" + }, "signal-exit": { "version": "3.0.3", "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.3.tgz", @@ -4293,6 +4333,11 @@ "tweetnacl": "~0.14.0" } }, + "stack-chain": { + "version": "1.3.7", + "resolved": "https://registry.npmjs.org/stack-chain/-/stack-chain-1.3.7.tgz", + "integrity": "sha1-0ZLJ/06moiyUxN1FkXHj8AzqEoU=" + }, "stack-trace": { 
"version": "0.0.10", "resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.10.tgz", @@ -4399,6 +4444,55 @@ "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", "dev": true }, + "superagent": { + "version": "3.8.3", + "resolved": "https://registry.npmjs.org/superagent/-/superagent-3.8.3.tgz", + "integrity": "sha512-GLQtLMCoEIK4eDv6OGtkOoSMt3D+oq0y3dsxMuYuDvaNUvuT8eFBuLmfR0iYYzHC1e8hpzC6ZsxbuP6DIalMFA==", + "requires": { + "component-emitter": "^1.2.0", + "cookiejar": "^2.1.0", + "debug": "^3.1.0", + "extend": "^3.0.0", + "form-data": "^2.3.1", + "formidable": "^1.2.0", + "methods": "^1.1.1", + "mime": "^1.4.1", + "qs": "^6.5.1", + "readable-stream": "^2.3.5" + }, + "dependencies": { + "debug": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "requires": { + "ms": "^2.1.1" + } + }, + "readable-stream": { + "version": "2.3.7", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", + "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "requires": { + "safe-buffer": "~5.1.0" + } + } + } + }, "supports-color": { "version": "5.5.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", @@ -4539,6 +4633,21 @@ "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.0.tgz", "integrity": "sha512-yaOH/Pk/VEhBWWTlhI+qXxDFXlejDGcQipMlyxda9nthulaxLZUNcUqFxokp0vcYnvteJln5FNQDRrxj3YcbVw==" }, + "topo": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/topo/-/topo-3.0.3.tgz", + "integrity": "sha512-IgpPtvD4kjrJ7CRA3ov2FhWQADwv+Tdqbsf1ZnPUSAtCJ9e1Z44MmoSGDXGk4IppoZA7jd/QRkNddlLJWlUZsQ==", + "requires": { + "hoek": "6.x.x" + }, + "dependencies": { + "hoek": { + "version": "6.1.3", + "resolved": "https://registry.npmjs.org/hoek/-/hoek-6.1.3.tgz", + "integrity": "sha512-YXXAAhmF9zpQbC7LEcREFtXfGq5K1fmd+4PHkBq8NUqmzW3G+Dq10bI/i0KucLRwss3YYFQ0fSfoxBZYiGUqtQ==" + } + } + }, "toposort-class": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/toposort-class/-/toposort-class-1.0.1.tgz", diff --git a/package.json b/package.json index 9f15270..55d8a4d 100644 --- a/package.json +++ b/package.json @@ -23,7 +23,9 @@ "dependencies": { "@elastic/elasticsearch": "^7.9.1", "@hapi/joi": "^16.1.8", + "@topcoder-platform/topcoder-bus-api-wrapper": "github:topcoder-platform/tc-bus-api-wrapper", "body-parser": "^1.19.0", + "cls-hooked": "^4.2.2", "config": "^3.2.4", "cors": "^2.8.5", "express": "^4.17.1", diff --git a/scripts/constants.js b/scripts/constants.js index fa5fb4d..65d2d90 100644 --- a/scripts/constants.js +++ b/scripts/constants.js @@ -8,35 +8,13 @@ const config = require('config') const topResources = { taxonomy: { index: config.get('ES.DOCUMENTS.taxonomy.index'), - type: config.get('ES.DOCUMENTS.taxonomy.type'), - enrich: { - policyName: 
config.get('ES.DOCUMENTS.taxonomy.enrichPolicyName'), - matchField: 'id', - enrichFields: ['id', 'name', 'created', 'updated', 'createdBy', 'updatedBy'] - }, - pipeline: { - id: config.get('ES.DOCUMENTS.taxonomy.pipelineId'), - field: 'taxonomyId', - targetField: 'taxonomy', - maxMatches: '1' - } + type: config.get('ES.DOCUMENTS.taxonomy.type') }, skill: { index: config.get('ES.DOCUMENTS.skill.index'), - type: config.get('ES.DOCUMENTS.skill.type'), - enrich: { - policyName: config.get('ES.DOCUMENTS.skill.enrichPolicyName'), - matchField: 'id', - enrichFields: ['id', 'taxonomyId', 'name', 'externalId', 'uri', 'created', 'updated', 'createdBy', 'updatedBy', 'taxonomyName'] - }, - ingest: { - pipeline: { - id: config.get('ES.DOCUMENTS.taxonomy.pipelineId') - } - } + type: config.get('ES.DOCUMENTS.skill.type') } - } const modelToESIndexMapping = { diff --git a/scripts/db/dropAll.js b/scripts/db/dropAll.js index be168ac..23725e2 100644 --- a/scripts/db/dropAll.js +++ b/scripts/db/dropAll.js @@ -13,18 +13,6 @@ const { getESClient } = require('../../src/common/es-client') async function main () { const client = getESClient() - // delete es pipelines - try { - logger.info('Deleting all pipelines...') - await client.ingest.deletePipeline({ - id: topResources.taxonomy.pipeline.id - }) - logger.info('Successfully deleted') - } catch (e) { - console.error(e) - logger.warn('Delete all ingest pipelines failed') - } - // delete data in es const keys = Object.keys(sequelize.models) for (let i = 0; i < keys.length; i++) { @@ -32,18 +20,11 @@ async function main () { const esResourceName = modelToESIndexMapping[key] try { if (_.includes(_.keys(topResources), esResourceName)) { - if (topResources[esResourceName].enrich) { - logger.info(`Deleting enrich policy for ${esResourceName}`) - await client.enrich.deletePolicy({ - name: topResources[esResourceName].enrich.policyName - }) - logger.info(`Successfully deleted enrich policy for ${esResourceName}`) - } logger.info(`Deleting index for ${esResourceName}`) await client.indices.delete({ index: topResources[esResourceName].index }) - logger.info(`Successfully deleted enrich policy for ${esResourceName}`) + logger.info(`Successfully deleted index for ${esResourceName}`) } } catch (e) { console.error(e) @@ -54,6 +35,10 @@ async function main () { // delete tables try { await sequelize.drop() + // the dropped tables cannot be re-created via command `npm run migrations up` + // without dropping the SequelizeMeta table first + // so here we drop the SequelizeMeta table too. 
+ await sequelize.query('DROP TABLE IF EXISTS "SequelizeMeta";') } catch (e) { console.error(e) logger.warn('deleting tables failed') diff --git a/scripts/db/dumpDbToEs.js b/scripts/db/dumpDbToEs.js index 648a6aa..9c1bf9d 100644 --- a/scripts/db/dumpDbToEs.js +++ b/scripts/db/dumpDbToEs.js @@ -12,15 +12,8 @@ const { const models = sequelize.models -// Declares the ordering of the resource data insertion, to ensure that enrichment happens correctly -const RESOURCES_IN_ORDER = [ - 'taxonomy', - 'skill' -] - const client = getESClient() -const RESOURCE_NOT_FOUND = 'resource_not_found_exception' const INDEX_NOT_FOUND = 'index_not_found_exception' /** @@ -29,17 +22,6 @@ const INDEX_NOT_FOUND = 'index_not_found_exception' */ async function cleanupES (keys) { const client = getESClient() - try { - await client.ingest.deletePipeline({ - id: topResources.taxonomy.pipeline.id - }) - } catch (e) { - if (e.meta && e.meta.body.error.type === RESOURCE_NOT_FOUND) { - // Ignore - } else { - throw e - } - } try { for (let i = 0; i < keys.length; i++) { @@ -47,20 +29,6 @@ async function cleanupES (keys) { if (models[key].tableName) { const esResourceName = modelToESIndexMapping[key] if (_.includes(_.keys(topResources), esResourceName)) { - if (topResources[esResourceName].enrich) { - try { - await client.enrich.deletePolicy({ - name: topResources[esResourceName].enrich.policyName - }) - } catch (e) { - if (e.meta && e.meta.body.error.type === RESOURCE_NOT_FOUND) { - // Ignore - } else { - throw e - } - } - } - try { await client.indices.delete({ index: topResources[esResourceName].index @@ -98,7 +66,6 @@ async function insertBulkIntoES (esResourceName, dataset) { index: resourceConfig.index, type: resourceConfig.type, body, - pipeline: resourceConfig.ingest ? resourceConfig.ingest.pipeline.id : undefined, refresh: 'wait_for' }) } catch (e) { @@ -108,99 +75,12 @@ async function insertBulkIntoES (esResourceName, dataset) { } } -/** - * Creates and executes the enrich policy for the provided model - * @param {String} modelName The model name - */ -async function createAndExecuteEnrichPolicy (modelName) { - const esResourceName = modelToESIndexMapping[modelName] - - if (_.includes(_.keys(topResources), esResourceName) && topResources[esResourceName].enrich) { - await client.enrich.putPolicy({ - name: topResources[esResourceName].enrich.policyName, - body: { - match: { - indices: topResources[esResourceName].index, - match_field: topResources[esResourceName].enrich.matchField, - enrich_fields: topResources[esResourceName].enrich.enrichFields - } - } - }) - await client.enrich.executePolicy({ name: topResources[esResourceName].enrich.policyName }) - } -} - -/** - * Creates the ingest pipeline using the enrich policy - * @param {String} modelName The model name - */ -async function createEnrichProcessor (modelName) { - const esResourceName = modelToESIndexMapping[modelName] - - if (_.includes(_.keys(topResources), esResourceName) && topResources[esResourceName].pipeline) { - if (topResources[esResourceName].pipeline.processors) { - const processors = [] - - for (let i = 0; i < topResources[esResourceName].pipeline.processors.length; i++) { - const ep = topResources[esResourceName].pipeline.processors[i] - processors.push({ - foreach: { - field: ep.referenceField, - ignore_missing: true, - processor: { - enrich: { - policy_name: ep.enrichPolicyName, - ignore_missing: true, - field: ep.field, - target_field: ep.targetField, - max_matches: ep.maxMatches - } - } - } - }) - } - - await client.ingest.putPipeline({ - id: 
topResources[esResourceName].pipeline.id, - body: { - processors - } - }) - } else { - await client.ingest.putPipeline({ - id: topResources[esResourceName].pipeline.id, - body: { - processors: [{ - enrich: { - policy_name: topResources[esResourceName].enrich.policyName, - field: topResources[esResourceName].pipeline.field, - target_field: topResources[esResourceName].pipeline.targetField, - max_matches: topResources[esResourceName].pipeline.maxMatches - } - }] - } - }) - } - } -} - /** * import test data * @return {Promise} */ async function main () { - let keys = Object.keys(models) - - // Sort the models in the order of insertion (for correct enrichment) - const temp = Array(keys.length).fill(null) - keys.forEach(k => { - if (sequelize.models[k].name) { - const esResourceName = modelToESIndexMapping[k] - const index = RESOURCES_IN_ORDER.indexOf(esResourceName) - temp[index] = k - } - }) - keys = _.compact(temp) + const keys = Object.keys(models) await cleanupES(keys) @@ -241,21 +121,6 @@ async function main () { logger.warn('import data for ' + key + ' failed') continue } - try { - await createAndExecuteEnrichPolicy(key) - logger.info('create and execute enrich policy for ' + key + ' done') - } catch (e) { - logger.error(JSON.stringify(_.get(e, 'meta.body', ''), null, 4)) - logger.warn('create and execute enrich policy for ' + key + ' failed') - } - - try { - await createEnrichProcessor(key) - logger.info('create enrich processor (pipeline) for ' + key + ' done') - } catch (e) { - logger.error(JSON.stringify(_.get(e, 'meta.body', ''), null, 4)) - logger.warn('create enrich processor (pipeline) for ' + key + ' failed') - } } logger.info('all done') process.exit(0) diff --git a/src/bootstrap.js b/src/bootstrap.js index 087ba72..5eca77b 100755 --- a/src/bootstrap.js +++ b/src/bootstrap.js @@ -9,6 +9,7 @@ const logger = require('./common/logger') const joi = require('@hapi/joi') joi.id = () => joi.number().integer().min(1) +joi.page = () => joi.number().integer().min(1) joi.pageSize = () => joi.number().integer().min(1).max(config.get('MAX_PAGE_SIZE')) joi.prominence = (name) => joi.string().custom((value, helper) => { // check if value is in the range [0, 1] diff --git a/src/common/errors.js b/src/common/errors.js index 5a3a868..0684425 100644 --- a/src/common/errors.js +++ b/src/common/errors.js @@ -24,6 +24,5 @@ module.exports = { newAuthError: msg => new AppError(401, msg || 'Auth failed.'), newPermissionError: msg => new AppError(403, msg || 'The entity does not exist.'), newConflictError: msg => new AppError(409, msg || 'The entity does not exist.'), - deleteConflictError: msg => new AppError(400, msg || 'Please delete child records first'), - elasticSearchEnrichError: msg => new AppError(500, msg || 'Elasticsearch enrich failed') + deleteConflictError: msg => new AppError(400, msg || 'Please delete child records first') } diff --git a/src/common/es-helper.js b/src/common/es-helper.js index c250010..cfb58bf 100644 --- a/src/common/es-helper.js +++ b/src/common/es-helper.js @@ -303,7 +303,7 @@ async function searchElasticSearch (resource, ...args) { const preResFilters = parseResourceFilter(resource, params, false) const preResFilterResults = [] // resolve pre resource filters - if (!params.enrich && preResFilters.length > 0) { + if (preResFilters.length > 0) { for (const filter of preResFilters) { const resolved = await resolveResFilter(filter, resource) preResFilterResults.push(resolved) @@ -333,7 +333,7 @@ async function searchElasticSearch (resource, ...args) { } // set pre res 
filter results - if (!params.enrich && preResFilterResults.length > 0) { + if (preResFilterResults.length > 0) { for (const filter of preResFilterResults) { const matchField = `${filter.queryField}` setFilterValueToEsQuery(esQuery, matchField, filter.value, filter.queryField) @@ -342,7 +342,7 @@ const ownResFilters = parseResourceFilter(resource, params, true) // set it's own res filter to the main query - if (!params.enrich && ownResFilters.length > 0) { + if (ownResFilters.length > 0) { setResourceFilterToEsQuery(ownResFilters, esQuery) } diff --git a/src/common/helper.js b/src/common/helper.js index c197ee9..0d5cb97 100644 --- a/src/common/helper.js +++ b/src/common/helper.js @@ -1,6 +1,11 @@ const querystring = require('querystring') const _ = require('lodash') const { getControllerMethods } = require('./controller-helper') +const logger = require('./logger') +const config = require('config') +const busApi = require('@topcoder-platform/topcoder-bus-api-wrapper') +const busApiClient = busApi(_.pick(config, ['AUTH0_URL', 'AUTH0_AUDIENCE', 'TOKEN_CACHE_TIME', 'AUTH0_CLIENT_ID', + 'AUTH0_CLIENT_SECRET', 'BUSAPI_URL', 'KAFKA_ERROR_TOPIC', 'AUTH0_PROXY_SERVER_URL'])) /** * get auth user handle or id @@ -90,9 +95,29 @@ function omitAuditFields (entity) { } } +/** + * Send error event to Kafka + * @param {String} topic the topic name + * @param {Object} payload the payload + * @param {String} action the operation during which the error occurred + */ +async function publishError (topic, payload, action) { + _.set(payload, 'apiAction', action) + const message = { + topic, + originator: config.KAFKA_MESSAGE_ORIGINATOR, + timestamp: new Date().toISOString(), + 'mime-type': 'application/json', + payload + } + logger.debug(`Publish error to Kafka topic ${topic}, ${JSON.stringify(message, null, 2)}`) + await busApiClient.postEvent(message) +} + module.exports = { getAuthUser, injectSearchMeta, getControllerMethods, - omitAuditFields + omitAuditFields, + publishError } diff --git a/src/common/service-helper.js b/src/common/service-helper.js index 0869d8a..69e7aec 100644 --- a/src/common/service-helper.js +++ b/src/common/service-helper.js @@ -19,6 +19,7 @@ async function createRecordInEs (resource, entity) { await esHelper.insertIntoES(resource, entity) } catch (err) { logger.logFullError(err) + throw err } } @@ -32,6 +33,7 @@ async function patchRecordInEs (resource, entity) { await esHelper.updateESRecord(resource, entity) } catch (err) { logger.logFullError(err) + throw err } } @@ -46,6 +48,7 @@ async function deleteRecordFromEs (id, params, resource) { await esHelper.deleteESRecord(resource, id) } catch (err) { logger.logFullError(err) + throw err } } @@ -61,9 +64,9 @@ async function getRecordInEs (resource, id, params) { const result = await esHelper.getFromElasticSearch(resource, id, params) return result } catch (err) { - // return error if enrich fails or permission fails + // return error if permission fails if (err.status && err.status === 403) { - throw errors.elasticSearchEnrichError(err.message) + throw errors.ForbiddenError(err.message) } logger.logFullError(err) } diff --git a/src/constants.js b/src/constants.js index c347926..12929e0 100644 --- a/src/constants.js +++ b/src/constants.js @@ -43,7 +43,26 @@ const M2M_SCOPES = { } } +const SequelizeCLSNamespace = 'skills-api' + +const API_ACTION = { + SkillCreate: 'skill.create', + SkillUpdate: 'skill.update', + SkillDelete: 'skill.delete', + SkillPutMetadata: 'skill.putMetadata', +
SkillPatchMetadata: 'skill.patchMetadata', + SkillDeleteMetadata: 'skill.deleteMetadata', + TaxonomyCreate: 'taxonomy.create', + TaxonomyUpdate: 'taxonomy.update', + TaxonomyDelete: 'taxonomy.delete', + TaxonomyPutMetadata: 'taxonomy.putMetadata', + TaxonomyPatchMetadata: 'taxonomy.patchMetadata', + TaxonomyDeleteMetadata: 'taxonomy.deleteMetadata' +} + module.exports = { MANAGER_ROLES, - M2M_SCOPES + M2M_SCOPES, + SequelizeCLSNamespace, + API_ACTION } diff --git a/src/models/index.js b/src/models/index.js index 4bf6b21..a053cd5 100755 --- a/src/models/index.js +++ b/src/models/index.js @@ -2,11 +2,21 @@ * the model index */ const { Sequelize } = require('sequelize') +const cls = require('cls-hooked') const config = require('config') +const constants = require('../constants') const fs = require('fs') const path = require('path') const logger = require('../common/logger') +// Enable CLS so that when using a managed transaction the transaction will be +// automatically passed to all queries within a callback chain. +// No longer need to pass the transaction manually. +// +// See https://sequelize.org/master/manual/transactions.html for more info +const namespace = cls.createNamespace(constants.SequelizeCLSNamespace) +Sequelize.useCLS(namespace) + /** * the database instance */ diff --git a/src/modules/SkillMetadata/service.js b/src/modules/SkillMetadata/service.js index 5c9e6a9..474d40f 100644 --- a/src/modules/SkillMetadata/service.js +++ b/src/modules/SkillMetadata/service.js @@ -4,11 +4,13 @@ const joi = require('@hapi/joi') const _ = require('lodash') +const config = require('config') const errors = require('../../common/errors') const helper = require('../../common/helper') const dbHelper = require('../../common/db-helper') const serviceHelper = require('../../common/service-helper') +const constants = require('../../constants') const { PERMISSION } = require('../../permissions/constants') const sequelize = require('../../models/index') @@ -21,17 +23,31 @@ const resource = serviceHelper.getResource('Skill') * @param instance the skill instance * @param metadata the new metadata * @param auth the auth object + * @param action for which operation performed * @return the updated skill */ -async function updateMetaData (instance, metadata, auth) { - const newEntity = await instance.update({ - ...instance.dataValues, - updatedBy: helper.getAuthUser(auth), - metadata - }) - const taxonomy = await dbHelper.get(Taxonomy, newEntity.taxonomyId) - await serviceHelper.patchRecordInEs(resource, { ...newEntity.dataValues, taxonomyName: taxonomy.name }) - return helper.omitAuditFields(newEntity.dataValues) +async function updateMetaData (instance, metadata, auth, action) { + let payload + try { + return await sequelize.transaction(async () => { + const newEntity = await instance.update({ + ...instance.dataValues, + updatedBy: helper.getAuthUser(auth), + metadata + }) + + payload = newEntity.dataValues + + const taxonomy = await dbHelper.get(Taxonomy, newEntity.taxonomyId) + await serviceHelper.patchRecordInEs(resource, newEntity.dataValues) + return helper.omitAuditFields({ ...newEntity.dataValues, taxonomyName: taxonomy.name }) + }) + } catch (e) { + if (payload) { + helper.publishError(config.SKILLS_ERROR_TOPIC, payload, action) + } + throw e + } } /** @@ -53,7 +69,7 @@ async function fullyUpdate (id, entity, auth) { serviceHelper.hasPermission(PERMISSION.DELETE_SKILL_METADATA, auth) } - return updateMetaData(instance, entity, auth) + return updateMetaData(instance, entity, auth, 
constants.API_ACTION.SkillPutMetadata) } fullyUpdate.schema = { @@ -88,7 +104,7 @@ async function particallyUpdate (id, entity, auth) { serviceHelper.hasPermission(PERMISSION.UPDATE_SKILL_METADATA, auth) } - return updateMetaData(instance, { ...instance.dataValues.metadata, ...entity }, auth) + return updateMetaData(instance, { ...instance.dataValues.metadata, ...entity }, auth, constants.API_ACTION.SkillPatchMetadata) } particallyUpdate.schema = { @@ -116,7 +132,7 @@ async function remove (id, fields, auth) { throw errors.NotFoundError(`Metadata fields: ${nonExistingFields} do not exist`) } - return updateMetaData(instance, _.omit(instance.dataValues.metadata, fields), auth) + return updateMetaData(instance, _.omit(instance.dataValues.metadata, fields), auth, constants.API_ACTION.SkillDeleteMetadata) } remove.schema = { diff --git a/src/modules/health/controller.js b/src/modules/health/controller.js index f4641df..98e2bb7 100644 --- a/src/modules/health/controller.js +++ b/src/modules/health/controller.js @@ -2,7 +2,6 @@ * Controller for health check endpoint */ const models = require('../../models') -const config = require('config') const logger = require('../../common/logger') // the topcoder-healthcheck-dropin library returns checksRun count, @@ -30,4 +29,4 @@ async function checkHealth (req, res) { module.exports = { checkHealth -} \ No newline at end of file +} diff --git a/src/modules/skill/service.js b/src/modules/skill/service.js index aa024d7..5b5b541 100644 --- a/src/modules/skill/service.js +++ b/src/modules/skill/service.js @@ -4,11 +4,13 @@ const joi = require('@hapi/joi') const _ = require('lodash') +const config = require('config') const errors = require('../../common/errors') const helper = require('../../common/helper') const dbHelper = require('../../common/db-helper') const serviceHelper = require('../../common/service-helper') +const constants = require('../../constants') const { PERMISSION } = require('../../permissions/constants') const sequelize = require('../../models/index') @@ -32,12 +34,24 @@ async function create (entity, auth) { const taxonomy = await dbHelper.get(Taxonomy, entity.taxonomyId) await dbHelper.makeSureUnique(Skill, entity, uniqueFields) - const result = await dbHelper.create(Skill, entity, auth) - const created = result.dataValues - created.taxonomyName = taxonomy.name - await serviceHelper.createRecordInEs(resource, created) + let payload + try { + return await sequelize.transaction(async () => { + const result = await dbHelper.create(Skill, entity, auth) - return helper.omitAuditFields(created) + payload = result.dataValues + + const created = { ...result.dataValues, taxonomyName: taxonomy.name } + await serviceHelper.createRecordInEs(resource, created) + + return helper.omitAuditFields(created) + }) + } catch (e) { + if (payload) { + helper.publishError(config.SKILLS_ERROR_TOPIC, payload, constants.API_ACTION.SkillCreate) + } + throw e + } } create.schema = { @@ -82,19 +96,31 @@ async function patch (id, entity, auth) { } } - const newEntity = await instance.update({ - ...entity, - updatedBy: helper.getAuthUser(auth) - }) + let payload + try { + return await sequelize.transaction(async () => { + const newEntity = await instance.update({ + ...entity, + updatedBy: helper.getAuthUser(auth) + }) - if (!taxonomy) { - taxonomy = await dbHelper.get(Taxonomy, newEntity.taxonomyId) - } - const updated = newEntity.dataValues - updated.taxonomyName = taxonomy.name - await serviceHelper.patchRecordInEs(resource, updated) + payload = newEntity.dataValues - 
return helper.omitAuditFields(updated) + if (!taxonomy) { + taxonomy = await dbHelper.get(Taxonomy, newEntity.taxonomyId) + } + const updated = { ...newEntity.dataValues, taxonomyName: taxonomy.name } + + await serviceHelper.patchRecordInEs(resource, updated) + + return helper.omitAuditFields(updated) + }) + } catch (e) { + if (payload) { + helper.publishError(config.SKILLS_ERROR_TOPIC, payload, constants.API_ACTION.SkillUpdate) + } + throw e + } } patch.schema = { @@ -198,7 +224,7 @@ async function search (query) { search.schema = { query: { - page: joi.string().uuid(), + page: joi.page(), perPage: joi.pageSize(), taxonomyId: joi.string().uuid(), name: joi.string(), @@ -215,8 +241,16 @@ search.schema = { * @return no data returned */ async function remove (id, auth, params) { - await dbHelper.remove(Skill, id) - await serviceHelper.deleteRecordFromEs(id, params, resource) + const payload = { id } + try { + return await sequelize.transaction(async () => { + await dbHelper.remove(Skill, id) + await serviceHelper.deleteRecordFromEs(id, params, resource) + }) + } catch (e) { + helper.publishError(config.SKILLS_ERROR_TOPIC, payload, constants.API_ACTION.SkillDelete) + throw e + } } remove.schema = { diff --git a/src/modules/taxonomy/service.js b/src/modules/taxonomy/service.js index f4f06f7..1bea23c 100644 --- a/src/modules/taxonomy/service.js +++ b/src/modules/taxonomy/service.js @@ -4,11 +4,13 @@ const joi = require('@hapi/joi') const _ = require('lodash') +const config = require('config') const errors = require('../../common/errors') const helper = require('../../common/helper') const dbHelper = require('../../common/db-helper') const serviceHelper = require('../../common/service-helper') +const constants = require('../../constants') const { PERMISSION } = require('../../permissions/constants') const sequelize = require('../../models/index') @@ -28,10 +30,22 @@ async function create (entity, auth) { serviceHelper.hasPermission(PERMISSION.ADD_TAXONOMY_METADATA, auth) } - const result = await dbHelper.create(Taxonomy, entity, auth) + let payload + try { + return await sequelize.transaction(async () => { + const result = await dbHelper.create(Taxonomy, entity, auth) - await serviceHelper.createRecordInEs(resource, result.dataValues) - return helper.omitAuditFields(result.dataValues) + payload = result.dataValues + + await serviceHelper.createRecordInEs(resource, result.dataValues) + return helper.omitAuditFields(result.dataValues) + }) + } catch (e) { + if (payload) { + helper.publishError(config.SKILLS_ERROR_TOPIC, payload, constants.API_ACTION.TaxonomyCreate) + } + throw e + } } create.schema = { @@ -64,13 +78,24 @@ async function patch (id, entity, auth) { } } - const newEntity = await instance.update({ - ...entity, - updatedBy: helper.getAuthUser(auth) - }) - - await serviceHelper.patchRecordInEs(resource, newEntity.dataValues) - return helper.omitAuditFields(newEntity.dataValues) + let payload + try { + return await sequelize.transaction(async () => { + const newEntity = await instance.update({ + ...entity, + updatedBy: helper.getAuthUser(auth) + }) + payload = newEntity.dataValues + + await serviceHelper.patchRecordInEs(resource, newEntity.dataValues) + return helper.omitAuditFields(newEntity.dataValues) + }) + } catch (e) { + if (payload) { + helper.publishError(config.SKILLS_ERROR_TOPIC, payload, constants.API_ACTION.TaxonomyUpdate) + } + throw e + } } patch.schema = { @@ -135,7 +160,7 @@ async function search (query) { search.schema = { query: { - page: joi.string().uuid(), + page: 
joi.page(), perPage: joi.pageSize(), name: joi.string() } @@ -154,8 +179,16 @@ async function remove (id, auth, params) { throw errors.deleteConflictError(`Please delete ${Skill.name} with ids ${existing.map(o => o.id)}`) } - await dbHelper.remove(Taxonomy, id) - await serviceHelper.deleteRecordFromEs(id, params, resource) + const payload = { id } + try { + return await sequelize.transaction(async () => { + await dbHelper.remove(Taxonomy, id) + await serviceHelper.deleteRecordFromEs(id, params, resource) + }) + } catch (e) { + helper.publishError(config.SKILLS_ERROR_TOPIC, payload, constants.API_ACTION.TaxonomyDelete) + throw e + } } remove.schema = { diff --git a/src/modules/taxonomyMetadata/service.js b/src/modules/taxonomyMetadata/service.js index 2395d8d..63eb5c5 100644 --- a/src/modules/taxonomyMetadata/service.js +++ b/src/modules/taxonomyMetadata/service.js @@ -4,11 +4,13 @@ const joi = require('@hapi/joi') const _ = require('lodash') +const config = require('config') const errors = require('../../common/errors') const helper = require('../../common/helper') const dbHelper = require('../../common/db-helper') const serviceHelper = require('../../common/service-helper') +const constants = require('../../constants') const { PERMISSION } = require('../../permissions/constants') const sequelize = require('../../models/index') @@ -20,16 +22,30 @@ const resource = serviceHelper.getResource('Taxonomy') * @param instance the taxonomy instance * @param metadata the new metadata * @param auth the auth object + * @param action for which operation performed * @return the updated taxonomy */ -async function updateMetaData (instance, metadata, auth) { - const newEntity = await instance.update({ - ...instance.dataValues, - updatedBy: helper.getAuthUser(auth), - metadata - }) - await serviceHelper.patchRecordInEs(resource, newEntity.dataValues) - return helper.omitAuditFields(newEntity.dataValues) +async function updateMetaData (instance, metadata, auth, action) { + let payload + try { + return await sequelize.transaction(async () => { + const newEntity = await instance.update({ + ...instance.dataValues, + updatedBy: helper.getAuthUser(auth), + metadata + }) + + payload = newEntity.dataValues + + await serviceHelper.patchRecordInEs(resource, newEntity.dataValues) + return helper.omitAuditFields(newEntity.dataValues) + }) + } catch (e) { + if (payload) { + helper.publishError(config.SKILLS_ERROR_TOPIC, payload, action) + } + throw e + } } /** @@ -51,7 +67,7 @@ async function fullyUpdate (id, entity, auth) { serviceHelper.hasPermission(PERMISSION.DELETE_TAXONOMY_METADATA, auth) } - return updateMetaData(instance, entity, auth) + return updateMetaData(instance, entity, auth, constants.API_ACTION.TaxonomyPutMetadata) } fullyUpdate.schema = { @@ -83,7 +99,7 @@ async function particallyUpdate (id, entity, auth) { serviceHelper.hasPermission(PERMISSION.UPDATE_TAXONOMY_METADATA, auth) } - return updateMetaData(instance, { ...instance.dataValues.metadata, ...entity }, auth) + return updateMetaData(instance, { ...instance.dataValues.metadata, ...entity }, auth, constants.API_ACTION.TaxonomyPatchMetadata) } particallyUpdate.schema = { @@ -108,7 +124,7 @@ async function remove (id, fields, auth) { throw errors.NotFoundError(`Metadata fields: ${nonExistingFields} do not exist`) } - return updateMetaData(instance, _.omit(instance.dataValues.metadata, fields), auth) + return updateMetaData(instance, _.omit(instance.dataValues.metadata, fields), auth, constants.API_ACTION.TaxonomyDeleteMetadata) } remove.schema = { 
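The skill, taxonomy, and metadata services above all repeat one pattern: wrap the DB write and the ES sync in a single `sequelize.transaction`, capture the would-be payload, and publish it to `SKILLS_ERROR_TOPIC` if anything throws, the DB change having been rolled back. Distilled into a sketch, where `withErrorReporting` is a hypothetical helper and not part of this PR:

```js
const config = require('config')
const helper = require('../../common/helper')
const sequelize = require('../../models/index')

// Hypothetical helper (not part of this PR) distilling the repeated pattern.
async function withErrorReporting (action, work) {
  let payload
  const capture = (values) => { payload = values } // remember what to report
  try {
    // cls-hooked (wired up in src/models/index.js) hands this transaction to
    // every query issued inside the callback, so nothing is passed explicitly.
    return await sequelize.transaction(() => work(capture))
  } catch (e) {
    if (payload) {
      // The DB change was already rolled back; report the failed payload.
      helper.publishError(config.SKILLS_ERROR_TOPIC, payload, action)
    }
    throw e
  }
}

// Usage, mirroring skill creation in src/modules/skill/service.js:
// return withErrorReporting(constants.API_ACTION.SkillCreate, async (capture) => {
//   const result = await dbHelper.create(Skill, entity, auth)
//   capture(result.dataValues) // published if the ES sync below throws
//   await serviceHelper.createRecordInEs(resource, result.dataValues)
//   return helper.omitAuditFields(result.dataValues)
// })
```

The capture step matters: the payload is recorded after the DB write but before the ES call, so an ES failure still reports the exact row that was rolled back.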
diff --git a/src/permissions/constants.js b/src/permissions/constants.js index 48a3fd9..f355fa0 100644 --- a/src/permissions/constants.js +++ b/src/permissions/constants.js @@ -132,7 +132,7 @@ const PERMISSION = { CREATE_TAXONOMY: { meta: { title: 'Create Taxonomy', - group: 'Taxonomy Metadata' + group: 'Taxonomy' }, topcoderRoles: TOPCODER_ROLES_MANAGERS_AND_ADMINS, scopes: SCOPES_PROJECTS_WRITE @@ -141,7 +141,7 @@ const PERMISSION = { UPDATE_TAXONOMY: { meta: { title: 'Update Taxonomy', - group: 'Taxonomy Metadata' + group: 'Taxonomy' }, topcoderRoles: TOPCODER_ROLES_MANAGERS_AND_ADMINS, scopes: SCOPES_PROJECTS_WRITE @@ -150,7 +150,7 @@ const PERMISSION = { DELETE_TAXONOMY: { meta: { title: 'Delete Taxonomy', - group: 'Taxonomy Metadata' + group: 'Taxonomy' }, topcoderRoles: TOPCODER_ROLES_MANAGERS_AND_ADMINS, scopes: SCOPES_PROJECTS_WRITE @@ -162,7 +162,7 @@ const PERMISSION = { ADD_TAXONOMY_METADATA: { meta: { title: 'Add Taxonomy Metadata', - group: 'Taxonomy', + group: 'Taxonomy Metadata', description: 'Add metadata fields in a taxonomy' }, topcoderRoles: TOPCODER_ROLES_MANAGERS_AND_ADMINS, @@ -172,7 +172,7 @@ const PERMISSION = { UPDATE_TAXONOMY_METADATA: { meta: { title: 'Update Taxonomy Metadata', - group: 'Taxonomy', + group: 'Taxonomy Metadata', description: 'Update Metadata fields from a taxonomy' }, topcoderRoles: TOPCODER_ROLES_MANAGERS_AND_ADMINS, @@ -182,7 +182,7 @@ const PERMISSION = { DELETE_TAXONOMY_METADATA: { meta: { title: 'Delete Taxonomy Metadata', - group: 'Taxonomy', + group: 'Taxonomy Metadata', description: 'Delete Metadata fields from a taxonomy' }, topcoderRoles: TOPCODER_ROLES_MANAGERS_AND_ADMINS,
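A behavioral note on the `cls-hooked` wiring in `src/models/index.js`: once `Sequelize.useCLS(namespace)` is set, any query issued inside a managed transaction callback automatically joins that transaction, which is what lets the services above skip passing `{ transaction }` around. A minimal sketch, with the model name and values being illustrative:

```js
const sequelize = require('./src/models/index')

async function demo () {
  try {
    await sequelize.transaction(async () => {
      // No explicit { transaction } option: cls-hooked attaches the active
      // transaction to every query started inside this callback.
      await sequelize.models.Skill.create({
        name: 'example-skill',                             // illustrative values
        taxonomyId: '00000000-0000-0000-0000-000000000000'
      })
      throw new Error('simulated ES failure') // rolls back the create above
    })
  } catch (e) {
    // The Skill row never hit the database; the transaction was rolled back.
  }
}
```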