diff --git a/.circleci/config.yml b/.circleci/config.yml index 8bc4ada..ae80cb8 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -1,11 +1,13 @@ version: 2 defaults: &defaults docker: - - image: circleci/python:2.7-stretch-browsers + - image: cimg/python:3.11.0-browsers install_dependency: &install_dependency name: Installation of build and deployment dependencies. command: | + sudo apt update sudo apt install jq + sudo apt install python3-pip sudo pip install awscli --upgrade sudo pip install docker-compose install_deploysuite: &install_deploysuite @@ -70,7 +72,7 @@ workflows: branches: only: - develop - - feature/negative_length_error_fix + - PLAT-2032 # Production builds are exectuted only on tagged commits to the # master branch. diff --git a/ReadMe.md b/ReadMe.md index 98cfdfe..86c0264 100644 --- a/ReadMe.md +++ b/ReadMe.md @@ -207,6 +207,7 @@ npm run e2e - TBD + ## Verification Refer to the verification document `Verification.md` diff --git a/compose-dev.yaml b/compose-dev.yaml new file mode 100644 index 0000000..de3d08b --- /dev/null +++ b/compose-dev.yaml @@ -0,0 +1,13 @@ +services: + app: + entrypoint: + - sleep + - infinity + image: node:10.15.1 + platform: linux/amd64 + init: true + volumes: + - type: bind + source: /var/run/docker.sock + target: /var/run/docker.sock + diff --git a/config/default.js b/config/default.js index ddca4ad..59ab6f7 100644 --- a/config/default.js +++ b/config/default.js @@ -51,6 +51,7 @@ module.exports = { V5_RESOURCES_API_URL: process.env.V5_RESOURCES_API_URL || 'http://localhost:4000/v5/resources', V5_TERMS_API_URL: process.env.V5_TERMS_API_URL || 'http://localhost:4000/v5/terms', V5_RESOURCE_ROLES_API_URL: process.env.V5_RESOURCE_ROLES_API_URL || 'http://localhost:4000/v5/resource-roles', + MEMBER_API_URL: process.env.MEMBER_API_URL || 'https://api.topcoder-dev.com/v5/members', V5_CHALLENGE_TYPE_API_URL: process.env.V5_CHALLENGE_TYPE_API_URL || 'http://localhost:4000/v5/challenge-types', 
V4_CHALLENGE_TYPE_API_URL: process.env.V4_CHALLENGE_TYPE_API_URL || 'http://localhost:4000/v4/challenge-types', @@ -84,5 +85,9 @@ module.exports = { SYNC_V5_TERM_UUID: process.env.SYNC_V5_TERM_UUID || '317cd8f9-d66c-4f2a-8774-63c612d99cd4', SYNC_V5_WRITE_ENABLED: process.env.SYNC_V5_WRITE_ENABLED === 'true' || false, - TIMEZONE: process.env.TIMEZONE || 'America/New_York' + DEV_TRACK_ID: '9b6fc876-f4d9-4ccb-9dfd-419247628825', + + TIMEZONE: process.env.TIMEZONE || 'America/New_York', + + IGNORED_ORIGINATORS: process.env.IGNORED_ORIGINATORS ? process.env.IGNORED_ORIGINATORS.split(',') : ['legacy-migration-script'] } diff --git a/package-lock.json b/package-lock.json index 0f4ba98..1953e3c 100644 --- a/package-lock.json +++ b/package-lock.json @@ -7836,4 +7836,4 @@ } } } -} +} \ No newline at end of file diff --git a/package.json b/package.json index d36619d..bdfd14b 100644 --- a/package.json +++ b/package.json @@ -58,5 +58,8 @@ "test/unit/test.js", "test/e2e/test.js" ] + }, + "volta": { + "node": "12.22.12" } } diff --git a/src/app.js b/src/app.js index 78be71e..80b2a3f 100644 --- a/src/app.js +++ b/src/app.js @@ -24,8 +24,8 @@ const consumer = new Kafka.GroupConsumer(helper.getKafkaOptions()) */ const dataHandler = (messageSet, topic, partition) => Promise.each(messageSet, async (m) => { const message = m.message.value.toString('utf8') - logger.info(`Handle Kafka event message; Topic: ${topic}; Partition: ${partition}; Offset: ${ - m.offset}; Message: ${message}.`) + // logger.info(`Handle Kafka event message; Topic: ${topic}; Partition: ${partition}; Offset: ${ + // m.offset}; Message: ${message}.`) let messageJSON try { messageJSON = JSON.parse(message) @@ -39,7 +39,15 @@ const dataHandler = (messageSet, topic, partition) => Promise.each(messageSet, a } if (messageJSON.topic !== topic) { - logger.error(`The message topic ${messageJSON.topic} doesn't match the Kafka topic ${topic}.`) + logger.error(`The message topic ${messageJSON.topic} doesn't match the Kafka 
topic ${topic}. Message: ${JSON.stringify(messageJSON)}`) + + // commit the message and ignore it + await consumer.commitOffset({ topic, partition, offset: m.offset }) + return + } + + if (_.includes(config.IGNORED_ORIGINATORS, messageJSON.originator)) { + logger.error(`The message originator is in the ignored list. Originator: ${messageJSON.originator}`) // commit the message and ignore it await consumer.commitOffset({ topic, partition, offset: m.offset }) @@ -64,13 +72,14 @@ const dataHandler = (messageSet, topic, partition) => Promise.each(messageSet, a try { if (topic === config.CREATE_CHALLENGE_TOPIC) { - await ProcessorService.processCreate(messageJSON) + await ProcessorService.processMessage(messageJSON) } else { - await ProcessorService.processUpdate(messageJSON) + await ProcessorService.processMessage(messageJSON) } - logger.debug('Successfully processed message') + // logger.debug('Successfully processed message') } catch (err) { + logger.error(`Error processing message ${JSON.stringify(messageJSON)}`) logger.logFullError(err) } finally { // Commit offset regardless of error diff --git a/src/common/helper.js b/src/common/helper.js index 3b8d1f5..ec10824 100644 --- a/src/common/helper.js +++ b/src/common/helper.js @@ -4,6 +4,7 @@ const _ = require('lodash') const config = require('config') +const momentTZ = require('moment-timezone') const ifxnjs = require('ifxnjs') const request = require('superagent') const m2mAuth = require('tc-core-library-js').auth.m2m @@ -162,6 +163,47 @@ async function forceV4ESFeeder (legacyId) { await request.put(`${config.V4_ES_FEEDER_API_URL}`).send(body).set({ Authorization: `Bearer ${token}` }) } +/** + * Get the member ID by handle + * @param {String} handle the handle + */ +async function getMemberIdByHandle (handle) { + const m2mToken = await getM2MToken() + let memberId + try { + const res = await getRequest(`${config.MEMBER_API_URL}/${handle}`, m2mToken) + if (_.get(res, 'body.userId')) { + memberId = res.body.userId + } 
+ // handle return from v3 API, handle and memberHandle are the same under case-insensitive condition + handle = _.get(res, 'body.handle') + } catch (error) { + // re-throw all error except 404 Not-Founded, BadRequestError should be thrown if 404 occurs + if (error.status !== 404) { + throw error + } + } + + if (_.isUndefined(memberId)) { + throw new Error(`User with handle: ${handle} doesn't exist`) + } + + return memberId +} + +/** + * Formats a date into a format supported by ifx + * @param {String} dateStr the date in string format + */ +function formatDate (dateStr) { + if (!dateStr) { + return null + } + const date = momentTZ.tz(dateStr, config.TIMEZONE).format('YYYY-MM-DD HH:mm:ss') + logger.info(`Formatting date ${dateStr} New Date ${date}`) + return date +} + module.exports = { getInformixConnection, getKafkaOptions, @@ -171,5 +213,7 @@ module.exports = { putRequest, postRequest, postBusEvent, - forceV4ESFeeder + forceV4ESFeeder, + getMemberIdByHandle, + formatDate } diff --git a/src/constants.js b/src/constants.js index 7fe0b58..497bf35 100644 --- a/src/constants.js +++ b/src/constants.js @@ -1,6 +1,7 @@ /** * constants */ +const metadataExtractor = require('./utils/metadataExtractor') const prizeSetTypes = { ChallengePrizes: 'placement', @@ -32,6 +33,7 @@ const createChallengeStatusesMap = { const challengeStatuses = { New: 'New', Draft: 'Draft', + Approved: 'Approved', Canceled: 'Canceled', Active: 'Active', Completed: 'Completed', @@ -42,7 +44,25 @@ const challengeStatuses = { CancelledWinnerUnresponsive: 'Cancelled - Winner Unresponsive', CancelledClientRequest: 'Cancelled - Client Request', CancelledRequirementsInfeasible: 'Cancelled - Requirements Infeasible', - CancelledZeroRegistrations: 'Cancelled - Zero Registrations' + CancelledZeroRegistrations: 'Cancelled - Zero Registrations', + CancelledPaymentFailed: 'Cancelled - Payment Failed' +} + +const scorecardQuestionMapping = { + 30002212: [ + { + questionId: 30007531, + description: 'Does the 
submission sufficiently satisfy the requirements as described in the provided specification?' + }, + { + questionId: 30007533, + description: 'How would you rate the work ethic of this submitter?' + }, + { + questionId: 30007532, + description: 'How would you rate the quality of this submitters work?' + } + ] } const PhaseStatusTypes = { @@ -57,12 +77,91 @@ const prizeTypesIds = { } const supportedMetadata = { - allowStockArt: 52, - drPoints: 30, - submissionViewable: 53, - submissionLimit: 51, - codeRepo: 85, - environment: 84 + 32: { + method: metadataExtractor.extractBillingProject, + defaultValue: null, + description: 'Billing Project' + }, + 31: { + method: metadataExtractor.extractAdminFee, + defaultValue: 0, + description: 'Admin Fee' + }, + 30: { + method: metadataExtractor.extractDrPoints, + defaultValue: 0, + description: 'DR points' + }, + 57: { + method: metadataExtractor.extractMarkup, + defaultValue: 0, + description: 'Markup' + }, + 35: { + method: metadataExtractor.extractSpecReviewCost, + defaultValue: null, + description: 'Spec review cost' + }, + 41: { + method: metadataExtractor.extractApprovalRequired, + defaultValue: true, + description: 'Approval Required' + }, + 44: { + method: metadataExtractor.extractPostMortemRequired, + defaultValue: true, + description: 'Post-Mortem Required' + }, + 48: { + method: metadataExtractor.extractTrackLateDeliverablesRequired, + defaultValue: true, + description: 'Track Late Deliverables' + }, + 51: { + method: metadataExtractor.extractSubmissionLimit, + defaultValue: null, + description: 'Maximum submissions' + }, + 52: { + method: metadataExtractor.extractAllowStockArtRequired, + defaultValue: false, + description: 'Allow Stock Art' + }, + 53: { + method: metadataExtractor.extractSubmissionViewable, + defaultValue: false, + description: 'Viewable Submissions Flag' + }, + 59: { + method: metadataExtractor.extractReviewFeedback, + defaultValue: false, + description: 'Review Feedback Flag' + }, + 84: { + 
method: metadataExtractor.extractEnvironment, + defaultValue: null, + description: 'Environment' + }, + 85: { + method: metadataExtractor.extractCodeRepo, + defaultValue: null, + description: 'Code repo' + }, + 88: { + method: metadataExtractor.extractEstimateEffortHours, + defaultValue: 0, + description: 'Effort Hours Estimate' + }, + 89: { + method: metadataExtractor.extractEstimateEffortOffshore, + defaultValue: 0, + description: 'Estimate Effort Days offshore' + }, + 90: { + method: metadataExtractor.extractEstimateEffortOnsite, + defaultValue: 0, + description: 'Estimate Effort Days Onsite' + } } module.exports = { @@ -73,5 +172,6 @@ module.exports = { challengeStatuses, PhaseStatusTypes, prizeTypesIds, - supportedMetadata + supportedMetadata, + scorecardQuestionMapping } diff --git a/src/services/ProcessorService.js b/src/services/ProcessorService.js index 72d2330..992135b 100644 --- a/src/services/ProcessorService.js +++ b/src/services/ProcessorService.js @@ -16,38 +16,228 @@ const copilotPaymentService = require('./copilotPaymentService') const timelineService = require('./timelineService') const metadataService = require('./metadataService') const paymentService = require('./paymentService') +const { createOrSetNumberOfReviewers } = require('./selfServiceReviewerService') +const { disableTimelineNotifications } = require('./selfServiceNotificationService') +const legacyChallengeService = require('./legacyChallengeService') +const legacyChallengeReviewService = require('./legacyChallengeReviewService') +const phaseCriteriaService = require('./phaseCriteriaService'); /** - * Sync the information from the v5 phases into legacy + * Drop and recreate phases in ifx * @param {Number} legacyId the legacy challenge ID - * @param {Array} v4Phases the v4 phases * @param {Array} v5Phases the v5 phases + * @param {String} createdBy the createdBy */ -async function syncChallengePhases (legacyId, v5Phases) { +async function recreatePhases (legacyId, v5Phases, createdBy) 
{ + logger.info('recreatePhases :: start') const phaseTypes = await timelineService.getPhaseTypes() const phasesFromIFx = await timelineService.getChallengePhases(legacyId) + logger.debug('Creating phases that exist on v5 and not on legacy...') + for (const phase of v5Phases) { + const phaseLegacyId = _.get(_.find(phaseTypes, pt => pt.name === phase.name), 'phase_type_id') + const existingLegacyPhase = _.find(phasesFromIFx, p => p.phase_type_id === phaseLegacyId) + logger.debug(`Phase ${phase.name} has legacy phase type id ${phaseLegacyId} - Existing Phase ${JSON.stringify(existingLegacyPhase)}`) + if (!existingLegacyPhase && phaseLegacyId) { + const statusTypeId = phase.isOpen + ? constants.PhaseStatusTypes.Open + : (new Date().getTime() <= new Date(phase.scheduledEndDate).getTime() ? constants.PhaseStatusTypes.Scheduled : constants.PhaseStatusTypes.Closed) + logger.debug(`Will create phase ${phase.name}/${phaseLegacyId} with duration ${phase.duration} seconds`) + await timelineService.createPhase( + legacyId, + phaseLegacyId, + statusTypeId, + phase.scheduledStartDate, + phase.actualStartDate, + phase.scheduledEndDate, + phase.actualEndDate, + phase.duration * 1000, + createdBy + ) + //Handle checkpoint phases + //Magic numbers: 15=checkpoint submission, 16=checkpoint screen, 17=checkpoint review, 1=registration + //For dependencyStart: 1=start, 0=end + if(phaseLegacyId==17){ + logger.info(`Creating phase dependencies for checkpoint phases`) + + const registrationPhaseId = await timelineService.getProjectPhaseId(legacyId, 1) + const checkpointSubmissionPhaseId = await timelineService.getProjectPhaseId(legacyId, 15) + const checkpointScreeningPhaseId = await timelineService.getProjectPhaseId(legacyId, 16) + const checkpointReviewPhaseId = await timelineService.getProjectPhaseId(legacyId, 17) + + await timelineService.insertPhaseDependency(registrationPhaseId, checkpointSubmissionPhaseId, 1, createdBy) + await 
timelineService.insertPhaseDependency(checkpointSubmissionPhaseId, checkpointScreeningPhaseId, 0, createdBy) + await timelineService.insertPhaseDependency(checkpointScreeningPhaseId, checkpointReviewPhaseId, 0, createdBy) + + logger.info(`Creating default scorecard records for checkpoint phases`) + //30001364 is the default checkpoint screening scorecard for studio (https://software.topcoder-dev.com/review/actions/ViewScorecard?scid=30001364) + await timelineService.insertScorecardId(checkpointScreeningPhaseId, 30001364, createdBy) + + //30001364 is the default checkpoint review scorecard for studio (https://software.topcoder-dev.com/review/actions/ViewScorecard?scid=30001004) + await timelineService.insertScorecardId(checkpointReviewPhaseId, 30001004, createdBy) + + + } + } else if (!phaseLegacyId) { + logger.warn(`Could not create phase ${phase.name} on legacy!`) + } + } + logger.debug('Deleting phases that exist on legacy and not on v5...') for (const phase of phasesFromIFx) { const phaseName = _.get(_.find(phaseTypes, pt => pt.phase_type_id === phase.phase_type_id), 'name') const v5Equivalent = _.find(v5Phases, p => p.name === phaseName) - if (v5Equivalent) { - // Compare duration and status - if (v5Equivalent.duration * 1000 !== phase.duration || - (v5Equivalent.isOpen && _.toInteger(phase.phase_status_id) === constants.PhaseStatusTypes.Closed) || - (!v5Equivalent.isOpen && _.toInteger(phase.phase_status_id) === constants.PhaseStatusTypes.Open)) { - const newStatus = v5Equivalent.isOpen - ? constants.PhaseStatusTypes.Open - : (new Date().getTime() <= new Date(v5Equivalent.scheduledEndDate).getTime() ? 
constants.PhaseStatusTypes.Scheduled : constants.PhaseStatusTypes.Closed) - // update phase - logger.debug(`Will update phase ${phase.project_phase_id}/${v5Equivalent.name} to duration ${v5Equivalent.duration * 1000} milli`) - await timelineService.updatePhase( - phase.project_phase_id, - legacyId, - v5Equivalent.scheduledStartDate, - v5Equivalent.scheduledEndDate, - v5Equivalent.duration * 1000, - newStatus) + if (!v5Equivalent) { + logger.debug(`Will delete phase ${phaseName}`) + await timelineService.dropPhase(legacyId, phase.project_phase_id) + } + } + logger.info('recreatePhases :: end') +} + +async function addPhaseConstraints(legacyId, v5Phases, createdBy) { + logger.info(`addPhaseConstraints :: start: ${legacyId}, ${JSON.stringify(v5Phases)}`) + const allPhaseCriteria = await phaseCriteriaService.getPhaseCriteria(); + logger.info(`addPhaseConstraints :: allPhaseCriteria: ${JSON.stringify(allPhaseCriteria)}`) + + const phaseTypes = await timelineService.getPhaseTypes() + logger.info(`addPhaseConstraints :: phaseTypes: ${JSON.stringify(phaseTypes)}`) + + const phasesFromIFx = await timelineService.getChallengePhases(legacyId) + + for (const phase of v5Phases) { + logger.info(`addPhaseConstraints :: phase: ${legacyId} -> ${JSON.stringify(phase)}`) + if (phase.constraints == null || phase.constraints.length === 0) continue; + + const phaseLegacyId = _.get(_.find(phaseTypes, pt => pt.name === phase.name), 'phase_type_id') + const existingLegacyPhase = _.find(phasesFromIFx, p => p.phase_type_id === phaseLegacyId) + + const projectPhaseId = _.get(existingLegacyPhase, 'project_phase_id') + if (!projectPhaseId) { + logger.warn(`Could not find phase ${phase.name} on legacy!`) + continue + } + + let constraintName = null; + let constraintValue = null + + if (phase.name === 'Submission') { + const numSubmissionsConstraint = phase.constraints.find(c => c.name === 'Number of Submissions') + if (numSubmissionsConstraint) { + constraintName = 'Submission Number' + 
constraintValue = numSubmissionsConstraint.value + } + } + + if (phase.name === 'Registration') { + const numRegistrantsConstraint = phase.constraints.find(c => c.name === 'Number of Registrants') + if (numRegistrantsConstraint) { + constraintName = 'Registration Number' + constraintValue = numRegistrantsConstraint.value + } + } + + if (phase.name === 'Review') { + const numReviewersConstraint = phase.constraints.find(c => c.name === 'Number of Reviewers') + if (numReviewersConstraint) { + constraintName = 'Reviewer Number' + constraintValue = numReviewersConstraint.value } } + + // We have an interesting situation if a submission phase constraint was added but + // no registgration phase constraint was added. This ends up opening Post-Mortem + // phase if registration closes with 0 submissions. + // For now I'll leave it as is and handle this better in the new Autopilot implementation + // A quick solution would have been adding a registration constraint with value 1 if none is provided when there is a submission phase constraint + + if (constraintName && constraintValue) { + const phaseCriteriaTypeId = _.get(_.find(allPhaseCriteria, pc => pc.name === constraintName), 'phase_criteria_type_id') + if (phaseCriteriaTypeId) { + logger.debug(`Will create phase constraint ${constraintName} with value ${constraintValue}`) + // Ideally we should update the existing phase criteria, but this processor will go away in weeks + // and it's a backend processor, so we can just drop and recreate without slowing down anything + await phaseCriteriaService.dropPhaseCriteria(projectPhaseId, phaseCriteriaTypeId) + await phaseCriteriaService.createPhaseCriteria(projectPhaseId, phaseCriteriaTypeId, constraintValue, createdBy) + } else { + logger.warn(`Could not find phase criteria type for ${constraintName}`) + } + } + + } + logger.info('addPhaseConstraints :: end') +} + +/** + * Sync the information from the v5 phases into legacy + * @param {Number} legacyId the legacy challenge ID + * 
@param {Array} v5Phases the v5 phases + * @param {Boolean} isSelfService is the challenge self-service + * @param {String} createdBy the created by + */ +async function syncChallengePhases (legacyId, v5Phases, createdBy, isSelfService, numOfReviewers, isBeingActivated) { + const phaseTypes = await timelineService.getPhaseTypes() + const phasesFromIFx = await timelineService.getChallengePhases(legacyId) + logger.debug(`Phases from v5: ${JSON.stringify(v5Phases)}`) + logger.debug(`Phases from IFX: ${JSON.stringify(phasesFromIFx)}`) + let phaseGroups = {} + _.forEach(phasesFromIFx, p => { + if (!phaseGroups[p.phase_type_id]) { + phaseGroups[p.phase_type_id] = [] + } + phaseGroups[p.phase_type_id].push(p) + }) + _.forEach(_.cloneDeep(phaseGroups), (pg, pt) => { + phaseGroups[pt] = _.sortBy(pg, 'scheduled_start_time') + }) + + for (const key of _.keys(phaseGroups)) { + let phaseOrder = 0 + let v5Equivalents = undefined + for (const phase of phaseGroups[key]) { + const phaseName = _.get(_.find(phaseTypes, pt => pt.phase_type_id === phase.phase_type_id), 'name') + if (_.isUndefined(v5Equivalents)) { + v5Equivalents = _.sortBy(_.filter(v5Phases, p => p.name === phaseName), 'scheduledStartDate') + } + if (v5Equivalents.length > 0) { + if (v5Equivalents.length === phaseGroups[key].length) { + const v5Equivalent = v5Equivalents[phaseOrder] + logger.debug(`Will update phase ${phaseName}/${v5Equivalent.name} from ${phase.duration} to duration ${v5Equivalent.duration * 1000} milli`) + let newStatus = _.toInteger(phase.phase_status_id) + if (v5Equivalent.isOpen && _.toInteger(phase.phase_status_id) === constants.PhaseStatusTypes.Closed) { + newStatus = constants.PhaseStatusTypes.Scheduled + } + + if (isBeingActivated && ['Registration', 'Submission'].indexOf(v5Equivalent.name) != -1) { + const scheduledStartDate = v5Equivalent.scheduledStartDate; + const now = new Date().getTime(); + if (scheduledStartDate != null && new Date(scheduledStartDate).getTime() < now) { + newStatus = 
constants.PhaseStatusTypes.Open; + } + + logger.debug(`Challenge phase ${v5Equivalent.name} status is being set to: ${newStatus} on challenge activation.`) + } + + await timelineService.updatePhase( + phase.project_phase_id, + legacyId, + phase.fixed_start_time ? v5Equivalent.scheduledStartDate : null, + v5Equivalent.scheduledStartDate, + v5Equivalent.scheduledEndDate, + v5Equivalent.duration * 1000, + newStatus, + isBeingActivated && newStatus == constants.PhaseStatusTypes.Open ? new Date() : null + ) + } else { + logger.info(`number of ${phaseName} does not match`) + } + } else { + logger.info(`No v5 Equivalent Found for ${phaseName}`) + } + if (isSelfService && phaseName === 'Review') { + // make sure to set the required reviewers to 2 + await createOrSetNumberOfReviewers(_.toString(phase.project_phase_id), _.toString(numOfReviewers), _.toString(createdBy)) + } + phaseOrder = phaseOrder + 1 + } } // TODO: What about iterative reviews? There can be many for the same challenge. // TODO: handle timeline template updates @@ -133,12 +323,15 @@ async function getV5Terms (v5TermsId, m2mToken) { * @param {Array} toBeDeleted the array of groups to be deleted * @param {String|Number} legacyChallengeId the legacy challenge ID */ -async function associateChallengeGroups (toBeAdded = [], toBeDeleted = [], legacyChallengeId) { - for (const group of toBeAdded) { - await groupService.addGroupToChallenge(legacyChallengeId, group) +async function associateChallengeGroups (v5groups, legacyId, m2mToken) { + const { groupsToBeAdded, groupsToBeDeleted } = await getGroups(v5groups, legacyId, m2mToken) + logger.info(`Groups to add to challenge: ${legacyId}: ${JSON.stringify(groupsToBeAdded)}`) + for (const group of groupsToBeAdded) { + await groupService.addGroupToChallenge(legacyId, group) } - for (const group of toBeDeleted) { - await groupService.removeGroupFromChallenge(legacyChallengeId, group) + logger.info(`Groups to remove from challenge: ${legacyId}: 
${JSON.stringify(groupsToBeDeleted)}`) + for (const group of groupsToBeDeleted) { + await groupService.removeGroupFromChallenge(legacyId, group) } } @@ -196,21 +389,17 @@ async function associateChallengeTerms (v5Terms, legacyChallengeId, createdBy, u */ async function setCopilotPayment (challengeId, legacyChallengeId, prizeSets = [], createdBy, updatedBy, m2mToken) { try { - const copilotPayment = _.get(_.find(prizeSets, p => p.type === config.COPILOT_PAYMENT_TYPE), 'prizes[0].value', null) - if (copilotPayment) { - logger.debug('Fetching challenge copilot...') - const res = await helper.getRequest(`${config.V5_RESOURCES_API_URL}?challengeId=${challengeId}&roleId=${config.COPILOT_ROLE_ID}`, m2mToken) - const [copilotResource] = res.body - if (!copilotResource) { - logger.warn(`Copilot does not exist for challenge ${challengeId} (legacy: ${legacyChallengeId})`) - return - } - logger.debug(`Setting Copilot Payment: ${copilotPayment} for legacyId ${legacyChallengeId} for copilot ${copilotResource.memberId}`) - if (copilotPayment !== null && copilotPayment >= 0) { - await copilotPaymentService.setManualCopilotPayment(legacyChallengeId, createdBy, updatedBy) - } - await copilotPaymentService.setCopilotPayment(legacyChallengeId, copilotPayment, createdBy, updatedBy) + const copilotPayment = _.get(_.find(prizeSets, p => p.type === config.COPILOT_PAYMENT_TYPE), 'prizes[0].value', 0) + logger.debug('Fetching challenge copilot...') + const res = await helper.getRequest(`${config.V5_RESOURCES_API_URL}?challengeId=${challengeId}&roleId=${config.COPILOT_ROLE_ID}`, m2mToken) + const [copilotResource] = res.body + if (!copilotResource) { + logger.warn(`Copilot does not exist for challenge ${challengeId} (legacy: ${legacyChallengeId})`) + return } + logger.debug(`Setting Copilot Payment: ${copilotPayment} for legacyId ${legacyChallengeId} for copilot ${copilotResource.memberId}`) + await copilotPaymentService.setManualCopilotPayment(legacyChallengeId, createdBy, updatedBy) + 
await copilotPaymentService.setCopilotPayment(legacyChallengeId, copilotPayment, createdBy, updatedBy) } catch (e) { logger.error('Failed to set the copilot payment!') logger.debug(e) @@ -278,12 +467,12 @@ async function getLegacyTrackInformation (trackId, typeId, tags, m2mToken) { `typeId=${typeId}` ] _.each((tags || []), (tag) => { - query.push(`tags[]=${tag}`) + query.push(`tags[]=${encodeURIComponent(tag)}`) }) try { const res = await helper.getRequest(`${config.V5_CHALLENGE_MIGRATION_API_URL}/convert-to-v4?${query.join('&')}`, m2mToken) return { - track: res.body.track, + // track: res.body.track, subTrack: res.body.subTrack, ...(res.body.isTask ? { task: true } : {}) } @@ -301,7 +490,7 @@ async function getLegacyTrackInformation (trackId, typeId, tags, m2mToken) { * @param {Array} informixTermsIds IDs from Informix [{termsId, roleId}] * @returns the DTO for saving a draft contest.(refer SaveDraftContestDTO in ap-challenge-microservice) */ -async function parsePayload (payload, m2mToken, isCreated = true, informixGroupIds) { +async function parsePayload (payload, m2mToken) { try { let projectId if (_.get(payload, 'legacy.directProjectId')) { @@ -318,7 +507,7 @@ async function parsePayload (payload, m2mToken, isCreated = true, informixGroupI name: payload.name, reviewType: _.get(payload, 'legacy.reviewType', 'INTERNAL'), projectId, - status: payload.status + status: payload.status === constants.challengeStatuses.CancelledPaymentFailed ? 
constants.challengeStatuses.CancelledFailedScreening : payload.status } if (payload.billingAccountId) { data.billingAccountId = payload.billingAccountId @@ -329,13 +518,13 @@ async function parsePayload (payload, m2mToken, isCreated = true, informixGroupI if (payload.copilotId) { data.copilotId = payload.copilotId } - if (isCreated) { - // hard code some required properties for v4 api - data.confidentialityType = _.get(payload, 'legacy.confidentialityType', 'public') - data.submissionGuidelines = 'Please read above' - data.submissionVisibility = true - data.milestoneId = 1 - } + // if (isCreated) { + // hard code some required properties for v4 api + data.confidentialityType = _.get(payload, 'legacy.confidentialityType', 'public') + data.submissionGuidelines = 'Please read above' + data.submissionVisibility = true + data.milestoneId = 1 + // } data.detailedRequirements = payload.description if (payload.privateDescription) { @@ -390,33 +579,19 @@ async function parsePayload (payload, m2mToken, isCreated = true, informixGroupI const techResult = await getTechnologies(m2mToken) data.technologies = _.filter(techResult.result.content, e => payload.tags.includes(e.name)) + if (data.technologies.length < 1) { + data.technologies = _.filter(techResult.result.content, e => e.name === 'Other') + } + const platResult = await getPlatforms(m2mToken) data.platforms = _.filter(platResult.result.content, e => payload.tags.includes(e.name)) - } - if (payload.groups && _.get(payload, 'groups.length', 0) > 0) { - const oldGroups = _.map(informixGroupIds, g => _.toString(g)) - const newGroups = [] - for (const group of payload.groups) { - try { - const groupInfo = await getGroup(group, m2mToken) - if (!_.isEmpty(_.get(groupInfo, 'oldId'))) { - newGroups.push(_.toString(_.get(groupInfo, 'oldId'))) - } - } catch (e) { - logger.warn(`Failed to load details for group ${group}`) - } + if (data.platforms.length < 1) { + data.platforms = _.filter(platResult.result.content, e => e.name === 
'Other') } - data.groupsToBeAdded = _.difference(newGroups, oldGroups) - data.groupsToBeDeleted = _.difference(oldGroups, newGroups) - if (data.groupsToBeAdded.length > 0) { - logger.debug(`parsePayload :: Adding Groups ${JSON.stringify(data.groupsToBeAdded)}`) - } - if (data.groupsToBeDeleted.length > 0) { - logger.debug(`parsePayload :: Deleting Groups ${JSON.stringify(data.groupsToBeAdded)}`) - } - } else if (informixGroupIds && informixGroupIds.length > 0) { - data.groupsToBeDeleted = _.map(informixGroupIds, g => _.toString(g)) + + logger.debug(`Technologies: ${JSON.stringify(data.technologies)}`) + logger.debug(`Platforms: ${JSON.stringify(data.platforms)}`) } if (payload.metadata && payload.metadata.length > 0) { @@ -448,6 +623,41 @@ async function parsePayload (payload, m2mToken, isCreated = true, informixGroupI } } +async function getGroups (v5Groups, legacyId, m2mToken) { + const v4GroupIds = await groupService.getGroupsForChallenge(legacyId) + let groupsToBeAdded = [] + let groupsToBeDeleted = [] + if (v5Groups && v5Groups.length > 0) { + const oldGroups = _.map(v4GroupIds, g => _.toString(g)) + const newGroups = [] + + for (const group of v5Groups) { + try { + const groupInfo = await getGroup(group, m2mToken) + if (!_.isEmpty(_.get(groupInfo, 'oldId'))) { + newGroups.push(_.toString(_.get(groupInfo, 'oldId'))) + } + } catch (e) { + logger.warn(`Failed to load details for group ${group}`) + } + } + groupsToBeAdded = _.difference(newGroups, oldGroups) + groupsToBeDeleted = _.difference(oldGroups, newGroups) + if (groupsToBeAdded.length > 0) { + logger.debug(`parsePayload :: Adding Groups ${JSON.stringify(groupsToBeAdded)}`) + } + if (groupsToBeDeleted.length > 0) { + logger.debug(`parsePayload :: Deleting Groups ${JSON.stringify(groupsToBeDeleted)}`) + } + } else if (v4GroupIds && v4GroupIds.length > 0) { + groupsToBeDeleted = _.map(v4GroupIds, g => _.toString(g)) + } + return { + groupsToBeAdded, + groupsToBeDeleted + } +} + /** * Activate challenge * 
@param {Number} challengeId the challenge ID @@ -484,247 +694,224 @@ async function rePostResourcesOnKafka (challengeUuid, m2mToken) { * @param {Object} message the kafka message * @returns {Number} the created legacy id */ -async function processCreate (message) { - if (message.payload.status === constants.challengeStatuses.New) { - logger.debug(`Will skip creating on legacy as status is ${constants.challengeStatuses.New}`) - return - } - - if (_.get(message, 'payload.legacy.pureV5Task')) { - logger.debug('Challenge is a pure v5 task. Will skip...') - return +async function createChallenge (saveDraftContestDTO, challengeUuid, createdByUserId, v5legacyPayload, m2mToken) { + // logger.debug('processCreate :: beforeTry') + // try { + logger.info(`processCreate :: ${config.V4_CHALLENGE_API_URL}?filter=skipForum=true body: ${JSON.stringify({ param: saveDraftContestDTO })}`) + let newChallenge + try { + newChallenge = await helper.postRequest(`${config.V4_CHALLENGE_API_URL}?filter=skipForum=true`, { param: saveDraftContestDTO }, m2mToken) + } catch (e) { + throw new Error(`createChallenge - Calling POST v4 API Failed. 
+ Request URL: ${`${config.V4_CHALLENGE_API_URL}?filter=skipForum=true`} + Params: ${JSON.stringify({ param: saveDraftContestDTO })} + Error: ${JSON.stringify(e)} + Token: ${JSON.stringify(m2mToken)}`) } - const m2mToken = await helper.getM2MToken() - - const saveDraftContestDTO = await parsePayload(message.payload, m2mToken) - logger.debug('Parsed Payload', saveDraftContestDTO) - const challengeUuid = message.payload.id - - logger.debug('processCreate :: beforeTry') + const legacyId = newChallenge.body.result.content.id + let forumId = 0 + forumId = _.get(newChallenge, 'body.result.content.forumId', forumId) try { - logger.info(`processCreate :: Skip Forums - ${config.V4_CHALLENGE_API_URL}?filter=skipForum=true body: ${JSON.stringify({ param: _.omit(saveDraftContestDTO, ['groupsToBeAdded', 'groupsToBeDeleted']) })}`) - const newChallenge = await helper.postRequest(`${config.V4_CHALLENGE_API_URL}?filter=skipForum=true`, { param: _.omit(saveDraftContestDTO, ['groupsToBeAdded', 'groupsToBeDeleted']) }, m2mToken) - - let forumId = 0 - if (message.payload.legacy && message.payload.legacy.forumId) { - forumId = message.payload.legacy.forumId - } - forumId = _.get(newChallenge, 'body.result.content.forumId', forumId) - await helper.forceV4ESFeeder(newChallenge.body.result.content.id) - await associateChallengeGroups(saveDraftContestDTO.groupsToBeAdded, saveDraftContestDTO.groupsToBeDeleted, newChallenge.body.result.content.id) - // await associateChallengeTerms(saveDraftContestDTO.termsToBeAdded, saveDraftContestDTO.termsToBeRemoved, newChallenge.body.result.content.id) - await setCopilotPayment(challengeUuid, newChallenge.body.result.content.id, _.get(message, 'payload.prizeSets'), _.get(message, 'payload.createdBy'), _.get(message, 'payload.updatedBy'), m2mToken) - await helper.patchRequest(`${config.V5_CHALLENGE_API_URL}/${challengeUuid}`, { - legacy: { - ...message.payload.legacy, - track: saveDraftContestDTO.track, - subTrack: saveDraftContestDTO.subTrack, - isTask: 
saveDraftContestDTO.task || false, - directProjectId: newChallenge.body.result.content.projectId, - forumId - }, - legacyId: newChallenge.body.result.content.id - }, m2mToken) - // Repost all challenge resource on Kafka so they will get created on legacy by the legacy-challenge-resource-processor - await rePostResourcesOnKafka(challengeUuid, m2mToken) - await timelineService.enableTimelineNotifications(newChallenge.body.result.content.id, _.get(message, 'payload.createdBy')) - logger.debug('End of processCreate') - return newChallenge.body.result.content.id + await helper.forceV4ESFeeder(legacyId) } catch (e) { - logger.error('processCreate Catch', e) - throw e + logger.error(`createChallenge - Error calling forceV4ESFeeder ${e}`) } -} -processCreate.schema = { - message: Joi.object().keys({ - topic: Joi.string().required(), - originator: Joi.string().required(), - timestamp: Joi.date().required(), - 'mime-type': Joi.string().required(), - payload: Joi.object().keys({ - id: Joi.string().required(), - typeId: Joi.string().required(), - trackId: Joi.string().required(), - legacy: Joi.object().keys({ - track: Joi.string().required(), - reviewType: Joi.string().required(), - confidentialityType: Joi.string(), - directProjectId: Joi.number(), - forumId: Joi.number().integer().positive() - }).unknown(true), - task: Joi.object().keys({ - isTask: Joi.boolean().default(false), - isAssigned: Joi.boolean().default(false), - memberId: Joi.string().allow(null) - }), - billingAccountId: Joi.number(), - name: Joi.string().required(), - description: Joi.string(), - privateDescription: Joi.string(), - phases: Joi.array().items(Joi.object().keys({ - id: Joi.string().required(), - duration: Joi.number().positive().required() - }).unknown(true)), - prizeSets: Joi.array().items(Joi.object().keys({ - type: Joi.string().valid(_.values(constants.prizeSetTypes)).required(), - prizes: Joi.array().items(Joi.object().keys({ - value: Joi.number().positive().required() - 
}).unknown(true)).min(1).required() - }).unknown(true)), - tags: Joi.array().items(Joi.string().required()), // tag names - projectId: Joi.number().integer().positive().required(), - copilotId: Joi.number().integer().positive().optional(), - status: Joi.string().valid(_.values(Object.keys(constants.createChallengeStatusesMap))).required(), - groups: Joi.array().items(Joi.string()), - startDate: Joi.date() - }).unknown(true).required() - }).required() + await helper.patchRequest(`${config.V5_CHALLENGE_API_URL}/${challengeUuid}`, { + legacy: { + ...v5legacyPayload, + track: saveDraftContestDTO.track, + subTrack: saveDraftContestDTO.subTrack, + isTask: saveDraftContestDTO.task || false, + directProjectId: newChallenge.body.result.content.projectId, + forumId + }, + legacyId + }, m2mToken) + // Repost all challenge resource on Kafka so they will get created on legacy by the legacy-challenge-resource-processor + await rePostResourcesOnKafka(challengeUuid, m2mToken) + await timelineService.enableTimelineNotifications(legacyId, createdByUserId) + await metadataService.createOrUpdateMetadata(legacyId, 9, 'On', createdByUserId) // autopilot + return legacyId } /** * Process update challenge message * @param {Object} message the kafka message */ -async function processUpdate (message) { - if (_.get(message, 'payload.legacy.pureV5Task')) { - logger.debug('Challenge is a pure v5 task. Will skip...') +async function processMessage (message) { + if (_.get(message, 'payload.legacy.pureV5Task') || _.get(message, 'payload.legacy.pureV5')) { + logger.debug(`Challenge ${message.payload.id} is a pure v5 task or challenge. Will skip...`) return } - let legacyId = message.payload.legacyId if (message.payload.status === constants.challengeStatuses.New) { logger.debug(`Will skip creating on legacy as status is ${constants.challengeStatuses.New}`) return - } else if (!legacyId) { - logger.debug('Legacy ID does not exist. 
Will create...') - legacyId = await processCreate(message) } + + if (message.payload.status === constants.challengeStatuses.Approved) { + logger.debug(`Will skip updating on legacy as status is ${constants.challengeStatuses.Approved}`) + return + } + + logger.info(`Processing Kafka Message: ${JSON.stringify(message)}`) + + const createdByUserHandle = _.get(message, 'payload.createdBy') + const updatedByUserHandle = _.get(message, 'payload.updatedBy') + const updatedAt = _.get(message, 'payload.updated', new Date().toISOString()) + + const createdByUserId = await helper.getMemberIdByHandle(createdByUserHandle) + let updatedByUserId = createdByUserId + if (updatedByUserHandle !== createdByUserHandle) { + updatedByUserId = await helper.getMemberIdByHandle(updatedByUserHandle) + } + + let legacyId = message.payload.legacyId + const challengeUuid = message.payload.id const m2mToken = await helper.getM2MToken() - let challenge - try { - // ensure challenge existed - challenge = await getChallengeById(m2mToken, legacyId) - if (!challenge) { - throw new Error(`Could not find challenge ${legacyId}`) + const saveDraftContestDTO = await parsePayload(message.payload, m2mToken) + + if (!legacyId) { + logger.debug('Legacy ID does not exist. Will create...') + legacyId = await createChallenge(saveDraftContestDTO, challengeUuid, createdByUserId, message.payload.legacy, m2mToken) + + await recreatePhases(legacyId, message.payload.phases, updatedByUserId) + + if (_.get(message, 'payload.legacy.selfService')) { + await disableTimelineNotifications(legacyId, createdByUserId) // disable } - } catch (e) { - // postponne kafka event - logger.warn(`Error getting challenge by id, RETRY TURNED OFF ${JSON.stringify(e)}`) - // logger.info('Challenge does not exist yet. 
Will post the same message back to the bus API') - // logger.error(`Error: ${JSON.stringify(e)}`) - - // const retryCountIdentifier = `${config.KAFKA_GROUP_ID.split(' ').join('_')}_retry_count` - // let currentRetryCount = parseInt(_.get(message.payload, retryCountIdentifier, 1), 10) - // if (currentRetryCount <= config.MAX_RETRIES) { - // await new Promise((resolve) => { - // setTimeout(async () => { - // currentRetryCount += 1 - // await helper.postBusEvent(config.UPDATE_CHALLENGE_TOPIC, { ...message.payload, [retryCountIdentifier]: currentRetryCount }) - // resolve() - // }, config.RETRY_TIMEOUT * currentRetryCount) - // }) - // } else { - // logger.error(`Failed to process message after ${config.MAX_RETRIES} retries. Aborting...`) - // } - // return } - const v4GroupIds = await groupService.getGroupsForChallenge(legacyId) - logger.info(`GroupIDs Found in Informix: ${JSON.stringify(v4GroupIds)}`) - - const saveDraftContestDTO = await parsePayload(message.payload, m2mToken, false, v4GroupIds) logger.debug('Result from parsePayload:') - logger.debug(JSON.stringify(saveDraftContestDTO, null, 2)) - // logger.debug('Parsed Payload', saveDraftContestDTO) - try { + logger.debug(JSON.stringify(saveDraftContestDTO)) + + let metaValue + for (const metadataKey of _.keys(constants.supportedMetadata)) { try { - if (challenge) { - await helper.putRequest(`${config.V4_CHALLENGE_API_URL}/${legacyId}`, { param: _.omit(saveDraftContestDTO, ['groupsToBeAdded', 'groupsToBeDeleted']) }, m2mToken) + metaValue = constants.supportedMetadata[metadataKey].method(message.payload, constants.supportedMetadata[metadataKey].defaultValue) + if (metaValue !== null && metaValue !== '') { + logger.info(`Setting ${constants.supportedMetadata[metadataKey].description} to ${metaValue}`) + await metadataService.createOrUpdateMetadata(legacyId, metadataKey, metaValue, updatedByUserId) } } catch (e) { - logger.warn('Failed to update the challenge via the V4 API') - logger.error(e) - } - - // Update 
metadata in IFX - if (message.payload.metadata && message.payload.metadata.length > 0) { - for (const metadataKey of _.keys(constants.supportedMetadata)) { - const entry = _.find(message.payload.metadata, meta => meta.name === metadataKey) - if (entry) { - if (metadataKey === 'submissionLimit') { - // data here is JSON stringified - try { - const parsedEntryValue = JSON.parse(entry.value) - if (parsedEntryValue.limit) { - entry.value = parsedEntryValue.count - } else { - entry.value = null - } - } catch (e) { - entry.value = null - } - } - try { - await metadataService.createOrUpdateMetadata(legacyId, constants.supportedMetadata[metadataKey], entry.value, _.get(message, 'payload.updatedBy') || _.get(message, 'payload.createdBy')) - } catch (e) { - logger.warn(`Failed to set ${metadataKey} (${constants.supportedMetadata[metadataKey]})`) - } - } - } + logger.warn(`Failed to set ${constants.supportedMetadata[metadataKey].description} to ${metaValue}`) } - if (message.payload.status && challenge) { - // logger.info(`The status has changed from ${challenge.currentStatus} to ${message.payload.status}`) - if (message.payload.status === constants.challengeStatuses.Active && challenge.currentStatus !== constants.challengeStatuses.Active) { - logger.info('Activating challenge...') - await activateChallenge(legacyId) - logger.info('Activated!') - // Repost all challenge resource on Kafka so they will get created on legacy by the legacy-challenge-resource-processor - await rePostResourcesOnKafka(message.payload.id, m2mToken) + } + + logger.info(`Set Associations for challenge ${legacyId}`) + await updateMemberPayments(legacyId, message.payload.prizeSets, updatedByUserId) + logger.info(`Associate groups for challenge ${legacyId}`) + await associateChallengeGroups(message.payload.groups, legacyId, m2mToken) + logger.info(`Associate challenge terms for challenge ${legacyId}`) + await associateChallengeTerms(message.payload.terms, legacyId, createdByUserId, updatedByUserId) + 
logger.info(`set copilot for challenge ${legacyId}`) + await setCopilotPayment(challengeUuid, legacyId, _.get(message, 'payload.prizeSets'), createdByUserId, updatedByUserId, m2mToken) + + try { + logger.info(`force V4 ES Feeder for the legacy challenge ${legacyId}`) + await helper.forceV4ESFeeder(legacyId) + } catch (e) { + logger.warn(`Failed to call V4 ES Feeder ${JSON.stringify(e)}`) + } + + let challenge + try { + challenge = await getChallengeById(m2mToken, legacyId) + } catch (e) { + throw new Error(`Error getting challenge by id - Error: ${JSON.stringify(e)}`) + } + + // If iterative review is open + if (_.find(_.get(message.payload, 'phases'), p => p.isOpen && p.name === 'Iterative Review')) { + // Try to read reviews and insert them into informix DB + if (message.payload.metadata && message.payload.legacy.reviewScorecardId) { + let orReviewFeedback = _.find(message.payload.metadata, meta => meta.name === 'or_review_feedback') + let orReviewScore = _.find(message.payload.metadata, meta => meta.name === 'or_review_score') + if (!_.isUndefined(orReviewFeedback) && !_.isUndefined(orReviewScore)) { + orReviewFeedback = JSON.parse(orReviewFeedback) + const reviewResponses = [] + _.each(orReviewFeedback, (value, key) => { + const questionId = _.get(_.find(constants.scorecardQuestionMapping[message.payload.legacy.reviewScorecardId], item => _.toString(item.questionId) === _.toString(key) || _.toLower(item.description) === _.toLower(key)), 'questionId') + reviewResponses.push({ + questionId, + answer: value + }) + }) + orReviewScore = _.toNumber(orReviewFeedback) + await legacyChallengeReviewService.insertReview(legacyId, message.payload.legacy.reviewScorecardId, orReviewScore, reviewResponses, createdByUserId) } - if (message.payload.status === constants.challengeStatuses.Completed && challenge.currentStatus !== constants.challengeStatuses.Completed) { - if (message.payload.task.isTask) { - logger.info('Challenge is a TASK') - if (!message.payload.winners || 
message.payload.winners.length === 0) { - throw new Error('Cannot close challenge without winners') - } - const winnerId = _.find(message.payload.winners, winner => winner.placement === 1).userId - logger.info(`Will close the challenge with ID ${legacyId}. Winner ${winnerId}!`) - await closeChallenge(legacyId, winnerId) - } else { - logger.info('Challenge type is not a task.. Skip closing challenge...') + } + } + + let isBeingActivated = false; + + if (message.payload.status && challenge) { + // Whether we need to sync v4 ES again + let needSyncV4ES = false + // logger.info(`The status has changed from ${challenge.currentStatus} to ${message.payload.status}`) + if (message.payload.status === constants.challengeStatuses.Active && challenge.currentStatus !== constants.challengeStatuses.Active) { + isBeingActivated = true; + logger.info('Activating challenge...') + const activated = await activateChallenge(legacyId) + logger.info(`Activated! ${JSON.stringify(activated)}`) + // make sure autopilot is on + await metadataService.createOrUpdateMetadata(legacyId, 9, 'On', createdByUserId) // autopilot + // Repost all challenge resource on Kafka so they will get created on legacy by the legacy-challenge-resource-processor + await rePostResourcesOnKafka(challengeUuid, m2mToken) + needSyncV4ES = true + } + if (message.payload.status === constants.challengeStatuses.Completed && challenge.currentStatus !== constants.challengeStatuses.Completed) { + if (message.payload.task.isTask) { + logger.info('Challenge is a TASK') + if (!message.payload.winners || message.payload.winners.length === 0) { + throw new Error('Cannot close challenge without winners') } + const winnerId = _.find(message.payload.winners, winner => winner.placement === 1).userId + logger.info(`Will close the challenge with ID ${legacyId}. Winner ${winnerId}!`) + await closeChallenge(legacyId, winnerId) + needSyncV4ES = true + } else { + logger.info('Challenge type is not a task.. 
Skip closing challenge...') } } + if (!_.get(message.payload, 'task.isTask')) { - await syncChallengePhases(message.payload.legacyId, message.payload.phases) + const numOfReviewers = 2 + await syncChallengePhases(legacyId, message.payload.phases, createdByUserId, _.get(message, 'payload.legacy.selfService'), numOfReviewers, isBeingActivated) + await addPhaseConstraints(legacyId, message.payload.phases, createdByUserId); + needSyncV4ES = true } else { logger.info('Will skip syncing phases as the challenge is a task...') } - await updateMemberPayments(message.payload.legacyId, message.payload.prizeSets, _.get(message, 'payload.updatedBy') || _.get(message, 'payload.createdBy')) - await associateChallengeGroups(saveDraftContestDTO.groupsToBeAdded, saveDraftContestDTO.groupsToBeDeleted, legacyId) - await associateChallengeTerms(message.payload.terms, legacyId, _.get(message, 'payload.createdBy'), _.get(message, 'payload.updatedBy') || _.get(message, 'payload.createdBy')) - await setCopilotPayment(message.payload.id, legacyId, _.get(message, 'payload.prizeSets'), _.get(message, 'payload.createdBy'), _.get(message, 'payload.updatedBy') || _.get(message, 'payload.createdBy'), m2mToken) + if (message.payload.status === constants.challengeStatuses.CancelledClientRequest && challenge.currentStatus !== constants.challengeStatuses.CancelledClientRequest) { + logger.info('Cancelling challenge...') + await legacyChallengeService.cancelChallenge(legacyId, updatedByUserId, updatedAt) + needSyncV4ES = true + } else { + await legacyChallengeService.updateChallengeAudit(legacyId, updatedByUserId, updatedAt) + } - try { - await helper.forceV4ESFeeder(legacyId) - } catch (e) { - logger.warn('Failed to call V4 ES Feeder') + if (needSyncV4ES) { + try { + logger.info(`Resync V4 ES for the legacy challenge ${legacyId}`) + await helper.forceV4ESFeeder(legacyId) + } catch (e) { + logger.warn(`Resync V4 - Failed to call V4 ES Feeder ${JSON.stringify(e)}`) + } } - } catch (e) { - 
logger.error('processUpdate Catch', e) - throw e } } -processUpdate.schema = { +processMessage.schema = { message: Joi.object().keys({ topic: Joi.string().required(), originator: Joi.string().required(), timestamp: Joi.date().required(), 'mime-type': Joi.string().required(), + key: Joi.string().allow(null), payload: Joi.object().keys({ legacyId: Joi.number().integer().positive(), legacy: Joi.object().keys({ @@ -732,7 +919,8 @@ processUpdate.schema = { reviewType: Joi.string().required(), confidentialityType: Joi.string(), directProjectId: Joi.number(), - forumId: Joi.number().integer().positive() + forumId: Joi.number().integer().positive(), + selfService: Joi.boolean() }).unknown(true), task: Joi.object().keys({ isTask: Joi.boolean().default(false), @@ -752,7 +940,7 @@ processUpdate.schema = { prizeSets: Joi.array().items(Joi.object().keys({ type: Joi.string().valid(_.values(constants.prizeSetTypes)).required(), prizes: Joi.array().items(Joi.object().keys({ - value: Joi.number().positive().required() + value: Joi.number().min(0).required() }).unknown(true)) }).unknown(true)).min(1), tags: Joi.array().items(Joi.string().required()).min(1), // tag names @@ -764,8 +952,8 @@ processUpdate.schema = { } module.exports = { - processCreate, - processUpdate + // processCreate, + processMessage } // logger.buildService(module.exports) diff --git a/src/services/legacyChallengeReviewService.js b/src/services/legacyChallengeReviewService.js new file mode 100644 index 0000000..f003f0a --- /dev/null +++ b/src/services/legacyChallengeReviewService.js @@ -0,0 +1,124 @@ +/** + * Legacy Challenge Service + * Interacts with InformixDB + * Note: this is built to work for topgear challenges and iterative review phases + */ +const _ = require('lodash') +const logger = require('../common/logger') +const util = require('util') +const helper = require('../common/helper') +const IDGenerator = require('../common/idGenerator') +const reviewIdGen = new IDGenerator('review_id_seq') +const 
reviewItemIdGen = new IDGenerator('review_item_id_seq') + +const ITERATIVE_REVIEWER_RESOURCE_ROLE_ID = 21 +const QUERY_GET_ITERATIVE_REVIEW_RESOURCE_FOR_CHALLENGE = `SELECT limit 1 resource_id as resourceid FROM resource WHERE project_id = %d AND resource_role_id = ${ITERATIVE_REVIEWER_RESOURCE_ROLE_ID}` + +const QUERY_CREATE_REVIEW = 'INSERT INTO review (review_id, resource_id, submission_id, project_phase_id, scorecard_id, committed, score, initial_score, create_user, create_date, modify_user, modify_date) values (?,?,?,?,?,?,?,?,?,CURRENT,?,CURRENT)' + +const QUERY_CREATE_REVIEW_ITEM = 'INSERT INTO review_item (review_item_id, review_id, scorecard_question_id, upload_id, answer, sort, create_user, create_date, modify_user, modify_date) values (?,?,?,?,?,?,?,CURRENT,?,CURRENT)' + +const QUERY_GET_SUBMISSION = 'SELECT FIRST 1 * FROM submission s INNER JOIN upload u on s.upload_id = u.upload_id WHERE u.project_id = %d AND upload_status_id = 1 AND submission_status_id = 1 ORDER BY u.CREATE_DATE ASC' + +const QUERY_GET_PROJECT_PHASE = 'select pc.parameter scorecard_id, pp.project_phase_id project_phase_id from project_phase pp inner join phase_criteria pc on pc.project_phase_id = pp.project_phase_id where pp.project_id = %d and pp.phase_type_id = 18 and phase_criteria_type_id = 1' + +/** + * Prepare Informix statement + * @param {Object} connection the Informix connection + * @param {String} sql the sql + * @return {Object} Informix statement + */ +async function prepare (connection, sql) { + // logger.debug(`Preparing SQL ${sql}`) + const stmt = await connection.prepareAsync(sql) + return Promise.promisifyAll(stmt) +} + +/** + * Insert review in IFX + * @param {Number} challengeLegacyId the legacy challenge ID + * @param {Number} scorecardId the scorecard ID + * @param {Number} score the review score + * @param {Array} responses the review responses + * @param {Number} createdBy the creator user ID + */ +async function insertReview (challengeLegacyId, scorecardId, 
score, responses, createdBy) { + const connection = await helper.getInformixConnection() + let result = null + let reviewId + try { + const resourceId = await getIterativeReviewerResourceId(connection, challengeLegacyId) + if (!resourceId) throw new Error('Cannot find Iterative Reviewer') + const submissionId = await getSubmissionId(connection, challengeLegacyId) + if (!submissionId) throw new Error('Cannot find Submission') + const projectPhaseId = await getProjectPhaseId(connection, challengeLegacyId) + if (!projectPhaseId) throw new Error('Cannot find Project Phase Id') + reviewId = await reviewIdGen.getNextId() + await connection.beginTransactionAsync() + const query = await prepare(connection, QUERY_CREATE_REVIEW) + result = await query.executeAsync([reviewId, resourceId, submissionId, projectPhaseId, scorecardId, 1, score, score, createdBy, createdBy]) + for (let i = 0; i < responses.length; i += 1) { + await insertReviewItem(connection, reviewId, responses[i], i, createdBy) + } + await connection.commitTransactionAsync() + } catch (e) { + logger.error(`Error in 'insertReview' ${e}, rolling back transaction`) + await connection.rollbackTransactionAsync() + throw e + } finally { + logger.info(`Review ${challengeLegacyId} has been created`) + await connection.closeAsync() + } + return result +} + +/** + * Insert review item in IFX + * @param {Object} connection + * @param {Number} reviewId the review ID + * @param {Object} response the response + * @param {Number} sort the sort + * @param {Number} createdBy the creator user ID + */ +async function insertReviewItem (connection, reviewId, response, sort, createdBy) { + let result = null + const reviewItemId = await reviewItemIdGen.getNextId() + await connection.beginTransactionAsync() + const query = await prepare(connection, QUERY_CREATE_REVIEW_ITEM) + result = await query.executeAsync([reviewItemId, reviewId, response.questionId, null, response.answer, sort, createdBy, createdBy]) + return result +} + +/** + * 
Gets the iterative reviewer resource id + * @param {Object} connection + * @param {Number} challengeLegacyId + */ +async function getIterativeReviewerResourceId (connection, challengeLegacyId) { + const result = await connection.queryAsync(util.format(QUERY_GET_ITERATIVE_REVIEW_RESOURCE_FOR_CHALLENGE, challengeLegacyId)) + return _.get(result, '[0].resourceid', null) +} + +/** + * Gets the submission id + * @param {Object} connection + * @param {Number} challengeLegacyId + */ +async function getSubmissionId (connection, challengeLegacyId) { + const result = await connection.queryAsync(util.format(QUERY_GET_SUBMISSION, challengeLegacyId)) + return _.get(result, '[0].submission_id', null) +} + +/** + * Gets the project phase id + * @param {Object} connection + * @param {Number} challengeLegacyId + */ +async function getProjectPhaseId (connection, challengeLegacyId) { + const result = await connection.queryAsync(util.format(QUERY_GET_PROJECT_PHASE, challengeLegacyId)) + return _.get(result, '[0].project_phase_id', null) +} + +module.exports = { + insertReview +} diff --git a/src/services/legacyChallengeService.js b/src/services/legacyChallengeService.js new file mode 100644 index 0000000..e95b981 --- /dev/null +++ b/src/services/legacyChallengeService.js @@ -0,0 +1,75 @@ +/** + * Legacy Challenge Service + * Interacts with InformixDB + */ +const logger = require('../common/logger') +const util = require('util') +const helper = require('../common/helper') +const { createChallengeStatusesMap } = require('../constants') + +const QUERY_UPDATE_PROJECT = 'UPDATE project SET project_status_id = ?, modify_user = ?, modify_date = ? WHERE project_id = %d' +const QUERY_UPDATE_PROJECT_AUDIT = 'UPDATE project SET modify_user = ?, modify_date = ? 
WHERE project_id = %d' + +/** + * Prepare Informix statement + * @param {Object} connection the Informix connection + * @param {String} sql the sql + * @return {Object} Informix statement + */ +async function prepare (connection, sql) { + // logger.debug(`Preparing SQL ${sql}`) + const stmt = await connection.prepareAsync(sql) + return Promise.promisifyAll(stmt) +} + +/** + * Update a challenge in IFX + * @param {Number} challengeLegacyId the legacy challenge ID + * @param {Number} updatedBy the user ID + * @param {String} updatedAt the challenge modified time + */ +async function cancelChallenge (challengeLegacyId, updatedBy, updatedAt) { + const connection = await helper.getInformixConnection() + let result = null + try { + await connection.beginTransactionAsync() + const query = await prepare(connection, util.format(QUERY_UPDATE_PROJECT, challengeLegacyId)) + result = await query.executeAsync([createChallengeStatusesMap.CancelledClientRequest, updatedBy, helper.formatDate(updatedAt)]) + await connection.commitTransactionAsync() + } catch (e) { + logger.error(`Error in 'cancelChallenge' ${e}, rolling back transaction`) + await connection.rollbackTransactionAsync() + throw e + } finally { + logger.info(`Challenge ${challengeLegacyId} has been cancelled`) + await connection.closeAsync() + } + return result +} + +/** + * Update a challenge audit fields in IFX + * @param {Number} challengeLegacyId the legacy challenge ID + * @param {Number} updatedBy the user ID + * @param {String} updatedAt the challenge modified time + */ +async function updateChallengeAudit (challengeLegacyId, updatedBy, updatedAt) { + const connection = await helper.getInformixConnection() + let result = null + try { + const query = await prepare(connection, util.format(QUERY_UPDATE_PROJECT_AUDIT, challengeLegacyId)) + result = await query.executeAsync([updatedBy, helper.formatDate(updatedAt)]) + } catch (e) { + logger.error(`Error in 'updateChallengeAudit' ${e}`) + throw e + } finally { + 
logger.info(`Challenge audit for ${challengeLegacyId} has been updated`) + await connection.closeAsync() + } + return result +} + +module.exports = { + cancelChallenge, + updateChallengeAudit +} diff --git a/src/services/metadataService.js b/src/services/metadataService.js index 166962c..bce66cf 100644 --- a/src/services/metadataService.js +++ b/src/services/metadataService.js @@ -58,21 +58,21 @@ async function createOrUpdateMetadata (challengeLegacyId, typeId, value, created const [existing] = await getMetadataEntry(challengeLegacyId, typeId) if (existing) { if (value) { - logger.info(`Metadata ${typeId} exists. Will update`) + // logger.info(`Metadata ${typeId} exists. Will update`) const query = await prepare(connection, QUERY_UPDATE) result = await query.executeAsync([value, createdBy, typeId, challengeLegacyId]) } else { - logger.info(`Metadata ${typeId} exists. Will delete`) + // logger.info(`Metadata ${typeId} exists. Will delete`) const query = await prepare(connection, QUERY_DELETE) result = await query.executeAsync([challengeLegacyId, typeId]) } } else { - logger.info(`Metadata ${typeId} does not exist. Will create`) + // logger.info(`Metadata ${typeId} does not exist. 
Will create`) const query = await prepare(connection, QUERY_CREATE) result = await query.executeAsync([challengeLegacyId, typeId, value, createdBy, createdBy]) } // await connection.commitTransactionAsync() - logger.info(`Metadata with typeId ${typeId} has been enabled for challenge ${challengeLegacyId}`) + // logger.info(`Metadata with typeId ${typeId} has been enabled for challenge ${challengeLegacyId}`) } catch (e) { logger.error(`Error in 'createOrUpdateMetadata' ${e}, rolling back transaction`) await connection.rollbackTransactionAsync() diff --git a/src/services/phaseCriteriaService.js b/src/services/phaseCriteriaService.js new file mode 100644 index 0000000..dd00e48 --- /dev/null +++ b/src/services/phaseCriteriaService.js @@ -0,0 +1,81 @@ +/** + * Number of reviewers Service + * Interacts with InformixDB + */ +const util = require('util') +const logger = require('../common/logger') +const helper = require('../common/helper') + +const QUERY_GET_PHASE_CRITERIA = 'SELECT phase_criteria_type_id, name FROM phase_criteria_type_lu;' +const QUERY_CREATE = 'INSERT INTO phase_criteria (project_phase_id, phase_criteria_type_id, parameter, create_user, create_date, modify_user, modify_date) VALUES (?, ?, ?, ?, CURRENT, ?, CURRENT)' +const QUERY_DELETE = 'DELETE FROM phase_criteria WHERE project_phase_id = ? AND phase_criteria_type_id = ?' 
+ +/** + * Prepare Informix statement + * @param {Object} connection the Informix connection + * @param {String} sql the sql + * @return {Object} Informix statement + */ +async function prepare (connection, sql) { + // logger.debug(`Preparing SQL ${sql}`) + const stmt = await connection.prepareAsync(sql) + return Promise.promisifyAll(stmt) +} + +async function getPhaseCriteria () { + const connection = await helper.getInformixConnection() + let result = null + try { + result = await connection.queryAsync(QUERY_GET_PHASE_CRITERIA) + } catch (e) { + logger.error(`Error in 'getPhaseCriteria' ${e}`) + throw e + } finally { + await connection.closeAsync() + } + return result +} + +async function dropPhaseCriteria(phaseId, phaseCriteriaTypeId) { + const connection = await helper.getInformixConnection() + let result = null + try { + await connection.beginTransactionAsync() + const query = await prepare(connection, QUERY_DELETE) + result = await query.executeAsync([phaseId, phaseCriteriaTypeId]) + await connection.commitTransactionAsync() + } catch (e) { + logger.error(`Error in 'dropPhaseCriteria' ${e}`) + await connection.rollbackTransactionAsync() + throw e + } finally { + await connection.closeAsync() + } + return result +} + +async function createPhaseCriteria(phaseId, phaseCriteriaTypeId, value, createdBy) { + const connection = await helper.getInformixConnection() + let result = null + try { + await connection.beginTransactionAsync() + const query = await prepare(connection, QUERY_CREATE) + result = await query.executeAsync([phaseId, phaseCriteriaTypeId, value, createdBy, createdBy]) + await connection.commitTransactionAsync() + } catch (e) { + logger.error(`Error in 'createPhaseCriteria' ${e}`) + await connection.rollbackTransactionAsync() + throw e + } finally { + await connection.closeAsync() + } + return result +} + + + +module.exports = { + getPhaseCriteria, + createPhaseCriteria, + dropPhaseCriteria +} diff --git 
a/src/services/selfServiceNotificationService.js b/src/services/selfServiceNotificationService.js new file mode 100644 index 0000000..cf89d77 --- /dev/null +++ b/src/services/selfServiceNotificationService.js @@ -0,0 +1,72 @@ +/** + * timeline notification Service + * Interacts with InformixDB + */ +const util = require('util') +const logger = require('../common/logger') +const helper = require('../common/helper') + +const QUERY_GET_ENTRY = 'SELECT notification_type_id FROM notification WHERE external_ref_id = %d AND project_id = %d' +const QUERY_DELETE = 'DELETE FROM notification WHERE external_ref_id = ? AND project_id = ?' + +/** + * Prepare Informix statement + * @param {Object} connection the Informix connection + * @param {String} sql the sql + * @return {Object} Informix statement + */ +async function prepare (connection, sql) { + // logger.debug(`Preparing SQL ${sql}`) + const stmt = await connection.prepareAsync(sql) + return Promise.promisifyAll(stmt) +} + +/** + * Get entry + * @param {Number} legacyId the legacy challenge ID + * @param {String} userId the userId + */ +async function getEntry (legacyId, userId) { + const connection = await helper.getInformixConnection() + let result = null + try { + result = await connection.queryAsync(util.format(QUERY_GET_ENTRY, userId, legacyId)) + } catch (e) { + logger.error(`Error in 'getEntry' ${e}`) + throw e + } finally { + await connection.closeAsync() + } + return result +} + +/** + * Disable timeline notifications + * @param {Number} legacyId the legacy challenge ID + * @param {String} userId the userId + */ +async function disableTimelineNotifications (legacyId, userId) { + const connection = await helper.getInformixConnection() + let result = null + try { + await connection.beginTransactionAsync() + const [existing] = await getEntry(legacyId, userId) + if (existing) { + const query = await prepare(connection, QUERY_DELETE) + result = await query.executeAsync([userId, legacyId]) + } + await 
connection.commitTransactionAsync() + } catch (e) { + logger.error(`Error in 'disableTimelineNotifications' ${e}, rolling back transaction`) + await connection.rollbackTransactionAsync() + throw e + } finally { + await connection.closeAsync() + } + return result +} + +module.exports = { + getEntry, + disableTimelineNotifications +} diff --git a/src/services/selfServiceReviewerService.js b/src/services/selfServiceReviewerService.js new file mode 100644 index 0000000..05367dd --- /dev/null +++ b/src/services/selfServiceReviewerService.js @@ -0,0 +1,86 @@ +/** + * Number of reviewers Service + * Interacts with InformixDB + */ +const util = require('util') +const logger = require('../common/logger') +const helper = require('../common/helper') + +const QUERY_GET_ENTRY = 'SELECT parameter FROM phase_criteria WHERE project_phase_id = %d AND phase_criteria_type_id = 6' +const QUERY_CREATE = 'INSERT INTO phase_criteria (project_phase_id, phase_criteria_type_id, parameter, create_user, create_date, modify_user, modify_date) VALUES (?, 6, ?, ?, CURRENT, ?, CURRENT)' +const QUERY_UPDATE = 'UPDATE phase_criteria SET parameter = ?, modify_user = ?, modify_date = CURRENT WHERE project_phase_id = ? AND phase_criteria_type_id = 6' +const QUERY_DELETE = 'DELETE FROM phase_criteria WHERE project_phase_id = ? 
AND phase_criteria_type_id = 6' + +/** + * Prepare Informix statement + * @param {Object} connection the Informix connection + * @param {String} sql the sql + * @return {Object} Informix statement + */ +async function prepare (connection, sql) { + // logger.debug(`Preparing SQL ${sql}`) + const stmt = await connection.prepareAsync(sql) + return Promise.promisifyAll(stmt) +} + +/** + * Get entry + * @param {Number} phaseId the phase ID + */ +async function getEntry (phaseId) { + // logger.debug(`Getting Groups for Challenge ${challengeLegacyId}`) + const connection = await helper.getInformixConnection() + let result = null + try { + result = await connection.queryAsync(util.format(QUERY_GET_ENTRY, phaseId)) + } catch (e) { + logger.error(`Error in 'getEntry' ${e}`) + throw e + } finally { + await connection.closeAsync() + } + return result +} + +/** + * Merge number of reviewers + * @param {Number} phaseId the legacy challenge ID + * @param {Number=} value the value + * @param {String} createdBy the created by + */ +async function createOrSetNumberOfReviewers (phaseId, value, createdBy) { + const connection = await helper.getInformixConnection() + let result = null + try { + await connection.beginTransactionAsync() + const [existing] = await getEntry(phaseId) + if (existing) { + if (value) { + const query = await prepare(connection, QUERY_UPDATE) + logger.info(`Will update with values: ${value}, ${createdBy}, ${phaseId}`) + result = await query.executeAsync([value, createdBy, phaseId]) + } else { + const query = await prepare(connection, QUERY_DELETE) + logger.info(`Will delete with values: ${phaseId}`) + result = await query.executeAsync([phaseId]) + } + } else { + const query = await prepare(connection, QUERY_CREATE) + logger.info(`Will create with values: ${phaseId}, ${value}, ${createdBy}, ${createdBy}`) + result = await query.executeAsync([phaseId, value, createdBy, createdBy]) + } + await connection.commitTransactionAsync() + } catch (e) { + 
logger.error(`Error in 'createOrSetNumberOfReviewers' ${e}, rolling back transaction`) + await connection.rollbackTransactionAsync() + throw e + } finally { + await connection.closeAsync() + } + return result +} + +module.exports = { + getEntry, + createOrSetNumberOfReviewers +} diff --git a/src/services/timelineService.js b/src/services/timelineService.js index 210c380..6502dc1 100644 --- a/src/services/timelineService.js +++ b/src/services/timelineService.js @@ -2,26 +2,35 @@ * Timeline Service * Interacts with InformixDB */ +const _ = require('lodash') + const logger = require('../common/logger') const util = require('util') const config = require('config') -const momentTZ = require('moment-timezone') +const IDGenerator = require('../common/idGenerator') const helper = require('../common/helper') +const phaseIdGen = new IDGenerator('project_phase_id_seq') + const QUERY_GET_PHASE_TYPES = 'SELECT phase_type_id, name FROM phase_type_lu' -const QUERY_GET_CHALLENGE_PHASES = 'SELECT project_phase_id, scheduled_start_time, scheduled_end_time, duration, phase_status_id, phase_type_id FROM project_phase WHERE project_id = %d' -const QUERY_UPDATE_CHALLENGE_PHASE = 'UPDATE project_phase SET scheduled_start_time = ?, scheduled_end_time = ?, duration = ?, phase_status_id = ? WHERE project_phase_id = %d and project_id = %d' + +const QUERY_GET_CHALLENGE_PHASES = 'SELECT project_phase_id, fixed_start_time, scheduled_start_time, scheduled_end_time, duration, phase_status_id, phase_type_id FROM project_phase WHERE project_id = %d' +const QUERY_DROP_CHALLENGE_PHASE = 'DELETE FROM project_phase WHERE project_id = ? AND project_phase_id = ?' 
+const QUERY_INSERT_CHALLENGE_PHASE = 'INSERT INTO project_phase (project_phase_id, project_id, phase_type_id, phase_status_id, scheduled_start_time, scheduled_end_time, duration, create_user, create_date, modify_user, modify_date) VALUES (?, ?, ?, ?, ?, ?, ?, ?, CURRENT, ?, CURRENT)' +const QUERY_UPDATE_CHALLENGE_PHASE = 'UPDATE project_phase SET fixed_start_time = ?, scheduled_start_time = ?, scheduled_end_time = ?, duration = ?, phase_status_id = ? WHERE project_phase_id = %d and project_id = %d' +const QUERY_UPDATE_CHALLENGE_PHASE_WITH_START_TIME = 'UPDATE project_phase SET fixed_start_time = ?, scheduled_start_time = ?, scheduled_end_time = ?, duration = ?, phase_status_id = ?, actual_start_time = ? WHERE project_phase_id = %d and project_id = %d' + +const QUERY_DROP_CHALLENGE_PHASE_CRITERIA = 'DELETE FROM phase_criteria WHERE project_phase_id = ?' + const QUERY_GET_TIMELINE_NOTIFICATION_SETTINGS = 'SELECT value FROM project_info WHERE project_id = %d and project_info_type_id = %d' + const QUERY_CREATE_TIMELINE_NOTIFICATIONS = 'INSERT INTO project_info (project_id, project_info_type_id, value, create_user, create_date, modify_user, modify_date) VALUES (?, "11", "On", ?, CURRENT, ?, CURRENT)' const QUERY_UPDATE_TIMELINE_NOTIFICATIONS = 'UPDATE project_info SET value = "On", modify_user = ?, modify_date = CURRENT WHERE project_info_type_id = "11" AND project_id = ?' 
-/** - * Formats a date into a format supported by ifx - * @param {String} dateStr the date in string format - */ -function formatDate (dateStr) { - return momentTZ.tz(dateStr, config.TIMEZONE).format('YYYY-MM-DD HH:mm:ss') -} +const QUERY_INSERT_CHALLENGE_PHASE_DEPENDENCY = 'INSERT INTO phase_dependency (dependency_phase_id, dependent_phase_id, dependency_start, dependent_start, lag_time, create_user, create_date, modify_user, modify_date) VALUES (?, ?, ?, 1, 0, ?, CURRENT, ?, CURRENT)' +const QUERY_GET_PROJECT_PHASE_ID = 'SELECT project_phase_id as project_phase_id FROM project_phase WHERE project_id = %d AND phase_type_id = %d' + +const QUERY_INSERT_CHALLENGE_PHASE_SCORECARD_ID = 'INSERT INTO phase_criteria (project_phase_id, phase_criteria_type_id, parameter, create_user, create_date, modify_user, modify_date) VALUES (?, 1, ?, ?, CURRENT, ?, CURRENT)' /** * Prepare Informix statement @@ -52,6 +61,87 @@ async function getPhaseTypes () { return result } +async function insertPhaseDependency(dependencyPhaseId, dependentPhaseId, dependencyStart, createdBy){ + + logger.info(`Creating phase dependency ${dependencyPhaseId} to ${dependentPhaseId} at ${dependencyStart}`) + const connection = await helper.getInformixConnection() + let result = null + try { + let query = await prepare(connection, QUERY_INSERT_CHALLENGE_PHASE_DEPENDENCY) + result = await query.executeAsync([dependencyPhaseId, dependentPhaseId, dependencyStart, createdBy, createdBy]) + } catch (e) { + logger.error(`Error in 'insertPhaseDependency' ${e}`) + throw e + } finally { + await connection.closeAsync() + } + return result +} + + +async function insertScorecardId(projectPhaseId, scorecardId, createdBy){ + + logger.info(`Creating scorecard ID ${projectPhaseId} use scorecard ${scorecardId}`) + const connection = await helper.getInformixConnection() + let result = null + try { + let query = await prepare(connection, QUERY_INSERT_CHALLENGE_PHASE_SCORECARD_ID) + result = await 
query.executeAsync([projectPhaseId, scorecardId, createdBy, createdBy]) + } catch (e) { + logger.error(`Error in 'insertScorecardId' ${e}`) + throw e + } finally { + await connection.closeAsync() + } + return result +} + +/** + * Gets phase for the given phase type for the given challenge ID + */ +async function getProjectPhaseId(challengeLegacyId, phaseTypeId) { + logger.info(`Getting project phase ID type ${phaseTypeId} for challenge ${challengeLegacyId}`) + const connection = await helper.getInformixConnection() + let result = null + try { + await connection.beginTransactionAsync() + result = await connection.queryAsync(util.format(QUERY_GET_PROJECT_PHASE_ID, challengeLegacyId, phaseTypeId)) + } catch (e) { + logger.error(`Error in 'getProjectPhaseId' ${e}`) + throw e + } finally { + await connection.closeAsync() + } + const project_phase_id = _.get(result, '[0].project_phase_id', null) + logger.info(`Project phase ID: ${project_phase_id}`) + return project_phase_id +} +/** + * Drop challenge phase + * @param {Number} challengeLegacyId the legacy challenge ID + * @param {Number} projectPhaseId the phase ID + */ +async function dropPhase (challengeLegacyId, projectPhaseId) { + const connection = await helper.getInformixConnection() + let result = null + try { + await connection.beginTransactionAsync() + let query = await prepare(connection, QUERY_DROP_CHALLENGE_PHASE_CRITERIA) + result = await query.executeAsync([projectPhaseId]) + query = await prepare(connection, QUERY_DROP_CHALLENGE_PHASE) + result = await query.executeAsync([challengeLegacyId, projectPhaseId]) + await connection.commitTransactionAsync() + } catch (e) { + logger.error(`Error in 'dropPhase' ${e}, rolling back transaction`) + await connection.rollbackTransactionAsync() + throw e + } finally { + logger.info('Phases have been deleted') + await connection.closeAsync() + } + return result +} + /** * Gets the challenge phases from ifx * @param {Number} challengeLegacyId the legacy challenge ID @@ 
-70,22 +160,83 @@ async function getChallengePhases (challengeLegacyId) { return result } +/** + * Create a phase in IFX + * @param {Number} challengeLegacyId the legacy challenge ID + * @param {Number} phaseTypeId the legacy phase type ID + * @param {Number} statusTypeId the status type ID + * @param {Date} scheduledStartDate the scheduled start date + * @param {Date} actualStartDate the actual start date + * @param {Date} scheduledEndDate the scheduled end date + * @param {Date} actualEndDate the actual end date + * @param {Date} duration the duration + * @param {String} createdBy the createdBy + */ +async function createPhase (challengeLegacyId, phaseTypeId, statusTypeId, scheduledStartDate, actualStartDate, scheduledEndDate, actualEndDate, duration, createdBy) { + const nextId = await phaseIdGen.getNextId() + const connection = await helper.getInformixConnection() + let result = null + try { + await connection.beginTransactionAsync() + const query = await prepare(connection, QUERY_INSERT_CHALLENGE_PHASE) + logger.debug(`Query data: ${JSON.stringify([ + nextId, + challengeLegacyId, + phaseTypeId, + statusTypeId, + helper.formatDate(scheduledStartDate), + helper.formatDate(scheduledEndDate), + duration, + createdBy, + createdBy + ])}`) + result = await query.executeAsync([ + nextId, + challengeLegacyId, + phaseTypeId, + statusTypeId, + helper.formatDate(scheduledStartDate), + helper.formatDate(scheduledEndDate), + duration, + createdBy, + createdBy + ]) + await connection.commitTransactionAsync() + + } catch (e) { + logger.error(`Error in 'createPhase' ${e}, rolling back transaction`) + await connection.rollbackTransactionAsync() + throw e + } finally { + logger.info(`Phase ${phaseTypeId} has been created`) + await connection.closeAsync() + } + return result +} + /** * Update a phase in IFX * @param {Number} phaseId the phase ID * @param {Number} challengeLegacyId the legacy challenge ID + * @param {Date} fixedStartTime the fixed start date * @param {Date} 
startTime the scheduled start date * @param {Date} endTime the scheduled end date * @param {Date} duration the duration * @param {Number} statusTypeId the status type ID */ -async function updatePhase (phaseId, challengeLegacyId, startTime, endTime, duration, statusTypeId) { +async function updatePhase (phaseId, challengeLegacyId, fixedStartTime, startTime, endTime, duration, statusTypeId, actualStartTime) { const connection = await helper.getInformixConnection() let result = null try { // await connection.beginTransactionAsync() - const query = await prepare(connection, util.format(QUERY_UPDATE_CHALLENGE_PHASE, phaseId, challengeLegacyId)) - result = await query.executeAsync([formatDate(startTime), formatDate(endTime), duration, statusTypeId]) + const query = actualStartTime == null ? + await prepare(connection, util.format(QUERY_UPDATE_CHALLENGE_PHASE, phaseId, challengeLegacyId)) : + await prepare(connection, util.format(QUERY_UPDATE_CHALLENGE_PHASE_WITH_START_TIME, phaseId, challengeLegacyId)) + + result = actualStartTime == null ? 
+ await query.executeAsync([helper.formatDate(fixedStartTime), helper.formatDate(startTime), helper.formatDate(endTime), duration, statusTypeId]) : + await query.executeAsync([helper.formatDate(fixedStartTime), helper.formatDate(startTime), helper.formatDate(endTime), duration, statusTypeId, helper.formatDate(actualStartTime)]) + // await connection.commitTransactionAsync() } catch (e) { logger.error(`Error in 'updatePhase' ${e}, rolling back transaction`) @@ -151,5 +302,10 @@ module.exports = { getChallengePhases, getPhaseTypes, updatePhase, - enableTimelineNotifications + enableTimelineNotifications, + createPhase, + dropPhase, + insertPhaseDependency, + getProjectPhaseId, + insertScorecardId } diff --git a/src/utils/metadataExtractor.js b/src/utils/metadataExtractor.js new file mode 100644 index 0000000..9810af1 --- /dev/null +++ b/src/utils/metadataExtractor.js @@ -0,0 +1,221 @@ +/** + * Metadata extractor + */ +const _ = require('lodash') + +/** + * Get metadata entry by key + * @param {Array} metadata the metadata array + * @param {String} key the metadata key + */ +const getMeta = (metadata = [], key) => _.find(metadata, meta => meta.name === key) + +/** + * Extract billing project + * @param {Object} challenge the challenge object + * @param {Any} defaultValue the default value + */ +function extractBillingProject (challenge, defaultValue) { + return _.get(challenge, 'billingAccountId', _.get(challenge, 'billing.billingAccountId', _.toString(defaultValue))) +} + +/** + * Extract markup + * @param {Object} challenge the challenge object + * @param {Any} defaultValue the default value + */ +function extractMarkup (challenge, defaultValue) { + return _.toString(_.get(challenge, 'billing.markup', defaultValue)) +} + +/** + * Extract Admin Fee + * @param {Object} challenge the challenge object + * @param {Any} defaultValue the default value + */ +function extractAdminFee (challenge, defaultValue) { + // TODO for now just return 0 + return 
_.toString(_.get(challenge, 0, defaultValue)) +} + +/** + * Extract submission limit + * @param {Object} challenge the challenge object + * @param {Any} defaultValue the default value + */ +function extractSubmissionLimit (challenge, defaultValue) { + const entry = getMeta(challenge.metadata, 'submissionLimit') + if (!entry) return _.toString(defaultValue) + try { + const parsedEntryValue = JSON.parse(entry.value) + if (parsedEntryValue.limit) { + entry.value = parsedEntryValue.count + } else { + entry.value = null + } + } catch (e) { + entry.value = null + } + return _.toString(entry.value || defaultValue) +} + +/** + * Extract spec review cost + * @param {Object} challenge the challenge object + * @param {Any} defaultValue the default value + */ +function extractSpecReviewCost (challenge, defaultValue) { + return _.get(_.find(_.get(challenge, 'prizeSets', []), p => p.type === 'specReviewer') || {}, 'prizes[0].value', _.toString(defaultValue)) +} + +/** + * Extract DR points + * @param {Object} challenge the challenge object + * @param {Any} defaultValue the default value + */ +function extractDrPoints (challenge, defaultValue) { + const entry = getMeta(challenge.metadata, 'drPoints') + if (!entry) return _.toString(defaultValue) + return _.toString(entry.value || defaultValue) +} + +/** + * Extract Approval required + * @param {Object} challenge the challenge object + * @param {Any} defaultValue the default value + */ +function extractApprovalRequired (challenge, defaultValue) { + const entry = getMeta(challenge.metadata, 'approvalRequired') + if (!entry) return _.toString(defaultValue) + return _.toString(entry.value) +} + +/** + * Extract Post-mortem required + * @param {Object} challenge the challenge object + * @param {Any} defaultValue the default value + */ +function extractPostMortemRequired (challenge, defaultValue) { + const entry = getMeta(challenge.metadata, 'postMortemRequired') + if (!entry) return _.toString(defaultValue) + return 
_.toString(entry.value) +} + +/** + * Extract track late deliverables required + * @param {Object} challenge the challenge object + * @param {Any} defaultValue the default value + */ +function extractTrackLateDeliverablesRequired (challenge, defaultValue) { + const entry = getMeta(challenge.metadata, 'trackLateDeliverables') + if (!entry) return _.toString(defaultValue) + return _.toString(entry.value) +} + +/** + * Extract allow stock art required + * @param {Object} challenge the challenge object + * @param {Any} defaultValue the default value + */ +function extractAllowStockArtRequired (challenge, defaultValue) { + const entry = getMeta(challenge.metadata, 'allowStockArt') + if (!entry) return _.toString(defaultValue) + return _.toString(entry.value) +} + +/** + * Extract submission viewable + * @param {Object} challenge the challenge object + * @param {Any} defaultValue the default value + */ +function extractSubmissionViewable (challenge, defaultValue) { + const entry = getMeta(challenge.metadata, 'submissionViewable') + if (!entry) return _.toString(defaultValue) + return _.toString(entry.value) +} + +/** + * Extract review feedback + * @param {Object} challenge the challenge object + * @param {Any} defaultValue the default value + */ +function extractReviewFeedback (challenge, defaultValue) { + const entry = getMeta(challenge.metadata, 'reviewFeedback') + if (!entry) return _.toString(defaultValue) + return _.toString(entry.value) +} + +/** + * Extract environment + * @param {Object} challenge the challenge object + * @param {Any} defaultValue the default value + */ +function extractEnvironment (challenge, defaultValue) { + const entry = getMeta(challenge.metadata, 'environment') + if (!entry) return _.toString(defaultValue) + return _.toString(entry.value) +} + +/** + * Extract code repo + * @param {Object} challenge the challenge object + * @param {Any} defaultValue the default value + */ +function extractCodeRepo (challenge, defaultValue) { + const entry 
= getMeta(challenge.metadata, 'codeRepo') + if (!entry) return _.toString(defaultValue) + return _.toString(entry.value) +} + +/** + * Extract estimate effort hours + * @param {Object} challenge the challenge object + * @param {Any} defaultValue the default value + */ +function extractEstimateEffortHours (challenge, defaultValue) { + const entry = getMeta(challenge.metadata, 'effortHoursEstimate') + if (!entry) return _.toString(defaultValue) + return _.toNumber(entry.value) +} + +/** + * Extract estimate effort days offshore + * @param {Object} challenge the challenge object + * @param {Any} defaultValue the default value + */ +function extractEstimateEffortOffshore (challenge, defaultValue) { + const entry = getMeta(challenge.metadata, 'offshoreEfforts') + if (!entry) return _.toString(defaultValue) + return _.toNumber(entry.value) +} + +/** + * Extract estimate effort days Onsite + * @param {Object} challenge the challenge object + * @param {Any} defaultValue the default value + */ +function extractEstimateEffortOnsite (challenge, defaultValue) { + const entry = getMeta(challenge.metadata, 'onsiteEfforts') + if (!entry) return _.toString(defaultValue) + return _.toNumber(entry.value) +} + +module.exports = { + extractMarkup, + extractAdminFee, + extractBillingProject, + extractSubmissionLimit, + extractSpecReviewCost, + extractDrPoints, + extractApprovalRequired, + extractPostMortemRequired, + extractTrackLateDeliverablesRequired, + extractAllowStockArtRequired, + extractSubmissionViewable, + extractReviewFeedback, + extractEnvironment, + extractCodeRepo, + extractEstimateEffortHours, + extractEstimateEffortOffshore, + extractEstimateEffortOnsite +}