diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 2c5b122a2..8c4b35ebb 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -5,27 +5,32 @@ env: jobs: test: name: Test - env: - pg-version: "14" - postgis-version: "3.2" - redis-version: "6.2" runs-on: ubuntu-latest + + services: + redis: + image: redis:6.2-alpine + ports: + - 6379:6379 + postgres: + image: postgis/postgis:14-3.3-alpine + env: + POSTGRES_PASSWORD: temba + ports: + - 5432:5432 + options: --name textit-postgres-1 --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 + elastic: + image: elasticsearch:7.17.9 + ports: + - 9200:9200 + - 9300:9300 + env: + discovery.type: single-node + steps: - name: Checkout code uses: actions/checkout@v3 - - name: Install Redis - uses: zhulik/redis-action@v1.0.0 - with: - redis version: ${{ env.redis-version }} - - - name: Install PostgreSQL - uses: nyaruka/postgis-action@v2 - with: - postgresql version: ${{ env.pg-version }} - postgis version: ${{ env.postgis-version }} - postgresql password: temba - - name: Install Linux packages run: | sudo apt-get update @@ -51,7 +56,7 @@ jobs: - name: Upload coverage if: success() - uses: codecov/codecov-action@v2 + uses: codecov/codecov-action@v3 with: token: ${{ secrets.CODECOV_TOKEN }} fail_ci_if_error: true @@ -64,6 +69,8 @@ jobs: steps: - name: Checkout code uses: actions/checkout@v3 + with: + fetch-depth: 0 - name: Fetch GoFlow docs # for backward compatibility, English docs are copied to root of docs directory @@ -78,19 +85,17 @@ jobs: go-version: ${{ env.go-version }} - name: Publish release - uses: goreleaser/goreleaser-action@v1 + uses: goreleaser/goreleaser-action@v4 if: ${{ !contains(github.ref, '-') }} with: - version: v0.147.2 - args: release --rm-dist + args: release --clean env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Publish non-master release - uses: goreleaser/goreleaser-action@v1 + uses: goreleaser/goreleaser-action@v4 if: 
contains(github.ref, '-') with: - version: v0.147.2 - args: release --rm-dist --skip-validate + args: release --clean --skip-validate env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/cla.yml b/.github/workflows/cla.yml deleted file mode 100644 index 190a3fa1a..000000000 --- a/.github/workflows/cla.yml +++ /dev/null @@ -1,25 +0,0 @@ -name: "CLA Assistant" -on: - issue_comment: - types: [created] - pull_request_target: - types: [opened,closed,synchronize] - -jobs: - CLAssistant: - runs-on: ubuntu-latest - steps: - - name: "CLA Assistant" - if: (github.event.comment.body == 'recheck' || github.event.comment.body == 'I have read the CLA Document and I hereby sign the CLA') || github.event_name == 'pull_request_target' - # Beta Release - uses: contributor-assistant/github-action@v2.2.0 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - PERSONAL_ACCESS_TOKEN : ${{ secrets.CLA_TOKEN }} - with: - path-to-signatures: 'signatures/version1/cla.json' - path-to-document: 'https://github.com/nyaruka/license/blob/main/TextIt_CLA.md' - branch: 'main' - allowlist: bot* - remote-organization-name: 'nyaruka' - remote-repository-name: 'legal' diff --git a/CHANGELOG.md b/CHANGELOG.md index 953cc2b9f..ef1d8b0c0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,306 @@ +v8.2.0 (2023-07-31) +------------------------- + * Add dockerfile for dev + +v8.1.66 (2023-07-20) +------------------------- + * Update deps including gocommon which changes requirement for storage paths to start with slash + +v8.1.65 (2023-07-18) +------------------------- + * Limit how old surveyor submissions can be + +v8.1.64 (2023-07-10) +------------------------- + * Update goflow + +v8.1.63 (2023-07-03) +------------------------- + * Support requesting recordings for Twilio with basic auth + +v8.1.62 (2023-06-29) +------------------------- + * Fix session storage path generation + +v8.1.61 (2023-06-28) +------------------------- + * Write channel logs with channels/ key prefex + 
+v8.1.60 (2023-06-28) +------------------------- + * Tweak channel log creation to prevent nil slices + +v8.1.59 (2023-06-28) +------------------------- + * Update README + +v8.1.58 (2023-06-28) +------------------------- + * Rename sessions bucket config setting for clarity and remove unused sessions prefix setting + * Write attached call logs only to S3 + +v8.1.57 (2023-06-20) +------------------------- + * Fix redaction of twiml IVR channel logs + +v8.1.56 (2023-06-08) +------------------------- + * Support importing of contacts with non-active statuses + * Use the user that created an import when applying its modifiers + +v8.1.55 (2023-06-05) +------------------------- + * Stop writing ChannelLog.call + * Stop returning sample contacts on preview endpoints which now only need to return total count + +v8.1.54 (2023-05-25) +------------------------- + * Add endpoint to generate broadcast preview + +v8.1.53 (2023-05-25) +------------------------- + * Rework firing campaign events so that skipping happens outside of runner + +v8.1.52 (2023-05-24) +------------------------- + * Update to latest goflow + +v8.1.51 (2023-05-24) +------------------------- + * Remove applying started-previously exclusion in runner now that it's applied at batch creation stage + * Refresh elastic indexes after changes in tests instead of waiting for a second + * Optimize case when recipients is only specific contacts and no exclusions + * Rework ResolveRecipients to use elastic + +v8.1.50 (2023-05-23) +------------------------- + * Remove support for passing URNs to flow/preview_start as that's not a thing we do + * Make the name of the ES index for contacts configurable + +v8.1.49 (2023-05-18) +------------------------- + * Remove support for ticket assignment with a note + * Add contact/bulk_create endpoint + +v8.1.48 (2023-05-15) +------------------------- + * Fix loading of scheduled triggers + * Update test database + +v8.1.47 (2023-05-11) +------------------------- + * Still queue a 
courier message even if fetching the flow fails + * Stop writing old FlowStart fields + +v8.1.46 (2023-05-10) +------------------------- + * Update to latest null library + * Read from new flow start fields + +v8.1.45 (2023-05-09) +------------------------- + * Always write new FlowStart fields + * Flow start batches should read from exclusions and remove legacy fields + +v8.1.44 (2023-05-08) +------------------------- + * Start writing exclusions blob on start batch tasks + +v8.1.43 (2023-05-08) +------------------------- + * Add contact locking to ticket/reopen endpoint + +v8.1.42 (2023-05-03) +------------------------- + * Update to latest goflow which fixes parsing locations with non-ASCII chars + +v8.1.41 (2023-05-01) +------------------------- + * Add contact locking to modify endpoint + +v8.1.40 (2023-05-01) +------------------------- + * Add context paramter to LockContacts so it can error if context is done + +v8.1.39 (2023-04-27) +------------------------- + * Refactor how we lock and unlock contacts + +v8.1.38 (2023-04-27) +------------------------- + * Handled incoming messages should be associated with any open ticket + * Only load the last opened open ticket for a contact + +v8.1.37 (2023-04-20) +------------------------- + * Add contact/inspect endpoint to return all URNs with channel if there is one + +v8.1.36 (2023-04-19) +------------------------- + * Fix not queuing chat messages as high priority and add contact_last_seen_on + * Use services for github actions + +v8.1.35 (2023-04-18) +------------------------- + * Fix goreleaser changelog generation and use latest action + +v8.1.34 (2023-04-17) +------------------------- + * Add ticket_id to msg and use to set origin on messages queued to courier + * Remove fields from courier payload that it doesn't use + +v8.1.33 (2023-04-13) +------------------------- + * Use envelope struct for marshalling courier messages and remove unused fields + +v8.1.32 (2023-04-03) +------------------------- + * Fix not 
logging bodies of incoming IVR requests + +v8.1.31 (2023-03-16) +------------------------- + * Remove no longer used exit type constants + * Remove support for broadcasts with an associated ticket + +v8.1.30 (2023-03-14) +------------------------- + * Bump courier http client timeout + * Use Org.config and Channel.config as JSONB columns + * Fix YYYY-MM-DD date formats + +v8.1.29 (2023-03-13) +------------------------- + * Don't set msg_type when handling messages as courier is already setting it + +v8.1.28 (2023-03-08) +------------------------- + * Remove msg_type values INBOX and FLOW + * Re-organize web endpoints so each endpoint is in its own file + +v8.1.27 (2023-03-06) +------------------------- + * Add Msg.created_by and populate for chat and broadcast messages + +v8.1.26 (2023-02-27) +------------------------- + * Update goflow + * Improve detection of repeated outgoing messages + +v8.1.25 (2023-02-22) +------------------------- + * Support Msg.status = I for outgoing messages that should be retried + +v8.1.24 (2023-02-22) +------------------------- + * Update to latest goflow + +v8.1.23 (2023-02-20) +------------------------- + * Use msg_type = T|V for outgoing messages + +v8.1.22 (2023-02-16) +------------------------- + * Use generics to remove repeated code in server endpoints + +v8.1.21 (2023-02-15) +------------------------- + * Cleanup server and http wrappers + +v8.1.20 (2023-02-15) +------------------------- + * Add endpoint to send a single message + * Cleanup broadcasts and starts + * Update test database + +v8.1.19 (2023-02-13) +------------------------- + * Stop writing Broadcast.send_all + +v8.1.18 (2023-02-13) +------------------------- + * Update to latest goflow + * Support contact query based broadcasts by consolidating broadcast and flow start task code + * Remove support for sending broadcasts to specific URNs + +v8.1.17 (2023-02-09) +------------------------- + * Update how we create messages from broadcasts and resolve translations + 
+v8.1.16 (2023-02-07) +------------------------- + * Update to latest goflow + +v8.1.15 (2023-02-07) +------------------------- + * Refactor so that web doesn't import testsuite + * Test queuing and popping of start flow tasks + * Convert FlowStart to basic struct for simpler marshalling etc + +v8.1.14 (2023-02-06) +------------------------- + * Simplify FlowStartBatch + +v8.1.13 (2023-02-06) +------------------------- + * Fix unmarshalling start tasks + +v8.1.12 (2023-02-06) +------------------------- + * Refactor tasks + +v8.1.11 (2023-02-02) +------------------------- + * Stop writing quick replies to metadata and fix not writing them to the db + +v8.1.10 (2023-02-02) +------------------------- + * Fix test + +v8.1.9 (2023-02-02) +------------------------- + * Update to latest goflow which updates ANTLR + * Ensure quick replies are included with retries and resends + +v8.1.8 (2023-02-02) +------------------------- + * Start writing Msg.quick_replies as well as writing them to Msg.metadata + +v8.1.7 (2023-02-01) +------------------------- + * Don't send machine_detection param to Nexmo if empty + +v8.1.6 (2023-02-01) +------------------------- + * Update to nyaruka/null v2 and validator v10 + +v8.1.5 (2023-01-31) +------------------------- + * Rework more task types to use tasks package + * Stop adding language and country to msg.metadata.templating + +v8.1.4 (2023-01-26) +------------------------- + * Start writing msgs_msg.locale + +v8.1.3 (2023-01-24) +------------------------- + * Update test database + * Stop writing msgs_broadcast.text + +v8.1.2 (2023-01-24) +------------------------- + * Stop reading from Broadcast.text + +v8.1.1 (2023-01-19) +------------------------- + * Write new translations JSONB column when saving child broadcasts + * Remove support for legacy expressions in broadcasts + +v8.1.0 (2023-01-18) +------------------------- + * Update to latest goflow which moves to flow spec version 13.2 + * Tweak fetching contacts eligible for a new 
campaign event + v8.0.0 (2023-01-09) ------------------------- * Update test database to latest schema diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 000000000..8b6b4efce --- /dev/null +++ b/Dockerfile @@ -0,0 +1,12 @@ +FROM golang:1.20 + +WORKDIR /usr/src/app + +# pre-copy/cache go.mod for pre-downloading dependencies and only redownloading them in subsequent builds if they change +COPY go.mod go.sum ./ +RUN go mod download && go mod verify + +COPY . . +RUN go build -v -o /usr/local/bin/app github.com/nyaruka/mailroom/cmd/mailroom + +CMD ["app"] \ No newline at end of file diff --git a/cmd/mailroom/main.go b/cmd/mailroom/main.go index 9216669e3..247523f25 100644 --- a/cmd/mailroom/main.go +++ b/cmd/mailroom/main.go @@ -34,7 +34,6 @@ import ( _ "github.com/nyaruka/mailroom/services/tickets/zendesk" _ "github.com/nyaruka/mailroom/web/contact" _ "github.com/nyaruka/mailroom/web/docs" - _ "github.com/nyaruka/mailroom/web/expression" _ "github.com/nyaruka/mailroom/web/flow" _ "github.com/nyaruka/mailroom/web/ivr" _ "github.com/nyaruka/mailroom/web/msg" diff --git a/core/goflow/engine_test.go b/core/goflow/engine_test.go index 18e64d105..36fa402b6 100644 --- a/core/goflow/engine_test.go +++ b/core/goflow/engine_test.go @@ -18,7 +18,7 @@ import ( ) func TestEngineWebhook(t *testing.T) { - _, rt, _, _ := testsuite.Get() + _, rt := testsuite.Runtime() svc, err := goflow.Engine(rt.Config).Services().Webhook(nil) assert.NoError(t, err) @@ -40,7 +40,7 @@ func TestEngineWebhook(t *testing.T) { } func TestSimulatorAirtime(t *testing.T) { - _, rt, _, _ := testsuite.Get() + _, rt := testsuite.Runtime() svc, err := goflow.Simulator(rt.Config).Services().Airtime(nil) assert.NoError(t, err) @@ -60,9 +60,9 @@ func TestSimulatorAirtime(t *testing.T) { } func TestSimulatorTicket(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() - ticketer, err := models.LookupTicketerByUUID(ctx, db, testdata.Mailgun.UUID) + ticketer, err := 
models.LookupTicketerByUUID(ctx, rt.DB, testdata.Mailgun.UUID) require.NoError(t, err) svc, err := goflow.Simulator(rt.Config).Services().Ticket(flows.NewTicketer(ticketer)) @@ -78,7 +78,7 @@ func TestSimulatorTicket(t *testing.T) { } func TestSimulatorWebhook(t *testing.T) { - _, rt, _, _ := testsuite.Get() + _, rt := testsuite.Runtime() svc, err := goflow.Simulator(rt.Config).Services().Webhook(nil) assert.NoError(t, err) diff --git a/core/goflow/flows_test.go b/core/goflow/flows_test.go index 22f3772fe..5598fff3d 100644 --- a/core/goflow/flows_test.go +++ b/core/goflow/flows_test.go @@ -16,11 +16,11 @@ import ( ) func TestSpecVersion(t *testing.T) { - assert.Equal(t, semver.MustParse("13.1.0"), goflow.SpecVersion()) + assert.Equal(t, semver.MustParse("13.2.0"), goflow.SpecVersion()) } func TestReadFlow(t *testing.T) { - _, rt, _, _ := testsuite.Get() + _, rt := testsuite.Runtime() // try to read empty definition flow, err := goflow.ReadFlow(rt.Config, []byte(`{}`)) @@ -53,10 +53,15 @@ func TestCloneDefinition(t *testing.T) { } func TestMigrateDefinition(t *testing.T) { - _, rt, _, _ := testsuite.Get() + _, rt := testsuite.Runtime() // 13.0 > 13.1 - migrated, err := goflow.MigrateDefinition(rt.Config, []byte(`{"uuid": "502c3ee4-3249-4dee-8e71-c62070667d52", "name": "New", "spec_version": "13.0.0", "type": "messaging", "language": "eng", "nodes": []}`), semver.MustParse("13.1.0")) + migrated, err := goflow.MigrateDefinition(rt.Config, []byte(`{"uuid": "502c3ee4-3249-4dee-8e71-c62070667d52", "name": "New", "spec_version": "13.0.0", "type": "messaging", "language": "base", "nodes": []}`), semver.MustParse("13.1.0")) assert.NoError(t, err) - test.AssertEqualJSON(t, []byte(`{"uuid": "502c3ee4-3249-4dee-8e71-c62070667d52", "name": "New", "spec_version": "13.1.0", "type": "messaging", "language": "eng", "nodes": []}`), migrated) + test.AssertEqualJSON(t, []byte(`{"uuid": "502c3ee4-3249-4dee-8e71-c62070667d52", "name": "New", "spec_version": "13.1.0", "type": 
"messaging", "language": "base", "nodes": []}`), migrated) + + // 13.1 > 13.2 + migrated, err = goflow.MigrateDefinition(rt.Config, migrated, semver.MustParse("13.2.0")) + assert.NoError(t, err) + test.AssertEqualJSON(t, []byte(`{"uuid": "502c3ee4-3249-4dee-8e71-c62070667d52", "name": "New", "spec_version": "13.2.0", "type": "messaging", "language": "und", "nodes": []}`), migrated) } diff --git a/core/goflow/modifiers_test.go b/core/goflow/modifiers_test.go index cb7589c99..1792988d6 100644 --- a/core/goflow/modifiers_test.go +++ b/core/goflow/modifiers_test.go @@ -13,7 +13,7 @@ import ( ) func TestReadModifiers(t *testing.T) { - ctx, rt, _, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() oa, err := models.GetOrgAssets(ctx, rt, testdata.Org1.ID) assert.NoError(t, err) diff --git a/core/handlers/airtime_transferred_test.go b/core/handlers/airtime_transferred_test.go index cb9ed16f2..f020c6424 100644 --- a/core/handlers/airtime_transferred_test.go +++ b/core/handlers/airtime_transferred_test.go @@ -267,7 +267,7 @@ var transactionRejectedResponse = `{ }` func TestAirtimeTransferred(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetAll) defer httpx.SetRequestor(httpx.DefaultRequestor) @@ -289,7 +289,7 @@ func TestAirtimeTransferred(t *testing.T) { }, })) - db.MustExec(`UPDATE orgs_org SET config = '{"dtone_key": "key123", "dtone_secret": "sesame"}'::jsonb WHERE id = $1`, testdata.Org1.ID) + rt.DB.MustExec(`UPDATE orgs_org SET config = '{"dtone_key": "key123", "dtone_secret": "sesame"}'::jsonb WHERE id = $1`, testdata.Org1.ID) tcs := []handlers.TestCase{ { diff --git a/core/handlers/base_test.go b/core/handlers/base_test.go index 307157399..db7ce2d5b 100644 --- a/core/handlers/base_test.go +++ b/core/handlers/base_test.go @@ -183,22 +183,24 @@ func RunTestCases(t *testing.T, ctx context.Context, rt *runtime.Runtime, tcs [] flow, err := oa.FlowByUUID(flowUUID) require.NoError(t, err) - options 
:= runner.NewStartOptions() - options.CommitHook = func(ctx context.Context, tx *sqlx.Tx, rp *redis.Pool, oa *models.OrgAssets, session []*models.Session) error { - for _, s := range session { - msg := msgsByContactID[s.ContactID()] - if msg != nil { - s.SetIncomingMsg(models.MsgID(msg.ID()), "") + options := &runner.StartOptions{ + Interrupt: true, + TriggerBuilder: func(contact *flows.Contact) flows.Trigger { + msg := msgsByContactID[models.ContactID(contact.ID())] + if msg == nil { + return triggers.NewBuilder(oa.Env(), testFlow.Reference(false), contact).Manual().Build() } - } - return nil - } - options.TriggerBuilder = func(contact *flows.Contact) flows.Trigger { - msg := msgsByContactID[models.ContactID(contact.ID())] - if msg == nil { - return triggers.NewBuilder(oa.Env(), testFlow.Reference(false), contact).Manual().Build() - } - return triggers.NewBuilder(oa.Env(), testFlow.Reference(false), contact).Msg(msg).Build() + return triggers.NewBuilder(oa.Env(), testFlow.Reference(false), contact).Msg(msg).Build() + }, + CommitHook: func(ctx context.Context, tx *sqlx.Tx, rp *redis.Pool, oa *models.OrgAssets, session []*models.Session) error { + for _, s := range session { + msg := msgsByContactID[s.ContactID()] + if msg != nil { + s.SetIncomingMsg(models.MsgID(msg.ID()), "") + } + } + return nil + }, } for _, c := range []*testdata.Contact{testdata.Cathy, testdata.Bob, testdata.George, testdata.Alexandria} { diff --git a/core/handlers/broadcast_created_test.go b/core/handlers/broadcast_created_test.go index 120c16211..fb822f86a 100644 --- a/core/handlers/broadcast_created_test.go +++ b/core/handlers/broadcast_created_test.go @@ -18,7 +18,7 @@ import ( ) func TestBroadcastCreated(t *testing.T) { - ctx, rt, _, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetAll) @@ -49,9 +49,9 @@ func TestBroadcastCreated(t *testing.T) { bcast := models.Broadcast{} err = json.Unmarshal(task.Task, &bcast) assert.NoError(t, err) - 
assert.Nil(t, bcast.ContactIDs()) - assert.Nil(t, bcast.GroupIDs()) - assert.Equal(t, 1, len(bcast.URNs())) + assert.Nil(t, bcast.ContactIDs) + assert.Nil(t, bcast.GroupIDs) + assert.Equal(t, 1, len(bcast.URNs)) return nil }, }, diff --git a/core/handlers/campaigns_test.go b/core/handlers/campaigns_test.go index d0372c5d5..c549db2b2 100644 --- a/core/handlers/campaigns_test.go +++ b/core/handlers/campaigns_test.go @@ -13,7 +13,7 @@ import ( ) func TestCampaigns(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetAll) @@ -21,17 +21,17 @@ func TestCampaigns(t *testing.T) { joined := assets.NewFieldReference("joined", "Joined") // insert an event on our campaign that is based on created_on - testdata.InsertCampaignFlowEvent(db, testdata.RemindersCampaign, testdata.Favorites, testdata.CreatedOnField, 1000, "W") + testdata.InsertCampaignFlowEvent(rt, testdata.RemindersCampaign, testdata.Favorites, testdata.CreatedOnField, 1000, "W") // insert an event on our campaign that is based on last_seen_on - testdata.InsertCampaignFlowEvent(db, testdata.RemindersCampaign, testdata.Favorites, testdata.LastSeenOnField, 2, "D") + testdata.InsertCampaignFlowEvent(rt, testdata.RemindersCampaign, testdata.Favorites, testdata.LastSeenOnField, 2, "D") // init their values - db.MustExec( + rt.DB.MustExec( `update contacts_contact set fields = fields - '8c1c1256-78d6-4a5b-9f1c-1761d5728251' WHERE id = $1`, testdata.Cathy.ID) - db.MustExec( + rt.DB.MustExec( `update contacts_contact set fields = fields || '{"8c1c1256-78d6-4a5b-9f1c-1761d5728251": { "text": "2029-09-15T12:00:00+00:00", "datetime": "2029-09-15T12:00:00+00:00" }}'::jsonb WHERE id = $1`, testdata.Bob.ID) diff --git a/core/handlers/contact_field_changed_test.go b/core/handlers/contact_field_changed_test.go index 10c7537c2..174bc6ff2 100644 --- a/core/handlers/contact_field_changed_test.go +++ b/core/handlers/contact_field_changed_test.go @@ -12,7 +12,7 @@ 
import ( ) func TestContactFieldChanged(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetAll) @@ -20,7 +20,7 @@ func TestContactFieldChanged(t *testing.T) { age := assets.NewFieldReference("age", "Age") // populate some field values on alexandria - db.MustExec(`UPDATE contacts_contact SET fields = '{"903f51da-2717-47c7-a0d3-f2f32877013d": {"text":"34"}, "3a5891e4-756e-4dc9-8e12-b7a766168824": {"text":"female"}}' WHERE id = $1`, testdata.Alexandria.ID) + rt.DB.MustExec(`UPDATE contacts_contact SET fields = '{"903f51da-2717-47c7-a0d3-f2f32877013d": {"text":"34"}, "3a5891e4-756e-4dc9-8e12-b7a766168824": {"text":"female"}}' WHERE id = $1`, testdata.Alexandria.ID) tcs := []handlers.TestCase{ { diff --git a/core/handlers/contact_groups_changed_test.go b/core/handlers/contact_groups_changed_test.go index 7adb4d7a6..1368c2edf 100644 --- a/core/handlers/contact_groups_changed_test.go +++ b/core/handlers/contact_groups_changed_test.go @@ -12,7 +12,7 @@ import ( ) func TestContactGroupsChanged(t *testing.T) { - ctx, rt, _, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetAll) diff --git a/core/handlers/contact_language_changed_test.go b/core/handlers/contact_language_changed_test.go index c00353ca1..c24ef5b66 100644 --- a/core/handlers/contact_language_changed_test.go +++ b/core/handlers/contact_language_changed_test.go @@ -11,7 +11,7 @@ import ( ) func TestContactLanguageChanged(t *testing.T) { - ctx, rt, _, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetAll) diff --git a/core/handlers/contact_name_changed_test.go b/core/handlers/contact_name_changed_test.go index bc616e0d9..40f46397f 100644 --- a/core/handlers/contact_name_changed_test.go +++ b/core/handlers/contact_name_changed_test.go @@ -11,7 +11,7 @@ import ( ) func TestContactNameChanged(t *testing.T) { - ctx, rt, _, _ := testsuite.Get() + ctx, rt := 
testsuite.Runtime() defer testsuite.Reset(testsuite.ResetAll) diff --git a/core/handlers/contact_status_changed_test.go b/core/handlers/contact_status_changed_test.go index 05c7aad89..10714cab9 100644 --- a/core/handlers/contact_status_changed_test.go +++ b/core/handlers/contact_status_changed_test.go @@ -11,7 +11,7 @@ import ( ) func TestContactStatusChanged(t *testing.T) { - ctx, rt, _, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetData) diff --git a/core/handlers/contact_urns_changed_test.go b/core/handlers/contact_urns_changed_test.go index 3bd8bd021..928448104 100644 --- a/core/handlers/contact_urns_changed_test.go +++ b/core/handlers/contact_urns_changed_test.go @@ -12,12 +12,12 @@ import ( ) func TestContactURNsChanged(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetAll) // add a URN to george that cathy will steal - testdata.InsertContactURN(db, testdata.Org1, testdata.George, urns.URN("tel:+12065551212"), 100) + testdata.InsertContactURN(rt, testdata.Org1, testdata.George, urns.URN("tel:+12065551212"), 100) tcs := []handlers.TestCase{ { diff --git a/core/handlers/flow_entered_test.go b/core/handlers/flow_entered_test.go index f30f2fc42..a795fb023 100644 --- a/core/handlers/flow_entered_test.go +++ b/core/handlers/flow_entered_test.go @@ -12,7 +12,7 @@ import ( ) func TestFlowEntered(t *testing.T) { - ctx, rt, _, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetAll) diff --git a/core/handlers/input_labels_added_test.go b/core/handlers/input_labels_added_test.go index 0b91db956..dfd602ef9 100644 --- a/core/handlers/input_labels_added_test.go +++ b/core/handlers/input_labels_added_test.go @@ -14,15 +14,15 @@ import ( ) func TestInputLabelsAdded(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetAll) reporting := 
assets.NewLabelReference(assets.LabelUUID("ebc4dedc-91c4-4ed4-9dd6-daa05ea82698"), "Reporting") testing := assets.NewLabelReference(assets.LabelUUID("a6338cdc-7938-4437-8b05-2d5d785e3a08"), "Testing") - msg1 := testdata.InsertIncomingMsg(db, testdata.Org1, testdata.TwilioChannel, testdata.Cathy, "start", models.MsgStatusHandled) - msg2 := testdata.InsertIncomingMsg(db, testdata.Org1, testdata.TwilioChannel, testdata.Bob, "start", models.MsgStatusHandled) + msg1 := testdata.InsertIncomingMsg(rt, testdata.Org1, testdata.TwilioChannel, testdata.Cathy, "start", models.MsgStatusHandled) + msg2 := testdata.InsertIncomingMsg(rt, testdata.Org1, testdata.TwilioChannel, testdata.Bob, "start", models.MsgStatusHandled) tcs := []handlers.TestCase{ { diff --git a/core/handlers/msg_created_test.go b/core/handlers/msg_created_test.go index c15216e6e..3e7503041 100644 --- a/core/handlers/msg_created_test.go +++ b/core/handlers/msg_created_test.go @@ -19,7 +19,7 @@ import ( ) func TestMsgCreated(t *testing.T) { - ctx, rt, db, rp := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetAll) @@ -27,16 +27,16 @@ func TestMsgCreated(t *testing.T) { defer func() { rt.Config.AttachmentDomain = "" }() // add a URN for cathy so we can test all urn sends - testdata.InsertContactURN(db, testdata.Org1, testdata.Cathy, urns.URN("tel:+12065551212"), 10) + testdata.InsertContactURN(rt, testdata.Org1, testdata.Cathy, urns.URN("tel:+12065551212"), 10) // delete all URNs for bob - db.MustExec(`DELETE FROM contacts_contacturn WHERE contact_id = $1`, testdata.Bob.ID) + rt.DB.MustExec(`DELETE FROM contacts_contacturn WHERE contact_id = $1`, testdata.Bob.ID) // change alexandrias URN to a twitter URN and set her language to eng so that a template gets used for her - db.MustExec(`UPDATE contacts_contacturn SET identity = 'twitter:12345', path='12345', scheme='twitter' WHERE contact_id = $1`, testdata.Alexandria.ID) - db.MustExec(`UPDATE contacts_contact SET language='eng' 
WHERE id = $1`, testdata.Alexandria.ID) + rt.DB.MustExec(`UPDATE contacts_contacturn SET identity = 'twitter:12345', path='12345', scheme='twitter' WHERE contact_id = $1`, testdata.Alexandria.ID) + rt.DB.MustExec(`UPDATE contacts_contact SET language='eng' WHERE id = $1`, testdata.Alexandria.ID) - msg1 := testdata.InsertIncomingMsg(db, testdata.Org1, testdata.TwilioChannel, testdata.Cathy, "start", models.MsgStatusHandled) + msg1 := testdata.InsertIncomingMsg(rt, testdata.Org1, testdata.TwilioChannel, testdata.Cathy, "start", models.MsgStatusHandled) templateAction := actions.NewSendMsg(handlers.NewActionUUID(), "Template time", nil, nil, false) templateAction.Templating = &actions.Templating{ @@ -66,8 +66,8 @@ func TestMsgCreated(t *testing.T) { }, SQLAssertions: []handlers.SQLAssertion{ { - SQL: "SELECT COUNT(*) FROM msgs_msg WHERE text='Hello World' AND contact_id = $1 AND metadata = $2 AND high_priority = TRUE", - Args: []interface{}{testdata.Cathy.ID, `{"quick_replies":["yes","no"]}`}, + SQL: `SELECT COUNT(*) FROM msgs_msg WHERE text='Hello World' AND contact_id = $1 AND quick_replies[1] = 'yes' AND quick_replies[2] = 'no' AND high_priority = TRUE`, + Args: []interface{}{testdata.Cathy.ID}, Count: 2, }, { @@ -85,7 +85,7 @@ func TestMsgCreated(t *testing.T) { Args: []interface{}{ testdata.Alexandria.ID, `Hi Alexandia, are you still experiencing problems with tooth?`, - `{"templating":{"template":{"uuid":"9c22b594-fcab-4b29-9bcb-ce4404894a80","name":"revive_issue"},"language":"eng","country":"US","variables":["Alexandia","tooth"],"namespace":"2d40b45c_25cd_4965_9019_f05d0124c5fa"}}`, + `{"templating":{"template":{"uuid":"9c22b594-fcab-4b29-9bcb-ce4404894a80","name":"revive_issue"},"variables":["Alexandia","tooth"],"namespace":"2d40b45c_25cd_4965_9019_f05d0124c5fa"}}`, testdata.TwitterChannel.ID, }, Count: 1, @@ -96,7 +96,7 @@ func TestMsgCreated(t *testing.T) { handlers.RunTestCases(t, ctx, rt, tcs) - rc := rp.Get() + rc := rt.RP.Get() defer rc.Close() // Cathy 
should have 1 batch of queued messages at high priority @@ -111,20 +111,20 @@ func TestMsgCreated(t *testing.T) { } func TestNewURN(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetAll) // switch our twitter channel to telegram telegramUUID := testdata.TwitterChannel.UUID telegramID := testdata.TwitterChannel.ID - db.MustExec( + rt.DB.MustExec( `UPDATE channels_channel SET channel_type = 'TG', name = 'Telegram', schemes = ARRAY['telegram'] WHERE uuid = $1`, telegramUUID, ) // give George a URN that Bob will steal - testdata.InsertContactURN(db, testdata.Org1, testdata.George, urns.URN("telegram:67890"), 1) + testdata.InsertContactURN(rt, testdata.Org1, testdata.George, urns.URN("telegram:67890"), 1) tcs := []handlers.TestCase{ { diff --git a/core/handlers/msg_received.go b/core/handlers/msg_received.go index 66081a3b8..62e10d1e8 100644 --- a/core/handlers/msg_received.go +++ b/core/handlers/msg_received.go @@ -30,7 +30,7 @@ func handleMsgReceived(ctx context.Context, rt *runtime.Runtime, tx *sqlx.Tx, oa "urn": event.Msg.URN(), }).Debug("msg received event") - msg := models.NewIncomingMsg(rt.Config, oa.OrgID(), nil, scene.ContactID(), &event.Msg, event.CreatedOn()) + msg := models.NewIncomingSurveyorMsg(rt.Config, oa.OrgID(), nil, scene.ContactID(), &event.Msg, event.CreatedOn()) // we'll commit this message with all the others scene.AppendToEventPreCommitHook(hooks.CommitMessagesHook, msg) diff --git a/core/handlers/msg_received_test.go b/core/handlers/msg_received_test.go index d687db45d..2611f77b3 100644 --- a/core/handlers/msg_received_test.go +++ b/core/handlers/msg_received_test.go @@ -15,7 +15,7 @@ import ( ) func TestMsgReceived(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetAll) @@ -32,7 +32,7 @@ func TestMsgReceived(t *testing.T) { }, }, Msgs: handlers.ContactMsgMap{ - testdata.Cathy: 
testdata.InsertIncomingMsg(db, testdata.Org1, testdata.TwilioChannel, testdata.Cathy, "start", models.MsgStatusHandled), + testdata.Cathy: testdata.InsertIncomingMsg(rt, testdata.Org1, testdata.TwilioChannel, testdata.Cathy, "start", models.MsgStatusHandled), }, SQLAssertions: []handlers.SQLAssertion{ { diff --git a/core/handlers/service_called_test.go b/core/handlers/service_called_test.go index 3c721168c..2ae368649 100644 --- a/core/handlers/service_called_test.go +++ b/core/handlers/service_called_test.go @@ -13,7 +13,7 @@ import ( ) func TestServiceCalled(t *testing.T) { - ctx, rt, _, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetAll) defer httpx.SetRequestor(httpx.DefaultRequestor) diff --git a/core/handlers/session_triggered_test.go b/core/handlers/session_triggered_test.go index 58ff0dd01..1ca3abf9e 100644 --- a/core/handlers/session_triggered_test.go +++ b/core/handlers/session_triggered_test.go @@ -14,12 +14,11 @@ import ( "github.com/nyaruka/mailroom/runtime" "github.com/nyaruka/mailroom/testsuite" "github.com/nyaruka/mailroom/testsuite/testdata" - "github.com/stretchr/testify/assert" ) func TestSessionTriggered(t *testing.T) { - ctx, rt, _, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetAll) @@ -80,11 +79,11 @@ func TestSessionTriggered(t *testing.T) { start := models.FlowStart{} err = json.Unmarshal(task.Task, &start) assert.NoError(t, err) - assert.True(t, start.CreateContact()) - assert.Equal(t, []models.ContactID{testdata.George.ID}, start.ContactIDs()) - assert.Equal(t, []models.GroupID{testdata.TestersGroup.ID}, start.GroupIDs()) - assert.Equal(t, simpleFlow.ID(), start.FlowID()) - assert.JSONEq(t, `{"parent_uuid":"39a9f95e-3641-4d19-95e0-ed866f27c829", "ancestors":1, "ancestors_since_input":1}`, string(start.SessionHistory())) + assert.True(t, start.CreateContact) + assert.Equal(t, []models.ContactID{testdata.George.ID}, start.ContactIDs) + assert.Equal(t, 
[]models.GroupID{testdata.TestersGroup.ID}, start.GroupIDs) + assert.Equal(t, simpleFlow.ID(), start.FlowID) + assert.JSONEq(t, `{"parent_uuid":"39a9f95e-3641-4d19-95e0-ed866f27c829", "ancestors":1, "ancestors_since_input":1}`, string(start.SessionHistory)) return nil }, }, @@ -95,7 +94,7 @@ func TestSessionTriggered(t *testing.T) { } func TestQuerySessionTriggered(t *testing.T) { - ctx, rt, _, rp := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetAll) @@ -122,7 +121,7 @@ func TestQuerySessionTriggered(t *testing.T) { }, Assertions: []handlers.Assertion{ func(t *testing.T, rt *runtime.Runtime) error { - rc := rp.Get() + rc := rt.RP.Get() defer rc.Close() task, err := queue.PopNextTask(rc, queue.BatchQueue) @@ -131,11 +130,11 @@ func TestQuerySessionTriggered(t *testing.T) { start := models.FlowStart{} err = json.Unmarshal(task.Task, &start) assert.NoError(t, err) - assert.Equal(t, start.CreateContact(), true) - assert.Equal(t, 0, len(start.ContactIDs())) - assert.Equal(t, 0, len(start.GroupIDs())) - assert.Equal(t, `name ~ "Cathy"`, start.Query()) - assert.Equal(t, start.FlowID(), favoriteFlow.ID()) + assert.Equal(t, start.CreateContact, true) + assert.Len(t, start.ContactIDs, 0) + assert.Len(t, start.GroupIDs, 0) + assert.Equal(t, `name ~ "Cathy"`, string(start.Query)) + assert.Equal(t, start.FlowID, favoriteFlow.ID()) return nil }, }, diff --git a/core/handlers/ticket_opened_test.go b/core/handlers/ticket_opened_test.go index 3e6406215..909cb6e48 100644 --- a/core/handlers/ticket_opened_test.go +++ b/core/handlers/ticket_opened_test.go @@ -4,23 +4,18 @@ import ( "testing" "github.com/nyaruka/gocommon/httpx" - "github.com/nyaruka/gocommon/uuids" "github.com/nyaruka/goflow/assets" "github.com/nyaruka/goflow/flows" "github.com/nyaruka/goflow/flows/actions" "github.com/nyaruka/mailroom/core/handlers" - "github.com/nyaruka/mailroom/core/models" - "github.com/nyaruka/mailroom/testsuite" - 
"github.com/nyaruka/mailroom/testsuite/testdata" - _ "github.com/nyaruka/mailroom/services/tickets/mailgun" _ "github.com/nyaruka/mailroom/services/tickets/zendesk" - - "github.com/stretchr/testify/require" + "github.com/nyaruka/mailroom/testsuite" + "github.com/nyaruka/mailroom/testsuite/testdata" ) func TestTicketOpened(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetAll) defer httpx.SetRequestor(httpx.DefaultRequestor) @@ -44,13 +39,6 @@ func TestTicketOpened(t *testing.T) { }, })) - oa := testdata.Org1.Load(rt) - - // an existing ticket - cathyTicket := models.NewTicket(flows.TicketUUID(uuids.New()), testdata.Org1.ID, testdata.Admin.ID, models.NilFlowID, testdata.Cathy.ID, testdata.Mailgun.ID, "748363", testdata.DefaultTopic.ID, "Who?", models.NilUserID, nil) - err := models.InsertTickets(ctx, db, oa, []*models.Ticket{cathyTicket}) - require.NoError(t, err) - tcs := []handlers.TestCase{ { Actions: handlers.ContactActionMap{ @@ -79,7 +67,7 @@ func TestTicketOpened(t *testing.T) { { // cathy's old ticket will still be open and cathy's new ticket will have been created SQL: "select count(*) from tickets_ticket where contact_id = $1 AND status = 'O' AND ticketer_id = $2", Args: []interface{}{testdata.Cathy.ID, testdata.Mailgun.ID}, - Count: 2, + Count: 1, }, { // and there's an HTTP log for that SQL: "select count(*) from request_logs_httplog where ticketer_id = $1", diff --git a/core/handlers/webhook_called_test.go b/core/handlers/webhook_called_test.go index d520045ab..7d101e9af 100644 --- a/core/handlers/webhook_called_test.go +++ b/core/handlers/webhook_called_test.go @@ -26,7 +26,7 @@ import ( ) func TestWebhookCalled(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetAll) defer httpx.SetRequestor(httpx.DefaultRequestor) @@ -44,13 +44,13 @@ func TestWebhookCalled(t *testing.T) { })) // add a few resthooks - 
db.MustExec(`INSERT INTO api_resthook(is_active, slug, org_id, created_on, modified_on, created_by_id, modified_by_id) VALUES(TRUE, 'foo', 1, NOW(), NOW(), 1, 1);`) - db.MustExec(`INSERT INTO api_resthook(is_active, slug, org_id, created_on, modified_on, created_by_id, modified_by_id) VALUES(TRUE, 'bar', 1, NOW(), NOW(), 1, 1);`) + rt.DB.MustExec(`INSERT INTO api_resthook(is_active, slug, org_id, created_on, modified_on, created_by_id, modified_by_id) VALUES(TRUE, 'foo', 1, NOW(), NOW(), 1, 1);`) + rt.DB.MustExec(`INSERT INTO api_resthook(is_active, slug, org_id, created_on, modified_on, created_by_id, modified_by_id) VALUES(TRUE, 'bar', 1, NOW(), NOW(), 1, 1);`) // and a few targets - db.MustExec(`INSERT INTO api_resthooksubscriber(is_active, created_on, modified_on, target_url, created_by_id, modified_by_id, resthook_id) VALUES(TRUE, NOW(), NOW(), 'http://rapidpro.io/', 1, 1, 1);`) - db.MustExec(`INSERT INTO api_resthooksubscriber(is_active, created_on, modified_on, target_url, created_by_id, modified_by_id, resthook_id) VALUES(TRUE, NOW(), NOW(), 'http://rapidpro.io/?unsub=1', 1, 1, 2);`) - db.MustExec(`INSERT INTO api_resthooksubscriber(is_active, created_on, modified_on, target_url, created_by_id, modified_by_id, resthook_id) VALUES(TRUE, NOW(), NOW(), 'http://rapidpro.io/?unsub=1', 1, 1, 1);`) + rt.DB.MustExec(`INSERT INTO api_resthooksubscriber(is_active, created_on, modified_on, target_url, created_by_id, modified_by_id, resthook_id) VALUES(TRUE, NOW(), NOW(), 'http://rapidpro.io/', 1, 1, 1);`) + rt.DB.MustExec(`INSERT INTO api_resthooksubscriber(is_active, created_on, modified_on, target_url, created_by_id, modified_by_id, resthook_id) VALUES(TRUE, NOW(), NOW(), 'http://rapidpro.io/?unsub=1', 1, 1, 2);`) + rt.DB.MustExec(`INSERT INTO api_resthooksubscriber(is_active, created_on, modified_on, target_url, created_by_id, modified_by_id, resthook_id) VALUES(TRUE, NOW(), NOW(), 'http://rapidpro.io/?unsub=1', 1, 1, 1);`) tcs := []handlers.TestCase{ { @@ -116,8 
+116,9 @@ func (s *failingWebhookService) Call(request *http.Request) (*flows.WebhookCall, } func TestUnhealthyWebhookCalls(t *testing.T) { - ctx, rt, db, rp := testsuite.Get() - rc := rp.Get() + ctx, rt := testsuite.Runtime() + + rc := rt.RP.Get() defer rc.Close() defer testsuite.Reset(testsuite.ResetData | testsuite.ResetRedis) @@ -128,13 +129,13 @@ func TestUnhealthyWebhookCalls(t *testing.T) { flowDef, err := os.ReadFile("testdata/webhook_flow.json") require.NoError(t, err) - testdata.InsertFlow(db, testdata.Org1, flowDef) + testdata.InsertFlow(rt, testdata.Org1, flowDef) oa, err := models.GetOrgAssetsWithRefresh(ctx, rt, testdata.Org1.ID, models.RefreshFlows) require.NoError(t, err) env := envs.NewBuilder().Build() - _, cathy := testdata.Cathy.Load(db, oa) + _, cathy := testdata.Cathy.Load(rt, oa) // webhook service with a 2 second delay svc := &failingWebhookService{delay: 2 * time.Second} @@ -168,7 +169,7 @@ func TestUnhealthyWebhookCalls(t *testing.T) { total, _ = unhealthySeries.Total(rc, "1bff8fe4-0714-433e-96a3-437405bf21cf") assert.Equal(t, int64(9), total) - assertdb.Query(t, db, `SELECT count(*) FROM notifications_incident WHERE incident_type = 'webhooks:unhealthy'`).Returns(0) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM notifications_incident WHERE incident_type = 'webhooks:unhealthy'`).Returns(0) // however 1 more bad call means this node is considered unhealthy handlers.RunFlowAndApplyEvents(t, ctx, rt, env, eng, oa, flowRef, cathy) @@ -179,17 +180,17 @@ func TestUnhealthyWebhookCalls(t *testing.T) { assert.Equal(t, int64(10), total) // and now we have an incident - assertdb.Query(t, db, `SELECT count(*) FROM notifications_incident WHERE incident_type = 'webhooks:unhealthy'`).Returns(1) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM notifications_incident WHERE incident_type = 'webhooks:unhealthy'`).Returns(1) var incidentID models.IncidentID - db.Get(&incidentID, `SELECT id FROM notifications_incident`) + rt.DB.Get(&incidentID, `SELECT id 
FROM notifications_incident`) // and a record of the nodes - assertredis.SMembers(t, rp, fmt.Sprintf("incident:%d:nodes", incidentID), []string{"1bff8fe4-0714-433e-96a3-437405bf21cf"}) + assertredis.SMembers(t, rt.RP, fmt.Sprintf("incident:%d:nodes", incidentID), []string{"1bff8fe4-0714-433e-96a3-437405bf21cf"}) // another bad call won't create another incident.. handlers.RunFlowAndApplyEvents(t, ctx, rt, env, eng, oa, flowRef, cathy) - assertdb.Query(t, db, `SELECT count(*) FROM notifications_incident WHERE incident_type = 'webhooks:unhealthy'`).Returns(1) - assertredis.SMembers(t, rp, fmt.Sprintf("incident:%d:nodes", incidentID), []string{"1bff8fe4-0714-433e-96a3-437405bf21cf"}) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM notifications_incident WHERE incident_type = 'webhooks:unhealthy'`).Returns(1) + assertredis.SMembers(t, rt.RP, fmt.Sprintf("incident:%d:nodes", incidentID), []string{"1bff8fe4-0714-433e-96a3-437405bf21cf"}) } diff --git a/core/hooks/commit_language_changes.go b/core/hooks/commit_language_changes.go index 23c8ab820..ef7712128 100644 --- a/core/hooks/commit_language_changes.go +++ b/core/hooks/commit_language_changes.go @@ -7,7 +7,7 @@ import ( "github.com/nyaruka/goflow/flows/events" "github.com/nyaruka/mailroom/core/models" "github.com/nyaruka/mailroom/runtime" - "github.com/nyaruka/null" + "github.com/nyaruka/null/v2" ) // CommitLanguageChangesHook is our hook for language changes diff --git a/core/hooks/commit_name_changes.go b/core/hooks/commit_name_changes.go index dafc0cba9..539f0fc88 100644 --- a/core/hooks/commit_name_changes.go +++ b/core/hooks/commit_name_changes.go @@ -8,7 +8,7 @@ import ( "github.com/nyaruka/goflow/flows/events" "github.com/nyaruka/mailroom/core/models" "github.com/nyaruka/mailroom/runtime" - "github.com/nyaruka/null" + "github.com/nyaruka/null/v2" ) // CommitNameChangesHook is our hook for name changes diff --git a/core/hooks/send_messages.go b/core/hooks/send_messages.go index bcdb19a9c..765db643d 100644 --- 
a/core/hooks/send_messages.go +++ b/core/hooks/send_messages.go @@ -3,11 +3,10 @@ package hooks import ( "context" + "github.com/jmoiron/sqlx" "github.com/nyaruka/mailroom/core/models" "github.com/nyaruka/mailroom/core/msgio" "github.com/nyaruka/mailroom/runtime" - - "github.com/jmoiron/sqlx" ) // SendMessagesHook is our hook for sending scene messages @@ -20,17 +19,15 @@ func (h *sendMessagesHook) Apply(ctx context.Context, rt *runtime.Runtime, tx *s msgs := make([]*models.Msg, 0, 1) // for each scene gather all our messages - for s, args := range scenes { + for _, args := range scenes { sceneMsgs := make([]*models.Msg, 0, 1) for _, m := range args { sceneMsgs = append(sceneMsgs, m.(*models.Msg)) } - // if our scene has a timeout, set it on our last message - if len(sceneMsgs) > 0 && s.Session().Timeout() != nil && s.Session().WaitStartedOn() != nil { - sceneMsgs[len(sceneMsgs)-1].SetTimeout(*s.Session().WaitStartedOn(), *s.Session().Timeout()) - } + // mark the last message in the sprint (used for setting timeouts) + sceneMsgs[len(sceneMsgs)-1].LastInSprint = true msgs = append(msgs, sceneMsgs...) 
} diff --git a/core/hooks/start_broadcasts.go b/core/hooks/start_broadcasts.go index ce01b004d..5f68ad28f 100644 --- a/core/hooks/start_broadcasts.go +++ b/core/hooks/start_broadcasts.go @@ -3,12 +3,13 @@ package hooks import ( "context" + "github.com/jmoiron/sqlx" "github.com/nyaruka/goflow/flows/events" "github.com/nyaruka/mailroom/core/models" "github.com/nyaruka/mailroom/core/queue" + "github.com/nyaruka/mailroom/core/tasks" + "github.com/nyaruka/mailroom/core/tasks/msgs" "github.com/nyaruka/mailroom/runtime" - - "github.com/jmoiron/sqlx" "github.com/pkg/errors" ) @@ -36,14 +37,14 @@ func (h *startBroadcastsHook) Apply(ctx context.Context, rt *runtime.Runtime, tx priority := queue.DefaultPriority // if we are starting groups, queue to our batch queue instead, but with high priority - if len(bcast.GroupIDs()) > 0 { + if len(bcast.GroupIDs) > 0 { taskQ = queue.BatchQueue priority = queue.HighPriority } - err = queue.AddTask(rc, taskQ, queue.SendBroadcast, int(oa.OrgID()), bcast, priority) + err = tasks.Queue(rc, taskQ, oa.OrgID(), &msgs.SendBroadcastTask{Broadcast: bcast}, priority) if err != nil { - return errors.Wrapf(err, "error queuing broadcast") + return errors.Wrapf(err, "error queuing broadcast task") } } } diff --git a/core/hooks/start_start.go b/core/hooks/start_start.go index e5ba6b5cd..64336e62c 100644 --- a/core/hooks/start_start.go +++ b/core/hooks/start_start.go @@ -3,11 +3,12 @@ package hooks import ( "context" + "github.com/jmoiron/sqlx" "github.com/nyaruka/mailroom/core/models" "github.com/nyaruka/mailroom/core/queue" + "github.com/nyaruka/mailroom/core/tasks" + "github.com/nyaruka/mailroom/core/tasks/starts" "github.com/nyaruka/mailroom/runtime" - - "github.com/jmoiron/sqlx" "github.com/pkg/errors" ) @@ -30,12 +31,12 @@ func (h *startStartHook) Apply(ctx context.Context, rt *runtime.Runtime, tx *sql priority := queue.DefaultPriority // if we are starting groups, queue to our batch queue instead, but with high priority - if len(start.GroupIDs()) 
> 0 || start.Query() != "" { + if len(start.GroupIDs) > 0 || start.Query != "" { taskQ = queue.BatchQueue priority = queue.HighPriority } - err := queue.AddTask(rc, taskQ, queue.StartFlow, int(oa.OrgID()), start, priority) + err := tasks.Queue(rc, taskQ, oa.OrgID(), &starts.StartFlowTask{FlowStart: start}, priority) if err != nil { return errors.Wrapf(err, "error queuing flow start") } diff --git a/core/ivr/ivr.go b/core/ivr/ivr.go index bdedfd1a1..dcc2205c1 100644 --- a/core/ivr/ivr.go +++ b/core/ivr/ivr.go @@ -2,6 +2,7 @@ package ivr import ( "context" + "encoding/json" "fmt" "net/http" "net/url" @@ -9,6 +10,8 @@ import ( "strconv" "time" + "github.com/gomodule/redigo/redis" + "github.com/jmoiron/sqlx" "github.com/nyaruka/gocommon/dates" "github.com/nyaruka/gocommon/httpx" "github.com/nyaruka/gocommon/urns" @@ -22,10 +25,7 @@ import ( "github.com/nyaruka/mailroom/core/models" "github.com/nyaruka/mailroom/core/runner" "github.com/nyaruka/mailroom/runtime" - "github.com/nyaruka/null" - - "github.com/gomodule/redigo/redis" - "github.com/jmoiron/sqlx" + "github.com/nyaruka/null/v2" "github.com/pkg/errors" "github.com/sirupsen/logrus" ) @@ -120,7 +120,6 @@ func HangupCall(ctx context.Context, rt *runtime.Runtime, call *models.Call) (*m } clog := models.NewChannelLog(models.ChannelLogTypeIVRHangup, channel, svc.RedactValues(channel)) - clog.SetCall(call) defer clog.End() // try to request our call hangup @@ -188,7 +187,7 @@ func RequestCall(ctx context.Context, rt *runtime.Runtime, oa *models.OrgAssets, // create our call object conn, err := models.InsertCall( - ctx, rt.DB, oa.OrgID(), channel.ID(), start.StartID(), contact.ID(), models.URNID(urnID), + ctx, rt.DB, oa.OrgID(), channel.ID(), start.StartID, contact.ID(), models.URNID(urnID), models.CallDirectionOut, models.CallStatusPending, "", ) if err != nil { @@ -199,7 +198,7 @@ func RequestCall(ctx context.Context, rt *runtime.Runtime, oa *models.OrgAssets, // log any error inserting our channel log, but continue if 
clog != nil { - if err := models.InsertChannelLogs(ctx, rt.DB, []*models.ChannelLog{clog}); err != nil { + if err := models.InsertChannelLogs(ctx, rt, []*models.ChannelLog{clog}); err != nil { logrus.WithError(err).Error("error inserting channel log") } } @@ -253,7 +252,6 @@ func RequestStartForCall(ctx context.Context, rt *runtime.Runtime, channel *mode } clog := models.NewChannelLog(models.ChannelLogTypeIVRStart, channel, svc.RedactValues(channel)) - clog.SetCall(call) defer clog.End() // try to request our call start @@ -308,7 +306,7 @@ func StartIVRFlow( if err != nil { return errors.Wrapf(err, "unable to load start: %d", startID) } - flow, err := oa.FlowByID(start.FlowID()) + flow, err := oa.FlowByID(start.FlowID) if err != nil { return errors.Wrapf(err, "unable to load flow: %d", startID) } @@ -335,16 +333,16 @@ func StartIVRFlow( } var params *types.XObject - if len(start.Extra()) > 0 { - params, err = types.ReadXObject(start.Extra()) + if !start.Params.IsNull() { + params, err = types.ReadXObject(start.Params) if err != nil { - return errors.Wrap(err, "unable to read JSON from flow start extra") + return errors.Wrap(err, "unable to read JSON from flow start params") } } var history *flows.SessionHistory - if len(start.SessionHistory()) > 0 { - history, err = models.ReadSessionHistory(start.SessionHistory()) + if !start.SessionHistory.IsNull() { + history, err = models.ReadSessionHistory(start.SessionHistory) if err != nil { return errors.Wrap(err, "unable to read JSON from flow start history") } @@ -354,9 +352,9 @@ func StartIVRFlow( flowRef := assets.NewFlowReference(flow.UUID(), flow.Name()) var trigger flows.Trigger - if len(start.ParentSummary()) > 0 { + if !start.ParentSummary.IsNull() { trigger = triggers.NewBuilder(oa.Env(), flowRef, contact). - FlowAction(history, start.ParentSummary()). + FlowAction(history, json.RawMessage(start.ParentSummary)). WithCall(channel.ChannelReference(), urn). 
Build() } else { @@ -614,9 +612,9 @@ func HandleIVRStatus(ctx context.Context, rt *runtime.Runtime, oa *models.OrgAss return errors.Wrapf(err, "unable to load start: %d", call.StartID()) } - flow, err := oa.FlowByID(start.FlowID()) + flow, err := oa.FlowByID(start.FlowID) if err != nil { - return errors.Wrapf(err, "unable to load flow: %d", start.FlowID()) + return errors.Wrapf(err, "unable to load flow: %d", start.FlowID) } call.MarkErrored(ctx, rt.DB, dates.Now(), flow.IVRRetryWait(), errorReason) diff --git a/core/models/airtime.go b/core/models/airtime.go index c74fbc064..37a697b17 100644 --- a/core/models/airtime.go +++ b/core/models/airtime.go @@ -6,12 +6,12 @@ import ( "time" "github.com/nyaruka/gocommon/urns" - "github.com/nyaruka/null" + "github.com/nyaruka/null/v2" "github.com/shopspring/decimal" ) // AirtimeTransferID is the type for airtime transfer IDs -type AirtimeTransferID null.Int +type AirtimeTransferID int // NilAirtimeTransferID is the nil value for airtime transfer IDs var NilAirtimeTransferID = AirtimeTransferID(0) @@ -88,22 +88,7 @@ func InsertAirtimeTransfers(ctx context.Context, db Queryer, transfers []*Airtim return BulkQuery(ctx, "inserted airtime transfers", db, sqlInsertAirtimeTransfers, ts) } -// MarshalJSON marshals into JSON. 0 values will become null -func (i AirtimeTransferID) MarshalJSON() ([]byte, error) { - return null.Int(i).MarshalJSON() -} - -// UnmarshalJSON unmarshals from JSON. null values become 0 -func (i *AirtimeTransferID) UnmarshalJSON(b []byte) error { - return null.UnmarshalInt(b, (*null.Int)(i)) -} - -// Value returns the db value, null is returned for 0 -func (i AirtimeTransferID) Value() (driver.Value, error) { - return null.Int(i).Value() -} - -// Scan scans from the db value. 
null values become 0 -func (i *AirtimeTransferID) Scan(value interface{}) error { - return null.ScanInt(value, (*null.Int)(i)) -} +func (i *AirtimeTransferID) Scan(value any) error { return null.ScanInt(value, i) } +func (i AirtimeTransferID) Value() (driver.Value, error) { return null.IntValue(i) } +func (i *AirtimeTransferID) UnmarshalJSON(b []byte) error { return null.UnmarshalInt(b, i) } +func (i AirtimeTransferID) MarshalJSON() ([]byte, error) { return null.MarshalInt(i) } diff --git a/core/models/airtime_test.go b/core/models/airtime_test.go index aaaeea458..89b3a6782 100644 --- a/core/models/airtime_test.go +++ b/core/models/airtime_test.go @@ -15,9 +15,9 @@ import ( ) func TestAirtimeTransfers(t *testing.T) { - ctx, _, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() - defer db.MustExec(`DELETE FROM airtime_airtimetransfer`) + defer rt.DB.MustExec(`DELETE FROM airtime_airtimetransfer`) // insert a transfer transfer := models.NewAirtimeTransfer( @@ -31,10 +31,10 @@ func TestAirtimeTransfers(t *testing.T) { decimal.RequireFromString(`1000`), time.Now(), ) - err := models.InsertAirtimeTransfers(ctx, db, []*models.AirtimeTransfer{transfer}) + err := models.InsertAirtimeTransfers(ctx, rt.DB, []*models.AirtimeTransfer{transfer}) assert.Nil(t, err) - assertdb.Query(t, db, `SELECT org_id, status from airtime_airtimetransfer`).Columns(map[string]interface{}{"org_id": int64(1), "status": "S"}) + assertdb.Query(t, rt.DB, `SELECT org_id, status from airtime_airtimetransfer`).Columns(map[string]interface{}{"org_id": int64(1), "status": "S"}) // insert a failed transfer with nil sender, empty currency transfer = models.NewAirtimeTransfer( @@ -48,8 +48,8 @@ func TestAirtimeTransfers(t *testing.T) { decimal.Zero, time.Now(), ) - err = models.InsertAirtimeTransfers(ctx, db, []*models.AirtimeTransfer{transfer}) + err = models.InsertAirtimeTransfers(ctx, rt.DB, []*models.AirtimeTransfer{transfer}) assert.Nil(t, err) - assertdb.Query(t, db, `SELECT count(*) from 
airtime_airtimetransfer WHERE org_id = $1 AND status = $2`, testdata.Org1.ID, models.AirtimeTransferStatusFailed).Returns(1) + assertdb.Query(t, rt.DB, `SELECT count(*) from airtime_airtimetransfer WHERE org_id = $1 AND status = $2`, testdata.Org1.ID, models.AirtimeTransferStatusFailed).Returns(1) } diff --git a/core/models/assets_test.go b/core/models/assets_test.go index 6d6d95d3e..1ec76acb8 100644 --- a/core/models/assets_test.go +++ b/core/models/assets_test.go @@ -15,7 +15,7 @@ import ( ) func TestAssets(t *testing.T) { - ctx, rt, _, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer models.FlushCache() @@ -60,7 +60,7 @@ func TestAssets(t *testing.T) { } func TestCloneForSimulation(t *testing.T) { - ctx, rt, _, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(0) diff --git a/core/models/broadcasts.go b/core/models/broadcasts.go new file mode 100644 index 000000000..ac833c926 --- /dev/null +++ b/core/models/broadcasts.go @@ -0,0 +1,260 @@ +package models + +import ( + "context" + "time" + + "github.com/nyaruka/gocommon/urns" + "github.com/nyaruka/goflow/envs" + "github.com/nyaruka/goflow/excellent" + "github.com/nyaruka/goflow/excellent/types" + "github.com/nyaruka/goflow/flows" + "github.com/nyaruka/goflow/flows/events" + "github.com/nyaruka/mailroom/runtime" + "github.com/nyaruka/null/v2" + "github.com/pkg/errors" +) + +// BroadcastID is our internal type for broadcast ids, which can be null/0 +type BroadcastID int + +// NilBroadcastID is our constant for a nil broadcast id +const NilBroadcastID = BroadcastID(0) + +// TemplateState represents what state are templates are in, either already evaluated or unevaluated +type TemplateState string + +const ( + TemplateStateEvaluated = TemplateState("evaluated") + TemplateStateUnevaluated = TemplateState("unevaluated") +) + +// Broadcast represents a broadcast that needs to be sent +type Broadcast struct { + ID BroadcastID `json:"broadcast_id,omitempty" db:"id"` + OrgID OrgID 
`json:"org_id" db:"org_id"` + Translations flows.BroadcastTranslations `json:"translations" db:"translations"` + TemplateState TemplateState `json:"template_state"` + BaseLanguage envs.Language `json:"base_language" db:"base_language"` + URNs []urns.URN `json:"urns,omitempty"` + ContactIDs []ContactID `json:"contact_ids,omitempty"` + GroupIDs []GroupID `json:"group_ids,omitempty"` + Query null.String `json:"query,omitempty" db:"query"` + CreatedByID UserID `json:"created_by_id,omitempty" db:"created_by_id"` + ParentID BroadcastID `json:"parent_id,omitempty" db:"parent_id"` +} + +// NewBroadcast creates a new broadcast with the passed in parameters +func NewBroadcast(orgID OrgID, translations flows.BroadcastTranslations, + state TemplateState, baseLanguage envs.Language, urns []urns.URN, contactIDs []ContactID, groupIDs []GroupID, query string, createdByID UserID) *Broadcast { + + return &Broadcast{ + OrgID: orgID, + Translations: translations, + TemplateState: state, + BaseLanguage: baseLanguage, + URNs: urns, + ContactIDs: contactIDs, + GroupIDs: groupIDs, + CreatedByID: createdByID, + } +} + +// NewBroadcastFromEvent creates a broadcast object from the passed in broadcast event +func NewBroadcastFromEvent(ctx context.Context, tx Queryer, oa *OrgAssets, event *events.BroadcastCreatedEvent) (*Broadcast, error) { + // resolve our contact references + contactIDs, err := GetContactIDsFromReferences(ctx, tx, oa.OrgID(), event.Contacts) + if err != nil { + return nil, errors.Wrapf(err, "error resolving contact references") + } + + // and our groups + groupIDs := make([]GroupID, 0, len(event.Groups)) + for i := range event.Groups { + group := oa.GroupByUUID(event.Groups[i].UUID) + if group != nil { + groupIDs = append(groupIDs, group.ID()) + } + } + + return NewBroadcast(oa.OrgID(), event.Translations, TemplateStateEvaluated, event.BaseLanguage, event.URNs, contactIDs, groupIDs, event.ContactQuery, NilUserID), nil +} + +func (b *Broadcast) CreateBatch(contactIDs 
[]ContactID, isLast bool) *BroadcastBatch { + return &BroadcastBatch{ + BroadcastID: b.ID, + OrgID: b.OrgID, + BaseLanguage: b.BaseLanguage, + Translations: b.Translations, + TemplateState: b.TemplateState, + CreatedByID: b.CreatedByID, + ContactIDs: contactIDs, + IsLast: isLast, + } +} + +// MarkBroadcastSent marks the given broadcast as sent +func MarkBroadcastSent(ctx context.Context, db Queryer, id BroadcastID) error { + _, err := db.ExecContext(ctx, `UPDATE msgs_broadcast SET status = 'S', modified_on = now() WHERE id = $1`, id) + return errors.Wrapf(err, "error marking broadcast #%d as sent", id) +} + +// MarkBroadcastFailed marks the given broadcast as failed +func MarkBroadcastFailed(ctx context.Context, db Queryer, id BroadcastID) error { + _, err := db.ExecContext(ctx, `UPDATE msgs_broadcast SET status = 'F', modified_on = now() WHERE id = $1`, id) + return errors.Wrapf(err, "error marking broadcast #%d as failed", id) +} + +// InsertChildBroadcast clones the passed in broadcast as a child, then inserts that broadcast into the DB +func InsertChildBroadcast(ctx context.Context, db Queryer, parent *Broadcast) (*Broadcast, error) { + child := NewBroadcast( + parent.OrgID, + parent.Translations, + parent.TemplateState, + parent.BaseLanguage, + parent.URNs, + parent.ContactIDs, + parent.GroupIDs, + string(parent.Query), + parent.CreatedByID, + ) + child.ParentID = parent.ID + + // insert our broadcast + err := BulkQuery(ctx, "inserting broadcast", db, sqlInsertBroadcast, []*Broadcast{child}) + if err != nil { + return nil, errors.Wrapf(err, "error inserting child broadcast for broadcast: %d", parent.ID) + } + + // build up all our contact associations + contacts := make([]*broadcastContact, 0, len(child.ContactIDs)) + for _, contactID := range child.ContactIDs { + contacts = append(contacts, &broadcastContact{BroadcastID: child.ID, ContactID: contactID}) + } + + // insert our contacts + err = BulkQuery(ctx, "inserting broadcast contacts", db,
sqlInsertBroadcastContacts, contacts) + if err != nil { + return nil, errors.Wrapf(err, "error inserting contacts for broadcast") + } + + // build up all our group associations + groups := make([]*broadcastGroup, 0, len(child.GroupIDs)) + for _, groupID := range child.GroupIDs { + groups = append(groups, &broadcastGroup{BroadcastID: child.ID, GroupID: groupID}) + } + + // insert our groups + err = BulkQuery(ctx, "inserting broadcast groups", db, sqlInsertBroadcastGroups, groups) + if err != nil { + return nil, errors.Wrapf(err, "error inserting groups for broadcast") + } + + return child, nil +} + +type broadcastContact struct { + BroadcastID BroadcastID `db:"broadcast_id"` + ContactID ContactID `db:"contact_id"` +} + +type broadcastGroup struct { + BroadcastID BroadcastID `db:"broadcast_id"` + GroupID GroupID `db:"contactgroup_id"` +} + +const sqlInsertBroadcast = ` +INSERT INTO + msgs_broadcast( org_id, parent_id, created_on, modified_on, status, translations, base_language, query, is_active) + VALUES(:org_id, :parent_id, NOW() , NOW(), 'Q', :translations, :base_language, :query, TRUE) +RETURNING id` + +const sqlInsertBroadcastContacts = `INSERT INTO msgs_broadcast_contacts(broadcast_id, contact_id) VALUES(:broadcast_id, :contact_id)` +const sqlInsertBroadcastGroups = `INSERT INTO msgs_broadcast_groups(broadcast_id, contactgroup_id) VALUES(:broadcast_id, :contactgroup_id)` + +// BroadcastBatch represents a batch of contacts that need messages sent for +type BroadcastBatch struct { + BroadcastID BroadcastID `json:"broadcast_id,omitempty"` + OrgID OrgID `json:"org_id"` + Translations flows.BroadcastTranslations `json:"translations"` + BaseLanguage envs.Language `json:"base_language"` + TemplateState TemplateState `json:"template_state"` + ContactIDs []ContactID `json:"contact_ids,omitempty"` + CreatedByID UserID `json:"created_by_id"` + IsLast bool `json:"is_last"` +} + +func (b *BroadcastBatch) CreateMessages(ctx context.Context, rt *runtime.Runtime, oa 
*OrgAssets) ([]*Msg, error) { + // load all our contacts + contacts, err := LoadContacts(ctx, rt.DB, oa, b.ContactIDs) + if err != nil { + return nil, errors.Wrap(err, "error loading contacts for broadcast") + } + + // for each contact, build our message + msgs := make([]*Msg, 0, len(contacts)) + + // run through all our contacts to create our messages + for _, c := range contacts { + msg, err := b.createMessage(rt, oa, c) + if err != nil { + return nil, errors.Wrap(err, "error creating broadcast message") + } + if msg != nil { + msgs = append(msgs, msg) + } + } + + // insert them in a single request + err = InsertMessages(ctx, rt.DB, msgs) + if err != nil { + return nil, errors.Wrap(err, "error inserting broadcast messages") + } + + return msgs, nil +} + +// creates an outgoing message for the given contact - can return nil if resultant message has no content and thus is a noop +func (b *BroadcastBatch) createMessage(rt *runtime.Runtime, oa *OrgAssets, c *Contact) (*Msg, error) { + contact, err := c.FlowContact(oa) + if err != nil { + return nil, errors.Wrap(err, "error creating flow contact for broadcast message") + } + + trans, lang := b.Translations.ForContact(oa.Env(), contact, b.BaseLanguage) + if trans == nil { + // in theory should never happen because we shouldn't save a broadcast like this + return nil, errors.New("broadcast has no translation in base language") + } + + text := trans.Text + attachments := trans.Attachments + quickReplies := trans.QuickReplies + locale := envs.NewLocale(lang, envs.NilCountry) + + if b.TemplateState == TemplateStateUnevaluated { + // build up the minimum viable context for templates + templateCtx := types.NewXObject(map[string]types.XValue{ + "contact": flows.Context(oa.Env(), contact), + "fields": flows.Context(oa.Env(), contact.Fields()), + "globals": flows.Context(oa.Env(), oa.SessionAssets().Globals()), + "urns": flows.ContextFunc(oa.Env(), contact.URNs().MapContext), + }) + text, _ = excellent.EvaluateTemplate(oa.Env(),
templateCtx, text, nil) + } + + // don't create a message if we have no content + if text == "" && len(attachments) == 0 && len(trans.QuickReplies) == 0 { + return nil, nil + } + + // create our outgoing message + out, ch := NewMsgOut(oa, contact, text, attachments, quickReplies, locale) + + msg, err := NewOutgoingBroadcastMsg(rt, oa.Org(), ch, contact, out, time.Now(), b) + if err != nil { + return nil, errors.Wrapf(err, "error creating outgoing message") + } + + return msg, nil +} diff --git a/core/models/broadcasts_test.go b/core/models/broadcasts_test.go new file mode 100644 index 000000000..5eea0533b --- /dev/null +++ b/core/models/broadcasts_test.go @@ -0,0 +1,200 @@ +package models_test + +import ( + "testing" + + "github.com/nyaruka/gocommon/dbutil/assertdb" + "github.com/nyaruka/gocommon/urns" + "github.com/nyaruka/goflow/envs" + "github.com/nyaruka/goflow/flows" + "github.com/nyaruka/goflow/utils" + "github.com/nyaruka/mailroom/core/models" + "github.com/nyaruka/mailroom/testsuite" + "github.com/nyaruka/mailroom/testsuite/testdata" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestNonPersistentBroadcasts(t *testing.T) { + ctx, rt := testsuite.Runtime() + + defer testsuite.Reset(testsuite.ResetData) + + translations := flows.BroadcastTranslations{"eng": {Text: "Hi there"}} + + // create a broadcast which doesn't actually exist in the DB + bcast := models.NewBroadcast( + testdata.Org1.ID, + translations, + models.TemplateStateUnevaluated, + envs.Language("eng"), + []urns.URN{"tel:+593979012345"}, + []models.ContactID{testdata.Alexandria.ID, testdata.Bob.ID, testdata.Cathy.ID}, + []models.GroupID{testdata.DoctorsGroup.ID}, + "", + models.NilUserID, + ) + + assert.Equal(t, models.NilBroadcastID, bcast.ID) + assert.Equal(t, testdata.Org1.ID, bcast.OrgID) + assert.Equal(t, envs.Language("eng"), bcast.BaseLanguage) + assert.Equal(t, translations, bcast.Translations) + assert.Equal(t, models.TemplateStateUnevaluated, 
bcast.TemplateState) + assert.Equal(t, []urns.URN{"tel:+593979012345"}, bcast.URNs) + assert.Equal(t, []models.ContactID{testdata.Alexandria.ID, testdata.Bob.ID, testdata.Cathy.ID}, bcast.ContactIDs) + assert.Equal(t, []models.GroupID{testdata.DoctorsGroup.ID}, bcast.GroupIDs) + + batch := bcast.CreateBatch([]models.ContactID{testdata.Alexandria.ID, testdata.Bob.ID}, false) + + assert.Equal(t, models.NilBroadcastID, batch.BroadcastID) + assert.Equal(t, testdata.Org1.ID, batch.OrgID) + assert.Equal(t, envs.Language("eng"), batch.BaseLanguage) + assert.Equal(t, translations, batch.Translations) + assert.Equal(t, models.TemplateStateUnevaluated, batch.TemplateState) + assert.Equal(t, []models.ContactID{testdata.Alexandria.ID, testdata.Bob.ID}, batch.ContactIDs) + + oa, err := models.GetOrgAssets(ctx, rt, testdata.Org1.ID) + require.NoError(t, err) + + msgs, err := batch.CreateMessages(ctx, rt, oa) + require.NoError(t, err) + + assert.Equal(t, 2, len(msgs)) + + assertdb.Query(t, rt.DB, `SELECT count(*) FROM msgs_msg WHERE direction = 'O' AND broadcast_id IS NULL AND text = 'Hi there'`).Returns(2) +} + +func TestBroadcastTranslations(t *testing.T) { + _, rt := testsuite.Runtime() + + defer func() { + rt.DB.MustExec(`DELETE FROM msgs_broadcast_contacts`) + rt.DB.MustExec(`DELETE FROM msgs_broadcast`) + }() + + bcastID := testdata.InsertBroadcast(rt, testdata.Org1, `eng`, map[envs.Language]string{`eng`: "Hello", `spa`: "Hola"}, models.NilScheduleID, []*testdata.Contact{testdata.Cathy}, nil) + + type TestStruct struct { + Translations flows.BroadcastTranslations `json:"translations"` + } + + s := &TestStruct{} + err := rt.DB.Get(s, `SELECT translations FROM msgs_broadcast WHERE id = $1`, bcastID) + require.NoError(t, err) + + assert.Equal(t, flows.BroadcastTranslations{"eng": {Text: "Hello"}, "spa": {Text: "Hola"}}, s.Translations) + + s.Translations = flows.BroadcastTranslations{"fra": {Text: "Bonjour"}} + + rt.DB.MustExec(`UPDATE msgs_broadcast SET translations = $1 
WHERE id = $2`, s.Translations, bcastID) + + assertdb.Query(t, rt.DB, `SELECT count(*) FROM msgs_broadcast WHERE translations -> 'fra' ->> 'text' = 'Bonjour'`, 1) +} + +func TestBroadcastBatchCreateMessage(t *testing.T) { + ctx, rt := testsuite.Runtime() + + defer func() { + rt.DB.MustExec(`UPDATE contacts_contact SET language = NULL WHERE id = $1`, testdata.Cathy.ID) + testsuite.Reset(testsuite.ResetData | testsuite.ResetRedis) + }() + + oa, err := models.GetOrgAssets(ctx, rt, testdata.Org1.ID) + require.NoError(t, err) + + // we need a broadcast id to insert messages but the content here is ignored + bcastID := testdata.InsertBroadcast(rt, testdata.Org1, "eng", map[envs.Language]string{"eng": "Test"}, models.NilScheduleID, nil, nil) + + tcs := []struct { + contactLanguage envs.Language + translations flows.BroadcastTranslations + baseLanguage envs.Language + templateState models.TemplateState + expectedText string + expectedAttachments []utils.Attachment + expectedQuickReplies []string + expectedLocale envs.Locale + expectedError string + }{ + { + contactLanguage: envs.NilLanguage, + translations: flows.BroadcastTranslations{"eng": {Text: "Hi @Cathy"}}, + baseLanguage: "eng", + templateState: models.TemplateStateEvaluated, + expectedText: "Hi @Cathy", + expectedAttachments: []utils.Attachment{}, + expectedQuickReplies: nil, + expectedLocale: "eng", + }, + { + // contact language not set, uses base language + contactLanguage: envs.NilLanguage, + translations: flows.BroadcastTranslations{"eng": {Text: "Hello @contact.name"}, "spa": {Text: "Hola @contact.name"}}, + baseLanguage: "eng", + templateState: models.TemplateStateUnevaluated, + expectedText: "Hello Cathy", + expectedAttachments: []utils.Attachment{}, + expectedQuickReplies: nil, + expectedLocale: "eng", + }, + { + // contact language iggnored if it isn't a valid org language, even if translation exists + contactLanguage: envs.Language("spa"), + translations: flows.BroadcastTranslations{"eng": {Text: "Hello 
@contact.name"}, "spa": {Text: "Hola @contact.name"}}, + baseLanguage: "eng", + templateState: models.TemplateStateUnevaluated, + expectedText: "Hello Cathy", + expectedAttachments: []utils.Attachment{}, + expectedQuickReplies: nil, + expectedLocale: "eng", + }, + { + // contact language used + contactLanguage: envs.Language("fra"), + translations: flows.BroadcastTranslations{ + "eng": {Text: "Hello @contact.name", Attachments: []utils.Attachment{"audio/mp3:http://test.en.mp3"}, QuickReplies: []string{"yes", "no"}}, + "fra": {Text: "Bonjour @contact.name", Attachments: []utils.Attachment{"audio/mp3:http://test.fr.mp3"}, QuickReplies: []string{"oui", "no"}}, + }, + baseLanguage: "eng", + templateState: models.TemplateStateUnevaluated, + expectedText: "Bonjour Cathy", + expectedAttachments: []utils.Attachment{"audio/mp3:http://test.fr.mp3"}, + expectedQuickReplies: []string{"oui", "no"}, + expectedLocale: "fra", + }, + { + // broken broadcast with no translation in base language + contactLanguage: envs.NilLanguage, + translations: flows.BroadcastTranslations{"fra": {Text: "Bonjour @contact.name"}}, + baseLanguage: "eng", + templateState: models.TemplateStateUnevaluated, + expectedError: "error creating broadcast message: broadcast has no translation in base language", + }, + } + + for i, tc := range tcs { + batch := &models.BroadcastBatch{ + BroadcastID: bcastID, + OrgID: testdata.Org1.ID, + Translations: tc.translations, + BaseLanguage: tc.baseLanguage, + TemplateState: tc.templateState, + ContactIDs: []models.ContactID{testdata.Cathy.ID}, + } + + rt.DB.MustExec(`UPDATE contacts_contact SET language = $2 WHERE id = $1`, testdata.Cathy.ID, tc.contactLanguage) + + msgs, err := batch.CreateMessages(ctx, rt, oa) + if tc.expectedError != "" { + assert.EqualError(t, err, tc.expectedError, "error mismatch in test case %d", i) + } else { + assert.NoError(t, err, "unexpected error in test case %d", i) + if assert.Len(t, msgs, 1, "msg count mismatch in test case %d", i) { + 
assert.Equal(t, tc.expectedText, msgs[0].Text(), "msg text mismatch in test case %d", i) + assert.Equal(t, tc.expectedAttachments, msgs[0].Attachments(), "attachments mismatch in test case %d", i) + assert.Equal(t, tc.expectedQuickReplies, msgs[0].QuickReplies(), "quick replies mismatch in test case %d", i) + assert.Equal(t, tc.expectedLocale, msgs[0].Locale(), "msg locale mismatch in test case %d", i) + } + } + } +} diff --git a/core/models/calls.go b/core/models/calls.go index 8c73ec00d..0bc492a12 100644 --- a/core/models/calls.go +++ b/core/models/calls.go @@ -7,12 +7,12 @@ import ( "github.com/jmoiron/sqlx" "github.com/lib/pq" - "github.com/nyaruka/null" + "github.com/nyaruka/null/v2" "github.com/pkg/errors" ) // CallID is the type for call IDs -type CallID null.Int +type CallID int // NilCallID is the nil value for call IDs const NilCallID = CallID(0) @@ -420,7 +420,12 @@ func BulkUpdateCallStatuses(ctx context.Context, db Queryer, callIDs []CallID, s func (c *Call) AttachLog(ctx context.Context, db Queryer, clog *ChannelLog) error { _, err := db.ExecContext(ctx, `UPDATE ivr_call SET log_uuids = array_append(log_uuids, $2) WHERE id = $1`, c.c.ID, clog.UUID()) - return errors.Wrap(err, "error attaching log to call") + if err != nil { + return errors.Wrap(err, "error attaching log to call") + } + + clog.attached = true + return nil } // ActiveCallCount returns the number of ongoing calls for the passed in channel @@ -433,22 +438,7 @@ func ActiveCallCount(ctx context.Context, db Queryer, id ChannelID) (int, error) return count, nil } -// MarshalJSON marshals into JSON. 0 values will become null -func (i CallID) MarshalJSON() ([]byte, error) { - return null.Int(i).MarshalJSON() -} - -// UnmarshalJSON unmarshals from JSON. 
null values become 0 -func (i *CallID) UnmarshalJSON(b []byte) error { - return null.UnmarshalInt(b, (*null.Int)(i)) -} - -// Value returns the db value, null is returned for 0 -func (i CallID) Value() (driver.Value, error) { - return null.Int(i).Value() -} - -// Scan scans from the db value. null values become 0 -func (i *CallID) Scan(value interface{}) error { - return null.ScanInt(value, (*null.Int)(i)) -} +func (i *CallID) Scan(value any) error { return null.ScanInt(value, i) } +func (i CallID) Value() (driver.Value, error) { return null.IntValue(i) } +func (i *CallID) UnmarshalJSON(b []byte) error { return null.UnmarshalInt(b, i) } +func (i CallID) MarshalJSON() ([]byte, error) { return null.MarshalInt(i) } diff --git a/core/models/calls_test.go b/core/models/calls_test.go index aa723c1e6..4561b6c4e 100644 --- a/core/models/calls_test.go +++ b/core/models/calls_test.go @@ -11,21 +11,21 @@ import ( ) func TestCalls(t *testing.T) { - ctx, _, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() - defer db.MustExec(`DELETE FROM ivr_call`) + defer rt.DB.MustExec(`DELETE FROM ivr_call`) - conn, err := models.InsertCall(ctx, db, testdata.Org1.ID, testdata.TwilioChannel.ID, models.NilStartID, testdata.Cathy.ID, testdata.Cathy.URNID, models.CallDirectionOut, models.CallStatusPending, "") + conn, err := models.InsertCall(ctx, rt.DB, testdata.Org1.ID, testdata.TwilioChannel.ID, models.NilStartID, testdata.Cathy.ID, testdata.Cathy.URNID, models.CallDirectionOut, models.CallStatusPending, "") assert.NoError(t, err) assert.NotEqual(t, models.CallID(0), conn.ID()) - err = conn.UpdateExternalID(ctx, db, "test1") + err = conn.UpdateExternalID(ctx, rt.DB, "test1") assert.NoError(t, err) - assertdb.Query(t, db, `SELECT count(*) from ivr_call where external_id = 'test1' AND id = $1`, conn.ID()).Returns(1) + assertdb.Query(t, rt.DB, `SELECT count(*) from ivr_call where external_id = 'test1' AND id = $1`, conn.ID()).Returns(1) - conn2, err := models.GetCallByID(ctx, db, 
testdata.Org1.ID, conn.ID()) + conn2, err := models.GetCallByID(ctx, rt.DB, testdata.Org1.ID, conn.ID()) assert.NoError(t, err) assert.Equal(t, "test1", conn2.ExternalID()) } diff --git a/core/models/campaigns.go b/core/models/campaigns.go index 8ed0bb05b..fab0a35cc 100644 --- a/core/models/campaigns.go +++ b/core/models/campaigns.go @@ -13,7 +13,7 @@ import ( "github.com/nyaruka/goflow/assets" "github.com/nyaruka/goflow/flows" "github.com/nyaruka/mailroom/runtime" - "github.com/nyaruka/null" + "github.com/nyaruka/null/v2" "github.com/pkg/errors" "github.com/sirupsen/logrus" ) @@ -602,18 +602,16 @@ func ScheduleCampaignEvent(ctx context.Context, rt *runtime.Runtime, orgID OrgID tz := oa.Env().Timezone() for _, el := range eligible { - if el.RelToValue != nil { - start := *el.RelToValue + start := *el.RelToValue - // calculate next fire for this contact - scheduled, err := event.ScheduleForTime(tz, time.Now(), start) - if err != nil { - return errors.Wrapf(err, "error calculating offset for start: %s and event: %d", start, eventID) - } + // calculate next fire for this contact + scheduled, err := event.ScheduleForTime(tz, time.Now(), start) + if err != nil { + return errors.Wrapf(err, "error calculating offset for start: %s and event: %d", start, eventID) + } - if scheduled != nil { - fas = append(fas, &FireAdd{ContactID: el.ContactID, EventID: eventID, Scheduled: *scheduled}) - } + if scheduled != nil { + fas = append(fas, &FireAdd{ContactID: el.ContactID, EventID: eventID, Scheduled: *scheduled}) } } @@ -642,7 +640,7 @@ const sqlEligibleContactsForField = ` SELECT c.id AS contact_id, (c.fields->$2->>'datetime')::timestamptz AS rel_to_value FROM contacts_contact c INNER JOIN contacts_contactgroup_contacts gc ON gc.contact_id = c.id - WHERE gc.contactgroup_id = $1 AND c.is_active = TRUE AND ARRAY[$2]::text[] <@ (extract_jsonb_keys(c.fields)) IS NOT NULL` + WHERE gc.contactgroup_id = $1 AND c.is_active = TRUE AND (c.fields->$2->>'datetime')::timestamptz IS NOT NULL` 
func campaignEventEligibleContacts(ctx context.Context, db Queryer, groupID GroupID, field *Field) ([]*eligibleContact, error) { var query string diff --git a/core/models/campaigns_test.go b/core/models/campaigns_test.go index 7f35dccff..f4ee9b33a 100644 --- a/core/models/campaigns_test.go +++ b/core/models/campaigns_test.go @@ -82,35 +82,35 @@ func TestCampaignSchedule(t *testing.T) { } func TestAddEventFires(t *testing.T) { - ctx, _, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() - defer db.MustExec(`DELETE FROM campaigns_eventfire`) + defer rt.DB.MustExec(`DELETE FROM campaigns_eventfire`) scheduled1 := time.Date(2020, 9, 8, 14, 38, 30, 123456789, time.UTC) - err := models.AddEventFires(ctx, db, []*models.FireAdd{ + err := models.AddEventFires(ctx, rt.DB, []*models.FireAdd{ {ContactID: testdata.Cathy.ID, EventID: testdata.RemindersEvent1.ID, Scheduled: scheduled1}, {ContactID: testdata.Bob.ID, EventID: testdata.RemindersEvent1.ID, Scheduled: scheduled1}, {ContactID: testdata.Bob.ID, EventID: testdata.RemindersEvent2.ID, Scheduled: scheduled1}, }) require.NoError(t, err) - assertdb.Query(t, db, `SELECT count(*) FROM campaigns_eventfire`).Returns(3) - assertdb.Query(t, db, `SELECT count(*) FROM campaigns_eventfire WHERE contact_id = $1 AND event_id = $2`, testdata.Cathy.ID, testdata.RemindersEvent1.ID).Returns(1) - assertdb.Query(t, db, `SELECT count(*) FROM campaigns_eventfire WHERE contact_id = $1 AND event_id = $2`, testdata.Bob.ID, testdata.RemindersEvent1.ID).Returns(1) - assertdb.Query(t, db, `SELECT count(*) FROM campaigns_eventfire WHERE contact_id = $1 AND event_id = $2`, testdata.Bob.ID, testdata.RemindersEvent2.ID).Returns(1) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM campaigns_eventfire`).Returns(3) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM campaigns_eventfire WHERE contact_id = $1 AND event_id = $2`, testdata.Cathy.ID, testdata.RemindersEvent1.ID).Returns(1) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM campaigns_eventfire 
WHERE contact_id = $1 AND event_id = $2`, testdata.Bob.ID, testdata.RemindersEvent1.ID).Returns(1) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM campaigns_eventfire WHERE contact_id = $1 AND event_id = $2`, testdata.Bob.ID, testdata.RemindersEvent2.ID).Returns(1) - db.MustExec(`UPDATE campaigns_eventfire SET fired = NOW() WHERE contact_id = $1`, testdata.Cathy.ID) + rt.DB.MustExec(`UPDATE campaigns_eventfire SET fired = NOW() WHERE contact_id = $1`, testdata.Cathy.ID) scheduled2 := time.Date(2020, 9, 8, 14, 38, 30, 123456789, time.UTC) - err = models.AddEventFires(ctx, db, []*models.FireAdd{ + err = models.AddEventFires(ctx, rt.DB, []*models.FireAdd{ {ContactID: testdata.Cathy.ID, EventID: testdata.RemindersEvent1.ID, Scheduled: scheduled2}, // fine because previous one now has non-null fired {ContactID: testdata.Bob.ID, EventID: testdata.RemindersEvent1.ID, Scheduled: scheduled2}, // won't be added due to conflict }) require.NoError(t, err) - assertdb.Query(t, db, `SELECT count(*) FROM campaigns_eventfire`).Returns(4) - assertdb.Query(t, db, `SELECT count(*) FROM campaigns_eventfire WHERE contact_id = $1 AND event_id = $2`, testdata.Cathy.ID, testdata.RemindersEvent1.ID).Returns(2) - assertdb.Query(t, db, `SELECT count(*) FROM campaigns_eventfire WHERE contact_id = $1`, testdata.Bob.ID).Returns(2) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM campaigns_eventfire`).Returns(4) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM campaigns_eventfire WHERE contact_id = $1 AND event_id = $2`, testdata.Cathy.ID, testdata.RemindersEvent1.ID).Returns(2) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM campaigns_eventfire WHERE contact_id = $1`, testdata.Bob.ID).Returns(2) } diff --git a/core/models/channel_event.go b/core/models/channel_event.go index d483724ff..9a715db53 100644 --- a/core/models/channel_event.go +++ b/core/models/channel_event.go @@ -5,7 +5,7 @@ import ( "encoding/json" "time" - "github.com/nyaruka/null" + "github.com/nyaruka/null/v2" ) type 
ChannelEventType string @@ -59,11 +59,12 @@ func (e *ChannelEvent) IsNewContact() bool { return e.e.NewContact } func (e *ChannelEvent) OccurredOn() time.Time { return e.e.OccurredOn } func (e *ChannelEvent) Extra() map[string]interface{} { - return e.e.Extra.Map() + return e.e.Extra } func (e *ChannelEvent) ExtraValue(key string) string { - return e.e.Extra.GetString(key, "") + v, _ := e.e.Extra[key].(string) + return v } // MarshalJSON is our custom marshaller so that our inner struct get output @@ -99,8 +100,10 @@ func NewChannelEvent(eventType ChannelEventType, orgID OrgID, channelID ChannelI e.URNID = urnID e.NewContact = isNewContact - if extra != nil { - e.Extra = null.NewMap(extra) + if extra == nil { + e.Extra = null.Map{} + } else { + e.Extra = null.Map(extra) } now := time.Now() diff --git a/core/models/channel_event_test.go b/core/models/channel_event_test.go index c7bccaf47..ccd1526c4 100644 --- a/core/models/channel_event_test.go +++ b/core/models/channel_event_test.go @@ -13,26 +13,26 @@ import ( ) func TestChannelEvents(t *testing.T) { - ctx, _, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() - defer db.MustExec(`DELETE FROM channels_channelevent`) + defer rt.DB.MustExec(`DELETE FROM channels_channelevent`) start := time.Now() // no extra e := models.NewChannelEvent(models.MOMissEventType, testdata.Org1.ID, testdata.TwilioChannel.ID, testdata.Cathy.ID, testdata.Cathy.URNID, nil, false) - err := e.Insert(ctx, db) + err := e.Insert(ctx, rt.DB) assert.NoError(t, err) assert.NotZero(t, e.ID()) - assert.Equal(t, e.Extra(), map[string]interface{}{}) + assert.Equal(t, map[string]interface{}{}, e.Extra()) assert.True(t, e.OccurredOn().After(start)) // with extra e2 := models.NewChannelEvent(models.MOMissEventType, testdata.Org1.ID, testdata.TwilioChannel.ID, testdata.Cathy.ID, testdata.Cathy.URNID, map[string]interface{}{"referral_id": "foobar"}, false) - err = e2.Insert(ctx, db) + err = e2.Insert(ctx, rt.DB) assert.NoError(t, err) 
assert.NotZero(t, e2.ID()) - assert.Equal(t, e2.Extra(), map[string]interface{}{"referral_id": "foobar"}) + assert.Equal(t, map[string]interface{}{"referral_id": "foobar"}, e2.Extra()) asJSON, err := json.Marshal(e2) assert.NoError(t, err) diff --git a/core/models/channel_logs.go b/core/models/channel_logs.go index 0f73c8efa..bd98a671c 100644 --- a/core/models/channel_logs.go +++ b/core/models/channel_logs.go @@ -3,13 +3,18 @@ package models import ( "context" "encoding/json" + "fmt" + "path" "time" "github.com/nyaruka/gocommon/dates" "github.com/nyaruka/gocommon/httpx" "github.com/nyaruka/gocommon/jsonx" + "github.com/nyaruka/gocommon/storage" "github.com/nyaruka/gocommon/stringsx" "github.com/nyaruka/gocommon/uuids" + "github.com/nyaruka/goflow/assets" + "github.com/nyaruka/mailroom/runtime" "github.com/pkg/errors" ) @@ -43,7 +48,6 @@ type ChannelLog struct { uuid ChannelLogUUID type_ ChannelLogType channel *Channel - call *Call httpLogs []*httpx.Log errors []ChannelError createdOn time.Time @@ -51,6 +55,7 @@ type ChannelLog struct { recorder *httpx.Recorder redactor stringsx.Redactor + attached bool } // NewChannelLog creates a new channel log with the given type and channel @@ -68,6 +73,8 @@ func newChannelLog(t ChannelLogType, ch *Channel, r *httpx.Recorder, redactVals uuid: ChannelLogUUID(uuids.New()), type_: t, channel: ch, + httpLogs: []*httpx.Log{}, + errors: []ChannelError{}, createdOn: dates.Now(), recorder: r, @@ -77,10 +84,6 @@ func newChannelLog(t ChannelLogType, ch *Channel, r *httpx.Recorder, redactVals func (l *ChannelLog) UUID() ChannelLogUUID { return l.uuid } -func (l *ChannelLog) SetCall(c *Call) { - l.call = c -} - func (l *ChannelLog) HTTP(t *httpx.Trace) { l.httpLogs = append(l.httpLogs, l.traceToLog(t)) } @@ -102,16 +105,31 @@ func (l *ChannelLog) traceToLog(t *httpx.Trace) *httpx.Log { return httpx.NewLog(t, 2048, 50000, l.redactor) } +// if we have an error or a non 2XX/3XX http response then log is considered an error +func (l 
*ChannelLog) isError() bool { + if len(l.errors) > 0 { + return true + } + + for _, l := range l.httpLogs { + if l.StatusCode < 200 || l.StatusCode >= 400 { + return true + } + } + + return false +} + const sqlInsertChannelLog = ` -INSERT INTO channels_channellog( uuid, channel_id, call_id, log_type, http_logs, errors, is_error, elapsed_ms, created_on) - VALUES(:uuid, :channel_id, :call_id, :log_type, :http_logs, :errors, :is_error, :elapsed_ms, :created_on) +INSERT INTO channels_channellog( uuid, channel_id, log_type, http_logs, errors, is_error, elapsed_ms, created_on) + VALUES(:uuid, :channel_id, :log_type, :http_logs, :errors, :is_error, :elapsed_ms, :created_on) RETURNING id` +// channel log to be inserted into the database type dbChannelLog struct { ID ChannelLogID `db:"id"` UUID ChannelLogUUID `db:"uuid"` ChannelID ChannelID `db:"channel_id"` - CallID CallID `db:"call_id"` Type ChannelLogType `db:"log_type"` HTTPLogs json.RawMessage `db:"http_logs"` Errors json.RawMessage `db:"errors"` @@ -120,37 +138,73 @@ type dbChannelLog struct { CreatedOn time.Time `db:"created_on"` } +// channel log to be written to logs storage +type stChannelLog struct { + UUID ChannelLogUUID `json:"uuid"` + Type ChannelLogType `json:"type"` + HTTPLogs []*httpx.Log `json:"http_logs"` + Errors []ChannelError `json:"errors"` + ElapsedMS int `json:"elapsed_ms"` + CreatedOn time.Time `json:"created_on"` + ChannelUUID assets.ChannelUUID `json:"-"` +} + +func (l *stChannelLog) path() string { + return path.Join("channels", string(l.ChannelUUID), string(l.UUID[:4]), fmt.Sprintf("%s.json", l.UUID)) +} + // InsertChannelLogs writes the given channel logs to the db -func InsertChannelLogs(ctx context.Context, db Queryer, logs []*ChannelLog) error { - vs := make([]*dbChannelLog, len(logs)) - for i, l := range logs { - // if we have an error or a non 2XX/3XX http response then this log is marked as an error - isError := len(l.errors) > 0 - if !isError { - for _, l := range l.httpLogs { - if 
l.StatusCode < 200 || l.StatusCode >= 400 { - isError = true - break - } - } +func InsertChannelLogs(ctx context.Context, rt *runtime.Runtime, logs []*ChannelLog) error { + attached := make([]*stChannelLog, 0, len(logs)) + unattached := make([]*dbChannelLog, 0, len(logs)) + + for _, l := range logs { + if l.attached { + // if log is attached to a call or message, only write to storage + attached = append(attached, &stChannelLog{ + UUID: l.uuid, + Type: l.type_, + HTTPLogs: l.httpLogs, + Errors: l.errors, + ElapsedMS: int(l.elapsed / time.Millisecond), + CreatedOn: l.createdOn, + ChannelUUID: l.channel.UUID(), + }) + } else { + // otherwise write to database so it's retrievable + unattached = append(unattached, &dbChannelLog{ + UUID: ChannelLogUUID(uuids.New()), + ChannelID: l.channel.ID(), + Type: l.type_, + HTTPLogs: jsonx.MustMarshal(l.httpLogs), + Errors: jsonx.MustMarshal(l.errors), + IsError: l.isError(), + CreatedOn: l.createdOn, + ElapsedMS: int(l.elapsed / time.Millisecond), + }) } + } - v := &dbChannelLog{ - UUID: ChannelLogUUID(uuids.New()), - ChannelID: l.channel.ID(), - Type: l.type_, - HTTPLogs: jsonx.MustMarshal(l.httpLogs), - Errors: jsonx.MustMarshal(l.errors), - IsError: isError, - CreatedOn: time.Now(), - ElapsedMS: int(l.elapsed / time.Millisecond), + if len(attached) > 0 { + uploads := make([]*storage.Upload, len(attached)) + for i, l := range attached { + uploads[i] = &storage.Upload{ + Path: l.path(), + ContentType: "application/json", + Body: jsonx.MustMarshal(l), + } + } + if err := rt.LogStorage.BatchPut(ctx, uploads); err != nil { + return errors.Wrapf(err, "error writing attached channel logs to storage") } - if l.call != nil { - v.CallID = l.call.ID() + } + + if len(unattached) > 0 { + err := BulkQuery(ctx, "insert channel log", rt.DB, sqlInsertChannelLog, unattached) + if err != nil { + return errors.Wrapf(err, "error inserting unattached channel logs") } - vs[i] = v } - err := BulkQuery(ctx, "insert channel log", db, 
sqlInsertChannelLog, vs) - return errors.Wrapf(err, "error inserting channel logs") + return nil } diff --git a/core/models/channel_logs_test.go b/core/models/channel_logs_test.go index bf448e8f7..72bf3a3d6 100644 --- a/core/models/channel_logs_test.go +++ b/core/models/channel_logs_test.go @@ -10,14 +10,13 @@ import ( "github.com/nyaruka/mailroom/core/models" "github.com/nyaruka/mailroom/testsuite" "github.com/nyaruka/mailroom/testsuite/testdata" - "github.com/stretchr/testify/require" ) func TestChannelLogsOutgoing(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() - defer db.MustExec(`DELETE FROM channels_channellog`) + defer rt.DB.MustExec(`DELETE FROM channels_channellog`) defer httpx.SetRequestor(httpx.DefaultRequestor) httpx.SetRequestor(httpx.NewMockRequestor(map[string][]*httpx.MockResponse{ @@ -49,11 +48,11 @@ func TestChannelLogsOutgoing(t *testing.T) { clog2.Error(errors.New("oops")) clog2.End() - err = models.InsertChannelLogs(ctx, db, []*models.ChannelLog{clog1, clog2}) + err = models.InsertChannelLogs(ctx, rt, []*models.ChannelLog{clog1, clog2}) require.NoError(t, err) - assertdb.Query(t, db, `SELECT count(*) FROM channels_channellog`).Returns(2) - assertdb.Query(t, db, `SELECT count(*) FROM channels_channellog WHERE log_type = 'ivr_start' AND http_logs -> 0 ->> 'url' = 'http://ivr.com/start' AND is_error = FALSE AND channel_id = $1`, channel.ID()).Returns(1) - assertdb.Query(t, db, `SELECT count(*) FROM channels_channellog WHERE log_type = 'ivr_hangup' AND http_logs -> 0 ->> 'url' = 'http://ivr.com/hangup' AND is_error = TRUE AND channel_id = $1`, channel.ID()).Returns(1) - assertdb.Query(t, db, `SELECT count(*) FROM channels_channellog WHERE http_logs::text LIKE '%sesame%'`).Returns(0) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM channels_channellog`).Returns(2) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM channels_channellog WHERE log_type = 'ivr_start' AND http_logs -> 0 ->> 'url' = 'http://ivr.com/start' 
AND is_error = FALSE AND channel_id = $1`, channel.ID()).Returns(1) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM channels_channellog WHERE log_type = 'ivr_hangup' AND http_logs -> 0 ->> 'url' = 'http://ivr.com/hangup' AND is_error = TRUE AND channel_id = $1`, channel.ID()).Returns(1) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM channels_channellog WHERE http_logs::text LIKE '%sesame%'`).Returns(0) } diff --git a/core/models/channels.go b/core/models/channels.go index 35b2c7774..ec2903260 100644 --- a/core/models/channels.go +++ b/core/models/channels.go @@ -11,13 +11,13 @@ import ( "github.com/nyaruka/gocommon/dbutil" "github.com/nyaruka/goflow/assets" "github.com/nyaruka/goflow/envs" - "github.com/nyaruka/null" + "github.com/nyaruka/null/v2" "github.com/pkg/errors" "github.com/sirupsen/logrus" ) // ChannelID is the type for channel IDs -type ChannelID null.Int +type ChannelID int // NilChannelID is the nil value for channel IDs const NilChannelID = ChannelID(0) @@ -43,6 +43,7 @@ type Channel struct { c struct { ID ChannelID `json:"id"` UUID assets.ChannelUUID `json:"uuid"` + OrgID OrgID `json:"org_id"` Parent *assets.ChannelReference `json:"parent"` Name string `json:"name"` Address string `json:"address"` @@ -61,6 +62,9 @@ type Channel struct { // ID returns the id of this channel func (c *Channel) ID() ChannelID { return c.c.ID } +// OrgID returns the org id of this channel +func (c *Channel) OrgID() OrgID { return c.c.OrgID } + // UUID returns the UUID of this channel func (c *Channel) UUID() assets.ChannelUUID { return c.c.UUID } @@ -150,10 +154,11 @@ const sqlSelectChannelsByID = ` SELECT ROW_TO_JSON(r) FROM (SELECT c.id as id, c.uuid as uuid, + c.org_id as org_id, c.name as name, c.channel_type as channel_type, COALESCE(c.tps, 10) as tps, - COALESCE(c.config, '{}')::json as config + c.config as config FROM channels_channel c WHERE @@ -190,6 +195,7 @@ const sqlSelectChannels = ` SELECT ROW_TO_JSON(r) FROM (SELECT c.id as id, c.uuid as uuid, + c.org_id 
as org_id, (SELECT ROW_TO_JSON(p) FROM (SELECT uuid, name FROM channels_channel cc where cc.id = c.parent_id) p) as parent, c.name as name, c.channel_type as channel_type, @@ -197,7 +203,7 @@ SELECT ROW_TO_JSON(r) FROM (SELECT c.country as country, c.address as address, c.schemes as schemes, - COALESCE(c.config, '{}')::json as config, + c.config as config, (SELECT ARRAY( SELECT CASE r WHEN 'R' THEN 'receive' @@ -206,11 +212,11 @@ SELECT ROW_TO_JSON(r) FROM (SELECT WHEN 'A' THEN 'answer' WHEN 'U' THEN 'ussd' END - FROM unnest(regexp_split_to_array(c.role,'')) as r) + FROM unnest(regexp_split_to_array(c.role,'')) AS r) ) as roles, - JSON_EXTRACT_PATH(c.config::json, 'matching_prefixes') as match_prefixes, - JSON_EXTRACT_PATH(c.config::json, 'allow_international') as allow_international, - JSON_EXTRACT_PATH(c.config::json, 'machine_detection') as machine_detection + jsonb_extract_path(c.config, 'matching_prefixes') AS match_prefixes, + jsonb_extract_path(c.config, 'allow_international') AS allow_international, + jsonb_extract_path(c.config, 'machine_detection') AS machine_detection FROM channels_channel c WHERE @@ -230,22 +236,7 @@ func OrgIDForChannelUUID(ctx context.Context, db Queryer, channelUUID assets.Cha return orgID, nil } -// MarshalJSON marshals into JSON. 0 values will become null -func (i ChannelID) MarshalJSON() ([]byte, error) { - return null.Int(i).MarshalJSON() -} - -// UnmarshalJSON unmarshals from JSON. null values become 0 -func (i *ChannelID) UnmarshalJSON(b []byte) error { - return null.UnmarshalInt(b, (*null.Int)(i)) -} - -// Value returns the db value, null is returned for 0 -func (i ChannelID) Value() (driver.Value, error) { - return null.Int(i).Value() -} - -// Scan scans from the db value. 
null values become 0 -func (i *ChannelID) Scan(value interface{}) error { - return null.ScanInt(value, (*null.Int)(i)) -} +func (i *ChannelID) Scan(value any) error { return null.ScanInt(value, i) } +func (i ChannelID) Value() (driver.Value, error) { return null.IntValue(i) } +func (i *ChannelID) UnmarshalJSON(b []byte) error { return null.UnmarshalInt(b, i) } +func (i ChannelID) MarshalJSON() ([]byte, error) { return null.MarshalInt(i) } diff --git a/core/models/channels_test.go b/core/models/channels_test.go index a48b12ded..6400330ea 100644 --- a/core/models/channels_test.go +++ b/core/models/channels_test.go @@ -7,21 +7,20 @@ import ( "github.com/nyaruka/mailroom/core/models" "github.com/nyaruka/mailroom/testsuite" "github.com/nyaruka/mailroom/testsuite/testdata" - "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) func TestChannels(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetAll) // add some tel specific config to channel 2 - db.MustExec(`UPDATE channels_channel SET config = '{"matching_prefixes": ["250", "251"], "allow_international": true}' WHERE id = $1`, testdata.VonageChannel.ID) + rt.DB.MustExec(`UPDATE channels_channel SET config = '{"matching_prefixes": ["250", "251"], "allow_international": true}' WHERE id = $1`, testdata.VonageChannel.ID) // make twitter channel have a parent of twilio channel - db.MustExec(`UPDATE channels_channel SET parent_id = $1 WHERE id = $2`, testdata.TwilioChannel.ID, testdata.TwitterChannel.ID) + rt.DB.MustExec(`UPDATE channels_channel SET parent_id = $1 WHERE id = $2`, testdata.TwilioChannel.ID, testdata.TwitterChannel.ID) oa, err := models.GetOrgAssetsWithRefresh(ctx, rt, 1, models.RefreshChannels) require.NoError(t, err) diff --git a/core/models/classifiers.go b/core/models/classifiers.go index ee2e8a785..58401b089 100644 --- a/core/models/classifiers.go +++ b/core/models/classifiers.go @@ -15,13 +15,13 @@ import ( 
"github.com/nyaruka/goflow/services/classification/wit" "github.com/nyaruka/mailroom/core/goflow" "github.com/nyaruka/mailroom/runtime" - "github.com/nyaruka/null" + "github.com/nyaruka/null/v2" "github.com/pkg/errors" "github.com/sirupsen/logrus" ) // ClassifierID is our type for classifier IDs -type ClassifierID null.Int +type ClassifierID int // NilClassifierID is nil value for classifier IDs const NilClassifierID = ClassifierID(0) @@ -187,22 +187,7 @@ ORDER BY ) r; ` -// MarshalJSON marshals into JSON. 0 values will become null -func (i ClassifierID) MarshalJSON() ([]byte, error) { - return null.Int(i).MarshalJSON() -} - -// UnmarshalJSON unmarshals from JSON. null values become 0 -func (i *ClassifierID) UnmarshalJSON(b []byte) error { - return null.UnmarshalInt(b, (*null.Int)(i)) -} - -// Value returns the db value, null is returned for 0 -func (i ClassifierID) Value() (driver.Value, error) { - return null.Int(i).Value() -} - -// Scan scans from the db value. null values become 0 -func (i *ClassifierID) Scan(value interface{}) error { - return null.ScanInt(value, (*null.Int)(i)) -} +func (i *ClassifierID) Scan(value any) error { return null.ScanInt(value, i) } +func (i ClassifierID) Value() (driver.Value, error) { return null.IntValue(i) } +func (i *ClassifierID) UnmarshalJSON(b []byte) error { return null.UnmarshalInt(b, i) } +func (i ClassifierID) MarshalJSON() ([]byte, error) { return null.MarshalInt(i) } diff --git a/core/models/classifiers_test.go b/core/models/classifiers_test.go index 90660b004..fc9ea588e 100644 --- a/core/models/classifiers_test.go +++ b/core/models/classifiers_test.go @@ -13,7 +13,7 @@ import ( ) func TestClassifiers(t *testing.T) { - ctx, rt, _, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() oa, err := models.GetOrgAssetsWithRefresh(ctx, rt, testdata.Org1.ID, models.RefreshClassifiers) require.NoError(t, err) diff --git a/core/models/contacts.go b/core/models/contacts.go index 215bb0b2a..e6202b076 100644 --- 
a/core/models/contacts.go +++ b/core/models/contacts.go @@ -18,17 +18,18 @@ import ( "github.com/nyaruka/goflow/envs" "github.com/nyaruka/goflow/excellent/types" "github.com/nyaruka/goflow/flows" - "github.com/nyaruka/null" + "github.com/nyaruka/mailroom/runtime" + "github.com/nyaruka/null/v2" "github.com/nyaruka/redisx" "github.com/pkg/errors" "github.com/sirupsen/logrus" ) // URNID is our type for urn ids, which can be null -type URNID null.Int +type URNID int // ContactID is our type for contact ids, which can be null -type ContactID null.Int +type ContactID int // URN priority constants const ( @@ -80,7 +81,7 @@ type Contact struct { fields map[string]*flows.Value groups []*Group urns []urns.URN - tickets []*Ticket + ticket *Ticket createdOn time.Time modifiedOn time.Time lastSeenOn *time.Time @@ -95,6 +96,7 @@ func (c *Contact) Status() ContactStatus { return c.status } func (c *Contact) Fields() map[string]*flows.Value { return c.fields } func (c *Contact) Groups() []*Group { return c.groups } func (c *Contact) URNs() []urns.URN { return c.urns } +func (c *Contact) Ticket() *Ticket { return c.ticket } func (c *Contact) CreatedOn() time.Time { return c.createdOn } func (c *Contact) ModifiedOn() time.Time { return c.modifiedOn } func (c *Contact) LastSeenOn() *time.Time { return c.lastSeenOn } @@ -204,11 +206,11 @@ func (c *Contact) FlowContact(oa *OrgAssets) (*flows.Contact, error) { } } - // convert our tickets to flow tickets - tickets := make([]*flows.Ticket, len(c.tickets)) + // convert our ticket to a flow ticket + var ticket *flows.Ticket var err error - for i, t := range c.tickets { - tickets[i], err = t.FlowTicket(oa) + if c.ticket != nil { + ticket, err = c.ticket.FlowTicket(oa) if err != nil { return nil, errors.Wrapf(err, "error creating flow ticket") } @@ -228,7 +230,7 @@ func (c *Contact) FlowContact(oa *OrgAssets) (*flows.Contact, error) { c.urns, groups, c.fields, - tickets, + ticket, assets.IgnoreMissing, ) if err != nil { @@ -245,7 +247,7 @@ 
func LoadContact(ctx context.Context, db Queryer, oa *OrgAssets, id ContactID) ( return nil, err } if len(contacts) == 0 { - return nil, nil + return nil, errors.Errorf("no such contact #%d in org #%d", id, oa.OrgID()) } return contacts[0], nil } @@ -253,6 +255,10 @@ func LoadContact(ctx context.Context, db Queryer, oa *OrgAssets, id ContactID) ( // LoadContacts loads a set of contacts for the passed in ids. Note that the order of the returned contacts // won't necessarily match the order of the ids. func LoadContacts(ctx context.Context, db Queryer, oa *OrgAssets, ids []ContactID) ([]*Contact, error) { + if len(ids) == 0 { + return nil, nil + } + start := time.Now() rows, err := db.QueryxContext(ctx, sqlSelectContact, pq.Array(ids), oa.OrgID()) @@ -323,15 +329,14 @@ func LoadContacts(ctx context.Context, db Queryer, oa *OrgAssets, ids []ContactI } contact.urns = contactURNs - // initialize our tickets - tickets := make([]*Ticket, 0, len(e.Tickets)) - for _, t := range e.Tickets { + // grab the last opened open ticket + if len(e.Tickets) > 0 { + t := e.Tickets[0] ticketer := oa.TicketerByID(t.TicketerID) if ticketer != nil { - tickets = append(tickets, NewTicket(t.UUID, oa.OrgID(), NilUserID, NilFlowID, contact.ID(), ticketer.ID(), t.ExternalID, t.TopicID, t.Body, t.AssigneeID, nil)) + contact.ticket = NewTicket(t.UUID, oa.OrgID(), NilUserID, NilFlowID, contact.ID(), ticketer.ID(), t.ExternalID, t.TopicID, t.Body, t.AssigneeID, nil) } } - contact.tickets = tickets contacts = append(contacts, contact) } @@ -385,6 +390,10 @@ func GetContactIDsFromReferences(ctx context.Context, db Queryer, orgID OrgID, r // gets the contact IDs for the passed in org and set of UUIDs func getContactIDsFromUUIDs(ctx context.Context, db Queryer, orgID OrgID, uuids []flows.ContactUUID) ([]ContactID, error) { + if len(uuids) == 0 { + return nil, nil + } + ids, err := queryContactIDs(ctx, db, `SELECT id FROM contacts_contact WHERE org_id = $1 AND uuid = ANY($2) AND is_active = TRUE`, 
orgID, pq.Array(uuids)) if err != nil { return nil, errors.Wrapf(err, "error selecting contact ids by UUID") @@ -544,7 +553,7 @@ LEFT JOIN ( 'ticketer_id', t.ticketer_id, 'topic_id', t.topic_id, 'assignee_id', t.assignee_id - ) ORDER BY t.opened_on ASC, t.id ASC + ) ORDER BY t.opened_on DESC, t.id DESC ) as tickets FROM tickets_ticket t @@ -612,7 +621,6 @@ func CreateContact(ctx context.Context, db QueryerWithTx, oa *OrgAssets, userID // * If URNs exist but are orphaned it creates a new contact and assigns those URNs to them. // * If URNs exists and belongs to a single contact it returns that contact (other URNs are not assigned to the contact). // * If URNs exists and belongs to multiple contacts it will return an error. -// func GetOrCreateContact(ctx context.Context, db QueryerWithTx, oa *OrgAssets, urnz []urns.URN, channelID ChannelID) (*Contact, *flows.Contact, bool, error) { // ensure all URNs are normalized for i, urn := range urnz { @@ -647,34 +655,40 @@ func GetOrCreateContact(ctx context.Context, db QueryerWithTx, oa *OrgAssets, ur return contact, flowContact, created, nil } -// GetOrCreateContactIDsFromURNs will fetch or create the contacts for the passed in URNs, returning a map the same length as -// the passed in URNs with the ids of the contacts. -func GetOrCreateContactIDsFromURNs(ctx context.Context, db QueryerWithTx, oa *OrgAssets, urnz []urns.URN) (map[urns.URN]ContactID, error) { +// GetOrCreateContactsFromURNs will fetch or create the contacts for the passed in URNs, returning a map of the fetched +// contacts and another map of the created contacts. 
+func GetOrCreateContactsFromURNs(ctx context.Context, db QueryerWithTx, oa *OrgAssets, urnz []urns.URN) (map[urns.URN]*Contact, map[urns.URN]*Contact, error) { // ensure all URNs are normalized for i, urn := range urnz { urnz[i] = urn.Normalize(string(oa.Env().DefaultCountry())) } // find current owners of these URNs - owners, err := contactIDsFromURNs(ctx, db, oa.OrgID(), urnz) + owners, err := contactsFromURNs(ctx, db, oa, urnz) if err != nil { - return nil, errors.Wrapf(err, "error looking up contacts for URNs") + return nil, nil, errors.Wrap(err, "error looking up contacts for URNs") } + fetched := make(map[urns.URN]*Contact, len(urnz)) + created := make(map[urns.URN]*Contact, len(urnz)) + // create any contacts that are missing - for urn, contactID := range owners { - if contactID == NilContactID { + for urn, contact := range owners { + if contact == nil { contact, _, _, err := GetOrCreateContact(ctx, db, oa, []urns.URN{urn}, NilChannelID) if err != nil { - return nil, errors.Wrapf(err, "error creating contact") + return nil, nil, errors.Wrapf(err, "error creating contact") } - owners[urn] = contact.ID() + created[urn] = contact + } else { + fetched[urn] = contact } } - return owners, nil + + return fetched, created, nil } -// looks up the contacts who own the given urns (which should be normalized by the caller) and returns that information as a map +// looks up the contact IDs who own the given urns (which should be normalized by the caller) and returns that information as a map func contactIDsFromURNs(ctx context.Context, db Queryer, orgID OrgID, urnz []urns.URN) (map[urns.URN]ContactID, error) { identityToOriginal := make(map[urns.URN]urns.URN, len(urnz)) identities := make([]urns.URN, len(urnz)) @@ -705,6 +719,39 @@ func contactIDsFromURNs(ctx context.Context, db Queryer, orgID OrgID, urnz []urn return owners, nil } +// like contactIDsFromURNs but fetches the contacts +func contactsFromURNs(ctx context.Context, db Queryer, oa *OrgAssets, urnz []urns.URN) 
(map[urns.URN]*Contact, error) { + ids, err := contactIDsFromURNs(ctx, db, oa.OrgID(), urnz) + if err != nil { + return nil, err + } + + // get the ids of the contacts that exist + existingIDs := make([]ContactID, 0, len(ids)) + for _, id := range ids { + if id != NilContactID { + existingIDs = append(existingIDs, id) + } + } + + fetched, err := LoadContacts(ctx, db, oa, existingIDs) + if err != nil { + return nil, errors.Wrap(err, "error loading contacts") + } + + // and transform those into a map by URN + fetchedByID := make(map[ContactID]*Contact, len(fetched)) + for _, c := range fetched { + fetchedByID[c.ID()] = c + } + byURN := make(map[urns.URN]*Contact, len(ids)) + for urn, id := range ids { + byURN[urn] = fetchedByID[id] + } + + return byURN, nil +} + func getOrCreateContact(ctx context.Context, db QueryerWithTx, orgID OrgID, urnz []urns.URN, channelID ChannelID) (ContactID, bool, error) { // find current owners of these URNs owners, err := contactIDsFromURNs(ctx, db, orgID, urnz) @@ -1136,7 +1183,7 @@ func updateURNChannelPriority(urn urns.URN, channel *Channel, priority int) (urn // UpdateContactModifiedOn updates modified_on the passed in contacts func UpdateContactModifiedOn(ctx context.Context, db Queryer, contactIDs []ContactID) error { - for _, idBatch := range chunkSlice(contactIDs, 100) { + for _, idBatch := range ChunkSlice(contactIDs, 100) { _, err := db.ExecContext(ctx, `UPDATE contacts_contact SET modified_on = NOW() WHERE id = ANY($1)`, pq.Array(idBatch)) if err != nil { return errors.Wrap(err, "error updating modified_on for contact batch") @@ -1307,51 +1354,15 @@ type ContactURNsChanged struct { URNs []urns.URN } -// MarshalJSON marshals into JSON. 0 values will become null -func (i URNID) MarshalJSON() ([]byte, error) { - return null.Int(i).MarshalJSON() -} - -// UnmarshalJSON unmarshals from JSON. 
null values become 0 -func (i *URNID) UnmarshalJSON(b []byte) error { - return null.UnmarshalInt(b, (*null.Int)(i)) -} - -// Value returns the db value, null is returned for 0 -func (i URNID) Value() (driver.Value, error) { - return null.Int(i).Value() -} +func (i *URNID) Scan(value any) error { return null.ScanInt(value, i) } +func (i URNID) Value() (driver.Value, error) { return null.IntValue(i) } +func (i *URNID) UnmarshalJSON(b []byte) error { return null.UnmarshalInt(b, i) } +func (i URNID) MarshalJSON() ([]byte, error) { return null.MarshalInt(i) } -// Scan scans from the db value. null values become 0 -func (i *URNID) Scan(value interface{}) error { - return null.ScanInt(value, (*null.Int)(i)) -} - -// MarshalJSON marshals into JSON. 0 values will become null -func (i ContactID) MarshalJSON() ([]byte, error) { - return null.Int(i).MarshalJSON() -} - -// UnmarshalJSON unmarshals from JSON. null values become 0 -func (i *ContactID) UnmarshalJSON(b []byte) error { - return null.UnmarshalInt(b, (*null.Int)(i)) -} - -// Value returns the db value, null is returned for 0 -func (i ContactID) Value() (driver.Value, error) { - return null.Int(i).Value() -} - -// Scan scans from the db value. 
null values become 0 -func (i *ContactID) Scan(value interface{}) error { - return null.ScanInt(value, (*null.Int)(i)) -} - -// GetContactLocker returns the locker for a particular contact -func GetContactLocker(orgID OrgID, contactID ContactID) *redisx.Locker { - key := fmt.Sprintf("lock:c:%d:%d", orgID, contactID) - return redisx.NewLocker(key, time.Minute*5) -} +func (i *ContactID) Scan(value any) error { return null.ScanInt(value, i) } +func (i ContactID) Value() (driver.Value, error) { return null.IntValue(i) } +func (i *ContactID) UnmarshalJSON(b []byte) error { return null.UnmarshalInt(b, i) } +func (i ContactID) MarshalJSON() ([]byte, error) { return null.MarshalInt(i) } // ContactStatusChange struct used for our contact status change type ContactStatusChange struct { @@ -1416,3 +1427,65 @@ FROM ( WHERE c.id = r.id::int ` + +// LockContacts tries to grab locks for the given contacts, returning the locks and the skipped contacts +func LockContacts(ctx context.Context, rt *runtime.Runtime, orgID OrgID, ids []ContactID, retry time.Duration) (map[ContactID]string, []ContactID, error) { + locks := make(map[ContactID]string, len(ids)) + skipped := make([]ContactID, 0, 5) + + // this is set to true at the end of the function so the defer calls won't release the locks unless we're returning + // early due to an error + success := false + + for _, contactID := range ids { + // error if context has finished before we have + select { + case <-ctx.Done(): + return nil, nil, ctx.Err() + default: + } + + locker := getContactLocker(orgID, contactID) + + lock, err := locker.Grab(rt.RP, retry) + if err != nil { + return nil, nil, errors.Wrapf(err, "error attempting to grab lock") + } + + // no error but we didn't get the lock + if lock == "" { + skipped = append(skipped, contactID) + continue + } + + locks[contactID] = lock + + // if we error we want to release all locks on way out + defer func() { + if !success { + locker.Release(rt.RP, lock) + } + }() + } + + success = 
true + return locks, skipped, nil +} + +// UnlockContacts unlocks the given contacts using the given lock values +func UnlockContacts(rt *runtime.Runtime, orgID OrgID, locks map[ContactID]string) error { + for contactID, lock := range locks { + locker := getContactLocker(orgID, contactID) + + err := locker.Release(rt.RP, lock) + if err != nil { + return err + } + } + return nil +} + +// returns the locker for a particular contact +func getContactLocker(orgID OrgID, contactID ContactID) *redisx.Locker { + return redisx.NewLocker(fmt.Sprintf("lock:c:%d:%d", orgID, contactID), time.Minute*5) +} diff --git a/core/models/contacts_test.go b/core/models/contacts_test.go index 764069862..a1ef4388a 100644 --- a/core/models/contacts_test.go +++ b/core/models/contacts_test.go @@ -1,6 +1,7 @@ package models_test import ( + "context" "fmt" "sort" "testing" @@ -15,32 +16,36 @@ import ( "github.com/nyaruka/mailroom/testsuite" "github.com/nyaruka/mailroom/testsuite/testdata" "github.com/nyaruka/mailroom/utils/test" + "github.com/nyaruka/redisx/assertredis" "github.com/shopspring/decimal" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + "golang.org/x/exp/maps" ) func TestContacts(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetAll) - testdata.InsertContactURN(db, testdata.Org1, testdata.Bob, "whatsapp:250788373373", 999) - testdata.InsertOpenTicket(db, testdata.Org1, testdata.Cathy, testdata.Zendesk, testdata.SupportTopic, "Where are my shoes?", "1234", time.Now(), testdata.Agent) - testdata.InsertOpenTicket(db, testdata.Org1, testdata.Cathy, testdata.Zendesk, testdata.SalesTopic, "Where are my pants?", "2345", time.Now(), nil) - testdata.InsertOpenTicket(db, testdata.Org1, testdata.Bob, testdata.Mailgun, testdata.DefaultTopic, "His name is Bob", "", time.Now(), testdata.Editor) + // for now it's still possible to have more than one open ticket in the database + 
testdata.InsertOpenTicket(rt, testdata.Org1, testdata.Cathy, testdata.Zendesk, testdata.SupportTopic, "Where are my shoes?", "1234", time.Now(), testdata.Agent) + testdata.InsertOpenTicket(rt, testdata.Org1, testdata.Cathy, testdata.Zendesk, testdata.SalesTopic, "Where are my pants?", "2345", time.Now(), nil) + + testdata.InsertContactURN(rt, testdata.Org1, testdata.Bob, "whatsapp:250788373373", 999) + testdata.InsertOpenTicket(rt, testdata.Org1, testdata.Bob, testdata.Mailgun, testdata.DefaultTopic, "His name is Bob", "", time.Now(), testdata.Editor) // delete mailgun ticketer - db.MustExec(`UPDATE tickets_ticketer SET is_active = false WHERE id = $1`, testdata.Mailgun.ID) + rt.DB.MustExec(`UPDATE tickets_ticketer SET is_active = false WHERE id = $1`, testdata.Mailgun.ID) org, err := models.GetOrgAssetsWithRefresh(ctx, rt, testdata.Org1.ID, models.RefreshAll) assert.NoError(t, err) - db.MustExec(`DELETE FROM contacts_contacturn WHERE contact_id = $1`, testdata.George.ID) - db.MustExec(`DELETE FROM contacts_contactgroup_contacts WHERE contact_id = $1`, testdata.George.ID) - db.MustExec(`UPDATE contacts_contact SET is_active = FALSE WHERE id = $1`, testdata.Alexandria.ID) + rt.DB.MustExec(`DELETE FROM contacts_contacturn WHERE contact_id = $1`, testdata.George.ID) + rt.DB.MustExec(`DELETE FROM contacts_contactgroup_contacts WHERE contact_id = $1`, testdata.George.ID) + rt.DB.MustExec(`UPDATE contacts_contact SET is_active = FALSE WHERE id = $1`, testdata.Alexandria.ID) - modelContacts, err := models.LoadContacts(ctx, db, org, []models.ContactID{testdata.Cathy.ID, testdata.Bob.ID, testdata.George.ID, testdata.Alexandria.ID}) + modelContacts, err := models.LoadContacts(ctx, rt.DB, org, []models.ContactID{testdata.Cathy.ID, testdata.Bob.ID, testdata.George.ID, testdata.Alexandria.ID}) require.NoError(t, err) require.Equal(t, 3, len(modelContacts)) @@ -60,13 +65,11 @@ func TestContacts(t *testing.T) { assert.Equal(t, len(cathy.URNs()), 1) assert.Equal(t, 
cathy.URNs()[0].String(), "tel:+16055741111?id=10000&priority=1000") assert.Equal(t, 1, cathy.Groups().Count()) - assert.Equal(t, 2, cathy.Tickets().Count()) + assert.NotNil(t, cathy.Ticket()) - cathyTickets := cathy.Tickets().All() - assert.Equal(t, "Support", cathyTickets[0].Topic().Name()) - assert.Equal(t, "agent1@nyaruka.com", cathyTickets[0].Assignee().Email()) - assert.Equal(t, "Sales", cathyTickets[1].Topic().Name()) - assert.Nil(t, cathyTickets[1].Assignee()) + cathyTicket := cathy.Ticket() + assert.Equal(t, "Sales", cathyTicket.Topic().Name()) + assert.Nil(t, cathyTicket.Assignee()) assert.Equal(t, "Yobe", cathy.Fields()["state"].QueryValue()) assert.Equal(t, "Dokshi", cathy.Fields()["ward"].QueryValue()) @@ -79,17 +82,17 @@ func TestContacts(t *testing.T) { assert.Equal(t, "tel:+16055742222?id=10001&priority=1000", bob.URNs()[0].String()) assert.Equal(t, "whatsapp:250788373373?id=30000&priority=999", bob.URNs()[1].String()) assert.Equal(t, 0, bob.Groups().Count()) - assert.Equal(t, 0, bob.Tickets().Count()) // because ticketer no longer exists + assert.Nil(t, bob.Ticket()) // because ticketer no longer exists assert.Equal(t, "George", george.Name()) assert.Equal(t, decimal.RequireFromString("30"), george.Fields()["age"].QueryValue()) assert.Equal(t, 0, len(george.URNs())) assert.Equal(t, 0, george.Groups().Count()) - assert.Equal(t, 0, george.Tickets().Count()) + assert.Nil(t, george.Ticket()) // change bob to have a preferred URN and channel of our telephone channel := org.ChannelByID(testdata.TwilioChannel.ID) - err = modelContacts[1].UpdatePreferredURN(ctx, db, org, testdata.Bob.URNID, channel) + err = modelContacts[1].UpdatePreferredURN(ctx, rt.DB, org, testdata.Bob.URNID, channel) assert.NoError(t, err) bob, err = modelContacts[1].FlowContact(org) @@ -98,14 +101,14 @@ func TestContacts(t *testing.T) { assert.Equal(t, "whatsapp:250788373373?id=30000&priority=999", bob.URNs()[1].String()) // add another tel urn to bob - testdata.InsertContactURN(db, 
testdata.Org1, testdata.Bob, urns.URN("tel:+250788373373"), 10) + testdata.InsertContactURN(rt, testdata.Org1, testdata.Bob, urns.URN("tel:+250788373373"), 10) // reload the contact - modelContacts, err = models.LoadContacts(ctx, db, org, []models.ContactID{testdata.Bob.ID}) + modelContacts, err = models.LoadContacts(ctx, rt.DB, org, []models.ContactID{testdata.Bob.ID}) assert.NoError(t, err) // set our preferred channel again - err = modelContacts[0].UpdatePreferredURN(ctx, db, org, models.URNID(30001), channel) + err = modelContacts[0].UpdatePreferredURN(ctx, rt.DB, org, models.URNID(30001), channel) assert.NoError(t, err) bob, err = modelContacts[0].FlowContact(org) @@ -115,7 +118,7 @@ func TestContacts(t *testing.T) { assert.Equal(t, "whatsapp:250788373373?id=30000&priority=998", bob.URNs()[2].String()) // no op this time - err = modelContacts[0].UpdatePreferredURN(ctx, db, org, models.URNID(30001), channel) + err = modelContacts[0].UpdatePreferredURN(ctx, rt.DB, org, models.URNID(30001), channel) assert.NoError(t, err) bob, err = modelContacts[0].FlowContact(org) @@ -125,7 +128,7 @@ func TestContacts(t *testing.T) { assert.Equal(t, "whatsapp:250788373373?id=30000&priority=998", bob.URNs()[2].String()) // calling with no channel is a noop on the channel - err = modelContacts[0].UpdatePreferredURN(ctx, db, org, models.URNID(30001), nil) + err = modelContacts[0].UpdatePreferredURN(ctx, rt.DB, org, models.URNID(30001), nil) assert.NoError(t, err) bob, err = modelContacts[0].FlowContact(org) @@ -136,19 +139,19 @@ func TestContacts(t *testing.T) { } func TestCreateContact(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetData) - testdata.InsertContactGroup(db, testdata.Org1, "d636c966-79c1-4417-9f1c-82ad629773a2", "Kinyarwanda", "language = kin") + testdata.InsertContactGroup(rt, testdata.Org1, "d636c966-79c1-4417-9f1c-82ad629773a2", "Kinyarwanda", "language = kin") // add an orphaned URN - 
testdata.InsertContactURN(db, testdata.Org1, nil, urns.URN("telegram:200002"), 100) + testdata.InsertContactURN(rt, testdata.Org1, nil, urns.URN("telegram:200002"), 100) oa, err := models.GetOrgAssets(ctx, rt, testdata.Org1.ID) require.NoError(t, err) - contact, flowContact, err := models.CreateContact(ctx, db, oa, models.UserID(1), "Rich", envs.Language(`kin`), []urns.URN{urns.URN("telegram:200001"), urns.URN("telegram:200002")}) + contact, flowContact, err := models.CreateContact(ctx, rt.DB, oa, models.UserID(1), "Rich", envs.Language(`kin`), []urns.URN{urns.URN("telegram:200001"), urns.URN("telegram:200002")}) require.NoError(t, err) assert.Equal(t, "Rich", contact.Name()) @@ -161,19 +164,19 @@ func TestCreateContact(t *testing.T) { assert.Len(t, flowContact.Groups().All(), 1) assert.Equal(t, assets.GroupUUID("d636c966-79c1-4417-9f1c-82ad629773a2"), flowContact.Groups().All()[0].UUID()) - _, _, err = models.CreateContact(ctx, db, oa, models.UserID(1), "Rich", envs.Language(`kin`), []urns.URN{urns.URN("telegram:200001")}) + _, _, err = models.CreateContact(ctx, rt.DB, oa, models.UserID(1), "Rich", envs.Language(`kin`), []urns.URN{urns.URN("telegram:200001")}) assert.EqualError(t, err, "URNs in use by other contacts") } func TestCreateContactRace(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetData) oa, err := models.GetOrgAssets(ctx, rt, testdata.Org1.ID) assert.NoError(t, err) - mdb := testsuite.NewMockDB(db, func(funcName string, call int) error { + mdb := testsuite.NewMockDB(rt.DB, func(funcName string, call int) error { // Make beginning a transaction take a while to create race condition. All threads will fetch // URN owners and decide nobody owns the URN, so all threads will try to create a new contact. 
if funcName == "BeginTxx" { @@ -195,15 +198,15 @@ func TestCreateContactRace(t *testing.T) { } func TestGetOrCreateContact(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetData) - testdata.InsertContactGroup(db, testdata.Org1, "dcc16d85-8274-4d19-a3c2-152d4ee99380", "Telegrammer", `telegram = 100001`) + testdata.InsertContactGroup(rt, testdata.Org1, "dcc16d85-8274-4d19-a3c2-152d4ee99380", "Telegrammer", `telegram = 100001`) // add some orphaned URNs - testdata.InsertContactURN(db, testdata.Org1, nil, urns.URN("telegram:200001"), 100) - testdata.InsertContactURN(db, testdata.Org1, nil, urns.URN("telegram:200002"), 100) + testdata.InsertContactURN(rt, testdata.Org1, nil, urns.URN("telegram:200001"), 100) + testdata.InsertContactURN(rt, testdata.Org1, nil, urns.URN("telegram:200002"), 100) contactIDSeq := models.ContactID(30000) newContact := func() models.ContactID { id := contactIDSeq; contactIDSeq++; return id } @@ -305,7 +308,7 @@ func TestGetOrCreateContact(t *testing.T) { } for i, tc := range tcs { - contact, flowContact, created, err := models.GetOrCreateContact(ctx, db, oa, tc.URNs, tc.ChannelID) + contact, flowContact, created, err := models.GetOrCreateContact(ctx, rt.DB, oa, tc.URNs, tc.ChannelID) assert.NoError(t, err, "%d: error creating contact", i) assert.Equal(t, tc.ContactID, contact.ID(), "%d: contact id mismatch", i) @@ -322,14 +325,14 @@ func TestGetOrCreateContact(t *testing.T) { } func TestGetOrCreateContactRace(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetData) oa, err := models.GetOrgAssets(ctx, rt, testdata.Org1.ID) assert.NoError(t, err) - mdb := testsuite.NewMockDB(db, func(funcName string, call int) error { + mdb := testsuite.NewMockDB(rt.DB, func(funcName string, call int) error { // Make beginning a transaction take a while to create race condition. 
All threads will fetch // URN owners and decide nobody owns the URN, so all threads will try to create a new contact. if funcName == "BeginTxx" { @@ -351,71 +354,73 @@ func TestGetOrCreateContactRace(t *testing.T) { } func TestGetOrCreateContactIDsFromURNs(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetData) - // add an orphaned URN - testdata.InsertContactURN(db, testdata.Org1, nil, urns.URN("telegram:200001"), 100) + oa, err := models.GetOrgAssets(ctx, rt, testdata.Org1.ID) + assert.NoError(t, err) - contactIDSeq := models.ContactID(30000) - newContact := func() models.ContactID { id := contactIDSeq; contactIDSeq++; return id } - prevContact := func() models.ContactID { return contactIDSeq - 1 } + // add an orphaned URN + testdata.InsertContactURN(rt, testdata.Org1, nil, urns.URN("telegram:200001"), 100) - org, err := models.GetOrgAssets(ctx, rt, testdata.Org1.ID) - assert.NoError(t, err) + cathy, _ := testdata.Cathy.Load(rt, oa) tcs := []struct { - OrgID models.OrgID - URNs []urns.URN - ContactIDs map[urns.URN]models.ContactID + orgID models.OrgID + urns []urns.URN + fetched map[urns.URN]*models.Contact + created []urns.URN }{ { - testdata.Org1.ID, - []urns.URN{testdata.Cathy.URN}, - map[urns.URN]models.ContactID{testdata.Cathy.URN: testdata.Cathy.ID}, + orgID: testdata.Org1.ID, + urns: []urns.URN{testdata.Cathy.URN}, + fetched: map[urns.URN]*models.Contact{ + testdata.Cathy.URN: cathy, + }, + created: []urns.URN{}, }, { - testdata.Org1.ID, - []urns.URN{urns.URN(testdata.Cathy.URN.String() + "?foo=bar")}, - map[urns.URN]models.ContactID{urns.URN(testdata.Cathy.URN.String() + "?foo=bar"): testdata.Cathy.ID}, + orgID: testdata.Org1.ID, + urns: []urns.URN{urns.URN(testdata.Cathy.URN.String() + "?foo=bar")}, + fetched: map[urns.URN]*models.Contact{ + urns.URN(testdata.Cathy.URN.String() + "?foo=bar"): cathy, + }, + created: []urns.URN{}, }, { - testdata.Org1.ID, - 
[]urns.URN{testdata.Cathy.URN, urns.URN("telegram:100001")}, - map[urns.URN]models.ContactID{ - testdata.Cathy.URN: testdata.Cathy.ID, - urns.URN("telegram:100001"): newContact(), + orgID: testdata.Org1.ID, + urns: []urns.URN{testdata.Cathy.URN, urns.URN("telegram:100001")}, + fetched: map[urns.URN]*models.Contact{ + testdata.Cathy.URN: cathy, }, + created: []urns.URN{"telegram:100001"}, }, { - testdata.Org1.ID, - []urns.URN{urns.URN("telegram:100001")}, - map[urns.URN]models.ContactID{urns.URN("telegram:100001"): prevContact()}, - }, - { - testdata.Org1.ID, - []urns.URN{urns.URN("telegram:200001")}, - map[urns.URN]models.ContactID{urns.URN("telegram:200001"): newContact()}, // new contact assigned orphaned URN + orgID: testdata.Org1.ID, + urns: []urns.URN{urns.URN("telegram:200001")}, + fetched: map[urns.URN]*models.Contact{}, + created: []urns.URN{"telegram:200001"}, // new contact assigned orphaned URN }, } for i, tc := range tcs { - ids, err := models.GetOrCreateContactIDsFromURNs(ctx, db, org, tc.URNs) + fetched, created, err := models.GetOrCreateContactsFromURNs(ctx, rt.DB, oa, tc.urns) assert.NoError(t, err, "%d: error getting contact ids", i) - assert.Equal(t, tc.ContactIDs, ids, "%d: mismatch in contact ids", i) + assert.Equal(t, tc.fetched, fetched, "%d: fetched contacts mismatch", i) + assert.Equal(t, tc.created, maps.Keys(created), "%d: created contacts mismatch", i) } } -func TestGetOrCreateContactIDsFromURNsRace(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() +func TestGetOrCreateContactsFromURNsRace(t *testing.T) { + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetData) oa, err := models.GetOrgAssets(ctx, rt, testdata.Org1.ID) assert.NoError(t, err) - mdb := testsuite.NewMockDB(db, func(funcName string, call int) error { + mdb := testsuite.NewMockDB(rt.DB, func(funcName string, call int) error { // Make beginning a transaction take a while to create race condition. 
All threads will fetch // URN owners and decide nobody owns the URN, so all threads will try to create a new contact. if funcName == "BeginTxx" { @@ -424,13 +429,13 @@ func TestGetOrCreateContactIDsFromURNsRace(t *testing.T) { return nil }) - var contacts [2]models.ContactID + var contacts [2]*models.Contact var errs [2]error test.RunConcurrently(2, func(i int) { - var cmap map[urns.URN]models.ContactID - cmap, errs[i] = models.GetOrCreateContactIDsFromURNs(ctx, mdb, oa, []urns.URN{urns.URN("telegram:100007")}) - contacts[i] = cmap[urns.URN("telegram:100007")] + var created map[urns.URN]*models.Contact + _, created, errs[i] = models.GetOrCreateContactsFromURNs(ctx, mdb, oa, []urns.URN{urns.URN("telegram:100007")}) + contacts[i] = created[urns.URN("telegram:100007")] }) require.NoError(t, errs[0]) @@ -439,9 +444,9 @@ func TestGetOrCreateContactIDsFromURNsRace(t *testing.T) { } func TestGetContactIDsFromReferences(t *testing.T) { - ctx, _, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() - ids, err := models.GetContactIDsFromReferences(ctx, db, testdata.Org1.ID, []*flows.ContactReference{ + ids, err := models.GetContactIDsFromReferences(ctx, rt.DB, testdata.Org1.ID, []*flows.ContactReference{ flows.NewContactReference(testdata.Cathy.UUID, "Cathy"), flows.NewContactReference(testdata.Bob.UUID, "Bob"), }) @@ -450,23 +455,23 @@ func TestGetContactIDsFromReferences(t *testing.T) { } func TestStopContact(t *testing.T) { - ctx, _, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetAll) // stop kathy - err := models.StopContact(ctx, db, testdata.Org1.ID, testdata.Cathy.ID) + err := models.StopContact(ctx, rt.DB, testdata.Org1.ID, testdata.Cathy.ID) assert.NoError(t, err) // verify she's only in the stopped group - assertdb.Query(t, db, `SELECT count(*) FROM contacts_contactgroup_contacts WHERE contact_id = $1`, testdata.Cathy.ID).Returns(1) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM contacts_contactgroup_contacts 
WHERE contact_id = $1`, testdata.Cathy.ID).Returns(1) // verify she's stopped - assertdb.Query(t, db, `SELECT count(*) FROM contacts_contact WHERE id = $1 AND status = 'S' AND is_active = TRUE`, testdata.Cathy.ID).Returns(1) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM contacts_contact WHERE id = $1 AND status = 'S' AND is_active = TRUE`, testdata.Cathy.ID).Returns(1) } func TestUpdateContactLastSeenAndModifiedOn(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetAll) @@ -475,20 +480,20 @@ func TestUpdateContactLastSeenAndModifiedOn(t *testing.T) { t0 := time.Now() - err = models.UpdateContactModifiedOn(ctx, db, []models.ContactID{testdata.Cathy.ID}) + err = models.UpdateContactModifiedOn(ctx, rt.DB, []models.ContactID{testdata.Cathy.ID}) assert.NoError(t, err) - assertdb.Query(t, db, `SELECT count(*) FROM contacts_contact WHERE modified_on > $1 AND last_seen_on IS NULL`, t0).Returns(1) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM contacts_contact WHERE modified_on > $1 AND last_seen_on IS NULL`, t0).Returns(1) t1 := time.Now().Truncate(time.Millisecond) time.Sleep(time.Millisecond * 5) - err = models.UpdateContactLastSeenOn(ctx, db, testdata.Cathy.ID, t1) + err = models.UpdateContactLastSeenOn(ctx, rt.DB, testdata.Cathy.ID, t1) assert.NoError(t, err) - assertdb.Query(t, db, `SELECT count(*) FROM contacts_contact WHERE modified_on > $1 AND last_seen_on = $1`, t1).Returns(1) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM contacts_contact WHERE modified_on > $1 AND last_seen_on = $1`, t1).Returns(1) - cathy, err := models.LoadContact(ctx, db, oa, testdata.Cathy.ID) + cathy, err := models.LoadContact(ctx, rt.DB, oa, testdata.Cathy.ID) require.NoError(t, err) assert.NotNil(t, cathy.LastSeenOn()) assert.True(t, t1.Equal(*cathy.LastSeenOn())) @@ -498,50 +503,50 @@ func TestUpdateContactLastSeenAndModifiedOn(t *testing.T) { time.Sleep(time.Millisecond * 5) // can update directly from the 
contact object - err = cathy.UpdateLastSeenOn(ctx, db, t2) + err = cathy.UpdateLastSeenOn(ctx, rt.DB, t2) require.NoError(t, err) assert.True(t, t2.Equal(*cathy.LastSeenOn())) // and that also updates the database - cathy, err = models.LoadContact(ctx, db, oa, testdata.Cathy.ID) + cathy, err = models.LoadContact(ctx, rt.DB, oa, testdata.Cathy.ID) require.NoError(t, err) assert.True(t, t2.Equal(*cathy.LastSeenOn())) assert.True(t, cathy.ModifiedOn().After(t2)) } func TestUpdateContactStatus(t *testing.T) { - ctx, _, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetAll) - err := models.UpdateContactStatus(ctx, db, []*models.ContactStatusChange{}) + err := models.UpdateContactStatus(ctx, rt.DB, []*models.ContactStatusChange{}) assert.NoError(t, err) - assertdb.Query(t, db, `SELECT count(*) FROM contacts_contact WHERE id = $1 AND status = 'B'`, testdata.Cathy.ID).Returns(0) - assertdb.Query(t, db, `SELECT count(*) FROM contacts_contact WHERE id = $1 AND status = 'S'`, testdata.Cathy.ID).Returns(0) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM contacts_contact WHERE id = $1 AND status = 'B'`, testdata.Cathy.ID).Returns(0) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM contacts_contact WHERE id = $1 AND status = 'S'`, testdata.Cathy.ID).Returns(0) changes := make([]*models.ContactStatusChange, 0, 1) changes = append(changes, &models.ContactStatusChange{testdata.Cathy.ID, flows.ContactStatusBlocked}) - err = models.UpdateContactStatus(ctx, db, changes) + err = models.UpdateContactStatus(ctx, rt.DB, changes) assert.NoError(t, err) - assertdb.Query(t, db, `SELECT count(*) FROM contacts_contact WHERE id = $1 AND status = 'B'`, testdata.Cathy.ID).Returns(1) - assertdb.Query(t, db, `SELECT count(*) FROM contacts_contact WHERE id = $1 AND status = 'S'`, testdata.Cathy.ID).Returns(0) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM contacts_contact WHERE id = $1 AND status = 'B'`, testdata.Cathy.ID).Returns(1) + 
assertdb.Query(t, rt.DB, `SELECT count(*) FROM contacts_contact WHERE id = $1 AND status = 'S'`, testdata.Cathy.ID).Returns(0) changes = make([]*models.ContactStatusChange, 0, 1) changes = append(changes, &models.ContactStatusChange{testdata.Cathy.ID, flows.ContactStatusStopped}) - err = models.UpdateContactStatus(ctx, db, changes) + err = models.UpdateContactStatus(ctx, rt.DB, changes) assert.NoError(t, err) - assertdb.Query(t, db, `SELECT count(*) FROM contacts_contact WHERE id = $1 AND status = 'B'`, testdata.Cathy.ID).Returns(0) - assertdb.Query(t, db, `SELECT count(*) FROM contacts_contact WHERE id = $1 AND status = 'S'`, testdata.Cathy.ID).Returns(1) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM contacts_contact WHERE id = $1 AND status = 'B'`, testdata.Cathy.ID).Returns(0) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM contacts_contact WHERE id = $1 AND status = 'S'`, testdata.Cathy.ID).Returns(1) } func TestUpdateContactURNs(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetAll) @@ -549,11 +554,11 @@ func TestUpdateContactURNs(t *testing.T) { assert.NoError(t, err) numInitialURNs := 0 - db.Get(&numInitialURNs, `SELECT count(*) FROM contacts_contacturn`) + rt.DB.Get(&numInitialURNs, `SELECT count(*) FROM contacts_contacturn`) assertContactURNs := func(contactID models.ContactID, expected []string) { var actual []string - err = db.Select(&actual, `SELECT identity FROM contacts_contacturn WHERE contact_id = $1 ORDER BY priority DESC`, contactID) + err = rt.DB.Select(&actual, `SELECT identity FROM contacts_contacturn WHERE contact_id = $1 ORDER BY priority DESC`, contactID) assert.NoError(t, err) assert.Equal(t, expected, actual, "URNs mismatch for contact %d", contactID) } @@ -566,35 +571,35 @@ func TestUpdateContactURNs(t *testing.T) { bobURN := urns.URN(fmt.Sprintf("tel:+16055742222?id=%d", testdata.Bob.URNID)) // give Cathy a new higher priority URN - err = 
models.UpdateContactURNs(ctx, db, oa, []*models.ContactURNsChanged{{testdata.Cathy.ID, testdata.Org1.ID, []urns.URN{"tel:+16055700001", cathyURN}}}) + err = models.UpdateContactURNs(ctx, rt.DB, oa, []*models.ContactURNsChanged{{testdata.Cathy.ID, testdata.Org1.ID, []urns.URN{"tel:+16055700001", cathyURN}}}) assert.NoError(t, err) assertContactURNs(testdata.Cathy.ID, []string{"tel:+16055700001", "tel:+16055741111"}) // give Bob a new lower priority URN - err = models.UpdateContactURNs(ctx, db, oa, []*models.ContactURNsChanged{{testdata.Bob.ID, testdata.Org1.ID, []urns.URN{bobURN, "tel:+16055700002"}}}) + err = models.UpdateContactURNs(ctx, rt.DB, oa, []*models.ContactURNsChanged{{testdata.Bob.ID, testdata.Org1.ID, []urns.URN{bobURN, "tel:+16055700002"}}}) assert.NoError(t, err) assertContactURNs(testdata.Bob.ID, []string{"tel:+16055742222", "tel:+16055700002"}) - assertdb.Query(t, db, `SELECT count(*) FROM contacts_contacturn WHERE contact_id IS NULL`).Returns(0) // shouldn't be any orphan URNs - assertdb.Query(t, db, `SELECT count(*) FROM contacts_contacturn`).Returns(numInitialURNs + 2) // but 2 new URNs + assertdb.Query(t, rt.DB, `SELECT count(*) FROM contacts_contacturn WHERE contact_id IS NULL`).Returns(0) // shouldn't be any orphan URNs + assertdb.Query(t, rt.DB, `SELECT count(*) FROM contacts_contacturn`).Returns(numInitialURNs + 2) // but 2 new URNs // remove a URN from Cathy - err = models.UpdateContactURNs(ctx, db, oa, []*models.ContactURNsChanged{{testdata.Cathy.ID, testdata.Org1.ID, []urns.URN{"tel:+16055700001"}}}) + err = models.UpdateContactURNs(ctx, rt.DB, oa, []*models.ContactURNsChanged{{testdata.Cathy.ID, testdata.Org1.ID, []urns.URN{"tel:+16055700001"}}}) assert.NoError(t, err) assertContactURNs(testdata.Cathy.ID, []string{"tel:+16055700001"}) - assertdb.Query(t, db, `SELECT count(*) FROM contacts_contacturn WHERE contact_id IS NULL`).Returns(1) // now orphaned + assertdb.Query(t, rt.DB, `SELECT count(*) FROM contacts_contacturn WHERE contact_id 
IS NULL`).Returns(1) // now orphaned // steal a URN from Bob - err = models.UpdateContactURNs(ctx, db, oa, []*models.ContactURNsChanged{{testdata.Cathy.ID, testdata.Org1.ID, []urns.URN{"tel:+16055700001", "tel:+16055700002"}}}) + err = models.UpdateContactURNs(ctx, rt.DB, oa, []*models.ContactURNsChanged{{testdata.Cathy.ID, testdata.Org1.ID, []urns.URN{"tel:+16055700001", "tel:+16055700002"}}}) assert.NoError(t, err) assertContactURNs(testdata.Cathy.ID, []string{"tel:+16055700001", "tel:+16055700002"}) assertContactURNs(testdata.Bob.ID, []string{"tel:+16055742222"}) // steal the URN back from Cathy whilst simulataneously adding new URN to Cathy and not-changing anything for George - err = models.UpdateContactURNs(ctx, db, oa, []*models.ContactURNsChanged{ + err = models.UpdateContactURNs(ctx, rt.DB, oa, []*models.ContactURNsChanged{ {testdata.Bob.ID, testdata.Org1.ID, []urns.URN{"tel:+16055742222", "tel:+16055700002"}}, {testdata.Cathy.ID, testdata.Org1.ID, []urns.URN{"tel:+16055700001", "tel:+16055700003"}}, {testdata.George.ID, testdata.Org1.ID, []urns.URN{"tel:+16055743333"}}, @@ -605,5 +610,51 @@ func TestUpdateContactURNs(t *testing.T) { assertContactURNs(testdata.Bob.ID, []string{"tel:+16055742222", "tel:+16055700002"}) assertContactURNs(testdata.George.ID, []string{"tel:+16055743333"}) - assertdb.Query(t, db, `SELECT count(*) FROM contacts_contacturn`).Returns(numInitialURNs + 3) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM contacts_contacturn`).Returns(numInitialURNs + 3) +} + +func TestLockContacts(t *testing.T) { + ctx, rt := testsuite.Runtime() + + defer testsuite.Reset(testsuite.ResetRedis) + + // grab lock for contact 102 + models.LockContacts(ctx, rt, testdata.Org1.ID, []models.ContactID{102}, time.Second) + + assertredis.Exists(t, rt.RP, "lock:c:1:102") + + // try to get locks for 101, 102, 103 + locks, skipped, err := models.LockContacts(ctx, rt, testdata.Org1.ID, []models.ContactID{101, 102, 103}, time.Second) + assert.NoError(t, err) + 
assert.ElementsMatch(t, []models.ContactID{101, 103}, maps.Keys(locks)) + assert.Equal(t, []models.ContactID{102}, skipped) // because it's already locked + + assertredis.Exists(t, rt.RP, "lock:c:1:101") + assertredis.Exists(t, rt.RP, "lock:c:1:102") + assertredis.Exists(t, rt.RP, "lock:c:1:103") + + err = models.UnlockContacts(rt, testdata.Org1.ID, locks) + assert.NoError(t, err) + + assertredis.NotExists(t, rt.RP, "lock:c:1:101") + assertredis.Exists(t, rt.RP, "lock:c:1:102") + assertredis.NotExists(t, rt.RP, "lock:c:1:103") + + // lock contact 103 as well (102 is still locked) so that both 102 and 103 are held + models.LockContacts(ctx, rt, testdata.Org1.ID, []models.ContactID{103}, time.Second) + + // create a new context with a 1 second time limit + ctx2, cancel := context.WithTimeout(ctx, time.Second) + defer cancel() + + start := time.Now() + + _, _, err = models.LockContacts(ctx2, rt, testdata.Org1.ID, []models.ContactID{101, 102, 103, 104}, time.Second) + assert.EqualError(t, err, "context deadline exceeded") + + // call should have completed in just over the context deadline + assert.Less(t, time.Since(start), time.Second*3) + + // since we errored, any locks we grabbed before the error should have been released + assertredis.NotExists(t, rt.RP, "lock:c:1:101") } diff --git a/core/models/fields_test.go b/core/models/fields_test.go index d26617fb2..52154c628 100644 --- a/core/models/fields_test.go +++ b/core/models/fields_test.go @@ -12,7 +12,7 @@ import ( ) func TestFields(t *testing.T) { - ctx, rt, _, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() oa, err := models.GetOrgAssetsWithRefresh(ctx, rt, testdata.Org1.ID, models.RefreshFields) require.NoError(t, err) diff --git a/core/models/flow_stats_test.go b/core/models/flow_stats_test.go index 05d975781..c9843ee6c 100644 --- a/core/models/flow_stats_test.go +++ b/core/models/flow_stats_test.go @@ -14,7 +14,7 @@ import ( ) func TestRecordFlowStatistics(t *testing.T) { - ctx, rt, _, rp := testsuite.Get() + ctx, rt := 
testsuite.Runtime() defer testsuite.Reset(testsuite.ResetRedis) @@ -34,12 +34,12 @@ func TestRecordFlowStatistics(t *testing.T) { err = models.RecordFlowStatistics(ctx, rt, nil, []flows.Session{session1, session2, session3}, []flows.Sprint{session1Sprint1, session2Sprint1, session3Sprint1}) require.NoError(t, err) - assertredis.Keys(t, rp, []string{ + assertredis.Keys(t, rt.RP, []string{ "recent_contacts:5fd2e537-0534-4c12-8425-bef87af09d46:072b95b3-61c3-4e0e-8dd1-eb7481083f94", // "what's your fav color" -> color split }) // all 3 contacts went from first msg to the color split - no operands recorded for this segment - assertredis.ZRange(t, rp, "recent_contacts:5fd2e537-0534-4c12-8425-bef87af09d46:072b95b3-61c3-4e0e-8dd1-eb7481083f94", 0, -1, + assertredis.ZRange(t, rt.RP, "recent_contacts:5fd2e537-0534-4c12-8425-bef87af09d46:072b95b3-61c3-4e0e-8dd1-eb7481083f94", 0, -1, []string{"LZbbzXDPJH|123|", "reuPYVP90u|234|", "qWARtWDACk|345|"}, ) @@ -57,7 +57,7 @@ func TestRecordFlowStatistics(t *testing.T) { err = models.RecordFlowStatistics(ctx, rt, nil, []flows.Session{session3}, []flows.Sprint{session3Sprint3}) require.NoError(t, err) - assertredis.Keys(t, rp, []string{ + assertredis.Keys(t, rt.RP, []string{ "recent_contacts:5fd2e537-0534-4c12-8425-bef87af09d46:072b95b3-61c3-4e0e-8dd1-eb7481083f94", // "what's your fav color" -> color split "recent_contacts:c02fc3ba-369a-4c87-9bc4-c3b376bda6d2:57b50d33-2b5a-4726-82de-9848c61eff6e", // color split :: Blue exit -> next node "recent_contacts:ea6c38dc-11e2-4616-9f3e-577e44765d44:8712db6b-25ff-4789-892c-581f24eeeb95", // color split :: Other exit -> next node @@ -68,17 +68,17 @@ func TestRecordFlowStatistics(t *testing.T) { }) // check recent operands for color split :: Blue exit -> next node - assertredis.ZRange(t, rp, "recent_contacts:c02fc3ba-369a-4c87-9bc4-c3b376bda6d2:57b50d33-2b5a-4726-82de-9848c61eff6e", 0, -1, + assertredis.ZRange(t, rt.RP, 
"recent_contacts:c02fc3ba-369a-4c87-9bc4-c3b376bda6d2:57b50d33-2b5a-4726-82de-9848c61eff6e", 0, -1, []string{"2SS5dyuJzp|123|blue", "6MBPV0gqT9|234|BLUE"}, ) // check recent operands for color split :: Other exit -> next node - assertredis.ZRange(t, rp, "recent_contacts:ea6c38dc-11e2-4616-9f3e-577e44765d44:8712db6b-25ff-4789-892c-581f24eeeb95", 0, -1, + assertredis.ZRange(t, rt.RP, "recent_contacts:ea6c38dc-11e2-4616-9f3e-577e44765d44:8712db6b-25ff-4789-892c-581f24eeeb95", 0, -1, []string{"uI8bPiuaeA|345|teal", "2Vz/MpdX9s|345|azure"}, ) // check recent operands for split by expression :: Other exit -> next node - assertredis.ZRange(t, rp, "recent_contacts:2b698218-87e5-4ab8-922e-e65f91d12c10:88d8bf00-51ce-4e5e-aae8-4f957a0761a0", 0, -1, + assertredis.ZRange(t, rt.RP, "recent_contacts:2b698218-87e5-4ab8-922e-e65f91d12c10:88d8bf00-51ce-4e5e-aae8-4f957a0761a0", 0, -1, []string{"2MsZZ/N3TH|123|0", "KKLrT60Tr9|234|0"}, ) } diff --git a/core/models/flows.go b/core/models/flows.go index 32094ac0b..48a2078fa 100644 --- a/core/models/flows.go +++ b/core/models/flows.go @@ -10,7 +10,7 @@ import ( "github.com/nyaruka/gocommon/dbutil" "github.com/nyaruka/goflow/assets" "github.com/nyaruka/goflow/flows" - "github.com/nyaruka/null" + "github.com/nyaruka/null/v2" "github.com/jmoiron/sqlx" "github.com/pkg/errors" @@ -18,7 +18,7 @@ import ( ) // FlowID is the type for flow IDs -type FlowID null.Int +type FlowID int // NilFlowID is nil value for flow IDs const NilFlowID = FlowID(0) @@ -90,7 +90,7 @@ func (f *Flow) Version() string { return f.f.Version } func (f *Flow) IVRRetryWait() *time.Duration { wait := CallRetryWait - value := f.f.Config.Get(flowConfigIVRRetryMinutes, nil) + value := f.f.Config[flowConfigIVRRetryMinutes] fv, isFloat := value.(float64) if isFloat { minutes := int(fv) @@ -225,22 +225,7 @@ var sqlSelectFlowByName = fmt.Sprintf(baseSqlSelectFlow, ) var sqlSelectFlowByID = fmt.Sprintf(baseSqlSelectFlow, `WHERE org_id = $1 AND id = $2 AND is_active = TRUE AND 
is_archived = FALSE`) -// MarshalJSON marshals into JSON. 0 values will become null -func (i FlowID) MarshalJSON() ([]byte, error) { - return null.Int(i).MarshalJSON() -} - -// UnmarshalJSON unmarshals from JSON. null values become 0 -func (i *FlowID) UnmarshalJSON(b []byte) error { - return null.UnmarshalInt(b, (*null.Int)(i)) -} - -// Value returns the db value, null is returned for 0 -func (i FlowID) Value() (driver.Value, error) { - return null.Int(i).Value() -} - -// Scan scans from the db value. null values become 0 -func (i *FlowID) Scan(value interface{}) error { - return null.ScanInt(value, (*null.Int)(i)) -} +func (i *FlowID) Scan(value any) error { return null.ScanInt(value, i) } +func (i FlowID) Value() (driver.Value, error) { return null.IntValue(i) } +func (i *FlowID) UnmarshalJSON(b []byte) error { return null.UnmarshalInt(b, i) } +func (i FlowID) MarshalJSON() ([]byte, error) { return null.MarshalInt(i) } diff --git a/core/models/flows_test.go b/core/models/flows_test.go index 656e4d8d3..9566c85e7 100644 --- a/core/models/flows_test.go +++ b/core/models/flows_test.go @@ -15,16 +15,16 @@ import ( ) func TestLoadFlows(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetAll) - db.MustExec(`UPDATE flows_flow SET metadata = '{"ivr_retry": 30}'::json WHERE id = $1`, testdata.IVRFlow.ID) - db.MustExec(`UPDATE flows_flow SET metadata = '{"ivr_retry": -1}'::json WHERE id = $1`, testdata.SurveyorFlow.ID) - db.MustExec(`UPDATE flows_flow SET expires_after_minutes = 720 WHERE id = $1`, testdata.Favorites.ID) - db.MustExec(`UPDATE flows_flow SET expires_after_minutes = 1 WHERE id = $1`, testdata.PickANumber.ID) // too small for messaging - db.MustExec(`UPDATE flows_flow SET expires_after_minutes = 12345678 WHERE id = $1`, testdata.SingleMessage.ID) // too large for messaging - db.MustExec(`UPDATE flows_flow SET expires_after_minutes = 123 WHERE id = $1`, testdata.SurveyorFlow.ID) // surveyor 
flows shouldn't have expires + rt.DB.MustExec(`UPDATE flows_flow SET metadata = '{"ivr_retry": 30}'::json WHERE id = $1`, testdata.IVRFlow.ID) + rt.DB.MustExec(`UPDATE flows_flow SET metadata = '{"ivr_retry": -1}'::json WHERE id = $1`, testdata.SurveyorFlow.ID) + rt.DB.MustExec(`UPDATE flows_flow SET expires_after_minutes = 720 WHERE id = $1`, testdata.Favorites.ID) + rt.DB.MustExec(`UPDATE flows_flow SET expires_after_minutes = 1 WHERE id = $1`, testdata.PickANumber.ID) // too small for messaging + rt.DB.MustExec(`UPDATE flows_flow SET expires_after_minutes = 12345678 WHERE id = $1`, testdata.SingleMessage.ID) // too large for messaging + rt.DB.MustExec(`UPDATE flows_flow SET expires_after_minutes = 123 WHERE id = $1`, testdata.SurveyorFlow.ID) // surveyor flows shouldn't have expires sixtyMinutes := 60 * time.Minute thirtyMinutes := 30 * time.Minute @@ -115,33 +115,33 @@ func TestLoadFlows(t *testing.T) { for _, tc := range tcs { // test loading by UUID - dbFlow, err := models.LoadFlowByUUID(ctx, db, tc.org.ID, tc.uuid) + dbFlow, err := models.LoadFlowByUUID(ctx, rt.DB, tc.org.ID, tc.uuid) assert.NoError(t, err) assertFlow(&tc, dbFlow) // test loading by name - dbFlow, err = models.LoadFlowByName(ctx, db, tc.org.ID, tc.name) + dbFlow, err = models.LoadFlowByName(ctx, rt.DB, tc.org.ID, tc.name) assert.NoError(t, err) assertFlow(&tc, dbFlow) // test loading by ID - dbFlow, err = models.LoadFlowByID(ctx, db, tc.org.ID, tc.id) + dbFlow, err = models.LoadFlowByID(ctx, rt.DB, tc.org.ID, tc.id) assert.NoError(t, err) assertFlow(&tc, dbFlow) } // test loading flow with wrong org - dbFlow, err := models.LoadFlowByID(ctx, db, testdata.Org2.ID, testdata.Favorites.ID) + dbFlow, err := models.LoadFlowByID(ctx, rt.DB, testdata.Org2.ID, testdata.Favorites.ID) assert.NoError(t, err) assert.Nil(t, dbFlow) } func TestFlowIDForUUID(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() org, _ := models.GetOrgAssets(ctx, rt, testdata.Org1.ID) - tx, err 
:= db.BeginTxx(ctx, nil) + tx, err := rt.DB.BeginTxx(ctx, nil) assert.NoError(t, err) id, err := models.FlowIDForUUID(ctx, tx, org, testdata.Favorites.UUID) @@ -152,7 +152,7 @@ func TestFlowIDForUUID(t *testing.T) { tx.MustExec(`UPDATE flows_flow SET is_active = FALSE WHERE id = $1`, testdata.Favorites.ID) tx.Commit() - tx, err = db.BeginTxx(ctx, nil) + tx, err = rt.DB.BeginTxx(ctx, nil) assert.NoError(t, err) defer tx.Rollback() diff --git a/core/models/globals_test.go b/core/models/globals_test.go index 6c61be52f..e17872684 100644 --- a/core/models/globals_test.go +++ b/core/models/globals_test.go @@ -11,14 +11,14 @@ import ( ) func TestLoadGlobals(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer func() { - db.MustExec(`UPDATE globals_global SET value = 'Nyaruka' WHERE org_id = $1 AND key = $2`, testdata.Org1.ID, "org_name") + rt.DB.MustExec(`UPDATE globals_global SET value = 'Nyaruka' WHERE org_id = $1 AND key = $2`, testdata.Org1.ID, "org_name") }() // set one of our global values to empty - db.MustExec(`UPDATE globals_global SET value = '' WHERE org_id = $1 AND key = $2`, testdata.Org1.ID, "org_name") + rt.DB.MustExec(`UPDATE globals_global SET value = '' WHERE org_id = $1 AND key = $2`, testdata.Org1.ID, "org_name") oa, err := models.GetOrgAssetsWithRefresh(ctx, rt, testdata.Org1.ID, models.RefreshGlobals) require.NoError(t, err) diff --git a/core/models/groups_test.go b/core/models/groups_test.go index fee836534..16c611dea 100644 --- a/core/models/groups_test.go +++ b/core/models/groups_test.go @@ -8,15 +8,14 @@ import ( "github.com/nyaruka/mailroom/core/models" "github.com/nyaruka/mailroom/testsuite" "github.com/nyaruka/mailroom/testsuite/testdata" - "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) func TestLoadGroups(t *testing.T) { - ctx, _, db0, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() - db := testsuite.NewMockDB(db0, func(funcName string, call int) error { + db := 
testsuite.NewMockDB(rt.DB, func(funcName string, call int) error { // fail first query for groups if funcName == "QueryxContext" && call == 0 { return errors.New("boom") diff --git a/core/models/http_logs.go b/core/models/http_logs.go index c5f98c086..e2327b685 100644 --- a/core/models/http_logs.go +++ b/core/models/http_logs.go @@ -6,11 +6,11 @@ import ( "time" "github.com/nyaruka/goflow/flows" - "github.com/nyaruka/null" + "github.com/nyaruka/null/v2" ) // HTTPLogID is our type for HTTPLog ids -type HTTPLogID null.Int +type HTTPLogID int // HTTPLogType is the type for the type of log this is type HTTPLogType string @@ -108,25 +108,10 @@ func InsertHTTPLogs(ctx context.Context, tx Queryer, logs []*HTTPLog) error { return BulkQuery(ctx, "inserted http logs", tx, insertHTTPLogsSQL, logs) } -// MarshalJSON marshals into JSON. 0 values will become null -func (i HTTPLogID) MarshalJSON() ([]byte, error) { - return null.Int(i).MarshalJSON() -} - -// UnmarshalJSON unmarshals from JSON. null values become 0 -func (i *HTTPLogID) UnmarshalJSON(b []byte) error { - return null.UnmarshalInt(b, (*null.Int)(i)) -} - -// Value returns the db value, null is returned for 0 -func (i HTTPLogID) Value() (driver.Value, error) { - return null.Int(i).Value() -} - -// Scan scans from the db value. 
null values become 0 -func (i *HTTPLogID) Scan(value interface{}) error { - return null.ScanInt(value, (*null.Int)(i)) -} +func (i *HTTPLogID) Scan(value any) error { return null.ScanInt(value, i) } +func (i HTTPLogID) Value() (driver.Value, error) { return null.IntValue(i) } +func (i *HTTPLogID) UnmarshalJSON(b []byte) error { return null.UnmarshalInt(b, i) } +func (i HTTPLogID) MarshalJSON() ([]byte, error) { return null.MarshalInt(i) } // HTTPLogger is a logger for HTTPLogs type HTTPLogger struct { diff --git a/core/models/http_logs_test.go b/core/models/http_logs_test.go index 38bac0ddf..50abe0929 100644 --- a/core/models/http_logs_test.go +++ b/core/models/http_logs_test.go @@ -17,36 +17,36 @@ import ( ) func TestHTTPLogs(t *testing.T) { - ctx, _, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() - defer func() { db.MustExec(`DELETE FROM request_logs_httplog`) }() + defer func() { rt.DB.MustExec(`DELETE FROM request_logs_httplog`) }() // insert a classifier log log := models.NewClassifierCalledLog(testdata.Org1.ID, testdata.Wit.ID, "http://foo.bar", 200, "GET /", "STATUS 200", false, time.Second, 0, time.Now()) - err := models.InsertHTTPLogs(ctx, db, []*models.HTTPLog{log}) + err := models.InsertHTTPLogs(ctx, rt.DB, []*models.HTTPLog{log}) assert.Nil(t, err) - assertdb.Query(t, db, `SELECT count(*) from request_logs_httplog WHERE org_id = $1 AND status_code = 200 AND classifier_id = $2 AND is_error = FALSE`, testdata.Org1.ID, testdata.Wit.ID).Returns(1) + assertdb.Query(t, rt.DB, `SELECT count(*) from request_logs_httplog WHERE org_id = $1 AND status_code = 200 AND classifier_id = $2 AND is_error = FALSE`, testdata.Org1.ID, testdata.Wit.ID).Returns(1) // insert a log with nil response log = models.NewClassifierCalledLog(testdata.Org1.ID, testdata.Wit.ID, "http://foo.bar", 0, "GET /", "", true, time.Second, 0, time.Now()) - err = models.InsertHTTPLogs(ctx, db, []*models.HTTPLog{log}) + err = models.InsertHTTPLogs(ctx, rt.DB, []*models.HTTPLog{log}) 
assert.Nil(t, err) - assertdb.Query(t, db, `SELECT count(*) from request_logs_httplog WHERE org_id = $1 AND status_code = 0 AND classifier_id = $2 AND is_error = TRUE AND response IS NULL`, testdata.Org1.ID, testdata.Wit.ID).Returns(1) + assertdb.Query(t, rt.DB, `SELECT count(*) from request_logs_httplog WHERE org_id = $1 AND status_code = 0 AND classifier_id = $2 AND is_error = TRUE AND response IS NULL`, testdata.Org1.ID, testdata.Wit.ID).Returns(1) // insert a webhook log log = models.NewWebhookCalledLog(testdata.Org1.ID, testdata.Favorites.ID, "http://foo.bar", 400, "GET /", "HTTP 200", false, time.Second, 2, time.Now()) - err = models.InsertHTTPLogs(ctx, db, []*models.HTTPLog{log}) + err = models.InsertHTTPLogs(ctx, rt.DB, []*models.HTTPLog{log}) assert.Nil(t, err) - assertdb.Query(t, db, `SELECT count(*) from request_logs_httplog WHERE org_id = $1 AND status_code = 400 AND flow_id = $2 AND num_retries = 2`, testdata.Org1.ID, testdata.Favorites.ID).Returns(1) + assertdb.Query(t, rt.DB, `SELECT count(*) from request_logs_httplog WHERE org_id = $1 AND status_code = 400 AND flow_id = $2 AND num_retries = 2`, testdata.Org1.ID, testdata.Favorites.ID).Returns(1) } func TestHTTPLogger(t *testing.T) { - ctx, _, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() - defer func() { db.MustExec(`DELETE FROM request_logs_httplog`) }() + defer func() { rt.DB.MustExec(`DELETE FROM request_logs_httplog`) }() defer httpx.SetRequestor(httpx.DefaultRequestor) httpx.SetRequestor(httpx.NewMockRequestor(map[string][]*httpx.MockResponse{ @@ -56,7 +56,7 @@ func TestHTTPLogger(t *testing.T) { }, })) - mailgun, err := models.LookupTicketerByUUID(ctx, db, testdata.Mailgun.UUID) + mailgun, err := models.LookupTicketerByUUID(ctx, rt.DB, testdata.Mailgun.UUID) require.NoError(t, err) logger := &models.HTTPLogger{} @@ -75,8 +75,8 @@ func TestHTTPLogger(t *testing.T) { require.NoError(t, err) log(flows.NewHTTPLog(trace2, flows.HTTPStatusFromCode, nil)) - err = logger.Insert(ctx, db) + 
err = logger.Insert(ctx, rt.DB) assert.NoError(t, err) - assertdb.Query(t, db, `SELECT count(*) from request_logs_httplog WHERE org_id = $1 AND ticketer_id = $2`, testdata.Org1.ID, testdata.Mailgun.ID).Returns(2) + assertdb.Query(t, rt.DB, `SELECT count(*) from request_logs_httplog WHERE org_id = $1 AND ticketer_id = $2`, testdata.Org1.ID, testdata.Mailgun.ID).Returns(2) } diff --git a/core/models/imports.go b/core/models/imports.go index 5a51b37f8..ced20f157 100644 --- a/core/models/imports.go +++ b/core/models/imports.go @@ -16,20 +16,20 @@ import ( "github.com/nyaruka/goflow/flows" "github.com/nyaruka/goflow/flows/modifiers" "github.com/nyaruka/mailroom/runtime" - "github.com/nyaruka/null" + "github.com/nyaruka/null/v2" "github.com/pkg/errors" ) // ContactImportID is the type for contact import IDs -type ContactImportID null.Int +type ContactImportID int -func (i ContactImportID) MarshalJSON() ([]byte, error) { return null.Int(i).MarshalJSON() } -func (i *ContactImportID) UnmarshalJSON(b []byte) error { return null.UnmarshalInt(b, (*null.Int)(i)) } -func (i ContactImportID) Value() (driver.Value, error) { return null.Int(i).Value() } -func (i *ContactImportID) Scan(value interface{}) error { return null.ScanInt(value, (*null.Int)(i)) } +func (i *ContactImportID) Scan(value any) error { return null.ScanInt(value, i) } +func (i ContactImportID) Value() (driver.Value, error) { return null.IntValue(i) } +func (i *ContactImportID) UnmarshalJSON(b []byte) error { return null.UnmarshalInt(b, i) } +func (i ContactImportID) MarshalJSON() ([]byte, error) { return null.MarshalInt(i) } // ContactImportBatchID is the type for contact import batch IDs -type ContactImportBatchID int64 +type ContactImportBatchID int // ContactImportStatus is the status of an import type ContactImportStatus string @@ -104,9 +104,9 @@ type ContactImportBatch struct { } // Import does the actual import of this batch -func (b *ContactImportBatch) Import(ctx context.Context, rt *runtime.Runtime, 
orgID OrgID) error { +func (b *ContactImportBatch) Import(ctx context.Context, rt *runtime.Runtime, orgID OrgID, userID UserID) error { // if any error occurs this batch should be marked as failed - if err := b.tryImport(ctx, rt, orgID); err != nil { + if err := b.tryImport(ctx, rt, orgID, userID); err != nil { b.markFailed(ctx, rt.DB) return err } @@ -124,7 +124,7 @@ type importContact struct { errors []string } -func (b *ContactImportBatch) tryImport(ctx context.Context, rt *runtime.Runtime, orgID OrgID) error { +func (b *ContactImportBatch) tryImport(ctx context.Context, rt *runtime.Runtime, orgID OrgID, userID UserID) error { if err := b.markProcessing(ctx, rt.DB); err != nil { return errors.Wrap(err, "error marking as processing") } @@ -161,8 +161,7 @@ func (b *ContactImportBatch) tryImport(ctx context.Context, rt *runtime.Runtime, } // and apply in bulk - // TODO pass user here who created the import? - _, err = ApplyModifiers(ctx, rt, oa, NilUserID, modifiersByContact) + _, err = ApplyModifiers(ctx, rt, oa, userID, modifiersByContact) if err != nil { return errors.Wrap(err, "error applying modifiers") } @@ -189,6 +188,8 @@ func (b *ContactImportBatch) getOrCreateContacts(ctx context.Context, db Queryer addError := func(s string, args ...interface{}) { imp.errors = append(imp.errors, fmt.Sprintf(s, args...)) } spec := imp.spec + isActive := spec.Status == "" || spec.Status == flows.ContactStatusActive + uuid := spec.UUID if uuid != "" { imp.contact = contactsByUUID[uuid] @@ -228,6 +229,13 @@ func (b *ContactImportBatch) getOrCreateContacts(ctx context.Context, db Queryer addModifier(modifiers.NewLanguage(lang)) } } + if !isActive { + if spec.Status == flows.ContactStatusArchived || spec.Status == flows.ContactStatusBlocked || spec.Status == flows.ContactStatusStopped { + addModifier(modifiers.NewStatus(spec.Status)) + } else { + addError("'%s' is not a valid status", spec.Status) + } + } for key, value := range spec.Fields { field := sa.Fields().Get(key) @@ 
-238,7 +246,7 @@ func (b *ContactImportBatch) getOrCreateContacts(ctx context.Context, db Queryer } } - if len(spec.Groups) > 0 { + if len(spec.Groups) > 0 && isActive { groups := make([]*flows.Group, 0, len(spec.Groups)) for _, uuid := range spec.Groups { group := sa.Groups().Get(uuid) @@ -363,12 +371,13 @@ func LoadContactImportBatch(ctx context.Context, db Queryer, id ContactImportBat // ContactSpec describes a contact to be updated or created type ContactSpec struct { - UUID flows.ContactUUID `json:"uuid"` - Name *string `json:"name"` - Language *string `json:"language"` - URNs []urns.URN `json:"urns"` - Fields map[string]string `json:"fields"` - Groups []assets.GroupUUID `json:"groups"` + UUID flows.ContactUUID `json:"uuid"` + Name *string `json:"name"` + Language *string `json:"language"` + Status flows.ContactStatus `json:"status"` + URNs []urns.URN `json:"urns"` + Fields map[string]string `json:"fields"` + Groups []assets.GroupUUID `json:"groups"` ImportRow int `json:"_import_row"` } diff --git a/core/models/imports_test.go b/core/models/imports_test.go index 3243fc4ab..827b6ee42 100644 --- a/core/models/imports_test.go +++ b/core/models/imports_test.go @@ -18,34 +18,33 @@ import ( "github.com/nyaruka/goflow/test" _ "github.com/nyaruka/mailroom/core/handlers" "github.com/nyaruka/mailroom/core/models" + "github.com/nyaruka/mailroom/runtime" "github.com/nyaruka/mailroom/testsuite" "github.com/nyaruka/mailroom/testsuite/testdata" - - "github.com/jmoiron/sqlx" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) func TestContactImports(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetAll) // start with no contacts or URNs - db.MustExec(`DELETE FROM contacts_contacturn`) - db.MustExec(`DELETE FROM contacts_contactgroup_contacts`) - db.MustExec(`DELETE FROM contacts_contact`) - db.MustExec(`ALTER SEQUENCE contacts_contact_id_seq RESTART WITH 10000`) - 
db.MustExec(`ALTER SEQUENCE contacts_contacturn_id_seq RESTART WITH 10000`) + rt.DB.MustExec(`DELETE FROM contacts_contacturn`) + rt.DB.MustExec(`DELETE FROM contacts_contactgroup_contacts`) + rt.DB.MustExec(`DELETE FROM contacts_contact`) + rt.DB.MustExec(`ALTER SEQUENCE contacts_contact_id_seq RESTART WITH 10000`) + rt.DB.MustExec(`ALTER SEQUENCE contacts_contacturn_id_seq RESTART WITH 10000`) // add contact in other org to make sure we can't update it - testdata.InsertContact(db, testdata.Org2, "f7a8016d-69a6-434b-aae7-5142ce4a98ba", "Xavier", "spa", models.ContactStatusActive) + testdata.InsertContact(rt, testdata.Org2, "f7a8016d-69a6-434b-aae7-5142ce4a98ba", "Xavier", "spa", models.ContactStatusActive) // add dynamic group to test imported contacts are added to it - testdata.InsertContactGroup(db, testdata.Org1, "fc32f928-ad37-477c-a88e-003d30fd7406", "Adults", "age >= 40") + testdata.InsertContactGroup(rt, testdata.Org1, "fc32f928-ad37-477c-a88e-003d30fd7406", "Adults", "age >= 40") // give our org a country by setting country on a channel - db.MustExec(`UPDATE channels_channel SET country = 'US' WHERE id = $1`, testdata.TwilioChannel.ID) + rt.DB.MustExec(`UPDATE channels_channel SET country = 'US' WHERE id = $1`, testdata.TwilioChannel.ID) testJSON := testsuite.ReadFile("testdata/imports.json") @@ -67,13 +66,13 @@ func TestContactImports(t *testing.T) { defer uuids.SetGenerator(uuids.DefaultGenerator) for i, tc := range tcs { - importID := testdata.InsertContactImport(db, testdata.Org1, testdata.Admin) - batchID := testdata.InsertContactImportBatch(db, importID, tc.Specs) + importID := testdata.InsertContactImport(rt, testdata.Org1, testdata.Admin) + batchID := testdata.InsertContactImportBatch(rt, importID, tc.Specs) - batch, err := models.LoadContactImportBatch(ctx, db, batchID) + batch, err := models.LoadContactImportBatch(ctx, rt.DB, batchID) require.NoError(t, err) - err = batch.Import(ctx, rt, testdata.Org1.ID) + err = batch.Import(ctx, rt, 
testdata.Org1.ID, testdata.Admin.ID) require.NoError(t, err) results := &struct { @@ -82,11 +81,11 @@ func TestContactImports(t *testing.T) { NumErrored int `db:"num_errored"` Errors json.RawMessage `db:"errors"` }{} - err = db.Get(results, `SELECT num_created, num_updated, num_errored, errors FROM contacts_contactimportbatch WHERE id = $1`, batchID) + err = rt.DB.Get(results, `SELECT num_created, num_updated, num_errored, errors FROM contacts_contactimportbatch WHERE id = $1`, batchID) require.NoError(t, err) // load all contacts and convert to specs - contacts := loadAllContacts(t, db, oa) + contacts := loadAllContacts(t, rt, oa) specs := make([]*models.ContactSpec, len(contacts)) for i, contact := range contacts { name := contact.Name() @@ -108,6 +107,7 @@ func TestContactImports(t *testing.T) { UUID: contact.UUID(), Name: &name, Language: &lang, + Status: contact.Status(), URNs: contact.URNs().RawURNs(), Fields: fields, Groups: groupUUIDs, @@ -146,20 +146,20 @@ func TestContactImports(t *testing.T) { } func TestLoadContactImport(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetData) - importID := testdata.InsertContactImport(db, testdata.Org1, testdata.Admin) - batch1ID := testdata.InsertContactImportBatch(db, importID, []byte(`[ + importID := testdata.InsertContactImport(rt, testdata.Org1, testdata.Admin) + batch1ID := testdata.InsertContactImportBatch(rt, importID, []byte(`[ {"name": "Norbert", "language": "eng", "urns": ["tel:+16055740001"]}, {"name": "Leah", "urns": ["tel:+16055740002"]} ]`)) - testdata.InsertContactImportBatch(db, importID, []byte(`[ + testdata.InsertContactImportBatch(rt, importID, []byte(`[ {"name": "Rowan", "language": "spa", "urns": ["tel:+16055740003"]} ]`)) - imp, err := models.LoadContactImport(ctx, db, importID) + imp, err := models.LoadContactImport(ctx, rt.DB, importID) require.NoError(t, err) assert.Equal(t, testdata.Org1.ID, imp.OrgID) @@ -168,7 +168,7 @@ 
func TestLoadContactImport(t *testing.T) { assert.Nil(t, imp.FinishedOn) assert.Equal(t, "P", imp.BatchStatuses) - batch1, err := models.LoadContactImportBatch(ctx, db, batch1ID) + batch1, err := models.LoadContactImportBatch(ctx, rt.DB, batch1ID) require.NoError(t, err) assert.Equal(t, importID, batch1.ImportID) @@ -177,18 +177,18 @@ func TestLoadContactImport(t *testing.T) { assert.Equal(t, 0, batch1.RecordStart) assert.Equal(t, 2, batch1.RecordEnd) - err = batch1.Import(ctx, rt, testdata.Org1.ID) + err = batch1.Import(ctx, rt, testdata.Org1.ID, testdata.Admin.ID) require.NoError(t, err) - imp, err = models.LoadContactImport(ctx, db, importID) + imp, err = models.LoadContactImport(ctx, rt.DB, importID) require.NoError(t, err) batchStatuses := strings.Split(imp.BatchStatuses, "") sort.Strings(batchStatuses) assert.Equal(t, []string{"C", "P"}, batchStatuses) - assertdb.Query(t, db, `SELECT count(*) FROM contacts_contactimportbatch WHERE status = 'C' AND finished_on IS NOT NULL`).Returns(1) - assertdb.Query(t, db, `SELECT count(*) FROM contacts_contactimportbatch WHERE status = 'P' AND finished_on IS NULL`).Returns(1) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM contacts_contactimportbatch WHERE status = 'C' AND finished_on IS NOT NULL`).Returns(1) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM contacts_contactimportbatch WHERE status = 'P' AND finished_on IS NULL`).Returns(1) } func TestContactSpecUnmarshal(t *testing.T) { @@ -221,8 +221,8 @@ func TestContactSpecUnmarshal(t *testing.T) { } // utility to load all contacts for the given org and return as slice sorted by ID -func loadAllContacts(t *testing.T, db *sqlx.DB, oa *models.OrgAssets) []*flows.Contact { - rows, err := db.Queryx(`SELECT id FROM contacts_contact WHERE org_id = $1`, oa.OrgID()) +func loadAllContacts(t *testing.T, rt *runtime.Runtime, oa *models.OrgAssets) []*flows.Contact { + rows, err := rt.DB.Queryx(`SELECT id FROM contacts_contact WHERE org_id = $1`, oa.OrgID()) require.NoError(t, err) 
defer rows.Close() @@ -233,7 +233,7 @@ func loadAllContacts(t *testing.T, db *sqlx.DB, oa *models.OrgAssets) []*flows.C allIDs = append(allIDs, id) } - contacts, err := models.LoadContacts(context.Background(), db, oa, allIDs) + contacts, err := models.LoadContacts(context.Background(), rt.DB, oa, allIDs) require.NoError(t, err) sort.Slice(contacts, func(i, j int) bool { return contacts[i].ID() < contacts[j].ID() }) diff --git a/core/models/incident.go b/core/models/incident.go index e9b5299ab..5f82e2a8d 100644 --- a/core/models/incident.go +++ b/core/models/incident.go @@ -13,35 +13,20 @@ import ( "github.com/nyaruka/goflow/flows" "github.com/nyaruka/goflow/flows/events" "github.com/nyaruka/mailroom/runtime" - "github.com/nyaruka/null" + "github.com/nyaruka/null/v2" "github.com/nyaruka/redisx" "github.com/pkg/errors" ) // IncidentID is our type for incident ids -type IncidentID null.Int +type IncidentID int64 const NilIncidentID = IncidentID(0) -// MarshalJSON marshals into JSON. 0 values will become null -func (i IncidentID) MarshalJSON() ([]byte, error) { - return null.Int(i).MarshalJSON() -} - -// UnmarshalJSON unmarshals from JSON. null values become 0 -func (i *IncidentID) UnmarshalJSON(b []byte) error { - return null.UnmarshalInt(b, (*null.Int)(i)) -} - -// Value returns the db value, null is returned for 0 -func (i IncidentID) Value() (driver.Value, error) { - return null.Int(i).Value() -} - -// Scan scans from the db value. 
null values become 0 -func (i *IncidentID) Scan(value interface{}) error { - return null.ScanInt(value, (*null.Int)(i)) -} +func (i *IncidentID) Scan(value any) error { return null.ScanInt(value, i) } +func (i IncidentID) Value() (driver.Value, error) { return null.IntValue(i) } +func (i *IncidentID) UnmarshalJSON(b []byte) error { return null.UnmarshalInt(b, i) } +func (i IncidentID) MarshalJSON() ([]byte, error) { return null.MarshalInt(i) } type IncidentType string diff --git a/core/models/incident_test.go b/core/models/incident_test.go index f62bd82ca..807409afe 100644 --- a/core/models/incident_test.go +++ b/core/models/incident_test.go @@ -21,41 +21,41 @@ import ( ) func TestIncidentWebhooksUnhealthy(t *testing.T) { - ctx, rt, db, rp := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetData) oa := testdata.Org1.Load(rt) - id1, err := models.IncidentWebhooksUnhealthy(ctx, db, rp, oa, []flows.NodeUUID{"5a2e83f1-efa8-40ba-bc0c-8873c525de7d", "aba89043-6f0a-4ccf-ba7f-0e1674b90759"}) + id1, err := models.IncidentWebhooksUnhealthy(ctx, rt.DB, rt.RP, oa, []flows.NodeUUID{"5a2e83f1-efa8-40ba-bc0c-8873c525de7d", "aba89043-6f0a-4ccf-ba7f-0e1674b90759"}) require.NoError(t, err) assert.NotEqual(t, 0, id1) - assertdb.Query(t, db, `SELECT count(*) FROM notifications_incident`).Returns(1) - assertredis.SMembers(t, rp, fmt.Sprintf("incident:%d:nodes", id1), []string{"5a2e83f1-efa8-40ba-bc0c-8873c525de7d", "aba89043-6f0a-4ccf-ba7f-0e1674b90759"}) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM notifications_incident`).Returns(1) + assertredis.SMembers(t, rt.RP, fmt.Sprintf("incident:%d:nodes", id1), []string{"5a2e83f1-efa8-40ba-bc0c-8873c525de7d", "aba89043-6f0a-4ccf-ba7f-0e1674b90759"}) // raising same incident doesn't create a new one... 
- id2, err := models.IncidentWebhooksUnhealthy(ctx, db, rp, oa, []flows.NodeUUID{"3b1743cd-bd8b-449e-8e8a-11a3bc479766"}) + id2, err := models.IncidentWebhooksUnhealthy(ctx, rt.DB, rt.RP, oa, []flows.NodeUUID{"3b1743cd-bd8b-449e-8e8a-11a3bc479766"}) require.NoError(t, err) assert.Equal(t, id1, id2) // but will add new nodes to the incident's node set - assertdb.Query(t, db, `SELECT count(*) FROM notifications_incident`).Returns(1) - assertredis.SMembers(t, rp, fmt.Sprintf("incident:%d:nodes", id1), []string{"3b1743cd-bd8b-449e-8e8a-11a3bc479766", "5a2e83f1-efa8-40ba-bc0c-8873c525de7d", "aba89043-6f0a-4ccf-ba7f-0e1674b90759"}) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM notifications_incident`).Returns(1) + assertredis.SMembers(t, rt.RP, fmt.Sprintf("incident:%d:nodes", id1), []string{"3b1743cd-bd8b-449e-8e8a-11a3bc479766", "5a2e83f1-efa8-40ba-bc0c-8873c525de7d", "aba89043-6f0a-4ccf-ba7f-0e1674b90759"}) // when the incident has ended, a new one can be created - db.MustExec(`UPDATE notifications_incident SET ended_on = NOW()`) + rt.DB.MustExec(`UPDATE notifications_incident SET ended_on = NOW()`) - id3, err := models.IncidentWebhooksUnhealthy(ctx, db, rp, oa, nil) + id3, err := models.IncidentWebhooksUnhealthy(ctx, rt.DB, rt.RP, oa, nil) require.NoError(t, err) assert.NotEqual(t, id1, id3) - assertdb.Query(t, db, `SELECT count(*) FROM notifications_incident`).Returns(2) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM notifications_incident`).Returns(2) } func TestGetOpenIncidents(t *testing.T) { - ctx, rt, db, rp := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetData) @@ -63,28 +63,28 @@ func TestGetOpenIncidents(t *testing.T) { oa2 := testdata.Org2.Load(rt) // create incident for org 1 - id1, err := models.IncidentWebhooksUnhealthy(ctx, db, rp, oa1, nil) + id1, err := models.IncidentWebhooksUnhealthy(ctx, rt.DB, rt.RP, oa1, nil) require.NoError(t, err) - incidents, err := models.GetOpenIncidents(ctx, db, 
[]models.IncidentType{models.IncidentTypeWebhooksUnhealthy}) + incidents, err := models.GetOpenIncidents(ctx, rt.DB, []models.IncidentType{models.IncidentTypeWebhooksUnhealthy}) assert.NoError(t, err) assert.Equal(t, 1, len(incidents)) assert.Equal(t, id1, incidents[0].ID) assert.Equal(t, models.IncidentTypeWebhooksUnhealthy, incidents[0].Type) // but then end it - err = incidents[0].End(ctx, db) + err = incidents[0].End(ctx, rt.DB) require.NoError(t, err) // and create another one... - id2, err := models.IncidentWebhooksUnhealthy(ctx, db, rp, oa1, nil) + id2, err := models.IncidentWebhooksUnhealthy(ctx, rt.DB, rt.RP, oa1, nil) require.NoError(t, err) // create an incident for org 2 - id3, err := models.IncidentWebhooksUnhealthy(ctx, db, rp, oa2, nil) + id3, err := models.IncidentWebhooksUnhealthy(ctx, rt.DB, rt.RP, oa2, nil) require.NoError(t, err) - incidents, err = models.GetOpenIncidents(ctx, db, []models.IncidentType{models.IncidentTypeWebhooksUnhealthy}) + incidents, err = models.GetOpenIncidents(ctx, rt.DB, []models.IncidentType{models.IncidentTypeWebhooksUnhealthy}) require.NoError(t, err) assert.Equal(t, 2, len(incidents)) @@ -96,7 +96,7 @@ func TestGetOpenIncidents(t *testing.T) { } func TestWebhookNode(t *testing.T) { - _, rt, _, _ := testsuite.Get() + _, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetRedis) diff --git a/core/models/labels_test.go b/core/models/labels_test.go index e5d118f8a..6b76be3eb 100644 --- a/core/models/labels_test.go +++ b/core/models/labels_test.go @@ -12,7 +12,7 @@ import ( ) func TestLabels(t *testing.T) { - ctx, rt, _, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() oa, err := models.GetOrgAssetsWithRefresh(ctx, rt, testdata.Org1.ID, models.RefreshLabels) require.NoError(t, err) diff --git a/core/models/locations_test.go b/core/models/locations_test.go index afad6082f..842de4601 100644 --- a/core/models/locations_test.go +++ b/core/models/locations_test.go @@ -13,12 +13,14 @@ import ( ) func 
TestLocations(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() - db.MustExec(`INSERT INTO locations_boundaryalias(is_active, created_on, modified_on, name, boundary_id, created_by_id, modified_by_id, org_id) - VALUES(TRUE, NOW(), NOW(), 'Soko', 8148, 1, 1, 1);`) - db.MustExec(`INSERT INTO locations_boundaryalias(is_active, created_on, modified_on, name, boundary_id, created_by_id, modified_by_id, org_id) - VALUES(TRUE, NOW(), NOW(), 'Sokoz', 8148, 1, 1, 2);`) + defer rt.DB.MustExec(`DELETE FROM locations_boundaryalias WHERE created_by_id = 2`) + + rt.DB.MustExec(`INSERT INTO locations_boundaryalias(is_active, created_on, modified_on, name, boundary_id, created_by_id, modified_by_id, org_id) + VALUES(TRUE, NOW(), NOW(), 'Soko', 8148, 2, 1, 1);`) + rt.DB.MustExec(`INSERT INTO locations_boundaryalias(is_active, created_on, modified_on, name, boundary_id, created_by_id, modified_by_id, org_id) + VALUES(TRUE, NOW(), NOW(), 'Sokoz', 8148, 2, 1, 2);`) oa, err := models.GetOrgAssetsWithRefresh(ctx, rt, testdata.Org1.ID, models.RefreshLocations) require.NoError(t, err) diff --git a/core/models/msgs.go b/core/models/msgs.go index 6960d87f6..8a7ade1ec 100644 --- a/core/models/msgs.go +++ b/core/models/msgs.go @@ -2,36 +2,31 @@ package models import ( "context" - "database/sql" "database/sql/driver" - "encoding/json" "fmt" "strings" "time" + "github.com/gomodule/redigo/redis" + "github.com/lib/pq" "github.com/nyaruka/gocommon/dates" "github.com/nyaruka/gocommon/gsm7" "github.com/nyaruka/gocommon/urns" "github.com/nyaruka/goflow/assets" "github.com/nyaruka/goflow/envs" - "github.com/nyaruka/goflow/excellent" - "github.com/nyaruka/goflow/excellent/types" "github.com/nyaruka/goflow/flows" - "github.com/nyaruka/goflow/flows/definition/legacy/expressions" - "github.com/nyaruka/goflow/flows/events" "github.com/nyaruka/goflow/utils" "github.com/nyaruka/mailroom/runtime" - "github.com/nyaruka/null" - - "github.com/gomodule/redigo/redis" - 
"github.com/lib/pq" - "github.com/lib/pq/hstore" + "github.com/nyaruka/null/v2" "github.com/pkg/errors" "github.com/sirupsen/logrus" ) +// maximum number of repeated messages to same contact allowed in 5 minute window +const msgRepetitionLimit = 20 + // MsgID is our internal type for msg ids, which can be null/0 -type MsgID null.Int +type MsgID int64 // NilMsgID is our constant for a nil msg id const NilMsgID = MsgID(0) @@ -54,23 +49,22 @@ const ( type MsgType string const ( - MsgTypeInbox = MsgType("I") - MsgTypeFlow = MsgType("F") - MsgTypeIVR = MsgType("V") - MsgTypeUSSD = MsgType("U") + MsgTypeText = MsgType("T") + MsgTypeVoice = MsgType("V") ) type MsgStatus string const ( - MsgStatusPending = MsgStatus("P") // incoming msg created but not yet handled, or outgoing message that failed to queue - MsgStatusHandled = MsgStatus("H") // incoming msg handled - MsgStatusQueued = MsgStatus("Q") // outgoing msg created and queued to courier - MsgStatusWired = MsgStatus("W") // outgoing msg requested to be sent via channel - MsgStatusSent = MsgStatus("S") // outgoing msg having received sent confirmation from channel - MsgStatusDelivered = MsgStatus("D") // outgoing msg having received delivery confirmation from channel - MsgStatusErrored = MsgStatus("E") // outgoing msg which has errored and will be retried - MsgStatusFailed = MsgStatus("F") // outgoing msg which has failed permanently + MsgStatusPending = MsgStatus("P") // incoming msg created but not yet handled + MsgStatusHandled = MsgStatus("H") // incoming msg handled + MsgStatusInitializing = MsgStatus("I") // outgoing message that failed to queue + MsgStatusQueued = MsgStatus("Q") // outgoing msg created and queued to courier + MsgStatusWired = MsgStatus("W") // outgoing msg requested to be sent via channel + MsgStatusSent = MsgStatus("S") // outgoing msg having received sent confirmation from channel + MsgStatusDelivered = MsgStatus("D") // outgoing msg having received delivery confirmation from channel + 
MsgStatusErrored = MsgStatus("E") // outgoing msg which has errored and will be retried + MsgStatusFailed = MsgStatus("F") // outgoing msg which has failed permanently ) type MsgFailedReason null.String @@ -91,109 +85,90 @@ var unsendableToFailedReason = map[flows.UnsendableReason]MsgFailedReason{ flows.UnsendableReasonNoDestination: MsgFailedNoDestination, } -// BroadcastID is our internal type for broadcast ids, which can be null/0 -type BroadcastID null.Int - -// NilBroadcastID is our constant for a nil broadcast id -const NilBroadcastID = BroadcastID(0) - -// TemplateState represents what state are templates are in, either already evaluated, not evaluated or -// that they are unevaluated legacy templates -type TemplateState string - -const ( - TemplateStateEvaluated = TemplateState("evaluated") - TemplateStateLegacy = TemplateState("legacy") - TemplateStateUnevaluated = TemplateState("unevaluated") -) - // Msg is our type for mailroom messages type Msg struct { m struct { - ID flows.MsgID `db:"id" json:"id"` - BroadcastID BroadcastID `db:"broadcast_id" json:"broadcast_id,omitempty"` - UUID flows.MsgUUID `db:"uuid" json:"uuid"` - Text string `db:"text" json:"text"` - HighPriority bool `db:"high_priority" json:"high_priority"` - CreatedOn time.Time `db:"created_on" json:"created_on"` - ModifiedOn time.Time `db:"modified_on" json:"modified_on"` - SentOn *time.Time `db:"sent_on" json:"sent_on"` - QueuedOn time.Time `db:"queued_on" json:"queued_on"` - Direction MsgDirection `db:"direction" json:"direction"` - Status MsgStatus `db:"status" json:"status"` - Visibility MsgVisibility `db:"visibility" json:"-"` - MsgType MsgType `db:"msg_type" json:"-"` - MsgCount int `db:"msg_count" json:"tps_cost"` - ErrorCount int `db:"error_count" json:"error_count"` - NextAttempt *time.Time `db:"next_attempt" json:"next_attempt"` - FailedReason MsgFailedReason `db:"failed_reason" json:"-"` - ExternalID null.String `db:"external_id" json:"-"` - ResponseToExternalID null.String ` 
json:"response_to_external_id,omitempty"` - Attachments pq.StringArray `db:"attachments" json:"attachments,omitempty"` - Metadata null.Map `db:"metadata" json:"metadata,omitempty"` - ChannelID ChannelID `db:"channel_id" json:"channel_id"` - ChannelUUID assets.ChannelUUID ` json:"channel_uuid"` - ContactID ContactID `db:"contact_id" json:"contact_id"` - ContactURNID *URNID `db:"contact_urn_id" json:"contact_urn_id"` - IsResend bool ` json:"is_resend,omitempty"` - URN urns.URN `db:"urn_urn" json:"urn"` - URNAuth null.String `db:"urn_auth" json:"urn_auth,omitempty"` - OrgID OrgID `db:"org_id" json:"org_id"` - FlowID FlowID `db:"flow_id" json:"-"` - - // extra data from handling added to the courier payload - SessionID SessionID `json:"session_id,omitempty"` - SessionStatus SessionStatus `json:"session_status,omitempty"` - Flow *assets.FlowReference `json:"flow,omitempty"` - - // These fields are set on the last outgoing message in a session's sprint. In the case - // of the session being at a wait with a timeout then the timeout will be set. It is up to - // Courier to update the session's timeout appropriately after sending the message. 
- SessionWaitStartedOn *time.Time `json:"session_wait_started_on,omitempty"` - SessionTimeout int `json:"session_timeout,omitempty"` - } - - channel *Channel -} - -func (m *Msg) ID() flows.MsgID { return m.m.ID } -func (m *Msg) BroadcastID() BroadcastID { return m.m.BroadcastID } -func (m *Msg) UUID() flows.MsgUUID { return m.m.UUID } -func (m *Msg) Channel() *Channel { return m.channel } -func (m *Msg) Text() string { return m.m.Text } -func (m *Msg) HighPriority() bool { return m.m.HighPriority } -func (m *Msg) CreatedOn() time.Time { return m.m.CreatedOn } -func (m *Msg) ModifiedOn() time.Time { return m.m.ModifiedOn } -func (m *Msg) SentOn() *time.Time { return m.m.SentOn } -func (m *Msg) QueuedOn() time.Time { return m.m.QueuedOn } -func (m *Msg) Direction() MsgDirection { return m.m.Direction } -func (m *Msg) Status() MsgStatus { return m.m.Status } -func (m *Msg) Visibility() MsgVisibility { return m.m.Visibility } -func (m *Msg) MsgType() MsgType { return m.m.MsgType } -func (m *Msg) ErrorCount() int { return m.m.ErrorCount } -func (m *Msg) NextAttempt() *time.Time { return m.m.NextAttempt } -func (m *Msg) FailedReason() MsgFailedReason { return m.m.FailedReason } -func (m *Msg) ExternalID() null.String { return m.m.ExternalID } -func (m *Msg) Metadata() map[string]interface{} { return m.m.Metadata.Map() } -func (m *Msg) MsgCount() int { return m.m.MsgCount } -func (m *Msg) ChannelID() ChannelID { return m.m.ChannelID } -func (m *Msg) ChannelUUID() assets.ChannelUUID { return m.m.ChannelUUID } -func (m *Msg) URN() urns.URN { return m.m.URN } -func (m *Msg) URNAuth() null.String { return m.m.URNAuth } -func (m *Msg) OrgID() OrgID { return m.m.OrgID } -func (m *Msg) FlowID() FlowID { return m.m.FlowID } -func (m *Msg) ContactID() ContactID { return m.m.ContactID } -func (m *Msg) ContactURNID() *URNID { return m.m.ContactURNID } -func (m *Msg) IsResend() bool { return m.m.IsResend } + ID flows.MsgID `db:"id"` + UUID flows.MsgUUID `db:"uuid"` + OrgID OrgID 
`db:"org_id"` + + // origin + BroadcastID BroadcastID `db:"broadcast_id"` + FlowID FlowID `db:"flow_id"` + TicketID TicketID `db:"ticket_id"` + CreatedByID UserID `db:"created_by_id"` + + // content + Text string `db:"text"` + Attachments pq.StringArray `db:"attachments"` + QuickReplies pq.StringArray `db:"quick_replies"` + Locale envs.Locale `db:"locale"` + + HighPriority bool `db:"high_priority"` + Direction MsgDirection `db:"direction"` + Status MsgStatus `db:"status"` + Visibility MsgVisibility `db:"visibility"` + MsgType MsgType `db:"msg_type"` + MsgCount int `db:"msg_count"` + CreatedOn time.Time `db:"created_on"` + ModifiedOn time.Time `db:"modified_on"` + ExternalID null.String `db:"external_id"` + Metadata null.Map `db:"metadata"` + ChannelID ChannelID `db:"channel_id"` + ContactID ContactID `db:"contact_id"` + ContactURNID *URNID `db:"contact_urn_id"` + URN urns.URN `db:"urn_urn"` + URNAuth null.String `db:"urn_auth"` + + SentOn *time.Time `db:"sent_on"` + QueuedOn time.Time `db:"queued_on"` + ErrorCount int `db:"error_count"` + NextAttempt *time.Time `db:"next_attempt"` + FailedReason MsgFailedReason `db:"failed_reason"` + } + + // transient fields set during message creation that provide extra data when queuing to courier + Contact *flows.Contact + Session *Session + LastInSprint bool + IsResend bool +} + +func (m *Msg) ID() flows.MsgID { return m.m.ID } +func (m *Msg) BroadcastID() BroadcastID { return m.m.BroadcastID } +func (m *Msg) UUID() flows.MsgUUID { return m.m.UUID } +func (m *Msg) Text() string { return m.m.Text } +func (m *Msg) QuickReplies() []string { return m.m.QuickReplies } +func (m *Msg) Locale() envs.Locale { return m.m.Locale } +func (m *Msg) HighPriority() bool { return m.m.HighPriority } +func (m *Msg) CreatedOn() time.Time { return m.m.CreatedOn } +func (m *Msg) ModifiedOn() time.Time { return m.m.ModifiedOn } +func (m *Msg) SentOn() *time.Time { return m.m.SentOn } +func (m *Msg) QueuedOn() time.Time { return m.m.QueuedOn } +func 
(m *Msg) Direction() MsgDirection { return m.m.Direction } +func (m *Msg) Status() MsgStatus { return m.m.Status } +func (m *Msg) Visibility() MsgVisibility { return m.m.Visibility } +func (m *Msg) Type() MsgType { return m.m.MsgType } +func (m *Msg) ErrorCount() int { return m.m.ErrorCount } +func (m *Msg) NextAttempt() *time.Time { return m.m.NextAttempt } +func (m *Msg) FailedReason() MsgFailedReason { return m.m.FailedReason } +func (m *Msg) ExternalID() null.String { return m.m.ExternalID } +func (m *Msg) Metadata() map[string]any { return m.m.Metadata } +func (m *Msg) MsgCount() int { return m.m.MsgCount } +func (m *Msg) ChannelID() ChannelID { return m.m.ChannelID } +func (m *Msg) URN() urns.URN { return m.m.URN } +func (m *Msg) URNAuth() null.String { return m.m.URNAuth } +func (m *Msg) OrgID() OrgID { return m.m.OrgID } +func (m *Msg) FlowID() FlowID { return m.m.FlowID } +func (m *Msg) TicketID() TicketID { return m.m.TicketID } +func (m *Msg) ContactID() ContactID { return m.m.ContactID } +func (m *Msg) ContactURNID() *URNID { return m.m.ContactURNID } func (m *Msg) SetChannel(channel *Channel) { - m.channel = channel if channel != nil { m.m.ChannelID = channel.ID() - m.m.ChannelUUID = channel.UUID() } else { m.m.ChannelID = NilChannelID - m.m.ChannelUUID = "" } } @@ -226,10 +201,6 @@ func (m *Msg) Attachments() []utils.Attachment { return attachments } -func (m *Msg) MarshalJSON() ([]byte, error) { - return json.Marshal(m.m) -} - // NewIncomingIVR creates a new incoming IVR message for the passed in text and attachment func NewIncomingIVR(cfg *runtime.Config, orgID OrgID, call *Call, in *flows.MsgIn, createdOn time.Time) *Msg { msg := &Msg{} @@ -241,7 +212,7 @@ func NewIncomingIVR(cfg *runtime.Config, orgID OrgID, call *Call, in *flows.MsgI m.Direction = DirectionIn m.Status = MsgStatusHandled m.Visibility = VisibilityVisible - m.MsgType = MsgTypeIVR + m.MsgType = MsgTypeVoice m.ContactID = call.ContactID() urnID := call.ContactURNID() @@ -267,11 
+238,12 @@ func NewOutgoingIVR(cfg *runtime.Config, orgID OrgID, call *Call, out *flows.Msg msg.SetURN(out.URN()) m.UUID = out.UUID() m.Text = out.Text() + m.Locale = out.Locale() m.HighPriority = false m.Direction = DirectionOut m.Status = MsgStatusWired m.Visibility = VisibilityVisible - m.MsgType = MsgTypeIVR + m.MsgType = MsgTypeVoice m.ContactID = call.ContactID() urnID := call.ContactURNID() @@ -292,73 +264,57 @@ func NewOutgoingIVR(cfg *runtime.Config, orgID OrgID, call *Call, out *flows.Msg return msg } -var msgRepetitionsScript = redis.NewScript(3, ` -local key, contact_id, text = KEYS[1], KEYS[2], KEYS[3] -local count = 1 - --- try to look up in window -local record = redis.call("HGET", key, contact_id) -if record then - local record_count = tonumber(string.sub(record, 1, 2)) - local record_text = string.sub(record, 4, -1) - - if record_text == text then - count = math.min(record_count + 1, 99) - else - count = 1 - end -end - --- create our new record with our updated count -record = string.format("%02d:%s", count, text) - --- write our new record with updated count and set expiration -redis.call("HSET", key, contact_id, record) -redis.call("EXPIRE", key, 300) - -return count -`) - -// GetMsgRepetitions gets the number of repetitions of this msg text for the given contact in the current 5 minute window -func GetMsgRepetitions(rp *redis.Pool, contact *flows.Contact, msg *flows.MsgOut) (int, error) { - rc := rp.Get() - defer rc.Close() - - keyTime := dates.Now().UTC().Round(time.Minute * 5) - key := fmt.Sprintf("msg_repetitions:%s", keyTime.Format("2006-01-02T15:04")) - return redis.Int(msgRepetitionsScript.Do(rc, key, contact.ID(), msg.Text())) -} - // NewOutgoingFlowMsg creates an outgoing message for the passed in flow message func NewOutgoingFlowMsg(rt *runtime.Runtime, org *Org, channel *Channel, session *Session, flow *Flow, out *flows.MsgOut, createdOn time.Time) (*Msg, error) { - return newOutgoingMsg(rt, org, channel, session.Contact(), out, 
createdOn, session, flow, NilBroadcastID) + return newOutgoingTextMsg(rt, org, channel, session.Contact(), out, createdOn, session, flow, NilBroadcastID, NilTicketID, NilUserID) } // NewOutgoingBroadcastMsg creates an outgoing message which is part of a broadcast -func NewOutgoingBroadcastMsg(rt *runtime.Runtime, org *Org, channel *Channel, contact *flows.Contact, out *flows.MsgOut, createdOn time.Time, broadcastID BroadcastID) (*Msg, error) { - return newOutgoingMsg(rt, org, channel, contact, out, createdOn, nil, nil, broadcastID) +func NewOutgoingBroadcastMsg(rt *runtime.Runtime, org *Org, channel *Channel, contact *flows.Contact, out *flows.MsgOut, createdOn time.Time, bb *BroadcastBatch) (*Msg, error) { + return newOutgoingTextMsg(rt, org, channel, contact, out, createdOn, nil, nil, bb.BroadcastID, NilTicketID, bb.CreatedByID) } -func newOutgoingMsg(rt *runtime.Runtime, org *Org, channel *Channel, contact *flows.Contact, out *flows.MsgOut, createdOn time.Time, session *Session, flow *Flow, broadcastID BroadcastID) (*Msg, error) { +// NewOutgoingTicketMsg creates an outgoing message from a ticket +func NewOutgoingTicketMsg(rt *runtime.Runtime, org *Org, channel *Channel, contact *flows.Contact, out *flows.MsgOut, createdOn time.Time, ticketID TicketID, userID UserID) (*Msg, error) { + return newOutgoingTextMsg(rt, org, channel, contact, out, createdOn, nil, nil, NilBroadcastID, ticketID, userID) +} + +// NewOutgoingChatMsg creates an outgoing message from chat +func NewOutgoingChatMsg(rt *runtime.Runtime, org *Org, channel *Channel, contact *flows.Contact, out *flows.MsgOut, createdOn time.Time, userID UserID) (*Msg, error) { + return newOutgoingTextMsg(rt, org, channel, contact, out, createdOn, nil, nil, NilBroadcastID, NilTicketID, userID) +} + +func newOutgoingTextMsg(rt *runtime.Runtime, org *Org, channel *Channel, contact *flows.Contact, out *flows.MsgOut, createdOn time.Time, session *Session, flow *Flow, broadcastID BroadcastID, ticketID TicketID, userID 
UserID) (*Msg, error) { msg := &Msg{} m := &msg.m m.UUID = out.UUID() m.OrgID = org.ID() m.ContactID = ContactID(contact.ID()) m.BroadcastID = broadcastID + m.TicketID = ticketID m.Text = out.Text() + m.QuickReplies = out.QuickReplies() + m.Locale = out.Locale() m.HighPriority = false m.Direction = DirectionOut m.Status = MsgStatusQueued m.Visibility = VisibilityVisible - m.MsgType = MsgTypeFlow + m.MsgType = MsgTypeText m.MsgCount = 1 m.CreatedOn = createdOn - m.Metadata = null.NewMap(buildMsgMetadata(out)) + m.CreatedByID = userID + m.Metadata = null.Map(buildMsgMetadata(out)) msg.SetChannel(channel) msg.SetURN(out.URN()) + // if we have attachments, add them + if len(out.Attachments()) > 0 { + for _, a := range out.Attachments() { + m.Attachments = append(m.Attachments, string(NormalizeAttachment(rt.Config, a))) + } + } + if out.UnsendableReason() != flows.NilUnsendableReason { m.Status = MsgStatusFailed m.FailedReason = unsendableToFailedReason[out.UnsendableReason()] @@ -372,7 +328,7 @@ func newOutgoingMsg(rt *runtime.Runtime, org *Org, channel *Channel, contact *fl if err != nil { return nil, errors.Wrap(err, "error looking up msg repetitions") } - if repetitions >= 20 { + if repetitions >= msgRepetitionLimit { m.Status = MsgStatusFailed m.FailedReason = MsgFailedLooping @@ -380,28 +336,9 @@ func newOutgoingMsg(rt *runtime.Runtime, org *Org, channel *Channel, contact *fl } } - // if we have a session, set fields on the message from that - if session != nil { - m.ResponseToExternalID = session.IncomingMsgExternalID() - m.SessionID = session.ID() - m.SessionStatus = session.Status() - - if flow != nil { - m.FlowID = flow.ID() - m.Flow = flow.Reference() - } - - // if we're responding to an incoming message, send as high priority - if session.IncomingMsgID() != NilMsgID { - m.HighPriority = true - } - } - - // if we have attachments, add them - if len(out.Attachments()) > 0 { - for _, a := range out.Attachments() { - m.Attachments = append(m.Attachments, 
string(NormalizeAttachment(rt.Config, a))) - } + // if we're a chat/ticket message, or we're responding to an incoming message in a flow, send as high priority + if (broadcastID == NilBroadcastID && session == nil) || (session != nil && session.IncomingMsgID() != NilMsgID) { + m.HighPriority = true } // if we're sending to a phone, message may have to be sent in multiple parts @@ -409,32 +346,21 @@ func newOutgoingMsg(rt *runtime.Runtime, org *Org, channel *Channel, contact *fl m.MsgCount = gsm7.Segments(m.Text) + len(m.Attachments) } + if flow != nil { + m.FlowID = flow.ID() + } + + // set transient fields which we'll use when queuing to courier + msg.Contact = contact + msg.Session = session + return msg, nil } func buildMsgMetadata(m *flows.MsgOut) map[string]interface{} { metadata := make(map[string]interface{}) - if len(m.QuickReplies()) > 0 { - metadata["quick_replies"] = m.QuickReplies() - } if m.Templating() != nil { - mLanguage, mCountry := m.Locale().ToParts() - - // TODO once we're queuing messages with locale and courier is reading that, can just add templating directly - // without language and country - metadata["templating"] = struct { - Template *assets.TemplateReference `json:"template"` - Language envs.Language `json:"language"` - Country envs.Country `json:"country"` - Variables []string `json:"variables,omitempty"` - Namespace string `json:"namespace"` - }{ - Template: m.Templating_.Template(), - Language: mLanguage, - Country: mCountry, - Variables: m.Templating().Variables(), - Namespace: m.Templating().Namespace(), - } + metadata["templating"] = m.Templating() } if m.Topic() != flows.NilMsgTopic { metadata["topic"] = string(m.Topic()) @@ -442,8 +368,8 @@ func buildMsgMetadata(m *flows.MsgOut) map[string]interface{} { return metadata } -// NewIncomingMsg creates a new incoming message for the passed in text and attachment -func NewIncomingMsg(cfg *runtime.Config, orgID OrgID, channel *Channel, contactID ContactID, in *flows.MsgIn, createdOn 
time.Time) *Msg { +// NewIncomingSurveyorMsg creates a new incoming message for the passed in text and attachment +func NewIncomingSurveyorMsg(cfg *runtime.Config, orgID OrgID, channel *Channel, contactID ContactID, in *flows.MsgIn, createdOn time.Time) *Msg { msg := &Msg{} msg.SetChannel(channel) @@ -455,7 +381,7 @@ func NewIncomingMsg(cfg *runtime.Config, orgID OrgID, channel *Channel, contactI m.Direction = DirectionIn m.Status = MsgStatusHandled m.Visibility = VisibilityVisible - m.MsgType = MsgTypeFlow + m.MsgType = MsgTypeText m.ContactID = contactID m.OrgID = orgID m.CreatedOn = createdOn @@ -468,12 +394,46 @@ func NewIncomingMsg(cfg *runtime.Config, orgID OrgID, channel *Channel, contactI return msg } +var msgRepetitionsScript = redis.NewScript(3, ` +local key, contact_id, text = KEYS[1], KEYS[2], KEYS[3] + +local msg_key = string.format("%d|%s", contact_id, string.lower(string.sub(text, 1, 128))) +local count = 1 + +-- try to look up in window +local record = redis.call("HGET", key, msg_key) +if record then + count = tonumber(record) + 1 +end + +-- write updated count and set expiration +redis.call("HSET", key, msg_key, count) +redis.call("EXPIRE", key, 300) + +return count +`) + +// GetMsgRepetitions gets the number of repetitions of this msg text for the given contact in the current 5 minute window +func GetMsgRepetitions(rp *redis.Pool, contact *flows.Contact, msg *flows.MsgOut) (int, error) { + rc := rp.Get() + defer rc.Close() + + keyTime := dates.Now().UTC().Round(time.Minute * 5) + key := fmt.Sprintf("msg_repetitions:%s", keyTime.Format("2006-01-02T15:04")) + return redis.Int(msgRepetitionsScript.Do(rc, key, contact.ID(), msg.Text())) +} + var loadMessagesSQL = ` SELECT id, + uuid, broadcast_id, - uuid, + flow_id, + ticket_id, text, + attachments, + quick_replies, + locale, created_on, direction, status, @@ -484,7 +444,6 @@ SELECT failed_reason, coalesce(high_priority, FALSE) as high_priority, external_id, - attachments, metadata, channel_id, 
contact_id, @@ -507,9 +466,14 @@ func GetMessagesByID(ctx context.Context, db Queryer, orgID OrgID, direction Msg var loadMessagesForRetrySQL = ` SELECT m.id, - m.broadcast_id, m.uuid, + m.broadcast_id, + m.flow_id, + m.ticket_id, m.text, + m.attachments, + m.quick_replies, + m.locale, m.created_on, m.direction, m.status, @@ -520,7 +484,6 @@ SELECT m.failed_reason, m.high_priority, m.external_id, - m.attachments, m.metadata, m.channel_id, m.contact_id, @@ -535,7 +498,7 @@ INNER JOIN INNER JOIN channels_channel c ON c.id = m.channel_id WHERE - m.direction = 'O' AND m.status IN ('P', 'E') AND m.next_attempt <= NOW() AND c.is_active = TRUE + m.direction = 'O' AND m.status IN ('I', 'E') AND m.next_attempt <= NOW() AND c.is_active = TRUE ORDER BY m.next_attempt ASC, m.created_on ASC LIMIT 5000` @@ -553,8 +516,6 @@ func loadMessages(ctx context.Context, db Queryer, sql string, params ...interfa defer rows.Close() msgs := make([]*Msg, 0) - channelIDsSeen := make(map[ChannelID]bool) - channelIDs := make([]ChannelID, 0, 5) for rows.Next() { msg := &Msg{} @@ -564,25 +525,6 @@ func loadMessages(ctx context.Context, db Queryer, sql string, params ...interfa } msgs = append(msgs, msg) - - if msg.ChannelID() != NilChannelID && !channelIDsSeen[msg.ChannelID()] { - channelIDsSeen[msg.ChannelID()] = true - channelIDs = append(channelIDs, msg.ChannelID()) - } - } - - channels, err := GetChannelsByID(ctx, db, channelIDs) - if err != nil { - return nil, errors.Wrap(err, "error fetching channels for messages") - } - - channelsByID := make(map[ChannelID]*Channel) - for _, ch := range channels { - channelsByID[ch.ID()] = ch - } - - for _, msg := range msgs { - msg.SetChannel(channelsByID[msg.m.ChannelID]) } return msgs, nil @@ -607,12 +549,6 @@ func NormalizeAttachment(cfg *runtime.Config, attachment utils.Attachment) utils return utils.Attachment(fmt.Sprintf("%s:%s", attachment.ContentType(), url)) } -// SetTimeout sets the timeout for this message -func (m *Msg) SetTimeout(start 
time.Time, timeout time.Duration) { - m.m.SessionWaitStartedOn = &start - m.m.SessionTimeout = int(timeout / time.Second) -} - // InsertMessages inserts the passed in messages in a single query func InsertMessages(ctx context.Context, tx Queryer, msgs []*Msg) error { is := make([]interface{}, len(msgs)) @@ -620,51 +556,37 @@ func InsertMessages(ctx context.Context, tx Queryer, msgs []*Msg) error { is[i] = &msgs[i].m } - return BulkQuery(ctx, "insert messages", tx, insertMsgSQL, is) + return BulkQuery(ctx, "insert messages", tx, sqlInsertMsgSQL, is) } -const insertMsgSQL = ` +const sqlInsertMsgSQL = ` INSERT INTO -msgs_msg(uuid, text, high_priority, created_on, modified_on, queued_on, sent_on, direction, status, attachments, metadata, +msgs_msg(uuid, text, attachments, quick_replies, locale, high_priority, created_on, modified_on, queued_on, sent_on, direction, status, metadata, visibility, msg_type, msg_count, error_count, next_attempt, failed_reason, channel_id, - contact_id, contact_urn_id, org_id, flow_id, broadcast_id) - VALUES(:uuid, :text, :high_priority, :created_on, now(), now(), :sent_on, :direction, :status, :attachments, :metadata, + contact_id, contact_urn_id, org_id, flow_id, broadcast_id, ticket_id, created_by_id) + VALUES(:uuid, :text, :attachments, :quick_replies, :locale, :high_priority, :created_on, now(), now(), :sent_on, :direction, :status, :metadata, :visibility, :msg_type, :msg_count, :error_count, :next_attempt, :failed_reason, :channel_id, - :contact_id, :contact_urn_id, :org_id, :flow_id, :broadcast_id) + :contact_id, :contact_urn_id, :org_id, :flow_id, :broadcast_id, :ticket_id, :created_by_id) RETURNING - id as id, - now() as modified_on, - now() as queued_on + id AS id, + modified_on AS modified_on, + queued_on AS queued_on ` -// UpdateMessage updates a message after handling -func UpdateMessage(ctx context.Context, tx Queryer, msgID MsgID, status MsgStatus, visibility MsgVisibility, msgType MsgType, flow FlowID, attachments 
[]utils.Attachment, logUUIDs []ChannelLogUUID) error { +// MarkMessageHandled updates a message after handling +func MarkMessageHandled(ctx context.Context, tx Queryer, msgID MsgID, status MsgStatus, visibility MsgVisibility, flowID FlowID, ticketID TicketID, attachments []utils.Attachment, logUUIDs []ChannelLogUUID) error { _, err := tx.ExecContext(ctx, - `UPDATE - msgs_msg - SET - status = $2, - visibility = $3, - msg_type = $4, - flow_id = $5, - attachments = $6, - log_uuids = array_cat(log_uuids, $7) - WHERE - id = $1`, - msgID, status, visibility, msgType, flow, pq.Array(attachments), pq.Array(logUUIDs)) - - if err != nil { - return errors.Wrapf(err, "error updating msg: %d", msgID) - } - - return nil + `UPDATE msgs_msg SET status = $2, visibility = $3, flow_id = $4, ticket_id = $5, attachments = $6, log_uuids = array_cat(log_uuids, $7) WHERE id = $1`, + msgID, status, visibility, flowID, ticketID, pq.Array(attachments), pq.Array(logUUIDs), + ) + return errors.Wrapf(err, "error marking msg #%d as handled", msgID) } -// MarkMessagesForRequeuing marks the passed in messages as pending(P) with a next attempt value +// MarkMessagesForRequeuing marks the passed in messages as initializing(I) with a next attempt value // so that the retry messages task will pick them up. 
func MarkMessagesForRequeuing(ctx context.Context, db Queryer, msgs []*Msg) error { nextAttempt := time.Now().Add(10 * time.Minute) - return updateMessageStatus(ctx, db, msgs, MsgStatusPending, &nextAttempt) + return updateMessageStatus(ctx, db, msgs, MsgStatusInitializing, &nextAttempt) } // MarkMessagesQueued marks the passed in messages as queued(Q) @@ -690,486 +612,10 @@ func updateMessageStatus(ctx context.Context, db Queryer, msgs []*Msg, status Ms return BulkQuery(ctx, "updating message status", db, sqlUpdateMsgStatus, is) } -// BroadcastTranslation is the translation for the passed in language -type BroadcastTranslation struct { - Text string `json:"text"` - Attachments []utils.Attachment `json:"attachments,omitempty"` - QuickReplies []string `json:"quick_replies,omitempty"` -} - -// Broadcast represents a broadcast that needs to be sent -type Broadcast struct { - b struct { - BroadcastID BroadcastID `json:"broadcast_id,omitempty" db:"id"` - Translations map[envs.Language]*BroadcastTranslation `json:"translations"` - Text hstore.Hstore ` db:"text"` - TemplateState TemplateState `json:"template_state"` - BaseLanguage envs.Language `json:"base_language" db:"base_language"` - URNs []urns.URN `json:"urns,omitempty"` - ContactIDs []ContactID `json:"contact_ids,omitempty"` - GroupIDs []GroupID `json:"group_ids,omitempty"` - OrgID OrgID `json:"org_id" db:"org_id"` - CreatedByID UserID `json:"created_by_id,omitempty" db:"created_by_id"` - ParentID BroadcastID `json:"parent_id,omitempty" db:"parent_id"` - TicketID TicketID `json:"ticket_id,omitempty" db:"ticket_id"` - } -} - -func (b *Broadcast) ID() BroadcastID { return b.b.BroadcastID } -func (b *Broadcast) OrgID() OrgID { return b.b.OrgID } -func (b *Broadcast) CreatedByID() UserID { return b.b.CreatedByID } -func (b *Broadcast) ContactIDs() []ContactID { return b.b.ContactIDs } -func (b *Broadcast) GroupIDs() []GroupID { return b.b.GroupIDs } -func (b *Broadcast) URNs() []urns.URN { return b.b.URNs } -func (b 
*Broadcast) BaseLanguage() envs.Language { return b.b.BaseLanguage } -func (b *Broadcast) Translations() map[envs.Language]*BroadcastTranslation { return b.b.Translations } -func (b *Broadcast) TemplateState() TemplateState { return b.b.TemplateState } -func (b *Broadcast) TicketID() TicketID { return b.b.TicketID } - -func (b *Broadcast) MarshalJSON() ([]byte, error) { return json.Marshal(b.b) } -func (b *Broadcast) UnmarshalJSON(data []byte) error { return json.Unmarshal(data, &b.b) } - -// NewBroadcast creates a new broadcast with the passed in parameters -func NewBroadcast( - orgID OrgID, id BroadcastID, translations map[envs.Language]*BroadcastTranslation, - state TemplateState, baseLanguage envs.Language, urns []urns.URN, contactIDs []ContactID, groupIDs []GroupID, ticketID TicketID, createdByID UserID) *Broadcast { - - bcast := &Broadcast{} - bcast.b.OrgID = orgID - bcast.b.BroadcastID = id - bcast.b.Translations = translations - bcast.b.TemplateState = state - bcast.b.BaseLanguage = baseLanguage - bcast.b.URNs = urns - bcast.b.ContactIDs = contactIDs - bcast.b.GroupIDs = groupIDs - bcast.b.TicketID = ticketID - bcast.b.CreatedByID = createdByID - - return bcast -} - -// InsertChildBroadcast clones the passed in broadcast as a parent, then inserts that broadcast into the DB -func InsertChildBroadcast(ctx context.Context, db Queryer, parent *Broadcast) (*Broadcast, error) { - child := NewBroadcast( - parent.OrgID(), - NilBroadcastID, - parent.b.Translations, - parent.b.TemplateState, - parent.b.BaseLanguage, - parent.b.URNs, - parent.b.ContactIDs, - parent.b.GroupIDs, - parent.b.TicketID, - parent.b.CreatedByID, - ) - child.b.ParentID = parent.ID() - - // populate text from our translations - child.b.Text.Map = make(map[string]sql.NullString) - for lang, t := range child.b.Translations { - child.b.Text.Map[string(lang)] = sql.NullString{String: t.Text, Valid: true} - if len(t.Attachments) > 0 || len(t.QuickReplies) > 0 { - return nil, errors.Errorf("cannot 
clone broadcast with quick replies or attachments") - } - } - - // insert our broadcast - err := BulkQuery(ctx, "inserting broadcast", db, insertBroadcastSQL, []interface{}{&child.b}) - if err != nil { - return nil, errors.Wrapf(err, "error inserting child broadcast for broadcast: %d", parent.ID()) - } - - // build up all our contact associations - contacts := make([]interface{}, 0, len(child.b.ContactIDs)) - for _, contactID := range child.b.ContactIDs { - contacts = append(contacts, &broadcastContact{ - BroadcastID: child.ID(), - ContactID: contactID, - }) - } - - // insert our contacts - err = BulkQuery(ctx, "inserting broadcast contacts", db, insertBroadcastContactsSQL, contacts) - if err != nil { - return nil, errors.Wrapf(err, "error inserting contacts for broadcast") - } - - // build up all our group associations - groups := make([]interface{}, 0, len(child.b.GroupIDs)) - for _, groupID := range child.b.GroupIDs { - groups = append(groups, &broadcastGroup{ - BroadcastID: child.ID(), - GroupID: groupID, - }) - } - - // insert our groups - err = BulkQuery(ctx, "inserting broadcast groups", db, insertBroadcastGroupsSQL, groups) - if err != nil { - return nil, errors.Wrapf(err, "error inserting groups for broadcast") - } - - // finally our URNs - urns := make([]interface{}, 0, len(child.b.URNs)) - for _, urn := range child.b.URNs { - urnID := GetURNID(urn) - if urnID == NilURNID { - return nil, errors.Errorf("attempt to insert new broadcast with URNs that do not have id: %s", urn) - } - urns = append(urns, &broadcastURN{ - BroadcastID: child.ID(), - URNID: urnID, - }) - } - - // insert our urns - err = BulkQuery(ctx, "inserting broadcast urns", db, insertBroadcastURNsSQL, urns) - if err != nil { - return nil, errors.Wrapf(err, "error inserting URNs for broadcast") - } - - return child, nil -} - -type broadcastURN struct { - BroadcastID BroadcastID `db:"broadcast_id"` - URNID URNID `db:"contacturn_id"` -} - -type broadcastContact struct { - BroadcastID 
BroadcastID `db:"broadcast_id"` - ContactID ContactID `db:"contact_id"` -} - -type broadcastGroup struct { - BroadcastID BroadcastID `db:"broadcast_id"` - GroupID GroupID `db:"contactgroup_id"` -} - -const insertBroadcastSQL = ` -INSERT INTO - msgs_broadcast( org_id, parent_id, ticket_id, created_on, modified_on, status, text, base_language, send_all, is_active) - VALUES(:org_id, :parent_id, :ticket_id, NOW() , NOW(), 'Q', :text, :base_language, FALSE, TRUE) -RETURNING - id -` - -const insertBroadcastContactsSQL = ` -INSERT INTO - msgs_broadcast_contacts( broadcast_id, contact_id) - VALUES(:broadcast_id, :contact_id) -` - -const insertBroadcastGroupsSQL = ` -INSERT INTO - msgs_broadcast_groups( broadcast_id, contactgroup_id) - VALUES(:broadcast_id, :contactgroup_id) -` - -const insertBroadcastURNsSQL = ` -INSERT INTO - msgs_broadcast_urns( broadcast_id, contacturn_id) - VALUES(:broadcast_id, :contacturn_id) -` - -// NewBroadcastFromEvent creates a broadcast object from the passed in broadcast event -func NewBroadcastFromEvent(ctx context.Context, tx Queryer, oa *OrgAssets, event *events.BroadcastCreatedEvent) (*Broadcast, error) { - // converst our translations to our type - translations := make(map[envs.Language]*BroadcastTranslation) - for l, t := range event.Translations { - translations[l] = &BroadcastTranslation{ - Text: t.Text, - Attachments: t.Attachments, - QuickReplies: t.QuickReplies, - } - } - - // resolve our contact references - contactIDs, err := GetContactIDsFromReferences(ctx, tx, oa.OrgID(), event.Contacts) - if err != nil { - return nil, errors.Wrapf(err, "error resolving contact references") - } - - // and our groups - groupIDs := make([]GroupID, 0, len(event.Groups)) - for i := range event.Groups { - group := oa.GroupByUUID(event.Groups[i].UUID) - if group != nil { - groupIDs = append(groupIDs, group.ID()) - } - } - - return NewBroadcast(oa.OrgID(), NilBroadcastID, translations, TemplateStateEvaluated, event.BaseLanguage, event.URNs, contactIDs, 
groupIDs, NilTicketID, NilUserID), nil -} - -func (b *Broadcast) CreateBatch(contactIDs []ContactID) *BroadcastBatch { - return &BroadcastBatch{ - BroadcastID: b.b.BroadcastID, - BaseLanguage: b.b.BaseLanguage, - Translations: b.b.Translations, - TemplateState: b.b.TemplateState, - OrgID: b.b.OrgID, - CreatedByID: b.b.CreatedByID, - TicketID: b.b.TicketID, - ContactIDs: contactIDs, - } -} - -// BroadcastBatch represents a batch of contacts that need messages sent for -type BroadcastBatch struct { - BroadcastID BroadcastID `json:"broadcast_id,omitempty"` - Translations map[envs.Language]*BroadcastTranslation `json:"translations"` - BaseLanguage envs.Language `json:"base_language"` - TemplateState TemplateState `json:"template_state"` - URNs map[ContactID]urns.URN `json:"urns,omitempty"` - ContactIDs []ContactID `json:"contact_ids,omitempty"` - IsLast bool `json:"is_last"` - OrgID OrgID `json:"org_id"` - CreatedByID UserID `json:"created_by_id"` - TicketID TicketID `json:"ticket_id"` -} - -func (b *BroadcastBatch) CreateMessages(ctx context.Context, rt *runtime.Runtime, oa *OrgAssets) ([]*Msg, error) { - repeatedContacts := make(map[ContactID]bool) - broadcastURNs := b.URNs - - // build our list of contact ids - contactIDs := b.ContactIDs - - // build a map of the contacts that are present both in our URN list and our contact id list - if broadcastURNs != nil { - for _, id := range contactIDs { - _, found := broadcastURNs[id] - if found { - repeatedContacts[id] = true - } - } - - // if we have URN we need to send to, add those contacts as well if not already repeated - for id := range broadcastURNs { - if !repeatedContacts[id] { - contactIDs = append(contactIDs, id) - } - } - } - - // load all our contacts - contacts, err := LoadContacts(ctx, rt.DB, oa, contactIDs) - if err != nil { - return nil, errors.Wrapf(err, "error loading contacts for broadcast") - } - - channels := oa.SessionAssets().Channels() - - // for each contact, build our message - msgs := make([]*Msg, 
0, len(contacts)) - - // utility method to build up our message - buildMessage := func(c *Contact, forceURN urns.URN) (*Msg, error) { - if c.Status() != ContactStatusActive { - return nil, nil - } - - contact, err := c.FlowContact(oa) - if err != nil { - return nil, errors.Wrapf(err, "error creating flow contact") - } - - urn := urns.NilURN - var channel *Channel - - // we are forcing to send to a non-preferred URN, find the channel - if forceURN != urns.NilURN { - for _, u := range contact.URNs() { - if u.URN().Identity() == forceURN.Identity() { - c := channels.GetForURN(u, assets.ChannelRoleSend) - if c == nil { - return nil, nil - } - urn = u.URN() - channel = oa.ChannelByUUID(c.UUID()) - break - } - } - } else { - // no forced URN, find the first URN we can send to - for _, u := range contact.URNs() { - c := channels.GetForURN(u, assets.ChannelRoleSend) - if c != nil { - urn = u.URN() - channel = oa.ChannelByUUID(c.UUID()) - break - } - } - } - - // no urn and channel? move on - if channel == nil { - return nil, nil - } - - // resolve our translations, the order is: - // 1) valid contact language - // 2) org default language - // 3) broadcast base language - lang := contact.Language() - if lang != envs.NilLanguage { - found := false - for _, l := range oa.Env().AllowedLanguages() { - if l == lang { - found = true - break - } - } - if !found { - lang = envs.NilLanguage - } - } - - // have a valid contact language, try that - trans := b.Translations - t := trans[lang] - - // not found? try org default language - if t == nil { - lang = oa.Env().DefaultLanguage() - t = trans[lang] - } - - // not found? 
use broadcast base language - if t == nil { - lang = b.BaseLanguage - t = trans[lang] - } - - if t == nil { - logrus.WithField("base_language", b.BaseLanguage).WithField("translations", trans).Error("unable to find translation for broadcast") - return nil, nil - } - - template := "" - - // if this is a legacy template, migrate it forward - if b.TemplateState == TemplateStateLegacy { - template, _ = expressions.MigrateTemplate(t.Text, nil) - } else if b.TemplateState == TemplateStateUnevaluated { - template = t.Text - } - - text := t.Text - - // if we have a template, evaluate it - if template != "" { - // build up the minimum viable context for templates - templateCtx := types.NewXObject(map[string]types.XValue{ - "contact": flows.Context(oa.Env(), contact), - "fields": flows.Context(oa.Env(), contact.Fields()), - "globals": flows.Context(oa.Env(), oa.SessionAssets().Globals()), - "urns": flows.ContextFunc(oa.Env(), contact.URNs().MapContext), - }) - text, _ = excellent.EvaluateTemplate(oa.Env(), templateCtx, template, nil) - } - - // don't do anything if we have no text or attachments - if text == "" && len(t.Attachments) == 0 { - return nil, nil - } - - unsendableReason := flows.NilUnsendableReason - if contact.Status() != flows.ContactStatusActive { - unsendableReason = flows.UnsendableReasonContactStatus - } else if urn == urns.NilURN || channel == nil { - unsendableReason = flows.UnsendableReasonNoDestination - } - - // create our outgoing message - out := flows.NewMsgOut(urn, channel.ChannelReference(), text, t.Attachments, t.QuickReplies, nil, flows.NilMsgTopic, envs.NewLocale(lang, envs.NilCountry), unsendableReason) - msg, err := NewOutgoingBroadcastMsg(rt, oa.Org(), channel, contact, out, time.Now(), b.BroadcastID) - if err != nil { - return nil, errors.Wrapf(err, "error creating outgoing message") - } - - return msg, nil - } - - // run through all our contacts to create our messages - for _, c := range contacts { - // use the preferred URN if present - 
urn := broadcastURNs[c.ID()] - msg, err := buildMessage(c, urn) - if err != nil { - return nil, errors.Wrapf(err, "error creating broadcast message") - } - if msg != nil { - msgs = append(msgs, msg) - } - - // if this is a contact that will receive two messages, calculate that one as well - if repeatedContacts[c.ID()] { - m2, err := buildMessage(c, urns.NilURN) - if err != nil { - return nil, errors.Wrapf(err, "error creating broadcast message") - } - - // add this message if it isn't a duplicate - if m2 != nil && m2.URN() != msg.URN() { - msgs = append(msgs, m2) - } - } - } - - // insert them in a single request - err = InsertMessages(ctx, rt.DB, msgs) - if err != nil { - return nil, errors.Wrapf(err, "error inserting broadcast messages") - } - - // if the broadcast was a ticket reply, update the ticket - if b.TicketID != NilTicketID { - if err := b.updateTicket(ctx, rt.DB, oa); err != nil { - return nil, err - } - } - - return msgs, nil -} - -func (b *BroadcastBatch) updateTicket(ctx context.Context, db Queryer, oa *OrgAssets) error { - firstReplySeconds, err := TicketRecordReplied(ctx, db, b.TicketID, dates.Now()) - if err != nil { - return err - } - - // record reply counts for org, user and team - replyCounts := map[string]int{scopeOrg(oa): 1} - - if b.CreatedByID != NilUserID { - user := oa.UserByID(b.CreatedByID) - if user != nil { - replyCounts[scopeUser(oa, user)] = 1 - if user.Team() != nil { - replyCounts[scopeTeam(user.Team())] = 1 - } - } - } - - if err := insertTicketDailyCounts(ctx, db, TicketDailyCountReply, oa.Org().Timezone(), replyCounts); err != nil { - return err - } - - if firstReplySeconds >= 0 { - if err := insertTicketDailyTiming(ctx, db, TicketDailyTimingFirstReply, oa.Org().Timezone(), scopeOrg(oa), firstReplySeconds); err != nil { - return err - } - } - return nil -} - const sqlUpdateMsgForResending = ` UPDATE msgs_msg m SET channel_id = r.channel_id::int, - status = 'P', + status = 'Q', error_count = 0, failed_reason = NULL, queued_on = 
r.queued_on::timestamp with time zone, @@ -1215,24 +661,19 @@ func ResendMessages(ctx context.Context, db Queryer, rp *redis.Pool, oa *OrgAsse if ch != nil { channel := oa.ChannelByUUID(ch.UUID()) - msg.channel = channel - msg.m.ChannelID = channel.ID() - msg.m.ChannelUUID = channel.UUID() msg.m.Status = MsgStatusPending msg.m.QueuedOn = dates.Now() msg.m.SentOn = nil msg.m.ErrorCount = 0 msg.m.FailedReason = "" - msg.m.IsResend = true // mark message as being a resend so it will be queued to courier as such + msg.IsResend = true // mark message as being a resend so it will be queued to courier as such resends = append(resends, msg.m) resent = append(resent, msg) } else { // if we don't have channel or a URN, fail again - msg.channel = nil msg.m.ChannelID = NilChannelID - msg.m.ChannelUUID = assets.ChannelUUID("") msg.m.Status = MsgStatusFailed msg.m.QueuedOn = dates.Now() msg.m.SentOn = nil @@ -1281,68 +722,40 @@ func FailChannelMessages(ctx context.Context, db Queryer, orgID OrgID, channelID return nil } -// MarkBroadcastSent marks the passed in broadcast as sent -func MarkBroadcastSent(ctx context.Context, db Queryer, id BroadcastID) error { - // noop if it is a nil id - if id == NilBroadcastID { - return nil +func NewMsgOut(oa *OrgAssets, c *flows.Contact, text string, atts []utils.Attachment, qrs []string, locale envs.Locale) (*flows.MsgOut, *Channel) { + // resolve URN + channel for this contact + urn := urns.NilURN + var channel *Channel + var channelRef *assets.ChannelReference + for _, dest := range c.ResolveDestinations(false) { + urn = dest.URN.URN() + channel = oa.ChannelByUUID(dest.Channel.UUID()) + channelRef = dest.Channel.Reference() + break } - _, err := db.ExecContext(ctx, `UPDATE msgs_broadcast SET status = 'S', modified_on = now() WHERE id = $1`, id) - if err != nil { - return errors.Wrapf(err, "error setting broadcast with id %d as sent", id) + // is this message sendable? 
+ unsendableReason := flows.NilUnsendableReason + if c.Status() != flows.ContactStatusActive { + unsendableReason = flows.UnsendableReasonContactStatus + } else if urn == urns.NilURN || channel == nil { + unsendableReason = flows.UnsendableReasonNoDestination } - return nil -} - -// NilID implementations - -// MarshalJSON marshals into JSON. 0 values will become null -func (i MsgID) MarshalJSON() ([]byte, error) { - return null.Int(i).MarshalJSON() -} - -// UnmarshalJSON unmarshals from JSON. null values become 0 -func (i *MsgID) UnmarshalJSON(b []byte) error { - return null.UnmarshalInt(b, (*null.Int)(i)) -} - -// Value returns the db value, null is returned for 0 -func (i MsgID) Value() (driver.Value, error) { - return null.Int(i).Value() -} -// Scan scans from the db value. null values become 0 -func (i *MsgID) Scan(value interface{}) error { - return null.ScanInt(value, (*null.Int)(i)) + return flows.NewMsgOut(urn, channelRef, text, atts, qrs, nil, flows.NilMsgTopic, locale, unsendableReason), channel } -// MarshalJSON marshals into JSON. 0 values will become null -func (i BroadcastID) MarshalJSON() ([]byte, error) { - return null.Int(i).MarshalJSON() -} - -// UnmarshalJSON unmarshals from JSON. null values become 0 -func (i *BroadcastID) UnmarshalJSON(b []byte) error { - return null.UnmarshalInt(b, (*null.Int)(i)) -} - -// Value returns the db value, null is returned for 0 -func (i BroadcastID) Value() (driver.Value, error) { - return null.Int(i).Value() -} +// NilID implementations -// Scan scans from the db value. 
null values become 0 -func (i *BroadcastID) Scan(value interface{}) error { - return null.ScanInt(value, (*null.Int)(i)) -} +func (i *MsgID) Scan(value any) error { return null.ScanInt(value, i) } +func (i MsgID) Value() (driver.Value, error) { return null.IntValue(i) } +func (i *MsgID) UnmarshalJSON(b []byte) error { return null.UnmarshalInt(b, i) } +func (i MsgID) MarshalJSON() ([]byte, error) { return null.MarshalInt(i) } -// Value returns the db value, null is returned for "" -func (s MsgFailedReason) Value() (driver.Value, error) { - return null.String(s).Value() -} +func (i *BroadcastID) Scan(value any) error { return null.ScanInt(value, i) } +func (i BroadcastID) Value() (driver.Value, error) { return null.IntValue(i) } +func (i *BroadcastID) UnmarshalJSON(b []byte) error { return null.UnmarshalInt(b, i) } +func (i BroadcastID) MarshalJSON() ([]byte, error) { return null.MarshalInt(i) } -// Scan scans from the db value. null values become "" -func (s *MsgFailedReason) Scan(value interface{}) error { - return null.ScanString(value, (*null.String)(s)) -} +func (s MsgFailedReason) Value() (driver.Value, error) { return null.StringValue(s) } +func (s *MsgFailedReason) Scan(value any) error { return null.ScanString(value, s) } diff --git a/core/models/msgs_test.go b/core/models/msgs_test.go index 967dd7805..10da776e4 100644 --- a/core/models/msgs_test.go +++ b/core/models/msgs_test.go @@ -2,40 +2,37 @@ package models_test import ( "context" - "encoding/json" "fmt" "testing" "time" "github.com/nyaruka/gocommon/dates" "github.com/nyaruka/gocommon/dbutil/assertdb" - "github.com/nyaruka/gocommon/jsonx" "github.com/nyaruka/gocommon/urns" "github.com/nyaruka/goflow/assets" "github.com/nyaruka/goflow/envs" "github.com/nyaruka/goflow/flows" - "github.com/nyaruka/goflow/test" "github.com/nyaruka/goflow/utils" "github.com/nyaruka/mailroom/core/models" "github.com/nyaruka/mailroom/runtime" "github.com/nyaruka/mailroom/testsuite" 
"github.com/nyaruka/mailroom/testsuite/testdata" - "github.com/nyaruka/null" + "github.com/nyaruka/null/v2" "github.com/nyaruka/redisx/assertredis" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) func TestNewOutgoingFlowMsg(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetData) - blake := testdata.InsertContact(db, testdata.Org1, "79b94a23-6d13-43f4-95fe-c733ee457857", "Blake", envs.NilLanguage, models.ContactStatusBlocked) - blakeURNID := testdata.InsertContactURN(db, testdata.Org1, blake, "tel:++250700000007", 1) + blake := testdata.InsertContact(rt, testdata.Org1, "79b94a23-6d13-43f4-95fe-c733ee457857", "Blake", envs.NilLanguage, models.ContactStatusBlocked) + blakeURNID := testdata.InsertContactURN(rt, testdata.Org1, blake, "tel:++250700000007", 1) tcs := []struct { - ChannelUUID assets.ChannelUUID + Channel *testdata.Channel Text string Contact *testdata.Contact URN urns.URN @@ -55,7 +52,7 @@ func TestNewOutgoingFlowMsg(t *testing.T) { ExpectedPriority bool }{ { - ChannelUUID: "74729f45-7f29-4868-9dc4-90e491e3c7d8", + Channel: testdata.TwilioChannel, Text: "missing urn id", Contact: testdata.Cathy, URN: urns.URN("tel:+250700000001"), @@ -69,7 +66,7 @@ func TestNewOutgoingFlowMsg(t *testing.T) { ExpectedPriority: true, }, { - ChannelUUID: "74729f45-7f29-4868-9dc4-90e491e3c7d8", + Channel: testdata.TwilioChannel, Text: "test outgoing", Contact: testdata.Cathy, URN: urns.URN(fmt.Sprintf("tel:+250700000001?id=%d", testdata.Cathy.URNID)), @@ -79,15 +76,12 @@ func TestNewOutgoingFlowMsg(t *testing.T) { Flow: testdata.SingleMessage, ExpectedStatus: models.MsgStatusQueued, ExpectedFailedReason: models.NilMsgFailedReason, - ExpectedMetadata: map[string]interface{}{ - "quick_replies": []string{"yes", "no"}, - "topic": "purchase", - }, - ExpectedMsgCount: 1, - ExpectedPriority: false, + ExpectedMetadata: map[string]interface{}{"topic": "purchase"}, + ExpectedMsgCount: 1, 
+ ExpectedPriority: false, }, { - ChannelUUID: "74729f45-7f29-4868-9dc4-90e491e3c7d8", + Channel: testdata.TwilioChannel, Text: "test outgoing", Contact: testdata.Cathy, URN: urns.URN(fmt.Sprintf("tel:+250700000001?id=%d", testdata.Cathy.URNID)), @@ -101,7 +95,7 @@ func TestNewOutgoingFlowMsg(t *testing.T) { ExpectedPriority: false, }, { - ChannelUUID: "74729f45-7f29-4868-9dc4-90e491e3c7d8", + Channel: testdata.TwilioChannel, Text: "suspended org", Contact: testdata.Cathy, URN: urns.URN(fmt.Sprintf("tel:+250700000001?id=%d", testdata.Cathy.URNID)), @@ -115,8 +109,8 @@ func TestNewOutgoingFlowMsg(t *testing.T) { ExpectedPriority: false, }, { - ChannelUUID: "74729f45-7f29-4868-9dc4-90e491e3c7d8", - Text: "missing URN", + Channel: nil, + Text: "no destination", Contact: testdata.Cathy, URN: urns.NilURN, URNID: models.URNID(0), @@ -129,21 +123,7 @@ func TestNewOutgoingFlowMsg(t *testing.T) { ExpectedPriority: false, }, { - ChannelUUID: "", - Text: "missing Channel", - Contact: testdata.Cathy, - URN: urns.URN(fmt.Sprintf("tel:+250700000001?id=%d", testdata.Cathy.URNID)), - URNID: testdata.Cathy.URNID, - Unsendable: flows.UnsendableReasonNoDestination, - Flow: testdata.Favorites, - ExpectedStatus: models.MsgStatusFailed, - ExpectedFailedReason: models.MsgFailedNoDestination, - ExpectedMetadata: map[string]interface{}{}, - ExpectedMsgCount: 1, - ExpectedPriority: false, - }, - { - ChannelUUID: "74729f45-7f29-4868-9dc4-90e491e3c7d8", + Channel: testdata.TwilioChannel, Text: "blocked contact", Contact: blake, URN: urns.URN(fmt.Sprintf("tel:+250700000007?id=%d", blakeURNID)), @@ -162,12 +142,20 @@ func TestNewOutgoingFlowMsg(t *testing.T) { for _, tc := range tcs { desc := fmt.Sprintf("text='%s'", tc.Text) - db.MustExec(`UPDATE orgs_org SET is_suspended = $1 WHERE id = $2`, tc.SuspendedOrg, testdata.Org1.ID) + rt.DB.MustExec(`UPDATE orgs_org SET is_suspended = $1 WHERE id = $2`, tc.SuspendedOrg, testdata.Org1.ID) oa, err := models.GetOrgAssetsWithRefresh(ctx, rt, 
testdata.Org1.ID, models.RefreshOrg) require.NoError(t, err) - channel := oa.ChannelByUUID(tc.ChannelUUID) + var ch *models.Channel + var chRef *assets.ChannelReference + expectedChannelID := models.NilChannelID + if tc.Channel != nil { + ch = oa.ChannelByUUID(tc.Channel.UUID) + chRef = ch.ChannelReference() + expectedChannelID = ch.ID() + } + flow, _ := oa.FlowByID(tc.Flow.ID) session := insertTestSession(t, ctx, rt, testdata.Org1, tc.Contact, testdata.Favorites) @@ -175,18 +163,25 @@ func TestNewOutgoingFlowMsg(t *testing.T) { session.SetIncomingMsg(tc.ResponseTo, null.NullString) } - flowMsg := flows.NewMsgOut(tc.URN, assets.NewChannelReference(tc.ChannelUUID, "Test Channel"), tc.Text, tc.Attachments, tc.QuickReplies, nil, tc.Topic, envs.NilLocale, tc.Unsendable) - msg, err := models.NewOutgoingFlowMsg(rt, oa.Org(), channel, session, flow, flowMsg, now) + flowMsg := flows.NewMsgOut(tc.URN, chRef, tc.Text, tc.Attachments, tc.QuickReplies, nil, tc.Topic, envs.NilLocale, tc.Unsendable) + msg, err := models.NewOutgoingFlowMsg(rt, oa.Org(), ch, session, flow, flowMsg, now) assert.NoError(t, err) - err = models.InsertMessages(ctx, db, []*models.Msg{msg}) + expectedAttachments := tc.Attachments + if expectedAttachments == nil { + expectedAttachments = []utils.Attachment{} + } + + err = models.InsertMessages(ctx, rt.DB, []*models.Msg{msg}) assert.NoError(t, err) assert.Equal(t, oa.OrgID(), msg.OrgID()) assert.Equal(t, tc.Text, msg.Text()) + assert.Equal(t, models.MsgTypeText, msg.Type()) + assert.Equal(t, expectedAttachments, msg.Attachments()) + assert.Equal(t, tc.QuickReplies, msg.QuickReplies()) assert.Equal(t, tc.Contact.ID, msg.ContactID()) - assert.Equal(t, channel, msg.Channel()) - assert.Equal(t, tc.ChannelUUID, msg.ChannelUUID()) + assert.Equal(t, expectedChannelID, msg.ChannelID()) assert.Equal(t, tc.URN, msg.URN()) if tc.URNID != models.NilURNID { assert.Equal(t, tc.URNID, *msg.ContactURNID()) @@ -206,10 +201,10 @@ func TestNewOutgoingFlowMsg(t *testing.T) { 
} // check nil failed reasons are saved as NULLs - assertdb.Query(t, db, `SELECT count(*) FROM msgs_msg WHERE failed_reason IS NOT NULL`).Returns(4) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM msgs_msg WHERE failed_reason IS NOT NULL`).Returns(3) // ensure org is unsuspended - db.MustExec(`UPDATE orgs_org SET is_suspended = FALSE`) + rt.DB.MustExec(`UPDATE orgs_org SET is_suspended = FALSE`) models.FlushCache() oa, err := models.GetOrgAssetsWithRefresh(ctx, rt, testdata.Org1.ID, models.RefreshOrg) @@ -243,179 +238,20 @@ func TestNewOutgoingFlowMsg(t *testing.T) { } } -func TestMarshalMsg(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() - - defer testsuite.Reset(testsuite.ResetData) - - assertdb.Query(t, db, `SELECT count(*) FROM orgs_org WHERE is_suspended = TRUE`).Returns(0) - - oa, err := models.GetOrgAssets(ctx, rt, testdata.Org1.ID) - require.NoError(t, err) - require.False(t, oa.Org().Suspended()) - - channel := oa.ChannelByUUID(testdata.TwilioChannel.UUID) - flow, _ := oa.FlowByID(testdata.Favorites.ID) - urn := urns.URN(fmt.Sprintf("tel:+250700000001?id=%d", testdata.Cathy.URNID)) - flowMsg1 := flows.NewMsgOut( - urn, - assets.NewChannelReference(testdata.TwilioChannel.UUID, "Test Channel"), - "Hi there", - []utils.Attachment{utils.Attachment("image/jpeg:https://dl-foo.com/image.jpg")}, - []string{"yes", "no"}, - nil, - flows.MsgTopicPurchase, - envs.NilLocale, - flows.NilUnsendableReason, - ) - - // create a non-priority flow message.. i.e. 
the session isn't responding to an incoming message - session := insertTestSession(t, ctx, rt, testdata.Org1, testdata.Cathy, testdata.Favorites) - msg1, err := models.NewOutgoingFlowMsg(rt, oa.Org(), channel, session, flow, flowMsg1, time.Date(2021, 11, 9, 14, 3, 30, 0, time.UTC)) - require.NoError(t, err) - - cathy := session.Contact() - - err = models.InsertMessages(ctx, db, []*models.Msg{msg1}) - require.NoError(t, err) - - marshaled, err := json.Marshal(msg1) - assert.NoError(t, err) - - test.AssertEqualJSON(t, []byte(fmt.Sprintf(`{ - "attachments": [ - "image/jpeg:https://dl-foo.com/image.jpg" - ], - "channel_id": 10000, - "channel_uuid": "74729f45-7f29-4868-9dc4-90e491e3c7d8", - "contact_id": 10000, - "contact_urn_id": 10000, - "created_on": "2021-11-09T14:03:30Z", - "direction": "O", - "error_count": 0, - "flow": {"uuid": "9de3663f-c5c5-4c92-9f45-ecbc09abcc85", "name": "Favorites"}, - "high_priority": false, - "id": %d, - "metadata": { - "quick_replies": [ - "yes", - "no" - ], - "topic": "purchase" - }, - "modified_on": %s, - "next_attempt": null, - "org_id": 1, - "queued_on": %s, - "sent_on": null, - "session_id": %d, - "session_status": "W", - "status": "Q", - "text": "Hi there", - "tps_cost": 2, - "urn": "tel:+250700000001?id=10000", - "uuid": "%s" - }`, msg1.ID(), jsonx.MustMarshal(msg1.ModifiedOn()), jsonx.MustMarshal(msg1.QueuedOn()), session.ID(), msg1.UUID())), marshaled) - - // create a priority flow message.. i.e. 
the session is responding to an incoming message - flowMsg2 := flows.NewMsgOut( - urn, - assets.NewChannelReference(testdata.TwilioChannel.UUID, "Test Channel"), - "Hi there", - nil, nil, nil, - flows.NilMsgTopic, - envs.NilLocale, - flows.NilUnsendableReason, - ) - in1 := testdata.InsertIncomingMsg(db, testdata.Org1, testdata.TwilioChannel, testdata.Cathy, "test", models.MsgStatusHandled) - session.SetIncomingMsg(models.MsgID(in1.ID()), null.String("EX123")) - msg2, err := models.NewOutgoingFlowMsg(rt, oa.Org(), channel, session, flow, flowMsg2, time.Date(2021, 11, 9, 14, 3, 30, 0, time.UTC)) - require.NoError(t, err) - - err = models.InsertMessages(ctx, db, []*models.Msg{msg2}) - require.NoError(t, err) - - marshaled, err = json.Marshal(msg2) - assert.NoError(t, err) - - test.AssertEqualJSON(t, []byte(fmt.Sprintf(`{ - "channel_id": 10000, - "channel_uuid": "74729f45-7f29-4868-9dc4-90e491e3c7d8", - "contact_id": 10000, - "contact_urn_id": 10000, - "created_on": "2021-11-09T14:03:30Z", - "direction": "O", - "error_count": 0, - "flow": {"uuid": "9de3663f-c5c5-4c92-9f45-ecbc09abcc85", "name": "Favorites"}, - "response_to_external_id": "EX123", - "high_priority": true, - "id": %d, - "metadata": null, - "modified_on": %s, - "next_attempt": null, - "org_id": 1, - "queued_on": %s, - "sent_on": null, - "session_id": %d, - "session_status": "W", - "status": "Q", - "text": "Hi there", - "tps_cost": 1, - "urn": "tel:+250700000001?id=10000", - "uuid": "%s" - }`, msg2.ID(), jsonx.MustMarshal(msg2.ModifiedOn()), jsonx.MustMarshal(msg2.QueuedOn()), session.ID(), msg2.UUID())), marshaled) - - // try a broadcast message which won't have session and flow fields set - bcastID := testdata.InsertBroadcast(db, testdata.Org1, `eng`, map[envs.Language]string{`eng`: "Blast"}, models.NilScheduleID, []*testdata.Contact{testdata.Cathy}, nil) - bcastMsg1 := flows.NewMsgOut(urn, assets.NewChannelReference(testdata.TwilioChannel.UUID, "Test Channel"), "Blast", nil, nil, nil, flows.NilMsgTopic, 
envs.NilLocale, flows.NilUnsendableReason) - msg3, err := models.NewOutgoingBroadcastMsg(rt, oa.Org(), channel, cathy, bcastMsg1, time.Date(2021, 11, 9, 14, 3, 30, 0, time.UTC), bcastID) - require.NoError(t, err) - - err = models.InsertMessages(ctx, db, []*models.Msg{msg2}) - require.NoError(t, err) - - marshaled, err = json.Marshal(msg3) - assert.NoError(t, err) - - test.AssertEqualJSON(t, []byte(fmt.Sprintf(`{ - "broadcast_id": %d, - "channel_id": 10000, - "channel_uuid": "74729f45-7f29-4868-9dc4-90e491e3c7d8", - "contact_id": 10000, - "contact_urn_id": 10000, - "created_on": "2021-11-09T14:03:30Z", - "direction": "O", - "error_count": 0, - "high_priority": false, - "id": %d, - "metadata": null, - "modified_on": %s, - "next_attempt": null, - "org_id": 1, - "queued_on": %s, - "sent_on": null, - "status": "Q", - "text": "Blast", - "tps_cost": 1, - "urn": "tel:+250700000001?id=10000", - "uuid": "%s" - }`, bcastID, msg3.ID(), jsonx.MustMarshal(msg3.ModifiedOn()), jsonx.MustMarshal(msg3.QueuedOn()), msg3.UUID())), marshaled) -} - func TestGetMessagesByID(t *testing.T) { - ctx, _, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetData) - msgIn1 := testdata.InsertIncomingMsg(db, testdata.Org1, testdata.TwilioChannel, testdata.Cathy, "in 1", models.MsgStatusHandled) - msgOut1 := testdata.InsertOutgoingMsg(db, testdata.Org1, testdata.TwilioChannel, testdata.Cathy, "out 1", []utils.Attachment{"image/jpeg:hi.jpg"}, models.MsgStatusSent, false) - msgOut2 := testdata.InsertOutgoingMsg(db, testdata.Org1, testdata.TwilioChannel, testdata.Cathy, "out 2", nil, models.MsgStatusSent, false) - msgOut3 := testdata.InsertOutgoingMsg(db, testdata.Org2, testdata.Org2Channel, testdata.Org2Contact, "out 3", nil, models.MsgStatusSent, false) - testdata.InsertOutgoingMsg(db, testdata.Org1, testdata.TwilioChannel, testdata.Cathy, "hi 3", nil, models.MsgStatusSent, false) + msgIn1 := testdata.InsertIncomingMsg(rt, testdata.Org1, 
testdata.TwilioChannel, testdata.Cathy, "in 1", models.MsgStatusHandled) + msgOut1 := testdata.InsertOutgoingMsg(rt, testdata.Org1, testdata.TwilioChannel, testdata.Cathy, "out 1", []utils.Attachment{"image/jpeg:hi.jpg"}, models.MsgStatusSent, false) + msgOut2 := testdata.InsertOutgoingMsg(rt, testdata.Org1, testdata.TwilioChannel, testdata.Cathy, "out 2", nil, models.MsgStatusSent, false) + msgOut3 := testdata.InsertOutgoingMsg(rt, testdata.Org2, testdata.Org2Channel, testdata.Org2Contact, "out 3", nil, models.MsgStatusSent, false) + testdata.InsertOutgoingMsg(rt, testdata.Org1, testdata.TwilioChannel, testdata.Cathy, "hi 3", nil, models.MsgStatusSent, false) ids := []models.MsgID{models.MsgID(msgIn1.ID()), models.MsgID(msgOut1.ID()), models.MsgID(msgOut2.ID()), models.MsgID(msgOut3.ID())} - msgs, err := models.GetMessagesByID(ctx, db, testdata.Org1.ID, models.DirectionOut, ids) + msgs, err := models.GetMessagesByID(ctx, rt.DB, testdata.Org1.ID, models.DirectionOut, ids) // should only return the outgoing messages for this org require.NoError(t, err) @@ -424,7 +260,7 @@ func TestGetMessagesByID(t *testing.T) { assert.Equal(t, []utils.Attachment{"image/jpeg:hi.jpg"}, msgs[0].Attachments()) assert.Equal(t, "out 2", msgs[1].Text()) - msgs, err = models.GetMessagesByID(ctx, db, testdata.Org1.ID, models.DirectionIn, ids) + msgs, err = models.GetMessagesByID(ctx, rt.DB, testdata.Org1.ID, models.DirectionIn, ids) // should only return the incoming message for this org require.NoError(t, err) @@ -433,70 +269,70 @@ func TestGetMessagesByID(t *testing.T) { } func TestResendMessages(t *testing.T) { - ctx, rt, db, rp := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetAll) oa, err := models.GetOrgAssets(ctx, rt, testdata.Org1.ID) require.NoError(t, err) - out1 := testdata.InsertOutgoingMsg(db, testdata.Org1, testdata.TwilioChannel, testdata.Cathy, "hi", nil, models.MsgStatusFailed, false) - out2 := testdata.InsertOutgoingMsg(db, 
testdata.Org1, testdata.TwilioChannel, testdata.Bob, "hi", nil, models.MsgStatusFailed, false) + out1 := testdata.InsertOutgoingMsg(rt, testdata.Org1, testdata.TwilioChannel, testdata.Cathy, "hi", nil, models.MsgStatusFailed, false) + out2 := testdata.InsertOutgoingMsg(rt, testdata.Org1, testdata.TwilioChannel, testdata.Bob, "hi", nil, models.MsgStatusFailed, false) // failed message with no channel - out3 := testdata.InsertOutgoingMsg(db, testdata.Org1, nil, testdata.Cathy, "hi", nil, models.MsgStatusFailed, false) + out3 := testdata.InsertOutgoingMsg(rt, testdata.Org1, nil, testdata.Cathy, "hi", nil, models.MsgStatusFailed, false) // failed message with no URN - out4 := testdata.InsertOutgoingMsg(db, testdata.Org1, testdata.TwilioChannel, testdata.Cathy, "hi", nil, models.MsgStatusFailed, false) - db.MustExec(`UPDATE msgs_msg SET contact_urn_id = NULL, failed_reason = 'D' WHERE id = $1`, out4.ID()) + out4 := testdata.InsertOutgoingMsg(rt, testdata.Org1, testdata.TwilioChannel, testdata.Cathy, "hi", nil, models.MsgStatusFailed, false) + rt.DB.MustExec(`UPDATE msgs_msg SET contact_urn_id = NULL, failed_reason = 'D' WHERE id = $1`, out4.ID()) // failed message with URN which we no longer have a channel for - out5 := testdata.InsertOutgoingMsg(db, testdata.Org1, nil, testdata.George, "hi", nil, models.MsgStatusFailed, false) - db.MustExec(`UPDATE msgs_msg SET failed_reason = 'E' WHERE id = $1`, out5.ID()) - db.MustExec(`UPDATE contacts_contacturn SET scheme = 'viber', path = '1234', identity = 'viber:1234' WHERE id = $1`, testdata.George.URNID) + out5 := testdata.InsertOutgoingMsg(rt, testdata.Org1, nil, testdata.George, "hi", nil, models.MsgStatusFailed, false) + rt.DB.MustExec(`UPDATE msgs_msg SET failed_reason = 'E' WHERE id = $1`, out5.ID()) + rt.DB.MustExec(`UPDATE contacts_contacturn SET scheme = 'viber', path = '1234', identity = 'viber:1234' WHERE id = $1`, testdata.George.URNID) // other failed message not included in set to resend - 
testdata.InsertOutgoingMsg(db, testdata.Org1, testdata.TwilioChannel, testdata.Cathy, "hi", nil, models.MsgStatusFailed, false) + testdata.InsertOutgoingMsg(rt, testdata.Org1, testdata.TwilioChannel, testdata.Cathy, "hi", nil, models.MsgStatusFailed, false) // give Bob's URN an affinity for the Vonage channel - db.MustExec(`UPDATE contacts_contacturn SET channel_id = $1 WHERE id = $2`, testdata.VonageChannel.ID, testdata.Bob.URNID) + rt.DB.MustExec(`UPDATE contacts_contacturn SET channel_id = $1 WHERE id = $2`, testdata.VonageChannel.ID, testdata.Bob.URNID) ids := []models.MsgID{models.MsgID(out1.ID()), models.MsgID(out2.ID()), models.MsgID(out3.ID()), models.MsgID(out4.ID()), models.MsgID(out5.ID())} - msgs, err := models.GetMessagesByID(ctx, db, testdata.Org1.ID, models.DirectionOut, ids) + msgs, err := models.GetMessagesByID(ctx, rt.DB, testdata.Org1.ID, models.DirectionOut, ids) require.NoError(t, err) now := dates.Now() // resend both msgs - resent, err := models.ResendMessages(ctx, db, rp, oa, msgs) + resent, err := models.ResendMessages(ctx, rt.DB, rt.RP, oa, msgs) require.NoError(t, err) assert.Len(t, resent, 3) // only #1, #2 and #3 can be resent // both messages should now have a channel and be marked for resending - assert.True(t, resent[0].IsResend()) + assert.True(t, resent[0].IsResend) assert.Equal(t, testdata.TwilioChannel.ID, resent[0].ChannelID()) - assert.True(t, resent[1].IsResend()) + assert.True(t, resent[1].IsResend) assert.Equal(t, testdata.VonageChannel.ID, resent[1].ChannelID()) // channel changed - assert.True(t, resent[2].IsResend()) + assert.True(t, resent[2].IsResend) assert.Equal(t, testdata.TwilioChannel.ID, resent[2].ChannelID()) // channel added - assertdb.Query(t, db, `SELECT count(*) FROM msgs_msg WHERE status = 'P' AND queued_on > $1 AND sent_on IS NULL`, now).Returns(3) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM msgs_msg WHERE status = 'Q' AND queued_on > $1 AND sent_on IS NULL`, now).Returns(3) - assertdb.Query(t, db, 
`SELECT status, failed_reason FROM msgs_msg WHERE id = $1`, out4.ID()).Columns(map[string]interface{}{"status": "F", "failed_reason": "D"}) - assertdb.Query(t, db, `SELECT status, failed_reason FROM msgs_msg WHERE id = $1`, out5.ID()).Columns(map[string]interface{}{"status": "F", "failed_reason": "D"}) + assertdb.Query(t, rt.DB, `SELECT status, failed_reason FROM msgs_msg WHERE id = $1`, out4.ID()).Columns(map[string]any{"status": "F", "failed_reason": "D"}) + assertdb.Query(t, rt.DB, `SELECT status, failed_reason FROM msgs_msg WHERE id = $1`, out5.ID()).Columns(map[string]any{"status": "F", "failed_reason": "D"}) } func TestFailMessages(t *testing.T) { - ctx, _, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetAll) - out1 := testdata.InsertOutgoingMsg(db, testdata.Org1, testdata.TwilioChannel, testdata.Cathy, "hi", nil, models.MsgStatusPending, false) - out2 := testdata.InsertOutgoingMsg(db, testdata.Org1, testdata.TwilioChannel, testdata.Bob, "hi", nil, models.MsgStatusErrored, false) - out3 := testdata.InsertOutgoingMsg(db, testdata.Org1, testdata.TwilioChannel, testdata.Cathy, "hi", nil, models.MsgStatusFailed, false) - out4 := testdata.InsertOutgoingMsg(db, testdata.Org1, testdata.TwilioChannel, testdata.Cathy, "hi", nil, models.MsgStatusQueued, false) - testdata.InsertOutgoingMsg(db, testdata.Org1, testdata.TwilioChannel, testdata.George, "hi", nil, models.MsgStatusQueued, false) + out1 := testdata.InsertOutgoingMsg(rt, testdata.Org1, testdata.TwilioChannel, testdata.Cathy, "hi", nil, models.MsgStatusPending, false) + out2 := testdata.InsertOutgoingMsg(rt, testdata.Org1, testdata.TwilioChannel, testdata.Bob, "hi", nil, models.MsgStatusErrored, false) + out3 := testdata.InsertOutgoingMsg(rt, testdata.Org1, testdata.TwilioChannel, testdata.Cathy, "hi", nil, models.MsgStatusFailed, false) + out4 := testdata.InsertOutgoingMsg(rt, testdata.Org1, testdata.TwilioChannel, testdata.Cathy, "hi", nil, models.MsgStatusQueued, 
false) + testdata.InsertOutgoingMsg(rt, testdata.Org1, testdata.TwilioChannel, testdata.George, "hi", nil, models.MsgStatusQueued, false) ids := []models.MsgID{models.MsgID(out1.ID()), models.MsgID(out2.ID()), models.MsgID(out3.ID()), models.MsgID(out4.ID())} println(ids) @@ -504,20 +340,20 @@ func TestFailMessages(t *testing.T) { now := dates.Now() // fail the msgs - err := models.FailChannelMessages(ctx, db, testdata.Org1.ID, testdata.TwilioChannel.ID, models.MsgFailedChannelRemoved) + err := models.FailChannelMessages(ctx, rt.DB, testdata.Org1.ID, testdata.TwilioChannel.ID, models.MsgFailedChannelRemoved) require.NoError(t, err) //assert.Len(t, failedMsgs, 3) - assertdb.Query(t, db, `SELECT count(*) FROM msgs_msg WHERE status = 'F' AND modified_on > $1`, now).Returns(4) - assertdb.Query(t, db, `SELECT count(*) FROM msgs_msg WHERE status = 'F' AND failed_reason = 'R' AND modified_on > $1`, now).Returns(4) - assertdb.Query(t, db, `SELECT status FROM msgs_msg WHERE id = $1`, out3.ID()).Columns(map[string]interface{}{"status": "F"}) - assertdb.Query(t, db, `SELECT failed_reason FROM msgs_msg WHERE id = $1`, out3.ID()).Columns(map[string]interface{}{"failed_reason": nil}) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM msgs_msg WHERE status = 'F' AND modified_on > $1`, now).Returns(4) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM msgs_msg WHERE status = 'F' AND failed_reason = 'R' AND modified_on > $1`, now).Returns(4) + assertdb.Query(t, rt.DB, `SELECT status FROM msgs_msg WHERE id = $1`, out3.ID()).Columns(map[string]interface{}{"status": "F"}) + assertdb.Query(t, rt.DB, `SELECT failed_reason FROM msgs_msg WHERE id = $1`, out3.ID()).Columns(map[string]interface{}{"failed_reason": nil}) } func TestGetMsgRepetitions(t *testing.T) { - _, rt, db, rp := testsuite.Get() + _, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetRedis) defer dates.SetNowSource(dates.DefaultNowSource) @@ -525,41 +361,37 @@ func TestGetMsgRepetitions(t *testing.T) { 
dates.SetNowSource(dates.NewFixedNowSource(time.Date(2021, 11, 18, 12, 13, 3, 234567, time.UTC))) oa := testdata.Org1.Load(rt) - _, cathy := testdata.Cathy.Load(db, oa) + _, cathy := testdata.Cathy.Load(rt, oa) + _, george := testdata.George.Load(rt, oa) msg1 := flows.NewMsgOut(testdata.Cathy.URN, nil, "foo", nil, nil, nil, flows.NilMsgTopic, envs.NilLocale, flows.NilUnsendableReason) - msg2 := flows.NewMsgOut(testdata.Cathy.URN, nil, "bar", nil, nil, nil, flows.NilMsgTopic, envs.NilLocale, flows.NilUnsendableReason) + msg2 := flows.NewMsgOut(testdata.Cathy.URN, nil, "FOO", nil, nil, nil, flows.NilMsgTopic, envs.NilLocale, flows.NilUnsendableReason) + msg3 := flows.NewMsgOut(testdata.Cathy.URN, nil, "bar", nil, nil, nil, flows.NilMsgTopic, envs.NilLocale, flows.NilUnsendableReason) + msg4 := flows.NewMsgOut(testdata.George.URN, nil, "foo", nil, nil, nil, flows.NilMsgTopic, envs.NilLocale, flows.NilUnsendableReason) - assertRepetitions := func(m *flows.MsgOut, expected int) { - count, err := models.GetMsgRepetitions(rp, cathy, m) + assertRepetitions := func(contact *flows.Contact, m *flows.MsgOut, expected int) { + count, err := models.GetMsgRepetitions(rt.RP, contact, m) require.NoError(t, err) assert.Equal(t, expected, count) } - // keep counts up to 99 - for i := 0; i < 99; i++ { - assertRepetitions(msg1, i+1) + for i := 0; i < 20; i++ { + assertRepetitions(cathy, msg1, i+1) } - assertredis.HGetAll(t, rp, "msg_repetitions:2021-11-18T12:15", map[string]string{"10000": "99:foo"}) - - for i := 0; i < 50; i++ { - assertRepetitions(msg1, 99) + for i := 0; i < 10; i++ { + assertRepetitions(cathy, msg2, i+21) } - assertredis.HGetAll(t, rp, "msg_repetitions:2021-11-18T12:15", map[string]string{"10000": "99:foo"}) - - for i := 0; i < 19; i++ { - assertRepetitions(msg2, i+1) + for i := 0; i < 5; i++ { + assertRepetitions(cathy, msg3, i+1) } - assertredis.HGetAll(t, rp, "msg_repetitions:2021-11-18T12:15", map[string]string{"10000": "19:bar"}) - - for i := 0; i < 50; i++ { - 
assertRepetitions(msg2, 20+i) + for i := 0; i < 5; i++ { + assertRepetitions(george, msg4, i+1) } - assertredis.HGetAll(t, rp, "msg_repetitions:2021-11-18T12:15", map[string]string{"10000": "69:bar"}) + assertredis.HGetAll(t, rt.RP, "msg_repetitions:2021-11-18T12:15", map[string]string{"10000|foo": "30", "10000|bar": "5", "10002|foo": "5"}) } func TestNormalizeAttachment(t *testing.T) { - _, rt, _, _ := testsuite.Get() + _, rt := testsuite.Runtime() rt.Config.AttachmentDomain = "foo.bar.com" defer func() { rt.Config.AttachmentDomain = "" }() @@ -581,142 +413,119 @@ func TestNormalizeAttachment(t *testing.T) { } func TestMarkMessages(t *testing.T) { - ctx, _, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() - defer testsuite.Reset(testsuite.ResetAll) + defer testsuite.Reset(testsuite.ResetData) - flowMsg1 := testdata.InsertOutgoingMsg(db, testdata.Org1, testdata.TwilioChannel, testdata.Cathy, "Hello", nil, models.MsgStatusQueued, false) - msgs, err := models.GetMessagesByID(ctx, db, testdata.Org1.ID, models.DirectionOut, []models.MsgID{models.MsgID(flowMsg1.ID())}) + flowMsg1 := testdata.InsertOutgoingMsg(rt, testdata.Org1, testdata.TwilioChannel, testdata.Cathy, "Hello", nil, models.MsgStatusQueued, false) + msgs, err := models.GetMessagesByID(ctx, rt.DB, testdata.Org1.ID, models.DirectionOut, []models.MsgID{models.MsgID(flowMsg1.ID())}) require.NoError(t, err) msg1 := msgs[0] - flowMsg2 := testdata.InsertOutgoingMsg(db, testdata.Org1, testdata.TwilioChannel, testdata.Cathy, "Hola", nil, models.MsgStatusQueued, false) - msgs, err = models.GetMessagesByID(ctx, db, testdata.Org1.ID, models.DirectionOut, []models.MsgID{models.MsgID(flowMsg2.ID())}) + flowMsg2 := testdata.InsertOutgoingMsg(rt, testdata.Org1, testdata.TwilioChannel, testdata.Cathy, "Hola", nil, models.MsgStatusQueued, false) + msgs, err = models.GetMessagesByID(ctx, rt.DB, testdata.Org1.ID, models.DirectionOut, []models.MsgID{models.MsgID(flowMsg2.ID())}) require.NoError(t, err) msg2 := msgs[0] 
- testdata.InsertOutgoingMsg(db, testdata.Org1, testdata.TwilioChannel, testdata.Cathy, "Howdy", nil, models.MsgStatusQueued, false) + testdata.InsertOutgoingMsg(rt, testdata.Org1, testdata.TwilioChannel, testdata.Cathy, "Howdy", nil, models.MsgStatusQueued, false) - models.MarkMessagesForRequeuing(ctx, db, []*models.Msg{msg1, msg2}) + models.MarkMessagesForRequeuing(ctx, rt.DB, []*models.Msg{msg1, msg2}) - assertdb.Query(t, db, `SELECT count(*) FROM msgs_msg WHERE status = 'P'`).Returns(2) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM msgs_msg WHERE status = 'I'`).Returns(2) // try running on database with BIGINT message ids - db.MustExec(`ALTER SEQUENCE "msgs_msg_id_seq" AS bigint;`) - db.MustExec(`ALTER SEQUENCE "msgs_msg_id_seq" RESTART WITH 3000000000;`) + rt.DB.MustExec(`ALTER SEQUENCE "msgs_msg_id_seq" AS bigint;`) + rt.DB.MustExec(`ALTER SEQUENCE "msgs_msg_id_seq" RESTART WITH 3000000000;`) - flowMsg4 := testdata.InsertOutgoingMsg(db, testdata.Org1, testdata.TwilioChannel, testdata.Cathy, "Big messages!", nil, models.MsgStatusQueued, false) - msgs, err = models.GetMessagesByID(ctx, db, testdata.Org1.ID, models.DirectionOut, []models.MsgID{models.MsgID(flowMsg4.ID())}) + flowMsg4 := testdata.InsertOutgoingMsg(rt, testdata.Org1, testdata.TwilioChannel, testdata.Cathy, "Big messages!", nil, models.MsgStatusQueued, false) + msgs, err = models.GetMessagesByID(ctx, rt.DB, testdata.Org1.ID, models.DirectionOut, []models.MsgID{models.MsgID(flowMsg4.ID())}) require.NoError(t, err) msg4 := msgs[0] assert.Equal(t, flows.MsgID(3000000000), msg4.ID()) - err = models.MarkMessagesForRequeuing(ctx, db, []*models.Msg{msg4}) + err = models.MarkMessagesForRequeuing(ctx, rt.DB, []*models.Msg{msg4}) assert.NoError(t, err) - assertdb.Query(t, db, `SELECT count(*) FROM msgs_msg WHERE status = 'P'`).Returns(3) - assertdb.Query(t, db, `SELECT count(*) FROM msgs_msg WHERE status = 'Q'`).Returns(1) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM msgs_msg WHERE status = 
'I'`).Returns(3) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM msgs_msg WHERE status = 'Q'`).Returns(1) - err = models.MarkMessagesQueued(ctx, db, []*models.Msg{msg4}) + err = models.MarkMessagesQueued(ctx, rt.DB, []*models.Msg{msg4}) assert.NoError(t, err) - assertdb.Query(t, db, `SELECT count(*) FROM msgs_msg WHERE status = 'P'`).Returns(2) - assertdb.Query(t, db, `SELECT count(*) FROM msgs_msg WHERE status = 'Q'`).Returns(2) -} - -func TestNonPersistentBroadcasts(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() - - defer testsuite.Reset(testsuite.ResetData) - - ticket := testdata.InsertOpenTicket(db, testdata.Org1, testdata.Bob, testdata.Mailgun, testdata.DefaultTopic, "", "", time.Now(), nil) - modelTicket := ticket.Load(db) - - translations := map[envs.Language]*models.BroadcastTranslation{envs.Language("eng"): {Text: "Hi there"}} - - // create a broadcast which doesn't actually exist in the DB - bcast := models.NewBroadcast( - testdata.Org1.ID, - models.NilBroadcastID, - translations, - models.TemplateStateUnevaluated, - envs.Language("eng"), - []urns.URN{"tel:+593979012345"}, - []models.ContactID{testdata.Alexandria.ID, testdata.Bob.ID, testdata.Cathy.ID}, - []models.GroupID{testdata.DoctorsGroup.ID}, - ticket.ID, - models.NilUserID, - ) - - assert.Equal(t, models.NilBroadcastID, bcast.ID()) - assert.Equal(t, testdata.Org1.ID, bcast.OrgID()) - assert.Equal(t, envs.Language("eng"), bcast.BaseLanguage()) - assert.Equal(t, translations, bcast.Translations()) - assert.Equal(t, models.TemplateStateUnevaluated, bcast.TemplateState()) - assert.Equal(t, ticket.ID, bcast.TicketID()) - assert.Equal(t, []urns.URN{"tel:+593979012345"}, bcast.URNs()) - assert.Equal(t, []models.ContactID{testdata.Alexandria.ID, testdata.Bob.ID, testdata.Cathy.ID}, bcast.ContactIDs()) - assert.Equal(t, []models.GroupID{testdata.DoctorsGroup.ID}, bcast.GroupIDs()) - - batch := bcast.CreateBatch([]models.ContactID{testdata.Alexandria.ID, testdata.Bob.ID}) - - assert.Equal(t, 
models.NilBroadcastID, batch.BroadcastID) - assert.Equal(t, testdata.Org1.ID, batch.OrgID) - assert.Equal(t, envs.Language("eng"), batch.BaseLanguage) - assert.Equal(t, translations, batch.Translations) - assert.Equal(t, models.TemplateStateUnevaluated, batch.TemplateState) - assert.Equal(t, ticket.ID, batch.TicketID) - assert.Equal(t, []models.ContactID{testdata.Alexandria.ID, testdata.Bob.ID}, batch.ContactIDs) - - oa, err := models.GetOrgAssets(ctx, rt, testdata.Org1.ID) - require.NoError(t, err) - - msgs, err := batch.CreateMessages(ctx, rt, oa) - require.NoError(t, err) - - assert.Equal(t, 2, len(msgs)) - - assertdb.Query(t, db, `SELECT count(*) FROM msgs_msg WHERE direction = 'O' AND broadcast_id IS NULL AND text = 'Hi there'`).Returns(2) - - // test ticket was updated - assertdb.Query(t, db, `SELECT count(*) FROM tickets_ticket WHERE id = $1 AND last_activity_on > $2`, ticket.ID, modelTicket.LastActivityOn()).Returns(1) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM msgs_msg WHERE status = 'I'`).Returns(2) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM msgs_msg WHERE status = 'Q'`).Returns(2) } func TestNewOutgoingIVR(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() oa, err := models.GetOrgAssets(ctx, rt, testdata.Org1.ID) require.NoError(t, err) vonage := oa.ChannelByUUID(testdata.VonageChannel.UUID) - conn, err := models.InsertCall(ctx, db, testdata.Org1.ID, testdata.VonageChannel.ID, models.NilStartID, testdata.Cathy.ID, testdata.Cathy.URNID, models.CallDirectionOut, models.CallStatusInProgress, "") + conn, err := models.InsertCall(ctx, rt.DB, testdata.Org1.ID, testdata.VonageChannel.ID, models.NilStartID, testdata.Cathy.ID, testdata.Cathy.URNID, models.CallDirectionOut, models.CallStatusInProgress, "") require.NoError(t, err) createdOn := time.Date(2021, 7, 26, 12, 6, 30, 0, time.UTC) - flowMsg := flows.NewIVRMsgOut(testdata.Cathy.URN, vonage.ChannelReference(), "Hello", "http://example.com/hi.mp3", "eng") + 
flowMsg := flows.NewIVRMsgOut(testdata.Cathy.URN, vonage.ChannelReference(), "Hello", "http://example.com/hi.mp3", "eng-US") dbMsg := models.NewOutgoingIVR(rt.Config, testdata.Org1.ID, conn, flowMsg, createdOn) assert.Equal(t, flowMsg.UUID(), dbMsg.UUID()) + assert.Equal(t, models.MsgTypeVoice, dbMsg.Type()) assert.Equal(t, "Hello", dbMsg.Text()) assert.Equal(t, []utils.Attachment{"audio:http://example.com/hi.mp3"}, dbMsg.Attachments()) + assert.Equal(t, envs.Locale("eng-US"), dbMsg.Locale()) assert.Equal(t, createdOn, dbMsg.CreatedOn()) assert.Equal(t, &createdOn, dbMsg.SentOn()) - err = models.InsertMessages(ctx, db, []*models.Msg{dbMsg}) + err = models.InsertMessages(ctx, rt.DB, []*models.Msg{dbMsg}) require.NoError(t, err) - assertdb.Query(t, db, `SELECT text, created_on, sent_on FROM msgs_msg WHERE uuid = $1`, dbMsg.UUID()).Columns(map[string]interface{}{"text": "Hello", "created_on": createdOn, "sent_on": createdOn}) + assertdb.Query(t, rt.DB, `SELECT text, created_on, sent_on FROM msgs_msg WHERE uuid = $1`, dbMsg.UUID()).Columns(map[string]interface{}{"text": "Hello", "created_on": createdOn, "sent_on": createdOn}) +} + +func TestNewMsgOut(t *testing.T) { + ctx, rt := testsuite.Runtime() + + oa, err := models.GetOrgAssets(ctx, rt, testdata.Org1.ID) + require.NoError(t, err) + + _, cathy := testdata.Cathy.Load(rt, oa) + + out, ch := models.NewMsgOut(oa, cathy, "hello", nil, nil, envs.Locale(`eng-US`)) + assert.Equal(t, "hello", out.Text()) + assert.Equal(t, urns.URN("tel:+16055741111?id=10000&priority=1000"), out.URN()) + assert.Equal(t, assets.NewChannelReference("74729f45-7f29-4868-9dc4-90e491e3c7d8", "Twilio"), out.Channel()) + assert.Equal(t, envs.Locale(`eng-US`), out.Locale()) + assert.Equal(t, "Twilio", ch.Name()) + + cathy.SetStatus(flows.ContactStatusBlocked) + + out, ch = models.NewMsgOut(oa, cathy, "hello", nil, nil, envs.Locale(`eng-US`)) + assert.Equal(t, urns.URN("tel:+16055741111?id=10000&priority=1000"), out.URN()) + assert.Equal(t, 
assets.NewChannelReference("74729f45-7f29-4868-9dc4-90e491e3c7d8", "Twilio"), out.Channel()) + assert.Equal(t, "Twilio", ch.Name()) + assert.Equal(t, flows.UnsendableReasonContactStatus, out.UnsendableReason()) + + cathy.SetStatus(flows.ContactStatusActive) + cathy.ClearURNs() + + out, ch = models.NewMsgOut(oa, cathy, "hello", nil, nil, envs.Locale(`eng-US`)) + assert.Equal(t, urns.NilURN, out.URN()) + assert.Nil(t, out.Channel()) + assert.Nil(t, ch) + assert.Equal(t, flows.UnsendableReasonNoDestination, out.UnsendableReason()) } func insertTestSession(t *testing.T, ctx context.Context, rt *runtime.Runtime, org *testdata.Org, contact *testdata.Contact, flow *testdata.Flow) *models.Session { - testdata.InsertWaitingSession(rt.DB, org, contact, models.FlowTypeMessaging, testdata.Favorites, models.NilCallID, time.Now(), time.Now(), false, nil) + testdata.InsertWaitingSession(rt, org, contact, models.FlowTypeMessaging, testdata.Favorites, models.NilCallID, time.Now(), time.Now(), false, nil) oa, err := models.GetOrgAssets(ctx, rt, testdata.Org1.ID) require.NoError(t, err) - _, flowContact := contact.Load(rt.DB, oa) + _, flowContact := contact.Load(rt, oa) session, err := models.FindWaitingSessionForContact(ctx, rt.DB, rt.SessionStorage, oa, models.FlowTypeMessaging, flowContact) require.NoError(t, err) diff --git a/core/models/notifications.go b/core/models/notifications.go index ad0b7af51..5b20725de 100644 --- a/core/models/notifications.go +++ b/core/models/notifications.go @@ -11,7 +11,7 @@ import ( ) // NotificationID is our type for notification ids -type NotificationID int +type NotificationID int64 type NotificationType string diff --git a/core/models/notifications_test.go b/core/models/notifications_test.go index f6e985786..4a034eaa8 100644 --- a/core/models/notifications_test.go +++ b/core/models/notifications_test.go @@ -7,6 +7,7 @@ import ( "github.com/jmoiron/sqlx" "github.com/nyaruka/mailroom/core/models" + "github.com/nyaruka/mailroom/runtime" 
"github.com/nyaruka/mailroom/testsuite" "github.com/nyaruka/mailroom/testsuite/testdata" "github.com/stretchr/testify/assert" @@ -14,7 +15,7 @@ import ( ) func TestTicketNotifications(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetData) @@ -24,13 +25,13 @@ func TestTicketNotifications(t *testing.T) { t0 := time.Now() // open unassigned tickets by a flow (i.e. no user) - ticket1, openedEvent1 := openTicket(t, ctx, db, nil, nil) - ticket2, openedEvent2 := openTicket(t, ctx, db, nil, nil) - err = models.NotificationsFromTicketEvents(ctx, db, oa, map[*models.Ticket]*models.TicketEvent{ticket1: openedEvent1, ticket2: openedEvent2}) + ticket1, openedEvent1 := openTicket(t, ctx, rt, nil, nil) + ticket2, openedEvent2 := openTicket(t, ctx, rt, nil, nil) + err = models.NotificationsFromTicketEvents(ctx, rt.DB, oa, map[*models.Ticket]*models.TicketEvent{ticket1: openedEvent1, ticket2: openedEvent2}) require.NoError(t, err) // check that all assignable users are notified once - assertNotifications(t, ctx, db, t0, map[*testdata.User][]models.NotificationType{ + assertNotifications(t, ctx, rt.DB, t0, map[*testdata.User][]models.NotificationType{ testdata.Admin: {models.NotificationTypeTicketsOpened}, testdata.Editor: {models.NotificationTypeTicketsOpened}, testdata.Agent: {models.NotificationTypeTicketsOpened}, @@ -39,96 +40,96 @@ func TestTicketNotifications(t *testing.T) { t1 := time.Now() // another ticket opened won't create new notifications - ticket3, openedEvent3 := openTicket(t, ctx, db, nil, nil) - err = models.NotificationsFromTicketEvents(ctx, db, oa, map[*models.Ticket]*models.TicketEvent{ticket3: openedEvent3}) + ticket3, openedEvent3 := openTicket(t, ctx, rt, nil, nil) + err = models.NotificationsFromTicketEvents(ctx, rt.DB, oa, map[*models.Ticket]*models.TicketEvent{ticket3: openedEvent3}) require.NoError(t, err) - assertNotifications(t, ctx, db, t1, 
map[*testdata.User][]models.NotificationType{}) + assertNotifications(t, ctx, rt.DB, t1, map[*testdata.User][]models.NotificationType{}) // mark all notifications as seen - db.MustExec(`UPDATE notifications_notification SET is_seen = TRUE`) + rt.DB.MustExec(`UPDATE notifications_notification SET is_seen = TRUE`) // open an unassigned ticket by a user - ticket4, openedEvent4 := openTicket(t, ctx, db, testdata.Editor, nil) - err = models.NotificationsFromTicketEvents(ctx, db, oa, map[*models.Ticket]*models.TicketEvent{ticket4: openedEvent4}) + ticket4, openedEvent4 := openTicket(t, ctx, rt, testdata.Editor, nil) + err = models.NotificationsFromTicketEvents(ctx, rt.DB, oa, map[*models.Ticket]*models.TicketEvent{ticket4: openedEvent4}) require.NoError(t, err) // check that all assignable users are notified except the user that opened the ticket - assertNotifications(t, ctx, db, t1, map[*testdata.User][]models.NotificationType{ + assertNotifications(t, ctx, rt.DB, t1, map[*testdata.User][]models.NotificationType{ testdata.Admin: {models.NotificationTypeTicketsOpened}, testdata.Agent: {models.NotificationTypeTicketsOpened}, }) t2 := time.Now() - db.MustExec(`UPDATE notifications_notification SET is_seen = TRUE`) + rt.DB.MustExec(`UPDATE notifications_notification SET is_seen = TRUE`) // open an already assigned ticket - ticket5, openedEvent5 := openTicket(t, ctx, db, nil, testdata.Agent) - err = models.NotificationsFromTicketEvents(ctx, db, oa, map[*models.Ticket]*models.TicketEvent{ticket5: openedEvent5}) + ticket5, openedEvent5 := openTicket(t, ctx, rt, nil, testdata.Agent) + err = models.NotificationsFromTicketEvents(ctx, rt.DB, oa, map[*models.Ticket]*models.TicketEvent{ticket5: openedEvent5}) require.NoError(t, err) // check that the assigned user gets a ticket activity notification - assertNotifications(t, ctx, db, t2, map[*testdata.User][]models.NotificationType{ + assertNotifications(t, ctx, rt.DB, t2, map[*testdata.User][]models.NotificationType{ testdata.Agent: 
{models.NotificationTypeTicketsActivity}, }) t3 := time.Now() // however if a user opens a ticket which is assigned to themselves, no notification - ticket6, openedEvent6 := openTicket(t, ctx, db, testdata.Admin, testdata.Admin) - err = models.NotificationsFromTicketEvents(ctx, db, oa, map[*models.Ticket]*models.TicketEvent{ticket6: openedEvent6}) + ticket6, openedEvent6 := openTicket(t, ctx, rt, testdata.Admin, testdata.Admin) + err = models.NotificationsFromTicketEvents(ctx, rt.DB, oa, map[*models.Ticket]*models.TicketEvent{ticket6: openedEvent6}) require.NoError(t, err) // check that the assigned user gets a ticket activity notification - assertNotifications(t, ctx, db, t3, map[*testdata.User][]models.NotificationType{}) + assertNotifications(t, ctx, rt.DB, t3, map[*testdata.User][]models.NotificationType{}) t4 := time.Now() - db.MustExec(`UPDATE notifications_notification SET is_seen = TRUE`) + rt.DB.MustExec(`UPDATE notifications_notification SET is_seen = TRUE`) // now have a user assign existing tickets to another user - _, err = models.TicketsAssign(ctx, db, oa, testdata.Admin.ID, []*models.Ticket{ticket1, ticket2}, testdata.Agent.ID, "") + _, err = models.TicketsAssign(ctx, rt.DB, oa, testdata.Admin.ID, []*models.Ticket{ticket1, ticket2}, testdata.Agent.ID) require.NoError(t, err) // check that the assigned user gets a ticket activity notification - assertNotifications(t, ctx, db, t4, map[*testdata.User][]models.NotificationType{ + assertNotifications(t, ctx, rt.DB, t4, map[*testdata.User][]models.NotificationType{ testdata.Agent: {models.NotificationTypeTicketsActivity}, }) t5 := time.Now() - db.MustExec(`UPDATE notifications_notification SET is_seen = TRUE`) + rt.DB.MustExec(`UPDATE notifications_notification SET is_seen = TRUE`) // and finally a user assigning a ticket to themselves - _, err = models.TicketsAssign(ctx, db, oa, testdata.Editor.ID, []*models.Ticket{ticket3}, testdata.Editor.ID, "") + _, err = models.TicketsAssign(ctx, rt.DB, oa, 
testdata.Editor.ID, []*models.Ticket{ticket3}, testdata.Editor.ID) require.NoError(t, err) // no notifications for self-assignment - assertNotifications(t, ctx, db, t5, map[*testdata.User][]models.NotificationType{}) + assertNotifications(t, ctx, rt.DB, t5, map[*testdata.User][]models.NotificationType{}) } func TestImportNotifications(t *testing.T) { - ctx, _, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetData) - importID := testdata.InsertContactImport(db, testdata.Org1, testdata.Editor) - imp, err := models.LoadContactImport(ctx, db, importID) + importID := testdata.InsertContactImport(rt, testdata.Org1, testdata.Editor) + imp, err := models.LoadContactImport(ctx, rt.DB, importID) require.NoError(t, err) - err = imp.MarkFinished(ctx, db, models.ContactImportStatusComplete) + err = imp.MarkFinished(ctx, rt.DB, models.ContactImportStatusComplete) require.NoError(t, err) t0 := time.Now() - err = models.NotifyImportFinished(ctx, db, imp) + err = models.NotifyImportFinished(ctx, rt.DB, imp) require.NoError(t, err) - assertNotifications(t, ctx, db, t0, map[*testdata.User][]models.NotificationType{ + assertNotifications(t, ctx, rt.DB, t0, map[*testdata.User][]models.NotificationType{ testdata.Editor: {models.NotificationTypeImportFinished}, }) } func TestIncidentNotifications(t *testing.T) { - ctx, rt, db, rp := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetData) @@ -137,10 +138,10 @@ func TestIncidentNotifications(t *testing.T) { t0 := time.Now() - _, err = models.IncidentWebhooksUnhealthy(ctx, db, rp, oa, nil) + _, err = models.IncidentWebhooksUnhealthy(ctx, rt.DB, rt.RP, oa, nil) require.NoError(t, err) - assertNotifications(t, ctx, db, t0, map[*testdata.User][]models.NotificationType{ + assertNotifications(t, ctx, rt.DB, t0, map[*testdata.User][]models.NotificationType{ testdata.Admin: {models.NotificationTypeIncidentStarted}, }) } @@ -164,12 +165,12 @@ func 
assertNotifications(t *testing.T, ctx context.Context, db *sqlx.DB, after t assert.Equal(t, expectedByID, actual) } -func openTicket(t *testing.T, ctx context.Context, db *sqlx.DB, openedBy *testdata.User, assignee *testdata.User) (*models.Ticket, *models.TicketEvent) { - ticket := testdata.InsertOpenTicket(db, testdata.Org1, testdata.Cathy, testdata.Internal, testdata.SupportTopic, "Where my pants", "", time.Now(), assignee) - modelTicket := ticket.Load(db) +func openTicket(t *testing.T, ctx context.Context, rt *runtime.Runtime, openedBy *testdata.User, assignee *testdata.User) (*models.Ticket, *models.TicketEvent) { + ticket := testdata.InsertOpenTicket(rt, testdata.Org1, testdata.Cathy, testdata.Internal, testdata.SupportTopic, "Where my pants", "", time.Now(), assignee) + modelTicket := ticket.Load(rt) openedEvent := models.NewTicketOpenedEvent(modelTicket, openedBy.SafeID(), assignee.SafeID()) - err := models.InsertTicketEvents(ctx, db, []*models.TicketEvent{openedEvent}) + err := models.InsertTicketEvents(ctx, rt.DB, []*models.TicketEvent{openedEvent}) require.NoError(t, err) return modelTicket, openedEvent diff --git a/core/models/orgs.go b/core/models/orgs.go index b96d7b6f0..ca2c968d1 100644 --- a/core/models/orgs.go +++ b/core/models/orgs.go @@ -8,7 +8,6 @@ import ( "mime" "net/http" "path/filepath" - "strings" "time" "github.com/jmoiron/sqlx" @@ -24,7 +23,7 @@ import ( "github.com/nyaruka/goflow/utils/smtpx" "github.com/nyaruka/mailroom/core/goflow" "github.com/nyaruka/mailroom/runtime" - "github.com/nyaruka/null" + "github.com/nyaruka/null/v2" "github.com/pkg/errors" "github.com/sirupsen/logrus" ) @@ -141,7 +140,11 @@ func (o *Org) UnmarshalJSON(b []byte) error { // ConfigValue returns the string value for the passed in config (or default if not found) func (o *Org) ConfigValue(key string, def string) string { - return o.o.Config.GetString(key, def) + v, ok := o.o.Config[key].(string) + if ok { + return v + } + return def } // EmailService returns the 
email service for this org @@ -205,14 +208,7 @@ func (o *Org) attachmentPath(prefix string, filename string) string { } parts = append(parts, filename) - path := filepath.Join(parts...) - - // ensure path begins with / - if !strings.HasPrefix(path, "/") { - path = fmt.Sprintf("/%s", path) - } - - return path + return filepath.Join(parts...) } // gets the underlying org for the given session assets @@ -248,8 +244,8 @@ const selectOrgByID = ` SELECT ROW_TO_JSON(o) FROM (SELECT id, is_suspended, - COALESCE(o.config::json,'{}'::json) AS config, - (SELECT CASE date_format WHEN 'D' THEN 'DD-MM-YYYY' WHEN 'M' THEN 'MM-DD-YYYY' END) AS date_format, + o.config AS config, + (SELECT CASE date_format WHEN 'D' THEN 'DD-MM-YYYY' WHEN 'M' THEN 'MM-DD-YYYY' ELSE 'YYYY-MM-DD' END) AS date_format, 'tt:mm' AS time_format, timezone, (SELECT CASE is_anon WHEN TRUE THEN 'urns' WHEN FALSE THEN 'none' END) AS redaction_policy, diff --git a/core/models/orgs_test.go b/core/models/orgs_test.go index 19273b2af..b7432d89c 100644 --- a/core/models/orgs_test.go +++ b/core/models/orgs_test.go @@ -17,11 +17,11 @@ import ( ) func TestOrgs(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() tz, _ := time.LoadLocation("America/Los_Angeles") - tx, err := db.BeginTxx(ctx, nil) + tx, err := rt.DB.BeginTxx(ctx, nil) assert.NoError(t, err) defer tx.Rollback() @@ -30,6 +30,7 @@ func TestOrgs(t *testing.T) { tx.MustExec(`UPDATE orgs_org SET flow_languages = '{"fra", "eng"}' WHERE id = $1`, testdata.Org1.ID) tx.MustExec(`UPDATE orgs_org SET flow_languages = '{}' WHERE id = $1`, testdata.Org2.ID) + tx.MustExec(`UPDATE orgs_org SET date_format = 'M' WHERE id = $1`, testdata.Org2.ID) org, err := models.LoadOrg(ctx, rt.Config, tx, testdata.Org1.ID) assert.NoError(t, err) @@ -48,6 +49,7 @@ func TestOrgs(t *testing.T) { org, err = models.LoadOrg(ctx, rt.Config, tx, testdata.Org2.ID) assert.NoError(t, err) + assert.Equal(t, envs.DateFormatMonthDayYear, org.DateFormat()) 
assert.Equal(t, []envs.Language{}, org.AllowedLanguages()) assert.Equal(t, envs.NilLanguage, org.DefaultLanguage()) assert.Equal(t, "", org.DefaultLocale().ToBCP47()) @@ -57,14 +59,14 @@ func TestOrgs(t *testing.T) { } func TestStoreAttachment(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetStorage) image, err := os.Open("testdata/test.jpg") require.NoError(t, err) - org, err := models.LoadOrg(ctx, rt.Config, db, testdata.Org1.ID) + org, err := models.LoadOrg(ctx, rt.Config, rt.DB, testdata.Org1.ID) assert.NoError(t, err) attachment, err := org.StoreAttachment(context.Background(), rt, "668383ba-387c-49bc-b164-1213ac0ea7aa.jpg", "image/jpeg", image) diff --git a/core/models/resthooks.go b/core/models/resthooks.go index 0035fa73a..9b08e43d2 100644 --- a/core/models/resthooks.go +++ b/core/models/resthooks.go @@ -4,16 +4,15 @@ import ( "context" "time" + "github.com/jmoiron/sqlx" "github.com/nyaruka/gocommon/dbutil" "github.com/nyaruka/goflow/assets" - - "github.com/jmoiron/sqlx" "github.com/pkg/errors" "github.com/sirupsen/logrus" ) // ResthookID is our type for the database id of a resthook -type ResthookID int64 +type ResthookID int // Resthook is the mailroom type for resthooks type Resthook struct { diff --git a/core/models/resthooks_test.go b/core/models/resthooks_test.go index 540005c8b..8d8ed8f65 100644 --- a/core/models/resthooks_test.go +++ b/core/models/resthooks_test.go @@ -11,15 +11,15 @@ import ( ) func TestResthooks(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() - db.MustExec(`INSERT INTO api_resthook(is_active, created_on, modified_on, slug, created_by_id, modified_by_id, org_id) + rt.DB.MustExec(`INSERT INTO api_resthook(is_active, created_on, modified_on, slug, created_by_id, modified_by_id, org_id) VALUES(TRUE, NOW(), NOW(), 'registration', 1, 1, 1);`) - db.MustExec(`INSERT INTO api_resthook(is_active, created_on, modified_on, slug, 
created_by_id, modified_by_id, org_id) + rt.DB.MustExec(`INSERT INTO api_resthook(is_active, created_on, modified_on, slug, created_by_id, modified_by_id, org_id) VALUES(TRUE, NOW(), NOW(), 'block', 1, 1, 1);`) - db.MustExec(`INSERT INTO api_resthooksubscriber(is_active, created_on, modified_on, target_url, created_by_id, modified_by_id, resthook_id) + rt.DB.MustExec(`INSERT INTO api_resthooksubscriber(is_active, created_on, modified_on, target_url, created_by_id, modified_by_id, resthook_id) VALUES(TRUE, NOW(), NOW(), 'https://foo.bar', 1, 1, 2);`) - db.MustExec(`INSERT INTO api_resthooksubscriber(is_active, created_on, modified_on, target_url, created_by_id, modified_by_id, resthook_id) + rt.DB.MustExec(`INSERT INTO api_resthooksubscriber(is_active, created_on, modified_on, target_url, created_by_id, modified_by_id, resthook_id) VALUES(TRUE, NOW(), NOW(), 'https://bar.foo', 1, 1, 2);`) oa, err := models.GetOrgAssetsWithRefresh(ctx, rt, testdata.Org1.ID, models.RefreshResthooks) diff --git a/core/models/runs.go b/core/models/runs.go index 45fa85207..3a9e2cb3c 100644 --- a/core/models/runs.go +++ b/core/models/runs.go @@ -6,11 +6,10 @@ import ( "time" "github.com/jmoiron/sqlx" - "github.com/lib/pq" "github.com/nyaruka/gocommon/jsonx" "github.com/nyaruka/goflow/flows" "github.com/nyaruka/goflow/flows/events" - "github.com/nyaruka/null" + "github.com/nyaruka/null/v2" "github.com/pkg/errors" ) @@ -37,23 +36,6 @@ var runStatusMap = map[flows.RunStatus]RunStatus{ flows.RunStatusFailed: RunStatusFailed, } -// ExitType still needs to be set on runs until database triggers are updated to only look at status -type ExitType = null.String - -const ( - ExitInterrupted = ExitType("I") - ExitCompleted = ExitType("C") - ExitExpired = ExitType("E") - ExitFailed = ExitType("F") -) - -var runStatusToExitType = map[RunStatus]ExitType{ - RunStatusInterrupted: ExitInterrupted, - RunStatusCompleted: ExitCompleted, - RunStatusExpired: ExitExpired, - RunStatusFailed: ExitFailed, -} - // 
FlowRun is the mailroom type for a FlowRun type FlowRun struct { r struct { @@ -158,23 +140,3 @@ func newRun(ctx context.Context, tx *sqlx.Tx, oa *OrgAssets, session *Session, f return run, nil } - -// FindFlowStartedOverlap returns the list of contact ids which overlap with those passed in and which -// have been in the flow passed in. -func FindFlowStartedOverlap(ctx context.Context, db *sqlx.DB, flowID FlowID, contacts []ContactID) ([]ContactID, error) { - var overlap []ContactID - err := db.SelectContext(ctx, &overlap, flowStartedOverlapSQL, pq.Array(contacts), flowID) - return overlap, err -} - -// TODO: no perfect index, will probably use contact index flows_flowrun_contact_id_985792a9 -// could be slow in the cases of contacts having many distinct runs -const flowStartedOverlapSQL = ` -SELECT - DISTINCT(contact_id) -FROM - flows_flowrun -WHERE - contact_id = ANY($1) AND - flow_id = $2 -` diff --git a/core/models/schedules.go b/core/models/schedules.go index 7eb6eae81..250548486 100644 --- a/core/models/schedules.go +++ b/core/models/schedules.go @@ -7,13 +7,13 @@ import ( "time" "github.com/nyaruka/gocommon/dbutil" - "github.com/nyaruka/null" + "github.com/nyaruka/null/v2" "github.com/pkg/errors" ) // ScheduleID is our internal type for schedule IDs -type ScheduleID null.Int +type ScheduleID int // NilScheduleID is our constant for a nil schedule id const NilScheduleID = ScheduleID(0) @@ -208,36 +208,13 @@ SELECT ROW_TO_JSON(s) FROM (SELECT o.timezone as timezone, (SELECT ROW_TO_JSON(sb) FROM ( SELECT - b.id as broadcast_id, - (SELECT JSON_OBJECT_AGG(ts.key, ts.value) FROM (SELECT key, JSON_BUILD_OBJECT('text', t.value) as value FROM each(b.text) t) ts) as translations, - 'unevaluated' as template_state, - b.base_language as base_language, - s.org_id as org_id, - (SELECT ARRAY_AGG(bc.contact_id) FROM ( - SELECT - bc.contact_id - FROM - msgs_broadcast_contacts bc - WHERE - bc.broadcast_id = b.id - ) bc) as contact_ids, - (SELECT ARRAY_AGG(bg.contactgroup_id) 
FROM ( - SELECT - bg.contactgroup_id - FROM - msgs_broadcast_groups bg - WHERE - bg.broadcast_id = b.id - ) bg) as group_ids, - (SELECT ARRAY_AGG(bu.urn) FROM ( - SELECT - cu.identity || '?id=' || cu.id as urn - FROM - msgs_broadcast_urns bus JOIN - contacts_contacturn cu ON cu.id = bus.contacturn_id - WHERE - bus.broadcast_id = b.id - ) bu) as urns + b.id AS broadcast_id, + b.translations, + 'unevaluated' AS template_state, + b.base_language, + s.org_id, + (SELECT ARRAY_AGG(bc.contact_id) FROM (SELECT bc.contact_id FROM msgs_broadcast_contacts bc WHERE bc.broadcast_id = b.id) bc) as contact_ids, + (SELECT ARRAY_AGG(bg.contactgroup_id) FROM (SELECT bg.contactgroup_id FROM msgs_broadcast_groups bg WHERE bg.broadcast_id = b.id) bg) as group_ids FROM msgs_broadcast b WHERE @@ -250,8 +227,7 @@ SELECT ROW_TO_JSON(s) FROM (SELECT 'T' as start_type, t.flow_id as flow_id, f.flow_type as flow_type, - TRUE as restart_participants, - TRUE as include_active, + '{}'::jsonb AS exclusions, (SELECT ARRAY_AGG(tc.contact_id) FROM ( SELECT tc.contact_id @@ -316,22 +292,7 @@ func GetUnfiredSchedules(ctx context.Context, db Queryer) ([]*Schedule, error) { return unfired, nil } -// MarshalJSON marshals into JSON. 0 values will become null -func (i ScheduleID) MarshalJSON() ([]byte, error) { - return null.Int(i).MarshalJSON() -} - -// UnmarshalJSON unmarshals from JSON. null values become 0 -func (i *ScheduleID) UnmarshalJSON(b []byte) error { - return null.UnmarshalInt(b, (*null.Int)(i)) -} - -// Value returns the db value, null is returned for 0 -func (i ScheduleID) Value() (driver.Value, error) { - return null.Int(i).Value() -} - -// Scan scans from the db value. 
null values become 0 -func (i *ScheduleID) Scan(value interface{}) error { - return null.ScanInt(value, (*null.Int)(i)) -} +func (i *ScheduleID) Scan(value any) error { return null.ScanInt(value, i) } +func (i ScheduleID) Value() (driver.Value, error) { return null.IntValue(i) } +func (i *ScheduleID) UnmarshalJSON(b []byte) error { return null.UnmarshalInt(b, i) } +func (i ScheduleID) MarshalJSON() ([]byte, error) { return null.MarshalInt(i) } diff --git a/core/models/schedules_test.go b/core/models/schedules_test.go index e01ecb0aa..a20d01f56 100644 --- a/core/models/schedules_test.go +++ b/core/models/schedules_test.go @@ -4,21 +4,21 @@ import ( "testing" "time" - "github.com/nyaruka/gocommon/urns" "github.com/nyaruka/goflow/envs" "github.com/nyaruka/mailroom/core/models" "github.com/nyaruka/mailroom/testsuite" "github.com/nyaruka/mailroom/testsuite/testdata" - "github.com/stretchr/testify/assert" ) func TestGetExpired(t *testing.T) { - ctx, _, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() + + defer testsuite.Reset(testsuite.ResetData) // add a schedule and tie a broadcast to it var s1 models.ScheduleID - err := db.Get( + err := rt.DB.Get( &s1, `INSERT INTO schedules_schedule(is_active, repeat_period, created_on, modified_on, next_fire, created_by_id, modified_by_id, org_id) VALUES(TRUE, 'O', NOW(), NOW(), NOW()- INTERVAL '1 DAY', 1, 1, $1) RETURNING id`, @@ -26,16 +26,13 @@ func TestGetExpired(t *testing.T) { ) assert.NoError(t, err) - b1 := testdata.InsertBroadcast(db, testdata.Org1, "eng", map[envs.Language]string{"eng": "Test message", "fra": "Un Message"}, s1, + testdata.InsertBroadcast(rt, testdata.Org1, "eng", map[envs.Language]string{"eng": "Test message", "fra": "Un Message"}, s1, []*testdata.Contact{testdata.Cathy, testdata.George}, []*testdata.Group{testdata.DoctorsGroup}, ) - // add a URN - db.MustExec(`INSERT INTO msgs_broadcast_urns(broadcast_id, contacturn_id) VALUES($1, $2)`, b1, testdata.Cathy.URNID) - // add another and tie a trigger 
to it var s2 models.ScheduleID - err = db.Get( + err = rt.DB.Get( &s2, `INSERT INTO schedules_schedule(is_active, repeat_period, created_on, modified_on, next_fire, created_by_id, modified_by_id, org_id) VALUES(TRUE, 'O', NOW(), NOW(), NOW()- INTERVAL '2 DAY', 1, 1, $1) RETURNING id`, @@ -43,7 +40,7 @@ func TestGetExpired(t *testing.T) { ) assert.NoError(t, err) var t1 models.TriggerID - err = db.Get( + err = rt.DB.Get( &t1, `INSERT INTO triggers_trigger(is_active, created_on, modified_on, is_archived, trigger_type, created_by_id, modified_by_id, org_id, flow_id, schedule_id) VALUES(TRUE, NOW(), NOW(), FALSE, 'S', 1, 1, $1, $2, $3) RETURNING id`, @@ -52,13 +49,13 @@ func TestGetExpired(t *testing.T) { assert.NoError(t, err) // add a few contacts to the trigger - db.MustExec(`INSERT INTO triggers_trigger_contacts(trigger_id, contact_id) VALUES($1, $2),($1, $3)`, t1, testdata.Cathy.ID, testdata.George.ID) + rt.DB.MustExec(`INSERT INTO triggers_trigger_contacts(trigger_id, contact_id) VALUES($1, $2),($1, $3)`, t1, testdata.Cathy.ID, testdata.George.ID) // and a group - db.MustExec(`INSERT INTO triggers_trigger_groups(trigger_id, contactgroup_id) VALUES($1, $2)`, t1, testdata.DoctorsGroup.ID) + rt.DB.MustExec(`INSERT INTO triggers_trigger_groups(trigger_id, contactgroup_id) VALUES($1, $2)`, t1, testdata.DoctorsGroup.ID) var s3 models.ScheduleID - err = db.Get( + err = rt.DB.Get( &s3, `INSERT INTO schedules_schedule(is_active, repeat_period, created_on, modified_on, next_fire, created_by_id, modified_by_id, org_id) VALUES(TRUE, 'O', NOW(), NOW(), NOW()- INTERVAL '3 DAY', 1, 1, $1) RETURNING id`, @@ -67,7 +64,7 @@ func TestGetExpired(t *testing.T) { assert.NoError(t, err) // get expired schedules - schedules, err := models.GetUnfiredSchedules(ctx, db) + schedules, err := models.GetUnfiredSchedules(ctx, rt.DB) assert.NoError(t, err) assert.Equal(t, 3, len(schedules)) @@ -81,23 +78,22 @@ func TestGetExpired(t *testing.T) { assert.Nil(t, schedules[1].Broadcast()) start := 
schedules[1].FlowStart() assert.NotNil(t, start) - assert.Equal(t, models.FlowTypeMessaging, start.FlowType()) - assert.Equal(t, testdata.Favorites.ID, start.FlowID()) - assert.Equal(t, testdata.Org1.ID, start.OrgID()) - assert.Equal(t, []models.ContactID{testdata.Cathy.ID, testdata.George.ID}, start.ContactIDs()) - assert.Equal(t, []models.GroupID{testdata.DoctorsGroup.ID}, start.GroupIDs()) + assert.Equal(t, models.FlowTypeMessaging, start.FlowType) + assert.Equal(t, testdata.Favorites.ID, start.FlowID) + assert.Equal(t, testdata.Org1.ID, start.OrgID) + assert.Equal(t, []models.ContactID{testdata.Cathy.ID, testdata.George.ID}, start.ContactIDs) + assert.Equal(t, []models.GroupID{testdata.DoctorsGroup.ID}, start.GroupIDs) assert.Equal(t, s1, schedules[2].ID()) bcast := schedules[2].Broadcast() assert.NotNil(t, bcast) - assert.Equal(t, envs.Language("eng"), bcast.BaseLanguage()) - assert.Equal(t, models.TemplateStateUnevaluated, bcast.TemplateState()) - assert.Equal(t, "Test message", bcast.Translations()["eng"].Text) - assert.Equal(t, "Un Message", bcast.Translations()["fra"].Text) - assert.Equal(t, testdata.Org1.ID, bcast.OrgID()) - assert.Equal(t, []models.ContactID{testdata.Cathy.ID, testdata.George.ID}, bcast.ContactIDs()) - assert.Equal(t, []models.GroupID{testdata.DoctorsGroup.ID}, bcast.GroupIDs()) - assert.Equal(t, []urns.URN{urns.URN("tel:+16055741111?id=10000")}, bcast.URNs()) + assert.Equal(t, envs.Language("eng"), bcast.BaseLanguage) + assert.Equal(t, models.TemplateStateUnevaluated, bcast.TemplateState) + assert.Equal(t, "Test message", bcast.Translations["eng"].Text) + assert.Equal(t, "Un Message", bcast.Translations["fra"].Text) + assert.Equal(t, testdata.Org1.ID, bcast.OrgID) + assert.Equal(t, []models.ContactID{testdata.Cathy.ID, testdata.George.ID}, bcast.ContactIDs) + assert.Equal(t, []models.GroupID{testdata.DoctorsGroup.ID}, bcast.GroupIDs) } func TestNextFire(t *testing.T) { diff --git a/core/models/sessions.go b/core/models/sessions.go index 
a96098337..abb8ea1ad 100644 --- a/core/models/sessions.go +++ b/core/models/sessions.go @@ -21,7 +21,7 @@ import ( "github.com/nyaruka/goflow/flows/events" "github.com/nyaruka/mailroom/core/goflow" "github.com/nyaruka/mailroom/runtime" - "github.com/nyaruka/null" + "github.com/nyaruka/null/v2" "github.com/pkg/errors" "github.com/sirupsen/logrus" ) @@ -114,12 +114,11 @@ func (s *Session) IncomingMsgExternalID() null.String { return s.incomingExterna func (s *Session) Scene() *Scene { return s.scene } // StoragePath returns the path for the session -func (s *Session) StoragePath(cfg *runtime.Config) string { +func (s *Session) StoragePath() string { ts := s.CreatedOn().UTC().Format(storageTSFormat) - // example output: /orgs/1/c/20a5/20a5534c-b2ad-4f18-973a-f1aa3b4e6c74/20060102T150405.123Z_session_8a7fc501-177b-4567-a0aa-81c48e6de1c5_51df83ac21d3cf136d8341f0b11cb1a7.json" + // example output: orgs/1/c/20a5/20a5534c-b2ad-4f18-973a-f1aa3b4e6c74/20060102T150405.123Z_session_8a7fc501-177b-4567-a0aa-81c48e6de1c5_51df83ac21d3cf136d8341f0b11cb1a7.json" return path.Join( - cfg.S3SessionPrefix, "orgs", fmt.Sprintf("%d", s.OrgID()), "c", @@ -803,7 +802,7 @@ func WriteSessionOutputsToStorage(ctx context.Context, rt *runtime.Runtime, sess uploads := make([]*storage.Upload, len(sessions)) for i, s := range sessions { uploads[i] = &storage.Upload{ - Path: s.StoragePath(rt.Config), + Path: s.StoragePath(), Body: []byte(s.Output()), ContentType: "application/json", } @@ -850,7 +849,7 @@ func ExitSessions(ctx context.Context, db *sqlx.DB, sessionIDs []SessionID, stat } // split into batches and exit each batch in a transaction - for _, idBatch := range chunkSlice(sessionIDs, 100) { + for _, idBatch := range ChunkSlice(sessionIDs, 100) { tx, err := db.BeginTxx(ctx, nil) if err != nil { return errors.Wrapf(err, "error starting transaction to exit sessions") diff --git a/core/models/sessions_test.go b/core/models/sessions_test.go index c56e04ab0..98bbff9ac 100644 --- 
a/core/models/sessions_test.go +++ b/core/models/sessions_test.go @@ -11,6 +11,7 @@ import ( "github.com/nyaruka/goflow/flows" "github.com/nyaruka/goflow/test" "github.com/nyaruka/mailroom/core/models" + "github.com/nyaruka/mailroom/runtime" "github.com/nyaruka/mailroom/testsuite" "github.com/nyaruka/mailroom/testsuite/testdata" "github.com/stretchr/testify/assert" @@ -18,22 +19,22 @@ import ( ) func TestSessionCreationAndUpdating(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetData) - testFlows := testdata.ImportFlows(db, testdata.Org1, "testdata/session_test_flows.json") + testFlows := testdata.ImportFlows(rt, testdata.Org1, "testdata/session_test_flows.json") flow := testFlows[0] oa, err := models.GetOrgAssetsWithRefresh(ctx, rt, testdata.Org1.ID, models.RefreshFlows) require.NoError(t, err) - modelContact, _ := testdata.Bob.Load(db, oa) + modelContact, _ := testdata.Bob.Load(rt, oa) sa, flowSession, sprint1 := test.NewSessionBuilder().WithAssets(oa.SessionAssets()).WithFlow(flow.UUID). WithContact(testdata.Bob.UUID, flows.ContactID(testdata.Bob.ID), "Bob", "eng", "").MustBuild() - tx := db.MustBegin() + tx := rt.DB.MustBegin() hookCalls := 0 hook := func(context.Context, *sqlx.Tx, *redis.Pool, *models.OrgAssets, []*models.Session) error { @@ -62,13 +63,13 @@ func TestSessionCreationAndUpdating(t *testing.T) { assert.NotNil(t, session.Timeout()) // check that matches what is in the db - assertdb.Query(t, db, `SELECT status, session_type, current_flow_id, responded, ended_on, wait_resume_on_expire FROM flows_flowsession`). + assertdb.Query(t, rt.DB, `SELECT status, session_type, current_flow_id, responded, ended_on, wait_resume_on_expire FROM flows_flowsession`). 
Columns(map[string]interface{}{ "status": "W", "session_type": "M", "current_flow_id": int64(flow.ID), "responded": false, "ended_on": nil, "wait_resume_on_expire": false, }) // reload contact and check current flow is set - modelContact, _ = testdata.Bob.Load(db, oa) + modelContact, _ = testdata.Bob.Load(rt, oa) assert.Equal(t, flow.ID, modelContact.CurrentFlowID()) flowSession, err = session.FlowSession(rt.Config, oa.SessionAssets(), oa.Env()) @@ -77,7 +78,7 @@ func TestSessionCreationAndUpdating(t *testing.T) { flowSession, sprint2, err := test.ResumeSession(flowSession, sa, "no") require.NoError(t, err) - tx = db.MustBegin() + tx = rt.DB.MustBegin() err = session.Update(ctx, rt, tx, oa, flowSession, sprint2, modelContact, hook) require.NoError(t, err) @@ -99,7 +100,7 @@ func TestSessionCreationAndUpdating(t *testing.T) { flowSession, sprint3, err := test.ResumeSession(flowSession, sa, "yes") require.NoError(t, err) - tx = db.MustBegin() + tx = rt.DB.MustBegin() err = session.Update(ctx, rt, tx, oa, flowSession, sprint3, modelContact, hook) require.NoError(t, err) @@ -118,33 +119,33 @@ func TestSessionCreationAndUpdating(t *testing.T) { assert.NotNil(t, session.EndedOn()) // check that matches what is in the db - assertdb.Query(t, db, `SELECT status, session_type, current_flow_id, responded FROM flows_flowsession`). + assertdb.Query(t, rt.DB, `SELECT status, session_type, current_flow_id, responded FROM flows_flowsession`). 
Columns(map[string]interface{}{"status": "C", "session_type": "M", "current_flow_id": nil, "responded": true}) - assertdb.Query(t, db, `SELECT current_flow_id FROM contacts_contact WHERE id = $1`, testdata.Bob.ID).Returns(nil) + assertdb.Query(t, rt.DB, `SELECT current_flow_id FROM contacts_contact WHERE id = $1`, testdata.Bob.ID).Returns(nil) // reload contact and check current flow is cleared - modelContact, _ = testdata.Bob.Load(db, oa) + modelContact, _ = testdata.Bob.Load(rt, oa) assert.Equal(t, models.NilFlowID, modelContact.CurrentFlowID()) } func TestSingleSprintSession(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetData) - testFlows := testdata.ImportFlows(db, testdata.Org1, "testdata/session_test_flows.json") + testFlows := testdata.ImportFlows(rt, testdata.Org1, "testdata/session_test_flows.json") flow := testFlows[1] oa, err := models.GetOrgAssetsWithRefresh(ctx, rt, testdata.Org1.ID, models.RefreshFlows) require.NoError(t, err) - modelContact, _ := testdata.Bob.Load(db, oa) + modelContact, _ := testdata.Bob.Load(rt, oa) _, flowSession, sprint1 := test.NewSessionBuilder().WithAssets(oa.SessionAssets()).WithFlow(flow.UUID). WithContact(testdata.Bob.UUID, flows.ContactID(testdata.Bob.ID), "Bob", "eng", "").MustBuild() - tx := db.MustBegin() + tx := rt.DB.MustBegin() hookCalls := 0 hook := func(context.Context, *sqlx.Tx, *redis.Pool, *models.OrgAssets, []*models.Session) error { @@ -172,27 +173,27 @@ func TestSingleSprintSession(t *testing.T) { assert.Nil(t, session.Timeout()) // check that matches what is in the db - assertdb.Query(t, db, `SELECT status, session_type, current_flow_id, responded FROM flows_flowsession`). + assertdb.Query(t, rt.DB, `SELECT status, session_type, current_flow_id, responded FROM flows_flowsession`). 
Columns(map[string]interface{}{"status": "C", "session_type": "M", "current_flow_id": nil, "responded": false}) } func TestSessionWithSubflows(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetData) - testFlows := testdata.ImportFlows(db, testdata.Org1, "testdata/session_test_flows.json") + testFlows := testdata.ImportFlows(rt, testdata.Org1, "testdata/session_test_flows.json") parent, child := testFlows[2], testFlows[3] oa, err := models.GetOrgAssetsWithRefresh(ctx, rt, testdata.Org1.ID, models.RefreshFlows) require.NoError(t, err) - modelContact, _ := testdata.Cathy.Load(db, oa) + modelContact, _ := testdata.Cathy.Load(rt, oa) sa, flowSession, sprint1 := test.NewSessionBuilder().WithAssets(oa.SessionAssets()).WithFlow(parent.UUID). WithContact(testdata.Cathy.UUID, flows.ContactID(testdata.Cathy.ID), "Cathy", "eng", "").MustBuild() - tx := db.MustBegin() + tx := rt.DB.MustBegin() hookCalls := 0 hook := func(context.Context, *sqlx.Tx, *redis.Pool, *models.OrgAssets, []*models.Session) error { @@ -221,7 +222,7 @@ func TestSessionWithSubflows(t *testing.T) { assert.Nil(t, session.Timeout()) // check that matches what is in the db - assertdb.Query(t, db, `SELECT status, session_type, current_flow_id, responded, ended_on, wait_resume_on_expire FROM flows_flowsession`). + assertdb.Query(t, rt.DB, `SELECT status, session_type, current_flow_id, responded, ended_on, wait_resume_on_expire FROM flows_flowsession`). 
Columns(map[string]interface{}{ "status": "W", "session_type": "M", "current_flow_id": int64(child.ID), "responded": false, "ended_on": nil, "wait_resume_on_expire": true, }) @@ -232,7 +233,7 @@ func TestSessionWithSubflows(t *testing.T) { flowSession, sprint2, err := test.ResumeSession(flowSession, sa, "yes") require.NoError(t, err) - tx = db.MustBegin() + tx = rt.DB.MustBegin() err = session.Update(ctx, rt, tx, oa, flowSession, sprint2, modelContact, hook) require.NoError(t, err) @@ -250,22 +251,22 @@ func TestSessionWithSubflows(t *testing.T) { } func TestSessionFailedStart(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetData) - testFlows := testdata.ImportFlows(db, testdata.Org1, "testdata/ping_pong.json") + testFlows := testdata.ImportFlows(rt, testdata.Org1, "testdata/ping_pong.json") ping, pong := testFlows[0], testFlows[1] oa, err := models.GetOrgAssetsWithRefresh(ctx, rt, testdata.Org1.ID, models.RefreshFlows) require.NoError(t, err) - modelContact, _ := testdata.Cathy.Load(db, oa) + modelContact, _ := testdata.Cathy.Load(rt, oa) _, flowSession, sprint1 := test.NewSessionBuilder().WithAssets(oa.SessionAssets()).WithFlow(ping.UUID). WithContact(testdata.Cathy.UUID, flows.ContactID(testdata.Cathy.ID), "Cathy", "eng", "").MustBuild() - tx := db.MustBegin() + tx := rt.DB.MustBegin() hookCalls := 0 hook := func(context.Context, *sqlx.Tx, *redis.Pool, *models.OrgAssets, []*models.Session) error { @@ -289,62 +290,62 @@ func TestSessionFailedStart(t *testing.T) { assert.NotNil(t, session.EndedOn()) // check that matches what is in the db - assertdb.Query(t, db, `SELECT status, session_type, current_flow_id, responded FROM flows_flowsession`). + assertdb.Query(t, rt.DB, `SELECT status, session_type, current_flow_id, responded FROM flows_flowsession`). 
Columns(map[string]interface{}{"status": "F", "session_type": "M", "current_flow_id": nil, "responded": false}) - assertdb.Query(t, db, `SELECT count(*) FROM flows_flowsession WHERE ended_on IS NOT NULL`).Returns(1) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM flows_flowsession WHERE ended_on IS NOT NULL`).Returns(1) // check the state of all the created runs - assertdb.Query(t, db, `SELECT count(*) FROM flows_flowrun`).Returns(101) - assertdb.Query(t, db, `SELECT count(*) FROM flows_flowrun WHERE flow_id = $1`, ping.ID).Returns(51) - assertdb.Query(t, db, `SELECT count(*) FROM flows_flowrun WHERE flow_id = $1`, pong.ID).Returns(50) - assertdb.Query(t, db, `SELECT count(*) FROM flows_flowrun WHERE status = 'F' AND exited_on IS NOT NULL`).Returns(101) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM flows_flowrun`).Returns(101) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM flows_flowrun WHERE flow_id = $1`, ping.ID).Returns(51) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM flows_flowrun WHERE flow_id = $1`, pong.ID).Returns(50) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM flows_flowrun WHERE status = 'F' AND exited_on IS NOT NULL`).Returns(101) } func TestInterruptSessionsForContacts(t *testing.T) { - ctx, _, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetData) - session1ID, _ := insertSessionAndRun(db, testdata.Cathy, models.FlowTypeMessaging, models.SessionStatusCompleted, testdata.Favorites, models.NilCallID) - session2ID, run2ID := insertSessionAndRun(db, testdata.Cathy, models.FlowTypeVoice, models.SessionStatusWaiting, testdata.Favorites, models.NilCallID) - session3ID, _ := insertSessionAndRun(db, testdata.Bob, models.FlowTypeMessaging, models.SessionStatusWaiting, testdata.Favorites, models.NilCallID) - session4ID, _ := insertSessionAndRun(db, testdata.George, models.FlowTypeMessaging, models.SessionStatusWaiting, testdata.Favorites, models.NilCallID) + session1ID, _ := insertSessionAndRun(rt, 
testdata.Cathy, models.FlowTypeMessaging, models.SessionStatusCompleted, testdata.Favorites, models.NilCallID) + session2ID, run2ID := insertSessionAndRun(rt, testdata.Cathy, models.FlowTypeVoice, models.SessionStatusWaiting, testdata.Favorites, models.NilCallID) + session3ID, _ := insertSessionAndRun(rt, testdata.Bob, models.FlowTypeMessaging, models.SessionStatusWaiting, testdata.Favorites, models.NilCallID) + session4ID, _ := insertSessionAndRun(rt, testdata.George, models.FlowTypeMessaging, models.SessionStatusWaiting, testdata.Favorites, models.NilCallID) // noop if no contacts - _, err := models.InterruptSessionsForContacts(ctx, db, []models.ContactID{}) + _, err := models.InterruptSessionsForContacts(ctx, rt.DB, []models.ContactID{}) assert.NoError(t, err) - assertSessionAndRunStatus(t, db, session1ID, models.SessionStatusCompleted) - assertSessionAndRunStatus(t, db, session2ID, models.SessionStatusWaiting) - assertSessionAndRunStatus(t, db, session3ID, models.SessionStatusWaiting) - assertSessionAndRunStatus(t, db, session4ID, models.SessionStatusWaiting) + assertSessionAndRunStatus(t, rt, session1ID, models.SessionStatusCompleted) + assertSessionAndRunStatus(t, rt, session2ID, models.SessionStatusWaiting) + assertSessionAndRunStatus(t, rt, session3ID, models.SessionStatusWaiting) + assertSessionAndRunStatus(t, rt, session4ID, models.SessionStatusWaiting) - count, err := models.InterruptSessionsForContacts(ctx, db, []models.ContactID{testdata.Cathy.ID, testdata.Bob.ID, testdata.Alexandria.ID}) + count, err := models.InterruptSessionsForContacts(ctx, rt.DB, []models.ContactID{testdata.Cathy.ID, testdata.Bob.ID, testdata.Alexandria.ID}) assert.Equal(t, 2, count) assert.NoError(t, err) - assertSessionAndRunStatus(t, db, session1ID, models.SessionStatusCompleted) // wasn't waiting - assertSessionAndRunStatus(t, db, session2ID, models.SessionStatusInterrupted) - assertSessionAndRunStatus(t, db, session3ID, models.SessionStatusInterrupted) - 
assertSessionAndRunStatus(t, db, session4ID, models.SessionStatusWaiting) // contact not included + assertSessionAndRunStatus(t, rt, session1ID, models.SessionStatusCompleted) // wasn't waiting + assertSessionAndRunStatus(t, rt, session2ID, models.SessionStatusInterrupted) + assertSessionAndRunStatus(t, rt, session3ID, models.SessionStatusInterrupted) + assertSessionAndRunStatus(t, rt, session4ID, models.SessionStatusWaiting) // contact not included // check other columns are correct on interrupted session, run and contact - assertdb.Query(t, db, `SELECT count(*) FROM flows_flowsession WHERE ended_on IS NOT NULL AND wait_started_on IS NULL AND wait_expires_on IS NULL AND timeout_on IS NULL AND current_flow_id IS NULL AND id = $1`, session2ID).Returns(1) - assertdb.Query(t, db, `SELECT status FROM flows_flowrun WHERE id = $1`, run2ID).Columns(map[string]interface{}{"status": "I"}) - assertdb.Query(t, db, `SELECT current_flow_id FROM contacts_contact WHERE id = $1`, testdata.Cathy.ID).Returns(nil) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM flows_flowsession WHERE ended_on IS NOT NULL AND wait_started_on IS NULL AND wait_expires_on IS NULL AND timeout_on IS NULL AND current_flow_id IS NULL AND id = $1`, session2ID).Returns(1) + assertdb.Query(t, rt.DB, `SELECT status FROM flows_flowrun WHERE id = $1`, run2ID).Columns(map[string]interface{}{"status": "I"}) + assertdb.Query(t, rt.DB, `SELECT current_flow_id FROM contacts_contact WHERE id = $1`, testdata.Cathy.ID).Returns(nil) } func TestInterruptSessionsForContactsTx(t *testing.T) { - ctx, _, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetData) - session1ID, _ := insertSessionAndRun(db, testdata.Cathy, models.FlowTypeMessaging, models.SessionStatusCompleted, testdata.Favorites, models.NilCallID) - session2ID, run2ID := insertSessionAndRun(db, testdata.Cathy, models.FlowTypeVoice, models.SessionStatusWaiting, testdata.Favorites, models.NilCallID) - session3ID, _ := 
insertSessionAndRun(db, testdata.Bob, models.FlowTypeMessaging, models.SessionStatusWaiting, testdata.Favorites, models.NilCallID) - session4ID, _ := insertSessionAndRun(db, testdata.George, models.FlowTypeMessaging, models.SessionStatusWaiting, testdata.Favorites, models.NilCallID) + session1ID, _ := insertSessionAndRun(rt, testdata.Cathy, models.FlowTypeMessaging, models.SessionStatusCompleted, testdata.Favorites, models.NilCallID) + session2ID, run2ID := insertSessionAndRun(rt, testdata.Cathy, models.FlowTypeVoice, models.SessionStatusWaiting, testdata.Favorites, models.NilCallID) + session3ID, _ := insertSessionAndRun(rt, testdata.Bob, models.FlowTypeMessaging, models.SessionStatusWaiting, testdata.Favorites, models.NilCallID) + session4ID, _ := insertSessionAndRun(rt, testdata.George, models.FlowTypeMessaging, models.SessionStatusWaiting, testdata.Favorites, models.NilCallID) - tx := db.MustBegin() + tx := rt.DB.MustBegin() // noop if no contacts err := models.InterruptSessionsForContactsTx(ctx, tx, []models.ContactID{}) @@ -352,128 +353,128 @@ func TestInterruptSessionsForContactsTx(t *testing.T) { require.NoError(t, tx.Commit()) - assertSessionAndRunStatus(t, db, session1ID, models.SessionStatusCompleted) - assertSessionAndRunStatus(t, db, session2ID, models.SessionStatusWaiting) - assertSessionAndRunStatus(t, db, session3ID, models.SessionStatusWaiting) - assertSessionAndRunStatus(t, db, session4ID, models.SessionStatusWaiting) + assertSessionAndRunStatus(t, rt, session1ID, models.SessionStatusCompleted) + assertSessionAndRunStatus(t, rt, session2ID, models.SessionStatusWaiting) + assertSessionAndRunStatus(t, rt, session3ID, models.SessionStatusWaiting) + assertSessionAndRunStatus(t, rt, session4ID, models.SessionStatusWaiting) - tx = db.MustBegin() + tx = rt.DB.MustBegin() err = models.InterruptSessionsForContactsTx(ctx, tx, []models.ContactID{testdata.Cathy.ID, testdata.Bob.ID}) require.NoError(t, err) require.NoError(t, tx.Commit()) - 
assertSessionAndRunStatus(t, db, session1ID, models.SessionStatusCompleted) // wasn't waiting - assertSessionAndRunStatus(t, db, session2ID, models.SessionStatusInterrupted) - assertSessionAndRunStatus(t, db, session3ID, models.SessionStatusInterrupted) - assertSessionAndRunStatus(t, db, session4ID, models.SessionStatusWaiting) // contact not included + assertSessionAndRunStatus(t, rt, session1ID, models.SessionStatusCompleted) // wasn't waiting + assertSessionAndRunStatus(t, rt, session2ID, models.SessionStatusInterrupted) + assertSessionAndRunStatus(t, rt, session3ID, models.SessionStatusInterrupted) + assertSessionAndRunStatus(t, rt, session4ID, models.SessionStatusWaiting) // contact not included // check other columns are correct on interrupted session, run and contact - assertdb.Query(t, db, `SELECT count(*) FROM flows_flowsession WHERE ended_on IS NOT NULL AND wait_started_on IS NULL AND wait_expires_on IS NULL AND timeout_on IS NULL AND current_flow_id IS NULL AND id = $1`, session2ID).Returns(1) - assertdb.Query(t, db, `SELECT status FROM flows_flowrun WHERE id = $1`, run2ID).Columns(map[string]interface{}{"status": "I"}) - assertdb.Query(t, db, `SELECT current_flow_id FROM contacts_contact WHERE id = $1`, testdata.Cathy.ID).Returns(nil) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM flows_flowsession WHERE ended_on IS NOT NULL AND wait_started_on IS NULL AND wait_expires_on IS NULL AND timeout_on IS NULL AND current_flow_id IS NULL AND id = $1`, session2ID).Returns(1) + assertdb.Query(t, rt.DB, `SELECT status FROM flows_flowrun WHERE id = $1`, run2ID).Columns(map[string]interface{}{"status": "I"}) + assertdb.Query(t, rt.DB, `SELECT current_flow_id FROM contacts_contact WHERE id = $1`, testdata.Cathy.ID).Returns(nil) } func TestInterruptSessionsForChannels(t *testing.T) { - ctx, _, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetData) - cathy1CallID := testdata.InsertCall(db, testdata.Org1, 
testdata.TwilioChannel, testdata.Cathy) - cathy2CallID := testdata.InsertCall(db, testdata.Org1, testdata.TwilioChannel, testdata.Cathy) - bobCallID := testdata.InsertCall(db, testdata.Org1, testdata.TwilioChannel, testdata.Bob) - georgeCallID := testdata.InsertCall(db, testdata.Org1, testdata.VonageChannel, testdata.George) + cathy1CallID := testdata.InsertCall(rt, testdata.Org1, testdata.TwilioChannel, testdata.Cathy) + cathy2CallID := testdata.InsertCall(rt, testdata.Org1, testdata.TwilioChannel, testdata.Cathy) + bobCallID := testdata.InsertCall(rt, testdata.Org1, testdata.TwilioChannel, testdata.Bob) + georgeCallID := testdata.InsertCall(rt, testdata.Org1, testdata.VonageChannel, testdata.George) - session1ID, _ := insertSessionAndRun(db, testdata.Cathy, models.FlowTypeMessaging, models.SessionStatusCompleted, testdata.Favorites, cathy1CallID) - session2ID, _ := insertSessionAndRun(db, testdata.Cathy, models.FlowTypeMessaging, models.SessionStatusWaiting, testdata.Favorites, cathy2CallID) - session3ID, _ := insertSessionAndRun(db, testdata.Bob, models.FlowTypeMessaging, models.SessionStatusWaiting, testdata.Favorites, bobCallID) - session4ID, _ := insertSessionAndRun(db, testdata.George, models.FlowTypeMessaging, models.SessionStatusWaiting, testdata.Favorites, georgeCallID) + session1ID, _ := insertSessionAndRun(rt, testdata.Cathy, models.FlowTypeMessaging, models.SessionStatusCompleted, testdata.Favorites, cathy1CallID) + session2ID, _ := insertSessionAndRun(rt, testdata.Cathy, models.FlowTypeMessaging, models.SessionStatusWaiting, testdata.Favorites, cathy2CallID) + session3ID, _ := insertSessionAndRun(rt, testdata.Bob, models.FlowTypeMessaging, models.SessionStatusWaiting, testdata.Favorites, bobCallID) + session4ID, _ := insertSessionAndRun(rt, testdata.George, models.FlowTypeMessaging, models.SessionStatusWaiting, testdata.Favorites, georgeCallID) - err := models.InterruptSessionsForChannel(ctx, db, testdata.TwilioChannel.ID) + err := 
models.InterruptSessionsForChannel(ctx, rt.DB, testdata.TwilioChannel.ID) require.NoError(t, err) - assertSessionAndRunStatus(t, db, session1ID, models.SessionStatusCompleted) // wasn't waiting - assertSessionAndRunStatus(t, db, session2ID, models.SessionStatusInterrupted) - assertSessionAndRunStatus(t, db, session3ID, models.SessionStatusInterrupted) - assertSessionAndRunStatus(t, db, session4ID, models.SessionStatusWaiting) // channel not included + assertSessionAndRunStatus(t, rt, session1ID, models.SessionStatusCompleted) // wasn't waiting + assertSessionAndRunStatus(t, rt, session2ID, models.SessionStatusInterrupted) + assertSessionAndRunStatus(t, rt, session3ID, models.SessionStatusInterrupted) + assertSessionAndRunStatus(t, rt, session4ID, models.SessionStatusWaiting) // channel not included // check other columns are correct on interrupted session and contact - assertdb.Query(t, db, `SELECT count(*) FROM flows_flowsession WHERE ended_on IS NOT NULL AND wait_started_on IS NULL AND wait_expires_on IS NULL AND timeout_on IS NULL AND current_flow_id IS NULL AND id = $1`, session2ID).Returns(1) - assertdb.Query(t, db, `SELECT current_flow_id FROM contacts_contact WHERE id = $1`, testdata.Cathy.ID).Returns(nil) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM flows_flowsession WHERE ended_on IS NOT NULL AND wait_started_on IS NULL AND wait_expires_on IS NULL AND timeout_on IS NULL AND current_flow_id IS NULL AND id = $1`, session2ID).Returns(1) + assertdb.Query(t, rt.DB, `SELECT current_flow_id FROM contacts_contact WHERE id = $1`, testdata.Cathy.ID).Returns(nil) } func TestInterruptSessionsForFlows(t *testing.T) { - ctx, _, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetData) - cathy1CallID := testdata.InsertCall(db, testdata.Org1, testdata.TwilioChannel, testdata.Cathy) - cathy2CallID := testdata.InsertCall(db, testdata.Org1, testdata.TwilioChannel, testdata.Cathy) - bobCallID := testdata.InsertCall(db, 
testdata.Org1, testdata.TwilioChannel, testdata.Bob) - georgeCallID := testdata.InsertCall(db, testdata.Org1, testdata.VonageChannel, testdata.George) + cathy1CallID := testdata.InsertCall(rt, testdata.Org1, testdata.TwilioChannel, testdata.Cathy) + cathy2CallID := testdata.InsertCall(rt, testdata.Org1, testdata.TwilioChannel, testdata.Cathy) + bobCallID := testdata.InsertCall(rt, testdata.Org1, testdata.TwilioChannel, testdata.Bob) + georgeCallID := testdata.InsertCall(rt, testdata.Org1, testdata.VonageChannel, testdata.George) - session1ID, _ := insertSessionAndRun(db, testdata.Cathy, models.FlowTypeMessaging, models.SessionStatusCompleted, testdata.Favorites, cathy1CallID) - session2ID, _ := insertSessionAndRun(db, testdata.Cathy, models.FlowTypeMessaging, models.SessionStatusWaiting, testdata.Favorites, cathy2CallID) - session3ID, _ := insertSessionAndRun(db, testdata.Bob, models.FlowTypeMessaging, models.SessionStatusWaiting, testdata.Favorites, bobCallID) - session4ID, _ := insertSessionAndRun(db, testdata.George, models.FlowTypeMessaging, models.SessionStatusWaiting, testdata.PickANumber, georgeCallID) + session1ID, _ := insertSessionAndRun(rt, testdata.Cathy, models.FlowTypeMessaging, models.SessionStatusCompleted, testdata.Favorites, cathy1CallID) + session2ID, _ := insertSessionAndRun(rt, testdata.Cathy, models.FlowTypeMessaging, models.SessionStatusWaiting, testdata.Favorites, cathy2CallID) + session3ID, _ := insertSessionAndRun(rt, testdata.Bob, models.FlowTypeMessaging, models.SessionStatusWaiting, testdata.Favorites, bobCallID) + session4ID, _ := insertSessionAndRun(rt, testdata.George, models.FlowTypeMessaging, models.SessionStatusWaiting, testdata.PickANumber, georgeCallID) // noop if no flows - err := models.InterruptSessionsForFlows(ctx, db, []models.FlowID{}) + err := models.InterruptSessionsForFlows(ctx, rt.DB, []models.FlowID{}) require.NoError(t, err) - assertSessionAndRunStatus(t, db, session1ID, models.SessionStatusCompleted) - 
assertSessionAndRunStatus(t, db, session2ID, models.SessionStatusWaiting) - assertSessionAndRunStatus(t, db, session3ID, models.SessionStatusWaiting) - assertSessionAndRunStatus(t, db, session4ID, models.SessionStatusWaiting) + assertSessionAndRunStatus(t, rt, session1ID, models.SessionStatusCompleted) + assertSessionAndRunStatus(t, rt, session2ID, models.SessionStatusWaiting) + assertSessionAndRunStatus(t, rt, session3ID, models.SessionStatusWaiting) + assertSessionAndRunStatus(t, rt, session4ID, models.SessionStatusWaiting) - err = models.InterruptSessionsForFlows(ctx, db, []models.FlowID{testdata.Favorites.ID}) + err = models.InterruptSessionsForFlows(ctx, rt.DB, []models.FlowID{testdata.Favorites.ID}) require.NoError(t, err) - assertSessionAndRunStatus(t, db, session1ID, models.SessionStatusCompleted) // wasn't waiting - assertSessionAndRunStatus(t, db, session2ID, models.SessionStatusInterrupted) - assertSessionAndRunStatus(t, db, session3ID, models.SessionStatusInterrupted) - assertSessionAndRunStatus(t, db, session4ID, models.SessionStatusWaiting) // flow not included + assertSessionAndRunStatus(t, rt, session1ID, models.SessionStatusCompleted) // wasn't waiting + assertSessionAndRunStatus(t, rt, session2ID, models.SessionStatusInterrupted) + assertSessionAndRunStatus(t, rt, session3ID, models.SessionStatusInterrupted) + assertSessionAndRunStatus(t, rt, session4ID, models.SessionStatusWaiting) // flow not included // check other columns are correct on interrupted session and contact - assertdb.Query(t, db, `SELECT count(*) FROM flows_flowsession WHERE ended_on IS NOT NULL AND wait_started_on IS NULL AND wait_expires_on IS NULL AND timeout_on IS NULL AND current_flow_id IS NULL AND id = $1`, session2ID).Returns(1) - assertdb.Query(t, db, `SELECT current_flow_id FROM contacts_contact WHERE id = $1`, testdata.Cathy.ID).Returns(nil) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM flows_flowsession WHERE ended_on IS NOT NULL AND wait_started_on IS NULL AND 
wait_expires_on IS NULL AND timeout_on IS NULL AND current_flow_id IS NULL AND id = $1`, session2ID).Returns(1) + assertdb.Query(t, rt.DB, `SELECT current_flow_id FROM contacts_contact WHERE id = $1`, testdata.Cathy.ID).Returns(nil) } func TestGetSessionWaitExpiresOn(t *testing.T) { - ctx, _, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetData) s1Expires := time.Date(2022, 1, 26, 13, 28, 30, 0, time.UTC) - s1ID := testdata.InsertWaitingSession(db, testdata.Org1, testdata.Cathy, models.FlowTypeMessaging, testdata.Favorites, models.NilCallID, time.Now(), s1Expires, true, nil) + s1ID := testdata.InsertWaitingSession(rt, testdata.Org1, testdata.Cathy, models.FlowTypeMessaging, testdata.Favorites, models.NilCallID, time.Now(), s1Expires, true, nil) - s1Actual, err := models.GetSessionWaitExpiresOn(ctx, db, s1ID) + s1Actual, err := models.GetSessionWaitExpiresOn(ctx, rt.DB, s1ID) assert.NoError(t, err) assert.Equal(t, s1Expires, *s1Actual) // for a non-waiting session, should return nil - s2ID := testdata.InsertFlowSession(db, testdata.Org1, testdata.Cathy, models.FlowTypeMessaging, models.SessionStatusCompleted, testdata.Favorites, models.NilCallID) + s2ID := testdata.InsertFlowSession(rt, testdata.Org1, testdata.Cathy, models.FlowTypeMessaging, models.SessionStatusCompleted, testdata.Favorites, models.NilCallID) - s2Actual, err := models.GetSessionWaitExpiresOn(ctx, db, s2ID) + s2Actual, err := models.GetSessionWaitExpiresOn(ctx, rt.DB, s2ID) assert.NoError(t, err) assert.Nil(t, s2Actual) } func TestClearWaitTimeout(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetData) oa := testdata.Org1.Load(rt) - _, cathy := testdata.Cathy.Load(db, oa) + _, cathy := testdata.Cathy.Load(rt, oa) expiresOn := time.Now().Add(time.Hour) timeoutOn := time.Now().Add(time.Minute) - testdata.InsertWaitingSession(db, testdata.Org1, testdata.Cathy, models.FlowTypeMessaging, 
testdata.Favorites, models.NilCallID, time.Now(), expiresOn, true, &timeoutOn) + testdata.InsertWaitingSession(rt, testdata.Org1, testdata.Cathy, models.FlowTypeMessaging, testdata.Favorites, models.NilCallID, time.Now(), expiresOn, true, &timeoutOn) - session, err := models.FindWaitingSessionForContact(ctx, db, nil, oa, models.FlowTypeMessaging, cathy) + session, err := models.FindWaitingSessionForContact(ctx, rt.DB, nil, oa, models.FlowTypeMessaging, cathy) require.NoError(t, err) // can be called without db connection to clear without updating db @@ -482,24 +483,24 @@ func TestClearWaitTimeout(t *testing.T) { assert.NotNil(t, session.WaitExpiresOn()) // unaffected // and called with one to clear in the database as well - session.ClearWaitTimeout(ctx, db) + session.ClearWaitTimeout(ctx, rt.DB) assert.Nil(t, session.WaitTimeoutOn()) - assertdb.Query(t, db, `SELECT timeout_on FROM flows_flowsession WHERE id = $1`, session.ID()).Returns(nil) + assertdb.Query(t, rt.DB, `SELECT timeout_on FROM flows_flowsession WHERE id = $1`, session.ID()).Returns(nil) } -func insertSessionAndRun(db *sqlx.DB, contact *testdata.Contact, sessionType models.FlowType, status models.SessionStatus, flow *testdata.Flow, connID models.CallID) (models.SessionID, models.FlowRunID) { +func insertSessionAndRun(rt *runtime.Runtime, contact *testdata.Contact, sessionType models.FlowType, status models.SessionStatus, flow *testdata.Flow, connID models.CallID) (models.SessionID, models.FlowRunID) { // create session and add a run with same status - sessionID := testdata.InsertFlowSession(db, testdata.Org1, contact, sessionType, status, flow, connID) - runID := testdata.InsertFlowRun(db, testdata.Org1, sessionID, contact, flow, models.RunStatus(status)) + sessionID := testdata.InsertFlowSession(rt, testdata.Org1, contact, sessionType, status, flow, connID) + runID := testdata.InsertFlowRun(rt, testdata.Org1, sessionID, contact, flow, models.RunStatus(status)) // mark contact as being in that flow - 
db.MustExec(`UPDATE contacts_contact SET current_flow_id = $2 WHERE id = $1`, contact.ID, flow.ID) + rt.DB.MustExec(`UPDATE contacts_contact SET current_flow_id = $2 WHERE id = $1`, contact.ID, flow.ID) return sessionID, runID } -func assertSessionAndRunStatus(t *testing.T, db *sqlx.DB, sessionID models.SessionID, status models.SessionStatus) { - assertdb.Query(t, db, `SELECT status FROM flows_flowsession WHERE id = $1`, sessionID).Columns(map[string]interface{}{"status": string(status)}) - assertdb.Query(t, db, `SELECT status FROM flows_flowrun WHERE session_id = $1`, sessionID).Columns(map[string]interface{}{"status": string(status)}) +func assertSessionAndRunStatus(t *testing.T, rt *runtime.Runtime, sessionID models.SessionID, status models.SessionStatus) { + assertdb.Query(t, rt.DB, `SELECT status FROM flows_flowsession WHERE id = $1`, sessionID).Columns(map[string]interface{}{"status": string(status)}) + assertdb.Query(t, rt.DB, `SELECT status FROM flows_flowrun WHERE session_id = $1`, sessionID).Columns(map[string]interface{}{"status": string(status)}) } diff --git a/core/models/starts.go b/core/models/starts.go index db9013037..166a4303f 100644 --- a/core/models/starts.go +++ b/core/models/starts.go @@ -9,16 +9,21 @@ import ( "github.com/nyaruka/gocommon/urns" "github.com/nyaruka/gocommon/uuids" "github.com/nyaruka/goflow/flows" - "github.com/nyaruka/null" + "github.com/nyaruka/null/v2" "github.com/pkg/errors" ) // StartID is our type for flow start idst -type StartID null.Int +type StartID int // NilStartID is our constant for a nil start id var NilStartID = StartID(0) +func (i *StartID) Scan(value any) error { return null.ScanInt(value, i) } +func (i StartID) Value() (driver.Value, error) { return null.IntValue(i) } +func (i *StartID) UnmarshalJSON(b []byte) error { return null.UnmarshalInt(b, i) } +func (i StartID) MarshalJSON() ([]byte, error) { return null.MarshalInt(i) } + // StartType is the type for the type of a start type StartType string @@ 
-42,255 +47,172 @@ const ( StartStatusFailed = StartStatus("F") ) -// MarkStartComplete sets the status for the passed in flow start -func MarkStartComplete(ctx context.Context, db Queryer, startID StartID) error { - _, err := db.ExecContext(ctx, "UPDATE flows_flowstart SET status = 'C', modified_on = NOW() WHERE id = $1", startID) - if err != nil { - return errors.Wrapf(err, "error setting start as complete") - } - return nil +// Exclusions are preset exclusion conditions +type Exclusions struct { + NonActive bool `json:"non_active"` // contacts who are blocked, stopped or archived + InAFlow bool `json:"in_a_flow"` // contacts who are currently in a flow (including this one) + StartedPreviously bool `json:"started_previously"` // contacts who have been in this flow in the last 90 days + NotSeenSinceDays int `json:"not_seen_since_days"` // contacts who have not been seen for more than this number of days } -// MarkStartStarted sets the status for the passed in flow start to S and updates the contact count on it -func MarkStartStarted(ctx context.Context, db Queryer, startID StartID, contactCount int, createdContactIDs []ContactID) error { - _, err := db.ExecContext(ctx, "UPDATE flows_flowstart SET status = 'S', contact_count = $2, modified_on = NOW() WHERE id = $1", startID, contactCount) - if err != nil { - return errors.Wrapf(err, "error setting start as started") - } - - // if we created contacts, add them to the start for logging - if len(createdContactIDs) > 0 { - type startContact struct { - StartID StartID `db:"flowstart_id"` - ContactID ContactID `db:"contact_id"` - } +// NoExclusions is a constant for the empty value +var NoExclusions = Exclusions{} - args := make([]*startContact, len(createdContactIDs)) - for i, id := range createdContactIDs { - args[i] = &startContact{StartID: startID, ContactID: id} - } - return BulkQuery( - ctx, "adding created contacts to flow start", db, - `INSERT INTO flows_flowstart_contacts(flowstart_id, contact_id) 
VALUES(:flowstart_id, :contact_id) ON CONFLICT DO NOTHING`, - args, - ) +// Scan supports reading exclusion values from JSON in database +func (e *Exclusions) Scan(value any) error { + if value == nil { + *e = Exclusions{} + return nil } - return nil -} - -// MarkStartFailed sets the status for the passed in flow start to F -func MarkStartFailed(ctx context.Context, db Queryer, startID StartID) error { - _, err := db.ExecContext(ctx, "UPDATE flows_flowstart SET status = 'F', modified_on = NOW() WHERE id = $1", startID) - if err != nil { - return errors.Wrapf(err, "error setting start as failed") - } - return nil -} - -// FlowStartBatch represents a single flow batch that needs to be started -type FlowStartBatch struct { - b struct { - StartID StartID `json:"start_id"` - StartType StartType `json:"start_type"` - OrgID OrgID `json:"org_id"` - CreatedByID UserID `json:"created_by_id"` - FlowID FlowID `json:"flow_id"` - FlowType FlowType `json:"flow_type"` - ContactIDs []ContactID `json:"contact_ids"` - - ParentSummary null.JSON `json:"parent_summary,omitempty"` - SessionHistory null.JSON `json:"session_history,omitempty"` - Extra null.JSON `json:"extra,omitempty"` - - RestartParticipants bool `json:"restart_participants"` - IncludeActive bool `json:"include_active"` - - IsLast bool `json:"is_last,omitempty"` - TotalContacts int `json:"total_contacts"` - - CreatedBy string `json:"created_by"` // deprecated + b, ok := value.([]byte) + if !ok { + return errors.New("failed type assertion to []byte") } + return json.Unmarshal(b, &e) } -func (b *FlowStartBatch) StartID() StartID { return b.b.StartID } -func (b *FlowStartBatch) StartType() StartType { return b.b.StartType } -func (b *FlowStartBatch) OrgID() OrgID { return b.b.OrgID } -func (b *FlowStartBatch) CreatedByID() UserID { return b.b.CreatedByID } -func (b *FlowStartBatch) FlowID() FlowID { return b.b.FlowID } -func (b *FlowStartBatch) ContactIDs() []ContactID { return b.b.ContactIDs } -func (b *FlowStartBatch) 
ExcludeStartedPreviously() bool { return !b.b.RestartParticipants } -func (b *FlowStartBatch) ExcludeInAFlow() bool { return !b.b.IncludeActive } -func (b *FlowStartBatch) IsLast() bool { return b.b.IsLast } -func (b *FlowStartBatch) TotalContacts() int { return b.b.TotalContacts } - -func (b *FlowStartBatch) ParentSummary() json.RawMessage { return json.RawMessage(b.b.ParentSummary) } -func (b *FlowStartBatch) SessionHistory() json.RawMessage { return json.RawMessage(b.b.SessionHistory) } -func (b *FlowStartBatch) Extra() json.RawMessage { return json.RawMessage(b.b.Extra) } - -func (b *FlowStartBatch) MarshalJSON() ([]byte, error) { return json.Marshal(b.b) } -func (b *FlowStartBatch) UnmarshalJSON(data []byte) error { return json.Unmarshal(data, &b.b) } +func (e Exclusions) Value() (driver.Value, error) { return json.Marshal(e) } // FlowStart represents the top level flow start in our system type FlowStart struct { - s struct { - ID StartID `json:"start_id" db:"id"` - UUID uuids.UUID ` db:"uuid"` - StartType StartType `json:"start_type" db:"start_type"` - OrgID OrgID `json:"org_id" db:"org_id"` - CreatedByID UserID `json:"created_by_id" db:"created_by_id"` - FlowID FlowID `json:"flow_id" db:"flow_id"` - FlowType FlowType `json:"flow_type"` - - URNs []urns.URN `json:"urns,omitempty"` - ContactIDs []ContactID `json:"contact_ids,omitempty"` - GroupIDs []GroupID `json:"group_ids,omitempty"` - ExcludeGroupIDs []GroupID `json:"exclude_group_ids,omitempty"` // used when loading scheduled triggers as flow starts - Query null.String `json:"query,omitempty" db:"query"` - CreateContact bool `json:"create_contact"` - - RestartParticipants bool `json:"restart_participants" db:"restart_participants"` - IncludeActive bool `json:"include_active" db:"include_active"` - - Extra null.JSON `json:"extra,omitempty" db:"extra"` - ParentSummary null.JSON `json:"parent_summary,omitempty" db:"parent_summary"` - SessionHistory null.JSON `json:"session_history,omitempty" 
db:"session_history"` - } + ID StartID `json:"start_id" db:"id"` + UUID uuids.UUID `json:"-" db:"uuid"` + StartType StartType `json:"start_type" db:"start_type"` + OrgID OrgID `json:"org_id" db:"org_id"` + CreatedByID UserID `json:"created_by_id" db:"created_by_id"` + FlowID FlowID `json:"flow_id" db:"flow_id"` + FlowType FlowType `json:"flow_type"` + + URNs []urns.URN `json:"urns,omitempty"` + ContactIDs []ContactID `json:"contact_ids,omitempty"` + GroupIDs []GroupID `json:"group_ids,omitempty"` + ExcludeGroupIDs []GroupID `json:"exclude_group_ids,omitempty"` // used when loading scheduled triggers as flow starts + Query null.String `json:"query,omitempty" db:"query"` + CreateContact bool `json:"create_contact"` + Exclusions Exclusions `json:"exclusions" db:"exclusions"` + + Params null.JSON `json:"params,omitempty" db:"params"` + ParentSummary null.JSON `json:"parent_summary,omitempty" db:"parent_summary"` + SessionHistory null.JSON `json:"session_history,omitempty" db:"session_history"` } -func (s *FlowStart) ID() StartID { return s.s.ID } -func (s *FlowStart) OrgID() OrgID { return s.s.OrgID } -func (s *FlowStart) Type() StartType { return s.s.StartType } -func (s *FlowStart) CreatedByID() UserID { return s.s.CreatedByID } -func (s *FlowStart) FlowID() FlowID { return s.s.FlowID } -func (s *FlowStart) FlowType() FlowType { return s.s.FlowType } +// NewFlowStart creates a new flow start objects for the passed in parameters +func NewFlowStart(orgID OrgID, startType StartType, flowType FlowType, flowID FlowID) *FlowStart { + return &FlowStart{ + UUID: uuids.New(), + OrgID: orgID, + StartType: startType, + FlowType: flowType, + FlowID: flowID, + } +} -func (s *FlowStart) GroupIDs() []GroupID { return s.s.GroupIDs } func (s *FlowStart) WithGroupIDs(groupIDs []GroupID) *FlowStart { - s.s.GroupIDs = groupIDs + s.GroupIDs = groupIDs return s } -func (s *FlowStart) ExcludeGroupIDs() []GroupID { return s.s.ExcludeGroupIDs } + func (s *FlowStart) 
WithExcludeGroupIDs(groupIDs []GroupID) *FlowStart { - s.s.ExcludeGroupIDs = groupIDs + s.ExcludeGroupIDs = groupIDs return s } -func (s *FlowStart) ContactIDs() []ContactID { return s.s.ContactIDs } func (s *FlowStart) WithContactIDs(contactIDs []ContactID) *FlowStart { - s.s.ContactIDs = contactIDs + s.ContactIDs = contactIDs return s } -func (s *FlowStart) URNs() []urns.URN { return s.s.URNs } func (s *FlowStart) WithURNs(us []urns.URN) *FlowStart { - s.s.URNs = us + s.URNs = us return s } -func (s *FlowStart) Query() string { return string(s.s.Query) } func (s *FlowStart) WithQuery(query string) *FlowStart { - s.s.Query = null.String(query) + s.Query = null.String(query) return s } -func (s *FlowStart) ExcludeStartedPreviously() bool { return !s.s.RestartParticipants } func (s *FlowStart) WithExcludeStartedPreviously(exclude bool) *FlowStart { - s.s.RestartParticipants = !exclude + s.Exclusions.StartedPreviously = exclude return s } -func (s *FlowStart) ExcludeInAFlow() bool { return !s.s.IncludeActive } func (s *FlowStart) WithExcludeInAFlow(exclude bool) *FlowStart { - s.s.IncludeActive = !exclude + s.Exclusions.InAFlow = exclude return s } -func (s *FlowStart) CreateContact() bool { return s.s.CreateContact } func (s *FlowStart) WithCreateContact(create bool) *FlowStart { - s.s.CreateContact = create + s.CreateContact = create return s } -func (s *FlowStart) ParentSummary() json.RawMessage { return json.RawMessage(s.s.ParentSummary) } func (s *FlowStart) WithParentSummary(sum json.RawMessage) *FlowStart { - s.s.ParentSummary = null.JSON(sum) + s.ParentSummary = null.JSON(sum) return s } -func (s *FlowStart) SessionHistory() json.RawMessage { return json.RawMessage(s.s.SessionHistory) } func (s *FlowStart) WithSessionHistory(history json.RawMessage) *FlowStart { - s.s.SessionHistory = null.JSON(history) + s.SessionHistory = null.JSON(history) return s } -func (s *FlowStart) Extra() json.RawMessage { return json.RawMessage(s.s.Extra) } -func (s *FlowStart) 
WithExtra(extra json.RawMessage) *FlowStart { - s.s.Extra = null.JSON(extra) +func (s *FlowStart) WithParams(params json.RawMessage) *FlowStart { + s.Params = null.JSON(params) return s } -func (s *FlowStart) MarshalJSON() ([]byte, error) { return json.Marshal(s.s) } -func (s *FlowStart) UnmarshalJSON(data []byte) error { return json.Unmarshal(data, &s.s) } +// MarkStartStarted sets the status for the passed in flow start to S and updates the contact count on it +func MarkStartStarted(ctx context.Context, db Queryer, startID StartID, contactCount int) error { + _, err := db.ExecContext(ctx, "UPDATE flows_flowstart SET status = 'S', contact_count = $2, modified_on = NOW() WHERE id = $1", startID, contactCount) + return errors.Wrapf(err, "error setting start as started") +} -// GetFlowStartAttributes gets the basic attributes for the passed in start id, this includes ONLY its id, uuid, flow_id and extra +// MarkStartComplete sets the status for the passed in flow start +func MarkStartComplete(ctx context.Context, db Queryer, startID StartID) error { + _, err := db.ExecContext(ctx, "UPDATE flows_flowstart SET status = 'C', modified_on = NOW() WHERE id = $1", startID) + return errors.Wrapf(err, "error marking flow start as complete") +} + +// MarkStartFailed sets the status for the passed in flow start to F +func MarkStartFailed(ctx context.Context, db Queryer, startID StartID) error { + _, err := db.ExecContext(ctx, "UPDATE flows_flowstart SET status = 'F', modified_on = NOW() WHERE id = $1", startID) + return errors.Wrapf(err, "error setting flow start as failed") +} + +// GetFlowStartAttributes gets the basic attributes for the passed in start id, this includes ONLY its id, uuid, flow_id and params func GetFlowStartAttributes(ctx context.Context, db Queryer, startID StartID) (*FlowStart, error) { start := &FlowStart{} - err := db.GetContext(ctx, &start.s, `SELECT id, uuid, flow_id, extra, parent_summary, session_history FROM flows_flowstart WHERE id = $1`, startID) 
+ err := db.GetContext(ctx, start, `SELECT id, uuid, flow_id, params, parent_summary, session_history FROM flows_flowstart WHERE id = $1`, startID) if err != nil { return nil, errors.Wrapf(err, "unable to load start attributes for id: %d", startID) } return start, nil } -// NewFlowStart creates a new flow start objects for the passed in parameters -func NewFlowStart(orgID OrgID, startType StartType, flowType FlowType, flowID FlowID) *FlowStart { - s := &FlowStart{} - s.s.UUID = uuids.New() - s.s.OrgID = orgID - s.s.StartType = startType - s.s.FlowType = flowType - s.s.FlowID = flowID - s.s.RestartParticipants = true - s.s.IncludeActive = true - return s -} - type startContact struct { - StartID StartID `db:"start_id"` + StartID StartID `db:"flowstart_id"` ContactID ContactID `db:"contact_id"` } type startGroup struct { - StartID StartID `db:"start_id"` + StartID StartID `db:"flowstart_id"` GroupID GroupID `db:"contactgroup_id"` } // InsertFlowStarts inserts all the passed in starts func InsertFlowStarts(ctx context.Context, db Queryer, starts []*FlowStart) error { - is := make([]interface{}, len(starts)) - for i, s := range starts { - // populate UUID if needed - if s.s.UUID == "" { - s.s.UUID = uuids.New() - } - - is[i] = &s.s - } - // insert our starts - err := BulkQuery(ctx, "inserting flow start", db, sqlInsertStart, is) + err := BulkQuery(ctx, "inserting flow start", db, sqlInsertStart, starts) if err != nil { return errors.Wrapf(err, "error inserting flow starts") } // build up all our contact associations - contacts := make([]interface{}, 0, len(starts)) + contacts := make([]*startContact, 0, len(starts)) for _, start := range starts { - for _, contactID := range start.ContactIDs() { - contacts = append(contacts, &startContact{ - StartID: start.ID(), - ContactID: contactID, - }) + for _, contactID := range start.ContactIDs { + contacts = append(contacts, &startContact{StartID: start.ID, ContactID: contactID}) } } @@ -301,13 +223,10 @@ func 
InsertFlowStarts(ctx context.Context, db Queryer, starts []*FlowStart) erro } // build up all our group associations - groups := make([]interface{}, 0, len(starts)) + groups := make([]*startGroup, 0, len(starts)) for _, start := range starts { - for _, groupID := range start.GroupIDs() { - groups = append(groups, &startGroup{ - StartID: start.ID(), - GroupID: groupID, - }) + for _, groupID := range start.GroupIDs { + groups = append(groups, &startGroup{StartID: start.ID, GroupID: groupID}) } } @@ -322,56 +241,52 @@ func InsertFlowStarts(ctx context.Context, db Queryer, starts []*FlowStart) erro const sqlInsertStart = ` INSERT INTO - flows_flowstart(uuid, org_id, flow_id, start_type, created_on, modified_on, restart_participants, include_active, query, status, extra, parent_summary, session_history) - VALUES(:uuid, :org_id, :flow_id, :start_type, NOW(), NOW(), :restart_participants, :include_active, :query, 'P', :extra, :parent_summary, :session_history) + flows_flowstart(uuid, org_id, flow_id, start_type, created_on, modified_on, query, exclusions, status, params, parent_summary, session_history) + VALUES(:uuid, :org_id, :flow_id, :start_type, NOW(), NOW(), :query, :exclusions, 'P', :params, :parent_summary, :session_history) RETURNING id ` const sqlInsertStartContact = ` -INSERT INTO flows_flowstart_contacts(flowstart_id, contact_id) VALUES(:start_id, :contact_id)` +INSERT INTO flows_flowstart_contacts(flowstart_id, contact_id) VALUES(:flowstart_id, :contact_id)` const sqlInsertStartGroup = ` -INSERT INTO flows_flowstart_groups(flowstart_id, contactgroup_id) VALUES(:start_id, :contactgroup_id)` +INSERT INTO flows_flowstart_groups(flowstart_id, contactgroup_id) VALUES(:flowstart_id, :contactgroup_id)` // CreateBatch creates a batch for this start using the passed in contact ids func (s *FlowStart) CreateBatch(contactIDs []ContactID, last bool, totalContacts int) *FlowStartBatch { - b := &FlowStartBatch{} - b.b.StartID = s.ID() - b.b.StartType = s.s.StartType - 
b.b.OrgID = s.OrgID() - b.b.FlowID = s.FlowID() - b.b.FlowType = s.FlowType() - b.b.ContactIDs = contactIDs - b.b.RestartParticipants = s.s.RestartParticipants - b.b.IncludeActive = s.s.IncludeActive - b.b.ParentSummary = null.JSON(s.ParentSummary()) - b.b.SessionHistory = null.JSON(s.SessionHistory()) - b.b.Extra = null.JSON(s.Extra()) - b.b.IsLast = last - b.b.TotalContacts = totalContacts - b.b.CreatedByID = s.s.CreatedByID - return b -} - -// MarshalJSON marshals into JSON. 0 values will become null -func (i StartID) MarshalJSON() ([]byte, error) { - return null.Int(i).MarshalJSON() -} - -// UnmarshalJSON unmarshals from JSON. null values become 0 -func (i *StartID) UnmarshalJSON(b []byte) error { - return null.UnmarshalInt(b, (*null.Int)(i)) -} - -// Value returns the db value, null is returned for 0 -func (i StartID) Value() (driver.Value, error) { - return null.Int(i).Value() + return &FlowStartBatch{ + StartID: s.ID, + StartType: s.StartType, + OrgID: s.OrgID, + FlowID: s.FlowID, + FlowType: s.FlowType, + ContactIDs: contactIDs, + ParentSummary: s.ParentSummary, + SessionHistory: s.SessionHistory, + Params: s.Params, + CreatedByID: s.CreatedByID, + IsLast: last, + TotalContacts: totalContacts, + } } -// Scan scans from the db value. 
null values become 0 -func (i *StartID) Scan(value interface{}) error { - return null.ScanInt(value, (*null.Int)(i)) +// FlowStartBatch represents a single flow batch that needs to be started +type FlowStartBatch struct { + StartID StartID `json:"start_id"` + StartType StartType `json:"start_type"` + OrgID OrgID `json:"org_id"` + CreatedByID UserID `json:"created_by_id"` + FlowID FlowID `json:"flow_id"` + FlowType FlowType `json:"flow_type"` + ContactIDs []ContactID `json:"contact_ids"` + + Params null.JSON `json:"params,omitempty"` + ParentSummary null.JSON `json:"parent_summary,omitempty"` + SessionHistory null.JSON `json:"session_history,omitempty"` + + IsLast bool `json:"is_last,omitempty"` + TotalContacts int `json:"total_contacts"` } // ReadSessionHistory reads a session history from the given JSON diff --git a/core/models/starts_test.go b/core/models/starts_test.go index b9e23fb8e..a3aa93f47 100644 --- a/core/models/starts_test.go +++ b/core/models/starts_test.go @@ -13,21 +13,24 @@ import ( "github.com/nyaruka/mailroom/core/models" "github.com/nyaruka/mailroom/testsuite" "github.com/nyaruka/mailroom/testsuite/testdata" - + "github.com/nyaruka/null/v2" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) func TestStarts(t *testing.T) { - ctx, _, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() + + defer testsuite.Reset(testsuite.ResetData) - startID := testdata.InsertFlowStart(db, testdata.Org1, testdata.SingleMessage, []*testdata.Contact{testdata.Cathy, testdata.Bob}) + startID := testdata.InsertFlowStart(rt, testdata.Org1, testdata.SingleMessage, []*testdata.Contact{testdata.Cathy, testdata.Bob}) startJSON := []byte(fmt.Sprintf(`{ "start_id": %d, "start_type": "M", "org_id": %d, "created_by_id": %d, + "exclusions": {}, "flow_id": %d, "flow_type": "M", "contact_ids": [%d, %d], @@ -35,65 +38,61 @@ func TestStarts(t *testing.T) { "exclude_group_ids": [%d], "urns": ["tel:+12025550199"], "query": null, - "restart_participants": 
true, - "include_active": true, + "params": {"foo": "bar"}, "parent_summary": {"uuid": "b65b1a22-db6d-4f5a-9b3d-7302368a82e6"}, - "session_history": {"parent_uuid": "532a3899-492f-4ffe-aed7-e75ad524efab", "ancestors": 3, "ancestors_since_input": 1}, - "extra": {"foo": "bar"} + "session_history": {"parent_uuid": "532a3899-492f-4ffe-aed7-e75ad524efab", "ancestors": 3, "ancestors_since_input": 1} }`, startID, testdata.Org1.ID, testdata.Admin.ID, testdata.SingleMessage.ID, testdata.Cathy.ID, testdata.Bob.ID, testdata.DoctorsGroup.ID, testdata.TestersGroup.ID)) start := &models.FlowStart{} err := json.Unmarshal(startJSON, start) require.NoError(t, err) - assert.Equal(t, startID, start.ID()) - assert.Equal(t, testdata.Org1.ID, start.OrgID()) - assert.Equal(t, testdata.Admin.ID, start.CreatedByID()) - assert.Equal(t, testdata.SingleMessage.ID, start.FlowID()) - assert.Equal(t, models.FlowTypeMessaging, start.FlowType()) - assert.Equal(t, "", start.Query()) - assert.False(t, start.ExcludeStartedPreviously()) - assert.False(t, start.ExcludeInAFlow()) - assert.Equal(t, []models.ContactID{testdata.Cathy.ID, testdata.Bob.ID}, start.ContactIDs()) - assert.Equal(t, []models.GroupID{testdata.DoctorsGroup.ID}, start.GroupIDs()) - assert.Equal(t, []models.GroupID{testdata.TestersGroup.ID}, start.ExcludeGroupIDs()) - - assert.Equal(t, json.RawMessage(`{"uuid": "b65b1a22-db6d-4f5a-9b3d-7302368a82e6"}`), start.ParentSummary()) - assert.Equal(t, json.RawMessage(`{"parent_uuid": "532a3899-492f-4ffe-aed7-e75ad524efab", "ancestors": 3, "ancestors_since_input": 1}`), start.SessionHistory()) - assert.Equal(t, json.RawMessage(`{"foo": "bar"}`), start.Extra()) - - err = models.MarkStartStarted(ctx, db, startID, 2, []models.ContactID{testdata.George.ID}) + assert.Equal(t, startID, start.ID) + assert.Equal(t, testdata.Org1.ID, start.OrgID) + assert.Equal(t, testdata.Admin.ID, start.CreatedByID) + assert.Equal(t, testdata.SingleMessage.ID, start.FlowID) + assert.Equal(t, 
models.FlowTypeMessaging, start.FlowType) + assert.Equal(t, null.NullString, start.Query) + assert.False(t, start.Exclusions.StartedPreviously) + assert.False(t, start.Exclusions.InAFlow) + assert.Equal(t, []models.ContactID{testdata.Cathy.ID, testdata.Bob.ID}, start.ContactIDs) + assert.Equal(t, []models.GroupID{testdata.DoctorsGroup.ID}, start.GroupIDs) + assert.Equal(t, []models.GroupID{testdata.TestersGroup.ID}, start.ExcludeGroupIDs) + + assert.Equal(t, null.JSON(`{"uuid": "b65b1a22-db6d-4f5a-9b3d-7302368a82e6"}`), start.ParentSummary) + assert.Equal(t, null.JSON(`{"parent_uuid": "532a3899-492f-4ffe-aed7-e75ad524efab", "ancestors": 3, "ancestors_since_input": 1}`), start.SessionHistory) + assert.Equal(t, null.JSON(`{"foo": "bar"}`), start.Params) + + err = models.MarkStartStarted(ctx, rt.DB, startID, 2) require.NoError(t, err) - assertdb.Query(t, db, `SELECT count(*) FROM flows_flowstart WHERE id = $1 AND status = 'S' AND contact_count = 2`, startID).Returns(1) - assertdb.Query(t, db, `SELECT count(*) FROM flows_flowstart_contacts WHERE flowstart_id = $1`, startID).Returns(3) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM flows_flowstart WHERE id = $1 AND status = 'S' AND contact_count = 2`, startID).Returns(1) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM flows_flowstart_contacts WHERE flowstart_id = $1`, startID).Returns(2) batch := start.CreateBatch([]models.ContactID{testdata.Cathy.ID, testdata.Bob.ID}, false, 3) - assert.Equal(t, startID, batch.StartID()) - assert.Equal(t, models.StartTypeManual, batch.StartType()) - assert.Equal(t, testdata.SingleMessage.ID, batch.FlowID()) - assert.Equal(t, []models.ContactID{testdata.Cathy.ID, testdata.Bob.ID}, batch.ContactIDs()) - assert.False(t, batch.ExcludeStartedPreviously()) - assert.False(t, batch.ExcludeInAFlow()) - assert.Equal(t, testdata.Admin.ID, batch.CreatedByID()) - assert.False(t, batch.IsLast()) - assert.Equal(t, 3, batch.TotalContacts()) - - assert.Equal(t, json.RawMessage(`{"uuid": 
"b65b1a22-db6d-4f5a-9b3d-7302368a82e6"}`), batch.ParentSummary()) - assert.Equal(t, json.RawMessage(`{"parent_uuid": "532a3899-492f-4ffe-aed7-e75ad524efab", "ancestors": 3, "ancestors_since_input": 1}`), batch.SessionHistory()) - assert.Equal(t, json.RawMessage(`{"foo": "bar"}`), batch.Extra()) - - history, err := models.ReadSessionHistory(batch.SessionHistory()) + assert.Equal(t, startID, batch.StartID) + assert.Equal(t, models.StartTypeManual, batch.StartType) + assert.Equal(t, testdata.SingleMessage.ID, batch.FlowID) + assert.Equal(t, []models.ContactID{testdata.Cathy.ID, testdata.Bob.ID}, batch.ContactIDs) + assert.Equal(t, testdata.Admin.ID, batch.CreatedByID) + assert.False(t, batch.IsLast) + assert.Equal(t, 3, batch.TotalContacts) + + assert.Equal(t, null.JSON(`{"uuid": "b65b1a22-db6d-4f5a-9b3d-7302368a82e6"}`), batch.ParentSummary) + assert.Equal(t, null.JSON(`{"parent_uuid": "532a3899-492f-4ffe-aed7-e75ad524efab", "ancestors": 3, "ancestors_since_input": 1}`), batch.SessionHistory) + assert.Equal(t, null.JSON(`{"foo": "bar"}`), batch.Params) + + history, err := models.ReadSessionHistory(batch.SessionHistory) assert.NoError(t, err) assert.Equal(t, flows.SessionUUID("532a3899-492f-4ffe-aed7-e75ad524efab"), history.ParentUUID) _, err = models.ReadSessionHistory([]byte(`{`)) assert.EqualError(t, err, "unexpected end of JSON input") - err = models.MarkStartComplete(ctx, db, startID) + err = models.MarkStartComplete(ctx, rt.DB, startID) require.NoError(t, err) - assertdb.Query(t, db, `SELECT count(*) FROM flows_flowstart WHERE id = $1 AND status = 'C'`, startID).Returns(1) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM flows_flowstart WHERE id = $1 AND status = 'C'`, startID).Returns(1) } func TestStartsBuilding(t *testing.T) { @@ -105,24 +104,31 @@ func TestStartsBuilding(t *testing.T) { WithExcludeGroupIDs([]models.GroupID{testdata.TestersGroup.ID}). WithContactIDs([]models.ContactID{testdata.Cathy.ID, testdata.Bob.ID}). WithQuery(`language != ""`). 
- WithCreateContact(true) + WithCreateContact(true). + WithParams(json.RawMessage(`{"foo": "bar"}`)) marshalled, err := jsonx.Marshal(start) require.NoError(t, err) test.AssertEqualJSON(t, []byte(fmt.Sprintf(`{ - "UUID": "1ae96956-4b34-433e-8d1a-f05fe6923d6d", "contact_ids": [%d, %d], "create_contact": true, "created_by_id": null, "exclude_group_ids": [%d], + "exclusions": { + "in_a_flow": false, + "non_active": false, + "not_seen_since_days": 0, + "started_previously": false + }, "flow_id": %d, "flow_type": "M", "group_ids": [%d], - "include_active": true, "org_id": 1, + "params": { + "foo": "bar" + }, "query": "language != \"\"", - "restart_participants": true, "start_id": null, "start_type": "M" }`, testdata.Cathy.ID, testdata.Bob.ID, testdata.TestersGroup.ID, testdata.Favorites.ID, testdata.DoctorsGroup.ID)), marshalled) diff --git a/core/models/teams.go b/core/models/teams.go index a6aeec210..63830785f 100644 --- a/core/models/teams.go +++ b/core/models/teams.go @@ -3,7 +3,7 @@ package models import ( "database/sql/driver" - "github.com/nyaruka/null" + "github.com/nyaruka/null/v2" ) const ( @@ -12,7 +12,7 @@ const ( ) // TeamID is our type for team ids, which can be null -type TeamID null.Int +type TeamID int type TeamUUID string @@ -22,22 +22,7 @@ type Team struct { Name string `json:"name"` } -// MarshalJSON marshals into JSON. 0 values will become null -func (i TeamID) MarshalJSON() ([]byte, error) { - return null.Int(i).MarshalJSON() -} - -// UnmarshalJSON unmarshals from JSON. null values become 0 -func (i *TeamID) UnmarshalJSON(b []byte) error { - return null.UnmarshalInt(b, (*null.Int)(i)) -} - -// Value returns the db value, null is returned for 0 -func (i TeamID) Value() (driver.Value, error) { - return null.Int(i).Value() -} - -// Scan scans from the db value. 
null values become 0 -func (i *TeamID) Scan(value interface{}) error { - return null.ScanInt(value, (*null.Int)(i)) -} +func (i *TeamID) Scan(value any) error { return null.ScanInt(value, i) } +func (i TeamID) Value() (driver.Value, error) { return null.IntValue(i) } +func (i *TeamID) UnmarshalJSON(b []byte) error { return null.UnmarshalInt(b, i) } +func (i TeamID) MarshalJSON() ([]byte, error) { return null.MarshalInt(i) } diff --git a/core/models/templates.go b/core/models/templates.go index 6e51781b7..1fdc20285 100644 --- a/core/models/templates.go +++ b/core/models/templates.go @@ -9,7 +9,7 @@ import ( "github.com/nyaruka/gocommon/dbutil" "github.com/nyaruka/goflow/assets" "github.com/nyaruka/goflow/envs" - "github.com/nyaruka/null" + "github.com/nyaruka/null/v2" "github.com/pkg/errors" "github.com/sirupsen/logrus" ) diff --git a/core/models/templates_test.go b/core/models/templates_test.go index 5b494067f..1d33e1dcb 100644 --- a/core/models/templates_test.go +++ b/core/models/templates_test.go @@ -13,7 +13,7 @@ import ( ) func TestTemplates(t *testing.T) { - ctx, rt, _, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() oa, err := models.GetOrgAssetsWithRefresh(ctx, rt, testdata.Org1.ID, models.RefreshTemplates) require.NoError(t, err) diff --git a/core/models/testdata/imports.json b/core/models/testdata/imports.json index 4551c47fa..c52a7e5e6 100644 --- a/core/models/testdata/imports.json +++ b/core/models/testdata/imports.json @@ -26,6 +26,7 @@ "uuid": "1ae96956-4b34-433e-8d1a-f05fe6923d6d", "name": "Ann", "language": "", + "status": "active", "urns": [ "tel:+16055700001?id=10000&priority=1000" ], @@ -36,6 +37,7 @@ "uuid": "e7187099-7d38-4f60-955c-325957214c42", "name": "Bob", "language": "", + "status": "active", "urns": [ "tel:+16055700002?id=10001&priority=1000" ], @@ -73,6 +75,7 @@ "uuid": "1ae96956-4b34-433e-8d1a-f05fe6923d6d", "name": "Ann", "language": "", + "status": "active", "urns": [ "tel:+16055700001?id=10000&priority=1000" ], @@ -83,6 +86,7 
@@ "uuid": "e7187099-7d38-4f60-955c-325957214c42", "name": "Bob", "language": "", + "status": "active", "urns": [ "tel:+16055700002?id=10001&priority=1000" ], @@ -93,6 +97,7 @@ "uuid": "59d74b86-3e2f-4a93-aece-b05d2fdcde0c", "name": "Cat", "language": "spa", + "status": "active", "urns": [ "tel:+16055700003?id=10002&priority=1000", "tel:+593979000001?id=10003&priority=999" @@ -142,6 +147,7 @@ "uuid": "1ae96956-4b34-433e-8d1a-f05fe6923d6d", "name": "Ann", "language": "", + "status": "active", "urns": [ "tel:+16055700001?id=10000&priority=1000" ], @@ -152,6 +158,7 @@ "uuid": "e7187099-7d38-4f60-955c-325957214c42", "name": "Bob", "language": "", + "status": "active", "urns": [ "tel:+16055700002?id=10001&priority=1000", "tel:+593979000002?id=10004&priority=999" @@ -165,6 +172,7 @@ "uuid": "59d74b86-3e2f-4a93-aece-b05d2fdcde0c", "name": "Cat", "language": "spa", + "status": "active", "urns": [ "tel:+16055700003?id=10002&priority=1000", "tel:+593979000001?id=10003&priority=999" @@ -221,8 +229,8 @@ "num_errored": 1, "errors": [ { - "record": 0, "row": 1, + "record": 0, "message": "Unable to find or create contact with URNs tel:+16055700001, tel:+593979000001" } ], @@ -231,6 +239,7 @@ "uuid": "1ae96956-4b34-433e-8d1a-f05fe6923d6d", "name": "Ann", "language": "", + "status": "active", "urns": [ "tel:+16055700001?id=10000&priority=1000" ], @@ -241,6 +250,7 @@ "uuid": "e7187099-7d38-4f60-955c-325957214c42", "name": "Bob", "language": "", + "status": "active", "urns": [ "tel:+16055700002?id=10001&priority=1000", "tel:+593979000002?id=10004&priority=999" @@ -255,6 +265,7 @@ "uuid": "59d74b86-3e2f-4a93-aece-b05d2fdcde0c", "name": "Cat", "language": "spa", + "status": "active", "urns": [ "tel:+16055700003?id=10002&priority=1000", "tel:+593979000001?id=10003&priority=999" @@ -297,8 +308,8 @@ "num_errored": 1, "errors": [ { - "record": 2, "row": 4, + "record": 2, "message": "Unable to find contact with UUID '68dc10e7-19ce-4052-b202-7c1b49e69ba0'" } ], @@ -307,6 +318,7 @@ "uuid": 
"1ae96956-4b34-433e-8d1a-f05fe6923d6d", "name": "Anne", "language": "", + "status": "active", "urns": [ "tel:+16055700001?id=10000&priority=1000" ], @@ -317,6 +329,7 @@ "uuid": "e7187099-7d38-4f60-955c-325957214c42", "name": "Bob", "language": "kin", + "status": "active", "urns": [ "tel:+16055700002?id=10001&priority=1000", "tel:+593979000002?id=10004&priority=999" @@ -331,6 +344,7 @@ "uuid": "59d74b86-3e2f-4a93-aece-b05d2fdcde0c", "name": "Cat", "language": "spa", + "status": "active", "urns": [ "tel:+16055700003?id=10002&priority=1000", "tel:+593979000001?id=10003&priority=999" @@ -367,6 +381,7 @@ "uuid": "1ae96956-4b34-433e-8d1a-f05fe6923d6d", "name": "Anne", "language": "", + "status": "active", "urns": [ "tel:+16055700001?id=10000&priority=1000", "tel:+16055700002?id=10001&priority=999" @@ -378,6 +393,7 @@ "uuid": "e7187099-7d38-4f60-955c-325957214c42", "name": "Bob", "language": "kin", + "status": "active", "urns": [ "tel:+593979000002?id=10004&priority=999" ], @@ -391,6 +407,7 @@ "uuid": "59d74b86-3e2f-4a93-aece-b05d2fdcde0c", "name": "Cat", "language": "spa", + "status": "active", "urns": [ "tel:+16055700003?id=10002&priority=1000", "tel:+593979000001?id=10003&priority=999" @@ -421,8 +438,8 @@ "num_errored": 1, "errors": [ { - "record": 0, "row": 1, + "record": 0, "message": "Unable to find contact with UUID 'f7a8016d-69a6-434b-aae7-5142ce4a98ba'" } ], @@ -431,6 +448,7 @@ "uuid": "1ae96956-4b34-433e-8d1a-f05fe6923d6d", "name": "Anne", "language": "", + "status": "active", "urns": [ "tel:+16055700001?id=10000&priority=1000", "tel:+16055700002?id=10001&priority=999" @@ -442,6 +460,7 @@ "uuid": "e7187099-7d38-4f60-955c-325957214c42", "name": "Bob", "language": "kin", + "status": "active", "urns": [ "tel:+593979000002?id=10004&priority=999" ], @@ -455,6 +474,7 @@ "uuid": "59d74b86-3e2f-4a93-aece-b05d2fdcde0c", "name": "Cat", "language": "spa", + "status": "active", "urns": [ "tel:+16055700003?id=10002&priority=1000", "tel:+593979000001?id=10003&priority=999" @@ 
-494,8 +514,8 @@ "num_errored": 1, "errors": [ { - "record": 1, "row": 2, + "record": 1, "message": "Unable to find or create contact with URNs xyz:1234567" } ], @@ -504,6 +524,7 @@ "uuid": "1ae96956-4b34-433e-8d1a-f05fe6923d6d", "name": "Anne", "language": "", + "status": "active", "urns": [ "tel:+16055700001?id=10000&priority=1000", "tel:+16055700002?id=10001&priority=999", @@ -516,6 +537,7 @@ "uuid": "e7187099-7d38-4f60-955c-325957214c42", "name": "Bob", "language": "kin", + "status": "active", "urns": [ "tel:+593979000002?id=10004&priority=999" ], @@ -529,6 +551,7 @@ "uuid": "59d74b86-3e2f-4a93-aece-b05d2fdcde0c", "name": "Cat", "language": "spa", + "status": "active", "urns": [ "tel:+16055700003?id=10002&priority=1000", "tel:+593979000001?id=10003&priority=999" @@ -543,5 +566,111 @@ ] } ] + }, + { + "description": "contacts with non-active statuses", + "specs": [ + { + "uuid": "1ae96956-4b34-433e-8d1a-f05fe6923d6d", + "name": "Anne", + "status": "archived", + "_import_row": 1 + }, + { + "name": "Blake", + "urns": [ + "tel:+16055700007" + ], + "status": "blocked", + "_import_row": 2 + }, + { + "name": "Ivan", + "urns": [ + "tel:+16055700008" + ], + "status": "invalid", + "_import_row": 3 + } + ], + "num_created": 2, + "num_updated": 1, + "num_errored": 0, + "errors": [ + { + "row": 3, + "record": 2, + "message": "'invalid' is not a valid status" + } + ], + "contacts": [ + { + "uuid": "1ae96956-4b34-433e-8d1a-f05fe6923d6d", + "name": "Anne", + "language": "", + "status": "archived", + "urns": [ + "tel:+16055700001?id=10000&priority=1000", + "tel:+16055700002?id=10001&priority=999", + "tel:+16055700005?id=10006&priority=998" + ], + "fields": {}, + "groups": [] + }, + { + "uuid": "e7187099-7d38-4f60-955c-325957214c42", + "name": "Bob", + "language": "kin", + "status": "active", + "urns": [ + "tel:+593979000002?id=10004&priority=999" + ], + "fields": { + "age": "28", + "joined": "2020-01-01T10:45:30.000000Z" + }, + "groups": [] + }, + { + "uuid": 
"59d74b86-3e2f-4a93-aece-b05d2fdcde0c", + "name": "Cat", + "language": "spa", + "status": "active", + "urns": [ + "tel:+16055700003?id=10002&priority=1000", + "tel:+593979000001?id=10003&priority=999" + ], + "fields": { + "age": "39", + "joined": "2020-02-01T17:15:30.000000Z" + }, + "groups": [ + "5e9d8fab-5e7e-4f51-b533-261af5dea70d", + "c153e265-f7c9-4539-9dbc-9b358714b638" + ] + }, + { + "uuid": "9688d21d-95aa-4bed-afc7-f31b35731a3d", + "name": "Blake", + "language": "", + "status": "blocked", + "urns": [ + "tel:+16055700007?id=10007&priority=1000" + ], + "fields": {}, + "groups": [] + }, + { + "uuid": "297611a6-b583-45c3-8587-d4e530c948f0", + "name": "Ivan", + "language": "", + "status": "active", + "urns": [ + "tel:+16055700008?id=10008&priority=1000" + ], + "fields": {}, + "groups": [] + } + ] } ] \ No newline at end of file diff --git a/core/models/ticket_events.go b/core/models/ticket_events.go index 93a4397d8..d0bdc2f2b 100644 --- a/core/models/ticket_events.go +++ b/core/models/ticket_events.go @@ -6,7 +6,7 @@ import ( "time" "github.com/nyaruka/gocommon/dates" - "github.com/nyaruka/null" + "github.com/nyaruka/null/v2" ) type TicketEventID int @@ -29,7 +29,7 @@ type TicketEvent struct { TicketID TicketID `json:"ticket_id" db:"ticket_id"` EventType TicketEventType `json:"event_type" db:"event_type"` Note null.String `json:"note,omitempty" db:"note"` - TopicID TopicID `json:"topic_id,omitempty" db:"topic_id"` + TopicID TopicID `json:"topic_id,omitempty" db:"topic_id"` AssigneeID UserID `json:"assignee_id,omitempty" db:"assignee_id"` CreatedByID UserID `json:"created_by_id,omitempty" db:"created_by_id"` CreatedOn time.Time `json:"created_on" db:"created_on"` @@ -40,8 +40,8 @@ func NewTicketOpenedEvent(t *Ticket, userID UserID, assigneeID UserID) *TicketEv return newTicketEvent(t, userID, TicketEventTypeOpened, "", NilTopicID, assigneeID) } -func NewTicketAssignedEvent(t *Ticket, userID UserID, assigneeID UserID, note string) *TicketEvent { - return 
newTicketEvent(t, userID, TicketEventTypeAssigned, note, NilTopicID, assigneeID) +func NewTicketAssignedEvent(t *Ticket, userID UserID, assigneeID UserID) *TicketEvent { + return newTicketEvent(t, userID, TicketEventTypeAssigned, "", NilTopicID, assigneeID) } func NewTicketNoteAddedEvent(t *Ticket, userID UserID, note string) *TicketEvent { diff --git a/core/models/ticket_events_test.go b/core/models/ticket_events_test.go index eab35dd73..c3ed0c1c6 100644 --- a/core/models/ticket_events_test.go +++ b/core/models/ticket_events_test.go @@ -8,18 +8,18 @@ import ( "github.com/nyaruka/mailroom/core/models" "github.com/nyaruka/mailroom/testsuite" "github.com/nyaruka/mailroom/testsuite/testdata" - "github.com/nyaruka/null" + "github.com/nyaruka/null/v2" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) func TestTicketEvents(t *testing.T) { - ctx, _, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetData) - ticket := testdata.InsertOpenTicket(db, testdata.Org1, testdata.Cathy, testdata.Mailgun, testdata.DefaultTopic, "Have you seen my cookies?", "17", time.Now(), nil) - modelTicket := ticket.Load(db) + ticket := testdata.InsertOpenTicket(rt, testdata.Org1, testdata.Cathy, testdata.Mailgun, testdata.DefaultTopic, "Have you seen my cookies?", "17", time.Now(), nil) + modelTicket := ticket.Load(rt) e1 := models.NewTicketOpenedEvent(modelTicket, testdata.Admin.ID, testdata.Agent.ID) assert.Equal(t, testdata.Org1.ID, e1.OrgID()) @@ -29,10 +29,9 @@ func TestTicketEvents(t *testing.T) { assert.Equal(t, null.NullString, e1.Note()) assert.Equal(t, testdata.Admin.ID, e1.CreatedByID()) - e2 := models.NewTicketAssignedEvent(modelTicket, testdata.Admin.ID, testdata.Agent.ID, "please handle") + e2 := models.NewTicketAssignedEvent(modelTicket, testdata.Admin.ID, testdata.Agent.ID) assert.Equal(t, models.TicketEventTypeAssigned, e2.EventType()) assert.Equal(t, testdata.Agent.ID, e2.AssigneeID()) - assert.Equal(t, 
null.String("please handle"), e2.Note()) assert.Equal(t, testdata.Admin.ID, e2.CreatedByID()) e3 := models.NewTicketNoteAddedEvent(modelTicket, testdata.Agent.ID, "please handle") @@ -53,10 +52,9 @@ func TestTicketEvents(t *testing.T) { assert.Equal(t, testdata.SupportTopic.ID, e6.TopicID()) assert.Equal(t, testdata.Agent.ID, e6.CreatedByID()) - err := models.InsertTicketEvents(ctx, db, []*models.TicketEvent{e1, e2, e3, e4, e5}) + err := models.InsertTicketEvents(ctx, rt.DB, []*models.TicketEvent{e1, e2, e3, e4, e5}) require.NoError(t, err) - assertdb.Query(t, db, `SELECT count(*) FROM tickets_ticketevent`).Returns(5) - assertdb.Query(t, db, `SELECT assignee_id, note FROM tickets_ticketevent WHERE id = $1`, e2.ID()). - Columns(map[string]interface{}{"assignee_id": int64(testdata.Agent.ID), "note": "please handle"}) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM tickets_ticketevent`).Returns(5) + assertdb.Query(t, rt.DB, `SELECT assignee_id FROM tickets_ticketevent WHERE id = $1`, e2.ID()).Columns(map[string]any{"assignee_id": int64(testdata.Agent.ID)}) } diff --git a/core/models/tickets.go b/core/models/tickets.go index 4ad0ed342..96b4f7199 100644 --- a/core/models/tickets.go +++ b/core/models/tickets.go @@ -7,6 +7,8 @@ import ( "net/http" "time" + "github.com/jmoiron/sqlx" + "github.com/lib/pq" "github.com/nyaruka/gocommon/dates" "github.com/nyaruka/gocommon/dbutil" "github.com/nyaruka/gocommon/httpx" @@ -16,40 +18,22 @@ import ( "github.com/nyaruka/goflow/utils" "github.com/nyaruka/mailroom/core/goflow" "github.com/nyaruka/mailroom/runtime" - "github.com/nyaruka/null" - - "github.com/jmoiron/sqlx" - "github.com/lib/pq" + "github.com/nyaruka/null/v2" "github.com/pkg/errors" "github.com/sirupsen/logrus" ) -type TicketID null.Int +type TicketID int // NilTicketID is our constant for a nil ticket id const NilTicketID = TicketID(0) -// MarshalJSON marshals into JSON. 
0 values will become null -func (i TicketID) MarshalJSON() ([]byte, error) { - return null.Int(i).MarshalJSON() -} +func (i *TicketID) Scan(value any) error { return null.ScanInt(value, i) } +func (i TicketID) Value() (driver.Value, error) { return null.IntValue(i) } +func (i *TicketID) UnmarshalJSON(b []byte) error { return null.UnmarshalInt(b, i) } +func (i TicketID) MarshalJSON() ([]byte, error) { return null.MarshalInt(i) } -// UnmarshalJSON unmarshals from JSON. null values become 0 -func (i *TicketID) UnmarshalJSON(b []byte) error { - return null.UnmarshalInt(b, (*null.Int)(i)) -} - -// Value returns the db value, null is returned for 0 -func (i TicketID) Value() (driver.Value, error) { - return null.Int(i).Value() -} - -// Scan scans from the db value. null values become 0 -func (i *TicketID) Scan(value interface{}) error { - return null.ScanInt(value, (*null.Int)(i)) -} - -type TicketerID null.Int +type TicketerID int type TicketStatus string type TicketDailyCountType string type TicketDailyTimingType string @@ -114,7 +98,7 @@ func NewTicket(uuid flows.TicketUUID, orgID OrgID, userID UserID, flowID FlowID, t.t.TopicID = topicID t.t.Body = body t.t.AssigneeID = assigneeID - t.t.Config = null.NewMap(config) + t.t.Config = null.Map(config) return t } @@ -131,7 +115,8 @@ func (t *Ticket) AssigneeID() UserID { return t.t.AssigneeID } func (t *Ticket) RepliedOn() *time.Time { return t.t.RepliedOn } func (t *Ticket) LastActivityOn() time.Time { return t.t.LastActivityOn } func (t *Ticket) Config(key string) string { - return t.t.Config.GetString(key, "") + v, _ := t.t.Config[key].(string) + return v } func (t *Ticket) OpenedByID() UserID { return t.t.OpenedByID } @@ -187,60 +172,66 @@ func (t *Ticket) ForwardIncoming(ctx context.Context, rt *runtime.Runtime, oa *O return err } -const sqlSelectOpenTickets = ` +const sqlSelectLastOpenTicket = ` SELECT - t.id, - t.uuid, - t.org_id, - t.contact_id, - t.ticketer_id, - t.external_id, - t.status, - t.topic_id, - t.body, 
- t.assignee_id, - t.config, - t.opened_on, - t.opened_by_id, - t.opened_in_id, - t.replied_on, - t.modified_on, - t.closed_on, - t.last_activity_on -FROM - tickets_ticket t -WHERE - t.contact_id = $1 AND t.status = 'O'` - -// LoadOpenTicketsForContact looks up the open tickets for the passed in contact -func LoadOpenTicketsForContact(ctx context.Context, db Queryer, contact *Contact) ([]*Ticket, error) { - return loadTickets(ctx, db, sqlSelectOpenTickets, contact.ID()) + id, + uuid, + org_id, + contact_id, + ticketer_id, + external_id, + status, + topic_id, + body, + assignee_id, + config, + opened_on, + opened_by_id, + opened_in_id, + replied_on, + modified_on, + closed_on, + last_activity_on + FROM tickets_ticket + WHERE contact_id = $1 AND status = 'O' +ORDER BY opened_on DESC + LIMIT 1` + +// LoadOpenTicketForContact looks up the last opened open ticket for the passed in contact +func LoadOpenTicketForContact(ctx context.Context, db Queryer, contact *Contact) (*Ticket, error) { + tickets, err := loadTickets(ctx, db, sqlSelectLastOpenTicket, contact.ID()) + if err != nil { + return nil, err + } + if len(tickets) > 0 { + return tickets[0], nil + } + return nil, nil } const sqlSelectTicketsByID = ` SELECT - t.id, - t.uuid, - t.org_id, - t.contact_id, - t.ticketer_id, - t.external_id, - t.status, - t.topic_id, - t.body, - t.assignee_id, - t.config, - t.opened_on, - t.opened_by_id, - t.opened_in_id, - t.replied_on, - t.modified_on, - t.closed_on, - t.last_activity_on -FROM - tickets_ticket t -WHERE - t.id = ANY($1)` + id, + uuid, + org_id, + contact_id, + ticketer_id, + external_id, + status, + topic_id, + body, + assignee_id, + config, + opened_on, + opened_by_id, + opened_in_id, + replied_on, + modified_on, + closed_on, + last_activity_on + FROM tickets_ticket + WHERE id = ANY($1) +ORDER BY opened_on DESC` // LoadTickets loads all of the tickets with the given ids func LoadTickets(ctx context.Context, db Queryer, ids []TicketID) ([]*Ticket, error) { @@ -401,7 
+392,7 @@ func UpdateTicketExternalID(ctx context.Context, db Queryer, ticket *Ticket, ext func UpdateTicketConfig(ctx context.Context, db Queryer, ticket *Ticket, config map[string]string) error { t := &ticket.t for key, value := range config { - t.Config.Map()[key] = value + t.Config[key] = value } return Exec(ctx, "update ticket config", db, `UPDATE tickets_ticket SET config = $2 WHERE id = $1`, t.ID, t.Config) @@ -428,7 +419,7 @@ UPDATE tickets_ticket WHERE id = ANY($1)` // TicketsAssign assigns the passed in tickets -func TicketsAssign(ctx context.Context, db Queryer, oa *OrgAssets, userID UserID, tickets []*Ticket, assigneeID UserID, note string) (map[*Ticket]*TicketEvent, error) { +func TicketsAssign(ctx context.Context, db Queryer, oa *OrgAssets, userID UserID, tickets []*Ticket, assigneeID UserID) (map[*Ticket]*TicketEvent, error) { ids := make([]TicketID, 0, len(tickets)) events := make([]*TicketEvent, 0, len(tickets)) eventsByTicket := make(map[*Ticket]*TicketEvent, len(tickets)) @@ -453,7 +444,7 @@ func TicketsAssign(ctx context.Context, db Queryer, oa *OrgAssets, userID UserID t.ModifiedOn = now t.LastActivityOn = now - e := NewTicketAssignedEvent(ticket, userID, assigneeID, note) + e := NewTicketAssignedEvent(ticket, userID, assigneeID) events = append(events, e) eventsByTicket[ticket] = e } @@ -803,7 +794,7 @@ func (t *Ticketer) UpdateConfig(ctx context.Context, db Queryer, add map[string] dbMap[key] = value } - return Exec(ctx, "update ticketer config", db, `UPDATE tickets_ticketer SET config = $2 WHERE id = $1`, t.t.ID, null.NewMap(dbMap)) + return Exec(ctx, "update ticketer config", db, `UPDATE tickets_ticketer SET config = $2 WHERE id = $1`, t.t.ID, null.Map(dbMap)) } // TicketService extends the engine's ticket service and adds support for forwarding new incoming messages @@ -905,25 +896,10 @@ func loadTicketers(ctx context.Context, db sqlx.Queryer, orgID OrgID) ([]assets. return ticketers, nil } -// MarshalJSON marshals into JSON. 
0 values will become null -func (i TicketerID) MarshalJSON() ([]byte, error) { - return null.Int(i).MarshalJSON() -} - -// UnmarshalJSON unmarshals from JSON. null values become 0 -func (i *TicketerID) UnmarshalJSON(b []byte) error { - return null.UnmarshalInt(b, (*null.Int)(i)) -} - -// Value returns the db value, null is returned for 0 -func (i TicketerID) Value() (driver.Value, error) { - return null.Int(i).Value() -} - -// Scan scans from the db value. null values become 0 -func (i *TicketerID) Scan(value interface{}) error { - return null.ScanInt(value, (*null.Int)(i)) -} +func (i *TicketerID) Scan(value any) error { return null.ScanInt(value, i) } +func (i TicketerID) Value() (driver.Value, error) { return null.IntValue(i) } +func (i *TicketerID) UnmarshalJSON(b []byte) error { return null.UnmarshalInt(b, i) } +func (i TicketerID) MarshalJSON() ([]byte, error) { return null.MarshalInt(i) } func insertTicketDailyCounts(ctx context.Context, tx Queryer, countType TicketDailyCountType, tz *time.Location, scopeCounts map[string]int) error { return insertDailyCounts(ctx, tx, "tickets_ticketdailycount", countType, tz, scopeCounts) @@ -932,3 +908,34 @@ func insertTicketDailyCounts(ctx context.Context, tx Queryer, countType TicketDa func insertTicketDailyTiming(ctx context.Context, tx Queryer, countType TicketDailyTimingType, tz *time.Location, scope string, duration time.Duration) error { return insertDailyTiming(ctx, tx, "tickets_ticketdailytiming", countType, tz, scope, duration) } + +func RecordTicketReply(ctx context.Context, db Queryer, oa *OrgAssets, ticketID TicketID, userID UserID) error { + firstReplyTime, err := TicketRecordReplied(ctx, db, ticketID, dates.Now()) + if err != nil { + return err + } + + // record reply counts for org, user and team + replyCounts := map[string]int{scopeOrg(oa): 1} + + if userID != NilUserID { + user := oa.UserByID(userID) + if user != nil { + replyCounts[scopeUser(oa, user)] = 1 + if user.Team() != nil { + 
replyCounts[scopeTeam(user.Team())] = 1 + } + } + } + + if err := insertTicketDailyCounts(ctx, db, TicketDailyCountReply, oa.Org().Timezone(), replyCounts); err != nil { + return err + } + + if firstReplyTime >= 0 { + if err := insertTicketDailyTiming(ctx, db, TicketDailyTimingFirstReply, oa.Org().Timezone(), scopeOrg(oa), firstReplyTime); err != nil { + return err + } + } + return nil +} diff --git a/core/models/tickets_test.go b/core/models/tickets_test.go index f54f06aa5..1096c2f59 100644 --- a/core/models/tickets_test.go +++ b/core/models/tickets_test.go @@ -5,29 +5,28 @@ import ( "testing" "time" - "github.com/jmoiron/sqlx" "github.com/nyaruka/gocommon/dates" "github.com/nyaruka/gocommon/dbutil/assertdb" "github.com/nyaruka/gocommon/httpx" "github.com/nyaruka/goflow/flows" "github.com/nyaruka/mailroom/core/models" + "github.com/nyaruka/mailroom/runtime" _ "github.com/nyaruka/mailroom/services/tickets/mailgun" _ "github.com/nyaruka/mailroom/services/tickets/zendesk" "github.com/nyaruka/mailroom/testsuite" "github.com/nyaruka/mailroom/testsuite/testdata" - "github.com/nyaruka/null" - + "github.com/nyaruka/null/v2" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) func TestTicketers(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetAll) // can load directly by UUID - ticketer, err := models.LookupTicketerByUUID(ctx, db, testdata.Zendesk.UUID) + ticketer, err := models.LookupTicketerByUUID(ctx, rt.DB, testdata.Zendesk.UUID) assert.NoError(t, err) assert.Equal(t, testdata.Zendesk.ID, ticketer.ID()) assert.Equal(t, testdata.Zendesk.UUID, ticketer.UUID()) @@ -49,7 +48,7 @@ func TestTicketers(t *testing.T) { assert.Equal(t, "Zendesk (Nyaruka)", ticketer.Name()) assert.Equal(t, "1234-abcd", ticketer.Config("push_id")) - ticketer.UpdateConfig(ctx, db, map[string]string{"new-key": "foo"}, map[string]bool{"push_id": true}) + ticketer.UpdateConfig(ctx, rt.DB, 
map[string]string{"new-key": "foo"}, map[string]bool{"push_id": true}) org1, _ = models.GetOrgAssetsWithRefresh(ctx, rt, testdata.Org1.ID, models.RefreshTicketers) ticketer = org1.TicketerByID(testdata.Zendesk.ID) @@ -60,7 +59,7 @@ func TestTicketers(t *testing.T) { } func TestTickets(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetData) @@ -118,62 +117,62 @@ func TestTickets(t *testing.T) { assert.Equal(t, testdata.Admin.ID, ticket1.AssigneeID()) assert.Equal(t, "", ticket1.Config("xyz")) - err := models.InsertTickets(ctx, db, oa, []*models.Ticket{ticket1, ticket2, ticket3}) + err := models.InsertTickets(ctx, rt.DB, oa, []*models.Ticket{ticket1, ticket2, ticket3}) assert.NoError(t, err) // check all tickets were created - assertdb.Query(t, db, `SELECT count(*) FROM tickets_ticket WHERE status = 'O' AND closed_on IS NULL`).Returns(3) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM tickets_ticket WHERE status = 'O' AND closed_on IS NULL`).Returns(3) // check counts were added - assertTicketDailyCount(t, db, models.TicketDailyCountOpening, fmt.Sprintf("o:%d", testdata.Org1.ID), 3) - assertTicketDailyCount(t, db, models.TicketDailyCountOpening, fmt.Sprintf("o:%d", testdata.Org2.ID), 0) - assertTicketDailyCount(t, db, models.TicketDailyCountAssignment, fmt.Sprintf("o:%d:u:%d", testdata.Org1.ID, testdata.Admin.ID), 2) - assertTicketDailyCount(t, db, models.TicketDailyCountAssignment, fmt.Sprintf("o:%d:u:%d", testdata.Org1.ID, testdata.Editor.ID), 0) + assertTicketDailyCount(t, rt, models.TicketDailyCountOpening, fmt.Sprintf("o:%d", testdata.Org1.ID), 3) + assertTicketDailyCount(t, rt, models.TicketDailyCountOpening, fmt.Sprintf("o:%d", testdata.Org2.ID), 0) + assertTicketDailyCount(t, rt, models.TicketDailyCountAssignment, fmt.Sprintf("o:%d:u:%d", testdata.Org1.ID, testdata.Admin.ID), 2) + assertTicketDailyCount(t, rt, models.TicketDailyCountAssignment, fmt.Sprintf("o:%d:u:%d", testdata.Org1.ID, 
testdata.Editor.ID), 0) // can lookup a ticket by UUID - tk1, err := models.LookupTicketByUUID(ctx, db, "2ef57efc-d85f-4291-b330-e4afe68af5fe") + tk1, err := models.LookupTicketByUUID(ctx, rt.DB, "2ef57efc-d85f-4291-b330-e4afe68af5fe") assert.NoError(t, err) assert.Equal(t, "Where are my cookies?", tk1.Body()) // can lookup a ticket by external ID and ticketer - tk2, err := models.LookupTicketByExternalID(ctx, db, testdata.Zendesk.ID, "EX7869") + tk2, err := models.LookupTicketByExternalID(ctx, rt.DB, testdata.Zendesk.ID, "EX7869") assert.NoError(t, err) assert.Equal(t, "Where are my trousers?", tk2.Body()) // can lookup open tickets by contact org1, _ := models.GetOrgAssets(ctx, rt, testdata.Org1.ID) - cathy, err := models.LoadContact(ctx, db, org1, testdata.Cathy.ID) + cathy, err := models.LoadContact(ctx, rt.DB, org1, testdata.Cathy.ID) require.NoError(t, err) - tks, err := models.LoadOpenTicketsForContact(ctx, db, cathy) + tk, err := models.LoadOpenTicketForContact(ctx, rt.DB, cathy) assert.NoError(t, err) - assert.Equal(t, 1, len(tks)) - assert.Equal(t, "Where are my cookies?", tks[0].Body()) + assert.NotNil(t, tk) + assert.Equal(t, "Where are my cookies?", tk.Body()) } func TestUpdateTicketConfig(t *testing.T) { - ctx, _, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetData) - ticket := testdata.InsertOpenTicket(db, testdata.Org1, testdata.Cathy, testdata.Mailgun, testdata.DefaultTopic, "Where my shoes", "123", time.Now(), nil) - modelTicket := ticket.Load(db) + ticket := testdata.InsertOpenTicket(rt, testdata.Org1, testdata.Cathy, testdata.Mailgun, testdata.DefaultTopic, "Where my shoes", "123", time.Now(), nil) + modelTicket := ticket.Load(rt) // empty configs are null - assertdb.Query(t, db, `SELECT count(*) FROM tickets_ticket WHERE config IS NULL AND id = $1`, ticket.ID).Returns(1) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM tickets_ticket WHERE config IS NULL AND id = $1`, ticket.ID).Returns(1) - 
models.UpdateTicketConfig(ctx, db, modelTicket, map[string]string{"foo": "2352", "bar": "abc"}) + models.UpdateTicketConfig(ctx, rt.DB, modelTicket, map[string]string{"foo": "2352", "bar": "abc"}) - assertdb.Query(t, db, `SELECT count(*) FROM tickets_ticket WHERE config='{"foo": "2352", "bar": "abc"}'::jsonb AND id = $1`, ticket.ID).Returns(1) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM tickets_ticket WHERE config='{"foo": "2352", "bar": "abc"}'::jsonb AND id = $1`, ticket.ID).Returns(1) // updates are additive - models.UpdateTicketConfig(ctx, db, modelTicket, map[string]string{"foo": "6547", "zed": "xyz"}) + models.UpdateTicketConfig(ctx, rt.DB, modelTicket, map[string]string{"foo": "6547", "zed": "xyz"}) - assertdb.Query(t, db, `SELECT count(*) FROM tickets_ticket WHERE config='{"foo": "6547", "bar": "abc", "zed": "xyz"}'::jsonb AND id = $1`, ticket.ID).Returns(1) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM tickets_ticket WHERE config='{"foo": "6547", "bar": "abc", "zed": "xyz"}'::jsonb AND id = $1`, ticket.ID).Returns(1) } func TestUpdateTicketLastActivity(t *testing.T) { - ctx, _, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetData) @@ -182,38 +181,38 @@ func TestUpdateTicketLastActivity(t *testing.T) { defer dates.SetNowSource(dates.DefaultNowSource) dates.SetNowSource(dates.NewFixedNowSource(now)) - ticket := testdata.InsertOpenTicket(db, testdata.Org1, testdata.Cathy, testdata.Mailgun, testdata.DefaultTopic, "Where my shoes", "123", time.Now(), nil) - modelTicket := ticket.Load(db) + ticket := testdata.InsertOpenTicket(rt, testdata.Org1, testdata.Cathy, testdata.Mailgun, testdata.DefaultTopic, "Where my shoes", "123", time.Now(), nil) + modelTicket := ticket.Load(rt) - models.UpdateTicketLastActivity(ctx, db, []*models.Ticket{modelTicket}) + models.UpdateTicketLastActivity(ctx, rt.DB, []*models.Ticket{modelTicket}) assert.Equal(t, now, modelTicket.LastActivityOn()) - assertdb.Query(t, db, `SELECT 
last_activity_on FROM tickets_ticket WHERE id = $1`, ticket.ID).Returns(modelTicket.LastActivityOn()) + assertdb.Query(t, rt.DB, `SELECT last_activity_on FROM tickets_ticket WHERE id = $1`, ticket.ID).Returns(modelTicket.LastActivityOn()) } func TestTicketsAssign(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetData) oa, err := models.GetOrgAssetsWithRefresh(ctx, rt, testdata.Org1.ID, models.RefreshTicketers) require.NoError(t, err) - ticket1 := testdata.InsertClosedTicket(db, testdata.Org1, testdata.Cathy, testdata.Mailgun, testdata.DefaultTopic, "Where my shoes", "123", nil) - modelTicket1 := ticket1.Load(db) + ticket1 := testdata.InsertClosedTicket(rt, testdata.Org1, testdata.Cathy, testdata.Mailgun, testdata.DefaultTopic, "Where my shoes", "123", nil) + modelTicket1 := ticket1.Load(rt) - ticket2 := testdata.InsertOpenTicket(db, testdata.Org1, testdata.Cathy, testdata.Zendesk, testdata.DefaultTopic, "Where my pants", "234", time.Now(), nil) - modelTicket2 := ticket2.Load(db) + ticket2 := testdata.InsertOpenTicket(rt, testdata.Org1, testdata.Cathy, testdata.Zendesk, testdata.DefaultTopic, "Where my pants", "234", time.Now(), nil) + modelTicket2 := ticket2.Load(rt) // create ticket already assigned to a user - ticket3 := testdata.InsertOpenTicket(db, testdata.Org1, testdata.Cathy, testdata.Zendesk, testdata.DefaultTopic, "Where my glasses", "", time.Now(), testdata.Admin) - modelTicket3 := ticket3.Load(db) + ticket3 := testdata.InsertOpenTicket(rt, testdata.Org1, testdata.Cathy, testdata.Zendesk, testdata.DefaultTopic, "Where my glasses", "", time.Now(), testdata.Admin) + modelTicket3 := ticket3.Load(rt) - testdata.InsertOpenTicket(db, testdata.Org1, testdata.Cathy, testdata.Mailgun, testdata.DefaultTopic, "", "", time.Now(), nil) + testdata.InsertOpenTicket(rt, testdata.Org1, testdata.Cathy, testdata.Mailgun, testdata.DefaultTopic, "", "", time.Now(), nil) - evts, err := 
models.TicketsAssign(ctx, db, oa, testdata.Admin.ID, []*models.Ticket{modelTicket1, modelTicket2, modelTicket3}, testdata.Agent.ID, "please handle these") + evts, err := models.TicketsAssign(ctx, rt.DB, oa, testdata.Admin.ID, []*models.Ticket{modelTicket1, modelTicket2, modelTicket3}, testdata.Agent.ID) require.NoError(t, err) assert.Equal(t, 3, len(evts)) assert.Equal(t, models.TicketEventTypeAssigned, evts[modelTicket1].EventType()) @@ -221,79 +220,79 @@ func TestTicketsAssign(t *testing.T) { assert.Equal(t, models.TicketEventTypeAssigned, evts[modelTicket3].EventType()) // check tickets are now assigned - assertdb.Query(t, db, `SELECT assignee_id FROM tickets_ticket WHERE id = $1`, ticket1.ID).Columns(map[string]interface{}{"assignee_id": int64(testdata.Agent.ID)}) - assertdb.Query(t, db, `SELECT assignee_id FROM tickets_ticket WHERE id = $1`, ticket2.ID).Columns(map[string]interface{}{"assignee_id": int64(testdata.Agent.ID)}) - assertdb.Query(t, db, `SELECT assignee_id FROM tickets_ticket WHERE id = $1`, ticket3.ID).Columns(map[string]interface{}{"assignee_id": int64(testdata.Agent.ID)}) + assertdb.Query(t, rt.DB, `SELECT assignee_id FROM tickets_ticket WHERE id = $1`, ticket1.ID).Columns(map[string]interface{}{"assignee_id": int64(testdata.Agent.ID)}) + assertdb.Query(t, rt.DB, `SELECT assignee_id FROM tickets_ticket WHERE id = $1`, ticket2.ID).Columns(map[string]interface{}{"assignee_id": int64(testdata.Agent.ID)}) + assertdb.Query(t, rt.DB, `SELECT assignee_id FROM tickets_ticket WHERE id = $1`, ticket3.ID).Columns(map[string]interface{}{"assignee_id": int64(testdata.Agent.ID)}) // and there are new assigned events with notifications - assertdb.Query(t, db, `SELECT count(*) FROM tickets_ticketevent WHERE event_type = 'A' AND note = 'please handle these'`).Returns(3) - assertdb.Query(t, db, `SELECT count(*) FROM notifications_notification WHERE user_id = $1 AND notification_type = 'tickets:activity'`, testdata.Agent.ID).Returns(1) + assertdb.Query(t, rt.DB, 
`SELECT count(*) FROM tickets_ticketevent WHERE event_type = 'A'`).Returns(3) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM notifications_notification WHERE user_id = $1 AND notification_type = 'tickets:activity'`, testdata.Agent.ID).Returns(1) // and daily counts (we only count first assignments of a ticket) - assertTicketDailyCount(t, db, models.TicketDailyCountAssignment, fmt.Sprintf("o:%d:u:%d", testdata.Org1.ID, testdata.Agent.ID), 2) - assertTicketDailyCount(t, db, models.TicketDailyCountAssignment, fmt.Sprintf("o:%d:u:%d", testdata.Org1.ID, testdata.Admin.ID), 0) + assertTicketDailyCount(t, rt, models.TicketDailyCountAssignment, fmt.Sprintf("o:%d:u:%d", testdata.Org1.ID, testdata.Agent.ID), 2) + assertTicketDailyCount(t, rt, models.TicketDailyCountAssignment, fmt.Sprintf("o:%d:u:%d", testdata.Org1.ID, testdata.Admin.ID), 0) } func TestTicketsAddNote(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetData) oa, err := models.GetOrgAssetsWithRefresh(ctx, rt, testdata.Org1.ID, models.RefreshTicketers) require.NoError(t, err) - ticket1 := testdata.InsertClosedTicket(db, testdata.Org1, testdata.Cathy, testdata.Mailgun, testdata.DefaultTopic, "Where my shoes", "123", nil) - modelTicket1 := ticket1.Load(db) + ticket1 := testdata.InsertClosedTicket(rt, testdata.Org1, testdata.Cathy, testdata.Mailgun, testdata.DefaultTopic, "Where my shoes", "123", nil) + modelTicket1 := ticket1.Load(rt) - ticket2 := testdata.InsertOpenTicket(db, testdata.Org1, testdata.Cathy, testdata.Zendesk, testdata.DefaultTopic, "Where my pants", "234", time.Now(), testdata.Agent) - modelTicket2 := ticket2.Load(db) + ticket2 := testdata.InsertOpenTicket(rt, testdata.Org1, testdata.Cathy, testdata.Zendesk, testdata.DefaultTopic, "Where my pants", "234", time.Now(), testdata.Agent) + modelTicket2 := ticket2.Load(rt) - testdata.InsertOpenTicket(db, testdata.Org1, testdata.Cathy, testdata.Mailgun, testdata.DefaultTopic, "", "", 
time.Now(), nil) + testdata.InsertOpenTicket(rt, testdata.Org1, testdata.Cathy, testdata.Mailgun, testdata.DefaultTopic, "", "", time.Now(), nil) - evts, err := models.TicketsAddNote(ctx, db, oa, testdata.Admin.ID, []*models.Ticket{modelTicket1, modelTicket2}, "spam") + evts, err := models.TicketsAddNote(ctx, rt.DB, oa, testdata.Admin.ID, []*models.Ticket{modelTicket1, modelTicket2}, "spam") require.NoError(t, err) assert.Equal(t, 2, len(evts)) assert.Equal(t, models.TicketEventTypeNoteAdded, evts[modelTicket1].EventType()) assert.Equal(t, models.TicketEventTypeNoteAdded, evts[modelTicket2].EventType()) // check there are new note events - assertdb.Query(t, db, `SELECT count(*) FROM tickets_ticketevent WHERE event_type = 'N' AND note = 'spam'`).Returns(2) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM tickets_ticketevent WHERE event_type = 'N' AND note = 'spam'`).Returns(2) - assertdb.Query(t, db, `SELECT count(*) FROM notifications_notification WHERE user_id = $1 AND notification_type = 'tickets:activity'`, testdata.Agent.ID).Returns(1) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM notifications_notification WHERE user_id = $1 AND notification_type = 'tickets:activity'`, testdata.Agent.ID).Returns(1) } func TestTicketsChangeTopic(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetData) oa, err := models.GetOrgAssetsWithRefresh(ctx, rt, testdata.Org1.ID, models.RefreshTicketers) require.NoError(t, err) - ticket1 := testdata.InsertClosedTicket(db, testdata.Org1, testdata.Cathy, testdata.Mailgun, testdata.SalesTopic, "Where my shoes", "123", nil) - modelTicket1 := ticket1.Load(db) + ticket1 := testdata.InsertClosedTicket(rt, testdata.Org1, testdata.Cathy, testdata.Mailgun, testdata.SalesTopic, "Where my shoes", "123", nil) + modelTicket1 := ticket1.Load(rt) - ticket2 := testdata.InsertOpenTicket(db, testdata.Org1, testdata.Cathy, testdata.Zendesk, testdata.SupportTopic, "Where my pants", 
"234", time.Now(), nil) - modelTicket2 := ticket2.Load(db) + ticket2 := testdata.InsertOpenTicket(rt, testdata.Org1, testdata.Cathy, testdata.Zendesk, testdata.SupportTopic, "Where my pants", "234", time.Now(), nil) + modelTicket2 := ticket2.Load(rt) - ticket3 := testdata.InsertOpenTicket(db, testdata.Org1, testdata.Cathy, testdata.Zendesk, testdata.DefaultTopic, "Where my pants", "345", time.Now(), nil) - modelTicket3 := ticket3.Load(db) + ticket3 := testdata.InsertOpenTicket(rt, testdata.Org1, testdata.Cathy, testdata.Zendesk, testdata.DefaultTopic, "Where my pants", "345", time.Now(), nil) + modelTicket3 := ticket3.Load(rt) - testdata.InsertOpenTicket(db, testdata.Org1, testdata.Cathy, testdata.Mailgun, testdata.DefaultTopic, "", "", time.Now(), nil) + testdata.InsertOpenTicket(rt, testdata.Org1, testdata.Cathy, testdata.Mailgun, testdata.DefaultTopic, "", "", time.Now(), nil) - evts, err := models.TicketsChangeTopic(ctx, db, oa, testdata.Admin.ID, []*models.Ticket{modelTicket1, modelTicket2, modelTicket3}, testdata.SupportTopic.ID) + evts, err := models.TicketsChangeTopic(ctx, rt.DB, oa, testdata.Admin.ID, []*models.Ticket{modelTicket1, modelTicket2, modelTicket3}, testdata.SupportTopic.ID) require.NoError(t, err) assert.Equal(t, 2, len(evts)) // ticket 2 not included as already has that topic assert.Equal(t, models.TicketEventTypeTopicChanged, evts[modelTicket1].EventType()) assert.Equal(t, models.TicketEventTypeTopicChanged, evts[modelTicket3].EventType()) // check tickets are updated and we have events - assertdb.Query(t, db, `SELECT count(*) FROM tickets_ticket WHERE topic_id = $1`, testdata.SupportTopic.ID).Returns(3) - assertdb.Query(t, db, `SELECT count(*) FROM tickets_ticketevent WHERE event_type = 'T' AND topic_id = $1`, testdata.SupportTopic.ID).Returns(2) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM tickets_ticket WHERE topic_id = $1`, testdata.SupportTopic.ID).Returns(3) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM tickets_ticketevent WHERE 
event_type = 'T' AND topic_id = $1`, testdata.SupportTopic.ID).Returns(2) } func TestCloseTickets(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetData) defer httpx.SetRequestor(httpx.DefaultRequestor) @@ -310,15 +309,15 @@ func TestCloseTickets(t *testing.T) { oa, err := models.GetOrgAssetsWithRefresh(ctx, rt, testdata.Org1.ID, models.RefreshTicketers|models.RefreshGroups) require.NoError(t, err) - ticket1 := testdata.InsertOpenTicket(db, testdata.Org1, testdata.Cathy, testdata.Mailgun, testdata.DefaultTopic, "Where my shoes", "123", time.Now(), nil) - modelTicket1 := ticket1.Load(db) + ticket1 := testdata.InsertOpenTicket(rt, testdata.Org1, testdata.Cathy, testdata.Mailgun, testdata.DefaultTopic, "Where my shoes", "123", time.Now(), nil) + modelTicket1 := ticket1.Load(rt) - ticket2 := testdata.InsertClosedTicket(db, testdata.Org1, testdata.Cathy, testdata.Zendesk, testdata.DefaultTopic, "Where my pants", "234", nil) - modelTicket2 := ticket2.Load(db) + ticket2 := testdata.InsertClosedTicket(rt, testdata.Org1, testdata.Cathy, testdata.Zendesk, testdata.DefaultTopic, "Where my pants", "234", nil) + modelTicket2 := ticket2.Load(rt) - _, cathy := testdata.Cathy.Load(db, oa) + _, cathy := testdata.Cathy.Load(rt, oa) - err = models.CalculateDynamicGroups(ctx, db, oa, []*flows.Contact{cathy}) + err = models.CalculateDynamicGroups(ctx, rt.DB, oa, []*flows.Contact{cathy}) require.NoError(t, err) assert.Equal(t, "Doctors", cathy.Groups().All()[0].Name()) @@ -331,39 +330,39 @@ func TestCloseTickets(t *testing.T) { assert.Equal(t, models.TicketEventTypeClosed, evts[modelTicket1].EventType()) // check ticket #1 is now closed - assertdb.Query(t, db, `SELECT count(*) FROM tickets_ticket WHERE id = $1 AND status = 'C' AND closed_on IS NOT NULL`, ticket1.ID).Returns(1) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM tickets_ticket WHERE id = $1 AND status = 'C' AND closed_on IS NOT NULL`, 
ticket1.ID).Returns(1) // and there's closed event for it - assertdb.Query(t, db, `SELECT count(*) FROM tickets_ticketevent WHERE org_id = $1 AND ticket_id = $2 AND event_type = 'C'`, + assertdb.Query(t, rt.DB, `SELECT count(*) FROM tickets_ticketevent WHERE org_id = $1 AND ticket_id = $2 AND event_type = 'C'`, []interface{}{testdata.Org1.ID, ticket1.ID}, 1) // and the logger has an http log it can insert for that ticketer - require.NoError(t, logger.Insert(ctx, db)) + require.NoError(t, logger.Insert(ctx, rt.DB)) - assertdb.Query(t, db, `SELECT count(*) FROM request_logs_httplog WHERE ticketer_id = $1`, testdata.Mailgun.ID).Returns(1) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM request_logs_httplog WHERE ticketer_id = $1`, testdata.Mailgun.ID).Returns(1) // reload Cathy and check they're no longer in the tickets group - _, cathy = testdata.Cathy.Load(db, oa) + _, cathy = testdata.Cathy.Load(rt, oa) assert.Equal(t, 1, len(cathy.Groups().All())) assert.Equal(t, "Doctors", cathy.Groups().All()[0].Name()) // but no events for ticket #2 which was already closed - assertdb.Query(t, db, `SELECT count(*) FROM tickets_ticketevent WHERE ticket_id = $1 AND event_type = 'C'`, ticket2.ID).Returns(0) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM tickets_ticketevent WHERE ticket_id = $1 AND event_type = 'C'`, ticket2.ID).Returns(0) // can close tickets without a user - ticket3 := testdata.InsertOpenTicket(db, testdata.Org1, testdata.Cathy, testdata.Mailgun, testdata.DefaultTopic, "Where my shoes", "123", time.Now(), nil) - modelTicket3 := ticket3.Load(db) + ticket3 := testdata.InsertOpenTicket(rt, testdata.Org1, testdata.Cathy, testdata.Mailgun, testdata.DefaultTopic, "Where my shoes", "123", time.Now(), nil) + modelTicket3 := ticket3.Load(rt) evts, err = models.CloseTickets(ctx, rt, oa, models.NilUserID, []*models.Ticket{modelTicket3}, false, false, logger) require.NoError(t, err) assert.Equal(t, 1, len(evts)) assert.Equal(t, models.TicketEventTypeClosed, 
evts[modelTicket3].EventType()) - assertdb.Query(t, db, `SELECT count(*) FROM tickets_ticketevent WHERE ticket_id = $1 AND event_type = 'C' AND created_by_id IS NULL`, ticket3.ID).Returns(1) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM tickets_ticketevent WHERE ticket_id = $1 AND event_type = 'C' AND created_by_id IS NULL`, ticket3.ID).Returns(1) } func TestReopenTickets(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetData) defer httpx.SetRequestor(httpx.DefaultRequestor) @@ -380,11 +379,11 @@ func TestReopenTickets(t *testing.T) { oa, err := models.GetOrgAssetsWithRefresh(ctx, rt, testdata.Org1.ID, models.RefreshTicketers|models.RefreshGroups) require.NoError(t, err) - ticket1 := testdata.InsertClosedTicket(db, testdata.Org1, testdata.Cathy, testdata.Mailgun, testdata.DefaultTopic, "Where my shoes", "123", nil) - modelTicket1 := ticket1.Load(db) + ticket1 := testdata.InsertClosedTicket(rt, testdata.Org1, testdata.Cathy, testdata.Mailgun, testdata.DefaultTopic, "Where my shoes", "123", nil) + modelTicket1 := ticket1.Load(rt) - ticket2 := testdata.InsertOpenTicket(db, testdata.Org1, testdata.Cathy, testdata.Zendesk, testdata.DefaultTopic, "Where my pants", "234", time.Now(), nil) - modelTicket2 := ticket2.Load(db) + ticket2 := testdata.InsertOpenTicket(rt, testdata.Org1, testdata.Cathy, testdata.Zendesk, testdata.DefaultTopic, "Where my pants", "234", time.Now(), nil) + modelTicket2 := ticket2.Load(rt) logger := &models.HTTPLogger{} evts, err := models.ReopenTickets(ctx, rt, oa, testdata.Admin.ID, []*models.Ticket{modelTicket1, modelTicket2}, true, logger) @@ -393,65 +392,65 @@ func TestReopenTickets(t *testing.T) { assert.Equal(t, models.TicketEventTypeReopened, evts[modelTicket1].EventType()) // check ticket #1 is now closed - assertdb.Query(t, db, `SELECT count(*) FROM tickets_ticket WHERE id = $1 AND status = 'O' AND closed_on IS NULL`, ticket1.ID).Returns(1) + assertdb.Query(t, rt.DB, 
`SELECT count(*) FROM tickets_ticket WHERE id = $1 AND status = 'O' AND closed_on IS NULL`, ticket1.ID).Returns(1) // and there's reopened event for it - assertdb.Query(t, db, `SELECT count(*) FROM tickets_ticketevent WHERE org_id = $1 AND ticket_id = $2 AND event_type = 'R'`, testdata.Org1.ID, ticket1.ID).Returns(1) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM tickets_ticketevent WHERE org_id = $1 AND ticket_id = $2 AND event_type = 'R'`, testdata.Org1.ID, ticket1.ID).Returns(1) // and the logger has an http log it can insert for that ticketer - require.NoError(t, logger.Insert(ctx, db)) + require.NoError(t, logger.Insert(ctx, rt.DB)) - assertdb.Query(t, db, `SELECT count(*) FROM request_logs_httplog WHERE ticketer_id = $1`, testdata.Mailgun.ID).Returns(1) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM request_logs_httplog WHERE ticketer_id = $1`, testdata.Mailgun.ID).Returns(1) // but no events for ticket #2 which waas already open - assertdb.Query(t, db, `SELECT count(*) FROM tickets_ticketevent WHERE ticket_id = $1 AND event_type = 'R'`, ticket2.ID).Returns(0) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM tickets_ticketevent WHERE ticket_id = $1 AND event_type = 'R'`, ticket2.ID).Returns(0) // check Cathy is now in the open tickets group - _, cathy := testdata.Cathy.Load(db, oa) + _, cathy := testdata.Cathy.Load(rt, oa) assert.Equal(t, 2, len(cathy.Groups().All())) assert.Equal(t, "Doctors", cathy.Groups().All()[0].Name()) assert.Equal(t, "Open Tickets", cathy.Groups().All()[1].Name()) // reopening doesn't change opening daily counts - assertTicketDailyCount(t, db, models.TicketDailyCountOpening, fmt.Sprintf("o:%d", testdata.Org1.ID), 0) + assertTicketDailyCount(t, rt, models.TicketDailyCountOpening, fmt.Sprintf("o:%d", testdata.Org1.ID), 0) } func TestTicketRecordReply(t *testing.T) { - ctx, _, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetData) openedOn := time.Date(2022, 5, 18, 14, 21, 0, 0, time.UTC) 
repliedOn := time.Date(2022, 5, 18, 15, 0, 0, 0, time.UTC) - ticket := testdata.InsertOpenTicket(db, testdata.Org1, testdata.Cathy, testdata.Mailgun, testdata.DefaultTopic, "Where my shoes", "123", openedOn, nil) + ticket := testdata.InsertOpenTicket(rt, testdata.Org1, testdata.Cathy, testdata.Mailgun, testdata.DefaultTopic, "Where my shoes", "123", openedOn, nil) - timing, err := models.TicketRecordReplied(ctx, db, ticket.ID, repliedOn) + timing, err := models.TicketRecordReplied(ctx, rt.DB, ticket.ID, repliedOn) assert.NoError(t, err) assert.Equal(t, 2340*time.Second, timing) - modelTicket := ticket.Load(db) + modelTicket := ticket.Load(rt) assert.Equal(t, repliedOn, *modelTicket.RepliedOn()) assert.Equal(t, repliedOn, modelTicket.LastActivityOn()) - assertdb.Query(t, db, `SELECT replied_on FROM tickets_ticket WHERE id = $1`, ticket.ID).Returns(repliedOn) - assertdb.Query(t, db, `SELECT last_activity_on FROM tickets_ticket WHERE id = $1`, ticket.ID).Returns(repliedOn) + assertdb.Query(t, rt.DB, `SELECT replied_on FROM tickets_ticket WHERE id = $1`, ticket.ID).Returns(repliedOn) + assertdb.Query(t, rt.DB, `SELECT last_activity_on FROM tickets_ticket WHERE id = $1`, ticket.ID).Returns(repliedOn) repliedAgainOn := time.Date(2022, 5, 18, 15, 5, 0, 0, time.UTC) // if we call it again, it won't change replied_on again but it will update last_activity_on - timing, err = models.TicketRecordReplied(ctx, db, ticket.ID, repliedAgainOn) + timing, err = models.TicketRecordReplied(ctx, rt.DB, ticket.ID, repliedAgainOn) assert.NoError(t, err) assert.Equal(t, time.Duration(-1), timing) - modelTicket = ticket.Load(db) + modelTicket = ticket.Load(rt) assert.Equal(t, repliedOn, *modelTicket.RepliedOn()) assert.Equal(t, repliedAgainOn, modelTicket.LastActivityOn()) - assertdb.Query(t, db, `SELECT replied_on FROM tickets_ticket WHERE id = $1`, ticket.ID).Returns(repliedOn) - assertdb.Query(t, db, `SELECT last_activity_on FROM tickets_ticket WHERE id = $1`, 
ticket.ID).Returns(repliedAgainOn) + assertdb.Query(t, rt.DB, `SELECT replied_on FROM tickets_ticket WHERE id = $1`, ticket.ID).Returns(repliedOn) + assertdb.Query(t, rt.DB, `SELECT last_activity_on FROM tickets_ticket WHERE id = $1`, ticket.ID).Returns(repliedAgainOn) } -func assertTicketDailyCount(t *testing.T, db *sqlx.DB, countType models.TicketDailyCountType, scope string, expected int) { - assertdb.Query(t, db, `SELECT COALESCE(SUM(count), 0) FROM tickets_ticketdailycount WHERE count_type = $1 AND scope = $2`, countType, scope).Returns(expected) +func assertTicketDailyCount(t *testing.T, rt *runtime.Runtime, countType models.TicketDailyCountType, scope string, expected int) { + assertdb.Query(t, rt.DB, `SELECT COALESCE(SUM(count), 0) FROM tickets_ticketdailycount WHERE count_type = $1 AND scope = $2`, countType, scope).Returns(expected) } diff --git a/core/models/topics.go b/core/models/topics.go index 9b01aac23..ba7b7a291 100644 --- a/core/models/topics.go +++ b/core/models/topics.go @@ -10,12 +10,12 @@ import ( "github.com/nyaruka/gocommon/dates" "github.com/nyaruka/gocommon/dbutil" "github.com/nyaruka/goflow/assets" - "github.com/nyaruka/null" + "github.com/nyaruka/null/v2" "github.com/pkg/errors" "github.com/sirupsen/logrus" ) -type TopicID null.Int +type TopicID int const NilTopicID = TopicID(0) @@ -86,22 +86,7 @@ func loadTopics(ctx context.Context, db sqlx.Queryer, orgID OrgID) ([]assets.Top return topics, nil } -// MarshalJSON marshals into JSON. 0 values will become null -func (i TopicID) MarshalJSON() ([]byte, error) { - return null.Int(i).MarshalJSON() -} - -// UnmarshalJSON unmarshals from JSON. null values become 0 -func (i *TopicID) UnmarshalJSON(b []byte) error { - return null.UnmarshalInt(b, (*null.Int)(i)) -} - -// Value returns the db value, null is returned for 0 -func (i TopicID) Value() (driver.Value, error) { - return null.Int(i).Value() -} - -// Scan scans from the db value. 
null values become 0 -func (i *TopicID) Scan(value interface{}) error { - return null.ScanInt(value, (*null.Int)(i)) -} +func (i *TopicID) Scan(value any) error { return null.ScanInt(value, i) } +func (i TopicID) Value() (driver.Value, error) { return null.IntValue(i) } +func (i *TopicID) UnmarshalJSON(b []byte) error { return null.UnmarshalInt(b, i) } +func (i TopicID) MarshalJSON() ([]byte, error) { return null.MarshalInt(i) } diff --git a/core/models/topics_test.go b/core/models/topics_test.go index 3053a795e..d3dbf98ab 100644 --- a/core/models/topics_test.go +++ b/core/models/topics_test.go @@ -12,7 +12,7 @@ import ( ) func TestTopics(t *testing.T) { - ctx, rt, _, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() oa, err := models.GetOrgAssetsWithRefresh(ctx, rt, testdata.Org1.ID, models.RefreshTopics) require.NoError(t, err) diff --git a/core/models/triggers_test.go b/core/models/triggers_test.go index f6a8d0760..5180b9306 100644 --- a/core/models/triggers_test.go +++ b/core/models/triggers_test.go @@ -15,15 +15,15 @@ import ( ) func TestLoadTriggers(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetAll) - db.MustExec(`DELETE FROM triggers_trigger`) - farmersGroup := testdata.InsertContactGroup(db, testdata.Org1, assets.GroupUUID(uuids.New()), "Farmers", "") + rt.DB.MustExec(`DELETE FROM triggers_trigger`) + farmersGroup := testdata.InsertContactGroup(rt, testdata.Org1, assets.GroupUUID(uuids.New()), "Farmers", "") // create trigger for other org to ensure it isn't loaded - testdata.InsertCatchallTrigger(db, testdata.Org2, testdata.Org2Favorites, nil, nil) + testdata.InsertCatchallTrigger(rt, testdata.Org2, testdata.Org2Favorites, nil, nil) tcs := []struct { id models.TriggerID @@ -38,18 +38,14 @@ func TestLoadTriggers(t *testing.T) { channelID models.ChannelID }{ { - id: testdata.InsertKeywordTrigger(db, testdata.Org1, testdata.Favorites, "join", models.MatchFirst, nil, nil), + id: 
testdata.InsertKeywordTrigger(rt, testdata.Org1, testdata.Favorites, "join", models.MatchFirst, nil, nil), type_: models.KeywordTriggerType, flowID: testdata.Favorites.ID, keyword: "join", keywordMatchType: models.MatchFirst, }, { - id: testdata.InsertKeywordTrigger( - db, testdata.Org1, testdata.PickANumber, "start", models.MatchOnly, - []*testdata.Group{testdata.DoctorsGroup, testdata.TestersGroup}, - []*testdata.Group{farmersGroup}, - ), + id: testdata.InsertKeywordTrigger(rt, testdata.Org1, testdata.PickANumber, "start", models.MatchOnly, []*testdata.Group{testdata.DoctorsGroup, testdata.TestersGroup}, []*testdata.Group{farmersGroup}), type_: models.KeywordTriggerType, flowID: testdata.PickANumber.ID, keyword: "start", @@ -58,37 +54,37 @@ func TestLoadTriggers(t *testing.T) { excludeGroups: []models.GroupID{farmersGroup.ID}, }, { - id: testdata.InsertIncomingCallTrigger(db, testdata.Org1, testdata.Favorites, []*testdata.Group{testdata.DoctorsGroup, testdata.TestersGroup}, []*testdata.Group{farmersGroup}), + id: testdata.InsertIncomingCallTrigger(rt, testdata.Org1, testdata.Favorites, []*testdata.Group{testdata.DoctorsGroup, testdata.TestersGroup}, []*testdata.Group{farmersGroup}), type_: models.IncomingCallTriggerType, flowID: testdata.Favorites.ID, includeGroups: []models.GroupID{testdata.DoctorsGroup.ID, testdata.TestersGroup.ID}, excludeGroups: []models.GroupID{farmersGroup.ID}, }, { - id: testdata.InsertMissedCallTrigger(db, testdata.Org1, testdata.Favorites), + id: testdata.InsertMissedCallTrigger(rt, testdata.Org1, testdata.Favorites), type_: models.MissedCallTriggerType, flowID: testdata.Favorites.ID, }, { - id: testdata.InsertNewConversationTrigger(db, testdata.Org1, testdata.Favorites, testdata.TwilioChannel), + id: testdata.InsertNewConversationTrigger(rt, testdata.Org1, testdata.Favorites, testdata.TwilioChannel), type_: models.NewConversationTriggerType, flowID: testdata.Favorites.ID, channelID: testdata.TwilioChannel.ID, }, { - id: 
testdata.InsertReferralTrigger(db, testdata.Org1, testdata.Favorites, "", nil), + id: testdata.InsertReferralTrigger(rt, testdata.Org1, testdata.Favorites, "", nil), type_: models.ReferralTriggerType, flowID: testdata.Favorites.ID, }, { - id: testdata.InsertReferralTrigger(db, testdata.Org1, testdata.Favorites, "3256437635", testdata.TwilioChannel), + id: testdata.InsertReferralTrigger(rt, testdata.Org1, testdata.Favorites, "3256437635", testdata.TwilioChannel), type_: models.ReferralTriggerType, flowID: testdata.Favorites.ID, referrerID: "3256437635", channelID: testdata.TwilioChannel.ID, }, { - id: testdata.InsertCatchallTrigger(db, testdata.Org1, testdata.Favorites, nil, nil), + id: testdata.InsertCatchallTrigger(rt, testdata.Org1, testdata.Favorites, nil, nil), type_: models.CatchallTriggerType, flowID: testdata.Favorites.ID, }, @@ -116,32 +112,32 @@ func TestLoadTriggers(t *testing.T) { } func TestFindMatchingMsgTrigger(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetAll) - db.MustExec(`DELETE FROM triggers_trigger`) + rt.DB.MustExec(`DELETE FROM triggers_trigger`) - joinID := testdata.InsertKeywordTrigger(db, testdata.Org1, testdata.Favorites, "join", models.MatchFirst, nil, nil) - resistID := testdata.InsertKeywordTrigger(db, testdata.Org1, testdata.SingleMessage, "resist", models.MatchOnly, nil, nil) - emojiID := testdata.InsertKeywordTrigger(db, testdata.Org1, testdata.PickANumber, "👍", models.MatchFirst, nil, nil) - doctorsID := testdata.InsertKeywordTrigger(db, testdata.Org1, testdata.SingleMessage, "resist", models.MatchOnly, []*testdata.Group{testdata.DoctorsGroup}, nil) - doctorsAndNotTestersID := testdata.InsertKeywordTrigger(db, testdata.Org1, testdata.SingleMessage, "resist", models.MatchOnly, []*testdata.Group{testdata.DoctorsGroup}, []*testdata.Group{testdata.TestersGroup}) - doctorsCatchallID := testdata.InsertCatchallTrigger(db, testdata.Org1, testdata.SingleMessage, 
[]*testdata.Group{testdata.DoctorsGroup}, nil) - othersAllID := testdata.InsertCatchallTrigger(db, testdata.Org1, testdata.SingleMessage, nil, nil) + joinID := testdata.InsertKeywordTrigger(rt, testdata.Org1, testdata.Favorites, "join", models.MatchFirst, nil, nil) + resistID := testdata.InsertKeywordTrigger(rt, testdata.Org1, testdata.SingleMessage, "resist", models.MatchOnly, nil, nil) + emojiID := testdata.InsertKeywordTrigger(rt, testdata.Org1, testdata.PickANumber, "👍", models.MatchFirst, nil, nil) + doctorsID := testdata.InsertKeywordTrigger(rt, testdata.Org1, testdata.SingleMessage, "resist", models.MatchOnly, []*testdata.Group{testdata.DoctorsGroup}, nil) + doctorsAndNotTestersID := testdata.InsertKeywordTrigger(rt, testdata.Org1, testdata.SingleMessage, "resist", models.MatchOnly, []*testdata.Group{testdata.DoctorsGroup}, []*testdata.Group{testdata.TestersGroup}) + doctorsCatchallID := testdata.InsertCatchallTrigger(rt, testdata.Org1, testdata.SingleMessage, []*testdata.Group{testdata.DoctorsGroup}, nil) + othersAllID := testdata.InsertCatchallTrigger(rt, testdata.Org1, testdata.SingleMessage, nil, nil) // trigger for other org - testdata.InsertCatchallTrigger(db, testdata.Org2, testdata.Org2Favorites, nil, nil) + testdata.InsertCatchallTrigger(rt, testdata.Org2, testdata.Org2Favorites, nil, nil) oa, err := models.GetOrgAssetsWithRefresh(ctx, rt, testdata.Org1.ID, models.RefreshTriggers) require.NoError(t, err) - testdata.DoctorsGroup.Add(db, testdata.Bob) - testdata.TestersGroup.Add(db, testdata.Bob) + testdata.DoctorsGroup.Add(rt, testdata.Bob) + testdata.TestersGroup.Add(rt, testdata.Bob) - _, cathy := testdata.Cathy.Load(db, oa) - _, george := testdata.George.Load(db, oa) - _, bob := testdata.Bob.Load(db, oa) + _, cathy := testdata.Cathy.Load(rt, oa) + _, george := testdata.George.Load(rt, oa) + _, bob := testdata.Bob.Load(rt, oa) tcs := []struct { text string @@ -171,25 +167,25 @@ func TestFindMatchingMsgTrigger(t *testing.T) { } func 
TestFindMatchingIncomingCallTrigger(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetAll) - doctorsAndNotTestersTriggerID := testdata.InsertIncomingCallTrigger(db, testdata.Org1, testdata.Favorites, []*testdata.Group{testdata.DoctorsGroup}, []*testdata.Group{testdata.TestersGroup}) - doctorsTriggerID := testdata.InsertIncomingCallTrigger(db, testdata.Org1, testdata.Favorites, []*testdata.Group{testdata.DoctorsGroup}, nil) - notTestersTriggerID := testdata.InsertIncomingCallTrigger(db, testdata.Org1, testdata.Favorites, nil, []*testdata.Group{testdata.TestersGroup}) - everyoneTriggerID := testdata.InsertIncomingCallTrigger(db, testdata.Org1, testdata.Favorites, nil, nil) + doctorsAndNotTestersTriggerID := testdata.InsertIncomingCallTrigger(rt, testdata.Org1, testdata.Favorites, []*testdata.Group{testdata.DoctorsGroup}, []*testdata.Group{testdata.TestersGroup}) + doctorsTriggerID := testdata.InsertIncomingCallTrigger(rt, testdata.Org1, testdata.Favorites, []*testdata.Group{testdata.DoctorsGroup}, nil) + notTestersTriggerID := testdata.InsertIncomingCallTrigger(rt, testdata.Org1, testdata.Favorites, nil, []*testdata.Group{testdata.TestersGroup}) + everyoneTriggerID := testdata.InsertIncomingCallTrigger(rt, testdata.Org1, testdata.Favorites, nil, nil) oa, err := models.GetOrgAssetsWithRefresh(ctx, rt, testdata.Org1.ID, models.RefreshTriggers) require.NoError(t, err) - testdata.DoctorsGroup.Add(db, testdata.Bob) - testdata.TestersGroup.Add(db, testdata.Bob, testdata.Alexandria) + testdata.DoctorsGroup.Add(rt, testdata.Bob) + testdata.TestersGroup.Add(rt, testdata.Bob, testdata.Alexandria) - _, cathy := testdata.Cathy.Load(db, oa) - _, bob := testdata.Bob.Load(db, oa) - _, george := testdata.George.Load(db, oa) - _, alexa := testdata.Alexandria.Load(db, oa) + _, cathy := testdata.Cathy.Load(rt, oa) + _, bob := testdata.Bob.Load(rt, oa) + _, george := testdata.George.Load(rt, oa) + _, alexa := 
testdata.Alexandria.Load(rt, oa) tcs := []struct { contact *flows.Contact @@ -209,11 +205,11 @@ func TestFindMatchingIncomingCallTrigger(t *testing.T) { } func TestFindMatchingMissedCallTrigger(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetAll) - testdata.InsertCatchallTrigger(db, testdata.Org1, testdata.SingleMessage, nil, nil) + testdata.InsertCatchallTrigger(rt, testdata.Org1, testdata.SingleMessage, nil, nil) oa, err := models.GetOrgAssetsWithRefresh(ctx, rt, testdata.Org1.ID, models.RefreshTriggers) require.NoError(t, err) @@ -222,7 +218,7 @@ func TestFindMatchingMissedCallTrigger(t *testing.T) { trigger := models.FindMatchingMissedCallTrigger(oa) assert.Nil(t, trigger) - triggerID := testdata.InsertMissedCallTrigger(db, testdata.Org1, testdata.Favorites) + triggerID := testdata.InsertMissedCallTrigger(rt, testdata.Org1, testdata.Favorites) oa, err = models.GetOrgAssetsWithRefresh(ctx, rt, testdata.Org1.ID, models.RefreshTriggers) require.NoError(t, err) @@ -232,12 +228,12 @@ func TestFindMatchingMissedCallTrigger(t *testing.T) { } func TestFindMatchingNewConversationTrigger(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetAll) - twilioTriggerID := testdata.InsertNewConversationTrigger(db, testdata.Org1, testdata.Favorites, testdata.TwilioChannel) - noChTriggerID := testdata.InsertNewConversationTrigger(db, testdata.Org1, testdata.Favorites, nil) + twilioTriggerID := testdata.InsertNewConversationTrigger(rt, testdata.Org1, testdata.Favorites, testdata.TwilioChannel) + noChTriggerID := testdata.InsertNewConversationTrigger(rt, testdata.Org1, testdata.Favorites, nil) oa, err := models.GetOrgAssetsWithRefresh(ctx, rt, testdata.Org1.ID, models.RefreshTriggers) require.NoError(t, err) @@ -259,13 +255,13 @@ func TestFindMatchingNewConversationTrigger(t *testing.T) { } func TestFindMatchingReferralTrigger(t *testing.T) { 
- ctx, rt, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetAll) - fooID := testdata.InsertReferralTrigger(db, testdata.Org1, testdata.Favorites, "foo", testdata.TwitterChannel) - barID := testdata.InsertReferralTrigger(db, testdata.Org1, testdata.Favorites, "bar", nil) - bazID := testdata.InsertReferralTrigger(db, testdata.Org1, testdata.Favorites, "", testdata.TwitterChannel) + fooID := testdata.InsertReferralTrigger(rt, testdata.Org1, testdata.Favorites, "foo", testdata.TwitterChannel) + barID := testdata.InsertReferralTrigger(rt, testdata.Org1, testdata.Favorites, "bar", nil) + bazID := testdata.InsertReferralTrigger(rt, testdata.Org1, testdata.Favorites, "", testdata.TwitterChannel) oa, err := models.GetOrgAssetsWithRefresh(ctx, rt, testdata.Org1.ID, models.RefreshTriggers) require.NoError(t, err) @@ -294,23 +290,23 @@ func TestFindMatchingReferralTrigger(t *testing.T) { } func TestArchiveContactTriggers(t *testing.T) { - ctx, _, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetAll) - everybodyID := testdata.InsertKeywordTrigger(db, testdata.Org1, testdata.Favorites, "join", models.MatchFirst, nil, nil) - cathyOnly1ID := testdata.InsertScheduledTrigger(db, testdata.Org1, testdata.Favorites, nil, nil, []*testdata.Contact{testdata.Cathy}) - cathyOnly2ID := testdata.InsertScheduledTrigger(db, testdata.Org1, testdata.Favorites, nil, nil, []*testdata.Contact{testdata.Cathy}) - cathyAndGeorgeID := testdata.InsertScheduledTrigger(db, testdata.Org1, testdata.Favorites, nil, nil, []*testdata.Contact{testdata.Cathy, testdata.George}) - cathyAndGroupID := testdata.InsertScheduledTrigger(db, testdata.Org1, testdata.Favorites, []*testdata.Group{testdata.DoctorsGroup}, nil, []*testdata.Contact{testdata.Cathy}) - georgeOnlyID := testdata.InsertScheduledTrigger(db, testdata.Org1, testdata.Favorites, nil, nil, []*testdata.Contact{testdata.George}) + everybodyID := 
testdata.InsertKeywordTrigger(rt, testdata.Org1, testdata.Favorites, "join", models.MatchFirst, nil, nil) + cathyOnly1ID := testdata.InsertScheduledTrigger(rt, testdata.Org1, testdata.Favorites, nil, nil, []*testdata.Contact{testdata.Cathy}) + cathyOnly2ID := testdata.InsertScheduledTrigger(rt, testdata.Org1, testdata.Favorites, nil, nil, []*testdata.Contact{testdata.Cathy}) + cathyAndGeorgeID := testdata.InsertScheduledTrigger(rt, testdata.Org1, testdata.Favorites, nil, nil, []*testdata.Contact{testdata.Cathy, testdata.George}) + cathyAndGroupID := testdata.InsertScheduledTrigger(rt, testdata.Org1, testdata.Favorites, []*testdata.Group{testdata.DoctorsGroup}, nil, []*testdata.Contact{testdata.Cathy}) + georgeOnlyID := testdata.InsertScheduledTrigger(rt, testdata.Org1, testdata.Favorites, nil, nil, []*testdata.Contact{testdata.George}) - err := models.ArchiveContactTriggers(ctx, db, []models.ContactID{testdata.Cathy.ID, testdata.Bob.ID}) + err := models.ArchiveContactTriggers(ctx, rt.DB, []models.ContactID{testdata.Cathy.ID, testdata.Bob.ID}) require.NoError(t, err) assertTriggerArchived := func(id models.TriggerID, archived bool) { var isArchived bool - db.Get(&isArchived, `SELECT is_archived FROM triggers_trigger WHERE id = $1`, id) + rt.DB.Get(&isArchived, `SELECT is_archived FROM triggers_trigger WHERE id = $1`, id) assert.Equal(t, archived, isArchived, `is_archived mismatch for trigger %d`, id) } diff --git a/core/models/users.go b/core/models/users.go index 8eb41f2f7..87b7de27a 100644 --- a/core/models/users.go +++ b/core/models/users.go @@ -10,7 +10,7 @@ import ( "github.com/jmoiron/sqlx" "github.com/nyaruka/gocommon/dbutil" "github.com/nyaruka/goflow/assets" - "github.com/nyaruka/null" + "github.com/nyaruka/null/v2" "github.com/pkg/errors" "github.com/sirupsen/logrus" ) @@ -21,27 +21,12 @@ const ( ) // UserID is our type for user ids, which can be null -type UserID null.Int +type UserID int -// MarshalJSON marshals into JSON. 
0 values will become null -func (i UserID) MarshalJSON() ([]byte, error) { - return null.Int(i).MarshalJSON() -} - -// UnmarshalJSON unmarshals from JSON. null values become 0 -func (i *UserID) UnmarshalJSON(b []byte) error { - return null.UnmarshalInt(b, (*null.Int)(i)) -} - -// Value returns the db value, null is returned for 0 -func (i UserID) Value() (driver.Value, error) { - return null.Int(i).Value() -} - -// Scan scans from the db value. null values become 0 -func (i *UserID) Scan(value interface{}) error { - return null.ScanInt(value, (*null.Int)(i)) -} +func (i *UserID) Scan(value any) error { return null.ScanInt(value, i) } +func (i UserID) Value() (driver.Value, error) { return null.IntValue(i) } +func (i *UserID) UnmarshalJSON(b []byte) error { return null.UnmarshalInt(b, i) } +func (i UserID) MarshalJSON() ([]byte, error) { return null.MarshalInt(i) } type UserRole string diff --git a/core/models/users_test.go b/core/models/users_test.go index dee438067..2f8f30e9a 100644 --- a/core/models/users_test.go +++ b/core/models/users_test.go @@ -11,7 +11,7 @@ import ( ) func TestLoadUsers(t *testing.T) { - ctx, rt, _, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() oa, err := models.GetOrgAssetsWithRefresh(ctx, rt, testdata.Org1.ID, models.RefreshUsers) require.NoError(t, err) diff --git a/core/models/utils.go b/core/models/utils.go index d2cb5b987..818cd3f61 100644 --- a/core/models/utils.go +++ b/core/models/utils.go @@ -3,6 +3,8 @@ package models import ( "context" "database/sql" + "database/sql/driver" + "encoding/json" "fmt" "time" @@ -66,7 +68,7 @@ func BulkQuery[T any](ctx context.Context, label string, tx Queryer, sql string, func BulkQueryBatches(ctx context.Context, label string, tx Queryer, sql string, batchSize int, structs []interface{}) error { start := time.Now() - batches := chunkSlice(structs, batchSize) + batches := ChunkSlice(structs, batchSize) for i, batch := range batches { err := dbutil.BulkQuery(ctx, tx, sql, batch) if err != nil 
{ @@ -79,7 +81,7 @@ func BulkQueryBatches(ctx context.Context, label string, tx Queryer, sql string, return nil } -func chunkSlice[T any](slice []T, size int) [][]T { +func ChunkSlice[T any](slice []T, size int) [][]T { chunks := make([][]T, 0, len(slice)/size+1) for i := 0; i < len(slice); i += size { @@ -91,3 +93,27 @@ func chunkSlice[T any](slice []T, size int) [][]T { } return chunks } + +// JSONMap is a generic map which is written to the database as JSON. For nullable fields use null.Map. +type JSONMap map[string]any + +// Scan implements the Scanner interface +func (m *JSONMap) Scan(value any) error { + var raw []byte + switch typed := value.(type) { + case string: + raw = []byte(typed) + case []byte: + raw = typed + default: + return fmt.Errorf("unable to scan %T as map", value) + } + + if err := json.Unmarshal(raw, m); err != nil { + return err + } + return nil +} + +// Value implements the Valuer interface +func (m JSONMap) Value() (driver.Value, error) { return json.Marshal(m) } diff --git a/core/models/utils_test.go b/core/models/utils_test.go index 4e7b4ea58..c8934e25e 100644 --- a/core/models/utils_test.go +++ b/core/models/utils_test.go @@ -11,11 +11,11 @@ import ( ) func TestBulkQueryBatches(t *testing.T) { - ctx, _, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetAll) - db.MustExec(`CREATE TABLE foo (id serial NOT NULL PRIMARY KEY, name TEXT, age INT)`) + rt.DB.MustExec(`CREATE TABLE foo (id serial NOT NULL PRIMARY KEY, name TEXT, age INT)`) type foo struct { ID int `db:"id"` @@ -26,18 +26,18 @@ func TestBulkQueryBatches(t *testing.T) { sql := `INSERT INTO foo (name, age) VALUES(:name, :age) RETURNING id` // noop with zero structs - err := models.BulkQueryBatches(ctx, "foo inserts", db, sql, 10, nil) + err := models.BulkQueryBatches(ctx, "foo inserts", rt.DB, sql, 10, nil) assert.NoError(t, err) // test when structs fit into one batch foo1 := &foo{Name: "A", Age: 30} foo2 := &foo{Name: "B", Age: 31} - err 
= models.BulkQueryBatches(ctx, "foo inserts", db, sql, 2, []interface{}{foo1, foo2}) + err = models.BulkQueryBatches(ctx, "foo inserts", rt.DB, sql, 2, []interface{}{foo1, foo2}) assert.NoError(t, err) assert.Equal(t, 1, foo1.ID) assert.Equal(t, 2, foo2.ID) - assertdb.Query(t, db, `SELECT count(*) FROM foo WHERE name = 'A' AND age = 30`).Returns(1) - assertdb.Query(t, db, `SELECT count(*) FROM foo WHERE name = 'B' AND age = 31`).Returns(1) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM foo WHERE name = 'A' AND age = 30`).Returns(1) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM foo WHERE name = 'B' AND age = 31`).Returns(1) // test when multiple batches are required foo3 := &foo{Name: "C", Age: 32} @@ -45,17 +45,17 @@ func TestBulkQueryBatches(t *testing.T) { foo5 := &foo{Name: "E", Age: 34} foo6 := &foo{Name: "F", Age: 35} foo7 := &foo{Name: "G", Age: 36} - err = models.BulkQueryBatches(ctx, "foo inserts", db, sql, 2, []interface{}{foo3, foo4, foo5, foo6, foo7}) + err = models.BulkQueryBatches(ctx, "foo inserts", rt.DB, sql, 2, []interface{}{foo3, foo4, foo5, foo6, foo7}) assert.NoError(t, err) assert.Equal(t, 3, foo3.ID) assert.Equal(t, 4, foo4.ID) assert.Equal(t, 5, foo5.ID) assert.Equal(t, 6, foo6.ID) assert.Equal(t, 7, foo7.ID) - assertdb.Query(t, db, `SELECT count(*) FROM foo WHERE name = 'C' AND age = 32`).Returns(1) - assertdb.Query(t, db, `SELECT count(*) FROM foo WHERE name = 'D' AND age = 33`).Returns(1) - assertdb.Query(t, db, `SELECT count(*) FROM foo WHERE name = 'E' AND age = 34`).Returns(1) - assertdb.Query(t, db, `SELECT count(*) FROM foo WHERE name = 'F' AND age = 35`).Returns(1) - assertdb.Query(t, db, `SELECT count(*) FROM foo WHERE name = 'G' AND age = 36`).Returns(1) - assertdb.Query(t, db, `SELECT count(*) FROM foo `).Returns(7) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM foo WHERE name = 'C' AND age = 32`).Returns(1) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM foo WHERE name = 'D' AND age = 33`).Returns(1) + assertdb.Query(t, 
rt.DB, `SELECT count(*) FROM foo WHERE name = 'E' AND age = 34`).Returns(1) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM foo WHERE name = 'F' AND age = 35`).Returns(1) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM foo WHERE name = 'G' AND age = 36`).Returns(1) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM foo `).Returns(7) } diff --git a/core/models/webhook_event_test.go b/core/models/webhook_event_test.go index 761a5ebcd..c0acad9ec 100644 --- a/core/models/webhook_event_test.go +++ b/core/models/webhook_event_test.go @@ -13,11 +13,11 @@ import ( ) func TestWebhookEvents(t *testing.T) { - ctx, _, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() // create a resthook to insert against var resthookID models.ResthookID - db.Get(&resthookID, `INSERT INTO api_resthook(is_active, slug, org_id, created_on, modified_on, created_by_id, modified_by_id) VALUES(TRUE, 'foo', 1, NOW(), NOW(), 1, 1) RETURNING id;`) + rt.DB.Get(&resthookID, `INSERT INTO api_resthook(is_active, slug, org_id, created_on, modified_on, created_by_id, modified_by_id) VALUES(TRUE, 'foo', 1, NOW(), NOW(), 1, 1) RETURNING id;`) tcs := []struct { OrgID models.OrgID @@ -29,10 +29,10 @@ func TestWebhookEvents(t *testing.T) { for _, tc := range tcs { e := models.NewWebhookEvent(tc.OrgID, tc.ResthookID, tc.Data, time.Now()) - err := models.InsertWebhookEvents(ctx, db, []*models.WebhookEvent{e}) + err := models.InsertWebhookEvents(ctx, rt.DB, []*models.WebhookEvent{e}) assert.NoError(t, err) assert.NotZero(t, e.ID()) - assertdb.Query(t, db, `SELECT count(*) FROM api_webhookevent WHERE org_id = $1 AND resthook_id = $2 AND data = $3`, tc.OrgID, tc.ResthookID, tc.Data).Returns(1) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM api_webhookevent WHERE org_id = $1 AND resthook_id = $2 AND data = $3`, tc.OrgID, tc.ResthookID, tc.Data).Returns(1) } } diff --git a/core/msgio/android_test.go b/core/msgio/android_test.go index 13d3fb263..1e2236792 100644 --- a/core/msgio/android_test.go +++ 
b/core/msgio/android_test.go @@ -63,7 +63,7 @@ func newMockFCMEndpoint(tokens ...string) *MockFCMEndpoint { } func TestSyncAndroidChannels(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() mockFCM := newMockFCMEndpoint("FCMID3") defer mockFCM.Stop() @@ -71,9 +71,9 @@ func TestSyncAndroidChannels(t *testing.T) { fc := mockFCM.Client("FCMKEY123") // create some Android channels - testChannel1 := testdata.InsertChannel(db, testdata.Org1, "A", "Android 1", []string{"tel"}, "SR", map[string]interface{}{"FCM_ID": ""}) // no FCM ID - testChannel2 := testdata.InsertChannel(db, testdata.Org1, "A", "Android 2", []string{"tel"}, "SR", map[string]interface{}{"FCM_ID": "FCMID2"}) // invalid FCM ID - testChannel3 := testdata.InsertChannel(db, testdata.Org1, "A", "Android 3", []string{"tel"}, "SR", map[string]interface{}{"FCM_ID": "FCMID3"}) // valid FCM ID + testChannel1 := testdata.InsertChannel(rt, testdata.Org1, "A", "Android 1", []string{"tel"}, "SR", map[string]interface{}{"FCM_ID": ""}) // no FCM ID + testChannel2 := testdata.InsertChannel(rt, testdata.Org1, "A", "Android 2", []string{"tel"}, "SR", map[string]interface{}{"FCM_ID": "FCMID2"}) // invalid FCM ID + testChannel3 := testdata.InsertChannel(rt, testdata.Org1, "A", "Android 3", []string{"tel"}, "SR", map[string]interface{}{"FCM_ID": "FCMID3"}) // valid FCM ID oa, err := models.GetOrgAssetsWithRefresh(ctx, rt, testdata.Org1.ID, models.RefreshChannels) require.NoError(t, err) @@ -95,7 +95,7 @@ func TestSyncAndroidChannels(t *testing.T) { } func TestCreateFCMClient(t *testing.T) { - _, rt, _, _ := testsuite.Get() + _, rt := testsuite.Runtime() rt.Config.FCMKey = "1234" diff --git a/core/msgio/courier.go b/core/msgio/courier.go index 254b1a51b..d76140000 100644 --- a/core/msgio/courier.go +++ b/core/msgio/courier.go @@ -15,6 +15,8 @@ import ( "github.com/nyaruka/gocommon/jsonx" "github.com/nyaruka/gocommon/urns" "github.com/nyaruka/goflow/assets" + "github.com/nyaruka/goflow/envs" + 
"github.com/nyaruka/goflow/flows" "github.com/nyaruka/goflow/utils" "github.com/nyaruka/mailroom/core/models" "github.com/nyaruka/mailroom/runtime" @@ -23,7 +25,7 @@ import ( ) var courierHttpClient = &http.Client{ - Timeout: 5 * time.Second, + Timeout: 1 * time.Minute, // big so we let courier determine when things timeout } const ( @@ -31,6 +33,102 @@ const ( highPriority = 1 ) +type MsgOrigin string + +const ( + MsgOriginFlow MsgOrigin = "flow" + MsgOriginBroadcast MsgOrigin = "broadcast" + MsgOriginTicket MsgOrigin = "ticket" + MsgOriginChat MsgOrigin = "chat" +) + +// Msg is the format of a message queued to courier +type Msg struct { + ID flows.MsgID `json:"id"` + UUID flows.MsgUUID `json:"uuid"` + OrgID models.OrgID `json:"org_id"` + Origin MsgOrigin `json:"origin"` + Text string `json:"text"` + Attachments []utils.Attachment `json:"attachments,omitempty"` + QuickReplies []string `json:"quick_replies,omitempty"` + Locale envs.Locale `json:"locale,omitempty"` + HighPriority bool `json:"high_priority"` + MsgCount int `json:"tps_cost"` + CreatedOn time.Time `json:"created_on"` + ChannelUUID assets.ChannelUUID `json:"channel_uuid"` + ContactID models.ContactID `json:"contact_id"` + ContactURNID models.URNID `json:"contact_urn_id"` + URN urns.URN `json:"urn"` + URNAuth string `json:"urn_auth,omitempty"` + Metadata map[string]any `json:"metadata,omitempty"` + Flow *assets.FlowReference `json:"flow,omitempty"` + ResponseToExternalID string `json:"response_to_external_id,omitempty"` + IsResend bool `json:"is_resend,omitempty"` + + ContactLastSeenOn *time.Time `json:"contact_last_seen_on,omitempty"` + SessionID models.SessionID `json:"session_id,omitempty"` + SessionStatus models.SessionStatus `json:"session_status,omitempty"` + SessionWaitStartedOn *time.Time `json:"session_wait_started_on,omitempty"` + SessionTimeout int `json:"session_timeout,omitempty"` +} + +// NewCourierMsg creates a courier message in the format it's expecting to be queued +func 
NewCourierMsg(oa *models.OrgAssets, m *models.Msg, channel *models.Channel) (*Msg, error) { + msg := &Msg{ + ID: m.ID(), + UUID: m.UUID(), + OrgID: m.OrgID(), + Text: m.Text(), + Attachments: m.Attachments(), + QuickReplies: m.QuickReplies(), + Locale: m.Locale(), + HighPriority: m.HighPriority(), + MsgCount: m.MsgCount(), + CreatedOn: m.CreatedOn(), + ChannelUUID: channel.UUID(), + ContactID: m.ContactID(), + ContactURNID: *m.ContactURNID(), + URN: m.URN(), + URNAuth: string(m.URNAuth()), + Metadata: m.Metadata(), + IsResend: m.IsResend, + } + + if m.FlowID() != models.NilFlowID { + msg.Origin = MsgOriginFlow + flow, _ := oa.FlowByID(m.FlowID()) // always a chance flow no longer exists + if flow != nil { + msg.Flow = flow.Reference() + } + } else if m.BroadcastID() != models.NilBroadcastID { + msg.Origin = MsgOriginBroadcast + } else if m.TicketID() != models.NilTicketID { + msg.Origin = MsgOriginTicket + } else { + msg.Origin = MsgOriginChat + } + + if m.Contact != nil { + msg.ContactLastSeenOn = m.Contact.LastSeenOn() + } + + if m.Session != nil { + msg.SessionID = m.Session.ID() + msg.SessionStatus = m.Session.Status() + msg.ResponseToExternalID = string(m.Session.IncomingMsgExternalID()) + + if m.LastInSprint && m.Session.Timeout() != nil && m.Session.WaitStartedOn() != nil { + // These fields are set on the last outgoing message in a session's sprint. In the case + // of the session being at a wait with a timeout then the timeout will be set. It is up to + // Courier to update the session's timeout appropriately after sending the message. 
+ msg.SessionWaitStartedOn = m.Session.WaitStartedOn() + msg.SessionTimeout = int(*m.Session.Timeout() / time.Second) + } + } + + return msg, nil +} + var queuePushScript = redis.NewScript(6, ` -- KEYS: [QueueType, QueueName, TPS, Priority, Items, EpochSecs] local queueType, queueName, tps, priority, items, epochSecs = KEYS[1], KEYS[2], tonumber(KEYS[3]), KEYS[4], KEYS[5], KEYS[6] @@ -61,11 +159,21 @@ end `) // PushCourierBatch pushes a batch of messages for a single contact and channel onto the appropriate courier queue -func PushCourierBatch(rc redis.Conn, ch *models.Channel, batch []*models.Msg, timestamp string) error { +func PushCourierBatch(rc redis.Conn, oa *models.OrgAssets, ch *models.Channel, msgs []*models.Msg, timestamp string) error { priority := bulkPriority - if batch[0].HighPriority() { + if msgs[0].HighPriority() { priority = highPriority } + + batch := make([]*Msg, len(msgs)) + for i, m := range msgs { + var err error + batch[i], err = NewCourierMsg(oa, m, ch) + if err != nil { + return errors.Wrap(err, "error creating courier message") + } + } + batchJSON := jsonx.MustMarshal(batch) _, err := queuePushScript.Do(rc, "msgs", ch.UUID(), ch.TPS(), priority, batchJSON, timestamp) @@ -73,7 +181,7 @@ func PushCourierBatch(rc redis.Conn, ch *models.Channel, batch []*models.Msg, ti } // QueueCourierMessages queues messages for a single contact to Courier -func QueueCourierMessages(rc redis.Conn, contactID models.ContactID, msgs []*models.Msg) error { +func QueueCourierMessages(rc redis.Conn, oa *models.OrgAssets, contactID models.ContactID, channel *models.Channel, msgs []*models.Msg) error { if len(msgs) == 0 { return nil } @@ -83,44 +191,36 @@ func QueueCourierMessages(rc redis.Conn, contactID models.ContactID, msgs []*mod now := dates.Now() epochSeconds := strconv.FormatFloat(float64(now.UnixNano()/int64(time.Microsecond))/float64(1000000), 'f', 6, 64) - // we batch msgs by channel uuid + // we batch msgs by priority batch := make([]*models.Msg, 0, 
len(msgs)) - currentChannel := msgs[0].Channel() + currentPriority := msgs[0].HighPriority() // commits our batch to redis commitBatch := func() error { if len(batch) > 0 { start := time.Now() - err := PushCourierBatch(rc, currentChannel, batch, epochSeconds) + err := PushCourierBatch(rc, oa, channel, batch, epochSeconds) if err != nil { return err } - logrus.WithFields(logrus.Fields{ - "msgs": len(batch), - "contact_id": contactID, - "channel_uuid": currentChannel.UUID(), - "elapsed": time.Since(start), - }).Info("msgs queued to courier") + logrus.WithFields(logrus.Fields{"msgs": len(batch), "contact_id": contactID, "channel_uuid": channel.UUID(), "elapsed": time.Since(start)}).Debug("msgs queued to courier") } return nil } for _, msg := range msgs { // sanity check the state of the msg we're about to queue... - assert(msg.Channel() != nil && msg.ChannelUUID() != "", "can't queue a message to courier without a channel") - assert(msg.Channel().Type() != models.ChannelTypeAndroid, "can't queue an android message to courier") assert(msg.URN() != urns.NilURN && msg.ContactURNID() != nil, "can't queue a message to courier without a URN") - // if this msg is the same channel and priority, add to current batch, otherwise start new batch - if msg.Channel() == currentChannel && msg.HighPriority() == currentPriority { + // if this msg is the same priority, add to current batch, otherwise start new batch + if msg.HighPriority() == currentPriority { batch = append(batch, msg) } else { if err := commitBatch(); err != nil { return err } - currentChannel = msg.Channel() currentPriority = msg.HighPriority() batch = []*models.Msg{msg} } diff --git a/core/msgio/courier_test.go b/core/msgio/courier_test.go index 0ce49e429..739465bc2 100644 --- a/core/msgio/courier_test.go +++ b/core/msgio/courier_test.go @@ -1,35 +1,186 @@ package msgio_test import ( + "context" "encoding/json" + "fmt" "testing" + "time" "github.com/gomodule/redigo/redis" "github.com/nyaruka/gocommon/jsonx" + 
"github.com/nyaruka/gocommon/urns" + "github.com/nyaruka/goflow/assets" + "github.com/nyaruka/goflow/envs" + "github.com/nyaruka/goflow/flows" + "github.com/nyaruka/goflow/test" + "github.com/nyaruka/goflow/utils" "github.com/nyaruka/mailroom/core/models" "github.com/nyaruka/mailroom/core/msgio" + "github.com/nyaruka/mailroom/runtime" "github.com/nyaruka/mailroom/testsuite" "github.com/nyaruka/mailroom/testsuite/testdata" - + "github.com/nyaruka/null/v2" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) +func TestNewCourierMsg(t *testing.T) { + ctx, rt := testsuite.Runtime() + + defer testsuite.Reset(testsuite.ResetData) + + oa, err := models.GetOrgAssets(ctx, rt, testdata.Org1.ID) + require.NoError(t, err) + require.False(t, oa.Org().Suspended()) + + _, cathy := testdata.Cathy.Load(rt, oa) + + channel := oa.ChannelByUUID(testdata.TwilioChannel.UUID) + flow, _ := oa.FlowByID(testdata.Favorites.ID) + urn := urns.URN(fmt.Sprintf("tel:+250700000001?id=%d", testdata.Cathy.URNID)) + flowMsg1 := flows.NewMsgOut( + urn, + assets.NewChannelReference(testdata.TwilioChannel.UUID, "Test Channel"), + "Hi there", + []utils.Attachment{utils.Attachment("image/jpeg:https://dl-foo.com/image.jpg")}, + []string{"yes", "no"}, + flows.NewMsgTemplating(assets.NewTemplateReference("4474d39c-ac2c-486d-bceb-8a774a515299", "tpl"), []string{"name"}, "tpls"), + flows.MsgTopicPurchase, + envs.Locale(`eng-US`), + flows.NilUnsendableReason, + ) + + // create a non-priority flow message.. i.e. 
the session isn't responding to an incoming message + testdata.InsertWaitingSession(rt, testdata.Org1, testdata.Cathy, models.FlowTypeMessaging, testdata.Favorites, models.NilCallID, time.Now(), time.Now(), false, nil) + session, err := models.FindWaitingSessionForContact(ctx, rt.DB, rt.SessionStorage, oa, models.FlowTypeMessaging, cathy) + require.NoError(t, err) + + msg1, err := models.NewOutgoingFlowMsg(rt, oa.Org(), channel, session, flow, flowMsg1, time.Date(2021, 11, 9, 14, 3, 30, 0, time.UTC)) + require.NoError(t, err) + + createAndAssertCourierMsg(t, ctx, rt, oa, msg1, fmt.Sprintf(`{ + "attachments": [ + "image/jpeg:https://dl-foo.com/image.jpg" + ], + "channel_uuid": "74729f45-7f29-4868-9dc4-90e491e3c7d8", + "contact_id": 10000, + "contact_urn_id": 10000, + "created_on": "2021-11-09T14:03:30Z", + "flow": {"uuid": "9de3663f-c5c5-4c92-9f45-ecbc09abcc85", "name": "Favorites"}, + "high_priority": false, + "id": 1, + "locale": "eng-US", + "metadata": { + "templating": { + "namespace": "tpls", + "template": {"name": "tpl", "uuid": "4474d39c-ac2c-486d-bceb-8a774a515299"}, + "variables": ["name"] + }, + "topic": "purchase" + }, + "org_id": 1, + "origin": "flow", + "quick_replies": [ + "yes", + "no" + ], + "session_id": %d, + "session_status": "W", + "text": "Hi there", + "tps_cost": 2, + "urn": "tel:+250700000001?id=10000", + "uuid": "%s" + }`, session.ID(), msg1.UUID())) + + // create a priority flow message.. i.e. 
the session is responding to an incoming message + cathy.SetLastSeenOn(time.Date(2023, 4, 20, 10, 15, 0, 0, time.UTC)) + flowMsg2 := flows.NewMsgOut( + urn, + assets.NewChannelReference(testdata.TwilioChannel.UUID, "Test Channel"), + "Hi there", + nil, nil, nil, + flows.NilMsgTopic, + envs.NilLocale, + flows.NilUnsendableReason, + ) + in1 := testdata.InsertIncomingMsg(rt, testdata.Org1, testdata.TwilioChannel, testdata.Cathy, "test", models.MsgStatusHandled) + session.SetIncomingMsg(models.MsgID(in1.ID()), null.String("EX123")) + msg2, err := models.NewOutgoingFlowMsg(rt, oa.Org(), channel, session, flow, flowMsg2, time.Date(2021, 11, 9, 14, 3, 30, 0, time.UTC)) + require.NoError(t, err) + + createAndAssertCourierMsg(t, ctx, rt, oa, msg2, fmt.Sprintf(`{ + "channel_uuid": "74729f45-7f29-4868-9dc4-90e491e3c7d8", + "contact_id": 10000, + "contact_last_seen_on": "2023-04-20T10:15:00Z", + "contact_urn_id": 10000, + "created_on": "2021-11-09T14:03:30Z", + "flow": {"uuid": "9de3663f-c5c5-4c92-9f45-ecbc09abcc85", "name": "Favorites"}, + "response_to_external_id": "EX123", + "high_priority": true, + "id": 3, + "org_id": 1, + "origin": "flow", + "session_id": %d, + "session_status": "W", + "text": "Hi there", + "tps_cost": 1, + "urn": "tel:+250700000001?id=10000", + "uuid": "%s" + }`, session.ID(), msg2.UUID())) + + // try a broadcast message which won't have session and flow fields set and won't be high priority + bcastID := testdata.InsertBroadcast(rt, testdata.Org1, `eng`, map[envs.Language]string{`eng`: "Blast"}, models.NilScheduleID, []*testdata.Contact{testdata.Cathy}, nil) + bcastMsg1 := flows.NewMsgOut(urn, assets.NewChannelReference(testdata.TwilioChannel.UUID, "Test Channel"), "Blast", nil, nil, nil, flows.NilMsgTopic, envs.NilLocale, flows.NilUnsendableReason) + msg3, err := models.NewOutgoingBroadcastMsg(rt, oa.Org(), channel, cathy, bcastMsg1, time.Date(2021, 11, 9, 14, 3, 30, 0, time.UTC), &models.BroadcastBatch{BroadcastID: bcastID, CreatedByID: 
testdata.Admin.ID}) + require.NoError(t, err) + + createAndAssertCourierMsg(t, ctx, rt, oa, msg3, fmt.Sprintf(`{ + "channel_uuid": "74729f45-7f29-4868-9dc4-90e491e3c7d8", + "contact_id": 10000, + "contact_last_seen_on": "2023-04-20T10:15:00Z", + "contact_urn_id": 10000, + "created_on": "2021-11-09T14:03:30Z", + "high_priority": false, + "id": 4, + "org_id": 1, + "origin": "broadcast", + "text": "Blast", + "tps_cost": 1, + "urn": "tel:+250700000001?id=10000", + "uuid": "%s" + }`, msg3.UUID())) +} + +func createAndAssertCourierMsg(t *testing.T, ctx context.Context, rt *runtime.Runtime, oa *models.OrgAssets, m *models.Msg, expectedJSON string) { + // insert to db so that it gets an id + err := models.InsertMessages(ctx, rt.DB, []*models.Msg{m}) + require.NoError(t, err) + + channel := oa.ChannelByID(m.ChannelID()) + + cmsg3, err := msgio.NewCourierMsg(oa, m, channel) + assert.NoError(t, err) + + marshaled := jsonx.MustMarshal(cmsg3) + + test.AssertEqualJSON(t, []byte(expectedJSON), marshaled) +} + func TestQueueCourierMessages(t *testing.T) { - ctx, rt, db, rp := testsuite.Get() - rc := rp.Get() + ctx, rt := testsuite.Runtime() + rc := rt.RP.Get() defer rc.Close() defer testsuite.Reset(testsuite.ResetData | testsuite.ResetRedis) - // create an Andoid channel - androidChannel := testdata.InsertChannel(db, testdata.Org1, "A", "Android 1", []string{"tel"}, "SR", map[string]interface{}{"FCM_ID": "FCMID"}) - oa, err := models.GetOrgAssetsWithRefresh(ctx, rt, testdata.Org1.ID, models.RefreshOrg|models.RefreshChannels) require.NoError(t, err) + twilio := oa.ChannelByUUID(testdata.TwilioChannel.UUID) + // noop if no messages provided - msgio.QueueCourierMessages(rc, testdata.Cathy.ID, []*models.Msg{}) + msgio.QueueCourierMessages(rc, oa, testdata.Cathy.ID, twilio, []*models.Msg{}) testsuite.AssertCourierQueues(t, map[string][]int{}) // queue 3 messages for Cathy.. 
@@ -37,33 +188,19 @@ func TestQueueCourierMessages(t *testing.T) { (&msgSpec{Channel: testdata.TwilioChannel, Contact: testdata.Cathy}).createMsg(t, rt, oa), (&msgSpec{Channel: testdata.TwilioChannel, Contact: testdata.Cathy}).createMsg(t, rt, oa), (&msgSpec{Channel: testdata.TwilioChannel, Contact: testdata.Cathy, HighPriority: true}).createMsg(t, rt, oa), - (&msgSpec{Channel: testdata.VonageChannel, Contact: testdata.Cathy}).createMsg(t, rt, oa), } - msgio.QueueCourierMessages(rc, testdata.Cathy.ID, msgs) + msgio.QueueCourierMessages(rc, oa, testdata.Cathy.ID, twilio, msgs) testsuite.AssertCourierQueues(t, map[string][]int{ "msgs:74729f45-7f29-4868-9dc4-90e491e3c7d8|10/0": {2}, // twilio, bulk priority "msgs:74729f45-7f29-4868-9dc4-90e491e3c7d8|10/1": {1}, // twilio, high priority - "msgs:19012bfd-3ce3-4cae-9bb9-76cf92c73d49|10/0": {1}, // vonage, bulk priority - }) - - // check that trying to queue a message without a channel will panic - assert.Panics(t, func() { - ms := msgSpec{Channel: nil, Contact: testdata.Cathy} - msgio.QueueCourierMessages(rc, testdata.Cathy.ID, []*models.Msg{ms.createMsg(t, rt, oa)}) - }) - - // check that trying to queue an Android message will panic - assert.Panics(t, func() { - ms := msgSpec{Channel: androidChannel, Contact: testdata.Cathy} - msgio.QueueCourierMessages(rc, testdata.Cathy.ID, []*models.Msg{ms.createMsg(t, rt, oa)}) }) } func TestClearChannelCourierQueue(t *testing.T) { - ctx, rt, _, rp := testsuite.Get() - rc := rp.Get() + ctx, rt := testsuite.Runtime() + rc := rt.RP.Get() defer rc.Close() defer testsuite.Reset(testsuite.ResetData | testsuite.ResetRedis) @@ -71,15 +208,20 @@ func TestClearChannelCourierQueue(t *testing.T) { oa, err := models.GetOrgAssetsWithRefresh(ctx, rt, testdata.Org1.ID, models.RefreshOrg|models.RefreshChannels) require.NoError(t, err) - // queue 3 messages for Cathy.. 
- msgs := []*models.Msg{ + twilio := oa.ChannelByUUID(testdata.TwilioChannel.UUID) + vonage := oa.ChannelByUUID(testdata.VonageChannel.UUID) + + // queue 3 Twilio messages for Cathy.. + msgio.QueueCourierMessages(rc, oa, testdata.Cathy.ID, twilio, []*models.Msg{ (&msgSpec{Channel: testdata.TwilioChannel, Contact: testdata.Cathy}).createMsg(t, rt, oa), (&msgSpec{Channel: testdata.TwilioChannel, Contact: testdata.Cathy}).createMsg(t, rt, oa), (&msgSpec{Channel: testdata.TwilioChannel, Contact: testdata.Cathy, HighPriority: true}).createMsg(t, rt, oa), - (&msgSpec{Channel: testdata.VonageChannel, Contact: testdata.Cathy}).createMsg(t, rt, oa), - } + }) - msgio.QueueCourierMessages(rc, testdata.Cathy.ID, msgs) + // and a Vonage message + msgio.QueueCourierMessages(rc, oa, testdata.Cathy.ID, vonage, []*models.Msg{ + (&msgSpec{Channel: testdata.VonageChannel, Contact: testdata.Cathy}).createMsg(t, rt, oa), + }) testsuite.AssertCourierQueues(t, map[string][]int{ "msgs:74729f45-7f29-4868-9dc4-90e491e3c7d8|10/0": {2}, // twilio, bulk priority @@ -101,8 +243,8 @@ func TestClearChannelCourierQueue(t *testing.T) { } func TestPushCourierBatch(t *testing.T) { - ctx, rt, _, rp := testsuite.Get() - rc := rp.Get() + ctx, rt := testsuite.Runtime() + rc := rt.RP.Get() defer rc.Close() defer testsuite.Reset(testsuite.ResetData | testsuite.ResetRedis) @@ -115,7 +257,7 @@ func TestPushCourierBatch(t *testing.T) { msg1 := (&msgSpec{Channel: testdata.TwilioChannel, Contact: testdata.Cathy}).createMsg(t, rt, oa) msg2 := (&msgSpec{Channel: testdata.TwilioChannel, Contact: testdata.Cathy}).createMsg(t, rt, oa) - err = msgio.PushCourierBatch(rc, channel, []*models.Msg{msg1, msg2}, "1636557205.123456") + err = msgio.PushCourierBatch(rc, oa, channel, []*models.Msg{msg1, msg2}, "1636557205.123456") require.NoError(t, err) // check that channel has been added to active list @@ -142,7 +284,7 @@ func TestPushCourierBatch(t *testing.T) { msg3 := (&msgSpec{Channel: testdata.TwilioChannel, Contact: 
testdata.Cathy}).createMsg(t, rt, oa) - err = msgio.PushCourierBatch(rc, channel, []*models.Msg{msg3}, "1636557205.234567") + err = msgio.PushCourierBatch(rc, oa, channel, []*models.Msg{msg3}, "1636557205.234567") require.NoError(t, err) queued, err = redis.ByteSlices(rc.Do("ZRANGE", "msgs:74729f45-7f29-4868-9dc4-90e491e3c7d8|10/0", 0, -1)) @@ -155,7 +297,7 @@ func TestPushCourierBatch(t *testing.T) { msg4 := (&msgSpec{Channel: testdata.TwilioChannel, Contact: testdata.Cathy}).createMsg(t, rt, oa) - err = msgio.PushCourierBatch(rc, channel, []*models.Msg{msg4}, "1636557205.345678") + err = msgio.PushCourierBatch(rc, oa, channel, []*models.Msg{msg4}, "1636557205.345678") require.NoError(t, err) // check that channel has *not* been added to active list diff --git a/core/msgio/send.go b/core/msgio/send.go index f7894154e..67739d88b 100644 --- a/core/msgio/send.go +++ b/core/msgio/send.go @@ -6,13 +6,19 @@ import ( "github.com/edganiukov/fcm" "github.com/nyaruka/mailroom/core/models" "github.com/nyaruka/mailroom/runtime" + "github.com/pkg/errors" "github.com/sirupsen/logrus" ) +type contactAndChannel struct { + contactID models.ContactID + channel *models.Channel +} + // SendMessages tries to send the given messages via Courier or Android syncing -func SendMessages(ctx context.Context, rt *runtime.Runtime, tx models.Queryer, fc *fcm.Client, msgs []*models.Msg) { - // messages to be sent by courier, organized by contact - courierMsgs := make(map[models.ContactID][]*models.Msg, 100) +func SendMessages(ctx context.Context, rt *runtime.Runtime, tx models.Queryer, fc *fcm.Client, msgs []*models.Msg) error { + // messages to be sent by courier, organized by contact+channel + courierMsgs := make(map[contactAndChannel][]*models.Msg, 100) // android channels that need to be notified to sync androidChannels := make([]*models.Channel, 0, 5) @@ -28,7 +34,13 @@ func SendMessages(ctx context.Context, rt *runtime.Runtime, tx models.Queryer, f continue } - channel := msg.Channel() + 
oa, err := models.GetOrgAssets(ctx, rt, msg.OrgID()) + if err != nil { + return errors.Wrap(err, "error getting org assets") + } + + channel := oa.ChannelByID(msg.ChannelID()) + if channel != nil { if channel.Type() == models.ChannelTypeAndroid { if !androidChannelsSeen[channel] { @@ -36,7 +48,8 @@ func SendMessages(ctx context.Context, rt *runtime.Runtime, tx models.Queryer, f } androidChannelsSeen[channel] = true } else { - courierMsgs[msg.ContactID()] = append(courierMsgs[msg.ContactID()], msg) + cc := contactAndChannel{msg.ContactID(), channel} + courierMsgs[cc] = append(courierMsgs[cc], msg) } } else { pending = append(pending, msg) @@ -48,12 +61,17 @@ func SendMessages(ctx context.Context, rt *runtime.Runtime, tx models.Queryer, f rc := rt.RP.Get() defer rc.Close() - for contactID, contactMsgs := range courierMsgs { - err := QueueCourierMessages(rc, contactID, contactMsgs) + for cc, contactMsgs := range courierMsgs { + oa, err := models.GetOrgAssets(ctx, rt, cc.channel.OrgID()) + if err != nil { + return errors.Wrap(err, "error getting org assets") + } + + err = QueueCourierMessages(rc, oa, cc.contactID, cc.channel, contactMsgs) // not being able to queue a message isn't the end of the world, log but don't return an error if err != nil { - logrus.WithField("messages", contactMsgs).WithField("contact", contactID).WithError(err).Error("error queuing messages") + logrus.WithField("messages", contactMsgs).WithField("contact", cc.contactID).WithError(err).Error("error queuing messages") // in the case of errors we do want to change the messages back to pending however so they // get queued later. 
(for the common case messages are only inserted and queued, without a status update) @@ -70,14 +88,16 @@ func SendMessages(ctx context.Context, rt *runtime.Runtime, tx models.Queryer, f SyncAndroidChannels(fc, androidChannels) } - // any messages that didn't get sent should be moved back to pending (they are queued at creation to save an - // update in the common case) + // any messages that didn't get sent should be moved back to initializing(I) (they are queued(Q) at creation to + // save an update in the common case) if len(pending) > 0 { err := models.MarkMessagesForRequeuing(ctx, tx, pending) if err != nil { logrus.WithError(err).Error("error marking message as pending") } } + + return nil } func assert(c bool, m string) { diff --git a/core/msgio/send_test.go b/core/msgio/send_test.go index e3ff4d859..7951c5a6a 100644 --- a/core/msgio/send_test.go +++ b/core/msgio/send_test.go @@ -28,7 +28,7 @@ func (m *msgSpec) createMsg(t *testing.T, rt *runtime.Runtime, oa *models.OrgAss status = models.MsgStatusFailed } - flowMsg := testdata.InsertOutgoingMsg(rt.DB, testdata.Org1, m.Channel, m.Contact, "Hello", nil, status, m.HighPriority) + flowMsg := testdata.InsertOutgoingMsg(rt, testdata.Org1, m.Channel, m.Contact, "Hello", nil, status, m.HighPriority) msgs, err := models.GetMessagesByID(context.Background(), rt.DB, testdata.Org1.ID, models.DirectionOut, []models.MsgID{models.MsgID(flowMsg.ID())}) require.NoError(t, err) @@ -43,8 +43,8 @@ func (m *msgSpec) createMsg(t *testing.T, rt *runtime.Runtime, oa *models.OrgAss } func TestSendMessages(t *testing.T) { - ctx, rt, db, rp := testsuite.Get() - rc := rp.Get() + ctx, rt := testsuite.Runtime() + rc := rt.RP.Get() defer rc.Close() defer testsuite.Reset(testsuite.ResetData) @@ -55,9 +55,9 @@ func TestSendMessages(t *testing.T) { fc := mockFCM.Client("FCMKEY123") // create some Andoid channels - androidChannel1 := testdata.InsertChannel(db, testdata.Org1, "A", "Android 1", []string{"tel"}, "SR", 
map[string]interface{}{"FCM_ID": "FCMID1"}) - androidChannel2 := testdata.InsertChannel(db, testdata.Org1, "A", "Android 2", []string{"tel"}, "SR", map[string]interface{}{"FCM_ID": "FCMID2"}) - testdata.InsertChannel(db, testdata.Org1, "A", "Android 3", []string{"tel"}, "SR", map[string]interface{}{"FCM_ID": "FCMID3"}) + androidChannel1 := testdata.InsertChannel(rt, testdata.Org1, "A", "Android 1", []string{"tel"}, "SR", map[string]interface{}{"FCM_ID": "FCMID1"}) + androidChannel2 := testdata.InsertChannel(rt, testdata.Org1, "A", "Android 2", []string{"tel"}, "SR", map[string]interface{}{"FCM_ID": "FCMID2"}) + testdata.InsertChannel(rt, testdata.Org1, "A", "Android 3", []string{"tel"}, "SR", map[string]interface{}{"FCM_ID": "FCMID3"}) oa, err := models.GetOrgAssetsWithRefresh(ctx, rt, testdata.Org1.ID, models.RefreshChannels) require.NoError(t, err) @@ -67,14 +67,14 @@ func TestSendMessages(t *testing.T) { Msgs []msgSpec QueueSizes map[string][]int FCMTokensSynced []string - PendingMsgs int + UnqueuedMsgs int }{ { Description: "no messages", Msgs: []msgSpec{}, QueueSizes: map[string][]int{}, FCMTokensSynced: []string{}, - PendingMsgs: 0, + UnqueuedMsgs: 0, }, { Description: "2 messages for Courier, and 1 Android", @@ -102,7 +102,7 @@ func TestSendMessages(t *testing.T) { "msgs:74729f45-7f29-4868-9dc4-90e491e3c7d8|10/1": {1}, // 1 high priority message for Bob }, FCMTokensSynced: []string{"FCMID1"}, - PendingMsgs: 0, + UnqueuedMsgs: 0, }, { Description: "each Android channel synced once", @@ -122,7 +122,7 @@ func TestSendMessages(t *testing.T) { }, QueueSizes: map[string][]int{}, FCMTokensSynced: []string{"FCMID1", "FCMID2"}, - PendingMsgs: 0, + UnqueuedMsgs: 0, }, { Description: "messages with FAILED status ignored", @@ -135,7 +135,7 @@ func TestSendMessages(t *testing.T) { }, QueueSizes: map[string][]int{}, FCMTokensSynced: []string{}, - PendingMsgs: 0, + UnqueuedMsgs: 0, }, { Description: "messages without channels set to PENDING", @@ -147,7 +147,7 @@ func 
TestSendMessages(t *testing.T) { }, QueueSizes: map[string][]int{}, FCMTokensSynced: []string{}, - PendingMsgs: 1, + UnqueuedMsgs: 1, }, } @@ -160,7 +160,7 @@ func TestSendMessages(t *testing.T) { rc.Do("FLUSHDB") mockFCM.Messages = nil - msgio.SendMessages(ctx, rt, db, fc, msgs) + msgio.SendMessages(ctx, rt, rt.DB, fc, msgs) testsuite.AssertCourierQueues(t, tc.QueueSizes, "courier queue sizes mismatch in '%s'", tc.Description) @@ -172,6 +172,6 @@ func TestSendMessages(t *testing.T) { assert.Equal(t, tc.FCMTokensSynced, actualTokens, "FCM tokens mismatch in '%s'", tc.Description) - assertdb.Query(t, db, `SELECT count(*) FROM msgs_msg WHERE status = 'P'`).Returns(tc.PendingMsgs, `pending messages mismatch in '%s'`, tc.Description) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM msgs_msg WHERE status = 'I'`).Returns(tc.UnqueuedMsgs, `initializing messages mismatch in '%s'`, tc.Description) } } diff --git a/core/queue/queue.go b/core/queue/queue.go index 3ce06bfad..152efc330 100644 --- a/core/queue/queue.go +++ b/core/queue/queue.go @@ -40,24 +40,6 @@ const ( // HandlerQueue is our queue for message handling or other tasks related to just one contact HandlerQueue = "handler" - - // SendBroadcast is our type for sending a broadcast - SendBroadcast = "send_broadcast" - - // SendBroadcastBatch is our type for sending a broadcast batch - SendBroadcastBatch = "send_broadcast_batch" - - // HandleContactEvent is our task for event handling - HandleContactEvent = "handle_contact_event" - - // StartFlow is our task type to start a flow - StartFlow = "start_flow" - - // StartFlowBatch is our task for starting a flow batch - StartFlowBatch = "start_flow_batch" - - // StartIVRFlowBatch is our task for starting an ivr batch - StartIVRFlowBatch = "start_ivr_flow_batch" ) // Size returns the number of tasks for the passed in queue diff --git a/core/runner/runner.go b/core/runner/runner.go index 3ea70ad6f..df125aafe 100644 --- a/core/runner/runner.go +++ b/core/runner/runner.go @@ 
-2,22 +2,21 @@ package runner import ( "context" + "encoding/json" "time" "github.com/gomodule/redigo/redis" "github.com/jmoiron/sqlx" "github.com/nyaruka/gocommon/analytics" - "github.com/nyaruka/goflow/assets" "github.com/nyaruka/goflow/excellent/types" "github.com/nyaruka/goflow/flows" "github.com/nyaruka/goflow/flows/triggers" "github.com/nyaruka/mailroom/core/goflow" "github.com/nyaruka/mailroom/core/models" - "github.com/nyaruka/mailroom/core/queue" "github.com/nyaruka/mailroom/runtime" - "github.com/nyaruka/redisx" "github.com/pkg/errors" "github.com/sirupsen/logrus" + "golang.org/x/exp/maps" ) const ( @@ -31,14 +30,11 @@ var startTypeToOrigin = map[models.StartType]string{ models.StartTypeAPIZapier: "zapier", } +// TriggerBuilder defines the interface for building a trigger for the passed in contact +type TriggerBuilder func(contact *flows.Contact) flows.Trigger + // StartOptions define the various parameters that can be used when starting a flow type StartOptions struct { - // ExcludeInAFlow excludes contacts with waiting sessions which would otherwise have to be interrupted - ExcludeInAFlow bool - - // ExcludeStartedPreviously excludes contacts who have been in this flow previously (at least as long as we have runs for) - ExcludeStartedPreviously bool - // Interrupt should be true if we want to interrupt the flows runs for any contact started in this flow Interrupt bool @@ -49,18 +45,6 @@ type StartOptions struct { TriggerBuilder TriggerBuilder } -// NewStartOptions creates and returns the default start options to be used for flow starts -func NewStartOptions() *StartOptions { - return &StartOptions{ - ExcludeInAFlow: false, - ExcludeStartedPreviously: false, - Interrupt: true, - } -} - -// TriggerBuilder defines the interface for building a trigger for the passed in contact -type TriggerBuilder func(contact *flows.Contact) flows.Trigger - // ResumeFlow resumes the passed in session using the passed in session func ResumeFlow(ctx context.Context, rt 
*runtime.Runtime, oa *models.OrgAssets, session *models.Session, contact *models.Contact, resume flows.Resume, hook models.SessionCommitHook) (*models.Session, error) { start := time.Now() @@ -138,16 +122,13 @@ func ResumeFlow(ctx context.Context, rt *runtime.Runtime, oa *models.OrgAssets, } // StartFlowBatch starts the flow for the passed in org, contacts and flow -func StartFlowBatch( - ctx context.Context, rt *runtime.Runtime, - batch *models.FlowStartBatch) ([]*models.Session, error) { - +func StartFlowBatch(ctx context.Context, rt *runtime.Runtime, batch *models.FlowStartBatch) ([]*models.Session, error) { start := time.Now() // if this is our last start, no matter what try to set the start as complete as a last step - if batch.IsLast() { + if batch.IsLast { defer func() { - err := models.MarkStartComplete(ctx, rt.DB, batch.StartID()) + err := models.MarkStartComplete(ctx, rt.DB, batch.StartID) if err != nil { logrus.WithError(err).WithField("start_id", batch.StartID).Error("error marking start as complete") } @@ -155,53 +136,53 @@ func StartFlowBatch( } // create our org assets - oa, err := models.GetOrgAssets(ctx, rt, batch.OrgID()) + oa, err := models.GetOrgAssets(ctx, rt, batch.OrgID) if err != nil { - return nil, errors.Wrapf(err, "error creating assets for org: %d", batch.OrgID()) + return nil, errors.Wrapf(err, "error creating assets for org: %d", batch.OrgID) } // try to load our flow - flow, err := oa.FlowByID(batch.FlowID()) + flow, err := oa.FlowByID(batch.FlowID) if err == models.ErrNotFound { - logrus.WithField("flow_id", batch.FlowID()).Info("skipping flow start, flow no longer active or archived") + logrus.WithField("flow_id", batch.FlowID).Info("skipping flow start, flow no longer active or archived") return nil, nil } if err != nil { - return nil, errors.Wrapf(err, "error loading campaign flow: %d", batch.FlowID()) + return nil, errors.Wrapf(err, "error loading campaign flow: %d", batch.FlowID) } // get the user that created this flow start if 
there was one var flowUser *flows.User - if batch.CreatedByID() != models.NilUserID { - user := oa.UserByID(batch.CreatedByID()) + if batch.CreatedByID != models.NilUserID { + user := oa.UserByID(batch.CreatedByID) if user != nil { flowUser = oa.SessionAssets().Users().Get(user.Email()) } } var params *types.XObject - if len(batch.Extra()) > 0 { - params, err = types.ReadXObject(batch.Extra()) + if !batch.Params.IsNull() { + params, err = types.ReadXObject(batch.Params) if err != nil { - return nil, errors.Wrap(err, "unable to read JSON from flow start extra") + return nil, errors.Wrap(err, "unable to read JSON from flow start params") } } var history *flows.SessionHistory - if len(batch.SessionHistory()) > 0 { - history, err = models.ReadSessionHistory(batch.SessionHistory()) + if !batch.SessionHistory.IsNull() { + history, err = models.ReadSessionHistory(batch.SessionHistory) if err != nil { return nil, errors.Wrap(err, "unable to read JSON from flow start history") } } // whether engine allows some functions is based on whether there is more than one contact being started - batchStart := batch.TotalContacts() > 1 + batchStart := batch.TotalContacts > 1 // this will build our trigger for each contact started triggerBuilder := func(contact *flows.Contact) flows.Trigger { - if batch.ParentSummary() != nil { - tb := triggers.NewBuilder(oa.Env(), flow.Reference(), contact).FlowAction(history, batch.ParentSummary()) + if !batch.ParentSummary.IsNull() { + tb := triggers.NewBuilder(oa.Env(), flow.Reference(), contact).FlowAction(history, json.RawMessage(batch.ParentSummary)) if batchStart { tb = tb.AsBatch() } @@ -209,13 +190,13 @@ func StartFlowBatch( } tb := triggers.NewBuilder(oa.Env(), flow.Reference(), contact).Manual() - if batch.Extra() != nil { + if !batch.Params.IsNull() { tb = tb.WithParams(params) } if batchStart { tb = tb.AsBatch() } - return tb.WithUser(flowUser).WithOrigin(startTypeToOrigin[batch.StartType()]).Build() + return 
tb.WithUser(flowUser).WithOrigin(startTypeToOrigin[batch.StartType]).Build() } // before committing our runs we want to set the start they are associated with @@ -223,21 +204,19 @@ func StartFlowBatch( // for each run in our sessions, set the start id for _, s := range sessions { for _, r := range s.Runs() { - r.SetStartID(batch.StartID()) + r.SetStartID(batch.StartID) } } return nil } - // options for our flow start - options := NewStartOptions() - options.ExcludeStartedPreviously = batch.ExcludeStartedPreviously() - options.ExcludeInAFlow = batch.ExcludeInAFlow() - options.Interrupt = flow.FlowType().Interrupts() - options.TriggerBuilder = triggerBuilder - options.CommitHook = updateStartID + options := &StartOptions{ + Interrupt: flow.FlowType().Interrupts(), + TriggerBuilder: triggerBuilder, + CommitHook: updateStartID, + } - sessions, err := StartFlow(ctx, rt, oa, flow, batch.ContactIDs(), options) + sessions, err := StartFlow(ctx, rt, oa, flow, batch.ContactIDs, options) if err != nil { return nil, errors.Wrapf(err, "error starting flow batch") } @@ -249,285 +228,68 @@ func StartFlowBatch( return sessions, nil } -// FireCampaignEvents starts the flow for the passed in org, contact and flow -func FireCampaignEvents( - ctx context.Context, rt *runtime.Runtime, - orgID models.OrgID, fires []*models.EventFire, flowUUID assets.FlowUUID, - campaign *triggers.CampaignReference, eventUUID triggers.CampaignEventUUID) ([]models.ContactID, error) { - - if len(fires) == 0 { +// StartFlow runs the passed in flow for the passed in contacts +func StartFlow(ctx context.Context, rt *runtime.Runtime, oa *models.OrgAssets, flow *models.Flow, contactIDs []models.ContactID, options *StartOptions) ([]*models.Session, error) { + if len(contactIDs) == 0 { return nil, nil } + // we now need to grab locks for our contacts so that they are never in two starts or handles at the + // same time we try to grab locks for up to five minutes, but do it in batches where we wait for one + // 
second per contact to prevent deadlocks + sessions := make([]*models.Session, 0, len(contactIDs)) + remaining := contactIDs start := time.Now() - contactIDs := make([]models.ContactID, 0, len(fires)) - fireMap := make(map[models.ContactID]*models.EventFire, len(fires)) - skippedContacts := make(map[models.ContactID]*models.EventFire, len(fires)) - for _, f := range fires { - contactIDs = append(contactIDs, f.ContactID) - fireMap[f.ContactID] = f - skippedContacts[f.ContactID] = f - } - - // create our org assets - oa, err := models.GetOrgAssets(ctx, rt, orgID) - if err != nil { - return nil, errors.Wrapf(err, "error creating assets for org: %d", orgID) - } - - // find our actual event - dbEvent := oa.CampaignEventByID(fires[0].EventID) - - // no longer active? delete these event fires and return - if dbEvent == nil { - err := models.DeleteEventFires(ctx, rt.DB, fires) - if err != nil { - return nil, errors.Wrapf(err, "error deleting events for already fired events") - } - return nil, nil - } - - // try to load our flow - flow, err := oa.FlowByUUID(flowUUID) - if err == models.ErrNotFound { - err := models.DeleteEventFires(ctx, rt.DB, fires) - if err != nil { - return nil, errors.Wrapf(err, "error deleting events for archived or inactive flow") - } - return nil, nil - } - if err != nil { - return nil, errors.Wrapf(err, "error loading campaign flow: %s", flowUUID) - } - dbFlow := flow.(*models.Flow) - - // our start options are based on the start mode for our event - options := NewStartOptions() - switch dbEvent.StartMode() { - case models.StartModeInterrupt: - options.ExcludeInAFlow = false - options.ExcludeStartedPreviously = false - options.Interrupt = true - case models.StartModePassive: - options.ExcludeInAFlow = false - options.ExcludeStartedPreviously = false - options.Interrupt = false - case models.StartModeSkip: - options.ExcludeInAFlow = true - options.ExcludeStartedPreviously = false - options.Interrupt = true - default: - return nil, 
errors.Errorf("unknown start mode: %s", dbEvent.StartMode()) - } - - // if this is an ivr flow, we need to create a task to perform the start there - if dbFlow.FlowType() == models.FlowTypeVoice { - // Trigger our IVR flow start - err := TriggerIVRFlow(ctx, rt, oa.OrgID(), dbFlow.ID(), contactIDs, func(ctx context.Context, tx *sqlx.Tx) error { - return models.MarkEventsFired(ctx, tx, fires, time.Now(), models.FireResultFired) - }) - if err != nil { - return nil, errors.Wrapf(err, "error triggering ivr flow start") - } - return contactIDs, nil - } - - // our builder for the triggers that will be created for contacts - flowRef := assets.NewFlowReference(flow.UUID(), flow.Name()) - options.TriggerBuilder = func(contact *flows.Contact) flows.Trigger { - delete(skippedContacts, models.ContactID(contact.ID())) - return triggers.NewBuilder(oa.Env(), flowRef, contact).Campaign(campaign, eventUUID).Build() - } - - // this is our pre commit callback for our sessions, we'll mark the event fires associated - // with the passed in sessions as complete in the same transaction - fired := time.Now() - options.CommitHook = func(ctx context.Context, tx *sqlx.Tx, rp *redis.Pool, oa *models.OrgAssets, sessions []*models.Session) error { - // build up our list of event fire ids based on the session contact ids - fires := make([]*models.EventFire, 0, len(sessions)) - for _, s := range sessions { - fire, found := fireMap[s.ContactID()] - if !found { - return errors.Errorf("unable to find associated event fire for contact %d", s.Contact().ID()) - } - fires = append(fires, fire) - } - - // mark those events as fired - err := models.MarkEventsFired(ctx, tx, fires, fired, models.FireResultFired) - if err != nil { - return errors.Wrapf(err, "error marking events fired") - } - - // now build up our list of skipped contacts (no trigger was built for them) - fires = make([]*models.EventFire, 0, len(skippedContacts)) - for _, e := range skippedContacts { - fires = append(fires, e) - } - - // and 
mark those as skipped - err = models.MarkEventsFired(ctx, tx, fires, fired, models.FireResultSkipped) + for len(remaining) > 0 && time.Since(start) < time.Minute*5 { + ss, skipped, err := tryToStartWithLock(ctx, rt, oa, flow, remaining, options) if err != nil { - return errors.Wrapf(err, "error marking events skipped") + return nil, err } - // clear those out - skippedContacts = make(map[models.ContactID]*models.EventFire) - return nil - } - - sessions, err := StartFlow(ctx, rt, oa, dbFlow, contactIDs, options) - if err != nil { - logrus.WithField("contact_ids", contactIDs).WithError(err).Errorf("error starting flow for campaign event: %s", eventUUID) - } else { - // make sure any skipped contacts are marked as fired this can occur if all fires were skipped - fires := make([]*models.EventFire, 0, len(sessions)) - for _, e := range skippedContacts { - fires = append(fires, e) - } - err = models.MarkEventsFired(ctx, rt.DB, fires, fired, models.FireResultSkipped) - if err != nil { - logrus.WithField("fire_ids", fires).WithError(err).Errorf("error marking events as skipped: %s", eventUUID) - } + sessions = append(sessions, ss...) 
+ remaining = skipped // skipped are now our remaining } - // log both our total and average - analytics.Gauge("mr.campaign_event_elapsed", float64(time.Since(start))/float64(time.Second)) - analytics.Gauge("mr.campaign_event_count", float64(len(sessions))) - - // build the list of contacts actually started - startedContacts := make([]models.ContactID, len(sessions)) - for i := range sessions { - startedContacts[i] = sessions[i].ContactID() - } - return startedContacts, nil + return sessions, nil } -// StartFlow runs the passed in flow for the passed in contact -func StartFlow( - ctx context.Context, rt *runtime.Runtime, oa *models.OrgAssets, - flow *models.Flow, contactIDs []models.ContactID, options *StartOptions) ([]*models.Session, error) { - - if len(contactIDs) == 0 { - return nil, nil +// tries to start the given contacts, returning sessions for those we could, and the ids that were skipped because we +// couldn't get their locks +func tryToStartWithLock(ctx context.Context, rt *runtime.Runtime, oa *models.OrgAssets, flow *models.Flow, ids []models.ContactID, options *StartOptions) ([]*models.Session, []models.ContactID, error) { + // try to get locks for these contacts, waiting for up to a second for each contact + locks, skipped, err := models.LockContacts(ctx, rt, oa.OrgID(), ids, time.Second) + if err != nil { + return nil, nil, err } + locked := maps.Keys(locks) - // figures out which contacts need to be excluded if any - exclude := make(map[models.ContactID]bool, 5) + // whatever happens, we need to unlock the contacts + defer models.UnlockContacts(rt, oa.OrgID(), locks) - // filter out anybody who has has a flow run in this flow if appropriate - if options.ExcludeStartedPreviously { - // find all participants that have been in this flow - started, err := models.FindFlowStartedOverlap(ctx, rt.DB, flow.ID(), contactIDs) - if err != nil { - return nil, errors.Wrapf(err, "error finding others started flow: %d", flow.ID()) - } - for _, c := range started { 
- exclude[c] = true - } + // load our locked contacts + contacts, err := models.LoadContacts(ctx, rt.ReadonlyDB, oa, locked) + if err != nil { + return nil, nil, errors.Wrapf(err, "error loading contacts to start") } - // filter out our list of contacts to only include those that should be started - if options.ExcludeInAFlow { - // find all participants active in any flow - active, err := models.FilterByWaitingSession(ctx, rt.DB, contactIDs) + // build our triggers + triggers := make([]flows.Trigger, 0, len(locked)) + for _, c := range contacts { + contact, err := c.FlowContact(oa) if err != nil { - return nil, errors.Wrapf(err, "error finding other active flow: %d", flow.ID()) - } - for _, c := range active { - exclude[c] = true - } - } - - // filter into our final list of contacts - includedContacts := make([]models.ContactID, 0, len(contactIDs)) - for _, c := range contactIDs { - if !exclude[c] { - includedContacts = append(includedContacts, c) + return nil, nil, errors.Wrapf(err, "error creating flow contact") } + trigger := options.TriggerBuilder(contact) + triggers = append(triggers, trigger) } - // no contacts left? 
we are done - if len(includedContacts) == 0 { - return nil, nil - } - - // we now need to grab locks for our contacts so that they are never in two starts or handles at the - // same time we try to grab locks for up to five minutes, but do it in batches where we wait for one - // second per contact to prevent deadlocks - sessions := make([]*models.Session, 0, len(includedContacts)) - remaining := includedContacts - start := time.Now() - - // map of locks we've released - released := make(map[*redisx.Locker]bool) - - for len(remaining) > 0 && time.Since(start) < time.Minute*5 { - locked := make([]models.ContactID, 0, len(remaining)) - locks := make([]string, 0, len(remaining)) - skipped := make([]models.ContactID, 0, 5) - - // try up to a second to get a lock for a contact - for _, contactID := range remaining { - locker := models.GetContactLocker(oa.OrgID(), contactID) - - lock, err := locker.Grab(rt.RP, time.Second) - if err != nil { - return nil, errors.Wrapf(err, "error attempting to grab lock") - } - if lock == "" { - skipped = append(skipped, contactID) - continue - } - locked = append(locked, contactID) - locks = append(locks, lock) - - // defer unlocking if we exit due to error - defer func() { - if !released[locker] { - locker.Release(rt.RP, lock) - } - }() - } - - // load our locked contacts - contacts, err := models.LoadContacts(ctx, rt.ReadonlyDB, oa, locked) - if err != nil { - return nil, errors.Wrapf(err, "error loading contacts to start") - } - - // ok, we've filtered our contacts, build our triggers - triggers := make([]flows.Trigger, 0, len(locked)) - for _, c := range contacts { - contact, err := c.FlowContact(oa) - if err != nil { - return nil, errors.Wrapf(err, "error creating flow contact") - } - trigger := options.TriggerBuilder(contact) - triggers = append(triggers, trigger) - } - - ss, err := StartFlowForContacts(ctx, rt, oa, flow, contacts, triggers, options.CommitHook, options.Interrupt) - if err != nil { - return nil, errors.Wrapf(err, 
"error starting flow for contacts") - } - - // append all the sessions that were started - sessions = append(sessions, ss...) - - // release all our locks - for i := range locked { - locker := models.GetContactLocker(oa.OrgID(), locked[i]) - locker.Release(rt.RP, locks[i]) - released[locker] = true - } - - // skipped are now our remaining - remaining = skipped + ss, err := StartFlowForContacts(ctx, rt, oa, flow, contacts, triggers, options.CommitHook, options.Interrupt) + if err != nil { + return nil, nil, errors.Wrapf(err, "error starting flow for contacts") } - return sessions, nil + return ss, skipped, nil } // StartFlowForContacts runs the passed in flow for the passed in contact @@ -710,51 +472,3 @@ func StartFlowForContacts( log.WithField("elapsed", time.Since(start)).WithField("count", len(dbSessions)).Info("flow started, sessions created") return dbSessions, nil } - -type DBHook func(ctx context.Context, tx *sqlx.Tx) error - -// TriggerIVRFlow will create a new flow start with the passed in flow and set of contacts. This will cause us to -// request calls to start, which once we get the callback will trigger our actual flow to start. -func TriggerIVRFlow(ctx context.Context, rt *runtime.Runtime, orgID models.OrgID, flowID models.FlowID, contactIDs []models.ContactID, hook DBHook) error { - tx, _ := rt.DB.BeginTxx(ctx, nil) - - // create our start - start := models.NewFlowStart(orgID, models.StartTypeTrigger, models.FlowTypeVoice, flowID). 
- WithContactIDs(contactIDs) - - // insert it - err := models.InsertFlowStarts(ctx, tx, []*models.FlowStart{start}) - if err != nil { - tx.Rollback() - return errors.Wrapf(err, "error inserting ivr flow start") - } - - // call our hook if we have one - if hook != nil { - err = hook(ctx, tx) - if err != nil { - tx.Rollback() - return errors.Wrapf(err, "error while calling db hook") - } - } - - // commit our transaction - err = tx.Commit() - if err != nil { - tx.Rollback() - return errors.Wrapf(err, "error committing transaction for ivr flow starts") - } - - // create our batch of all our contacts - task := start.CreateBatch(contactIDs, true, len(contactIDs)) - - // queue this to our ivr starter, it will take care of creating the calls then calling back in - rc := rt.RP.Get() - defer rc.Close() - err = queue.AddTask(rc, queue.BatchQueue, queue.StartIVRFlowBatch, int(orgID), task, queue.HighPriority) - if err != nil { - return errors.Wrapf(err, "error queuing ivr flow start") - } - - return nil -} diff --git a/core/runner/runner_test.go b/core/runner/runner_test.go index 9844ac14c..8085daddc 100644 --- a/core/runner/runner_test.go +++ b/core/runner/runner_test.go @@ -2,9 +2,7 @@ package runner_test import ( "context" - "encoding/json" "testing" - "time" "github.com/gomodule/redigo/redis" "github.com/jmoiron/sqlx" @@ -25,233 +23,62 @@ import ( "github.com/stretchr/testify/require" ) -func TestFireCampaignEvents(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() +func TestStartFlowBatch(t *testing.T) { + ctx, rt := testsuite.Runtime() - defer testsuite.Reset(testsuite.ResetData | testsuite.ResetRedis) - - campaign := triggers.NewCampaignReference(triggers.CampaignUUID(testdata.RemindersCampaign.UUID), "Doctor Reminders") + defer testsuite.Reset(testsuite.ResetAll) - // create event fires for event #3 (Pick A Number, start mode SKIP) - now := time.Now() - fire1ID := testdata.InsertEventFire(rt.DB, testdata.Cathy, testdata.RemindersEvent3, now) - fire2ID := 
testdata.InsertEventFire(rt.DB, testdata.Bob, testdata.RemindersEvent3, now) - fire3ID := testdata.InsertEventFire(rt.DB, testdata.Alexandria, testdata.RemindersEvent3, now) + // create a start object + start1 := models.NewFlowStart(models.OrgID(1), models.StartTypeManual, models.FlowTypeMessaging, testdata.SingleMessage.ID). + WithContactIDs([]models.ContactID{testdata.Cathy.ID, testdata.Bob.ID, testdata.George.ID, testdata.Alexandria.ID}) + err := models.InsertFlowStarts(ctx, rt.DB, []*models.FlowStart{start1}) + require.NoError(t, err) - // create waiting sessions for Cathy and Alexandria - testdata.InsertWaitingSession(db, testdata.Org1, testdata.Cathy, models.FlowTypeVoice, testdata.IVRFlow, models.NilCallID, time.Now(), time.Now(), false, nil) - testdata.InsertWaitingSession(db, testdata.Org1, testdata.Alexandria, models.FlowTypeMessaging, testdata.Favorites, models.NilCallID, time.Now(), time.Now(), false, nil) + batch1 := start1.CreateBatch([]models.ContactID{testdata.Cathy.ID, testdata.Bob.ID}, false, 4) + batch2 := start1.CreateBatch([]models.ContactID{testdata.George.ID, testdata.Alexandria.ID}, true, 4) - fires := []*models.EventFire{ - { - FireID: fire1ID, - EventID: testdata.RemindersEvent3.ID, - ContactID: testdata.Cathy.ID, - Scheduled: now, - }, - { - FireID: fire2ID, - EventID: testdata.RemindersEvent3.ID, - ContactID: testdata.Bob.ID, - Scheduled: now, - }, - { - FireID: fire3ID, - EventID: testdata.RemindersEvent3.ID, - ContactID: testdata.Alexandria.ID, - Scheduled: now, - }, - } - startedIDs, err := runner.FireCampaignEvents(ctx, rt, testdata.Org1.ID, fires, testdata.PickANumber.UUID, campaign, triggers.CampaignEventUUID(testdata.RemindersEvent3.UUID)) - assert.NoError(t, err) - assert.ElementsMatch(t, []models.ContactID{testdata.Bob.ID}, startedIDs) - - // cathy has her existing waiting session because event skipped her - assertdb.Query(t, db, `SELECT count(*) FROM flows_flowsession WHERE contact_id = $1 AND status = 'W'`, 
testdata.Cathy.ID).Returns(1) - assertdb.Query(t, db, `SELECT current_flow_id FROM flows_flowsession WHERE contact_id = $1 AND status = 'W'`, testdata.Cathy.ID).Returns(int64(testdata.IVRFlow.ID)) - assertdb.Query(t, db, `SELECT fired_result from campaigns_eventfire WHERE contact_id = $1 AND event_id = $2`, testdata.Cathy.ID, testdata.RemindersEvent3.ID).Returns("S") - - // bob's waiting session is the campaign event because he didn't have a waiting session - assertdb.Query(t, db, `SELECT count(*) FROM flows_flowsession WHERE contact_id = $1 AND status = 'W'`, testdata.Bob.ID).Returns(1) - assertdb.Query(t, db, `SELECT current_flow_id FROM flows_flowsession WHERE contact_id = $1 AND status = 'W'`, testdata.Bob.ID).Returns(int64(testdata.PickANumber.ID)) - assertdb.Query(t, db, `SELECT fired_result from campaigns_eventfire WHERE contact_id = $1 AND event_id = $2`, testdata.Bob.ID, testdata.RemindersEvent3.ID).Returns("F") - - // alexandria has her existing waiting session because event skipped her - assertdb.Query(t, db, `SELECT count(*) FROM flows_flowsession WHERE contact_id = $1 AND status = 'W'`, testdata.Alexandria.ID).Returns(1) - assertdb.Query(t, db, `SELECT current_flow_id FROM flows_flowsession WHERE contact_id = $1 AND status = 'W'`, testdata.Alexandria.ID).Returns(int64(testdata.Favorites.ID)) - assertdb.Query(t, db, `SELECT fired_result from campaigns_eventfire WHERE contact_id = $1 AND event_id = $2`, testdata.Alexandria.ID, testdata.RemindersEvent3.ID).Returns("S") - - // all event fires fired - assertdb.Query(t, db, `SELECT count(*) from campaigns_eventfire WHERE fired IS NULL`).Returns(0) - - // create event fires for event #2 (message, start mode PASSIVE) - now = time.Now() - fire4ID := testdata.InsertEventFire(rt.DB, testdata.Cathy, testdata.RemindersEvent2, now) - fire5ID := testdata.InsertEventFire(rt.DB, testdata.Bob, testdata.RemindersEvent2, now) - fire6ID := testdata.InsertEventFire(rt.DB, testdata.Alexandria, testdata.RemindersEvent2, now) 
- - fires = []*models.EventFire{ - { - FireID: fire4ID, - EventID: testdata.RemindersEvent2.ID, - ContactID: testdata.Cathy.ID, - Scheduled: now, - }, - { - FireID: fire5ID, - EventID: testdata.RemindersEvent2.ID, - ContactID: testdata.Bob.ID, - Scheduled: now, - }, - { - FireID: fire6ID, - EventID: testdata.RemindersEvent2.ID, - ContactID: testdata.Alexandria.ID, - Scheduled: now, - }, - } + // start the first batch... + sessions, err := runner.StartFlowBatch(ctx, rt, batch1) + require.NoError(t, err) + assert.Len(t, sessions, 2) - startedIDs, err = runner.FireCampaignEvents(ctx, rt, testdata.Org1.ID, fires, testdata.CampaignFlow.UUID, campaign, triggers.CampaignEventUUID(testdata.RemindersEvent2.UUID)) - assert.NoError(t, err) - assert.ElementsMatch(t, []models.ContactID{testdata.Cathy.ID, testdata.Bob.ID, testdata.Alexandria.ID}, startedIDs) - - // cathy still has her existing waiting session and now a completed one - assertdb.Query(t, db, `SELECT count(*) FROM flows_flowsession WHERE contact_id = $1 AND status = 'W'`, testdata.Cathy.ID).Returns(1) - assertdb.Query(t, db, `SELECT count(*) FROM flows_flowsession WHERE contact_id = $1 AND status = 'C'`, testdata.Cathy.ID).Returns(1) - assertdb.Query(t, db, `SELECT fired_result from campaigns_eventfire WHERE contact_id = $1 AND event_id = $2`, testdata.Cathy.ID, testdata.RemindersEvent2.ID).Returns("F") - - // bob still has one waiting session from the previous campaign event and now a completed one - assertdb.Query(t, db, `SELECT count(*) FROM flows_flowsession WHERE contact_id = $1 AND status = 'W'`, testdata.Bob.ID).Returns(1) - assertdb.Query(t, db, `SELECT count(*) FROM flows_flowsession WHERE contact_id = $1 AND status = 'C'`, testdata.Bob.ID).Returns(1) - assertdb.Query(t, db, `SELECT fired_result from campaigns_eventfire WHERE contact_id = $1 AND event_id = $2`, testdata.Bob.ID, testdata.RemindersEvent2.ID).Returns("F") - - // alexandria still has her existing waiting session and now a completed one - 
assertdb.Query(t, db, `SELECT count(*) FROM flows_flowsession WHERE contact_id = $1 AND status = 'W'`, testdata.Alexandria.ID).Returns(1) - assertdb.Query(t, db, `SELECT count(*) FROM flows_flowsession WHERE contact_id = $1 AND status = 'C'`, testdata.Alexandria.ID).Returns(1) - assertdb.Query(t, db, `SELECT fired_result from campaigns_eventfire WHERE contact_id = $1 AND event_id = $2`, testdata.Alexandria.ID, testdata.RemindersEvent2.ID).Returns("F") - - // create event fires for event #1 (flow, start mode INTERRUPT) - now = time.Now() - fire7ID := testdata.InsertEventFire(rt.DB, testdata.Cathy, testdata.RemindersEvent1, now) - fire8ID := testdata.InsertEventFire(rt.DB, testdata.Bob, testdata.RemindersEvent1, now) - fire9ID := testdata.InsertEventFire(rt.DB, testdata.Alexandria, testdata.RemindersEvent1, now) - - fires = []*models.EventFire{ - { - FireID: fire7ID, - EventID: testdata.RemindersEvent1.ID, - ContactID: testdata.Cathy.ID, - Scheduled: now, - }, - { - FireID: fire8ID, - EventID: testdata.RemindersEvent1.ID, - ContactID: testdata.Bob.ID, - Scheduled: now, - }, - { - FireID: fire9ID, - EventID: testdata.RemindersEvent1.ID, - ContactID: testdata.Alexandria.ID, - Scheduled: now, - }, - } + assertdb.Query(t, rt.DB, `SELECT count(*) FROM flows_flowsession WHERE contact_id = ANY($1) + AND status = 'C' AND responded = FALSE AND org_id = 1 AND call_id IS NULL AND output IS NOT NULL`, pq.Array([]models.ContactID{testdata.Cathy.ID, testdata.Bob.ID})). 
+ Returns(2) - startedIDs, err = runner.FireCampaignEvents(ctx, rt, testdata.Org1.ID, fires, testdata.Favorites.UUID, campaign, triggers.CampaignEventUUID(testdata.RemindersEvent1.UUID)) - assert.NoError(t, err) - assert.ElementsMatch(t, []models.ContactID{testdata.Cathy.ID, testdata.Bob.ID, testdata.Alexandria.ID}, startedIDs) - - // cathy's existing waiting session should now be interrupted and now she has a waiting session in the Favorites flow - assertdb.Query(t, db, `SELECT count(*) FROM flows_flowsession WHERE contact_id = $1 AND status = 'I'`, testdata.Cathy.ID).Returns(1) - assertdb.Query(t, db, `SELECT count(*) FROM flows_flowsession WHERE contact_id = $1 AND status = 'C'`, testdata.Cathy.ID).Returns(1) - assertdb.Query(t, db, `SELECT count(*) FROM flows_flowsession WHERE contact_id = $1 AND status = 'W' AND current_flow_id = $2`, testdata.Cathy.ID, testdata.Favorites.ID).Returns(1) - assertdb.Query(t, db, `SELECT fired_result from campaigns_eventfire WHERE contact_id = $1 AND event_id = $2`, testdata.Cathy.ID, testdata.RemindersEvent1.ID).Returns("F") - - // bob's session from the first campaign event should now be interrupted and he has a new waiting session - assertdb.Query(t, db, `SELECT count(*) FROM flows_flowsession WHERE contact_id = $1 AND status = 'I'`, testdata.Bob.ID).Returns(1) - assertdb.Query(t, db, `SELECT count(*) FROM flows_flowsession WHERE contact_id = $1 AND status = 'C'`, testdata.Bob.ID).Returns(1) - assertdb.Query(t, db, `SELECT count(*) FROM flows_flowsession WHERE contact_id = $1 AND status = 'W' AND current_flow_id = $2`, testdata.Bob.ID, testdata.Favorites.ID).Returns(1) - assertdb.Query(t, db, `SELECT fired_result from campaigns_eventfire WHERE contact_id = $1 AND event_id = $2`, testdata.Bob.ID, testdata.RemindersEvent1.ID).Returns("F") - - // alexandria's existing waiting session should now be interrupted and now she has a waiting session in the Favorites flow - assertdb.Query(t, db, `SELECT count(*) FROM flows_flowsession 
WHERE contact_id = $1 AND status = 'I'`, testdata.Alexandria.ID).Returns(1) - assertdb.Query(t, db, `SELECT count(*) FROM flows_flowsession WHERE contact_id = $1 AND status = 'C'`, testdata.Alexandria.ID).Returns(1) - assertdb.Query(t, db, `SELECT count(*) FROM flows_flowsession WHERE contact_id = $1 AND status = 'W' AND current_flow_id = $2`, testdata.Alexandria.ID, testdata.Favorites.ID).Returns(1) - assertdb.Query(t, db, `SELECT fired_result from campaigns_eventfire WHERE contact_id = $1 AND event_id = $2`, testdata.Alexandria.ID, testdata.RemindersEvent1.ID).Returns("F") -} + assertdb.Query(t, rt.DB, `SELECT count(*) FROM flows_flowrun WHERE contact_id = ANY($1) and flow_id = $2 AND responded = FALSE AND org_id = 1 AND status = 'C' + AND results IS NOT NULL AND path IS NOT NULL AND session_id IS NOT NULL`, pq.Array([]models.ContactID{testdata.Cathy.ID, testdata.Bob.ID}), testdata.SingleMessage.ID). + Returns(2) -func TestBatchStart(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() + assertdb.Query(t, rt.DB, `SELECT count(*) FROM msgs_msg WHERE contact_id = ANY($1) AND text = 'Hey, how are you?' AND org_id = 1 AND status = 'Q' + AND queued_on IS NOT NULL AND direction = 'O' AND msg_type = 'T' AND channel_id = $2`, pq.Array([]models.ContactID{testdata.Cathy.ID, testdata.Bob.ID}), testdata.TwilioChannel.ID). + Returns(2) - defer testsuite.Reset(testsuite.ResetAll) + assertdb.Query(t, rt.DB, `SELECT status FROM flows_flowstart WHERE id = $1`, start1.ID).Returns("P") - // create a start object - testdata.InsertFlowStart(db, testdata.Org1, testdata.SingleMessage, nil) + // start the second batch... 
+ sessions, err = runner.StartFlowBatch(ctx, rt, batch2) + require.NoError(t, err) + assert.Len(t, sessions, 2) - // and our batch object - contactIDs := []models.ContactID{testdata.Cathy.ID, testdata.Bob.ID} + assertdb.Query(t, rt.DB, `SELECT status FROM flows_flowstart WHERE id = $1`, start1.ID).Returns("C") - tcs := []struct { - Flow models.FlowID - ExcludeStartedPreviously bool - ExcludeInAFlow bool - Extra json.RawMessage - Msg string - Count int - TotalCount int - }{ - {testdata.SingleMessage.ID, false, false, nil, "Hey, how are you?", 2, 2}, - {testdata.SingleMessage.ID, true, false, nil, "Hey, how are you?", 0, 2}, - {testdata.SingleMessage.ID, true, true, nil, "Hey, how are you?", 0, 2}, - {testdata.SingleMessage.ID, false, true, nil, "Hey, how are you?", 2, 4}, - { - Flow: testdata.IncomingExtraFlow.ID, - ExcludeStartedPreviously: false, - ExcludeInAFlow: true, - Extra: json.RawMessage([]byte(`{"name":"Fred", "age":33}`)), - Msg: "Great to meet you Fred. Your age is 33.", - Count: 2, - TotalCount: 2, - }, - } + // create a start object with params + testdata.InsertFlowStart(rt, testdata.Org1, testdata.IncomingExtraFlow, nil) + start2 := models.NewFlowStart(models.OrgID(1), models.StartTypeManual, models.FlowTypeMessaging, testdata.IncomingExtraFlow.ID). + WithContactIDs([]models.ContactID{testdata.Cathy.ID}). + WithParams([]byte(`{"name":"Fred", "age":33}`)) + batch3 := start2.CreateBatch([]models.ContactID{testdata.Cathy.ID}, true, 1) - last := time.Now() + sessions, err = runner.StartFlowBatch(ctx, rt, batch3) + require.NoError(t, err) + assert.Len(t, sessions, 1) - for i, tc := range tcs { - start := models.NewFlowStart(models.OrgID(1), models.StartTypeManual, models.FlowTypeMessaging, tc.Flow). - WithContactIDs(contactIDs). - WithExcludeInAFlow(tc.ExcludeInAFlow). - WithExcludeStartedPreviously(tc.ExcludeStartedPreviously). 
- WithExtra(tc.Extra) - batch := start.CreateBatch(contactIDs, true, len(contactIDs)) - - sessions, err := runner.StartFlowBatch(ctx, rt, batch) - require.NoError(t, err) - assert.Equal(t, tc.Count, len(sessions), "%d: unexpected number of sessions created", i) - - assertdb.Query(t, db, - `SELECT count(*) FROM flows_flowsession WHERE contact_id = ANY($1) - AND status = 'C' AND responded = FALSE AND org_id = 1 AND call_id IS NULL AND output IS NOT NULL AND created_on > $2`, pq.Array(contactIDs), last). - Returns(tc.Count, "%d: unexpected number of sessions", i) - - assertdb.Query(t, db, - `SELECT count(*) FROM flows_flowrun WHERE contact_id = ANY($1) and flow_id = $2 - AND responded = FALSE AND org_id = 1 AND status = 'C' - AND results IS NOT NULL AND path IS NOT NULL AND session_id IS NOT NULL`, pq.Array(contactIDs), tc.Flow). - Returns(tc.TotalCount, "%d: unexpected number of runs", i) - - assertdb.Query(t, db, - `SELECT count(*) FROM msgs_msg WHERE contact_id = ANY($1) AND text = $2 AND org_id = 1 AND status = 'Q' - AND queued_on IS NOT NULL AND direction = 'O' AND msg_type = 'F' AND channel_id = $3`, - pq.Array(contactIDs), tc.Msg, testdata.TwilioChannel.ID). - Returns(tc.TotalCount, "%d: unexpected number of messages", i) - - last = time.Now() - } + assertdb.Query(t, rt.DB, `SELECT count(*) FROM msgs_msg WHERE text = 'Great to meet you Fred. 
Your age is 33.'`).Returns(1) } func TestResume(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetData | testsuite.ResetStorage) @@ -264,22 +91,22 @@ func TestResume(t *testing.T) { flow, err := oa.FlowByID(testdata.Favorites.ID) require.NoError(t, err) - modelContact, flowContact := testdata.Cathy.Load(db, oa) + modelContact, flowContact := testdata.Cathy.Load(rt, oa) trigger := triggers.NewBuilder(oa.Env(), flow.Reference(), flowContact).Manual().Build() sessions, err := runner.StartFlowForContacts(ctx, rt, oa, flow, []*models.Contact{modelContact}, []flows.Trigger{trigger}, nil, true) assert.NoError(t, err) assert.NotNil(t, sessions) - assertdb.Query(t, db, + assertdb.Query(t, rt.DB, `SELECT count(*) FROM flows_flowsession WHERE contact_id = $1 AND current_flow_id = $2 AND status = 'W' AND responded = FALSE AND org_id = 1 AND call_id IS NULL AND output IS NULL`, modelContact.ID(), flow.ID()).Returns(1) - assertdb.Query(t, db, + assertdb.Query(t, rt.DB, `SELECT count(*) FROM flows_flowrun WHERE contact_id = $1 AND flow_id = $2 AND status = 'W' AND responded = FALSE AND org_id = 1`, modelContact.ID(), flow.ID()).Returns(1) - assertdb.Query(t, db, `SELECT count(*) FROM msgs_msg WHERE contact_id = $1 AND direction = 'O' AND text like '%favorite color%'`, modelContact.ID()).Returns(1) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM msgs_msg WHERE contact_id = $1 AND direction = 'O' AND text like '%favorite color%'`, modelContact.ID()).Returns(1) tcs := []struct { Message string @@ -304,7 +131,7 @@ func TestResume(t *testing.T) { assert.NoError(t, err) assert.NotNil(t, session) - assertdb.Query(t, db, + assertdb.Query(t, rt.DB, `SELECT count(*) FROM flows_flowsession WHERE contact_id = $1 AND status = $2 AND responded = TRUE AND org_id = 1 AND call_id IS NULL AND output IS NULL AND output_url IS NOT NULL`, modelContact.ID(), tc.SessionStatus). 
Returns(1, "%d: didn't find expected session", i) @@ -313,25 +140,25 @@ func TestResume(t *testing.T) { AND status = $3 AND responded = TRUE AND org_id = 1 AND current_node_uuid IS NOT NULL AND json_array_length(path::json) = $4 AND session_id IS NOT NULL` - assertdb.Query(t, db, runQuery, modelContact.ID(), flow.ID(), tc.RunStatus, tc.PathLength). + assertdb.Query(t, rt.DB, runQuery, modelContact.ID(), flow.ID(), tc.RunStatus, tc.PathLength). Returns(1, "%d: didn't find expected run", i) - assertdb.Query(t, db, `SELECT count(*) FROM msgs_msg WHERE contact_id = $1 AND direction = 'O' AND text like $2`, modelContact.ID(), tc.Substring). + assertdb.Query(t, rt.DB, `SELECT count(*) FROM msgs_msg WHERE contact_id = $1 AND direction = 'O' AND text like $2`, modelContact.ID(), tc.Substring). Returns(1, "%d: didn't find expected message", i) } } func TestStartFlowConcurrency(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetData | testsuite.ResetRedis) // check everything works with big ids - db.MustExec(`ALTER SEQUENCE flows_flowrun_id_seq RESTART WITH 5000000000;`) - db.MustExec(`ALTER SEQUENCE flows_flowsession_id_seq RESTART WITH 5000000000;`) + rt.DB.MustExec(`ALTER SEQUENCE flows_flowrun_id_seq RESTART WITH 5000000000;`) + rt.DB.MustExec(`ALTER SEQUENCE flows_flowsession_id_seq RESTART WITH 5000000000;`) // create a flow which has a send_broadcast action which will mean handlers grabbing redis connections - flow := testdata.InsertFlow(db, testdata.Org1, testsuite.ReadFile("testdata/broadcast_flow.json")) + flow := testdata.InsertFlow(rt, testdata.Org1, testsuite.ReadFile("testdata/broadcast_flow.json")) oa := testdata.Org1.Load(rt) @@ -342,12 +169,10 @@ func TestStartFlowConcurrency(t *testing.T) { // create a lot of contacts... 
contacts := make([]*testdata.Contact, 100) for i := range contacts { - contacts[i] = testdata.InsertContact(db, testdata.Org1, flows.ContactUUID(uuids.New()), "Jim", envs.NilLanguage, models.ContactStatusActive) + contacts[i] = testdata.InsertContact(rt, testdata.Org1, flows.ContactUUID(uuids.New()), "Jim", envs.NilLanguage, models.ContactStatusActive) } options := &runner.StartOptions{ - ExcludeStartedPreviously: false, - ExcludeInAFlow: false, TriggerBuilder: func(contact *flows.Contact) flows.Trigger { return triggers.NewBuilder(oa.Env(), flowRef, contact).Manual().Build() }, @@ -363,6 +188,6 @@ func TestStartFlowConcurrency(t *testing.T) { assert.Equal(t, 1, len(sessions)) }) - assertdb.Query(t, db, `SELECT count(*) FROM flows_flowrun`).Returns(len(contacts)) - assertdb.Query(t, db, `SELECT count(*) FROM flows_flowsession`).Returns(len(contacts)) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM flows_flowrun`).Returns(len(contacts)) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM flows_flowsession`).Returns(len(contacts)) } diff --git a/core/search/groups.go b/core/search/groups.go index 8e5d9b698..72e82c312 100644 --- a/core/search/groups.go +++ b/core/search/groups.go @@ -4,8 +4,8 @@ import ( "context" "time" - "github.com/jmoiron/sqlx" "github.com/nyaruka/mailroom/core/models" + "github.com/nyaruka/mailroom/runtime" "github.com/olivere/elastic/v7" "github.com/pkg/errors" "github.com/sirupsen/logrus" @@ -13,8 +13,8 @@ import ( // PopulateSmartGroup calculates which members should be part of a group and populates the contacts // for that group by performing the minimum number of inserts / deletes. 
-func PopulateSmartGroup(ctx context.Context, db *sqlx.DB, es *elastic.Client, oa *models.OrgAssets, groupID models.GroupID, query string) (int, error) { - err := models.UpdateGroupStatus(ctx, db, groupID, models.GroupStatusEvaluating) +func PopulateSmartGroup(ctx context.Context, rt *runtime.Runtime, es *elastic.Client, oa *models.OrgAssets, groupID models.GroupID, query string) (int, error) { + err := models.UpdateGroupStatus(ctx, rt.DB, groupID, models.GroupStatusEvaluating) if err != nil { return 0, errors.Wrapf(err, "error marking dynamic group as evaluating") } @@ -24,7 +24,7 @@ func PopulateSmartGroup(ctx context.Context, db *sqlx.DB, es *elastic.Client, oa // we have a bit of a race with the indexer process.. we want to make sure that any contacts that changed // before this group was updated but after the last index are included, so if a contact was modified // more recently than 10 seconds ago, we wait that long before starting in populating our group - newest, err := models.GetNewestContactModifiedOn(ctx, db, oa) + newest, err := models.GetNewestContactModifiedOn(ctx, rt.DB, oa) if err != nil { return 0, errors.Wrapf(err, "error getting most recent contact modified_on for org: %d", oa.OrgID()) } @@ -40,7 +40,7 @@ func PopulateSmartGroup(ctx context.Context, db *sqlx.DB, es *elastic.Client, oa } // get current set of contacts in our group - ids, err := models.ContactIDsForGroupIDs(ctx, db, []models.GroupID{groupID}) + ids, err := models.ContactIDsForGroupIDs(ctx, rt.DB, []models.GroupID{groupID}) if err != nil { return 0, errors.Wrapf(err, "unable to look up contact ids for group: %d", groupID) } @@ -50,7 +50,7 @@ func PopulateSmartGroup(ctx context.Context, db *sqlx.DB, es *elastic.Client, oa } // calculate new set of ids - new, err := GetContactIDsForQuery(ctx, es, oa, query, -1) + new, err := GetContactIDsForQuery(ctx, rt, oa, query, -1) if err != nil { return 0, errors.Wrapf(err, "error performing query: %s for group: %d", query, groupID) } @@ -71,19 
+71,19 @@ func PopulateSmartGroup(ctx context.Context, db *sqlx.DB, es *elastic.Client, oa } // first remove all the contacts - err = models.RemoveContactsFromGroupAndCampaigns(ctx, db, oa, groupID, removals) + err = models.RemoveContactsFromGroupAndCampaigns(ctx, rt.DB, oa, groupID, removals) if err != nil { return 0, errors.Wrapf(err, "error removing contacts from group: %d", groupID) } // then add them all - err = models.AddContactsToGroupAndCampaigns(ctx, db, oa, groupID, adds) + err = models.AddContactsToGroupAndCampaigns(ctx, rt.DB, oa, groupID, adds) if err != nil { return 0, errors.Wrapf(err, "error adding contacts to group: %d", groupID) } // mark our group as no longer evaluating - err = models.UpdateGroupStatus(ctx, db, groupID, models.GroupStatusReady) + err = models.UpdateGroupStatus(ctx, rt.DB, groupID, models.GroupStatusReady) if err != nil { return 0, errors.Wrapf(err, "error marking dynamic group as ready") } @@ -93,7 +93,7 @@ func PopulateSmartGroup(ctx context.Context, db *sqlx.DB, es *elastic.Client, oa changed = append(changed, adds...) changed = append(changed, removals...) 
- err = models.UpdateContactModifiedOn(ctx, db, changed) + err = models.UpdateContactModifiedOn(ctx, rt.DB, changed) if err != nil { return 0, errors.Wrapf(err, "error updating contact modified_on after group population") } diff --git a/core/search/groups_test.go b/core/search/groups_test.go index 25c22931b..ad358546b 100644 --- a/core/search/groups_test.go +++ b/core/search/groups_test.go @@ -14,79 +14,70 @@ import ( ) func TestSmartGroups(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetAll) // insert an event on our campaign - newEvent := testdata.InsertCampaignFlowEvent(db, testdata.RemindersCampaign, testdata.Favorites, testdata.JoinedField, 1000, "W") + newEvent := testdata.InsertCampaignFlowEvent(rt, testdata.RemindersCampaign, testdata.Favorites, testdata.JoinedField, 1000, "W") // clear Cathy's value - db.MustExec( - `update contacts_contact set fields = fields - $2 - WHERE id = $1`, testdata.Cathy.ID, testdata.JoinedField.UUID) + rt.DB.MustExec(`update contacts_contact set fields = fields - $2 WHERE id = $1`, testdata.Cathy.ID, testdata.JoinedField.UUID) // and populate Bob's - db.MustExec( - fmt.Sprintf(`update contacts_contact set fields = fields || - '{"%s": { "text": "2029-09-15T12:00:00+00:00", "datetime": "2029-09-15T12:00:00+00:00" }}'::jsonb - WHERE id = $1`, testdata.JoinedField.UUID), testdata.Bob.ID) + rt.DB.MustExec( + fmt.Sprintf(`update contacts_contact set fields = fields || '{"%s": { "text": "2029-09-15T12:00:00+00:00", "datetime": "2029-09-15T12:00:00+00:00" }}'::jsonb WHERE id = $1`, testdata.JoinedField.UUID), + testdata.Bob.ID, + ) + + testsuite.ReindexElastic(ctx) oa, err := models.GetOrgAssetsWithRefresh(ctx, rt, testdata.Org1.ID, models.RefreshCampaigns|models.RefreshGroups) assert.NoError(t, err) - mockES := testsuite.NewMockElasticServer() - defer mockES.Close() - - es := mockES.Client() - - mockES.AddResponse(testdata.Cathy.ID) - 
mockES.AddResponse(testdata.Bob.ID) - mockES.AddResponse(testdata.Bob.ID) - tcs := []struct { - Query string - ContactIDs []models.ContactID - EventContactIDs []models.ContactID + query string + expectedContactIDs []models.ContactID + expectedEventIDs []models.ContactID }{ - { - "cathy", - []models.ContactID{testdata.Cathy.ID}, - []models.ContactID{}, + { // 0 + query: "cathy", + expectedContactIDs: []models.ContactID{testdata.Cathy.ID}, + expectedEventIDs: []models.ContactID{}, }, - { - "bob", - []models.ContactID{testdata.Bob.ID}, - []models.ContactID{testdata.Bob.ID}, + { // 1 + query: "bob", + expectedContactIDs: []models.ContactID{testdata.Bob.ID}, + expectedEventIDs: []models.ContactID{testdata.Bob.ID}, }, - { - "unchanged", - []models.ContactID{testdata.Bob.ID}, - []models.ContactID{testdata.Bob.ID}, + { // 2 + query: "name = BOB", + expectedContactIDs: []models.ContactID{testdata.Bob.ID}, + expectedEventIDs: []models.ContactID{testdata.Bob.ID}, }, } - for _, tc := range tcs { - err := models.UpdateGroupStatus(ctx, db, testdata.DoctorsGroup.ID, models.GroupStatusInitializing) + for i, tc := range tcs { + err := models.UpdateGroupStatus(ctx, rt.DB, testdata.DoctorsGroup.ID, models.GroupStatusInitializing) assert.NoError(t, err) - count, err := search.PopulateSmartGroup(ctx, db, es, oa, testdata.DoctorsGroup.ID, tc.Query) - assert.NoError(t, err, "error populating smart group for: %s", tc.Query) + count, err := search.PopulateSmartGroup(ctx, rt, rt.ES, oa, testdata.DoctorsGroup.ID, tc.query) + assert.NoError(t, err, "error populating smart group for: %s", tc.query) - assert.Equal(t, count, len(tc.ContactIDs)) + assert.Equal(t, count, len(tc.expectedContactIDs), "%d: contact count mismatch", i) // assert the current group membership - contactIDs, err := models.ContactIDsForGroupIDs(ctx, db, []models.GroupID{testdata.DoctorsGroup.ID}) + contactIDs, err := models.ContactIDsForGroupIDs(ctx, rt.DB, []models.GroupID{testdata.DoctorsGroup.ID}) assert.NoError(t, err) 
- assert.Equal(t, tc.ContactIDs, contactIDs) + assert.Equal(t, tc.expectedContactIDs, contactIDs) - assertdb.Query(t, db, `SELECT count(*) from contacts_contactgroup WHERE id = $1 AND status = 'R'`, testdata.DoctorsGroup.ID). - Returns(1, "wrong number of contacts in group for query: %s", tc.Query) + assertdb.Query(t, rt.DB, `SELECT count(*) from contacts_contactgroup WHERE id = $1 AND status = 'R'`, testdata.DoctorsGroup.ID). + Returns(1, "wrong number of contacts in group for query: %s", tc.query) - assertdb.Query(t, db, `SELECT count(*) from campaigns_eventfire WHERE event_id = $1`, newEvent.ID). - Returns(len(tc.EventContactIDs), "wrong number of contacts with events for query: %s", tc.Query) + assertdb.Query(t, rt.DB, `SELECT count(*) from campaigns_eventfire WHERE event_id = $1`, newEvent.ID). + Returns(len(tc.expectedEventIDs), "wrong number of contacts with events for query: %s", tc.query) - assertdb.Query(t, db, `SELECT count(*) from campaigns_eventfire WHERE event_id = $1 AND contact_id = ANY($2)`, newEvent.ID, pq.Array(tc.EventContactIDs)). - Returns(len(tc.EventContactIDs), "wrong contacts with events for query: %s", tc.Query) + assertdb.Query(t, rt.DB, `SELECT count(*) from campaigns_eventfire WHERE event_id = $1 AND contact_id = ANY($2)`, newEvent.ID, pq.Array(tc.expectedEventIDs)). 
+ Returns(len(tc.expectedEventIDs), "wrong contacts with events for query: %s", tc.query) } } diff --git a/core/search/queries.go b/core/search/queries.go index 94fc81538..4a5fd0e5b 100644 --- a/core/search/queries.go +++ b/core/search/queries.go @@ -4,7 +4,6 @@ import ( "time" "github.com/nyaruka/gocommon/dates" - "github.com/nyaruka/gocommon/urns" "github.com/nyaruka/goflow/contactql" "github.com/nyaruka/goflow/envs" "github.com/nyaruka/goflow/flows" @@ -12,16 +11,8 @@ import ( "github.com/pkg/errors" ) -// Exclusions are preset exclusion conditions -type Exclusions struct { - NonActive bool `json:"non_active"` // contacts who are blocked, stopped or archived - InAFlow bool `json:"in_a_flow"` // contacts who are currently in a flow (including this one) - StartedPreviously bool `json:"started_previously"` // contacts who have been in this flow in the last 90 days - NotSeenSinceDays int `json:"not_seen_since_days"` // contacts who have not been seen for more than this number of days -} - -// BuildStartQuery builds a start query for the given flow and start options -func BuildStartQuery(oa *models.OrgAssets, flow *models.Flow, groups []*models.Group, contactUUIDs []flows.ContactUUID, urnz []urns.URN, userQuery string, excs Exclusions) (string, error) { +// BuildRecipientsQuery builds a query from a set of inclusions/exclusions (i.e. 
a flow start or broadcast) +func BuildRecipientsQuery(oa *models.OrgAssets, flow *models.Flow, groups []*models.Group, contactUUIDs []flows.ContactUUID, userQuery string, excs models.Exclusions, excGroups []*models.Group) (string, error) { var parsedQuery *contactql.ContactQuery var err error @@ -32,10 +23,10 @@ func BuildStartQuery(oa *models.OrgAssets, flow *models.Flow, groups []*models.G } } - return contactql.Stringify(buildStartQuery(oa.Env(), flow, groups, contactUUIDs, urnz, parsedQuery, excs)), nil + return contactql.Stringify(buildRecipientsQuery(oa.Env(), flow, groups, contactUUIDs, parsedQuery, excs, excGroups)), nil } -func buildStartQuery(env envs.Environment, flow *models.Flow, groups []*models.Group, contactUUIDs []flows.ContactUUID, urnz []urns.URN, userQuery *contactql.ContactQuery, excs Exclusions) contactql.QueryNode { +func buildRecipientsQuery(env envs.Environment, flow *models.Flow, groups []*models.Group, contactUUIDs []flows.ContactUUID, userQuery *contactql.ContactQuery, excs models.Exclusions, excGroups []*models.Group) contactql.QueryNode { inclusions := make([]contactql.QueryNode, 0, 10) for _, group := range groups { @@ -44,10 +35,6 @@ func buildStartQuery(env envs.Environment, flow *models.Flow, groups []*models.G for _, contactUUID := range contactUUIDs { inclusions = append(inclusions, contactql.NewCondition("uuid", contactql.PropertyTypeAttribute, contactql.OpEqual, string(contactUUID))) } - for _, urn := range urnz { - scheme, path, _, _ := urn.ToParts() - inclusions = append(inclusions, contactql.NewCondition(scheme, contactql.PropertyTypeScheme, contactql.OpEqual, path)) - } if userQuery != nil { inclusions = append(inclusions, userQuery.Root()) } @@ -59,13 +46,16 @@ func buildStartQuery(env envs.Environment, flow *models.Flow, groups []*models.G if excs.InAFlow { exclusions = append(exclusions, contactql.NewCondition("flow", contactql.PropertyTypeAttribute, contactql.OpEqual, "")) } - if excs.StartedPreviously { + if 
excs.StartedPreviously && flow != nil { exclusions = append(exclusions, contactql.NewCondition("history", contactql.PropertyTypeAttribute, contactql.OpNotEqual, flow.Name())) } if excs.NotSeenSinceDays > 0 { seenSince := dates.Now().Add(-time.Hour * time.Duration(24*excs.NotSeenSinceDays)) exclusions = append(exclusions, contactql.NewCondition("last_seen_on", contactql.PropertyTypeAttribute, contactql.OpGreaterThan, formatQueryDate(env, seenSince))) } + for _, group := range excGroups { + exclusions = append(exclusions, contactql.NewCondition("group", contactql.PropertyTypeAttribute, contactql.OpNotEqual, group.Name())) + } return contactql.NewBoolCombination(contactql.BoolOperatorAnd, contactql.NewBoolCombination(contactql.BoolOperatorOr, inclusions...), diff --git a/core/search/queries_test.go b/core/search/queries_test.go index e846ecd56..b53da9c99 100644 --- a/core/search/queries_test.go +++ b/core/search/queries_test.go @@ -5,7 +5,6 @@ import ( "time" "github.com/nyaruka/gocommon/dates" - "github.com/nyaruka/gocommon/urns" "github.com/nyaruka/goflow/flows" "github.com/nyaruka/mailroom/core/models" "github.com/nyaruka/mailroom/core/search" @@ -15,8 +14,8 @@ import ( "github.com/stretchr/testify/require" ) -func TestBuildStartQuery(t *testing.T) { - _, rt, _, _ := testsuite.Get() +func TestBuildRecipientsQuery(t *testing.T) { + _, rt := testsuite.Runtime() dates.SetNowSource(dates.NewFixedNowSource(time.Date(2022, 4, 20, 15, 30, 45, 0, time.UTC))) defer dates.SetNowSource(dates.DefaultNowSource) @@ -29,48 +28,47 @@ func TestBuildStartQuery(t *testing.T) { testers := oa.GroupByID(testdata.TestersGroup.ID) tcs := []struct { - groups []*models.Group - contactUUIDs []flows.ContactUUID - urns []urns.URN - userQuery string - exclusions search.Exclusions - expected string - err string + groups []*models.Group + contactUUIDs []flows.ContactUUID + userQuery string + exclusions models.Exclusions + excludeGroups []*models.Group + expected string + err string }{ { groups: 
[]*models.Group{doctors, testers}, contactUUIDs: []flows.ContactUUID{testdata.Cathy.UUID, testdata.George.UUID}, - urns: []urns.URN{"tel:+1234567890", "telegram:9876543210"}, - exclusions: search.Exclusions{}, - expected: `group = "Doctors" OR group = "Testers" OR uuid = "6393abc0-283d-4c9b-a1b3-641a035c34bf" OR uuid = "8d024bcd-f473-4719-a00a-bd0bb1190135" OR tel = "+1234567890" OR telegram = 9876543210`, + exclusions: models.Exclusions{}, + expected: `group = "Doctors" OR group = "Testers" OR uuid = "6393abc0-283d-4c9b-a1b3-641a035c34bf" OR uuid = "8d024bcd-f473-4719-a00a-bd0bb1190135"`, }, { groups: []*models.Group{doctors}, contactUUIDs: []flows.ContactUUID{testdata.Cathy.UUID}, - urns: []urns.URN{"tel:+1234567890"}, - exclusions: search.Exclusions{ + exclusions: models.Exclusions{ NonActive: true, InAFlow: true, StartedPreviously: true, NotSeenSinceDays: 90, }, - expected: `(group = "Doctors" OR uuid = "6393abc0-283d-4c9b-a1b3-641a035c34bf" OR tel = "+1234567890") AND status = "active" AND flow = "" AND history != "Favorites" AND last_seen_on > "20-01-2022"`, + excludeGroups: []*models.Group{testers}, + expected: `(group = "Doctors" OR uuid = "6393abc0-283d-4c9b-a1b3-641a035c34bf") AND status = "active" AND flow = "" AND history != "Favorites" AND last_seen_on > "20-01-2022" AND group != "Testers"`, }, { contactUUIDs: []flows.ContactUUID{testdata.Cathy.UUID}, - exclusions: search.Exclusions{ + exclusions: models.Exclusions{ NonActive: true, }, expected: `uuid = "6393abc0-283d-4c9b-a1b3-641a035c34bf" AND status = "active"`, }, { userQuery: `gender = "M"`, - exclusions: search.Exclusions{}, + exclusions: models.Exclusions{}, expected: `gender = "M"`, }, { userQuery: `gender = "M"`, - exclusions: search.Exclusions{ + exclusions: models.Exclusions{ NonActive: true, InAFlow: true, StartedPreviously: true, @@ -80,7 +78,7 @@ func TestBuildStartQuery(t *testing.T) { }, { userQuery: `name ~ ben`, - exclusions: search.Exclusions{ + exclusions: models.Exclusions{ 
NonActive: false, InAFlow: false, StartedPreviously: false, @@ -90,7 +88,7 @@ func TestBuildStartQuery(t *testing.T) { }, { userQuery: `name ~ ben OR name ~ eric`, - exclusions: search.Exclusions{ + exclusions: models.Exclusions{ NonActive: false, InAFlow: false, StartedPreviously: false, @@ -100,18 +98,18 @@ func TestBuildStartQuery(t *testing.T) { }, { userQuery: `name ~`, // syntactically invalid user query - exclusions: search.Exclusions{}, + exclusions: models.Exclusions{}, err: "invalid user query: mismatched input '' expecting {TEXT, STRING}", }, { userQuery: `goats > 14`, // no such field - exclusions: search.Exclusions{}, + exclusions: models.Exclusions{}, err: "invalid user query: can't resolve 'goats' to attribute, scheme or field", }, } for _, tc := range tcs { - actual, err := search.BuildStartQuery(oa, flow, tc.groups, tc.contactUUIDs, tc.urns, tc.userQuery, tc.exclusions) + actual, err := search.BuildRecipientsQuery(oa, flow, tc.groups, tc.contactUUIDs, tc.userQuery, tc.exclusions, tc.excludeGroups) if tc.err != "" { assert.Equal(t, "", actual) assert.EqualError(t, err, tc.err) diff --git a/core/search/resolve.go b/core/search/resolve.go new file mode 100644 index 000000000..3c474f82b --- /dev/null +++ b/core/search/resolve.go @@ -0,0 +1,111 @@ +package search + +import ( + "context" + + "github.com/nyaruka/gocommon/urns" + "github.com/nyaruka/goflow/flows" + "github.com/nyaruka/mailroom/core/models" + "github.com/nyaruka/mailroom/runtime" + "github.com/pkg/errors" +) + +type Recipients struct { + ContactIDs []models.ContactID + GroupIDs []models.GroupID + URNs []urns.URN + Query string + Exclusions models.Exclusions + ExcludeGroupIDs []models.GroupID +} + +// ResolveRecipients resolves a set of contacts, groups, urns etc into a set of unique contacts +func ResolveRecipients(ctx context.Context, rt *runtime.Runtime, oa *models.OrgAssets, flow *models.Flow, recipients *Recipients, limit int) ([]models.ContactID, error) { + idsSeen := 
make(map[models.ContactID]bool) + + // start by loading the explicitly listed contacts + includeContacts, err := models.LoadContacts(ctx, rt.DB, oa, recipients.ContactIDs) + if err != nil { + return nil, err + } + for _, c := range includeContacts { + idsSeen[c.ID()] = true + } + + // created contacts are handled separately because they won't be indexed + var createdContacts map[urns.URN]*models.Contact + + // resolve any raw URNs + if len(recipients.URNs) > 0 { + fetchedByURN, createdByURN, err := models.GetOrCreateContactsFromURNs(ctx, rt.DB, oa, recipients.URNs) + if err != nil { + return nil, errors.Wrap(err, "error getting contact ids from urns") + } + for _, c := range fetchedByURN { + if !idsSeen[c.ID()] { + includeContacts = append(includeContacts, c) + idsSeen[c.ID()] = true + } + } + + createdContacts = createdByURN + } + + includeGroups := make([]*models.Group, 0, len(recipients.GroupIDs)) + excludeGroups := make([]*models.Group, 0, len(recipients.ExcludeGroupIDs)) + + for _, groupID := range recipients.GroupIDs { + group := oa.GroupByID(groupID) + if group != nil { + includeGroups = append(includeGroups, group) + } + } + for _, groupID := range recipients.ExcludeGroupIDs { + group := oa.GroupByID(groupID) + if group != nil { + excludeGroups = append(excludeGroups, group) + } + } + + var matches []models.ContactID + + // if we're only including individual contacts and there are no exclusions, we can just return those contacts + if len(includeGroups) == 0 && recipients.Query == "" && recipients.Exclusions == models.NoExclusions && len(excludeGroups) == 0 { + matches := make([]models.ContactID, 0, len(includeContacts)+len(createdContacts)) + for _, c := range includeContacts { + matches = append(matches, c.ID()) + } + for _, c := range createdContacts { + matches = append(matches, c.ID()) + } + return matches, nil + } + + if len(includeContacts) > 0 || len(includeGroups) > 0 || recipients.Query != "" { + // reduce contacts to UUIDs + includeContactUUIDs := 
make([]flows.ContactUUID, len(includeContacts)) + for i, contact := range includeContacts { + includeContactUUIDs[i] = contact.UUID() + } + + query, err := BuildRecipientsQuery(oa, flow, includeGroups, includeContactUUIDs, recipients.Query, recipients.Exclusions, excludeGroups) + if err != nil { + return nil, errors.Wrap(err, "error building query") + } + + matches, err = GetContactIDsForQuery(ctx, rt, oa, query, limit) + if err != nil { + return nil, errors.Wrap(err, "error performing contact search") + } + } + + // only add created contacts if not excluding contacts based on last seen - other exclusions can't apply to a newly + // created contact + if recipients.Exclusions.NotSeenSinceDays == 0 { + for _, c := range createdContacts { + matches = append(matches, c.ID()) + } + } + + return matches, nil +} diff --git a/core/search/resolve_test.go b/core/search/resolve_test.go new file mode 100644 index 000000000..4fd7a1df5 --- /dev/null +++ b/core/search/resolve_test.go @@ -0,0 +1,105 @@ +package search_test + +import ( + "testing" + + "github.com/nyaruka/gocommon/urns" + "github.com/nyaruka/mailroom/core/models" + "github.com/nyaruka/mailroom/core/search" + "github.com/nyaruka/mailroom/testsuite" + "github.com/nyaruka/mailroom/testsuite/testdata" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestResolveRecipients(t *testing.T) { + ctx, rt := testsuite.Runtime() + + defer testsuite.Reset(testsuite.ResetAll) + + group1 := testdata.InsertContactGroup(rt, testdata.Org1, "a85acec9-3895-4ffd-87c1-c69a25781a85", "Group 1", "", testdata.George, testdata.Alexandria) + group2 := testdata.InsertContactGroup(rt, testdata.Org1, "eb578345-595e-4e36-a68b-6941e242cdbb", "Group 2", "", testdata.George) + + oa, err := models.GetOrgAssetsWithRefresh(ctx, rt, testdata.Org1.ID, models.RefreshGroups) + require.NoError(t, err) + + tcs := []struct { + flow *testdata.Flow + recipients *search.Recipients + limit int + expectedIDs 
[]models.ContactID + }{ + { // 0 nobody + recipients: &search.Recipients{}, + expectedIDs: []models.ContactID{}, + }, + { // 1 only explicit contacts + recipients: &search.Recipients{ + ContactIDs: []models.ContactID{testdata.Bob.ID, testdata.Alexandria.ID}, + }, + limit: -1, + expectedIDs: []models.ContactID{testdata.Bob.ID, testdata.Alexandria.ID}, + }, + { // 2 explicit contacts, group and query + recipients: &search.Recipients{ + ContactIDs: []models.ContactID{testdata.Bob.ID}, + GroupIDs: []models.GroupID{group1.ID}, + Query: `name = "Cathy" OR name = "Bob"`, + }, + limit: -1, + expectedIDs: []models.ContactID{testdata.Bob.ID, testdata.George.ID, testdata.Alexandria.ID, testdata.Cathy.ID}, + }, + { // 3 exclude group + recipients: &search.Recipients{ + ContactIDs: []models.ContactID{testdata.George.ID, testdata.Bob.ID}, + ExcludeGroupIDs: []models.GroupID{group2.ID}, + }, + limit: -1, + expectedIDs: []models.ContactID{testdata.Bob.ID}, + }, + { // 4 limit number returned + recipients: &search.Recipients{ + Query: `name = "Cathy" OR name = "Bob"`, + }, + limit: 1, + expectedIDs: []models.ContactID{testdata.Cathy.ID}, + }, + { // 5 create new contacts from URNs + recipients: &search.Recipients{ + ContactIDs: []models.ContactID{testdata.Bob.ID}, + URNs: []urns.URN{"tel:+1234000001", "tel:+1234000002"}, + Exclusions: models.Exclusions{InAFlow: true}, + }, + limit: -1, + expectedIDs: []models.ContactID{testdata.Bob.ID, 30000, 30001}, + }, + { // 6 new contacts not included if excluding based on last seen + recipients: &search.Recipients{ + URNs: []urns.URN{"tel:+1234000003"}, + Exclusions: models.Exclusions{NotSeenSinceDays: 10}, + }, + limit: -1, + expectedIDs: []models.ContactID{}, + }, + { // 7 new contacts is now an existing contact that can be searched + recipients: &search.Recipients{ + URNs: []urns.URN{"tel:+1234000003"}, + }, + limit: -1, + expectedIDs: []models.ContactID{30002}, + }, + } + + for i, tc := range tcs { + testsuite.ReindexElastic(ctx) + + var 
flow *models.Flow + if tc.flow != nil { + flow = tc.flow.Load(rt, oa) + } + + actualIDs, err := search.ResolveRecipients(ctx, rt, oa, flow, tc.recipients, tc.limit) + assert.NoError(t, err) + assert.ElementsMatch(t, tc.expectedIDs, actualIDs, "contact ids mismatch in %d", i) + } +} diff --git a/core/search/search.go b/core/search/search.go index dc74525bb..e80328de0 100644 --- a/core/search/search.go +++ b/core/search/search.go @@ -11,6 +11,7 @@ import ( "github.com/nyaruka/goflow/contactql" "github.com/nyaruka/goflow/contactql/es" "github.com/nyaruka/mailroom/core/models" + "github.com/nyaruka/mailroom/runtime" "github.com/olivere/elastic/v7" "github.com/pkg/errors" "github.com/sirupsen/logrus" @@ -66,14 +67,48 @@ func BuildElasticQuery(oa *models.OrgAssets, group *models.Group, status models. return eq } +// GetContactTotal returns the total count of matching contacts for the given query +func GetContactTotal(ctx context.Context, rt *runtime.Runtime, oa *models.OrgAssets, query string) (*contactql.ContactQuery, int64, error) { + env := oa.Env() + var parsed *contactql.ContactQuery + var err error + + if rt.ES == nil { + return nil, 0, errors.Errorf("no elastic client available, check your configuration") + } + + if query != "" { + parsed, err = contactql.ParseQuery(env, query, oa.SessionAssets()) + if err != nil { + return nil, 0, errors.Wrapf(err, "error parsing query: %s", query) + } + } + + eq := BuildElasticQuery(oa, nil, models.NilContactStatus, nil, parsed) + + count, err := rt.ES.Count(rt.Config.ElasticContactsIndex).Routing(strconv.FormatInt(int64(oa.OrgID()), 10)).Query(eq).Do(ctx) + if err != nil { + // Get *elastic.Error which contains additional information + ee, ok := err.(*elastic.Error) + if !ok { + return nil, 0, errors.Wrap(err, "error performing query") + } + + return nil, 0, errors.Wrapf(err, "error performing query: %s", ee.Details.Reason) + } + + return parsed, count, nil +} + // GetContactIDsForQueryPage returns a page of contact ids for the 
given query and sort -func GetContactIDsForQueryPage(ctx context.Context, client *elastic.Client, oa *models.OrgAssets, group *models.Group, excludeIDs []models.ContactID, query string, sort string, offset int, pageSize int) (*contactql.ContactQuery, []models.ContactID, int64, error) { +func GetContactIDsForQueryPage(ctx context.Context, rt *runtime.Runtime, oa *models.OrgAssets, group *models.Group, excludeIDs []models.ContactID, query string, sort string, offset int, pageSize int) (*contactql.ContactQuery, []models.ContactID, int64, error) { env := oa.Env() + index := rt.Config.ElasticContactsIndex start := time.Now() var parsed *contactql.ContactQuery var err error - if client == nil { + if rt.ES == nil { return nil, nil, 0, errors.Errorf("no elastic client available, check your configuration") } @@ -91,7 +126,7 @@ func GetContactIDsForQueryPage(ctx context.Context, client *elastic.Client, oa * return nil, nil, 0, errors.Wrapf(err, "error parsing sort") } - s := client.Search("contacts").TrackTotalHits(true).Routing(strconv.FormatInt(int64(oa.OrgID()), 10)) + s := rt.ES.Search(index).TrackTotalHits(true).Routing(strconv.FormatInt(int64(oa.OrgID()), 10)) s = s.Size(pageSize).From(offset).Query(eq).SortBy(fieldSort).FetchSource(false) results, err := s.Do(ctx) @@ -117,11 +152,12 @@ func GetContactIDsForQueryPage(ctx context.Context, client *elastic.Client, oa * } // GetContactIDsForQuery returns up to limit the contact ids that match the given query without sorting. Limit of -1 means return all. 
-func GetContactIDsForQuery(ctx context.Context, client *elastic.Client, oa *models.OrgAssets, query string, limit int) ([]models.ContactID, error) { +func GetContactIDsForQuery(ctx context.Context, rt *runtime.Runtime, oa *models.OrgAssets, query string, limit int) ([]models.ContactID, error) { env := oa.Env() + index := rt.Config.ElasticContactsIndex start := time.Now() - if client == nil { + if rt.ES == nil { return nil, errors.Errorf("no elastic client available, check your configuration") } @@ -137,7 +173,7 @@ func GetContactIDsForQuery(ctx context.Context, client *elastic.Client, oa *mode // if limit provided that can be done with regular search, do that if limit >= 0 && limit <= 10000 { - results, err := client.Search("contacts").Routing(routing).From(0).Size(limit).Query(eq).FetchSource(false).Do(ctx) + results, err := rt.ES.Search(index).Routing(routing).From(0).Size(limit).Query(eq).FetchSource(false).Do(ctx) if err != nil { return nil, err } @@ -146,7 +182,7 @@ func GetContactIDsForQuery(ctx context.Context, client *elastic.Client, oa *mode // for larger limits, use scroll service // note that this is no longer recommended, see https://www.elastic.co/guide/en/elasticsearch/reference/current/scroll-api.html - scroll := client.Scroll("contacts").Routing(routing).KeepAlive("15m").Size(10000).Query(eq).FetchSource(false) + scroll := rt.ES.Scroll(index).Routing(routing).KeepAlive("15m").Size(10000).Query(eq).FetchSource(false) for { results, err := scroll.Do(ctx) if err == io.EOF { diff --git a/core/search/search_test.go b/core/search/search_test.go index c3cb5769e..74a4d5215 100644 --- a/core/search/search_test.go +++ b/core/search/search_test.go @@ -3,333 +3,134 @@ package search_test import ( "testing" - "github.com/nyaruka/goflow/test" "github.com/nyaruka/mailroom/core/models" "github.com/nyaruka/mailroom/core/search" "github.com/nyaruka/mailroom/testsuite" "github.com/nyaruka/mailroom/testsuite/testdata" - "github.com/olivere/elastic/v7" 
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) -func TestGetContactIDsForQueryPage(t *testing.T) { - ctx, rt, _, _ := testsuite.Get() +func TestGetContactTotal(t *testing.T) { + ctx, rt := testsuite.Runtime() - mockES := testsuite.NewMockElasticServer() - defer mockES.Close() + oa, err := models.GetOrgAssets(ctx, rt, testdata.Org1.ID) + require.NoError(t, err) - mockES.AddResponse(testdata.George.ID) - mockES.AddResponse(testdata.George.ID) + tcs := []struct { + query string + expectedTotal int64 + expectedError string + }{ + {query: "george OR bob", expectedTotal: 2}, + {query: "george", expectedTotal: 1}, + {query: "age >= 30", expectedTotal: 1}, + { + query: "goats > 2", // no such contact field + expectedError: "error parsing query: goats > 2: can't resolve 'goats' to attribute, scheme or field", + }, + } - es := mockES.Client() + for i, tc := range tcs { + _, total, err := search.GetContactTotal(ctx, rt, oa, tc.query) + + if tc.expectedError != "" { + assert.EqualError(t, err, tc.expectedError) + } else { + assert.NoError(t, err, "%d: error encountered performing query", i) + assert.Equal(t, tc.expectedTotal, total, "%d: total mismatch", i) + } + } +} + +func TestGetContactIDsForQueryPage(t *testing.T) { + ctx, rt := testsuite.Runtime() oa, err := models.GetOrgAssets(ctx, rt, testdata.Org1.ID) require.NoError(t, err) tcs := []struct { - Group *testdata.Group - ExcludeIDs []models.ContactID - Query string - Sort string - ExpectedESRequest string - ExpectedContacts []models.ContactID - ExpectedTotal int64 - ExpectedError string + group *testdata.Group + excludeIDs []models.ContactID + query string + sort string + expectedContacts []models.ContactID + expectedTotal int64 + expectedError string }{ - { - Group: testdata.ActiveGroup, - Query: "george", - ExpectedESRequest: `{ - "_source": false, - "from": 0, - "query": { - "bool": { - "must": [ - { - "term": { - "org_id": 1 - } - }, - { - "term": { - "is_active": true - } - }, - { - 
"term": { - "group_ids": 1 - } - }, - { - "match": { - "name": { - "query": "george" - } - } - } - ] - } - }, - "size": 50, - "sort": [ - { - "id": { - "order": "desc" - } - } - ], - "track_total_hits": true - }`, - ExpectedContacts: []models.ContactID{testdata.George.ID}, - ExpectedTotal: 1, + { // 0 + group: testdata.ActiveGroup, + query: "george OR bob", + expectedContacts: []models.ContactID{testdata.George.ID, testdata.Bob.ID}, + expectedTotal: 2, }, - { - Group: testdata.BlockedGroup, - ExcludeIDs: []models.ContactID{testdata.Bob.ID, testdata.Cathy.ID}, - Query: "age > 32", - Sort: "-age", - ExpectedESRequest: `{ - "_source": false, - "from": 0, - "query": { - "bool": { - "must": [ - { - "term": { - "org_id": 1 - } - }, - { - "term": { - "is_active": true - } - }, - { - "term": { - "group_ids": 2 - } - }, - { - "nested": { - "path": "fields", - "query": { - "bool": { - "must": [ - { - "term": { - "fields.field": "903f51da-2717-47c7-a0d3-f2f32877013d" - } - }, - { - "range": { - "fields.number": { - "from": 32, - "include_lower": false, - "include_upper": true, - "to": null - } - } - } - ] - } - } - } - } - ], - "must_not": { - "ids": { - "type": "_doc", - "values": [ - "10001", - "10000" - ] - } - } - } - }, - "size": 50, - "sort": [ - { - "fields.number": { - "nested": { - "filter": { - "term": { - "fields.field": "903f51da-2717-47c7-a0d3-f2f32877013d" - } - }, - "path": "fields" - }, - "order": "desc" - } - } - ], - "track_total_hits": true - }`, - ExpectedContacts: []models.ContactID{testdata.George.ID}, - ExpectedTotal: 1, + { // 1 + group: testdata.BlockedGroup, + query: "george", + expectedContacts: []models.ContactID{}, + expectedTotal: 0, }, - { - Group: testdata.ActiveGroup, - Query: "goats > 2", // no such contact field - ExpectedError: "error parsing query: goats > 2: can't resolve 'goats' to attribute, scheme or field", + { // 2 + group: testdata.ActiveGroup, + query: "age >= 30", + sort: "-age", + expectedContacts: 
[]models.ContactID{testdata.George.ID}, + expectedTotal: 1, + }, + { // 3 + group: testdata.ActiveGroup, + excludeIDs: []models.ContactID{testdata.George.ID}, + query: "age >= 30", + sort: "-age", + expectedContacts: []models.ContactID{}, + expectedTotal: 0, + }, + { // 4 + group: testdata.BlockedGroup, + query: "goats > 2", // no such contact field + expectedError: "error parsing query: goats > 2: can't resolve 'goats' to attribute, scheme or field", }, } for i, tc := range tcs { - group := oa.GroupByID(tc.Group.ID) + group := oa.GroupByID(tc.group.ID) - _, ids, total, err := search.GetContactIDsForQueryPage(ctx, es, oa, group, tc.ExcludeIDs, tc.Query, tc.Sort, 0, 50) + _, ids, total, err := search.GetContactIDsForQueryPage(ctx, rt, oa, group, tc.excludeIDs, tc.query, tc.sort, 0, 50) - if tc.ExpectedError != "" { - assert.EqualError(t, err, tc.ExpectedError) + if tc.expectedError != "" { + assert.EqualError(t, err, tc.expectedError) } else { assert.NoError(t, err, "%d: error encountered performing query", i) - assert.Equal(t, tc.ExpectedContacts, ids, "%d: ids mismatch", i) - assert.Equal(t, tc.ExpectedTotal, total, "%d: total mismatch", i) - - test.AssertEqualJSON(t, []byte(tc.ExpectedESRequest), []byte(mockES.LastRequestBody), "%d: ES request mismatch", i) + assert.Equal(t, tc.expectedContacts, ids, "%d: ids mismatch", i) + assert.Equal(t, tc.expectedTotal, total, "%d: total mismatch", i) } } } func TestGetContactIDsForQuery(t *testing.T) { - ctx, rt, _, _ := testsuite.Get() - - mockES := testsuite.NewMockElasticServer() - defer mockES.Close() - - mockES.AddResponse(testdata.George.ID) - mockES.AddResponse() - mockES.AddResponse(testdata.George.ID) - - es, err := elastic.NewClient(elastic.SetURL(mockES.URL()), elastic.SetHealthcheck(false), elastic.SetSniff(false)) - require.NoError(t, err) + ctx, rt := testsuite.Runtime() oa, err := models.GetOrgAssets(ctx, rt, 1) require.NoError(t, err) tcs := []struct { - query string - limit int - expectedRequestURL string - 
expectedRequestBody string - mockedESResponse string - expectedContacts []models.ContactID - expectedError string + query string + limit int + expectedContacts []models.ContactID + expectedError string }{ { - query: "george", - limit: -1, - expectedRequestURL: "/_search/scroll", - expectedRequestBody: `{ - "_source":false, - "query": { - "bool": { - "must": [ - { - "term": { - "org_id": 1 - } - }, - { - "term": { - "is_active": true - } - }, - { - "term": { - "status": "A" - } - }, - { - "match": { - "name": { - "query": "george" - } - } - } - ] - } - }, - "sort":["_doc"] - }`, - expectedContacts: []models.ContactID{testdata.George.ID}, + query: "george OR bob", + limit: -1, + expectedContacts: []models.ContactID{testdata.George.ID, testdata.Bob.ID}, }, { - query: "nobody", - limit: -1, - expectedRequestURL: "/contacts/_search?routing=1&scroll=15m&size=10000", - expectedRequestBody: `{ - "_source":false, - "query": { - "bool": { - "must": [ - { - "term": { - "org_id": 1 - } - }, - { - "term": { - "is_active": true - } - }, - { - "term": { - "status": "A" - } - }, - { - "match": { - "name": { - "query": "nobody" - } - } - } - ] - } - }, - "sort":["_doc"] - }`, + query: "nobody", + limit: -1, expectedContacts: []models.ContactID{}, }, { - query: "george", - limit: 1, - expectedRequestURL: "/contacts/_search?routing=1", - expectedRequestBody: `{ - "_source": false, - "from": 0, - "query": { - "bool": { - "must": [ - { - "term": { - "org_id": 1 - } - }, - { - "term": { - "is_active": true - } - }, - { - "term": { - "status": "A" - } - }, - { - "match": { - "name": { - "query": "george" - } - } - } - ] - } - }, - "size": 1 - }`, + query: "george", + limit: 1, expectedContacts: []models.ContactID{testdata.George.ID}, }, { @@ -340,16 +141,13 @@ func TestGetContactIDsForQuery(t *testing.T) { } for i, tc := range tcs { - ids, err := search.GetContactIDsForQuery(ctx, es, oa, tc.query, tc.limit) + ids, err := search.GetContactIDsForQuery(ctx, rt, oa, tc.query, tc.limit) if 
tc.expectedError != "" { assert.EqualError(t, err, tc.expectedError) } else { assert.NoError(t, err, "%d: error encountered performing query", i) - assert.Equal(t, tc.expectedContacts, ids, "%d: ids mismatch", i) - - assert.Equal(t, tc.expectedRequestURL, mockES.LastRequestURL, "%d: request URL mismatch", i) - test.AssertEqualJSON(t, []byte(tc.expectedRequestBody), []byte(mockES.LastRequestBody), "%d: request body mismatch", i) + assert.ElementsMatch(t, tc.expectedContacts, ids, "%d: ids mismatch", i) } } } diff --git a/core/tasks/base.go b/core/tasks/base.go index b6f8ca2dd..d24b51a6a 100644 --- a/core/tasks/base.go +++ b/core/tasks/base.go @@ -5,12 +5,12 @@ import ( "encoding/json" "time" + "github.com/gomodule/redigo/redis" "github.com/nyaruka/goflow/utils" "github.com/nyaruka/mailroom" "github.com/nyaruka/mailroom/core/models" "github.com/nyaruka/mailroom/core/queue" "github.com/nyaruka/mailroom/runtime" - "github.com/pkg/errors" ) @@ -20,22 +20,13 @@ var registeredTypes = map[string](func() Task){} func RegisterType(name string, initFunc func() Task) { registeredTypes[name] = initFunc - mailroom.AddTaskFunction(name, func(ctx context.Context, rt *runtime.Runtime, task *queue.Task) error { - // decode our task body - typedTask, err := ReadTask(task.Type, task.Task) - if err != nil { - return errors.Wrapf(err, "error reading task of type %s", task.Type) - } - - ctx, cancel := context.WithTimeout(ctx, typedTask.Timeout()) - defer cancel() - - return typedTask.Perform(ctx, rt, models.OrgID(task.OrgID)) - }) + mailroom.AddTaskFunction(name, Perform) } // Task is the common interface for all task types type Task interface { + Type() string + // Timeout is the maximum amount of time the task can run for Timeout() time.Duration @@ -43,6 +34,25 @@ type Task interface { Perform(ctx context.Context, rt *runtime.Runtime, orgID models.OrgID) error } +// Performs a raw task popped from a queue +func Perform(ctx context.Context, rt *runtime.Runtime, task *queue.Task) error { 
+ // decode our task body + typedTask, err := ReadTask(task.Type, task.Task) + if err != nil { + return errors.Wrapf(err, "error reading task of type %s", task.Type) + } + + ctx, cancel := context.WithTimeout(ctx, typedTask.Timeout()) + defer cancel() + + return typedTask.Perform(ctx, rt, models.OrgID(task.OrgID)) +} + +// Queue adds the given task to the named queue +func Queue(rc redis.Conn, qname string, orgID models.OrgID, task Task, priority queue.Priority) error { + return queue.AddTask(rc, qname, task.Type(), int(orgID), task, priority) +} + //------------------------------------------------------------------------------------------ // JSON Encoding / Decoding //------------------------------------------------------------------------------------------ diff --git a/core/tasks/campaigns/cron.go b/core/tasks/campaigns/cron.go index c6bdd0008..12a35f3b2 100644 --- a/core/tasks/campaigns/cron.go +++ b/core/tasks/campaigns/cron.go @@ -11,6 +11,7 @@ import ( "github.com/nyaruka/mailroom" "github.com/nyaruka/mailroom/core/models" "github.com/nyaruka/mailroom/core/queue" + "github.com/nyaruka/mailroom/core/tasks" "github.com/nyaruka/mailroom/runtime" "github.com/nyaruka/redisx" @@ -122,7 +123,7 @@ func queueFiresTask(rp *redis.Pool, orgID models.OrgID, task *FireCampaignEventT rc := rp.Get() defer rc.Close() - err := queue.AddTask(rc, queue.BatchQueue, TypeFireCampaignEvent, int(orgID), task, queue.DefaultPriority) + err := tasks.Queue(rc, queue.BatchQueue, orgID, task, queue.DefaultPriority) if err != nil { return errors.Wrap(err, "error queuing task") } diff --git a/core/tasks/campaigns/cron_test.go b/core/tasks/campaigns/cron_test.go index 0236ea71d..f08539cca 100644 --- a/core/tasks/campaigns/cron_test.go +++ b/core/tasks/campaigns/cron_test.go @@ -7,7 +7,6 @@ import ( "testing" "time" - "github.com/gomodule/redigo/redis" "github.com/nyaruka/gocommon/dbutil/assertdb" "github.com/nyaruka/gocommon/jsonx" "github.com/nyaruka/gocommon/uuids" @@ -18,50 +17,50 @@ 
import ( "github.com/nyaruka/mailroom/core/queue" "github.com/nyaruka/mailroom/core/tasks" "github.com/nyaruka/mailroom/core/tasks/campaigns" + "github.com/nyaruka/mailroom/runtime" "github.com/nyaruka/mailroom/testsuite" "github.com/nyaruka/mailroom/testsuite/testdata" - "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) func TestQueueEventFires(t *testing.T) { - ctx, rt, db, rp := testsuite.Get() - rc := rp.Get() + ctx, rt := testsuite.Runtime() + rc := rt.RP.Get() defer rc.Close() defer testsuite.Reset(testsuite.ResetData | testsuite.ResetRedis) - org2Campaign := testdata.InsertCampaign(db, testdata.Org2, "Org 2", testdata.DoctorsGroup) - org2CampaignEvent := testdata.InsertCampaignFlowEvent(db, org2Campaign, testdata.Org2Favorites, testdata.AgeField, 1, "D") + org2Campaign := testdata.InsertCampaign(rt, testdata.Org2, "Org 2", testdata.DoctorsGroup) + org2CampaignEvent := testdata.InsertCampaignFlowEvent(rt, org2Campaign, testdata.Org2Favorites, testdata.AgeField, 1, "D") // try with zero fires err := campaigns.QueueEventFires(ctx, rt) assert.NoError(t, err) - assertFireTasks(t, rp, testdata.Org1, [][]models.FireID{}) - assertFireTasks(t, rp, testdata.Org2, [][]models.FireID{}) + assertFireTasks(t, rt, testdata.Org1, [][]models.FireID{}) + assertFireTasks(t, rt, testdata.Org2, [][]models.FireID{}) // create event fires due now for 2 contacts and in the future for another contact - fire1ID := testdata.InsertEventFire(rt.DB, testdata.Cathy, testdata.RemindersEvent1, time.Now().Add(-time.Minute)) - fire2ID := testdata.InsertEventFire(rt.DB, testdata.George, testdata.RemindersEvent1, time.Now().Add(-time.Minute)) - fire3ID := testdata.InsertEventFire(rt.DB, testdata.Org2Contact, org2CampaignEvent, time.Now().Add(-time.Minute)) - fire4ID := testdata.InsertEventFire(rt.DB, testdata.Alexandria, testdata.RemindersEvent2, time.Now().Add(-time.Minute)) - testdata.InsertEventFire(rt.DB, testdata.Alexandria, testdata.RemindersEvent1, 
time.Now().Add(time.Hour*24)) // in future + fire1ID := testdata.InsertEventFire(rt, testdata.Cathy, testdata.RemindersEvent1, time.Now().Add(-time.Minute)) + fire2ID := testdata.InsertEventFire(rt, testdata.George, testdata.RemindersEvent1, time.Now().Add(-time.Minute)) + fire3ID := testdata.InsertEventFire(rt, testdata.Org2Contact, org2CampaignEvent, time.Now().Add(-time.Minute)) + fire4ID := testdata.InsertEventFire(rt, testdata.Alexandria, testdata.RemindersEvent2, time.Now().Add(-time.Minute)) + testdata.InsertEventFire(rt, testdata.Alexandria, testdata.RemindersEvent1, time.Now().Add(time.Hour*24)) // in future // schedule our campaign to be started err = campaigns.QueueEventFires(ctx, rt) assert.NoError(t, err) - assertFireTasks(t, rp, testdata.Org1, [][]models.FireID{{fire1ID, fire2ID}, {fire4ID}}) - assertFireTasks(t, rp, testdata.Org2, [][]models.FireID{{fire3ID}}) + assertFireTasks(t, rt, testdata.Org1, [][]models.FireID{{fire1ID, fire2ID}, {fire4ID}}) + assertFireTasks(t, rt, testdata.Org2, [][]models.FireID{{fire3ID}}) // running again won't double add those fires err = campaigns.QueueEventFires(ctx, rt) assert.NoError(t, err) - assertFireTasks(t, rp, testdata.Org1, [][]models.FireID{{fire1ID, fire2ID}, {fire4ID}}) - assertFireTasks(t, rp, testdata.Org2, [][]models.FireID{{fire3ID}}) + assertFireTasks(t, rt, testdata.Org1, [][]models.FireID{{fire1ID, fire2ID}, {fire4ID}}) + assertFireTasks(t, rt, testdata.Org2, [][]models.FireID{{fire3ID}}) // clear queued tasks rc.Do("DEL", "batch:active") @@ -69,14 +68,14 @@ func TestQueueEventFires(t *testing.T) { // add 110 scheduled event fires to test batch limits for i := 0; i < 110; i++ { - contact := testdata.InsertContact(db, testdata.Org1, flows.ContactUUID(uuids.New()), fmt.Sprintf("Jim %d", i), envs.NilLanguage, models.ContactStatusActive) - testdata.InsertEventFire(rt.DB, contact, testdata.RemindersEvent1, time.Now().Add(-time.Minute)) + contact := testdata.InsertContact(rt, testdata.Org1, 
flows.ContactUUID(uuids.New()), fmt.Sprintf("Jim %d", i), envs.NilLanguage, models.ContactStatusActive) + testdata.InsertEventFire(rt, contact, testdata.RemindersEvent1, time.Now().Add(-time.Minute)) } err = campaigns.QueueEventFires(ctx, rt) assert.NoError(t, err) - queuedTasks := testsuite.CurrentOrgTasks(t, rp) + queuedTasks := testsuite.CurrentTasks(t, rt) org1Tasks := queuedTasks[testdata.Org1.ID] assert.Equal(t, 2, len(org1Tasks)) @@ -93,16 +92,16 @@ func TestQueueEventFires(t *testing.T) { assert.Equal(t, 100, len(tk1.FireIDs)) assert.Equal(t, 10, len(tk2.FireIDs)) } -func TestFireCampaignEvents(t *testing.T) { - ctx, rt, db, rp := testsuite.Get() - rc := rp.Get() +func TestQueueAndFireEvent(t *testing.T) { + ctx, rt := testsuite.Runtime() + rc := rt.RP.Get() defer rc.Close() defer testsuite.Reset(testsuite.ResetData | testsuite.ResetRedis) // create due fires for Cathy and George - f1ID := testdata.InsertEventFire(rt.DB, testdata.Cathy, testdata.RemindersEvent1, time.Now().Add(-time.Minute)) - f2ID := testdata.InsertEventFire(rt.DB, testdata.George, testdata.RemindersEvent1, time.Now().Add(-time.Minute)) + f1ID := testdata.InsertEventFire(rt, testdata.Cathy, testdata.RemindersEvent1, time.Now().Add(-time.Minute)) + f2ID := testdata.InsertEventFire(rt, testdata.George, testdata.RemindersEvent1, time.Now().Add(-time.Minute)) // queue the event task err := campaigns.QueueEventFires(ctx, rt) @@ -121,16 +120,16 @@ func TestFireCampaignEvents(t *testing.T) { assert.NoError(t, err) // should now have a flow run for that contact and flow - assertdb.Query(t, db, `SELECT COUNT(*) FROM flows_flowrun WHERE contact_id = $1 AND flow_id = $2 AND status = 'W'`, testdata.Cathy.ID, testdata.Favorites.ID).Returns(1) - assertdb.Query(t, db, `SELECT COUNT(*) FROM flows_flowrun WHERE contact_id = $1 AND flow_id = $2 AND status = 'W'`, testdata.George.ID, testdata.Favorites.ID).Returns(1) + assertdb.Query(t, rt.DB, `SELECT COUNT(*) FROM flows_flowrun WHERE contact_id = $1 AND 
flow_id = $2 AND status = 'W'`, testdata.Cathy.ID, testdata.Favorites.ID).Returns(1) + assertdb.Query(t, rt.DB, `SELECT COUNT(*) FROM flows_flowrun WHERE contact_id = $1 AND flow_id = $2 AND status = 'W'`, testdata.George.ID, testdata.Favorites.ID).Returns(1) // the event fires should be marked as fired - assertdb.Query(t, db, `SELECT fired IS NOT NULL AS fired, fired_result FROM campaigns_eventfire WHERE id = $1`, f1ID).Columns(map[string]interface{}{"fired": true, "fired_result": "F"}) - assertdb.Query(t, db, `SELECT fired IS NOT NULL AS fired, fired_result FROM campaigns_eventfire WHERE id = $1`, f2ID).Columns(map[string]interface{}{"fired": true, "fired_result": "F"}) + assertdb.Query(t, rt.DB, `SELECT fired IS NOT NULL AS fired, fired_result FROM campaigns_eventfire WHERE id = $1`, f1ID).Columns(map[string]interface{}{"fired": true, "fired_result": "F"}) + assertdb.Query(t, rt.DB, `SELECT fired IS NOT NULL AS fired, fired_result FROM campaigns_eventfire WHERE id = $1`, f2ID).Columns(map[string]interface{}{"fired": true, "fired_result": "F"}) // create due fires for George and Bob for a different event that skips - f3ID := testdata.InsertEventFire(rt.DB, testdata.George, testdata.RemindersEvent3, time.Now().Add(-time.Minute)) - f4ID := testdata.InsertEventFire(rt.DB, testdata.Bob, testdata.RemindersEvent3, time.Now().Add(-time.Minute)) + f3ID := testdata.InsertEventFire(rt, testdata.George, testdata.RemindersEvent3, time.Now().Add(-time.Minute)) + f4ID := testdata.InsertEventFire(rt, testdata.Bob, testdata.RemindersEvent3, time.Now().Add(-time.Minute)) // queue the event task err = campaigns.QueueEventFires(ctx, rt) @@ -148,17 +147,17 @@ func TestFireCampaignEvents(t *testing.T) { err = typedTask.Perform(ctx, rt, models.OrgID(task.OrgID)) assert.NoError(t, err) - assertdb.Query(t, db, `SELECT COUNT(*) FROM flows_flowrun WHERE contact_id = $1 AND flow_id = $2 AND status = 'W'`, testdata.George.ID, testdata.Favorites.ID).Returns(1) - assertdb.Query(t, db, `SELECT 
COUNT(*) FROM flows_flowrun WHERE contact_id = $1 AND flow_id = $2 AND status = 'W'`, testdata.Bob.ID, testdata.PickANumber.ID).Returns(1) + assertdb.Query(t, rt.DB, `SELECT COUNT(*) FROM flows_flowrun WHERE contact_id = $1 AND flow_id = $2 AND status = 'W'`, testdata.George.ID, testdata.Favorites.ID).Returns(1) + assertdb.Query(t, rt.DB, `SELECT COUNT(*) FROM flows_flowrun WHERE contact_id = $1 AND flow_id = $2 AND status = 'W'`, testdata.Bob.ID, testdata.PickANumber.ID).Returns(1) // the event fires should be marked as fired - assertdb.Query(t, db, `SELECT fired IS NOT NULL AS fired, fired_result FROM campaigns_eventfire WHERE id = $1`, f3ID).Columns(map[string]interface{}{"fired": true, "fired_result": "S"}) - assertdb.Query(t, db, `SELECT fired IS NOT NULL AS fired, fired_result FROM campaigns_eventfire WHERE id = $1`, f4ID).Columns(map[string]interface{}{"fired": true, "fired_result": "F"}) + assertdb.Query(t, rt.DB, `SELECT fired IS NOT NULL AS fired, fired_result FROM campaigns_eventfire WHERE id = $1`, f3ID).Columns(map[string]interface{}{"fired": true, "fired_result": "S"}) + assertdb.Query(t, rt.DB, `SELECT fired IS NOT NULL AS fired, fired_result FROM campaigns_eventfire WHERE id = $1`, f4ID).Columns(map[string]interface{}{"fired": true, "fired_result": "F"}) } func TestIVRCampaigns(t *testing.T) { - ctx, rt, db, rp := testsuite.Get() - rc := rp.Get() + ctx, rt := testsuite.Runtime() + rc := rt.RP.Get() defer rc.Close() defer testsuite.Reset(testsuite.ResetAll) @@ -166,8 +165,8 @@ func TestIVRCampaigns(t *testing.T) { // turn a campaign event into an IVR flow event rt.DB.MustExec(`UPDATE campaigns_campaignevent SET flow_id = $1 WHERE id = $2`, testdata.IVRFlow.ID, testdata.RemindersEvent1.ID) - testdata.InsertEventFire(rt.DB, testdata.Cathy, testdata.RemindersEvent1, time.Now().Add(-time.Minute)) - testdata.InsertEventFire(rt.DB, testdata.George, testdata.RemindersEvent1, time.Now().Add(-time.Minute)) + testdata.InsertEventFire(rt, testdata.Cathy, 
testdata.RemindersEvent1, time.Now().Add(-time.Minute)) + testdata.InsertEventFire(rt, testdata.George, testdata.RemindersEvent1, time.Now().Add(-time.Minute)) // schedule our campaign to be started err := campaigns.QueueEventFires(ctx, rt) @@ -186,23 +185,23 @@ func TestIVRCampaigns(t *testing.T) { assert.NoError(t, err) // should now have a flow start created - assertdb.Query(t, db, `SELECT COUNT(*) from flows_flowstart WHERE flow_id = $1 AND start_type = 'T' AND status = 'P';`, testdata.IVRFlow.ID).Returns(1) - assertdb.Query(t, db, `SELECT COUNT(*) from flows_flowstart_contacts WHERE contact_id = $1 AND flowstart_id = 1;`, testdata.Cathy.ID).Returns(1) - assertdb.Query(t, db, `SELECT COUNT(*) from flows_flowstart_contacts WHERE contact_id = $1 AND flowstart_id = 1;`, testdata.George.ID).Returns(1) + assertdb.Query(t, rt.DB, `SELECT COUNT(*) from flows_flowstart WHERE flow_id = $1 AND start_type = 'T' AND status = 'P';`, testdata.IVRFlow.ID).Returns(1) + assertdb.Query(t, rt.DB, `SELECT COUNT(*) from flows_flowstart_contacts WHERE contact_id = $1 AND flowstart_id = 1;`, testdata.Cathy.ID).Returns(1) + assertdb.Query(t, rt.DB, `SELECT COUNT(*) from flows_flowstart_contacts WHERE contact_id = $1 AND flowstart_id = 1;`, testdata.George.ID).Returns(1) // event should be marked as fired - assertdb.Query(t, db, `SELECT COUNT(*) from campaigns_eventfire WHERE event_id = $1 AND fired IS NOT NULL;`, testdata.RemindersEvent1.ID).Returns(2) + assertdb.Query(t, rt.DB, `SELECT COUNT(*) from campaigns_eventfire WHERE event_id = $1 AND fired IS NOT NULL;`, testdata.RemindersEvent1.ID).Returns(2) // pop our next task, should be the start task, err = queue.PopNextTask(rc, queue.BatchQueue) assert.NoError(t, err) assert.NotNil(t, task) - assert.Equal(t, task.Type, queue.StartIVRFlowBatch) + assert.Equal(t, "start_ivr_flow_batch", task.Type) } -func assertFireTasks(t *testing.T, rp *redis.Pool, org *testdata.Org, expected [][]models.FireID) { - allTasks := 
testsuite.CurrentOrgTasks(t, rp) +func assertFireTasks(t *testing.T, rt *runtime.Runtime, org *testdata.Org, expected [][]models.FireID) { + allTasks := testsuite.CurrentTasks(t, rt) actual := make([][]models.FireID, len(allTasks[org.ID])) for i, task := range allTasks[org.ID] { diff --git a/core/tasks/campaigns/fire_campaign_event.go b/core/tasks/campaigns/fire_campaign_event.go index 19ab9d8b5..660e16d3d 100644 --- a/core/tasks/campaigns/fire_campaign_event.go +++ b/core/tasks/campaigns/fire_campaign_event.go @@ -5,12 +5,19 @@ import ( "fmt" "time" + "github.com/gomodule/redigo/redis" + "github.com/jmoiron/sqlx" + "github.com/nyaruka/gocommon/analytics" "github.com/nyaruka/goflow/assets" + "github.com/nyaruka/goflow/flows" "github.com/nyaruka/goflow/flows/triggers" "github.com/nyaruka/mailroom/core/models" "github.com/nyaruka/mailroom/core/runner" "github.com/nyaruka/mailroom/core/tasks" + "github.com/nyaruka/mailroom/core/tasks/handler" "github.com/nyaruka/mailroom/runtime" + "golang.org/x/exp/maps" + "golang.org/x/exp/slices" "github.com/pkg/errors" "github.com/sirupsen/logrus" @@ -33,6 +40,10 @@ type FireCampaignEventTask struct { CampaignName string `json:"campaign_name"` } +func (t *FireCampaignEventTask) Type() string { + return TypeFireCampaignEvent +} + // Timeout is the maximum amount of time the task can run for func (t *FireCampaignEventTask) Timeout() time.Duration { // base of 5 minutes + one minute per fire @@ -74,30 +85,26 @@ func (t *FireCampaignEventTask) Perform(ctx context.Context, rt *runtime.Runtime return nil } - contactMap := make(map[models.ContactID]*models.EventFire) - for _, fire := range fires { - contactMap[fire.ContactID] = fire - } - campaign := triggers.NewCampaignReference(triggers.CampaignUUID(t.CampaignUUID), t.CampaignName) - started, err := runner.FireCampaignEvents(ctx, rt, orgID, fires, t.FlowUUID, campaign, triggers.CampaignEventUUID(t.EventUUID)) + handled, err := FireCampaignEvents(ctx, rt, orgID, fires, t.FlowUUID, 
campaign, triggers.CampaignEventUUID(t.EventUUID)) - // remove all the contacts that were started - for _, contactID := range started { - delete(contactMap, contactID) + handledSet := make(map[*models.EventFire]bool, len(handled)) + for _, f := range handled { + handledSet[f] = true } - // what remains in our contact map are fires that failed for some reason, umark these - if len(contactMap) > 0 { - rc := rp.Get() - for _, failed := range contactMap { - rerr := campaignsMarker.Remove(rc, fmt.Sprintf("%d", failed.FireID)) + // any fires that weren't handled are unmarked so they will be retried + rc := rp.Get() + defer rc.Close() + + for _, f := range fires { + if !handledSet[f] { + rerr := campaignsMarker.Remove(rc, fmt.Sprintf("%d", f.FireID)) if rerr != nil { - log.WithError(rerr).WithField("fire_id", failed.FireID).Error("error unmarking campaign fire") + log.WithError(rerr).WithField("fire_id", f.FireID).Error("error unmarking campaign fire") } } - rc.Close() } if err != nil { @@ -106,3 +113,131 @@ func (t *FireCampaignEventTask) Perform(ctx context.Context, rt *runtime.Runtime return nil } + +// FireCampaignEvents tries to handle the given event fires, returning those that were handled (i.e. 
skipped, fired or deleted)
+func FireCampaignEvents(ctx context.Context, rt *runtime.Runtime, orgID models.OrgID, fires []*models.EventFire, flowUUID assets.FlowUUID, campaign *triggers.CampaignReference, eventUUID triggers.CampaignEventUUID) ([]*models.EventFire, error) {
+	start := time.Now()
+
+	// create our org assets
+	oa, err := models.GetOrgAssets(ctx, rt, orgID)
+	if err != nil {
+		return nil, errors.Wrapf(err, "error creating assets for org: %d", orgID)
+	}
+
+	// get the campaign event object
+	dbEvent := oa.CampaignEventByID(fires[0].EventID)
+	if dbEvent == nil {
+		err := models.DeleteEventFires(ctx, rt.DB, fires)
+		if err != nil {
+			return nil, errors.Wrap(err, "error deleting fires for inactive campaign event")
+		}
+		return fires, nil
+	}
+
+	// get the flow it references
+	flow, err := oa.FlowByUUID(flowUUID)
+	if err == models.ErrNotFound {
+		err := models.DeleteEventFires(ctx, rt.DB, fires)
+		if err != nil {
+			return nil, errors.Wrapf(err, "error deleting fires for inactive flow")
+		}
+		return fires, nil
+	}
+	if err != nil {
+		return nil, errors.Wrapf(err, "error loading campaign event flow: %s", flowUUID)
+	}
+
+	dbFlow := flow.(*models.Flow)
+
+	// figure out which fires should be skipped if any
+	firesToSkip := make(map[models.ContactID]*models.EventFire, len(fires))
+
+	if dbEvent.StartMode() == models.StartModeSkip {
+		allContactIDs := make([]models.ContactID, len(fires))
+		for i := range fires {
+			allContactIDs[i] = fires[i].ContactID
+		}
+		contactsInAFlow, err := models.FilterByWaitingSession(ctx, rt.DB, allContactIDs)
+		if err != nil {
+			return nil, errors.Wrap(err, "error finding waiting sessions")
+		}
+		for _, f := range fires {
+			if slices.Contains(contactsInAFlow, f.ContactID) {
+				firesToSkip[f.ContactID] = f
+			}
+		}
+	}
+
+	// and then which fires should actually be fired
+	firesToFire := make(map[models.ContactID]*models.EventFire, len(fires))
+	for _, f := range fires {
+		if firesToSkip[f.ContactID] == nil {
+			firesToFire[f.ContactID] = f
+		}
+	}
+
+	// mark the skipped fires as skipped and record as handled
+	err = models.MarkEventsFired(ctx, rt.DB, maps.Values(firesToSkip), time.Now(), models.FireResultSkipped)
+	if err != nil {
+		return nil, errors.Wrap(err, "error marking events skipped")
+	}
+
+	handled := maps.Values(firesToSkip)
+
+	// if this is an ivr flow, we need to create a task to perform the start there
+	if dbFlow.FlowType() == models.FlowTypeVoice {
+		fired := maps.Values(firesToFire)
+
+		err := handler.TriggerIVRFlow(ctx, rt, oa.OrgID(), dbFlow.ID(), maps.Keys(firesToFire), func(ctx context.Context, tx *sqlx.Tx) error {
+			return models.MarkEventsFired(ctx, tx, fired, time.Now(), models.FireResultFired)
+		})
+		if err != nil {
+			return nil, errors.Wrapf(err, "error triggering ivr flow start")
+		}
+
+		handled = append(handled, fired...)
+
+		return handled, nil
+	}
+
+	// this is our pre commit callback for our sessions, we'll mark the event fires associated
+	// with the passed in sessions as complete in the same transaction
+	firedOn := time.Now()
+	markFired := func(ctx context.Context, tx *sqlx.Tx, rp *redis.Pool, oa *models.OrgAssets, sessions []*models.Session) error {
+		// build up our list of event fire ids based on the session contact ids
+		fired := make([]*models.EventFire, 0, len(sessions))
+		for _, s := range sessions {
+			fired = append(fired, firesToFire[s.ContactID()])
+		}
+
+		// mark those events as fired
+		err := models.MarkEventsFired(ctx, tx, fired, firedOn, models.FireResultFired)
+		if err != nil {
+			return errors.Wrap(err, "error marking events fired")
+		}
+
+		handled = append(handled, fired...)
+ + return nil + } + + // our start options are based on the start mode for our event + options := &runner.StartOptions{ + Interrupt: dbEvent.StartMode() != models.StartModePassive, + TriggerBuilder: func(contact *flows.Contact) flows.Trigger { + return triggers.NewBuilder(oa.Env(), assets.NewFlowReference(flow.UUID(), flow.Name()), contact).Campaign(campaign, eventUUID).Build() + }, + CommitHook: markFired, + } + + _, err = runner.StartFlow(ctx, rt, oa, dbFlow, maps.Keys(firesToFire), options) + if err != nil { + logrus.WithError(err).Errorf("error starting flow for campaign event: %s", eventUUID) + } + + // log both our total and average + analytics.Gauge("mr.campaign_event_elapsed", float64(time.Since(start))/float64(time.Second)) + analytics.Gauge("mr.campaign_event_count", float64(len(handled))) + + return handled, nil +} diff --git a/core/tasks/campaigns/fire_campaign_event_test.go b/core/tasks/campaigns/fire_campaign_event_test.go new file mode 100644 index 000000000..5007c3743 --- /dev/null +++ b/core/tasks/campaigns/fire_campaign_event_test.go @@ -0,0 +1,158 @@ +package campaigns_test + +import ( + "fmt" + "testing" + "time" + + "github.com/nyaruka/gocommon/dbutil/assertdb" + "github.com/nyaruka/goflow/flows/triggers" + "github.com/nyaruka/mailroom/core/models" + "github.com/nyaruka/mailroom/core/queue" + "github.com/nyaruka/mailroom/core/tasks" + "github.com/nyaruka/mailroom/core/tasks/campaigns" + "github.com/nyaruka/mailroom/testsuite" + "github.com/nyaruka/mailroom/testsuite/testdata" + "github.com/nyaruka/redisx" + "github.com/nyaruka/redisx/assertredis" + "github.com/stretchr/testify/assert" +) + +func TestFireCampaignEvents(t *testing.T) { + _, rt := testsuite.Runtime() + + defer testsuite.Reset(testsuite.ResetAll) + + rc := rt.RP.Get() + defer rc.Close() + + marker := redisx.NewIntervalSet("campaign_event", time.Hour*24, 2) + campaign := triggers.NewCampaignReference(triggers.CampaignUUID(testdata.RemindersCampaign.UUID), "Doctor Reminders") + + // 
create event fires for event #3 (Pick A Number, start mode SKIP) + now := time.Now() + fire1ID := testdata.InsertEventFire(rt, testdata.Cathy, testdata.RemindersEvent3, now) + fire2ID := testdata.InsertEventFire(rt, testdata.Bob, testdata.RemindersEvent3, now) + fire3ID := testdata.InsertEventFire(rt, testdata.Alexandria, testdata.RemindersEvent3, now) + + // create waiting sessions for Cathy and Alexandria + testdata.InsertWaitingSession(rt, testdata.Org1, testdata.Cathy, models.FlowTypeVoice, testdata.IVRFlow, models.NilCallID, time.Now(), time.Now(), false, nil) + testdata.InsertWaitingSession(rt, testdata.Org1, testdata.Alexandria, models.FlowTypeMessaging, testdata.Favorites, models.NilCallID, time.Now(), time.Now(), false, nil) + + // simulates the cron job picking up pending fires, marking them in redis, and queueing a task to handle them + fireFires := func(fireIDs []models.FireID, flow *testdata.Flow, ce *testdata.CampaignEvent) { + for _, fid := range fireIDs { + marker.Add(rc, fmt.Sprintf("%d", fid)) + } + + task := &campaigns.FireCampaignEventTask{ + FireIDs: fireIDs, + EventID: int64(ce.ID), + EventUUID: string(ce.UUID), + FlowUUID: flow.UUID, + CampaignUUID: string(campaign.UUID), + CampaignName: campaign.Name, + } + + err := tasks.Queue(rc, queue.BatchQueue, testdata.Org1.ID, task, queue.DefaultPriority) + assert.NoError(t, err) + + testsuite.FlushTasks(t, rt) + + // and left in redis marker + for _, fid := range fireIDs { + assertredis.SIsMember(t, rt.RP, fmt.Sprintf("campaign_event:%s", time.Now().Format("2006-01-02")), fmt.Sprintf("%d", fid), true) + } + } + + fireFires([]models.FireID{fire1ID, fire2ID, fire3ID}, testdata.PickANumber, testdata.RemindersEvent3) + + // cathy has her existing waiting session because event skipped her + assertdb.Query(t, rt.DB, `SELECT count(*) FROM flows_flowsession WHERE contact_id = $1 AND status = 'W'`, testdata.Cathy.ID).Returns(1) + assertdb.Query(t, rt.DB, `SELECT current_flow_id FROM flows_flowsession WHERE 
contact_id = $1 AND status = 'W'`, testdata.Cathy.ID).Returns(int64(testdata.IVRFlow.ID)) + assertdb.Query(t, rt.DB, `SELECT fired_result from campaigns_eventfire WHERE contact_id = $1 AND event_id = $2`, testdata.Cathy.ID, testdata.RemindersEvent3.ID).Returns("S") + + // bob's waiting session is the campaign event because he didn't have a waiting session + assertdb.Query(t, rt.DB, `SELECT count(*) FROM flows_flowsession WHERE contact_id = $1 AND status = 'W'`, testdata.Bob.ID).Returns(1) + assertdb.Query(t, rt.DB, `SELECT current_flow_id FROM flows_flowsession WHERE contact_id = $1 AND status = 'W'`, testdata.Bob.ID).Returns(int64(testdata.PickANumber.ID)) + assertdb.Query(t, rt.DB, `SELECT fired_result from campaigns_eventfire WHERE contact_id = $1 AND event_id = $2`, testdata.Bob.ID, testdata.RemindersEvent3.ID).Returns("F") + + // alexandria has her existing waiting session because event skipped her + assertdb.Query(t, rt.DB, `SELECT count(*) FROM flows_flowsession WHERE contact_id = $1 AND status = 'W'`, testdata.Alexandria.ID).Returns(1) + assertdb.Query(t, rt.DB, `SELECT current_flow_id FROM flows_flowsession WHERE contact_id = $1 AND status = 'W'`, testdata.Alexandria.ID).Returns(int64(testdata.Favorites.ID)) + assertdb.Query(t, rt.DB, `SELECT fired_result from campaigns_eventfire WHERE contact_id = $1 AND event_id = $2`, testdata.Alexandria.ID, testdata.RemindersEvent3.ID).Returns("S") + + // all event fires fired + assertdb.Query(t, rt.DB, `SELECT count(*) from campaigns_eventfire WHERE fired IS NULL`).Returns(0) + + // create event fires for event #2 (message, start mode PASSIVE) + now = time.Now() + fire4ID := testdata.InsertEventFire(rt, testdata.Cathy, testdata.RemindersEvent2, now) + fire5ID := testdata.InsertEventFire(rt, testdata.Bob, testdata.RemindersEvent2, now) + fire6ID := testdata.InsertEventFire(rt, testdata.Alexandria, testdata.RemindersEvent2, now) + + fireFires([]models.FireID{fire4ID, fire5ID, fire6ID}, testdata.CampaignFlow, 
testdata.RemindersEvent2) + + // cathy still has her existing waiting session and now a completed one + assertdb.Query(t, rt.DB, `SELECT count(*) FROM flows_flowsession WHERE contact_id = $1 AND status = 'W'`, testdata.Cathy.ID).Returns(1) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM flows_flowsession WHERE contact_id = $1 AND status = 'C'`, testdata.Cathy.ID).Returns(1) + assertdb.Query(t, rt.DB, `SELECT fired_result from campaigns_eventfire WHERE contact_id = $1 AND event_id = $2`, testdata.Cathy.ID, testdata.RemindersEvent2.ID).Returns("F") + + // bob still has one waiting session from the previous campaign event and now a completed one + assertdb.Query(t, rt.DB, `SELECT count(*) FROM flows_flowsession WHERE contact_id = $1 AND status = 'W'`, testdata.Bob.ID).Returns(1) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM flows_flowsession WHERE contact_id = $1 AND status = 'C'`, testdata.Bob.ID).Returns(1) + assertdb.Query(t, rt.DB, `SELECT fired_result from campaigns_eventfire WHERE contact_id = $1 AND event_id = $2`, testdata.Bob.ID, testdata.RemindersEvent2.ID).Returns("F") + + // alexandria still has her existing waiting session and now a completed one + assertdb.Query(t, rt.DB, `SELECT count(*) FROM flows_flowsession WHERE contact_id = $1 AND status = 'W'`, testdata.Alexandria.ID).Returns(1) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM flows_flowsession WHERE contact_id = $1 AND status = 'C'`, testdata.Alexandria.ID).Returns(1) + assertdb.Query(t, rt.DB, `SELECT fired_result from campaigns_eventfire WHERE contact_id = $1 AND event_id = $2`, testdata.Alexandria.ID, testdata.RemindersEvent2.ID).Returns("F") + + // create event fires for event #1 (flow, start mode INTERRUPT) + now = time.Now() + fire7ID := testdata.InsertEventFire(rt, testdata.Cathy, testdata.RemindersEvent1, now) + fire8ID := testdata.InsertEventFire(rt, testdata.Bob, testdata.RemindersEvent1, now) + fire9ID := testdata.InsertEventFire(rt, testdata.Alexandria, testdata.RemindersEvent1, 
now) + + fireFires([]models.FireID{fire7ID, fire8ID, fire9ID}, testdata.Favorites, testdata.RemindersEvent1) + + // cathy's existing waiting session should now be interrupted and now she has a waiting session in the Favorites flow + assertdb.Query(t, rt.DB, `SELECT count(*) FROM flows_flowsession WHERE contact_id = $1 AND status = 'I'`, testdata.Cathy.ID).Returns(1) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM flows_flowsession WHERE contact_id = $1 AND status = 'C'`, testdata.Cathy.ID).Returns(1) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM flows_flowsession WHERE contact_id = $1 AND status = 'W' AND current_flow_id = $2`, testdata.Cathy.ID, testdata.Favorites.ID).Returns(1) + assertdb.Query(t, rt.DB, `SELECT fired_result from campaigns_eventfire WHERE contact_id = $1 AND event_id = $2`, testdata.Cathy.ID, testdata.RemindersEvent1.ID).Returns("F") + + // bob's session from the first campaign event should now be interrupted and he has a new waiting session + assertdb.Query(t, rt.DB, `SELECT count(*) FROM flows_flowsession WHERE contact_id = $1 AND status = 'I'`, testdata.Bob.ID).Returns(1) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM flows_flowsession WHERE contact_id = $1 AND status = 'C'`, testdata.Bob.ID).Returns(1) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM flows_flowsession WHERE contact_id = $1 AND status = 'W' AND current_flow_id = $2`, testdata.Bob.ID, testdata.Favorites.ID).Returns(1) + assertdb.Query(t, rt.DB, `SELECT fired_result from campaigns_eventfire WHERE contact_id = $1 AND event_id = $2`, testdata.Bob.ID, testdata.RemindersEvent1.ID).Returns("F") + + // alexandria's existing waiting session should now be interrupted and now she has a waiting session in the Favorites flow + assertdb.Query(t, rt.DB, `SELECT count(*) FROM flows_flowsession WHERE contact_id = $1 AND status = 'I'`, testdata.Alexandria.ID).Returns(1) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM flows_flowsession WHERE contact_id = $1 AND status = 'C'`, 
testdata.Alexandria.ID).Returns(1) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM flows_flowsession WHERE contact_id = $1 AND status = 'W' AND current_flow_id = $2`, testdata.Alexandria.ID, testdata.Favorites.ID).Returns(1) + assertdb.Query(t, rt.DB, `SELECT fired_result from campaigns_eventfire WHERE contact_id = $1 AND event_id = $2`, testdata.Alexandria.ID, testdata.RemindersEvent1.ID).Returns("F") + + // test handling fires for a deleted campaign event + rt.DB.MustExec(`UPDATE campaigns_campaignevent SET is_active = FALSE WHERE id = $1`, testdata.RemindersEvent1.ID) + models.FlushCache() + + fire10ID := testdata.InsertEventFire(rt, testdata.Cathy, testdata.RemindersEvent1, now) + + fireFires([]models.FireID{fire10ID}, testdata.Favorites, testdata.RemindersEvent1) + + // event fire should be deleted + assertdb.Query(t, rt.DB, `SELECT count(*) FROM campaigns_eventfire WHERE id = $1`, fire10ID).Returns(0) + + // test handling fires for a deleted flow + rt.DB.MustExec(`UPDATE flows_flow SET is_active = FALSE WHERE id = $1`, testdata.PickANumber.ID) + models.FlushCache() + + fire11ID := testdata.InsertEventFire(rt, testdata.Cathy, testdata.RemindersEvent3, now) + + fireFires([]models.FireID{fire11ID}, testdata.PickANumber, testdata.RemindersEvent3) + + // event fire should be deleted + assertdb.Query(t, rt.DB, `SELECT count(*) FROM campaigns_eventfire WHERE id = $1`, fire11ID).Returns(0) +} diff --git a/core/tasks/campaigns/schedule_campaign_event.go b/core/tasks/campaigns/schedule_campaign_event.go index 31df1e1d1..1bf862d75 100644 --- a/core/tasks/campaigns/schedule_campaign_event.go +++ b/core/tasks/campaigns/schedule_campaign_event.go @@ -26,6 +26,10 @@ type ScheduleCampaignEventTask struct { CampaignEventID models.CampaignEventID `json:"campaign_event_id"` } +func (t *ScheduleCampaignEventTask) Type() string { + return TypeScheduleCampaignEvent +} + // Timeout is the maximum amount of time the task can run for func (t *ScheduleCampaignEventTask) Timeout() 
time.Duration { return time.Hour diff --git a/core/tasks/campaigns/schedule_campaign_event_test.go b/core/tasks/campaigns/schedule_campaign_event_test.go index a04d4f3fb..144267c9f 100644 --- a/core/tasks/campaigns/schedule_campaign_event_test.go +++ b/core/tasks/campaigns/schedule_campaign_event_test.go @@ -9,27 +9,26 @@ import ( "github.com/nyaruka/mailroom/core/tasks/campaigns" "github.com/nyaruka/mailroom/testsuite" "github.com/nyaruka/mailroom/testsuite/testdata" - "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) func TestScheduleCampaignEvent(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetAll) // add bob, george and alexandria to doctors group which campaign is based on - testdata.DoctorsGroup.Add(db, testdata.Bob, testdata.George, testdata.Alexandria) + testdata.DoctorsGroup.Add(rt, testdata.Bob, testdata.George, testdata.Alexandria) // give bob and george values for joined in the future - db.MustExec(`UPDATE contacts_contact SET fields = '{"d83aae24-4bbf-49d0-ab85-6bfd201eac6d": {"datetime": "2030-01-01T00:00:00Z"}}' WHERE id = $1`, testdata.Bob.ID) - db.MustExec(`UPDATE contacts_contact SET fields = '{"d83aae24-4bbf-49d0-ab85-6bfd201eac6d": {"datetime": "2030-08-18T11:31:30Z"}}' WHERE id = $1`, testdata.George.ID) + rt.DB.MustExec(`UPDATE contacts_contact SET fields = '{"d83aae24-4bbf-49d0-ab85-6bfd201eac6d": {"datetime": "2030-01-01T00:00:00Z"}}' WHERE id = $1`, testdata.Bob.ID) + rt.DB.MustExec(`UPDATE contacts_contact SET fields = '{"d83aae24-4bbf-49d0-ab85-6bfd201eac6d": {"datetime": "2030-08-18T11:31:30Z"}}' WHERE id = $1`, testdata.George.ID) // give alexandria a value in the past - db.MustExec(`UPDATE contacts_contact SET fields = '{"d83aae24-4bbf-49d0-ab85-6bfd201eac6d": {"datetime": "2015-01-01T00:00:00Z"}}' WHERE id = $1`, testdata.Alexandria.ID) + rt.DB.MustExec(`UPDATE contacts_contact SET fields = '{"d83aae24-4bbf-49d0-ab85-6bfd201eac6d": 
{"datetime": "2015-01-01T00:00:00Z"}}' WHERE id = $1`, testdata.Alexandria.ID) - db.MustExec(`DELETE FROM campaigns_eventfire`) + rt.DB.MustExec(`DELETE FROM campaigns_eventfire`) // campaign has two events configured on the joined field // 1. +5 Days (12:00) start favorites flow @@ -41,7 +40,7 @@ func TestScheduleCampaignEvent(t *testing.T) { require.NoError(t, err) // cathy has no value for joined and alexandia has a value too far in past, but bob and george will have values... - assertContactFires(t, db, testdata.RemindersEvent1.ID, map[models.ContactID]time.Time{ + assertContactFires(t, rt.DB, testdata.RemindersEvent1.ID, map[models.ContactID]time.Time{ testdata.Bob.ID: time.Date(2030, 1, 5, 20, 0, 0, 0, time.UTC), // 12:00 in PST testdata.George.ID: time.Date(2030, 8, 23, 19, 0, 0, 0, time.UTC), // 12:00 in PST with DST }) @@ -51,46 +50,46 @@ func TestScheduleCampaignEvent(t *testing.T) { err = task.Perform(ctx, rt, testdata.Org1.ID) require.NoError(t, err) - assertContactFires(t, db, testdata.RemindersEvent2.ID, map[models.ContactID]time.Time{ + assertContactFires(t, rt.DB, testdata.RemindersEvent2.ID, map[models.ContactID]time.Time{ testdata.Bob.ID: time.Date(2030, 1, 1, 0, 10, 0, 0, time.UTC), testdata.George.ID: time.Date(2030, 8, 18, 11, 42, 0, 0, time.UTC), }) // fires for first event unaffected - assertContactFires(t, db, testdata.RemindersEvent1.ID, map[models.ContactID]time.Time{ + assertContactFires(t, rt.DB, testdata.RemindersEvent1.ID, map[models.ContactID]time.Time{ testdata.Bob.ID: time.Date(2030, 1, 5, 20, 0, 0, 0, time.UTC), testdata.George.ID: time.Date(2030, 8, 23, 19, 0, 0, 0, time.UTC), }) // remove alexandria from campaign group - db.MustExec(`DELETE FROM contacts_contactgroup_contacts WHERE contact_id = $1`, testdata.Alexandria.ID) + rt.DB.MustExec(`DELETE FROM contacts_contactgroup_contacts WHERE contact_id = $1`, testdata.Alexandria.ID) // bump created_on for cathy and alexandria - db.MustExec(`UPDATE contacts_contact SET created_on = 
'2035-01-01T00:00:00Z' WHERE id = $1 OR id = $2`, testdata.Cathy.ID, testdata.Alexandria.ID) + rt.DB.MustExec(`UPDATE contacts_contact SET created_on = '2035-01-01T00:00:00Z' WHERE id = $1 OR id = $2`, testdata.Cathy.ID, testdata.Alexandria.ID) // create new campaign event based on created_on + 5 minutes - event3 := testdata.InsertCampaignFlowEvent(db, testdata.RemindersCampaign, testdata.Favorites, testdata.CreatedOnField, 5, "M") + event3 := testdata.InsertCampaignFlowEvent(rt, testdata.RemindersCampaign, testdata.Favorites, testdata.CreatedOnField, 5, "M") task = &campaigns.ScheduleCampaignEventTask{CampaignEventID: event3.ID} err = task.Perform(ctx, rt, testdata.Org1.ID) require.NoError(t, err) // only cathy is in the group and new enough to have a fire - assertContactFires(t, db, event3.ID, map[models.ContactID]time.Time{ + assertContactFires(t, rt.DB, event3.ID, map[models.ContactID]time.Time{ testdata.Cathy.ID: time.Date(2035, 1, 1, 0, 5, 0, 0, time.UTC), }) // create new campaign event based on last_seen_on + 1 day - event4 := testdata.InsertCampaignFlowEvent(db, testdata.RemindersCampaign, testdata.Favorites, testdata.LastSeenOnField, 1, "D") + event4 := testdata.InsertCampaignFlowEvent(rt, testdata.RemindersCampaign, testdata.Favorites, testdata.LastSeenOnField, 1, "D") // bump last_seen_on for bob - db.MustExec(`UPDATE contacts_contact SET last_seen_on = '2040-01-01T00:00:00Z' WHERE id = $1`, testdata.Bob.ID) + rt.DB.MustExec(`UPDATE contacts_contact SET last_seen_on = '2040-01-01T00:00:00Z' WHERE id = $1`, testdata.Bob.ID) task = &campaigns.ScheduleCampaignEventTask{CampaignEventID: event4.ID} err = task.Perform(ctx, rt, testdata.Org1.ID) require.NoError(t, err) - assertContactFires(t, db, event4.ID, map[models.ContactID]time.Time{ + assertContactFires(t, rt.DB, event4.ID, map[models.ContactID]time.Time{ testdata.Bob.ID: time.Date(2040, 1, 2, 0, 0, 0, 0, time.UTC), }) } diff --git a/core/tasks/contacts/import_contact_batch.go 
b/core/tasks/contacts/import_contact_batch.go index 63a6960b5..af08a4f93 100644 --- a/core/tasks/contacts/import_contact_batch.go +++ b/core/tasks/contacts/import_contact_batch.go @@ -24,6 +24,10 @@ type ImportContactBatchTask struct { ContactImportBatchID models.ContactImportBatchID `json:"contact_import_batch_id"` } +func (t *ImportContactBatchTask) Type() string { + return TypeImportContactBatch +} + // Timeout is the maximum amount of time the task can run for func (t *ImportContactBatchTask) Timeout() time.Duration { return time.Minute * 10 @@ -33,21 +37,21 @@ func (t *ImportContactBatchTask) Timeout() time.Duration { func (t *ImportContactBatchTask) Perform(ctx context.Context, rt *runtime.Runtime, orgID models.OrgID) error { batch, err := models.LoadContactImportBatch(ctx, rt.DB, t.ContactImportBatchID) if err != nil { - return errors.Wrapf(err, "unable to load contact import batch with id %d", t.ContactImportBatchID) + return errors.Wrap(err, "error loading contact import batch") } - batchErr := batch.Import(ctx, rt, orgID) + imp, err := models.LoadContactImport(ctx, rt.DB, batch.ImportID) + if err != nil { + return errors.Wrap(err, "error loading contact import") + } + + batchErr := batch.Import(ctx, rt, orgID, imp.CreatedByID) // decrement the redis key that holds remaining batches to see if the overall import is now finished rc := rt.RP.Get() defer rc.Close() remaining, _ := redis.Int(rc.Do("decr", fmt.Sprintf("contact_import_batches_remaining:%d", batch.ImportID))) if remaining == 0 { - imp, err := models.LoadContactImport(ctx, rt.DB, batch.ImportID) - if err != nil { - return errors.Wrap(err, "error loading contact import") - } - // if any batch failed, then import is considered failed status := models.ContactImportStatusComplete for _, s := range imp.BatchStatuses { diff --git a/core/tasks/contacts/import_contact_batch_test.go b/core/tasks/contacts/import_contact_batch_test.go index cce2d2b60..3258b002b 100644 --- 
a/core/tasks/contacts/import_contact_batch_test.go +++ b/core/tasks/contacts/import_contact_batch_test.go @@ -14,18 +14,18 @@ import ( ) func TestImportContactBatch(t *testing.T) { - ctx, rt, db, rp := testsuite.Get() - rc := rp.Get() + ctx, rt := testsuite.Runtime() + rc := rt.RP.Get() defer rc.Close() defer testsuite.Reset(testsuite.ResetData) - importID := testdata.InsertContactImport(db, testdata.Org1, testdata.Admin) - batch1ID := testdata.InsertContactImportBatch(db, importID, []byte(`[ + importID := testdata.InsertContactImport(rt, testdata.Org1, testdata.Admin) + batch1ID := testdata.InsertContactImportBatch(rt, importID, []byte(`[ {"name": "Norbert", "language": "eng", "urns": ["tel:+16055740001"]}, {"name": "Leah", "urns": ["tel:+16055740002"]} ]`)) - batch2ID := testdata.InsertContactImportBatch(db, importID, []byte(`[ + batch2ID := testdata.InsertContactImportBatch(rt, importID, []byte(`[ {"name": "Rowan", "language": "spa", "urns": ["tel:+16055740003"]} ]`)) @@ -37,21 +37,21 @@ func TestImportContactBatch(t *testing.T) { require.NoError(t, err) // import is still in progress - assertdb.Query(t, db, `SELECT status FROM contacts_contactimport WHERE id = $1`, importID).Columns(map[string]interface{}{"status": "O"}) + assertdb.Query(t, rt.DB, `SELECT status FROM contacts_contactimport WHERE id = $1`, importID).Columns(map[string]interface{}{"status": "O"}) // perform second batch task... 
task2 := &contacts.ImportContactBatchTask{ContactImportBatchID: batch2ID} err = task2.Perform(ctx, rt, testdata.Org1.ID) require.NoError(t, err) - assertdb.Query(t, db, `SELECT count(*) FROM contacts_contact WHERE id >= 30000`).Returns(3) - assertdb.Query(t, db, `SELECT count(*) FROM contacts_contact WHERE name = 'Norbert' AND language = 'eng'`).Returns(1) - assertdb.Query(t, db, `SELECT count(*) FROM contacts_contact WHERE name = 'Leah' AND language IS NULL`).Returns(1) - assertdb.Query(t, db, `SELECT count(*) FROM contacts_contact WHERE name = 'Rowan' AND language = 'spa'`).Returns(1) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM contacts_contact WHERE id >= 30000`).Returns(3) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM contacts_contact WHERE name = 'Norbert' AND language = 'eng'`).Returns(1) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM contacts_contact WHERE name = 'Leah' AND language IS NULL`).Returns(1) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM contacts_contact WHERE name = 'Rowan' AND language = 'spa'`).Returns(1) // import is now complete and there is a notification for the creator - assertdb.Query(t, db, `SELECT status FROM contacts_contactimport WHERE id = $1`, importID).Columns(map[string]interface{}{"status": "C"}) - assertdb.Query(t, db, `SELECT org_id, notification_type, scope, user_id FROM notifications_notification WHERE contact_import_id = $1`, importID). + assertdb.Query(t, rt.DB, `SELECT status FROM contacts_contactimport WHERE id = $1`, importID).Columns(map[string]interface{}{"status": "C"}) + assertdb.Query(t, rt.DB, `SELECT org_id, notification_type, scope, user_id FROM notifications_notification WHERE contact_import_id = $1`, importID). 
Columns(map[string]interface{}{ "org_id": int64(testdata.Org1.ID), "notification_type": "import:finished", diff --git a/core/tasks/contacts/populate_dynamic_group.go b/core/tasks/contacts/populate_dynamic_group.go index b12baf1fb..9ded3ea49 100644 --- a/core/tasks/contacts/populate_dynamic_group.go +++ b/core/tasks/contacts/populate_dynamic_group.go @@ -10,7 +10,6 @@ import ( "github.com/nyaruka/mailroom/core/tasks" "github.com/nyaruka/mailroom/runtime" "github.com/nyaruka/redisx" - "github.com/pkg/errors" "github.com/sirupsen/logrus" ) @@ -30,6 +29,10 @@ type PopulateDynamicGroupTask struct { Query string `json:"query"` } +func (t *PopulateDynamicGroupTask) Type() string { + return TypePopulateDynamicGroup +} + // Timeout is the maximum amount of time the task can run for func (t *PopulateDynamicGroupTask) Timeout() time.Duration { return time.Hour @@ -58,7 +61,7 @@ func (t *PopulateDynamicGroupTask) Perform(ctx context.Context, rt *runtime.Runt return errors.Wrapf(err, "unable to load org when populating group: %d", t.GroupID) } - count, err := search.PopulateSmartGroup(ctx, rt.DB, rt.ES, oa, t.GroupID, t.Query) + count, err := search.PopulateSmartGroup(ctx, rt, rt.ES, oa, t.GroupID, t.Query) if err != nil { return errors.Wrapf(err, "error populating smart group: %d", t.GroupID) } diff --git a/core/tasks/contacts/populate_dynamic_group_test.go b/core/tasks/contacts/populate_dynamic_group_test.go index caefa03b0..b4eed9c71 100644 --- a/core/tasks/contacts/populate_dynamic_group_test.go +++ b/core/tasks/contacts/populate_dynamic_group_test.go @@ -13,18 +13,11 @@ import ( ) func TestPopulateTask(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetAll) - mockES := testsuite.NewMockElasticServer() - defer mockES.Close() - - mockES.AddResponse(testdata.Cathy.ID) - - rt.ES = mockES.Client() - - group := testdata.InsertContactGroup(db, testdata.Org1, "e52fee05-2f95-4445-aef6-2fe7dac2fd56", "Women", 
"gender = F") + group := testdata.InsertContactGroup(rt, testdata.Org1, "e52fee05-2f95-4445-aef6-2fe7dac2fd56", "Women", "gender = F") start := dates.Now() task := &contacts.PopulateDynamicGroupTask{ @@ -34,7 +27,7 @@ func TestPopulateTask(t *testing.T) { err := task.Perform(ctx, rt, testdata.Org1.ID) require.NoError(t, err) - assertdb.Query(t, db, `SELECT count(*) FROM contacts_contactgroup_contacts WHERE contactgroup_id = $1`, group.ID).Returns(1) - assertdb.Query(t, db, `SELECT contact_id FROM contacts_contactgroup_contacts WHERE contactgroup_id = $1`, group.ID).Returns(int64(testdata.Cathy.ID)) - assertdb.Query(t, db, `SELECT count(*) FROM contacts_contact WHERE id = $1 AND modified_on > $2`, testdata.Cathy.ID, start).Returns(1) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM contacts_contactgroup_contacts WHERE contactgroup_id = $1`, group.ID).Returns(1) + assertdb.Query(t, rt.DB, `SELECT contact_id FROM contacts_contactgroup_contacts WHERE contactgroup_id = $1`, group.ID).Returns(int64(testdata.Cathy.ID)) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM contacts_contact WHERE id = $1 AND modified_on > $2`, testdata.Cathy.ID, start).Returns(1) } diff --git a/core/tasks/expirations/cron.go b/core/tasks/expirations/cron.go index aaf8c47a4..615a7c5c5 100644 --- a/core/tasks/expirations/cron.go +++ b/core/tasks/expirations/cron.go @@ -183,7 +183,7 @@ func ExpireVoiceSessions(ctx context.Context, rt *runtime.Runtime) error { log.WithField("count", len(expiredSessions)).WithField("elapsed", time.Since(start)).Info("expired and hung up on call") } - if err := models.InsertChannelLogs(ctx, rt.DB, clogs); err != nil { + if err := models.InsertChannelLogs(ctx, rt, clogs); err != nil { return errors.Wrap(err, "error inserting channel logs") } diff --git a/core/tasks/expirations/cron_test.go b/core/tasks/expirations/cron_test.go index 02dfb2c82..37655cdaa 100644 --- a/core/tasks/expirations/cron_test.go +++ b/core/tasks/expirations/cron_test.go @@ -18,37 +18,37 @@ import 
( ) func TestExpirations(t *testing.T) { - ctx, rt, db, rp := testsuite.Get() - rc := rp.Get() + ctx, rt := testsuite.Runtime() + rc := rt.RP.Get() defer rc.Close() defer testsuite.Reset(testsuite.ResetData | testsuite.ResetRedis) // create a blocked contact - blake := testdata.InsertContact(db, testdata.Org1, "9eef59ef-21b3-4f51-a296-937529a30e38", "Blake", envs.NilLanguage, models.ContactStatusBlocked) + blake := testdata.InsertContact(rt, testdata.Org1, "9eef59ef-21b3-4f51-a296-937529a30e38", "Blake", envs.NilLanguage, models.ContactStatusBlocked) // create single run session for Cathy, no parent to resume - s1ID := testdata.InsertWaitingSession(db, testdata.Org1, testdata.Cathy, models.FlowTypeMessaging, testdata.Favorites, models.NilCallID, time.Now(), time.Now(), false, nil) - r1ID := testdata.InsertFlowRun(db, testdata.Org1, s1ID, testdata.Cathy, testdata.Favorites, models.RunStatusWaiting) + s1ID := testdata.InsertWaitingSession(rt, testdata.Org1, testdata.Cathy, models.FlowTypeMessaging, testdata.Favorites, models.NilCallID, time.Now(), time.Now(), false, nil) + r1ID := testdata.InsertFlowRun(rt, testdata.Org1, s1ID, testdata.Cathy, testdata.Favorites, models.RunStatusWaiting) // create parent/child session for George, can resume - s2ID := testdata.InsertWaitingSession(db, testdata.Org1, testdata.George, models.FlowTypeMessaging, testdata.Favorites, models.NilCallID, time.Now(), time.Now(), true, nil) - r2ID := testdata.InsertFlowRun(db, testdata.Org1, s2ID, testdata.George, testdata.Favorites, models.RunStatusActive) - r3ID := testdata.InsertFlowRun(db, testdata.Org1, s2ID, testdata.George, testdata.Favorites, models.RunStatusWaiting) + s2ID := testdata.InsertWaitingSession(rt, testdata.Org1, testdata.George, models.FlowTypeMessaging, testdata.Favorites, models.NilCallID, time.Now(), time.Now(), true, nil) + r2ID := testdata.InsertFlowRun(rt, testdata.Org1, s2ID, testdata.George, testdata.Favorites, models.RunStatusActive) + r3ID := 
testdata.InsertFlowRun(rt, testdata.Org1, s2ID, testdata.George, testdata.Favorites, models.RunStatusWaiting) // create session for Bob with expiration in future - s3ID := testdata.InsertWaitingSession(db, testdata.Org1, testdata.Bob, models.FlowTypeMessaging, testdata.Favorites, models.NilCallID, time.Now(), time.Now().Add(time.Hour), true, nil) - r4ID := testdata.InsertFlowRun(db, testdata.Org1, s3ID, testdata.Bob, testdata.Favorites, models.RunStatusWaiting) + s3ID := testdata.InsertWaitingSession(rt, testdata.Org1, testdata.Bob, models.FlowTypeMessaging, testdata.Favorites, models.NilCallID, time.Now(), time.Now().Add(time.Hour), true, nil) + r4ID := testdata.InsertFlowRun(rt, testdata.Org1, s3ID, testdata.Bob, testdata.Favorites, models.RunStatusWaiting) // create an IVR session for Alexandria - call := testdata.InsertCall(db, testdata.Org1, testdata.TwilioChannel, testdata.Alexandria) - s4ID := testdata.InsertWaitingSession(db, testdata.Org1, testdata.Alexandria, models.FlowTypeVoice, testdata.IVRFlow, call, time.Now(), time.Now(), false, nil) - r5ID := testdata.InsertFlowRun(db, testdata.Org1, s4ID, testdata.Alexandria, testdata.IVRFlow, models.RunStatusWaiting) + call := testdata.InsertCall(rt, testdata.Org1, testdata.TwilioChannel, testdata.Alexandria) + s4ID := testdata.InsertWaitingSession(rt, testdata.Org1, testdata.Alexandria, models.FlowTypeVoice, testdata.IVRFlow, call, time.Now(), time.Now(), false, nil) + r5ID := testdata.InsertFlowRun(rt, testdata.Org1, s4ID, testdata.Alexandria, testdata.IVRFlow, models.RunStatusWaiting) // create a parent/child session for the blocked contact - s5ID := testdata.InsertWaitingSession(db, testdata.Org1, blake, models.FlowTypeMessaging, testdata.Favorites, models.NilCallID, time.Now(), time.Now(), true, nil) - r6ID := testdata.InsertFlowRun(db, testdata.Org1, s5ID, blake, testdata.Favorites, models.RunStatusActive) - r7ID := testdata.InsertFlowRun(db, testdata.Org1, s5ID, blake, testdata.Favorites, 
models.RunStatusWaiting) + s5ID := testdata.InsertWaitingSession(rt, testdata.Org1, blake, models.FlowTypeMessaging, testdata.Favorites, models.NilCallID, time.Now(), time.Now(), true, nil) + r6ID := testdata.InsertFlowRun(rt, testdata.Org1, s5ID, blake, testdata.Favorites, models.RunStatusActive) + r7ID := testdata.InsertFlowRun(rt, testdata.Org1, s5ID, blake, testdata.Favorites, models.RunStatusWaiting) time.Sleep(5 * time.Millisecond) @@ -57,26 +57,26 @@ func TestExpirations(t *testing.T) { assert.NoError(t, err) // Cathy's session should be expired along with its runs - assertdb.Query(t, db, `SELECT status FROM flows_flowsession WHERE id = $1;`, s1ID).Columns(map[string]interface{}{"status": "X"}) - assertdb.Query(t, db, `SELECT status FROM flows_flowrun WHERE id = $1;`, r1ID).Columns(map[string]interface{}{"status": "X"}) + assertdb.Query(t, rt.DB, `SELECT status FROM flows_flowsession WHERE id = $1;`, s1ID).Columns(map[string]interface{}{"status": "X"}) + assertdb.Query(t, rt.DB, `SELECT status FROM flows_flowrun WHERE id = $1;`, r1ID).Columns(map[string]interface{}{"status": "X"}) // Bob's session and runs should be unchanged because it's been queued for resumption - assertdb.Query(t, db, `SELECT status FROM flows_flowsession WHERE id = $1;`, s2ID).Columns(map[string]interface{}{"status": "W"}) - assertdb.Query(t, db, `SELECT status FROM flows_flowrun WHERE id = $1;`, r2ID).Columns(map[string]interface{}{"status": "A"}) - assertdb.Query(t, db, `SELECT status FROM flows_flowrun WHERE id = $1;`, r3ID).Columns(map[string]interface{}{"status": "W"}) + assertdb.Query(t, rt.DB, `SELECT status FROM flows_flowsession WHERE id = $1;`, s2ID).Columns(map[string]interface{}{"status": "W"}) + assertdb.Query(t, rt.DB, `SELECT status FROM flows_flowrun WHERE id = $1;`, r2ID).Columns(map[string]interface{}{"status": "A"}) + assertdb.Query(t, rt.DB, `SELECT status FROM flows_flowrun WHERE id = $1;`, r3ID).Columns(map[string]interface{}{"status": "W"}) // George's session and 
run should be unchanged - assertdb.Query(t, db, `SELECT status FROM flows_flowsession WHERE id = $1;`, s3ID).Columns(map[string]interface{}{"status": "W"}) - assertdb.Query(t, db, `SELECT status FROM flows_flowrun WHERE id = $1;`, r4ID).Columns(map[string]interface{}{"status": "W"}) + assertdb.Query(t, rt.DB, `SELECT status FROM flows_flowsession WHERE id = $1;`, s3ID).Columns(map[string]interface{}{"status": "W"}) + assertdb.Query(t, rt.DB, `SELECT status FROM flows_flowrun WHERE id = $1;`, r4ID).Columns(map[string]interface{}{"status": "W"}) // Alexandria's session and run should be unchanged because IVR expirations are handled separately - assertdb.Query(t, db, `SELECT status FROM flows_flowsession WHERE id = $1;`, s4ID).Columns(map[string]interface{}{"status": "W"}) - assertdb.Query(t, db, `SELECT status FROM flows_flowrun WHERE id = $1;`, r5ID).Columns(map[string]interface{}{"status": "W"}) + assertdb.Query(t, rt.DB, `SELECT status FROM flows_flowsession WHERE id = $1;`, s4ID).Columns(map[string]interface{}{"status": "W"}) + assertdb.Query(t, rt.DB, `SELECT status FROM flows_flowrun WHERE id = $1;`, r5ID).Columns(map[string]interface{}{"status": "W"}) // blocked contact's session and runs sshould be unchanged because it's been queued for resumption.. 
like any other contact - assertdb.Query(t, db, `SELECT status FROM flows_flowsession WHERE id = $1;`, s5ID).Columns(map[string]interface{}{"status": "W"}) - assertdb.Query(t, db, `SELECT status FROM flows_flowrun WHERE id = $1;`, r6ID).Columns(map[string]interface{}{"status": "A"}) - assertdb.Query(t, db, `SELECT status FROM flows_flowrun WHERE id = $1;`, r7ID).Columns(map[string]interface{}{"status": "W"}) + assertdb.Query(t, rt.DB, `SELECT status FROM flows_flowsession WHERE id = $1;`, s5ID).Columns(map[string]interface{}{"status": "W"}) + assertdb.Query(t, rt.DB, `SELECT status FROM flows_flowrun WHERE id = $1;`, r6ID).Columns(map[string]interface{}{"status": "A"}) + assertdb.Query(t, rt.DB, `SELECT status FROM flows_flowrun WHERE id = $1;`, r7ID).Columns(map[string]interface{}{"status": "W"}) // should have created two expiration tasks task, err := queue.PopNextTask(rc, queue.HandlerQueue) @@ -84,7 +84,7 @@ func TestExpirations(t *testing.T) { assert.NotNil(t, task) // check the first task - eventTask := &handler.HandleEventTask{} + eventTask := &handler.HandleContactEventTask{} jsonx.MustUnmarshal(task.Task, eventTask) assert.Equal(t, testdata.George.ID, eventTask.ContactID) @@ -93,7 +93,7 @@ func TestExpirations(t *testing.T) { assert.NotNil(t, task) // check the second task - eventTask = &handler.HandleEventTask{} + eventTask = &handler.HandleContactEventTask{} jsonx.MustUnmarshal(task.Task, eventTask) assert.Equal(t, blake.ID, eventTask.ContactID) @@ -104,25 +104,25 @@ func TestExpirations(t *testing.T) { } func TestExpireVoiceSessions(t *testing.T) { - ctx, rt, db, rp := testsuite.Get() - rc := rp.Get() + ctx, rt := testsuite.Runtime() + rc := rt.RP.Get() defer rc.Close() defer testsuite.Reset(testsuite.ResetData | testsuite.ResetRedis) // create voice session for Cathy - conn1ID := testdata.InsertCall(db, testdata.Org1, testdata.TwilioChannel, testdata.Cathy) - s1ID := testdata.InsertWaitingSession(db, testdata.Org1, testdata.Cathy, models.FlowTypeVoice, 
testdata.IVRFlow, conn1ID, time.Now(), time.Now(), false, nil) - r1ID := testdata.InsertFlowRun(db, testdata.Org1, s1ID, testdata.Cathy, testdata.Favorites, models.RunStatusWaiting) + conn1ID := testdata.InsertCall(rt, testdata.Org1, testdata.TwilioChannel, testdata.Cathy) + s1ID := testdata.InsertWaitingSession(rt, testdata.Org1, testdata.Cathy, models.FlowTypeVoice, testdata.IVRFlow, conn1ID, time.Now(), time.Now(), false, nil) + r1ID := testdata.InsertFlowRun(rt, testdata.Org1, s1ID, testdata.Cathy, testdata.Favorites, models.RunStatusWaiting) // create voice session for Bob with expiration in future - conn2ID := testdata.InsertCall(db, testdata.Org1, testdata.TwilioChannel, testdata.Bob) - s2ID := testdata.InsertWaitingSession(db, testdata.Org1, testdata.Bob, models.FlowTypeMessaging, testdata.IVRFlow, conn2ID, time.Now(), time.Now().Add(time.Hour), false, nil) - r2ID := testdata.InsertFlowRun(db, testdata.Org1, s2ID, testdata.Bob, testdata.IVRFlow, models.RunStatusWaiting) + conn2ID := testdata.InsertCall(rt, testdata.Org1, testdata.TwilioChannel, testdata.Bob) + s2ID := testdata.InsertWaitingSession(rt, testdata.Org1, testdata.Bob, models.FlowTypeMessaging, testdata.IVRFlow, conn2ID, time.Now(), time.Now().Add(time.Hour), false, nil) + r2ID := testdata.InsertFlowRun(rt, testdata.Org1, s2ID, testdata.Bob, testdata.IVRFlow, models.RunStatusWaiting) // create a messaging session for Alexandria - s3ID := testdata.InsertWaitingSession(db, testdata.Org1, testdata.Alexandria, models.FlowTypeMessaging, testdata.Favorites, models.NilCallID, time.Now(), time.Now(), false, nil) - r3ID := testdata.InsertFlowRun(db, testdata.Org1, s3ID, testdata.Alexandria, testdata.Favorites, models.RunStatusWaiting) + s3ID := testdata.InsertWaitingSession(rt, testdata.Org1, testdata.Alexandria, models.FlowTypeMessaging, testdata.Favorites, models.NilCallID, time.Now(), time.Now(), false, nil) + r3ID := testdata.InsertFlowRun(rt, testdata.Org1, s3ID, testdata.Alexandria, 
testdata.Favorites, models.RunStatusWaiting) time.Sleep(5 * time.Millisecond) @@ -131,14 +131,14 @@ func TestExpireVoiceSessions(t *testing.T) { assert.NoError(t, err) // Cathy's session should be expired along with its runs - assertdb.Query(t, db, `SELECT status FROM flows_flowsession WHERE id = $1;`, s1ID).Columns(map[string]interface{}{"status": "X"}) - assertdb.Query(t, db, `SELECT status FROM flows_flowrun WHERE id = $1;`, r1ID).Columns(map[string]interface{}{"status": "X"}) + assertdb.Query(t, rt.DB, `SELECT status FROM flows_flowsession WHERE id = $1;`, s1ID).Columns(map[string]interface{}{"status": "X"}) + assertdb.Query(t, rt.DB, `SELECT status FROM flows_flowrun WHERE id = $1;`, r1ID).Columns(map[string]interface{}{"status": "X"}) // Bob's session and run should be unchanged - assertdb.Query(t, db, `SELECT status FROM flows_flowsession WHERE id = $1;`, s2ID).Columns(map[string]interface{}{"status": "W"}) - assertdb.Query(t, db, `SELECT status FROM flows_flowrun WHERE id = $1;`, r2ID).Columns(map[string]interface{}{"status": "W"}) + assertdb.Query(t, rt.DB, `SELECT status FROM flows_flowsession WHERE id = $1;`, s2ID).Columns(map[string]interface{}{"status": "W"}) + assertdb.Query(t, rt.DB, `SELECT status FROM flows_flowrun WHERE id = $1;`, r2ID).Columns(map[string]interface{}{"status": "W"}) // Alexandria's session and run should be unchanged because message expirations are handled separately - assertdb.Query(t, db, `SELECT status FROM flows_flowsession WHERE id = $1;`, s3ID).Columns(map[string]interface{}{"status": "W"}) - assertdb.Query(t, db, `SELECT status FROM flows_flowrun WHERE id = $1;`, r3ID).Columns(map[string]interface{}{"status": "W"}) + assertdb.Query(t, rt.DB, `SELECT status FROM flows_flowsession WHERE id = $1;`, s3ID).Columns(map[string]interface{}{"status": "W"}) + assertdb.Query(t, rt.DB, `SELECT status FROM flows_flowrun WHERE id = $1;`, r3ID).Columns(map[string]interface{}{"status": "W"}) } diff --git a/core/tasks/handler/worker.go 
b/core/tasks/handler/contact_tasks.go similarity index 77% rename from core/tasks/handler/worker.go rename to core/tasks/handler/contact_tasks.go index 6cabf6e16..59c1aaedb 100644 --- a/core/tasks/handler/worker.go +++ b/core/tasks/handler/contact_tasks.go @@ -3,13 +3,10 @@ package handler import ( "context" "encoding/json" - "fmt" "time" "github.com/gomodule/redigo/redis" "github.com/jmoiron/sqlx" - "github.com/nyaruka/gocommon/analytics" - "github.com/nyaruka/gocommon/dbutil" "github.com/nyaruka/gocommon/urns" "github.com/nyaruka/goflow/excellent/types" "github.com/nyaruka/goflow/flows" @@ -18,13 +15,14 @@ import ( "github.com/nyaruka/goflow/flows/resumes" "github.com/nyaruka/goflow/flows/triggers" "github.com/nyaruka/goflow/utils" - "github.com/nyaruka/mailroom" "github.com/nyaruka/mailroom/core/models" "github.com/nyaruka/mailroom/core/msgio" "github.com/nyaruka/mailroom/core/queue" "github.com/nyaruka/mailroom/core/runner" + "github.com/nyaruka/mailroom/core/tasks" + "github.com/nyaruka/mailroom/core/tasks/ivr" "github.com/nyaruka/mailroom/runtime" - "github.com/nyaruka/null" + "github.com/nyaruka/null/v2" "github.com/pkg/errors" "github.com/sirupsen/logrus" ) @@ -41,161 +39,6 @@ const ( TicketClosedEventType = "ticket_closed" ) -func init() { - mailroom.AddTaskFunction(queue.HandleContactEvent, HandleEvent) -} - -func HandleEvent(ctx context.Context, rt *runtime.Runtime, task *queue.Task) error { - return handleContactEvent(ctx, rt, task) -} - -// Called when an event comes in for a contact. To make sure we don't get into a situation of being off by one, -// this task ingests and handles all the events for a contact, one by one. 
-func handleContactEvent(ctx context.Context, rt *runtime.Runtime, task *queue.Task) error { - ctx, cancel := context.WithTimeout(ctx, time.Minute*5) - defer cancel() - - eventTask := &HandleEventTask{} - err := json.Unmarshal(task.Task, eventTask) - if err != nil { - return errors.Wrapf(err, "error decoding contact event task") - } - - // acquire the lock for this contact - locker := models.GetContactLocker(models.OrgID(task.OrgID), eventTask.ContactID) - - lock, err := locker.Grab(rt.RP, time.Second*10) - if err != nil { - return errors.Wrapf(err, "error acquiring lock for contact %d", eventTask.ContactID) - } - - // we didn't get the lock within our timeout, skip and requeue for later - if lock == "" { - rc := rt.RP.Get() - defer rc.Close() - err = queueContactTask(rc, models.OrgID(task.OrgID), eventTask.ContactID) - if err != nil { - return errors.Wrapf(err, "error re-adding contact task after failing to get lock") - } - logrus.WithFields(logrus.Fields{ - "org_id": task.OrgID, - "contact_id": eventTask.ContactID, - }).Info("failed to get lock for contact, requeued and skipping") - return nil - } - defer locker.Release(rt.RP, lock) - - // read all the events for this contact, one by one - contactQ := fmt.Sprintf("c:%d:%d", task.OrgID, eventTask.ContactID) - for { - // pop the next event off this contacts queue - rc := rt.RP.Get() - event, err := redis.String(rc.Do("lpop", contactQ)) - rc.Close() - - // out of tasks? that's ok, exit - if err == redis.ErrNil { - return nil - } - - // real error? 
report - if err != nil { - return errors.Wrapf(err, "error popping contact event") - } - - start := time.Now() - - // decode our event, this is a normal task at its top level - contactEvent := &queue.Task{} - err = json.Unmarshal([]byte(event), contactEvent) - if err != nil { - return errors.Wrapf(err, "error unmarshalling contact event: %s", event) - } - - // hand off to the appropriate handler - switch contactEvent.Type { - - case StopEventType: - evt := &StopEvent{} - err = json.Unmarshal(contactEvent.Task, evt) - if err != nil { - return errors.Wrapf(err, "error unmarshalling stop event: %s", event) - } - err = handleStopEvent(ctx, rt, evt) - - case NewConversationEventType, ReferralEventType, MOMissEventType, WelcomeMessageEventType: - evt := &models.ChannelEvent{} - err = json.Unmarshal(contactEvent.Task, evt) - if err != nil { - return errors.Wrapf(err, "error unmarshalling channel event: %s", event) - } - _, err = HandleChannelEvent(ctx, rt, models.ChannelEventType(contactEvent.Type), evt, nil) - - case MsgEventType: - msg := &MsgEvent{} - err = json.Unmarshal(contactEvent.Task, msg) - if err != nil { - return errors.Wrapf(err, "error unmarshalling msg event: %s", event) - } - err = handleMsgEvent(ctx, rt, msg) - - case TicketClosedEventType: - evt := &models.TicketEvent{} - err = json.Unmarshal(contactEvent.Task, evt) - if err != nil { - return errors.Wrapf(err, "error unmarshalling ticket event: %s", event) - } - err = handleTicketEvent(ctx, rt, evt) - - case TimeoutEventType, ExpirationEventType: - evt := &TimedEvent{} - err = json.Unmarshal(contactEvent.Task, evt) - if err != nil { - return errors.Wrapf(err, "error unmarshalling timeout event: %s", event) - } - err = handleTimedEvent(ctx, rt, contactEvent.Type, evt) - - default: - return errors.Errorf("unknown contact event type: %s", contactEvent.Type) - } - - // log our processing time to librato - analytics.Gauge(fmt.Sprintf("mr.%s_elapsed", contactEvent.Type), 
float64(time.Since(start))/float64(time.Second)) - - // and total latency for this task since it was queued - analytics.Gauge(fmt.Sprintf("mr.%s_latency", contactEvent.Type), float64(time.Since(task.QueuedOn))/float64(time.Second)) - - // if we get an error processing an event, requeue it for later and return our error - if err != nil { - log := logrus.WithFields(logrus.Fields{ - "org_id": task.OrgID, - "contact_id": eventTask.ContactID, - "event": event, - }) - - if qerr := dbutil.AsQueryError(err); qerr != nil { - query, params := qerr.Query() - log = log.WithFields(logrus.Fields{"sql": query, "sql_params": params}) - } - - contactEvent.ErrorCount++ - if contactEvent.ErrorCount < 3 { - rc := rt.RP.Get() - retryErr := queueHandleTask(rc, eventTask.ContactID, contactEvent, true) - if retryErr != nil { - logrus.WithError(retryErr).Error("error requeuing errored contact event") - } - rc.Close() - - log.WithError(err).WithField("error_count", contactEvent.ErrorCount).Error("error handling contact event") - return nil - } - log.WithError(err).Error("error handling contact event, permanent failure") - return nil - } - } -} - // handleTimedEvent is called for timeout events func handleTimedEvent(ctx context.Context, rt *runtime.Runtime, eventType string, event *TimedEvent) error { start := time.Now() @@ -385,7 +228,7 @@ func HandleChannelEvent(ctx context.Context, rt *runtime.Runtime, eventType mode // if this is an IVR flow and we don't have a call, trigger that asynchronously if flow.FlowType() == models.FlowTypeVoice && call == nil { - err = runner.TriggerIVRFlow(ctx, rt, oa.OrgID(), flow.ID(), []models.ContactID{modelContact.ID()}, nil) + err = TriggerIVRFlow(ctx, rt, oa.OrgID(), flow.ID(), []models.ContactID{modelContact.ID()}, nil) if err != nil { return nil, errors.Wrapf(err, "error while triggering ivr flow") } @@ -514,7 +357,7 @@ func handleMsgEvent(ctx context.Context, rt *runtime.Runtime, event *MsgEvent) e // contact has been deleted, or is blocked, or 
channel no longer exists, ignore this message but mark it as handled if modelContact == nil || modelContact.Status() == models.ContactStatusBlocked || channel == nil { - err := models.UpdateMessage(ctx, rt.DB, event.MsgID, models.MsgStatusHandled, models.VisibilityArchived, models.MsgTypeInbox, models.NilFlowID, attachments, logUUIDs) + err := models.MarkMessageHandled(ctx, rt.DB, event.MsgID, models.MsgStatusHandled, models.VisibilityArchived, models.NilFlowID, models.NilTicketID, attachments, logUUIDs) if err != nil { return errors.Wrapf(err, "error updating message for deleted contact") } @@ -554,12 +397,12 @@ func handleMsgEvent(ctx context.Context, rt *runtime.Runtime, event *MsgEvent) e } } - // look up any open tickets for this contact and forward this message to them - tickets, err := models.LoadOpenTicketsForContact(ctx, rt.DB, modelContact) + // look up any open ticket for this contact and forward this message to it + ticket, err := models.LoadOpenTicketForContact(ctx, rt.DB, modelContact) if err != nil { return errors.Wrapf(err, "unable to look up open tickets for contact") } - for _, ticket := range tickets { + if ticket != nil { ticket.ForwardIncoming(ctx, rt, oa, event.MsgUUID, event.Text, attachments) } @@ -608,7 +451,7 @@ func handleMsgEvent(ctx context.Context, rt *runtime.Runtime, event *MsgEvent) e } sessions[0].SetIncomingMsg(event.MsgID, event.MsgExternalID) - return markMsgHandled(ctx, tx, contact, msgIn, flow, attachments, tickets, logUUIDs) + return markMsgHandled(ctx, tx, contact, msgIn, flow, attachments, ticket, logUUIDs) } // we found a trigger and their session is nil or doesn't ignore keywords @@ -625,9 +468,9 @@ func handleMsgEvent(ctx context.Context, rt *runtime.Runtime, event *MsgEvent) e // if this is an IVR flow, we need to trigger that start (which happens in a different queue) if flow.FlowType() == models.FlowTypeVoice { ivrMsgHook := func(ctx context.Context, tx *sqlx.Tx) error { - return markMsgHandled(ctx, tx, contact,
msgIn, flow, attachments, tickets, logUUIDs) + return markMsgHandled(ctx, tx, contact, msgIn, flow, attachments, ticket, logUUIDs) } - err = runner.TriggerIVRFlow(ctx, rt, oa.OrgID(), flow.ID(), []models.ContactID{modelContact.ID()}, ivrMsgHook) + err = TriggerIVRFlow(ctx, rt, oa.OrgID(), flow.ID(), []models.ContactID{modelContact.ID()}, ivrMsgHook) if err != nil { return errors.Wrapf(err, "error while triggering ivr flow") } @@ -655,7 +498,7 @@ func handleMsgEvent(ctx context.Context, rt *runtime.Runtime, event *MsgEvent) e } // this message didn't trigger and new sessions or resume any existing ones, so handle as inbox - err = handleAsInbox(ctx, rt, oa, contact, msgIn, attachments, logUUIDs, tickets) + err = handleAsInbox(ctx, rt, oa, contact, msgIn, attachments, logUUIDs, ticket) if err != nil { return errors.Wrapf(err, "error handling inbox message") } @@ -726,7 +569,7 @@ func handleTicketEvent(ctx context.Context, rt *runtime.Runtime, event *models.T // if this is an IVR flow, we need to trigger that start (which happens in a different queue) if flow.FlowType() == models.FlowTypeVoice { - err = runner.TriggerIVRFlow(ctx, rt, oa.OrgID(), flow.ID(), []models.ContactID{modelContact.ID()}, nil) + err = TriggerIVRFlow(ctx, rt, oa.OrgID(), flow.ID(), []models.ContactID{modelContact.ID()}, nil) if err != nil { return errors.Wrapf(err, "error while triggering ivr flow") } @@ -758,8 +601,8 @@ func handleTicketEvent(ctx context.Context, rt *runtime.Runtime, event *models.T return nil } -// handles a message as an inbox message -func handleAsInbox(ctx context.Context, rt *runtime.Runtime, oa *models.OrgAssets, contact *flows.Contact, msg *flows.MsgIn, attachments []utils.Attachment, logUUIDs []models.ChannelLogUUID, tickets []*models.Ticket) error { +// handles a message as an inbox message, i.e. 
no flow +func handleAsInbox(ctx context.Context, rt *runtime.Runtime, oa *models.OrgAssets, contact *flows.Contact, msg *flows.MsgIn, attachments []utils.Attachment, logUUIDs []models.ChannelLogUUID, ticket *models.Ticket) error { // usually last_seen_on is updated by handling the msg_received event in the engine sprint, but since this is an inbox // message we manually create that event and handle it msgEvent := events.NewMsgReceived(msg) @@ -771,37 +614,35 @@ func handleAsInbox(ctx context.Context, rt *runtime.Runtime, oa *models.OrgAsset return errors.Wrap(err, "error handling inbox message events") } - return markMsgHandled(ctx, rt.DB, contact, msg, nil, attachments, tickets, logUUIDs) + return markMsgHandled(ctx, rt.DB, contact, msg, nil, attachments, ticket, logUUIDs) } // utility to mark as message as handled and update any open contact tickets -func markMsgHandled(ctx context.Context, db models.Queryer, contact *flows.Contact, msg *flows.MsgIn, flow *models.Flow, attachments []utils.Attachment, tickets []*models.Ticket, logUUIDs []models.ChannelLogUUID) error { - msgType := models.MsgTypeInbox +func markMsgHandled(ctx context.Context, db models.Queryer, contact *flows.Contact, msg *flows.MsgIn, flow *models.Flow, attachments []utils.Attachment, ticket *models.Ticket, logUUIDs []models.ChannelLogUUID) error { flowID := models.NilFlowID if flow != nil { - msgType = models.MsgTypeFlow flowID = flow.ID() } + ticketID := models.NilTicketID + if ticket != nil { + ticketID = ticket.ID() + } - err := models.UpdateMessage(ctx, db, models.MsgID(msg.ID()), models.MsgStatusHandled, models.VisibilityVisible, msgType, flowID, attachments, logUUIDs) + err := models.MarkMessageHandled(ctx, db, models.MsgID(msg.ID()), models.MsgStatusHandled, models.VisibilityVisible, flowID, ticketID, attachments, logUUIDs) if err != nil { return errors.Wrapf(err, "error marking message as handled") } - if len(tickets) > 0 { - err = models.UpdateTicketLastActivity(ctx, db, tickets) + if 
ticket != nil { + err = models.UpdateTicketLastActivity(ctx, db, []*models.Ticket{ticket}) if err != nil { - return errors.Wrapf(err, "error updating last activity for open tickets") + return errors.Wrapf(err, "error updating last activity for open ticket") } } return nil } -type HandleEventTask struct { - ContactID models.ContactID `json:"contact_id"` -} - type TimedEvent struct { ContactID models.ContactID `json:"contact_id"` OrgID models.OrgID `json:"org_id"` @@ -861,3 +702,48 @@ func NewTimeoutTask(orgID models.OrgID, contactID models.ContactID, sessionID mo func NewExpirationTask(orgID models.OrgID, contactID models.ContactID, sessionID models.SessionID, time time.Time) *queue.Task { return newTimedTask(ExpirationEventType, orgID, contactID, sessionID, time) } + +type DBHook func(ctx context.Context, tx *sqlx.Tx) error + +// TriggerIVRFlow will create a new flow start with the passed in flow and set of contacts. This will cause us to +// request calls to start, which once we get the callback will trigger our actual flow to start. 
+func TriggerIVRFlow(ctx context.Context, rt *runtime.Runtime, orgID models.OrgID, flowID models.FlowID, contactIDs []models.ContactID, hook DBHook) error { + tx, _ := rt.DB.BeginTxx(ctx, nil) + + // create and insert our flow start + start := models.NewFlowStart(orgID, models.StartTypeTrigger, models.FlowTypeVoice, flowID).WithContactIDs(contactIDs) + err := models.InsertFlowStarts(ctx, tx, []*models.FlowStart{start}) + if err != nil { + tx.Rollback() + return errors.Wrapf(err, "error inserting ivr flow start") + } + + // call our hook if we have one + if hook != nil { + err = hook(ctx, tx) + if err != nil { + tx.Rollback() + return errors.Wrapf(err, "error while calling db hook") + } + } + + // commit our transaction + err = tx.Commit() + if err != nil { + tx.Rollback() + return errors.Wrapf(err, "error committing transaction for ivr flow starts") + } + + // create our batch of all our contacts + task := &ivr.StartIVRFlowBatchTask{FlowStartBatch: start.CreateBatch(contactIDs, true, len(contactIDs))} + + // queue this to our ivr starter, it will take care of creating the calls then calling back in + rc := rt.RP.Get() + defer rc.Close() + err = tasks.Queue(rc, queue.BatchQueue, orgID, task, queue.HighPriority) + if err != nil { + return errors.Wrapf(err, "error queuing ivr flow start") + } + + return nil +} diff --git a/core/tasks/handler/cron_test.go b/core/tasks/handler/cron_test.go index ebca11d80..e304edba2 100644 --- a/core/tasks/handler/cron_test.go +++ b/core/tasks/handler/cron_test.go @@ -9,6 +9,7 @@ import ( _ "github.com/nyaruka/mailroom/core/handlers" "github.com/nyaruka/mailroom/core/models" "github.com/nyaruka/mailroom/core/queue" + "github.com/nyaruka/mailroom/core/tasks" "github.com/nyaruka/mailroom/core/tasks/handler" "github.com/nyaruka/mailroom/testsuite" "github.com/nyaruka/mailroom/testsuite/testdata" @@ -16,8 +17,8 @@ import ( ) func TestRetryMsgs(t *testing.T) { - ctx, rt, db, rp := testsuite.Get() - rc := rp.Get() + ctx, rt := 
testsuite.Runtime() + rc := rt.RP.Get() defer rc.Close() defer testsuite.Reset(testsuite.ResetAll) @@ -37,9 +38,9 @@ func TestRetryMsgs(t *testing.T) { } for _, msg := range testMsgs { - db.MustExec( - `INSERT INTO msgs_msg(uuid, org_id, channel_id, contact_id, contact_urn_id, text, direction, status, created_on, visibility, msg_count, error_count, next_attempt) - VALUES($1, $2, $3, $4, $5, $6, $7, $8, $9, 'V', 1, 0, NOW())`, + rt.DB.MustExec( + `INSERT INTO msgs_msg(uuid, org_id, channel_id, contact_id, contact_urn_id, text, direction, msg_type, status, created_on, visibility, msg_count, error_count, next_attempt) + VALUES($1, $2, $3, $4, $5, $6, $7, 'T', $8, $9, 'V', 1, 0, NOW())`, uuids.New(), testdata.Org1.ID, testdata.TwilioChannel.ID, testdata.Cathy.ID, testdata.Cathy.URNID, msg.Text, models.DirectionIn, msg.Status, msg.CreatedOn) } @@ -49,11 +50,11 @@ func TestRetryMsgs(t *testing.T) { // should have one message requeued task, _ := queue.PopNextTask(rc, queue.HandlerQueue) assert.NotNil(t, task) - err = handler.HandleEvent(ctx, rt, task) + err = tasks.Perform(ctx, rt, task) assert.NoError(t, err) // message should be handled now - assertdb.Query(t, db, `SELECT count(*) from msgs_msg WHERE text = 'pending' AND status = 'H'`).Returns(1) + assertdb.Query(t, rt.DB, `SELECT count(*) from msgs_msg WHERE text = 'pending' AND status = 'H'`).Returns(1) // only one message was queued task, _ = queue.PopNextTask(rc, queue.HandlerQueue) diff --git a/core/tasks/handler/handle_contact_event.go b/core/tasks/handler/handle_contact_event.go new file mode 100644 index 000000000..4b42ee4ba --- /dev/null +++ b/core/tasks/handler/handle_contact_event.go @@ -0,0 +1,169 @@ +package handler + +import ( + "context" + "encoding/json" + "fmt" + "time" + + "github.com/gomodule/redigo/redis" + "github.com/nyaruka/gocommon/analytics" + "github.com/nyaruka/gocommon/dbutil" + "github.com/nyaruka/mailroom/core/models" + "github.com/nyaruka/mailroom/core/queue" + 
"github.com/nyaruka/mailroom/core/tasks" + "github.com/nyaruka/mailroom/runtime" + "github.com/pkg/errors" + "github.com/sirupsen/logrus" +) + +// TypeHandleContactEvent is the task type for flagging that a contact has tasks to be handled +const TypeHandleContactEvent = "handle_contact_event" + +func init() { + tasks.RegisterType(TypeHandleContactEvent, func() tasks.Task { return &HandleContactEventTask{} }) +} + +// HandleContactEventTask is the task to flag that a contact has tasks +type HandleContactEventTask struct { + ContactID models.ContactID `json:"contact_id"` +} + +func (t *HandleContactEventTask) Type() string { + return TypeHandleContactEvent +} + +// Timeout is the maximum amount of time the task can run for +func (t *HandleContactEventTask) Timeout() time.Duration { + return time.Minute * 5 +} + +// Perform is called when an event comes in for a contact. To make sure we don't get into a situation of being off by one, +// this task ingests and handles all the events for a contact, one by one. +func (t *HandleContactEventTask) Perform(ctx context.Context, rt *runtime.Runtime, orgID models.OrgID) error { + // try to get the lock for this contact, waiting up to 10 seconds + locks, _, err := models.LockContacts(ctx, rt, orgID, []models.ContactID{t.ContactID}, time.Second*10) + if err != nil { + return errors.Wrapf(err, "error acquiring lock for contact %d", t.ContactID) + } + + // we didn't get the lock.. 
requeue for later + if len(locks) == 0 { + rc := rt.RP.Get() + defer rc.Close() + err = tasks.Queue(rc, queue.HandlerQueue, orgID, &HandleContactEventTask{ContactID: t.ContactID}, queue.DefaultPriority) + if err != nil { + return errors.Wrapf(err, "error re-adding contact task after failing to get lock") + } + logrus.WithFields(logrus.Fields{"org_id": orgID, "contact_id": t.ContactID}).Info("failed to get lock for contact, requeued and skipping") + return nil + } + + defer models.UnlockContacts(rt, orgID, locks) + + // read all the events for this contact, one by one + contactQ := fmt.Sprintf("c:%d:%d", orgID, t.ContactID) + for { + // pop the next event off this contacts queue + rc := rt.RP.Get() + event, err := redis.String(rc.Do("lpop", contactQ)) + rc.Close() + + // out of tasks? that's ok, exit + if err == redis.ErrNil { + return nil + } + + // real error? report + if err != nil { + return errors.Wrapf(err, "error popping contact event") + } + + start := time.Now() + + // decode our event, this is a normal task at its top level + contactEvent := &queue.Task{} + err = json.Unmarshal([]byte(event), contactEvent) + if err != nil { + return errors.Wrapf(err, "error unmarshalling contact event: %s", event) + } + + // hand off to the appropriate handler + switch contactEvent.Type { + + case StopEventType: + evt := &StopEvent{} + err = json.Unmarshal(contactEvent.Task, evt) + if err != nil { + return errors.Wrapf(err, "error unmarshalling stop event: %s", event) + } + err = handleStopEvent(ctx, rt, evt) + + case NewConversationEventType, ReferralEventType, MOMissEventType, WelcomeMessageEventType: + evt := &models.ChannelEvent{} + err = json.Unmarshal(contactEvent.Task, evt) + if err != nil { + return errors.Wrapf(err, "error unmarshalling channel event: %s", event) + } + _, err = HandleChannelEvent(ctx, rt, models.ChannelEventType(contactEvent.Type), evt, nil) + + case MsgEventType: + msg := &MsgEvent{} + err = json.Unmarshal(contactEvent.Task, msg) + if err != nil 
{ + return errors.Wrapf(err, "error unmarshalling msg event: %s", event) + } + err = handleMsgEvent(ctx, rt, msg) + + case TicketClosedEventType: + evt := &models.TicketEvent{} + err = json.Unmarshal(contactEvent.Task, evt) + if err != nil { + return errors.Wrapf(err, "error unmarshalling ticket event: %s", event) + } + err = handleTicketEvent(ctx, rt, evt) + + case TimeoutEventType, ExpirationEventType: + evt := &TimedEvent{} + err = json.Unmarshal(contactEvent.Task, evt) + if err != nil { + return errors.Wrapf(err, "error unmarshalling timeout event: %s", event) + } + err = handleTimedEvent(ctx, rt, contactEvent.Type, evt) + + default: + return errors.Errorf("unknown contact event type: %s", contactEvent.Type) + } + + // log our processing time to librato + analytics.Gauge(fmt.Sprintf("mr.%s_elapsed", contactEvent.Type), float64(time.Since(start))/float64(time.Second)) + + // and total latency for this task since it was queued + analytics.Gauge(fmt.Sprintf("mr.%s_latency", contactEvent.Type), float64(time.Since(contactEvent.QueuedOn))/float64(time.Second)) + + // if we get an error processing an event, requeue it for later and return our error + if err != nil { + log := logrus.WithFields(logrus.Fields{"org_id": orgID, "contact_id": t.ContactID, "event": event}) + + if qerr := dbutil.AsQueryError(err); qerr != nil { + query, params := qerr.Query() + log = log.WithFields(logrus.Fields{"sql": query, "sql_params": params}) + } + + contactEvent.ErrorCount++ + if contactEvent.ErrorCount < 3 { + rc := rt.RP.Get() + retryErr := queueHandleTask(rc, t.ContactID, contactEvent, true) + if retryErr != nil { + logrus.WithError(retryErr).Error("error requeuing errored contact event") + } + rc.Close() + + log.WithError(err).WithField("error_count", contactEvent.ErrorCount).Error("error handling contact event") + return nil + } + log.WithError(err).Error("error handling contact event, permanent failure") + return nil + } + } +} diff --git a/core/tasks/handler/handler_test.go 
b/core/tasks/handler/handle_contact_event_test.go similarity index 69% rename from core/tasks/handler/handler_test.go rename to core/tasks/handler/handle_contact_event_test.go index c988ad170..3400e46fe 100644 --- a/core/tasks/handler/handler_test.go +++ b/core/tasks/handler/handle_contact_event_test.go @@ -14,52 +14,51 @@ import ( _ "github.com/nyaruka/mailroom/core/handlers" "github.com/nyaruka/mailroom/core/models" "github.com/nyaruka/mailroom/core/queue" + "github.com/nyaruka/mailroom/core/tasks" "github.com/nyaruka/mailroom/core/tasks/handler" "github.com/nyaruka/mailroom/testsuite" "github.com/nyaruka/mailroom/testsuite/testdata" - "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) func TestMsgEvents(t *testing.T) { - ctx, rt, db, rp := testsuite.Get() - rc := rp.Get() + ctx, rt := testsuite.Runtime() + rc := rt.RP.Get() defer rc.Close() defer testsuite.Reset(testsuite.ResetAll) - testdata.InsertKeywordTrigger(db, testdata.Org1, testdata.Favorites, "start", models.MatchOnly, nil, nil) - testdata.InsertKeywordTrigger(db, testdata.Org1, testdata.IVRFlow, "ivr", models.MatchOnly, nil, nil) + testdata.InsertKeywordTrigger(rt, testdata.Org1, testdata.Favorites, "start", models.MatchOnly, nil, nil) + testdata.InsertKeywordTrigger(rt, testdata.Org1, testdata.IVRFlow, "ivr", models.MatchOnly, nil, nil) - testdata.InsertKeywordTrigger(db, testdata.Org2, testdata.Org2Favorites, "start", models.MatchOnly, nil, nil) - testdata.InsertCatchallTrigger(db, testdata.Org2, testdata.Org2SingleMessage, nil, nil) + testdata.InsertKeywordTrigger(rt, testdata.Org2, testdata.Org2Favorites, "start", models.MatchOnly, nil, nil) + testdata.InsertCatchallTrigger(rt, testdata.Org2, testdata.Org2SingleMessage, nil, nil) // give Cathy and Bob some tickets... 
openTickets := map[*testdata.Contact][]*testdata.Ticket{ testdata.Cathy: { - testdata.InsertOpenTicket(db, testdata.Org1, testdata.Cathy, testdata.Mailgun, testdata.DefaultTopic, "Ok", "", time.Now(), nil), - testdata.InsertOpenTicket(db, testdata.Org1, testdata.Cathy, testdata.Zendesk, testdata.DefaultTopic, "Ok", "", time.Now(), nil), + testdata.InsertOpenTicket(rt, testdata.Org1, testdata.Cathy, testdata.Zendesk, testdata.DefaultTopic, "Ok", "", time.Now(), nil), }, } closedTickets := map[*testdata.Contact][]*testdata.Ticket{ testdata.Cathy: { - testdata.InsertClosedTicket(db, testdata.Org1, testdata.Cathy, testdata.Mailgun, testdata.DefaultTopic, "", "", nil), + testdata.InsertClosedTicket(rt, testdata.Org1, testdata.Cathy, testdata.Mailgun, testdata.DefaultTopic, "", "", nil), }, testdata.Bob: { - testdata.InsertClosedTicket(db, testdata.Org1, testdata.Bob, testdata.Mailgun, testdata.DefaultTopic, "Ok", "", nil), + testdata.InsertClosedTicket(rt, testdata.Org1, testdata.Bob, testdata.Mailgun, testdata.DefaultTopic, "Ok", "", nil), }, } - db.MustExec(`UPDATE tickets_ticket SET last_activity_on = '2021-01-01T00:00:00Z'`) + rt.DB.MustExec(`UPDATE tickets_ticket SET last_activity_on = '2021-01-01T00:00:00Z'`) // clear all of Alexandria's URNs - db.MustExec(`UPDATE contacts_contacturn SET contact_id = NULL WHERE contact_id = $1`, testdata.Alexandria.ID) + rt.DB.MustExec(`UPDATE contacts_contacturn SET contact_id = NULL WHERE contact_id = $1`, testdata.Alexandria.ID) models.FlushCache() // insert a dummy message into the database that will get the updates from handling each message event which pretends to be it - dbMsg := testdata.InsertIncomingMsg(db, testdata.Org1, testdata.TwilioChannel, testdata.Cathy, "", models.MsgStatusPending) + dbMsg := testdata.InsertIncomingMsg(rt, testdata.Org1, testdata.TwilioChannel, testdata.Cathy, "", models.MsgStatusPending) tcs := []struct { preHook func() @@ -68,7 +67,6 @@ func TestMsgEvents(t *testing.T) { contact 
*testdata.Contact text string expectedReply string - expectedType models.MsgType expectedFlow *testdata.Flow }{ // 0: @@ -78,7 +76,6 @@ func TestMsgEvents(t *testing.T) { contact: testdata.Cathy, text: "noop", expectedReply: "", - expectedType: models.MsgTypeInbox, }, // 1: @@ -88,7 +85,6 @@ func TestMsgEvents(t *testing.T) { contact: testdata.Cathy, text: "start other", expectedReply: "", - expectedType: models.MsgTypeInbox, }, // 2: @@ -98,7 +94,6 @@ func TestMsgEvents(t *testing.T) { contact: testdata.Cathy, text: "start", expectedReply: "What is your favorite color?", - expectedType: models.MsgTypeFlow, expectedFlow: testdata.Favorites, }, @@ -109,7 +104,6 @@ func TestMsgEvents(t *testing.T) { contact: testdata.Cathy, text: "purple", expectedReply: "I don't know that color. Try again.", - expectedType: models.MsgTypeFlow, expectedFlow: testdata.Favorites, }, @@ -120,7 +114,6 @@ func TestMsgEvents(t *testing.T) { contact: testdata.Cathy, text: "blue", expectedReply: "Good choice, I like Blue too! What is your favorite beer?", - expectedType: models.MsgTypeFlow, expectedFlow: testdata.Favorites, }, @@ -131,7 +124,6 @@ func TestMsgEvents(t *testing.T) { contact: testdata.Cathy, text: "MUTZIG", expectedReply: "Mmmmm... delicious Mutzig. If only they made blue Mutzig! 
Lastly, what is your name?", - expectedType: models.MsgTypeFlow, expectedFlow: testdata.Favorites, }, @@ -142,7 +134,6 @@ func TestMsgEvents(t *testing.T) { contact: testdata.Cathy, text: "Cathy", expectedReply: "Thanks Cathy, we are all done!", - expectedType: models.MsgTypeFlow, expectedFlow: testdata.Favorites, }, @@ -153,7 +144,6 @@ func TestMsgEvents(t *testing.T) { contact: testdata.Cathy, text: "noop", expectedReply: "", - expectedType: models.MsgTypeInbox, }, // 8: @@ -163,7 +153,6 @@ func TestMsgEvents(t *testing.T) { contact: testdata.Org2Contact, text: "other", expectedReply: "Hey, how are you?", - expectedType: models.MsgTypeFlow, expectedFlow: testdata.Org2SingleMessage, }, @@ -174,7 +163,6 @@ func TestMsgEvents(t *testing.T) { contact: testdata.Org2Contact, text: "start", expectedReply: "What is your favorite color?", - expectedType: models.MsgTypeFlow, expectedFlow: testdata.Org2Favorites, }, @@ -185,7 +173,6 @@ func TestMsgEvents(t *testing.T) { contact: testdata.Org2Contact, text: "green", expectedReply: "Good choice, I like Green too! What is your favorite beer?", - expectedType: models.MsgTypeFlow, expectedFlow: testdata.Org2Favorites, }, @@ -196,7 +183,6 @@ func TestMsgEvents(t *testing.T) { contact: testdata.Org2Contact, text: "primus", expectedReply: "Mmmmm... delicious Primus. If only they made green Primus! 
Lastly, what is your name?", - expectedType: models.MsgTypeFlow, expectedFlow: testdata.Org2Favorites, }, @@ -207,7 +193,6 @@ func TestMsgEvents(t *testing.T) { contact: testdata.Org2Contact, text: "george", expectedReply: "Thanks george, we are all done!", - expectedType: models.MsgTypeFlow, expectedFlow: testdata.Org2Favorites, }, @@ -218,7 +203,6 @@ func TestMsgEvents(t *testing.T) { contact: testdata.Org2Contact, text: "blargh", expectedReply: "Hey, how are you?", - expectedType: models.MsgTypeFlow, expectedFlow: testdata.Org2SingleMessage, }, @@ -229,21 +213,19 @@ func TestMsgEvents(t *testing.T) { contact: testdata.Bob, text: "ivr", expectedReply: "", - expectedType: models.MsgTypeFlow, expectedFlow: testdata.IVRFlow, }, // 15: stopped contact should be unstopped { preHook: func() { - db.MustExec(`UPDATE contacts_contact SET status = 'S' WHERE id = $1`, testdata.George.ID) + rt.DB.MustExec(`UPDATE contacts_contact SET status = 'S' WHERE id = $1`, testdata.George.ID) }, org: testdata.Org1, channel: testdata.TwitterChannel, contact: testdata.George, text: "start", expectedReply: "What is your favorite color?", - expectedType: models.MsgTypeFlow, expectedFlow: testdata.Favorites, }, @@ -254,7 +236,6 @@ func TestMsgEvents(t *testing.T) { contact: testdata.Alexandria, text: "start", expectedReply: "", - expectedType: models.MsgTypeFlow, expectedFlow: testdata.Favorites, }, @@ -265,35 +246,32 @@ func TestMsgEvents(t *testing.T) { contact: testdata.Org2Contact, text: "start", expectedReply: "What is your favorite color?", - expectedType: models.MsgTypeFlow, expectedFlow: testdata.Org2Favorites, }, // 18: { preHook: func() { - db.MustExec(`UPDATE flows_flow SET is_active = FALSE WHERE id = $1`, testdata.Org2Favorites.ID) + rt.DB.MustExec(`UPDATE flows_flow SET is_active = FALSE WHERE id = $1`, testdata.Org2Favorites.ID) }, org: testdata.Org2, channel: testdata.Org2Channel, contact: testdata.Org2Contact, text: "red", expectedReply: "Hey, how are you?", - expectedType: 
models.MsgTypeFlow, expectedFlow: testdata.Org2SingleMessage, }, // 19: start Fred back in our favorites flow to test retries { preHook: func() { - db.MustExec(`UPDATE flows_flow SET is_active = TRUE WHERE id = $1`, testdata.Org2Favorites.ID) + rt.DB.MustExec(`UPDATE flows_flow SET is_active = TRUE WHERE id = $1`, testdata.Org2Favorites.ID) }, org: testdata.Org2, channel: testdata.Org2Channel, contact: testdata.Org2Contact, text: "start", expectedReply: "What is your favorite color?", - expectedType: models.MsgTypeFlow, expectedFlow: testdata.Org2Favorites, }, } @@ -317,7 +295,7 @@ func TestMsgEvents(t *testing.T) { models.FlushCache() // reset our dummy db message into an unhandled state - db.MustExec(`UPDATE msgs_msg SET status = 'P', msg_type = NULL WHERE id = $1`, dbMsg.ID()) + rt.DB.MustExec(`UPDATE msgs_msg SET status = 'P', flow_id = NULL WHERE id = $1`, dbMsg.ID()) // run our setup hook if we have one if tc.preHook != nil { @@ -332,45 +310,45 @@ func TestMsgEvents(t *testing.T) { task, err = queue.PopNextTask(rc, queue.HandlerQueue) assert.NoError(t, err, "%d: error popping next task", i) - err = handler.HandleEvent(ctx, rt, task) + err = tasks.Perform(ctx, rt, task) assert.NoError(t, err, "%d: error when handling event", i) - var expectedFlowID interface{} + var expectedFlowID any if tc.expectedFlow != nil { expectedFlowID = int64(tc.expectedFlow.ID) } - // check that message is marked as handled with expected type - assertdb.Query(t, db, `SELECT status, msg_type, flow_id FROM msgs_msg WHERE id = $1`, dbMsg.ID()). - Columns(map[string]interface{}{"status": "H", "msg_type": string(tc.expectedType), "flow_id": expectedFlowID}, "%d: msg state mismatch", i) + // check that message is marked as handled + assertdb.Query(t, rt.DB, `SELECT status, msg_type, flow_id FROM msgs_msg WHERE id = $1`, dbMsg.ID()). 
+ Columns(map[string]any{"status": "H", "msg_type": "T", "flow_id": expectedFlowID}, "%d: msg state mismatch", i) // if we are meant to have a reply, check it if tc.expectedReply != "" { - assertdb.Query(t, db, `SELECT text, status FROM msgs_msg WHERE contact_id = $1 AND created_on > $2 ORDER BY id DESC LIMIT 1`, tc.contact.ID, last). - Columns(map[string]interface{}{"text": tc.expectedReply, "status": "Q"}, "%d: response mismatch", i) + assertdb.Query(t, rt.DB, `SELECT text, status FROM msgs_msg WHERE contact_id = $1 AND created_on > $2 ORDER BY id DESC LIMIT 1`, tc.contact.ID, last). + Columns(map[string]any{"text": tc.expectedReply, "status": "Q"}, "%d: response mismatch", i) } - // check any open tickets for this contact where updated + // check last open ticket for this contact was updated numOpenTickets := len(openTickets[tc.contact]) - assertdb.Query(t, db, `SELECT count(*) FROM tickets_ticket WHERE contact_id = $1 AND status = 'O' AND last_activity_on > $2`, tc.contact.ID, last). + assertdb.Query(t, rt.DB, `SELECT count(*) FROM tickets_ticket WHERE contact_id = $1 AND status = 'O' AND last_activity_on > $2`, tc.contact.ID, last). Returns(numOpenTickets, "%d: updated open ticket mismatch", i) // check any closed tickets are unchanged numClosedTickets := len(closedTickets[tc.contact]) - assertdb.Query(t, db, `SELECT count(*) FROM tickets_ticket WHERE contact_id = $1 AND status = 'C' AND last_activity_on = '2021-01-01T00:00:00Z'`, tc.contact.ID). + assertdb.Query(t, rt.DB, `SELECT count(*) FROM tickets_ticket WHERE contact_id = $1 AND status = 'C' AND last_activity_on = '2021-01-01T00:00:00Z'`, tc.contact.ID). 
Returns(numClosedTickets, "%d: unchanged closed ticket mismatch", i) last = time.Now() } // should have one remaining IVR task to handle for Bob - orgTasks := testsuite.CurrentOrgTasks(t, rp) + orgTasks := testsuite.CurrentTasks(t, rt) assert.Equal(t, 1, len(orgTasks[testdata.Org1.ID])) task, err := queue.PopNextTask(rc, queue.BatchQueue) assert.NoError(t, err) assert.NotNil(t, task) - assert.Equal(t, queue.StartIVRFlowBatch, task.Type) + assert.Equal(t, "start_ivr_flow_batch", task.Type) // check messages queued to courier testsuite.AssertCourierQueues(t, map[string][]int{ @@ -379,11 +357,11 @@ func TestMsgEvents(t *testing.T) { }) // Fred's sessions should not have a timeout because courier will set them - assertdb.Query(t, db, `SELECT count(*) from flows_flowsession where contact_id = $1`, testdata.Org2Contact.ID).Returns(6) - assertdb.Query(t, db, `SELECT count(*) from flows_flowsession where contact_id = $1 and timeout_on IS NULL`, testdata.Org2Contact.ID).Returns(6) + assertdb.Query(t, rt.DB, `SELECT count(*) from flows_flowsession where contact_id = $1`, testdata.Org2Contact.ID).Returns(6) + assertdb.Query(t, rt.DB, `SELECT count(*) from flows_flowsession where contact_id = $1 and timeout_on IS NULL`, testdata.Org2Contact.ID).Returns(6) // force an error by marking our run for fred as complete (our session is still active so this will blow up) - db.MustExec(`UPDATE flows_flowrun SET status = 'C', exited_on = NOW() WHERE contact_id = $1`, testdata.Org2Contact.ID) + rt.DB.MustExec(`UPDATE flows_flowrun SET status = 'C', exited_on = NOW() WHERE contact_id = $1`, testdata.Org2Contact.ID) task = makeMsgTask(testdata.Org2, testdata.Org2Channel, testdata.Org2Contact, "red") handler.QueueHandleTask(rc, testdata.Org2Contact.ID, task) @@ -391,7 +369,7 @@ func TestMsgEvents(t *testing.T) { for i := 0; i < 3; i++ { task, _ = queue.PopNextTask(rc, queue.HandlerQueue) assert.NotNil(t, task) - err := handler.HandleEvent(ctx, rt, task) + err := tasks.Perform(ctx, rt, task) 
assert.NoError(t, err) } @@ -401,7 +379,7 @@ func TestMsgEvents(t *testing.T) { assert.Nil(t, task) // mark Fred's flow as inactive - db.MustExec(`UPDATE flows_flow SET is_active = FALSE where id = $1`, testdata.Org2Favorites.ID) + rt.DB.MustExec(`UPDATE flows_flow SET is_active = FALSE where id = $1`, testdata.Org2Favorites.ID) models.FlushCache() // try to resume now @@ -409,39 +387,39 @@ func TestMsgEvents(t *testing.T) { handler.QueueHandleTask(rc, testdata.Org2Contact.ID, task) task, _ = queue.PopNextTask(rc, queue.HandlerQueue) assert.NotNil(t, task) - err = handler.HandleEvent(ctx, rt, task) + err = tasks.Perform(ctx, rt, task) assert.NoError(t, err) // should get our catch all trigger - assertdb.Query(t, db, `SELECT text FROM msgs_msg WHERE contact_id = $1 AND direction = 'O' ORDER BY id DESC LIMIT 1`, testdata.Org2Contact.ID).Returns("Hey, how are you?") + assertdb.Query(t, rt.DB, `SELECT text FROM msgs_msg WHERE contact_id = $1 AND direction = 'O' ORDER BY id DESC LIMIT 1`, testdata.Org2Contact.ID).Returns("Hey, how are you?") previous := time.Now() // and should have failed previous session - assertdb.Query(t, db, `SELECT count(*) from flows_flowsession where contact_id = $1 and status = 'F'`, testdata.Org2Contact.ID).Returns(2) + assertdb.Query(t, rt.DB, `SELECT count(*) from flows_flowsession where contact_id = $1 and status = 'F'`, testdata.Org2Contact.ID).Returns(2) // trigger should also not start a new session task = makeMsgTask(testdata.Org2, testdata.Org2Channel, testdata.Org2Contact, "start") handler.QueueHandleTask(rc, testdata.Org2Contact.ID, task) task, _ = queue.PopNextTask(rc, queue.HandlerQueue) - err = handler.HandleEvent(ctx, rt, task) + err = tasks.Perform(ctx, rt, task) assert.NoError(t, err) - assertdb.Query(t, db, `SELECT count(*) FROM msgs_msg WHERE contact_id = $1 AND direction = 'O' AND created_on > $2`, testdata.Org2Contact.ID, previous).Returns(0) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM msgs_msg WHERE contact_id = $1 
AND direction = 'O' AND created_on > $2`, testdata.Org2Contact.ID, previous).Returns(0) } func TestChannelEvents(t *testing.T) { - ctx, rt, db, rp := testsuite.Get() - rc := rp.Get() + ctx, rt := testsuite.Runtime() + rc := rt.RP.Get() defer rc.Close() defer testsuite.Reset(testsuite.ResetAll) // add some channel event triggers - testdata.InsertNewConversationTrigger(db, testdata.Org1, testdata.Favorites, testdata.TwitterChannel) - testdata.InsertReferralTrigger(db, testdata.Org1, testdata.PickANumber, "", testdata.VonageChannel) + testdata.InsertNewConversationTrigger(rt, testdata.Org1, testdata.Favorites, testdata.TwitterChannel) + testdata.InsertReferralTrigger(rt, testdata.Org1, testdata.PickANumber, "", testdata.VonageChannel) // add a URN for cathy so we can test twitter URNs - testdata.InsertContactURN(db, testdata.Org1, testdata.Bob, urns.URN("twitterid:123456"), 10) + testdata.InsertContactURN(rt, testdata.Org1, testdata.Bob, urns.URN("twitterid:123456"), 10) tcs := []struct { EventType models.ChannelEventType @@ -482,18 +460,18 @@ func TestChannelEvents(t *testing.T) { task, err = queue.PopNextTask(rc, queue.HandlerQueue) assert.NoError(t, err, "%d: error popping next task", i) - err = handler.HandleEvent(ctx, rt, task) + err = tasks.Perform(ctx, rt, task) assert.NoError(t, err, "%d: error when handling event", i) // if we are meant to have a response if tc.Response != "" { - assertdb.Query(t, db, `SELECT text FROM msgs_msg WHERE contact_id = $1 AND contact_urn_id = $2 AND created_on > $3 ORDER BY id DESC LIMIT 1`, tc.ContactID, tc.URNID, start). + assertdb.Query(t, rt.DB, `SELECT text FROM msgs_msg WHERE contact_id = $1 AND contact_urn_id = $2 AND created_on > $3 ORDER BY id DESC LIMIT 1`, tc.ContactID, tc.URNID, start). 
Returns(tc.Response, "%d: response mismatch", i) } if tc.UpdateLastSeen { var lastSeen time.Time - err = db.Get(&lastSeen, `SELECT last_seen_on FROM contacts_contact WHERE id = $1`, tc.ContactID) + err = rt.DB.Get(&lastSeen, `SELECT last_seen_on FROM contacts_contact WHERE id = $1`, tc.ContactID) assert.NoError(t, err) assert.True(t, lastSeen.Equal(start) || lastSeen.After(start), "%d: expected last seen to be updated", i) } @@ -501,17 +479,17 @@ func TestChannelEvents(t *testing.T) { } func TestTicketEvents(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() rc := rt.RP.Get() defer rc.Close() defer testsuite.Reset(testsuite.ResetAll) // add a ticket closed trigger - testdata.InsertTicketClosedTrigger(rt.DB, testdata.Org1, testdata.Favorites) + testdata.InsertTicketClosedTrigger(rt, testdata.Org1, testdata.Favorites) - ticket := testdata.InsertClosedTicket(rt.DB, testdata.Org1, testdata.Cathy, testdata.Mailgun, testdata.DefaultTopic, "Where are my shoes?", "", nil) - modelTicket := ticket.Load(db) + ticket := testdata.InsertClosedTicket(rt, testdata.Org1, testdata.Cathy, testdata.Mailgun, testdata.DefaultTopic, "Where are my shoes?", "", nil) + modelTicket := ticket.Load(rt) event := models.NewTicketClosedEvent(modelTicket, testdata.Admin.ID) @@ -521,25 +499,25 @@ func TestTicketEvents(t *testing.T) { task, err := queue.PopNextTask(rc, queue.HandlerQueue) require.NoError(t, err) - err = handler.HandleEvent(ctx, rt, task) + err = tasks.Perform(ctx, rt, task) require.NoError(t, err) assertdb.Query(t, rt.DB, `SELECT count(*) FROM msgs_msg WHERE contact_id = $1 AND direction = 'O' AND text = 'What is your favorite color?'`, testdata.Cathy.ID).Returns(1) } func TestStopEvent(t *testing.T) { - ctx, rt, db, rp := testsuite.Get() - rc := rp.Get() + ctx, rt := testsuite.Runtime() + rc := rt.RP.Get() defer rc.Close() defer testsuite.Reset(testsuite.ResetAll) // schedule an event for cathy and george - testdata.InsertEventFire(rt.DB, 
testdata.Cathy, testdata.RemindersEvent1, time.Now()) - testdata.InsertEventFire(rt.DB, testdata.George, testdata.RemindersEvent1, time.Now()) + testdata.InsertEventFire(rt, testdata.Cathy, testdata.RemindersEvent1, time.Now()) + testdata.InsertEventFire(rt, testdata.George, testdata.RemindersEvent1, time.Now()) // and george to doctors group, cathy is already part of it - db.MustExec(`INSERT INTO contacts_contactgroup_contacts(contactgroup_id, contact_id) VALUES($1, $2);`, testdata.DoctorsGroup.ID, testdata.George.ID) + rt.DB.MustExec(`INSERT INTO contacts_contactgroup_contacts(contactgroup_id, contact_id) VALUES($1, $2);`, testdata.DoctorsGroup.ID, testdata.George.ID) event := &handler.StopEvent{OrgID: testdata.Org1.ID, ContactID: testdata.Cathy.ID} eventJSON, err := json.Marshal(event) @@ -556,31 +534,31 @@ func TestStopEvent(t *testing.T) { task, err = queue.PopNextTask(rc, queue.HandlerQueue) assert.NoError(t, err, "error popping next task") - err = handler.HandleEvent(ctx, rt, task) + err = tasks.Perform(ctx, rt, task) assert.NoError(t, err, "error when handling event") // check that only george is in our group - assertdb.Query(t, db, `SELECT count(*) from contacts_contactgroup_contacts WHERE contactgroup_id = $1 AND contact_id = $2`, testdata.DoctorsGroup.ID, testdata.Cathy.ID).Returns(0) - assertdb.Query(t, db, `SELECT count(*) from contacts_contactgroup_contacts WHERE contactgroup_id = $1 AND contact_id = $2`, testdata.DoctorsGroup.ID, testdata.George.ID).Returns(1) + assertdb.Query(t, rt.DB, `SELECT count(*) from contacts_contactgroup_contacts WHERE contactgroup_id = $1 AND contact_id = $2`, testdata.DoctorsGroup.ID, testdata.Cathy.ID).Returns(0) + assertdb.Query(t, rt.DB, `SELECT count(*) from contacts_contactgroup_contacts WHERE contactgroup_id = $1 AND contact_id = $2`, testdata.DoctorsGroup.ID, testdata.George.ID).Returns(1) // that cathy is stopped - assertdb.Query(t, db, `SELECT count(*) FROM contacts_contact WHERE id = $1 AND status = 'S'`, 
testdata.Cathy.ID).Returns(1) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM contacts_contact WHERE id = $1 AND status = 'S'`, testdata.Cathy.ID).Returns(1) // and has no upcoming events - assertdb.Query(t, db, `SELECT count(*) FROM campaigns_eventfire WHERE contact_id = $1`, testdata.Cathy.ID).Returns(0) - assertdb.Query(t, db, `SELECT count(*) FROM campaigns_eventfire WHERE contact_id = $1`, testdata.George.ID).Returns(1) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM campaigns_eventfire WHERE contact_id = $1`, testdata.Cathy.ID).Returns(0) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM campaigns_eventfire WHERE contact_id = $1`, testdata.George.ID).Returns(1) } func TestTimedEvents(t *testing.T) { - ctx, rt, db, rp := testsuite.Get() - rc := rp.Get() + ctx, rt := testsuite.Runtime() + rc := rt.RP.Get() defer rc.Close() defer testsuite.Reset(testsuite.ResetAll) // create some keyword triggers - testdata.InsertKeywordTrigger(db, testdata.Org1, testdata.Favorites, "start", models.MatchOnly, nil, nil) - testdata.InsertKeywordTrigger(db, testdata.Org1, testdata.PickANumber, "pick", models.MatchOnly, nil, nil) + testdata.InsertKeywordTrigger(rt, testdata.Org1, testdata.Favorites, "start", models.MatchOnly, nil, nil) + testdata.InsertKeywordTrigger(rt, testdata.Org1, testdata.PickANumber, "pick", models.MatchOnly, nil, nil) tcs := []struct { EventType string @@ -660,8 +638,8 @@ func TestTimedEvents(t *testing.T) { if tc.Message == "bad" { expiration = time.Now() } else if tc.Message == "child" { - db.Get(&expiration, `SELECT wait_expires_on FROM flows_flowsession WHERE id = $1 AND status != 'W'`, sessionID) - db.Get(&runID, `SELECT id FROM flows_flowrun WHERE session_id = $1 AND status NOT IN ('A', 'W')`, sessionID) + rt.DB.Get(&expiration, `SELECT wait_expires_on FROM flows_flowsession WHERE id = $1 AND status != 'W'`, sessionID) + rt.DB.Get(&runID, `SELECT id FROM flows_flowrun WHERE session_id = $1 AND status NOT IN ('A', 'W')`, sessionID) } else { expiration 
= time.Now().Add(time.Hour * 24) } @@ -672,7 +650,7 @@ func TestTimedEvents(t *testing.T) { timeoutOn := time.Now().Round(time.Millisecond) // so that there's no difference between this and what we read from the db // usually courier will set timeout_on after sending the last message - db.MustExec(`UPDATE flows_flowsession SET timeout_on = $2 WHERE id = $1`, sessionID, timeoutOn) + rt.DB.MustExec(`UPDATE flows_flowsession SET timeout_on = $2 WHERE id = $1`, sessionID, timeoutOn) task = handler.NewTimeoutTask(tc.Org.ID, tc.Contact.ID, sessionID, timeoutOn) } @@ -683,30 +661,30 @@ func TestTimedEvents(t *testing.T) { task, err = queue.PopNextTask(rc, queue.HandlerQueue) assert.NoError(t, err, "%d: error popping next task", i) - err = handler.HandleEvent(ctx, rt, task) + err = tasks.Perform(ctx, rt, task) assert.NoError(t, err, "%d: error when handling event", i) if tc.Response != "" { - assertdb.Query(t, db, `SELECT text FROM msgs_msg WHERE contact_id = $1 AND created_on > $2 ORDER BY id DESC LIMIT 1`, tc.Contact.ID, last). + assertdb.Query(t, rt.DB, `SELECT text FROM msgs_msg WHERE contact_id = $1 AND created_on > $2 ORDER BY id DESC LIMIT 1`, tc.Contact.ID, last). 
Returns(tc.Response, "%d: response: mismatch", i) } - err = db.Get(&sessionID, `SELECT id FROM flows_flowsession WHERE contact_id = $1 ORDER BY created_on DESC LIMIT 1`, tc.Contact.ID) + err = rt.DB.Get(&sessionID, `SELECT id FROM flows_flowsession WHERE contact_id = $1 ORDER BY created_on DESC LIMIT 1`, tc.Contact.ID) assert.NoError(t, err) - err = db.Get(&runID, `SELECT id FROM flows_flowrun WHERE contact_id = $1 ORDER BY created_on DESC LIMIT 1`, tc.Contact.ID) + err = rt.DB.Get(&runID, `SELECT id FROM flows_flowrun WHERE contact_id = $1 ORDER BY created_on DESC LIMIT 1`, tc.Contact.ID) assert.NoError(t, err) last = time.Now() } // should only have a single waiting session/run with no timeout - assertdb.Query(t, db, `SELECT count(*) FROM flows_flowsession WHERE status = 'W' AND contact_id = $1`, testdata.Cathy.ID).Returns(1) - assertdb.Query(t, db, `SELECT timeout_on FROM flows_flowsession WHERE status = 'W' AND contact_id = $1`, testdata.Cathy.ID).Returns(nil) - assertdb.Query(t, db, `SELECT count(*) FROM flows_flowrun WHERE status = 'W' AND contact_id = $1`, testdata.Cathy.ID).Returns(1) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM flows_flowsession WHERE status = 'W' AND contact_id = $1`, testdata.Cathy.ID).Returns(1) + assertdb.Query(t, rt.DB, `SELECT timeout_on FROM flows_flowsession WHERE status = 'W' AND contact_id = $1`, testdata.Cathy.ID).Returns(nil) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM flows_flowrun WHERE status = 'W' AND contact_id = $1`, testdata.Cathy.ID).Returns(1) // test the case of a run and session no longer being the most recent but somehow still active, expiration should still work - r, err := db.QueryContext(ctx, `SELECT id, session_id from flows_flowrun WHERE contact_id = $1 and status = 'I' order by created_on asc limit 1`, testdata.Cathy.ID) + r, err := rt.DB.QueryContext(ctx, `SELECT id, session_id from flows_flowrun WHERE contact_id = $1 and status = 'I' order by created_on asc limit 1`, testdata.Cathy.ID) 
assert.NoError(t, err) defer r.Close() r.Next() @@ -715,12 +693,12 @@ func TestTimedEvents(t *testing.T) { expiration := time.Now() // set both to be active (this requires us to disable the status change triggers) - db.MustExec(`ALTER TABLE flows_flowrun DISABLE TRIGGER temba_flowrun_status_change`) - db.MustExec(`ALTER TABLE flows_flowsession DISABLE TRIGGER temba_flowsession_status_change`) - db.MustExec(`UPDATE flows_flowrun SET status = 'W' WHERE id = $1`, runID) - db.MustExec(`UPDATE flows_flowsession SET status = 'W', wait_started_on = NOW(), wait_expires_on = $2 WHERE id = $1`, sessionID, expiration) - db.MustExec(`ALTER TABLE flows_flowrun ENABLE TRIGGER temba_flowrun_status_change`) - db.MustExec(`ALTER TABLE flows_flowsession ENABLE TRIGGER temba_flowsession_status_change`) + rt.DB.MustExec(`ALTER TABLE flows_flowrun DISABLE TRIGGER temba_flowrun_status_change`) + rt.DB.MustExec(`ALTER TABLE flows_flowsession DISABLE TRIGGER temba_flowsession_status_change`) + rt.DB.MustExec(`UPDATE flows_flowrun SET status = 'W' WHERE id = $1`, runID) + rt.DB.MustExec(`UPDATE flows_flowsession SET status = 'W', wait_started_on = NOW(), wait_expires_on = $2 WHERE id = $1`, sessionID, expiration) + rt.DB.MustExec(`ALTER TABLE flows_flowrun ENABLE TRIGGER temba_flowrun_status_change`) + rt.DB.MustExec(`ALTER TABLE flows_flowsession ENABLE TRIGGER temba_flowsession_status_change`) // try to expire the run task := handler.NewExpirationTask(testdata.Org1.ID, testdata.Cathy.ID, sessionID, expiration) @@ -731,6 +709,6 @@ func TestTimedEvents(t *testing.T) { task, err = queue.PopNextTask(rc, queue.HandlerQueue) assert.NoError(t, err) - err = handler.HandleEvent(ctx, rt, task) + err = tasks.Perform(ctx, rt, task) assert.NoError(t, err) } diff --git a/core/tasks/handler/queue.go b/core/tasks/handler/queue.go index 6520a4317..adeab2a19 100644 --- a/core/tasks/handler/queue.go +++ b/core/tasks/handler/queue.go @@ -9,6 +9,7 @@ import ( "github.com/nyaruka/gocommon/jsonx" 
"github.com/nyaruka/mailroom/core/models" "github.com/nyaruka/mailroom/core/queue" + "github.com/nyaruka/mailroom/core/tasks" "github.com/pkg/errors" ) @@ -17,6 +18,24 @@ func QueueHandleTask(rc redis.Conn, contactID models.ContactID, task *queue.Task return queueHandleTask(rc, contactID, task, false) } +// QueueTicketEvent queues a ticket event to be handled +func QueueTicketEvent(rc redis.Conn, contactID models.ContactID, evt *models.TicketEvent) error { + eventJSON := jsonx.MustMarshal(evt) + var task *queue.Task + + switch evt.EventType() { + case models.TicketEventTypeClosed: + task = &queue.Task{ + Type: TicketClosedEventType, + OrgID: int(evt.OrgID()), + Task: eventJSON, + QueuedOn: dates.Now(), + } + } + + return queueHandleTask(rc, contactID, task, false) +} + // queueHandleTask queues a single task for the passed in contact. `front` specifies whether the task // should be inserted in front of all other tasks for that contact func queueHandleTask(rc redis.Conn, contactID models.ContactID, task *queue.Task, front bool) error { @@ -38,38 +57,10 @@ func queueHandleTask(rc redis.Conn, contactID models.ContactID, task *queue.Task return errors.Wrapf(err, "error adding contact event") } - return queueContactTask(rc, models.OrgID(task.OrgID), contactID) -} - -// pushes a single contact task on our queue. Note this does not push the actual content of the task -// only that a task exists for the contact, addHandleTask should be used if the task has already been pushed -// off the contact specific queue. 
-func queueContactTask(rc redis.Conn, orgID models.OrgID, contactID models.ContactID) error { - // create our contact event - contactTask := &HandleEventTask{ContactID: contactID} - - // then add a handle task for that contact on our global handler queue - err := queue.AddTask(rc, queue.HandlerQueue, queue.HandleContactEvent, int(orgID), contactTask, queue.DefaultPriority) + // then add a handle task for that contact on our global handler queue to + err = tasks.Queue(rc, queue.HandlerQueue, models.OrgID(task.OrgID), &HandleContactEventTask{ContactID: contactID}, queue.DefaultPriority) if err != nil { return errors.Wrapf(err, "error adding handle event task") } return nil } - -// QueueTicketEvent queues a ticket event to be handled -func QueueTicketEvent(rc redis.Conn, contactID models.ContactID, evt *models.TicketEvent) error { - eventJSON := jsonx.MustMarshal(evt) - var task *queue.Task - - switch evt.EventType() { - case models.TicketEventTypeClosed: - task = &queue.Task{ - Type: TicketClosedEventType, - OrgID: int(evt.OrgID()), - Task: eventJSON, - QueuedOn: dates.Now(), - } - } - - return queueHandleTask(rc, contactID, task, false) -} diff --git a/core/tasks/incidents/end_incidents_test.go b/core/tasks/incidents/end_incidents_test.go index dfffb40f4..632a68938 100644 --- a/core/tasks/incidents/end_incidents_test.go +++ b/core/tasks/incidents/end_incidents_test.go @@ -21,7 +21,7 @@ import ( ) func TestEndIncidents(t *testing.T) { - ctx, rt, db, rp := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetData | testsuite.ResetRedis) @@ -42,22 +42,22 @@ func TestEndIncidents(t *testing.T) { node1.Record(rt, createWebhookEvents(10, time.Second*30)) // create incident for org 1 based on node which is still unhealthy - id1, err := models.IncidentWebhooksUnhealthy(ctx, db, rp, oa1, []flows.NodeUUID{"3c703019-8c92-4d28-9be0-a926a934486b"}) + id1, err := models.IncidentWebhooksUnhealthy(ctx, rt.DB, rt.RP, oa1, 
[]flows.NodeUUID{"3c703019-8c92-4d28-9be0-a926a934486b"}) require.NoError(t, err) node2 := &models.WebhookNode{UUID: "07d69080-475b-4395-aa96-ea6c28ea6cb6"} node2.Record(rt, createWebhookEvents(10, time.Second*1)) // create incident for org 2 based on node which is now healthy - id2, err := models.IncidentWebhooksUnhealthy(ctx, db, rp, oa2, []flows.NodeUUID{"07d69080-475b-4395-aa96-ea6c28ea6cb6"}) + id2, err := models.IncidentWebhooksUnhealthy(ctx, rt.DB, rt.RP, oa2, []flows.NodeUUID{"07d69080-475b-4395-aa96-ea6c28ea6cb6"}) require.NoError(t, err) err = incidents.EndIncidents(ctx, rt) assert.NoError(t, err) - assertdb.Query(t, db, `SELECT count(*) FROM notifications_incident WHERE id = $1 AND ended_on IS NULL`, id1).Returns(1) - assertdb.Query(t, db, `SELECT count(*) FROM notifications_incident WHERE id = $1 AND ended_on IS NOT NULL`, id2).Returns(1) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM notifications_incident WHERE id = $1 AND ended_on IS NULL`, id1).Returns(1) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM notifications_incident WHERE id = $1 AND ended_on IS NOT NULL`, id2).Returns(1) - assertredis.SMembers(t, rp, fmt.Sprintf("incident:%d:nodes", id1), []string{"3c703019-8c92-4d28-9be0-a926a934486b"}) - assertredis.SMembers(t, rp, fmt.Sprintf("incident:%d:nodes", id2), []string{}) // healthy node removed + assertredis.SMembers(t, rt.RP, fmt.Sprintf("incident:%d:nodes", id1), []string{"3c703019-8c92-4d28-9be0-a926a934486b"}) + assertredis.SMembers(t, rt.RP, fmt.Sprintf("incident:%d:nodes", id2), []string{}) // healthy node removed } diff --git a/core/tasks/interrupts/interrupt_channel.go b/core/tasks/interrupts/interrupt_channel.go index ddb8bb787..abbe46249 100644 --- a/core/tasks/interrupts/interrupt_channel.go +++ b/core/tasks/interrupts/interrupt_channel.go @@ -23,6 +23,10 @@ type InterruptChannelTask struct { ChannelID models.ChannelID `json:"channel_id"` } +func (t *InterruptChannelTask) Type() string { + return TypeInterruptChannel +} + // 
Perform implements tasks.Task func (t *InterruptChannelTask) Perform(ctx context.Context, rt *runtime.Runtime, orgID models.OrgID) error { db := rt.DB diff --git a/core/tasks/interrupts/interrupt_channel_test.go b/core/tasks/interrupts/interrupt_channel_test.go index 628d9d069..5d0c7dd54 100644 --- a/core/tasks/interrupts/interrupt_channel_test.go +++ b/core/tasks/interrupts/interrupt_channel_test.go @@ -6,77 +6,73 @@ import ( "github.com/nyaruka/gocommon/dbutil/assertdb" "github.com/nyaruka/mailroom/core/models" + "github.com/nyaruka/mailroom/core/queue" + "github.com/nyaruka/mailroom/core/tasks" "github.com/nyaruka/mailroom/core/tasks/interrupts" "github.com/nyaruka/mailroom/core/tasks/msgs" "github.com/nyaruka/mailroom/testsuite" "github.com/nyaruka/mailroom/testsuite/testdata" - "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) func TestInterruptChannel(t *testing.T) { - ctx, rt, db, rp := testsuite.Get() - rc := rp.Get() + ctx, rt := testsuite.Runtime() + rc := rt.RP.Get() defer rc.Close() - defer testsuite.Reset(testsuite.ResetData) + defer testsuite.Reset(testsuite.ResetData | testsuite.ResetRedis) insertSession := func(org *testdata.Org, contact *testdata.Contact, flow *testdata.Flow, connectionID models.CallID) models.SessionID { - sessionID := testdata.InsertWaitingSession(db, org, contact, models.FlowTypeMessaging, flow, connectionID, time.Now(), time.Now(), false, nil) + sessionID := testdata.InsertWaitingSession(rt, org, contact, models.FlowTypeMessaging, flow, connectionID, time.Now(), time.Now(), false, nil) // give session one waiting run too - testdata.InsertFlowRun(db, org, sessionID, contact, flow, models.RunStatusWaiting) + testdata.InsertFlowRun(rt, org, sessionID, contact, flow, models.RunStatusWaiting) return sessionID } // twilio call - twilioCallID := testdata.InsertCall(db, testdata.Org1, testdata.TwilioChannel, testdata.Alexandria) + twilioCallID := testdata.InsertCall(rt, testdata.Org1, testdata.TwilioChannel, 
testdata.Alexandria) // vonage call - vonageCallID := testdata.InsertCall(db, testdata.Org1, testdata.VonageChannel, testdata.George) + vonageCallID := testdata.InsertCall(rt, testdata.Org1, testdata.VonageChannel, testdata.George) sessionID1 := insertSession(testdata.Org1, testdata.Cathy, testdata.Favorites, models.NilCallID) sessionID2 := insertSession(testdata.Org1, testdata.George, testdata.Favorites, vonageCallID) sessionID3 := insertSession(testdata.Org1, testdata.Alexandria, testdata.Favorites, twilioCallID) - testdata.InsertOutgoingMsg(db, testdata.Org1, testdata.TwilioChannel, testdata.Cathy, "how can we help", nil, models.MsgStatusPending, false) - testdata.InsertOutgoingMsg(db, testdata.Org1, testdata.VonageChannel, testdata.Bob, "this failed", nil, models.MsgStatusQueued, false) - testdata.InsertOutgoingMsg(db, testdata.Org1, testdata.VonageChannel, testdata.George, "no URN", nil, models.MsgStatusPending, false) - testdata.InsertOutgoingMsg(db, testdata.Org1, testdata.VonageChannel, testdata.George, "no URN", nil, models.MsgStatusErrored, false) - testdata.InsertOutgoingMsg(db, testdata.Org1, testdata.VonageChannel, testdata.George, "no URN", nil, models.MsgStatusFailed, false) + testdata.InsertOutgoingMsg(rt, testdata.Org1, testdata.TwilioChannel, testdata.Cathy, "how can we help", nil, models.MsgStatusPending, false) + testdata.InsertOutgoingMsg(rt, testdata.Org1, testdata.VonageChannel, testdata.Bob, "this failed", nil, models.MsgStatusQueued, false) + testdata.InsertOutgoingMsg(rt, testdata.Org1, testdata.VonageChannel, testdata.George, "no URN", nil, models.MsgStatusPending, false) + testdata.InsertOutgoingMsg(rt, testdata.Org1, testdata.VonageChannel, testdata.George, "no URN", nil, models.MsgStatusErrored, false) + testdata.InsertOutgoingMsg(rt, testdata.Org1, testdata.VonageChannel, testdata.George, "no URN", nil, models.MsgStatusFailed, false) - assertdb.Query(t, db, `SELECT status FROM flows_flowsession WHERE id = $1`, sessionID1).Returns("W") 
- assertdb.Query(t, db, `SELECT status FROM flows_flowsession WHERE id = $1`, sessionID2).Returns("W") - assertdb.Query(t, db, `SELECT status FROM flows_flowsession WHERE id = $1`, sessionID3).Returns("W") + assertdb.Query(t, rt.DB, `SELECT status FROM flows_flowsession WHERE id = $1`, sessionID1).Returns("W") + assertdb.Query(t, rt.DB, `SELECT status FROM flows_flowsession WHERE id = $1`, sessionID2).Returns("W") + assertdb.Query(t, rt.DB, `SELECT status FROM flows_flowsession WHERE id = $1`, sessionID3).Returns("W") - assertdb.Query(t, db, `SELECT count(*) FROM msgs_msg WHERE status = 'F' and failed_reason = 'R' and channel_id = $1`, testdata.VonageChannel.ID).Returns(0) - assertdb.Query(t, db, `SELECT count(*) FROM msgs_msg WHERE status = 'F' and channel_id = $1`, testdata.VonageChannel.ID).Returns(1) - assertdb.Query(t, db, `SELECT count(*) FROM msgs_msg WHERE status = 'F' and channel_id = $1`, testdata.TwilioChannel.ID).Returns(0) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM msgs_msg WHERE status = 'F' and failed_reason = 'R' and channel_id = $1`, testdata.VonageChannel.ID).Returns(0) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM msgs_msg WHERE status = 'F' and channel_id = $1`, testdata.VonageChannel.ID).Returns(1) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM msgs_msg WHERE status = 'F' and channel_id = $1`, testdata.TwilioChannel.ID).Returns(0) - // twilio channel task - task := &interrupts.InterruptChannelTask{ - ChannelID: testdata.TwilioChannel.ID, - } - - // execute it - err := task.Perform(ctx, rt, testdata.Org1.ID) - assert.NoError(t, err) + // queue and perform a task to interrupt the Twilio channel + tasks.Queue(rc, queue.BatchQueue, testdata.Org1.ID, &interrupts.InterruptChannelTask{ChannelID: testdata.TwilioChannel.ID}, queue.DefaultPriority) + testsuite.FlushTasks(t, rt) - assertdb.Query(t, db, `SELECT count(*) FROM msgs_msg WHERE status = 'F' and channel_id = $1`, testdata.VonageChannel.ID).Returns(1) - assertdb.Query(t, db, `SELECT 
count(*) FROM msgs_msg WHERE status = 'F' and failed_reason = 'R' and channel_id = $1`, testdata.VonageChannel.ID).Returns(0) - assertdb.Query(t, db, `SELECT count(*) FROM msgs_msg WHERE status = 'F' and failed_reason = 'R' and channel_id = $1`, testdata.TwilioChannel.ID).Returns(1) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM msgs_msg WHERE status = 'F' and channel_id = $1`, testdata.VonageChannel.ID).Returns(1) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM msgs_msg WHERE status = 'F' and failed_reason = 'R' and channel_id = $1`, testdata.VonageChannel.ID).Returns(0) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM msgs_msg WHERE status = 'F' and failed_reason = 'R' and channel_id = $1`, testdata.TwilioChannel.ID).Returns(1) - assertdb.Query(t, db, `SELECT status FROM flows_flowsession WHERE id = $1`, sessionID1).Returns("W") - assertdb.Query(t, db, `SELECT status FROM flows_flowsession WHERE id = $1`, sessionID2).Returns("W") - assertdb.Query(t, db, `SELECT status FROM flows_flowsession WHERE id = $1`, sessionID3).Returns("I") + assertdb.Query(t, rt.DB, `SELECT status FROM flows_flowsession WHERE id = $1`, sessionID1).Returns("W") + assertdb.Query(t, rt.DB, `SELECT status FROM flows_flowsession WHERE id = $1`, sessionID2).Returns("W") + assertdb.Query(t, rt.DB, `SELECT status FROM flows_flowsession WHERE id = $1`, sessionID3).Returns("I") - testdata.InsertErroredOutgoingMsg(db, testdata.Org1, testdata.TwilioChannel, testdata.Cathy, "Hi", 1, time.Now().Add(-time.Hour), false) - testdata.InsertErroredOutgoingMsg(db, testdata.Org1, testdata.VonageChannel, testdata.Bob, "Hi", 2, time.Now().Add(-time.Minute), false) - testdata.InsertErroredOutgoingMsg(db, testdata.Org1, testdata.VonageChannel, testdata.Bob, "Hi", 2, time.Now().Add(-time.Minute), false) - testdata.InsertErroredOutgoingMsg(db, testdata.Org1, testdata.VonageChannel, testdata.Bob, "Hi", 2, time.Now().Add(-time.Minute), true) // high priority + testdata.InsertErroredOutgoingMsg(rt, testdata.Org1, 
testdata.TwilioChannel, testdata.Cathy, "Hi", 1, time.Now().Add(-time.Hour), false) + testdata.InsertErroredOutgoingMsg(rt, testdata.Org1, testdata.VonageChannel, testdata.Bob, "Hi", 2, time.Now().Add(-time.Minute), false) + testdata.InsertErroredOutgoingMsg(rt, testdata.Org1, testdata.VonageChannel, testdata.Bob, "Hi", 2, time.Now().Add(-time.Minute), false) + testdata.InsertErroredOutgoingMsg(rt, testdata.Org1, testdata.VonageChannel, testdata.Bob, "Hi", 2, time.Now().Add(-time.Minute), true) // high priority // just to create courier queues - err = msgs.RetryErroredMessages(ctx, rt) + err := msgs.RetryErroredMessages(ctx, rt) require.NoError(t, err) testsuite.AssertCourierQueues(t, map[string][]int{ @@ -85,22 +81,17 @@ func TestInterruptChannel(t *testing.T) { "msgs:19012bfd-3ce3-4cae-9bb9-76cf92c73d49|10/1": {1}, // vonage, high priority }) - // vonage channel task - task = &interrupts.InterruptChannelTask{ - ChannelID: testdata.VonageChannel.ID, - } - - // execute it - err = task.Perform(ctx, rt, testdata.Org1.ID) - assert.NoError(t, err) + // queue and perform a task to interrupt the Vonage channel + tasks.Queue(rc, queue.BatchQueue, testdata.Org1.ID, &interrupts.InterruptChannelTask{ChannelID: testdata.VonageChannel.ID}, queue.DefaultPriority) + testsuite.FlushTasks(t, rt) - assertdb.Query(t, db, `SELECT count(*) FROM msgs_msg WHERE status = 'F' and failed_reason = 'R' and channel_id = $1`, testdata.VonageChannel.ID).Returns(6) - assertdb.Query(t, db, `SELECT count(*) FROM msgs_msg WHERE status = 'F' and channel_id = $1`, testdata.VonageChannel.ID).Returns(7) - assertdb.Query(t, db, `SELECT count(*) FROM msgs_msg WHERE status = 'F' and failed_reason = 'R' and channel_id = $1`, testdata.TwilioChannel.ID).Returns(1) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM msgs_msg WHERE status = 'F' and failed_reason = 'R' and channel_id = $1`, testdata.VonageChannel.ID).Returns(6) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM msgs_msg WHERE status = 'F' and 
channel_id = $1`, testdata.VonageChannel.ID).Returns(7) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM msgs_msg WHERE status = 'F' and failed_reason = 'R' and channel_id = $1`, testdata.TwilioChannel.ID).Returns(1) - assertdb.Query(t, db, `SELECT status FROM flows_flowsession WHERE id = $1`, sessionID1).Returns("W") - assertdb.Query(t, db, `SELECT status FROM flows_flowsession WHERE id = $1`, sessionID2).Returns("I") - assertdb.Query(t, db, `SELECT status FROM flows_flowsession WHERE id = $1`, sessionID3).Returns("I") + assertdb.Query(t, rt.DB, `SELECT status FROM flows_flowsession WHERE id = $1`, sessionID1).Returns("W") + assertdb.Query(t, rt.DB, `SELECT status FROM flows_flowsession WHERE id = $1`, sessionID2).Returns("I") + assertdb.Query(t, rt.DB, `SELECT status FROM flows_flowsession WHERE id = $1`, sessionID3).Returns("I") // vonage queues should be cleared testsuite.AssertCourierQueues(t, map[string][]int{ diff --git a/core/tasks/interrupts/interrupt_sessions.go b/core/tasks/interrupts/interrupt_sessions.go index 787474cdd..74b6313bb 100644 --- a/core/tasks/interrupts/interrupt_sessions.go +++ b/core/tasks/interrupts/interrupt_sessions.go @@ -24,6 +24,10 @@ type InterruptSessionsTask struct { FlowIDs []models.FlowID `json:"flow_ids,omitempty"` } +func (t *InterruptSessionsTask) Type() string { + return TypeInterruptSessions +} + // Timeout is the maximum amount of time the task can run for func (t *InterruptSessionsTask) Timeout() time.Duration { return time.Hour diff --git a/core/tasks/interrupts/interrupt_sessions_test.go b/core/tasks/interrupts/interrupt_sessions_test.go index 5183947a4..0d2bd2ef5 100644 --- a/core/tasks/interrupts/interrupt_sessions_test.go +++ b/core/tasks/interrupts/interrupt_sessions_test.go @@ -14,15 +14,15 @@ import ( ) func TestInterrupts(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetData) insertSession := func(org *testdata.Org, contact 
*testdata.Contact, flow *testdata.Flow, connectionID models.CallID) models.SessionID { - sessionID := testdata.InsertWaitingSession(db, org, contact, models.FlowTypeMessaging, flow, connectionID, time.Now(), time.Now(), false, nil) + sessionID := testdata.InsertWaitingSession(rt, org, contact, models.FlowTypeMessaging, flow, connectionID, time.Now(), time.Now(), false, nil) // give session one waiting run too - testdata.InsertFlowRun(db, org, sessionID, contact, flow, models.RunStatusWaiting) + testdata.InsertFlowRun(rt, org, sessionID, contact, flow, models.RunStatusWaiting) return sessionID } @@ -60,10 +60,10 @@ func TestInterrupts(t *testing.T) { for i, tc := range tcs { // mark any remaining flow sessions as inactive - db.MustExec(`UPDATE flows_flowsession SET status='C', ended_on=NOW() WHERE status = 'W';`) + rt.DB.MustExec(`UPDATE flows_flowsession SET status='C', ended_on=NOW() WHERE status = 'W';`) // twilio call - twilioCallID := testdata.InsertCall(db, testdata.Org1, testdata.TwilioChannel, testdata.Alexandria) + twilioCallID := testdata.InsertCall(rt, testdata.Org1, testdata.TwilioChannel, testdata.Alexandria) sessionIDs := make([]models.SessionID, 5) @@ -90,12 +90,12 @@ func TestInterrupts(t *testing.T) { // check session statuses are as expected for j, sID := range sessionIDs { var status string - err := db.Get(&status, `SELECT status FROM flows_flowsession WHERE id = $1`, sID) + err := rt.DB.Get(&status, `SELECT status FROM flows_flowsession WHERE id = $1`, sID) assert.NoError(t, err) assert.Equal(t, tc.expectedStatuses[j], status, "%d: status mismatch for session #%d", i, j) // check for runs with a different status to the session - assertdb.Query(t, db, `SELECT count(*) FROM flows_flowrun WHERE session_id = $1 AND status != $2`, sID, tc.expectedStatuses[j]). + assertdb.Query(t, rt.DB, `SELECT count(*) FROM flows_flowrun WHERE session_id = $1 AND status != $2`, sID, tc.expectedStatuses[j]). 
Returns(0, "%d: unexpected un-interrupted runs for session #%d", i, j) } } diff --git a/core/tasks/ivr/cron.go b/core/tasks/ivr/cron.go index 8022d7594..2716e222a 100644 --- a/core/tasks/ivr/cron.go +++ b/core/tasks/ivr/cron.go @@ -83,7 +83,7 @@ func RetryCalls(ctx context.Context, rt *runtime.Runtime) error { } // log any error inserting our channel logs, but continue - if err := models.InsertChannelLogs(ctx, rt.DB, clogs); err != nil { + if err := models.InsertChannelLogs(ctx, rt, clogs); err != nil { logrus.WithError(err).Error("error inserting channel logs") } diff --git a/core/tasks/ivr/cron_test.go b/core/tasks/ivr/cron_test.go index 9e7c75b94..50d3be89c 100644 --- a/core/tasks/ivr/cron_test.go +++ b/core/tasks/ivr/cron_test.go @@ -1,23 +1,24 @@ package ivr_test import ( - "encoding/json" "testing" "github.com/nyaruka/gocommon/dbutil/assertdb" "github.com/nyaruka/mailroom/core/ivr" "github.com/nyaruka/mailroom/core/models" "github.com/nyaruka/mailroom/core/queue" + "github.com/nyaruka/mailroom/core/tasks" ivrtasks "github.com/nyaruka/mailroom/core/tasks/ivr" "github.com/nyaruka/mailroom/core/tasks/starts" "github.com/nyaruka/mailroom/testsuite" "github.com/nyaruka/mailroom/testsuite/testdata" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) func TestRetries(t *testing.T) { - ctx, rt, db, rp := testsuite.Get() - rc := rp.Get() + ctx, rt := testsuite.Runtime() + rc := rt.RP.Get() defer rc.Close() defer testsuite.Reset(testsuite.ResetAll) @@ -26,50 +27,43 @@ func TestRetries(t *testing.T) { ivr.RegisterServiceType(models.ChannelType("ZZ"), NewMockProvider) // update our twilio channel to be of type 'ZZ' and set max_concurrent_events to 1 - db.MustExec(`UPDATE channels_channel SET channel_type = 'ZZ', config = '{"max_concurrent_events": 1}' WHERE id = $1`, testdata.TwilioChannel.ID) + rt.DB.MustExec(`UPDATE channels_channel SET channel_type = 'ZZ', config = '{"max_concurrent_events": 1}' WHERE id = $1`, testdata.TwilioChannel.ID) // 
create a flow start for cathy start := models.NewFlowStart(testdata.Org1.ID, models.StartTypeTrigger, models.FlowTypeVoice, testdata.IVRFlow.ID). WithContactIDs([]models.ContactID{testdata.Cathy.ID}) - // call our master starter - err := starts.CreateFlowBatches(ctx, rt, start) - assert.NoError(t, err) - - // should have one task in our ivr queue - task, err := queue.PopNextTask(rc, queue.HandlerQueue) - assert.NoError(t, err) - batch := &models.FlowStartBatch{} - err = json.Unmarshal(task.Task, batch) - assert.NoError(t, err) + err := tasks.Queue(rc, queue.BatchQueue, testdata.Org1.ID, &starts.StartFlowTask{FlowStart: start}, queue.DefaultPriority) + require.NoError(t, err) service.callError = nil service.callID = ivr.CallID("call1") - err = ivrtasks.HandleFlowStartBatch(ctx, rt, batch) - assert.NoError(t, err) - assertdb.Query(t, db, `SELECT COUNT(*) FROM ivr_call WHERE contact_id = $1 AND status = $2 AND external_id = $3`, + + testsuite.FlushTasks(t, rt) + + assertdb.Query(t, rt.DB, `SELECT COUNT(*) FROM ivr_call WHERE contact_id = $1 AND status = $2 AND external_id = $3`, testdata.Cathy.ID, models.CallStatusWired, "call1").Returns(1) // change our call to be errored instead of wired - db.MustExec(`UPDATE ivr_call SET status = 'E', next_attempt = NOW() WHERE external_id = 'call1';`) + rt.DB.MustExec(`UPDATE ivr_call SET status = 'E', next_attempt = NOW() WHERE external_id = 'call1';`) // fire our retries err = ivrtasks.RetryCalls(ctx, rt) assert.NoError(t, err) // should now be in wired state - assertdb.Query(t, db, `SELECT COUNT(*) FROM ivr_call WHERE contact_id = $1 AND status = $2 AND external_id = $3`, + assertdb.Query(t, rt.DB, `SELECT COUNT(*) FROM ivr_call WHERE contact_id = $1 AND status = $2 AND external_id = $3`, testdata.Cathy.ID, models.CallStatusWired, "call1").Returns(1) // back to retry and make the channel inactive - db.MustExec(`UPDATE ivr_call SET status = 'E', next_attempt = NOW() WHERE external_id = 'call1';`) - db.MustExec(`UPDATE 
channels_channel SET is_active = FALSE WHERE id = $1`, testdata.TwilioChannel.ID) + rt.DB.MustExec(`UPDATE ivr_call SET status = 'E', next_attempt = NOW() WHERE external_id = 'call1';`) + rt.DB.MustExec(`UPDATE channels_channel SET is_active = FALSE WHERE id = $1`, testdata.TwilioChannel.ID) models.FlushCache() err = ivrtasks.RetryCalls(ctx, rt) assert.NoError(t, err) // this time should be failed - assertdb.Query(t, db, `SELECT COUNT(*) FROM ivr_call WHERE contact_id = $1 AND status = $2 AND external_id = $3`, + assertdb.Query(t, rt.DB, `SELECT COUNT(*) FROM ivr_call WHERE contact_id = $1 AND status = $2 AND external_id = $3`, testdata.Cathy.ID, models.CallStatusFailed, "call1").Returns(1) } diff --git a/core/tasks/ivr/start_ivr_flow_batch.go b/core/tasks/ivr/start_ivr_flow_batch.go new file mode 100644 index 000000000..3bd8daf57 --- /dev/null +++ b/core/tasks/ivr/start_ivr_flow_batch.go @@ -0,0 +1,90 @@ +package ivr + +import ( + "context" + "time" + + "github.com/nyaruka/mailroom/core/ivr" + "github.com/nyaruka/mailroom/core/models" + "github.com/nyaruka/mailroom/core/tasks" + "github.com/nyaruka/mailroom/runtime" + "github.com/pkg/errors" + "github.com/sirupsen/logrus" +) + +const TypeStartIVRFlowBatch = "start_ivr_flow_batch" + +func init() { + tasks.RegisterType(TypeStartIVRFlowBatch, func() tasks.Task { return &StartIVRFlowBatchTask{} }) +} + +// StartIVRFlowBatchTask is the start IVR flow batch task +type StartIVRFlowBatchTask struct { + *models.FlowStartBatch +} + +func (t *StartIVRFlowBatchTask) Type() string { + return TypeStartIVRFlowBatch +} + +// Timeout is the maximum amount of time the task can run for +func (t *StartIVRFlowBatchTask) Timeout() time.Duration { + return time.Minute * 5 +} + +func (t *StartIVRFlowBatchTask) Perform(ctx context.Context, rt *runtime.Runtime, orgID models.OrgID) error { + return handleFlowStartBatch(ctx, rt, t.FlowStartBatch) +} + +// starts a batch of contacts in an IVR flow +func handleFlowStartBatch(ctx 
context.Context, rt *runtime.Runtime, batch *models.FlowStartBatch) error { + // load our org assets + oa, err := models.GetOrgAssets(ctx, rt, batch.OrgID) + if err != nil { + return errors.Wrapf(err, "error loading org assets for org: %d", batch.OrgID) + } + + // ok, we can initiate calls for the remaining contacts + contacts, err := models.LoadContacts(ctx, rt.ReadonlyDB, oa, batch.ContactIDs) + if err != nil { + return errors.Wrapf(err, "error loading contacts") + } + + // for each contacts, request a call start + for _, contact := range contacts { + start := time.Now() + + ctx, cancel := context.WithTimeout(ctx, time.Minute) + session, err := ivr.RequestCall(ctx, rt, oa, batch, contact) + cancel() + if err != nil { + logrus.WithError(err).Errorf("error starting ivr flow for contact: %d and flow: %d", contact.ID(), batch.FlowID) + continue + } + if session == nil { + logrus.WithFields(logrus.Fields{ + "elapsed": time.Since(start), + "contact_id": contact.ID(), + "start_id": batch.StartID, + }).Info("call start skipped, no suitable channel") + continue + } + logrus.WithFields(logrus.Fields{ + "elapsed": time.Since(start), + "contact_id": contact.ID(), + "status": session.Status(), + "start_id": batch.StartID, + "external_id": session.ExternalID(), + }).Info("requested call for contact") + } + + // if this is a last batch, mark our start as started + if batch.IsLast { + err := models.MarkStartComplete(ctx, rt.DB, batch.StartID) + if err != nil { + return errors.Wrapf(err, "error trying to set batch as complete") + } + } + + return nil +} diff --git a/core/tasks/ivr/worker_test.go b/core/tasks/ivr/start_ivr_flow_batch_test.go similarity index 70% rename from core/tasks/ivr/worker_test.go rename to core/tasks/ivr/start_ivr_flow_batch_test.go index d06353188..e1042acce 100644 --- a/core/tasks/ivr/worker_test.go +++ b/core/tasks/ivr/start_ivr_flow_batch_test.go @@ -2,7 +2,6 @@ package ivr_test import ( "context" - "encoding/json" "net/http" "testing" @@ -12,19 +11,18 
@@ import ( "github.com/nyaruka/mailroom/core/ivr" "github.com/nyaruka/mailroom/core/models" "github.com/nyaruka/mailroom/core/queue" - ivrtasks "github.com/nyaruka/mailroom/core/tasks/ivr" + "github.com/nyaruka/mailroom/core/tasks" "github.com/nyaruka/mailroom/core/tasks/starts" "github.com/nyaruka/mailroom/runtime" "github.com/nyaruka/mailroom/testsuite" "github.com/nyaruka/mailroom/testsuite/testdata" - "github.com/pkg/errors" - "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) func TestIVR(t *testing.T) { - ctx, rt, db, rp := testsuite.Get() - rc := rp.Get() + _, rt := testsuite.Runtime() + rc := rt.RP.Get() defer rc.Close() defer testsuite.Reset(testsuite.ResetAll) @@ -33,40 +31,43 @@ func TestIVR(t *testing.T) { ivr.RegisterServiceType(models.ChannelType("ZZ"), NewMockProvider) // update our twilio channel to be of type 'ZZ' and set max_concurrent_events to 1 - db.MustExec(`UPDATE channels_channel SET channel_type = 'ZZ', config = '{"max_concurrent_events": 1}' WHERE id = $1`, testdata.TwilioChannel.ID) + rt.DB.MustExec(`UPDATE channels_channel SET channel_type = 'ZZ', config = '{"max_concurrent_events": 1}' WHERE id = $1`, testdata.TwilioChannel.ID) // create a flow start for cathy start := models.NewFlowStart(testdata.Org1.ID, models.StartTypeTrigger, models.FlowTypeVoice, testdata.IVRFlow.ID). 
WithContactIDs([]models.ContactID{testdata.Cathy.ID}) - // call our master starter - err := starts.CreateFlowBatches(ctx, rt, start) - assert.NoError(t, err) + service.callError = errors.Errorf("unable to create call") - // should have one task in our ivr queue - task, err := queue.PopNextTask(rc, queue.HandlerQueue) - assert.NoError(t, err) - batch := &models.FlowStartBatch{} - err = json.Unmarshal(task.Task, batch) - assert.NoError(t, err) + err := tasks.Queue(rc, queue.BatchQueue, testdata.Org1.ID, &starts.StartFlowTask{FlowStart: start}, queue.DefaultPriority) + require.NoError(t, err) - service.callError = errors.Errorf("unable to create call") - err = ivrtasks.HandleFlowStartBatch(ctx, rt, batch) - assert.NoError(t, err) - assertdb.Query(t, db, `SELECT COUNT(*) FROM ivr_call WHERE contact_id = $1 AND status = $2`, testdata.Cathy.ID, models.CallStatusFailed).Returns(1) + testsuite.FlushTasks(t, rt) + // should have one call in a failed state + assertdb.Query(t, rt.DB, `SELECT COUNT(*) FROM ivr_call WHERE contact_id = $1 AND status = $2`, testdata.Cathy.ID, models.CallStatusFailed).Returns(1) + + // re-queue the start and try again service.callError = nil service.callID = ivr.CallID("call1") - err = ivrtasks.HandleFlowStartBatch(ctx, rt, batch) - assert.NoError(t, err) - assertdb.Query(t, db, `SELECT COUNT(*) FROM ivr_call WHERE contact_id = $1 AND status = $2 AND external_id = $3`, testdata.Cathy.ID, models.CallStatusWired, "call1").Returns(1) + + err = tasks.Queue(rc, queue.BatchQueue, testdata.Org1.ID, &starts.StartFlowTask{FlowStart: start}, queue.DefaultPriority) + require.NoError(t, err) + + testsuite.FlushTasks(t, rt) + + assertdb.Query(t, rt.DB, `SELECT COUNT(*) FROM ivr_call WHERE contact_id = $1 AND status = $2 AND external_id = $3`, testdata.Cathy.ID, models.CallStatusWired, "call1").Returns(1) // trying again should put us in a throttled state (queued) service.callError = nil service.callID = ivr.CallID("call1") - err = 
ivrtasks.HandleFlowStartBatch(ctx, rt, batch) - assert.NoError(t, err) - assertdb.Query(t, db, `SELECT COUNT(*) FROM ivr_call WHERE contact_id = $1 AND status = $2 AND next_attempt IS NOT NULL;`, testdata.Cathy.ID, models.CallStatusQueued).Returns(1) + + err = tasks.Queue(rc, queue.BatchQueue, testdata.Org1.ID, &starts.StartFlowTask{FlowStart: start}, queue.DefaultPriority) + require.NoError(t, err) + + testsuite.FlushTasks(t, rt) + + assertdb.Query(t, rt.DB, `SELECT COUNT(*) FROM ivr_call WHERE contact_id = $1 AND status = $2 AND next_attempt IS NOT NULL;`, testdata.Cathy.ID, models.CallStatusQueued).Returns(1) } var service = &MockService{} diff --git a/core/tasks/ivr/worker.go b/core/tasks/ivr/worker.go deleted file mode 100644 index 8d5f5e71b..000000000 --- a/core/tasks/ivr/worker.go +++ /dev/null @@ -1,124 +0,0 @@ -package ivr - -import ( - "context" - "encoding/json" - "time" - - "github.com/nyaruka/mailroom" - "github.com/nyaruka/mailroom/core/ivr" - "github.com/nyaruka/mailroom/core/models" - "github.com/nyaruka/mailroom/core/queue" - "github.com/nyaruka/mailroom/runtime" - "github.com/pkg/errors" - "github.com/sirupsen/logrus" -) - -func init() { - mailroom.AddTaskFunction(queue.StartIVRFlowBatch, handleFlowStartTask) -} - -func handleFlowStartTask(ctx context.Context, rt *runtime.Runtime, task *queue.Task) error { - // decode our task body - if task.Type != queue.StartIVRFlowBatch { - return errors.Errorf("unknown event type passed to ivr worker: %s", task.Type) - } - batch := &models.FlowStartBatch{} - err := json.Unmarshal(task.Task, batch) - if err != nil { - return errors.Wrapf(err, "error unmarshalling flow start batch: %s", string(task.Task)) - } - - return HandleFlowStartBatch(ctx, rt, batch) -} - -// HandleFlowStartBatch starts a batch of contacts in an IVR flow -func HandleFlowStartBatch(bg context.Context, rt *runtime.Runtime, batch *models.FlowStartBatch) error { - ctx, cancel := context.WithTimeout(bg, time.Minute*5) - defer cancel() - - // 
contacts we will exclude either because they are in a flow or have already been in this one - exclude := make(map[models.ContactID]bool, 5) - - // filter out anybody who has has a flow run in this flow if appropriate - if batch.ExcludeStartedPreviously() { - // find all participants that have been in this flow - started, err := models.FindFlowStartedOverlap(ctx, rt.DB, batch.FlowID(), batch.ContactIDs()) - if err != nil { - return errors.Wrapf(err, "error finding others started flow: %d", batch.FlowID()) - } - for _, c := range started { - exclude[c] = true - } - } - - // filter out our list of contacts to only include those that should be started - if batch.ExcludeInAFlow() { - // find all participants active in other sessions - active, err := models.FilterByWaitingSession(ctx, rt.DB, batch.ContactIDs()) - if err != nil { - return errors.Wrapf(err, "error finding other active sessions: %d", batch.FlowID()) - } - for _, c := range active { - exclude[c] = true - } - } - - // filter into our final list of contacts - contactIDs := make([]models.ContactID, 0, len(batch.ContactIDs())) - for _, c := range batch.ContactIDs() { - if !exclude[c] { - contactIDs = append(contactIDs, c) - } - } - - // load our org assets - oa, err := models.GetOrgAssets(ctx, rt, batch.OrgID()) - if err != nil { - return errors.Wrapf(err, "error loading org assets for org: %d", batch.OrgID()) - } - - // ok, we can initiate calls for the remaining contacts - contacts, err := models.LoadContacts(ctx, rt.ReadonlyDB, oa, contactIDs) - if err != nil { - return errors.Wrapf(err, "error loading contacts") - } - - // for each contacts, request a call start - for _, contact := range contacts { - start := time.Now() - - ctx, cancel := context.WithTimeout(bg, time.Minute) - session, err := ivr.RequestCall(ctx, rt, oa, batch, contact) - cancel() - if err != nil { - logrus.WithError(err).Errorf("error starting ivr flow for contact: %d and flow: %d", contact.ID(), batch.FlowID()) - continue - } - if session 
== nil { - logrus.WithFields(logrus.Fields{ - "elapsed": time.Since(start), - "contact_id": contact.ID(), - "start_id": batch.StartID(), - }).Info("call start skipped, no suitable channel") - continue - } - logrus.WithFields(logrus.Fields{ - "elapsed": time.Since(start), - "contact_id": contact.ID(), - "status": session.Status(), - "start_id": batch.StartID(), - "external_id": session.ExternalID(), - }).Info("requested call for contact") - } - - // if this is a last batch, mark our start as started - if batch.IsLast() { - err := models.MarkStartComplete(bg, rt.DB, batch.StartID()) - if err != nil { - return errors.Wrapf(err, "error trying to set batch as complete") - } - } - - return nil -} diff --git a/core/tasks/msgs/retries.go b/core/tasks/msgs/retry_errored_messages.go similarity index 100% rename from core/tasks/msgs/retries.go rename to core/tasks/msgs/retry_errored_messages.go diff --git a/core/tasks/msgs/retries_test.go b/core/tasks/msgs/retry_errored_messages_test.go similarity index 63% rename from core/tasks/msgs/retries_test.go rename to core/tasks/msgs/retry_errored_messages_test.go index 46c3b8ee3..b87e075dd 100644 --- a/core/tasks/msgs/retries_test.go +++ b/core/tasks/msgs/retry_errored_messages_test.go @@ -13,8 +13,8 @@ import ( ) func TestRetryErroredMessages(t *testing.T) { - ctx, rt, db, rp := testsuite.Get() - rc := rp.Get() + ctx, rt := testsuite.Runtime() + rc := rt.RP.Get() defer rc.Close() defer testsuite.Reset(testsuite.ResetData | testsuite.ResetRedis) @@ -26,29 +26,29 @@ func TestRetryErroredMessages(t *testing.T) { testsuite.AssertCourierQueues(t, map[string][]int{}) // a non-errored outgoing message (should be ignored) - testdata.InsertOutgoingMsg(db, testdata.Org1, testdata.TwilioChannel, testdata.Cathy, "Hi", nil, models.MsgStatusDelivered, false) + testdata.InsertOutgoingMsg(rt, testdata.Org1, testdata.TwilioChannel, testdata.Cathy, "Hi", nil, models.MsgStatusDelivered, false) // an errored message with a next-attempt in the future 
(should be ignored) - testdata.InsertErroredOutgoingMsg(db, testdata.Org1, testdata.TwilioChannel, testdata.Cathy, "Hi", 2, time.Now().Add(time.Hour), false) + testdata.InsertErroredOutgoingMsg(rt, testdata.Org1, testdata.TwilioChannel, testdata.Cathy, "Hi", 2, time.Now().Add(time.Hour), false) // errored messages with a next-attempt in the past - testdata.InsertErroredOutgoingMsg(db, testdata.Org1, testdata.TwilioChannel, testdata.Cathy, "Hi", 1, time.Now().Add(-time.Hour), false) - testdata.InsertErroredOutgoingMsg(db, testdata.Org1, testdata.VonageChannel, testdata.Bob, "Hi", 2, time.Now().Add(-time.Minute), false) - msg5 := testdata.InsertErroredOutgoingMsg(db, testdata.Org1, testdata.VonageChannel, testdata.Bob, "Hi", 2, time.Now().Add(-time.Minute), false) - testdata.InsertErroredOutgoingMsg(db, testdata.Org1, testdata.VonageChannel, testdata.Bob, "Hi", 2, time.Now().Add(-time.Minute), true) // high priority + testdata.InsertErroredOutgoingMsg(rt, testdata.Org1, testdata.TwilioChannel, testdata.Cathy, "Hi", 1, time.Now().Add(-time.Hour), false) + testdata.InsertErroredOutgoingMsg(rt, testdata.Org1, testdata.VonageChannel, testdata.Bob, "Hi", 2, time.Now().Add(-time.Minute), false) + msg5 := testdata.InsertErroredOutgoingMsg(rt, testdata.Org1, testdata.VonageChannel, testdata.Bob, "Hi", 2, time.Now().Add(-time.Minute), false) + testdata.InsertErroredOutgoingMsg(rt, testdata.Org1, testdata.VonageChannel, testdata.Bob, "Hi", 2, time.Now().Add(-time.Minute), true) // high priority - db.MustExec(`UPDATE msgs_msg SET status = 'P' WHERE id = $1`, msg5.ID()) + rt.DB.MustExec(`UPDATE msgs_msg SET status = 'I' WHERE id = $1`, msg5.ID()) - assertdb.Query(t, db, `SELECT count(*) FROM msgs_msg WHERE status = 'P'`).Returns(1) - assertdb.Query(t, db, `SELECT count(*) FROM msgs_msg WHERE status = 'E'`).Returns(4) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM msgs_msg WHERE status = 'I'`).Returns(1) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM msgs_msg WHERE status = 
'E'`).Returns(4) // try again... err = msgs.RetryErroredMessages(ctx, rt) require.NoError(t, err) - assertdb.Query(t, db, `SELECT count(*) FROM msgs_msg WHERE status = 'D'`).Returns(1) - assertdb.Query(t, db, `SELECT count(*) FROM msgs_msg WHERE status = 'E'`).Returns(1) - assertdb.Query(t, db, `SELECT count(*) FROM msgs_msg WHERE status = 'Q'`).Returns(4) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM msgs_msg WHERE status = 'D'`).Returns(1) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM msgs_msg WHERE status = 'E'`).Returns(1) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM msgs_msg WHERE status = 'Q'`).Returns(4) testsuite.AssertCourierQueues(t, map[string][]int{ "msgs:74729f45-7f29-4868-9dc4-90e491e3c7d8|10/0": {1}, // twilio, bulk priority diff --git a/core/tasks/msgs/send_broadcast.go b/core/tasks/msgs/send_broadcast.go index 09940d7e6..b62cce328 100644 --- a/core/tasks/msgs/send_broadcast.go +++ b/core/tasks/msgs/send_broadcast.go @@ -2,174 +2,112 @@ package msgs import ( "context" - "encoding/json" "time" - "github.com/nyaruka/gocommon/urns" - "github.com/nyaruka/mailroom" + "github.com/nyaruka/goflow/contactql" "github.com/nyaruka/mailroom/core/models" - "github.com/nyaruka/mailroom/core/msgio" "github.com/nyaruka/mailroom/core/queue" + "github.com/nyaruka/mailroom/core/search" + "github.com/nyaruka/mailroom/core/tasks" "github.com/nyaruka/mailroom/runtime" "github.com/pkg/errors" "github.com/sirupsen/logrus" ) const ( + // TypeSendBroadcast is the task type for sending a broadcast + TypeSendBroadcast = "send_broadcast" + startBatchSize = 100 ) func init() { - mailroom.AddTaskFunction(queue.SendBroadcast, handleSendBroadcast) - mailroom.AddTaskFunction(queue.SendBroadcastBatch, handleSendBroadcastBatch) + tasks.RegisterType(TypeSendBroadcast, func() tasks.Task { return &SendBroadcastTask{} }) } -// handleSendBroadcast creates all the batches of contacts that need to be sent to -func handleSendBroadcast(ctx context.Context, rt *runtime.Runtime, task 
*queue.Task) error { - ctx, cancel := context.WithTimeout(ctx, time.Minute*60) - defer cancel() +// SendBroadcastTask is the task send broadcasts +type SendBroadcastTask struct { + *models.Broadcast +} - // decode our task body - if task.Type != queue.SendBroadcast { - return errors.Errorf("unknown event type passed to send worker: %s", task.Type) - } - broadcast := &models.Broadcast{} - err := json.Unmarshal(task.Task, broadcast) - if err != nil { - return errors.Wrapf(err, "error unmarshalling broadcast: %s", string(task.Task)) - } +func (t *SendBroadcastTask) Type() string { + return TypeSendBroadcast +} - return CreateBroadcastBatches(ctx, rt, broadcast) +// Timeout is the maximum amount of time the task can run for +func (t *SendBroadcastTask) Timeout() time.Duration { + return time.Minute * 60 } -// CreateBroadcastBatches takes our master broadcast and creates batches of broadcast sends for all the unique contacts -func CreateBroadcastBatches(ctx context.Context, rt *runtime.Runtime, bcast *models.Broadcast) error { - // we are building a set of contact ids, start with the explicit ones - contactIDs := make(map[models.ContactID]bool) - for _, id := range bcast.ContactIDs() { - contactIDs[id] = true - } +// Perform handles sending the broadcast by creating batches of broadcast sends for all the unique contacts +func (t *SendBroadcastTask) Perform(ctx context.Context, rt *runtime.Runtime, orgID models.OrgID) error { + if err := createBroadcastBatches(ctx, rt, t.Broadcast); err != nil { + if t.Broadcast.ID != models.NilBroadcastID { + models.MarkBroadcastFailed(ctx, rt.DB, t.Broadcast.ID) + } - groupContactIDs, err := models.ContactIDsForGroupIDs(ctx, rt.DB, bcast.GroupIDs()) - for _, id := range groupContactIDs { - contactIDs[id] = true + // if error is user created query error.. 
don't escalate error to sentry + isQueryError, _ := contactql.IsQueryError(err) + if !isQueryError { + return err + } } - oa, err := models.GetOrgAssets(ctx, rt, bcast.OrgID()) + return nil +} + +func createBroadcastBatches(ctx context.Context, rt *runtime.Runtime, bcast *models.Broadcast) error { + oa, err := models.GetOrgAssets(ctx, rt, bcast.OrgID) if err != nil { return errors.Wrapf(err, "error getting org assets") } - // get the contact ids for our URNs - urnMap, err := models.GetOrCreateContactIDsFromURNs(ctx, rt.DB, oa, bcast.URNs()) + contactIDs, err := search.ResolveRecipients(ctx, rt, oa, nil, &search.Recipients{ + ContactIDs: bcast.ContactIDs, + GroupIDs: bcast.GroupIDs, + URNs: bcast.URNs, + Query: string(bcast.Query), + ExcludeGroupIDs: nil, + }, -1) if err != nil { - return errors.Wrapf(err, "error getting contact ids for urns") + return errors.Wrap(err, "error resolving broadcast recipients") } - urnContacts := make(map[models.ContactID]urns.URN) - repeatedContacts := make(map[models.ContactID]urns.URN) - - q := queue.BatchQueue + // if there are no contacts to send to, mark our broadcast as sent, we are done + if len(contactIDs) == 0 { + if bcast.ID != models.NilBroadcastID { + err = models.MarkBroadcastSent(ctx, rt.DB, bcast.ID) + if err != nil { + return errors.Wrapf(err, "error marking broadcast as sent") + } + } + return nil + } // two or fewer contacts? 
queue to our handler queue for sending + q := queue.BatchQueue if len(contactIDs) <= 2 { q = queue.HandlerQueue } - // we want to remove contacts that are also present in URN sends, these will be a special case in our last batch - for u, id := range urnMap { - if contactIDs[id] { - repeatedContacts[id] = u - delete(contactIDs, id) - } - urnContacts[id] = u - } - rc := rt.RP.Get() defer rc.Close() - contacts := make([]models.ContactID, 0, 100) - - // utility functions for queueing the current set of contacts - queueBatch := func(isLast bool) { - // if this is our last batch include those contacts that overlap with our urns - if isLast { - for id := range repeatedContacts { - contacts = append(contacts, id) - } - } - - batch := bcast.CreateBatch(contacts) + // create tasks for batches of contacts + idBatches := models.ChunkSlice(contactIDs, startBatchSize) + for i, idBatch := range idBatches { + isLast := (i == len(idBatches)-1) - // also set our URNs - if isLast { - batch.IsLast = true - batch.URNs = urnContacts - } - - err = queue.AddTask(rc, q, queue.SendBroadcastBatch, int(bcast.OrgID()), batch, queue.DefaultPriority) + batch := bcast.CreateBatch(idBatch, isLast) + err = tasks.Queue(rc, q, bcast.OrgID, &SendBroadcastBatchTask{BroadcastBatch: batch}, queue.DefaultPriority) if err != nil { - logrus.WithError(err).Error("error while queuing broadcast batch") - } - contacts = make([]models.ContactID, 0, 100) - } - - // build up batches of contacts to start - for c := range contactIDs { - if len(contacts) == startBatchSize { - queueBatch(false) - } - contacts = append(contacts, c) - } - - // queue our last batch - queueBatch(true) - - return nil -} - -// handleSendBroadcastBatch sends our messages -func handleSendBroadcastBatch(ctx context.Context, rt *runtime.Runtime, task *queue.Task) error { - ctx, cancel := context.WithTimeout(ctx, time.Minute*60) - defer cancel() - - // decode our task body - if task.Type != queue.SendBroadcastBatch { - return 
errors.Errorf("unknown event type passed to send worker: %s", task.Type) - } - broadcast := &models.BroadcastBatch{} - err := json.Unmarshal(task.Task, broadcast) - if err != nil { - return errors.Wrapf(err, "error unmarshalling broadcast: %s", string(task.Task)) - } - - // try to send the batch - return SendBroadcastBatch(ctx, rt, broadcast) -} - -// SendBroadcastBatch sends the passed in broadcast batch -func SendBroadcastBatch(ctx context.Context, rt *runtime.Runtime, bcast *models.BroadcastBatch) error { - // always set our broadcast as sent if it is our last - defer func() { - if bcast.IsLast { - err := models.MarkBroadcastSent(ctx, rt.DB, bcast.BroadcastID) - if err != nil { - logrus.WithError(err).Error("error marking broadcast as sent") + if i == 0 { + return errors.Wrap(err, "error queuing broadcast batch") } + // if we've already queued other batches.. we don't want to error and have the task be retried + logrus.WithError(err).Error("error queuing broadcast batch") } - }() - - oa, err := models.GetOrgAssets(ctx, rt, bcast.OrgID) - if err != nil { - return errors.Wrapf(err, "error getting org assets") - } - - // create this batch of messages - msgs, err := bcast.CreateMessages(ctx, rt, oa) - if err != nil { - return errors.Wrapf(err, "error creating broadcast messages") } - msgio.SendMessages(ctx, rt, rt.DB, nil, msgs) return nil } diff --git a/core/tasks/msgs/send_broadcast_batch.go b/core/tasks/msgs/send_broadcast_batch.go new file mode 100644 index 000000000..17dcc0f9e --- /dev/null +++ b/core/tasks/msgs/send_broadcast_batch.go @@ -0,0 +1,59 @@ +package msgs + +import ( + "context" + "time" + + "github.com/nyaruka/mailroom/core/models" + "github.com/nyaruka/mailroom/core/msgio" + "github.com/nyaruka/mailroom/core/tasks" + "github.com/nyaruka/mailroom/runtime" + "github.com/pkg/errors" + "github.com/sirupsen/logrus" +) + +const TypeSendBroadcastBatch = "send_broadcast_batch" + +func init() { + tasks.RegisterType(TypeSendBroadcastBatch, func() tasks.Task 
{ return &SendBroadcastBatchTask{} }) +} + +// SendBroadcastTask is the task send broadcast batches +type SendBroadcastBatchTask struct { + *models.BroadcastBatch +} + +func (t *SendBroadcastBatchTask) Type() string { + return TypeSendBroadcastBatch +} + +// Timeout is the maximum amount of time the task can run for +func (t *SendBroadcastBatchTask) Timeout() time.Duration { + return time.Minute * 60 +} + +func (t *SendBroadcastBatchTask) Perform(ctx context.Context, rt *runtime.Runtime, orgID models.OrgID) error { + // always set our broadcast as sent if it is our last + defer func() { + if t.BroadcastBatch.IsLast && t.BroadcastBatch.BroadcastID != models.NilBroadcastID { + err := models.MarkBroadcastSent(ctx, rt.DB, t.BroadcastBatch.BroadcastID) + if err != nil { + logrus.WithError(err).Error("error marking broadcast as sent") + } + } + }() + + oa, err := models.GetOrgAssets(ctx, rt, t.BroadcastBatch.OrgID) + if err != nil { + return errors.Wrapf(err, "error getting org assets") + } + + // create this batch of messages + msgs, err := t.BroadcastBatch.CreateMessages(ctx, rt, oa) + if err != nil { + return errors.Wrapf(err, "error creating broadcast messages") + } + + msgio.SendMessages(ctx, rt, rt.DB, nil, msgs) + return nil +} diff --git a/core/tasks/msgs/send_broadcast_test.go b/core/tasks/msgs/send_broadcast_test.go index b5d9c769c..18db05199 100644 --- a/core/tasks/msgs/send_broadcast_test.go +++ b/core/tasks/msgs/send_broadcast_test.go @@ -14,25 +14,27 @@ import ( _ "github.com/nyaruka/mailroom/core/handlers" "github.com/nyaruka/mailroom/core/models" "github.com/nyaruka/mailroom/core/queue" + "github.com/nyaruka/mailroom/core/tasks" "github.com/nyaruka/mailroom/core/tasks/msgs" "github.com/nyaruka/mailroom/testsuite" "github.com/nyaruka/mailroom/testsuite/testdata" - "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) -func TestBroadcastEvents(t *testing.T) { - ctx, rt, db, rp := testsuite.Get() - rc := rp.Get() - defer rc.Close() 
+func TestSendBroadcastTask(t *testing.T) { + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetAll) + rc := rt.RP.Get() + defer rc.Close() + oa, err := models.GetOrgAssets(ctx, rt, testdata.Org1.ID) - assert.NoError(t, err) + require.NoError(t, err) eng := envs.Language("eng") - basic := map[envs.Language]*events.BroadcastTranslation{ + basic := flows.BroadcastTranslations{ eng: { Text: "hello world", Attachments: nil, @@ -41,80 +43,139 @@ func TestBroadcastEvents(t *testing.T) { } doctors := assets.NewGroupReference(testdata.DoctorsGroup.UUID, "Doctors") - doctorsOnly := []*assets.GroupReference{doctors} - cathy := flows.NewContactReference(testdata.Cathy.UUID, "Cathy") - cathyOnly := []*flows.ContactReference{cathy} // add an extra URN fo cathy - testdata.InsertContactURN(db, testdata.Org1, testdata.Cathy, urns.URN("tel:+12065551212"), 1001) + testdata.InsertContactURN(rt, testdata.Org1, testdata.Cathy, urns.URN("tel:+12065551212"), 1001) // change george's URN to an invalid twitter URN so it can't be sent - db.MustExec( - `UPDATE contacts_contacturn SET identity = 'twitter:invalid-urn', scheme = 'twitter', path='invalid-urn' WHERE id = $1`, testdata.George.URNID, - ) + rt.DB.MustExec(`UPDATE contacts_contacturn SET identity = 'twitter:invalid-urn', scheme = 'twitter', path='invalid-urn' WHERE id = $1`, testdata.George.URNID) george := flows.NewContactReference(testdata.George.UUID, "George") georgeOnly := []*flows.ContactReference{george} tcs := []struct { - Translations map[envs.Language]*events.BroadcastTranslation - BaseLanguage envs.Language - Groups []*assets.GroupReference - Contacts []*flows.ContactReference - URNs []urns.URN - Queue string - BatchCount int - MsgCount int - MsgText string + translations flows.BroadcastTranslations + baseLanguage envs.Language + groups []*assets.GroupReference + contacts []*flows.ContactReference + urns []urns.URN + queue string + expectedBatchCount int + expectedMsgCount int + expectedMsgText string }{ 
- {basic, eng, doctorsOnly, nil, nil, queue.BatchQueue, 2, 121, "hello world"}, - {basic, eng, doctorsOnly, georgeOnly, nil, queue.BatchQueue, 2, 121, "hello world"}, - {basic, eng, nil, georgeOnly, nil, queue.HandlerQueue, 1, 0, "hello world"}, - {basic, eng, doctorsOnly, cathyOnly, nil, queue.BatchQueue, 2, 121, "hello world"}, - {basic, eng, nil, cathyOnly, nil, queue.HandlerQueue, 1, 1, "hello world"}, - {basic, eng, nil, cathyOnly, []urns.URN{urns.URN("tel:+12065551212")}, queue.HandlerQueue, 1, 1, "hello world"}, - {basic, eng, nil, cathyOnly, []urns.URN{urns.URN("tel:+250700000001")}, queue.HandlerQueue, 1, 2, "hello world"}, - {basic, eng, nil, nil, []urns.URN{urns.URN("tel:+250700000001")}, queue.HandlerQueue, 1, 1, "hello world"}, + { // 0 + translations: basic, + baseLanguage: eng, + groups: []*assets.GroupReference{doctors}, + contacts: nil, + urns: nil, + queue: queue.BatchQueue, + expectedBatchCount: 2, + expectedMsgCount: 121, + expectedMsgText: "hello world", + }, + { // 1 + translations: basic, + baseLanguage: eng, + groups: []*assets.GroupReference{doctors}, + contacts: georgeOnly, + urns: nil, + queue: queue.BatchQueue, + expectedBatchCount: 2, + expectedMsgCount: 122, + expectedMsgText: "hello world", + }, + { // 2 + translations: basic, + baseLanguage: eng, + groups: nil, + contacts: georgeOnly, + urns: nil, + queue: queue.HandlerQueue, + expectedBatchCount: 1, + expectedMsgCount: 1, + expectedMsgText: "hello world", + }, + { // 3 + translations: basic, + baseLanguage: eng, + groups: []*assets.GroupReference{doctors}, + contacts: []*flows.ContactReference{cathy}, + urns: nil, + queue: queue.BatchQueue, + expectedBatchCount: 2, + expectedMsgCount: 121, + expectedMsgText: "hello world", + }, + { // 4 + translations: basic, + baseLanguage: eng, + groups: nil, + contacts: []*flows.ContactReference{cathy}, + urns: nil, + queue: queue.HandlerQueue, + expectedBatchCount: 1, + expectedMsgCount: 1, + expectedMsgText: "hello world", + }, + { // 5 + 
translations: basic, + baseLanguage: eng, + groups: nil, + contacts: []*flows.ContactReference{cathy}, + urns: []urns.URN{urns.URN("tel:+12065551212")}, + queue: queue.HandlerQueue, + expectedBatchCount: 1, + expectedMsgCount: 1, + expectedMsgText: "hello world", + }, + { // 6 + translations: basic, + baseLanguage: eng, + groups: nil, + contacts: []*flows.ContactReference{cathy}, + urns: []urns.URN{urns.URN("tel:+250700000001")}, + queue: queue.HandlerQueue, + expectedBatchCount: 1, + expectedMsgCount: 2, + expectedMsgText: "hello world", + }, + { // 7 + translations: basic, + baseLanguage: eng, + groups: nil, + contacts: nil, + urns: []urns.URN{urns.URN("tel:+250700000001")}, + queue: queue.HandlerQueue, + expectedBatchCount: 1, + expectedMsgCount: 1, + expectedMsgText: "hello world", + }, } lastNow := time.Now() time.Sleep(10 * time.Millisecond) for i, tc := range tcs { + testsuite.ReindexElastic(ctx) + // handle our start task - event := events.NewBroadcastCreated(tc.Translations, tc.BaseLanguage, tc.Groups, tc.Contacts, tc.URNs) - bcast, err := models.NewBroadcastFromEvent(ctx, db, oa, event) + event := events.NewBroadcastCreated(tc.translations, tc.baseLanguage, tc.groups, tc.contacts, "", tc.urns) + bcast, err := models.NewBroadcastFromEvent(ctx, rt.DB, oa, event) assert.NoError(t, err) - err = msgs.CreateBroadcastBatches(ctx, rt, bcast) + err = tasks.Queue(rc, tc.queue, testdata.Org1.ID, &msgs.SendBroadcastTask{Broadcast: bcast}, queue.DefaultPriority) assert.NoError(t, err) - // pop all our tasks and execute them - var task *queue.Task - count := 0 - for { - task, err = queue.PopNextTask(rc, tc.Queue) - assert.NoError(t, err) - if task == nil { - break - } - - count++ - assert.Equal(t, queue.SendBroadcastBatch, task.Type) - batch := &models.BroadcastBatch{} - err = json.Unmarshal(task.Task, batch) - assert.NoError(t, err) - - err = msgs.SendBroadcastBatch(ctx, rt, batch) - assert.NoError(t, err) - } + taskCounts := testsuite.FlushTasks(t, rt) // assert our 
count of batches - assert.Equal(t, tc.BatchCount, count, "%d: unexpected batch count", i) + assert.Equal(t, tc.expectedBatchCount, taskCounts["send_broadcast_batch"], "%d: unexpected batch count", i) // assert our count of total msgs created - assertdb.Query(t, db, `SELECT count(*) FROM msgs_msg WHERE org_id = 1 AND created_on > $1 AND text = $2`, lastNow, tc.MsgText). - Returns(tc.MsgCount, "%d: unexpected msg count", i) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM msgs_msg WHERE org_id = 1 AND created_on > $1 AND text = $2`, lastNow, tc.expectedMsgText). + Returns(tc.expectedMsgCount, "%d: unexpected msg count", i) lastNow = time.Now() time.Sleep(10 * time.Millisecond) @@ -122,8 +183,8 @@ func TestBroadcastEvents(t *testing.T) { } func TestBroadcastTask(t *testing.T) { - ctx, rt, db, rp := testsuite.Get() - rc := rp.Get() + ctx, rt := testsuite.Runtime() + rc := rt.RP.Get() defer rc.Close() defer testsuite.Reset(testsuite.ResetAll) @@ -132,51 +193,19 @@ func TestBroadcastTask(t *testing.T) { assert.NoError(t, err) eng := envs.Language("eng") - // insert a broadcast so we can check it is being set to sent - legacyID := testdata.InsertBroadcast(db, testdata.Org1, "base", map[envs.Language]string{"base": "hi @(PROPER(contact.name)) legacy"}, models.NilScheduleID, nil, nil) - - ticket := testdata.InsertOpenTicket(db, testdata.Org1, testdata.Cathy, testdata.Mailgun, testdata.DefaultTopic, "", "", time.Now(), nil) - modelTicket := ticket.Load(db) - - evaluated := map[envs.Language]*models.BroadcastTranslation{ - eng: { - Text: "hello world", - Attachments: nil, - QuickReplies: nil, - }, - } - - legacy := map[envs.Language]*models.BroadcastTranslation{ - eng: { - Text: "hi @(PROPER(contact.name)) legacy URN: @contact.tel_e164 Gender: @contact.gender", - Attachments: nil, - QuickReplies: nil, - }, - } - - template := map[envs.Language]*models.BroadcastTranslation{ - eng: { - Text: "hi @(title(contact.name)) from @globals.org_name goflow URN: @urns.tel Gender: 
@fields.gender", - Attachments: nil, - QuickReplies: nil, - }, - } - doctorsOnly := []models.GroupID{testdata.DoctorsGroup.ID} cathyOnly := []models.ContactID{testdata.Cathy.ID} // add an extra URN fo cathy - testdata.InsertContactURN(db, testdata.Org1, testdata.Cathy, urns.URN("tel:+12065551212"), 1001) + testdata.InsertContactURN(rt, testdata.Org1, testdata.Cathy, urns.URN("tel:+12065551212"), 1001) tcs := []struct { - BroadcastID models.BroadcastID - Translations map[envs.Language]*models.BroadcastTranslation + Translations flows.BroadcastTranslations TemplateState models.TemplateState BaseLanguage envs.Language GroupIDs []models.GroupID ContactIDs []models.ContactID URNs []urns.URN - TicketID models.TicketID CreatedByID models.UserID Queue string BatchCount int @@ -184,14 +213,18 @@ func TestBroadcastTask(t *testing.T) { MsgText string }{ { - models.NilBroadcastID, - evaluated, + flows.BroadcastTranslations{ + eng: { + Text: "hello world", + Attachments: nil, + QuickReplies: nil, + }, + }, models.TemplateStateEvaluated, eng, doctorsOnly, cathyOnly, nil, - models.NilTicketID, testdata.Admin.ID, queue.BatchQueue, 2, @@ -199,29 +232,18 @@ func TestBroadcastTask(t *testing.T) { "hello world", }, { - legacyID, - legacy, - models.TemplateStateLegacy, - eng, - nil, - cathyOnly, - nil, - models.NilTicketID, - models.NilUserID, - queue.HandlerQueue, - 1, - 1, - "hi Cathy legacy URN: +12065551212 Gender: F", - }, - { - models.NilBroadcastID, - template, + flows.BroadcastTranslations{ + eng: { + Text: "hi @(title(contact.name)) from @globals.org_name goflow URN: @urns.tel Gender: @fields.gender", + Attachments: nil, + QuickReplies: nil, + }, + }, models.TemplateStateUnevaluated, eng, nil, cathyOnly, nil, - ticket.ID, testdata.Agent.ID, queue.HandlerQueue, 1, @@ -234,9 +256,9 @@ func TestBroadcastTask(t *testing.T) { time.Sleep(10 * time.Millisecond) for i, tc := range tcs { - // handle our start task - bcast := models.NewBroadcast(oa.OrgID(), tc.BroadcastID, 
tc.Translations, tc.TemplateState, tc.BaseLanguage, tc.URNs, tc.ContactIDs, tc.GroupIDs, tc.TicketID, tc.CreatedByID) - err = msgs.CreateBroadcastBatches(ctx, rt, bcast) + bcast := models.NewBroadcast(oa.OrgID(), tc.Translations, tc.TemplateState, tc.BaseLanguage, tc.URNs, tc.ContactIDs, tc.GroupIDs, "", tc.CreatedByID) + + err = (&msgs.SendBroadcastTask{Broadcast: bcast}).Perform(ctx, rt, testdata.Org1.ID) assert.NoError(t, err) // pop all our tasks and execute them @@ -250,12 +272,12 @@ func TestBroadcastTask(t *testing.T) { } count++ - assert.Equal(t, queue.SendBroadcastBatch, task.Type) - batch := &models.BroadcastBatch{} - err = json.Unmarshal(task.Task, batch) + assert.Equal(t, "send_broadcast_batch", task.Type) + taskObj := &msgs.SendBroadcastBatchTask{} + err = json.Unmarshal(task.Task, taskObj) assert.NoError(t, err) - err = msgs.SendBroadcastBatch(ctx, rt, batch) + err = taskObj.Perform(ctx, rt, testdata.Org1.ID) assert.NoError(t, err) } @@ -263,29 +285,10 @@ func TestBroadcastTask(t *testing.T) { assert.Equal(t, tc.BatchCount, count, "%d: unexpected batch count", i) // assert our count of total msgs created - assertdb.Query(t, db, `SELECT count(*) FROM msgs_msg WHERE org_id = 1 AND created_on > $1 AND text = $2`, lastNow, tc.MsgText). + assertdb.Query(t, rt.DB, `SELECT count(*) FROM msgs_msg WHERE org_id = 1 AND created_on > $1 AND text = $2`, lastNow, tc.MsgText). Returns(tc.MsgCount, "%d: unexpected msg count", i) - // make sure our broadcast is marked as sent - if tc.BroadcastID != models.NilBroadcastID { - assertdb.Query(t, db, `SELECT count(*) FROM msgs_broadcast WHERE id = $1 AND status = 'S'`, tc.BroadcastID). - Returns(1, "%d: broadcast not marked as sent", i) - } - - // if we had a ticket, make sure its replied_on and last_activity_on were updated - if tc.TicketID != models.NilTicketID { - assertdb.Query(t, db, `SELECT count(*) FROM tickets_ticket WHERE id = $1 AND last_activity_on > $2`, tc.TicketID, modelTicket.LastActivityOn()). 
- Returns(1, "%d: ticket last_activity_on not updated", i) - assertdb.Query(t, db, `SELECT count(*) FROM tickets_ticket WHERE id = $1 AND replied_on IS NOT NULL`, tc.TicketID). - Returns(1, "%d: ticket replied_on not updated", i) - } - lastNow = time.Now() time.Sleep(10 * time.Millisecond) } - - assertdb.Query(t, db, `SELECT SUM(count) FROM tickets_ticketdailycount WHERE count_type = 'R' AND scope = CONCAT('o:', $1::text)`, testdata.Org1.ID).Returns(1) - assertdb.Query(t, db, `SELECT SUM(count) FROM tickets_ticketdailycount WHERE count_type = 'R' AND scope = CONCAT('o:', $1::text, ':u:', $2::text)`, testdata.Org1.ID, testdata.Agent.ID).Returns(1) - - assertdb.Query(t, db, `SELECT SUM(count) FROM tickets_ticketdailytiming WHERE count_type = 'R' AND scope = CONCAT('o:', $1::text)`, testdata.Org1.ID).Returns(1) } diff --git a/core/tasks/schedules/cron.go b/core/tasks/schedules/cron.go index 97e7ff77f..3d499ecf0 100644 --- a/core/tasks/schedules/cron.go +++ b/core/tasks/schedules/cron.go @@ -4,9 +4,13 @@ import ( "context" "time" + "github.com/nyaruka/gocommon/uuids" "github.com/nyaruka/mailroom" "github.com/nyaruka/mailroom/core/models" "github.com/nyaruka/mailroom/core/queue" + "github.com/nyaruka/mailroom/core/tasks" + "github.com/nyaruka/mailroom/core/tasks/msgs" + "github.com/nyaruka/mailroom/core/tasks/starts" "github.com/nyaruka/mailroom/runtime" "github.com/pkg/errors" "github.com/sirupsen/logrus" @@ -64,8 +68,7 @@ func checkSchedules(ctx context.Context, rt *runtime.Runtime) error { continue } - var task interface{} - var taskName string + var task tasks.Task // if it is a broadcast if s.Broadcast() != nil { @@ -78,12 +81,12 @@ func checkSchedules(ctx context.Context, rt *runtime.Runtime) error { } // add our task to send this broadcast - task = bcast - taskName = queue.SendBroadcast + task = &msgs.SendBroadcastTask{Broadcast: bcast} broadcasts++ } else if s.FlowStart() != nil { start := s.FlowStart() + start.UUID = uuids.New() // insert our flow start err := 
models.InsertFlowStarts(ctx, tx, []*models.FlowStart{start}) @@ -94,8 +97,7 @@ func checkSchedules(ctx context.Context, rt *runtime.Runtime) error { } // add our flow start task - task = start - taskName = queue.StartFlow + task = &starts.StartFlowTask{FlowStart: start} triggers++ } else { log.Info("schedule found with no associated active broadcast or trigger, ignoring") @@ -120,9 +122,9 @@ func checkSchedules(ctx context.Context, rt *runtime.Runtime) error { // add our task if we have one if task != nil { - err = queue.AddTask(rc, queue.BatchQueue, taskName, int(s.OrgID()), task, queue.HighPriority) + err = tasks.Queue(rc, queue.BatchQueue, s.OrgID(), task, queue.HighPriority) if err != nil { - log.WithError(err).Error("error firing task with name: ", taskName) + log.WithError(err).Errorf("error queueing %s task from schedule", task.Type()) } } } diff --git a/core/tasks/schedules/cron_test.go b/core/tasks/schedules/cron_test.go index 893a35d1e..0deb06a4e 100644 --- a/core/tasks/schedules/cron_test.go +++ b/core/tasks/schedules/cron_test.go @@ -9,20 +9,19 @@ import ( "github.com/nyaruka/mailroom/core/queue" "github.com/nyaruka/mailroom/testsuite" "github.com/nyaruka/mailroom/testsuite/testdata" - "github.com/stretchr/testify/assert" ) func TestCheckSchedules(t *testing.T) { - ctx, rt, db, rp := testsuite.Get() - rc := rp.Get() + ctx, rt := testsuite.Runtime() + rc := rt.RP.Get() defer rc.Close() - defer testsuite.Reset(testsuite.ResetAll) + defer testsuite.Reset(testsuite.ResetData | testsuite.ResetRedis) // add a schedule and tie a broadcast to it var s1 models.ScheduleID - err := db.Get( + err := rt.DB.Get( &s1, `INSERT INTO schedules_schedule(is_active, repeat_period, created_on, modified_on, next_fire, created_by_id, modified_by_id, org_id) VALUES(TRUE, 'O', NOW(), NOW(), NOW()- INTERVAL '1 DAY', 1, 1, $1) RETURNING id`, @@ -30,16 +29,13 @@ func TestCheckSchedules(t *testing.T) { ) assert.NoError(t, err) - b1 := testdata.InsertBroadcast(db, testdata.Org1, 
"eng", map[envs.Language]string{"eng": "Test message", "fra": "Un Message"}, s1, + b1 := testdata.InsertBroadcast(rt, testdata.Org1, "eng", map[envs.Language]string{"eng": "Test message", "fra": "Un Message"}, s1, []*testdata.Contact{testdata.Cathy, testdata.George}, []*testdata.Group{testdata.DoctorsGroup}, ) - // add a URN - db.MustExec(`INSERT INTO msgs_broadcast_urns(broadcast_id, contacturn_id) VALUES($1, $2)`, b1, testdata.Cathy.URNID) - // add another and tie a trigger to it var s2 models.ScheduleID - err = db.Get( + err = rt.DB.Get( &s2, `INSERT INTO schedules_schedule(is_active, repeat_period, created_on, modified_on, next_fire, created_by_id, modified_by_id, org_id) VALUES(TRUE, 'O', NOW(), NOW(), NOW()- INTERVAL '2 DAY', 1, 1, $1) RETURNING id`, @@ -47,7 +43,7 @@ func TestCheckSchedules(t *testing.T) { ) assert.NoError(t, err) var t1 models.TriggerID - err = db.Get( + err = rt.DB.Get( &t1, `INSERT INTO triggers_trigger(is_active, created_on, modified_on, is_archived, trigger_type, created_by_id, modified_by_id, org_id, flow_id, schedule_id) VALUES(TRUE, NOW(), NOW(), FALSE, 'S', 1, 1, $1, $2, $3) RETURNING id`, @@ -56,13 +52,13 @@ func TestCheckSchedules(t *testing.T) { assert.NoError(t, err) // add a few contacts to the trigger - db.MustExec(`INSERT INTO triggers_trigger_contacts(trigger_id, contact_id) VALUES($1, $2),($1, $3)`, t1, testdata.Cathy.ID, testdata.George.ID) + rt.DB.MustExec(`INSERT INTO triggers_trigger_contacts(trigger_id, contact_id) VALUES($1, $2),($1, $3)`, t1, testdata.Cathy.ID, testdata.George.ID) // and a group - db.MustExec(`INSERT INTO triggers_trigger_groups(trigger_id, contactgroup_id) VALUES($1, $2)`, t1, testdata.DoctorsGroup.ID) + rt.DB.MustExec(`INSERT INTO triggers_trigger_groups(trigger_id, contactgroup_id) VALUES($1, $2)`, t1, testdata.DoctorsGroup.ID) var s3 models.ScheduleID - err = db.Get( + err = rt.DB.Get( &s3, `INSERT INTO schedules_schedule(is_active, repeat_period, created_on, modified_on, next_fire, 
created_by_id, modified_by_id, org_id) VALUES(TRUE, 'O', NOW(), NOW(), NOW()- INTERVAL '3 DAY', 1, 1, $1) RETURNING id`, @@ -75,23 +71,26 @@ func TestCheckSchedules(t *testing.T) { assert.NoError(t, err) // should have one flow start added to our DB ready to go - assertdb.Query(t, db, `SELECT count(*) FROM flows_flowstart WHERE flow_id = $1 AND start_type = 'T' AND status = 'P'`, testdata.Favorites.ID).Returns(1) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM flows_flowstart WHERE flow_id = $1 AND start_type = 'T' AND status = 'P'`, testdata.Favorites.ID).Returns(1) // with the right count of groups and contacts - assertdb.Query(t, db, `SELECT count(*) from flows_flowstart_contacts WHERE flowstart_id = 1`).Returns(2) - assertdb.Query(t, db, `SELECT count(*) from flows_flowstart_groups WHERE flowstart_id = 1`).Returns(1) + assertdb.Query(t, rt.DB, `SELECT count(*) from flows_flowstart_contacts WHERE flowstart_id = 1`).Returns(2) + assertdb.Query(t, rt.DB, `SELECT count(*) from flows_flowstart_groups WHERE flowstart_id = 1`).Returns(1) // and one broadcast as well - assertdb.Query(t, db, `SELECT count(*) FROM msgs_broadcast WHERE org_id = $1 AND parent_id = $2 - AND text = hstore(ARRAY['eng','Test message', 'fra', 'Un Message']) AND status = 'Q' AND base_language = 'eng'`, testdata.Org1.ID, b1).Returns(1) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM msgs_broadcast WHERE org_id = $1 + AND parent_id = $2 + AND translations -> 'eng' ->> 'text' = 'Test message' + AND translations -> 'fra' ->> 'text' = 'Un Message' + AND status = 'Q' + AND base_language = 'eng'`, testdata.Org1.ID, b1).Returns(1) - // with the right count of groups, contacts, urns - assertdb.Query(t, db, `SELECT count(*) from msgs_broadcast_urns WHERE broadcast_id = 2`).Returns(1) - assertdb.Query(t, db, `SELECT count(*) from msgs_broadcast_contacts WHERE broadcast_id = 2`).Returns(2) - assertdb.Query(t, db, `SELECT count(*) from msgs_broadcast_groups WHERE broadcast_id = 2`).Returns(1) + // with the 
right count of contacts and groups + assertdb.Query(t, rt.DB, `SELECT count(*) from msgs_broadcast_contacts WHERE broadcast_id = 2`).Returns(2) + assertdb.Query(t, rt.DB, `SELECT count(*) from msgs_broadcast_groups WHERE broadcast_id = 2`).Returns(1) // we shouldn't have any pending schedules since there were all one time fires, but all should have last fire - assertdb.Query(t, db, `SELECT count(*) FROM schedules_schedule WHERE next_fire IS NULL and last_fire < NOW();`).Returns(3) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM schedules_schedule WHERE next_fire IS NULL and last_fire < NOW();`).Returns(3) // check the tasks created task, err := queue.PopNextTask(rc, queue.BatchQueue) @@ -99,13 +98,13 @@ func TestCheckSchedules(t *testing.T) { // first should be the flow start assert.NoError(t, err) assert.NotNil(t, task) - assert.Equal(t, queue.StartFlow, task.Type) + assert.Equal(t, "start_flow", task.Type) // then the broadacast task, err = queue.PopNextTask(rc, queue.BatchQueue) assert.NoError(t, err) assert.NotNil(t, task) - assert.Equal(t, queue.SendBroadcast, task.Type) + assert.Equal(t, "send_broadcast", task.Type) // nothing more task, err = queue.PopNextTask(rc, queue.BatchQueue) diff --git a/core/tasks/starts/start_flow.go b/core/tasks/starts/start_flow.go new file mode 100644 index 000000000..1a7aa01d1 --- /dev/null +++ b/core/tasks/starts/start_flow.go @@ -0,0 +1,150 @@ +package starts + +import ( + "context" + "time" + + "github.com/nyaruka/goflow/contactql" + "github.com/nyaruka/goflow/envs" + "github.com/nyaruka/mailroom/core/models" + "github.com/nyaruka/mailroom/core/queue" + "github.com/nyaruka/mailroom/core/search" + "github.com/nyaruka/mailroom/core/tasks" + "github.com/nyaruka/mailroom/core/tasks/ivr" + "github.com/nyaruka/mailroom/runtime" + "github.com/pkg/errors" + "github.com/sirupsen/logrus" +) + +const ( + TypeStartFlow = "start_flow" + + startBatchSize = 100 +) + +func init() { + tasks.RegisterType(TypeStartFlow, func() tasks.Task { 
return &StartFlowTask{} }) +} + +// StartFlowBatchTask is the start flow batch task +type StartFlowTask struct { + *models.FlowStart +} + +func (t *StartFlowTask) Type() string { + return TypeStartFlow +} + +// Timeout is the maximum amount of time the task can run for +func (t *StartFlowTask) Timeout() time.Duration { + return time.Minute * 60 +} + +func (t *StartFlowTask) Perform(ctx context.Context, rt *runtime.Runtime, orgID models.OrgID) error { + if err := createFlowStartBatches(ctx, rt, t.FlowStart); err != nil { + models.MarkStartFailed(ctx, rt.DB, t.FlowStart.ID) + + // if error is user created query error.. don't escalate error to sentry + isQueryError, _ := contactql.IsQueryError(err) + if !isQueryError { + return err + } + } + + return nil +} + +// creates batches of flow starts for all the unique contacts +func createFlowStartBatches(ctx context.Context, rt *runtime.Runtime, start *models.FlowStart) error { + oa, err := models.GetOrgAssets(ctx, rt, start.OrgID) + if err != nil { + return errors.Wrap(err, "error loading org assets") + } + + flow, err := oa.FlowByID(start.FlowID) + if err != nil { + return errors.Wrap(err, "error loading flow") + } + + var contactIDs []models.ContactID + + if start.CreateContact { + // if we are meant to create a new contact, do so + contact, _, err := models.CreateContact(ctx, rt.DB, oa, models.NilUserID, "", envs.NilLanguage, nil) + if err != nil { + return errors.Wrapf(err, "error creating new contact") + } + contactIDs = []models.ContactID{contact.ID()} + } else { + // otherwise resolve recipients across contacts, groups, urns etc + + // queries in start_session flow actions only match a single contact + limit := -1 + if string(start.Query) != "" && start.StartType == models.StartTypeFlowAction { + limit = 1 + } + + contactIDs, err = search.ResolveRecipients(ctx, rt, oa, flow, &search.Recipients{ + ContactIDs: start.ContactIDs, + GroupIDs: start.GroupIDs, + URNs: start.URNs, + Query: string(start.Query), + 
Exclusions: start.Exclusions, + ExcludeGroupIDs: start.ExcludeGroupIDs, + }, limit) + if err != nil { + return errors.Wrap(err, "error resolving start recipients") + } + } + + // mark our start as starting, last task will mark as complete + err = models.MarkStartStarted(ctx, rt.DB, start.ID, len(contactIDs)) + if err != nil { + return errors.Wrapf(err, "error marking start as started") + } + + // if there are no contacts to start, mark our start as complete, we are done + if len(contactIDs) == 0 { + err = models.MarkStartComplete(ctx, rt.DB, start.ID) + if err != nil { + return errors.Wrapf(err, "error marking start as complete") + } + return nil + } + + // by default we start in the batch queue unless we have two or fewer contacts + q := queue.BatchQueue + if len(contactIDs) <= 2 { + q = queue.HandlerQueue + } + + rc := rt.RP.Get() + defer rc.Close() + + // create tasks for batches of contacts + idBatches := models.ChunkSlice(contactIDs, startBatchSize) + for i, idBatch := range idBatches { + isLast := (i == len(idBatches)-1) + + batch := start.CreateBatch(idBatch, isLast, len(contactIDs)) + + // task is different if we are an IVR flow + var batchTask tasks.Task + if start.FlowType == models.FlowTypeVoice { + batchTask = &ivr.StartIVRFlowBatchTask{FlowStartBatch: batch} + } else { + batchTask = &StartFlowBatchTask{FlowStartBatch: batch} + } + + err = tasks.Queue(rc, q, start.OrgID, batchTask, queue.DefaultPriority) + if err != nil { + if i == 0 { + return errors.Wrap(err, "error queuing flow start batch") + } + // if we've already queued other batches.. 
we don't want to error and have the task be retried + logrus.WithError(err).Error("error queuing flow start batch") + } + } + + return nil +} diff --git a/core/tasks/starts/start_flow_batch.go b/core/tasks/starts/start_flow_batch.go new file mode 100644 index 000000000..74309d3bb --- /dev/null +++ b/core/tasks/starts/start_flow_batch.go @@ -0,0 +1,41 @@ +package starts + +import ( + "context" + "time" + + "github.com/nyaruka/mailroom/core/models" + "github.com/nyaruka/mailroom/core/runner" + "github.com/nyaruka/mailroom/core/tasks" + "github.com/nyaruka/mailroom/runtime" + "github.com/pkg/errors" +) + +const TypeStartFlowBatch = "start_flow_batch" + +func init() { + tasks.RegisterType(TypeStartFlowBatch, func() tasks.Task { return &StartFlowBatchTask{} }) +} + +// StartFlowBatchTask is the start flow batch task +type StartFlowBatchTask struct { + *models.FlowStartBatch +} + +func (t *StartFlowBatchTask) Type() string { + return TypeStartFlowBatch +} + +// Timeout is the maximum amount of time the task can run for +func (t *StartFlowBatchTask) Timeout() time.Duration { + return time.Minute * 15 +} + +func (t *StartFlowBatchTask) Perform(ctx context.Context, rt *runtime.Runtime, orgID models.OrgID) error { + // start these contacts in our flow + _, err := runner.StartFlowBatch(ctx, rt, t.FlowStartBatch) + if err != nil { + return errors.Wrap(err, "error starting flow batch") + } + return nil +} diff --git a/core/tasks/starts/worker_test.go b/core/tasks/starts/start_flow_test.go similarity index 74% rename from core/tasks/starts/worker_test.go rename to core/tasks/starts/start_flow_test.go index 56f2581de..dcd066dba 100644 --- a/core/tasks/starts/worker_test.go +++ b/core/tasks/starts/start_flow_test.go @@ -1,7 +1,6 @@ -package starts +package starts_test import ( - "encoding/json" "testing" "time" @@ -9,41 +8,34 @@ import ( _ "github.com/nyaruka/mailroom/core/handlers" "github.com/nyaruka/mailroom/core/models" "github.com/nyaruka/mailroom/core/queue" - 
"github.com/nyaruka/mailroom/core/runner" + "github.com/nyaruka/mailroom/core/tasks" + "github.com/nyaruka/mailroom/core/tasks/starts" "github.com/nyaruka/mailroom/testsuite" "github.com/nyaruka/mailroom/testsuite/testdata" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" ) -func TestStarts(t *testing.T) { - ctx, rt, db, rp := testsuite.Get() - rc := rp.Get() - defer rc.Close() +func TestStartFlowTask(t *testing.T) { + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetAll) - mockES := testsuite.NewMockElasticServer() - defer mockES.Close() - - rt.ES = mockES.Client() + rc := rt.RP.Get() + defer rc.Close() // convert our single message flow to an actual background flow that shouldn't interrupt - db.MustExec(`UPDATE flows_flow SET flow_type = 'B' WHERE id = $1`, testdata.SingleMessage.ID) + rt.DB.MustExec(`UPDATE flows_flow SET flow_type = 'B' WHERE id = $1`, testdata.SingleMessage.ID) - sID := testdata.InsertWaitingSession(db, testdata.Org1, testdata.George, models.FlowTypeMessaging, testdata.Favorites, models.NilCallID, time.Now(), time.Now(), true, nil) - testdata.InsertFlowRun(db, testdata.Org1, sID, testdata.George, testdata.Favorites, models.RunStatusWaiting) + sID := testdata.InsertWaitingSession(rt, testdata.Org1, testdata.George, models.FlowTypeMessaging, testdata.Favorites, models.NilCallID, time.Now(), time.Now(), true, nil) + testdata.InsertFlowRun(rt, testdata.Org1, sID, testdata.George, testdata.Favorites, models.RunStatusWaiting) tcs := []struct { - label string flowID models.FlowID groupIDs []models.GroupID excludeGroupIDs []models.GroupID contactIDs []models.ContactID createContact bool query string - queryResult []models.ContactID excludeInAFlow bool excludeStartedPreviously bool queue string @@ -53,8 +45,7 @@ func TestStarts(t *testing.T) { expectedStatus models.StartStatus expectedActiveRuns map[models.FlowID]int }{ - { - label: "Empty flow start", + { // 0: empty flow start flowID: 
testdata.Favorites.ID, excludeInAFlow: true, excludeStartedPreviously: true, @@ -65,8 +56,7 @@ func TestStarts(t *testing.T) { expectedStatus: models.StartStatusComplete, expectedActiveRuns: map[models.FlowID]int{testdata.Favorites.ID: 1, testdata.PickANumber.ID: 0, testdata.SingleMessage.ID: 0}, }, - { - label: "Single group", + { // 1: single group flowID: testdata.Favorites.ID, groupIDs: []models.GroupID{testdata.DoctorsGroup.ID}, excludeInAFlow: true, @@ -78,8 +68,7 @@ func TestStarts(t *testing.T) { expectedStatus: models.StartStatusComplete, expectedActiveRuns: map[models.FlowID]int{testdata.Favorites.ID: 122, testdata.PickANumber.ID: 0, testdata.SingleMessage.ID: 0}, }, - { - label: "Group and Contact (but all already active)", + { // 2: group and contact (but all already active) flowID: testdata.Favorites.ID, groupIDs: []models.GroupID{testdata.DoctorsGroup.ID}, contactIDs: []models.ContactID{testdata.Cathy.ID}, @@ -87,13 +76,12 @@ func TestStarts(t *testing.T) { excludeStartedPreviously: true, queue: queue.BatchQueue, expectedContactCount: 121, - expectedBatchCount: 2, + expectedBatchCount: 0, expectedTotalCount: 0, expectedStatus: models.StartStatusComplete, expectedActiveRuns: map[models.FlowID]int{testdata.Favorites.ID: 122, testdata.PickANumber.ID: 0, testdata.SingleMessage.ID: 0}, }, - { - label: "Contact restart", + { // 3: don't exclude started previously flowID: testdata.Favorites.ID, contactIDs: []models.ContactID{testdata.Cathy.ID}, excludeInAFlow: false, @@ -105,46 +93,42 @@ func TestStarts(t *testing.T) { expectedStatus: models.StartStatusComplete, expectedActiveRuns: map[models.FlowID]int{testdata.Favorites.ID: 122, testdata.PickANumber.ID: 0, testdata.SingleMessage.ID: 0}, }, - { - label: "Previous group and one new contact", + { // 4: previous group and one new contact flowID: testdata.Favorites.ID, groupIDs: []models.GroupID{testdata.DoctorsGroup.ID}, contactIDs: []models.ContactID{testdata.Bob.ID}, excludeStartedPreviously: true, queue: 
queue.BatchQueue, expectedContactCount: 122, - expectedBatchCount: 2, + expectedBatchCount: 1, expectedTotalCount: 1, expectedStatus: models.StartStatusComplete, expectedActiveRuns: map[models.FlowID]int{testdata.Favorites.ID: 123, testdata.PickANumber.ID: 0, testdata.SingleMessage.ID: 0}, }, - { - label: "Single contact, no restart", + { // 5: single contact, no restart flowID: testdata.Favorites.ID, contactIDs: []models.ContactID{testdata.Bob.ID}, excludeStartedPreviously: true, queue: queue.HandlerQueue, expectedContactCount: 1, - expectedBatchCount: 1, + expectedBatchCount: 0, expectedTotalCount: 0, expectedStatus: models.StartStatusComplete, expectedActiveRuns: map[models.FlowID]int{testdata.Favorites.ID: 123, testdata.PickANumber.ID: 0, testdata.SingleMessage.ID: 0}, }, - { - label: "Single contact, include active, but no restart", + { // 6: single contact, include active, but no restart flowID: testdata.Favorites.ID, contactIDs: []models.ContactID{testdata.Bob.ID}, excludeInAFlow: false, excludeStartedPreviously: true, queue: queue.HandlerQueue, expectedContactCount: 1, - expectedBatchCount: 1, + expectedBatchCount: 0, expectedTotalCount: 0, expectedStatus: models.StartStatusComplete, expectedActiveRuns: map[models.FlowID]int{testdata.Favorites.ID: 123, testdata.PickANumber.ID: 0, testdata.SingleMessage.ID: 0}, }, - { - label: "Single contact, include active and restart", + { // 7: single contact, include active and restart flowID: testdata.Favorites.ID, contactIDs: []models.ContactID{testdata.Bob.ID}, excludeInAFlow: false, @@ -156,11 +140,9 @@ func TestStarts(t *testing.T) { expectedStatus: models.StartStatusComplete, expectedActiveRuns: map[models.FlowID]int{testdata.Favorites.ID: 123, testdata.PickANumber.ID: 0, testdata.SingleMessage.ID: 0}, }, - { - label: "Query start", + { // 8: query start flowID: testdata.Favorites.ID, query: "bob", - queryResult: []models.ContactID{testdata.Bob.ID}, excludeInAFlow: false, excludeStartedPreviously: false, queue: 
queue.HandlerQueue, @@ -170,8 +152,7 @@ func TestStarts(t *testing.T) { expectedStatus: models.StartStatusComplete, expectedActiveRuns: map[models.FlowID]int{testdata.Favorites.ID: 123, testdata.PickANumber.ID: 0, testdata.SingleMessage.ID: 0}, }, - { - label: "Query start with invalid query", + { // 9: query start with invalid query flowID: testdata.Favorites.ID, query: "xyz = 45", excludeInAFlow: false, @@ -183,8 +164,7 @@ func TestStarts(t *testing.T) { expectedStatus: models.StartStatusFailed, expectedActiveRuns: map[models.FlowID]int{testdata.Favorites.ID: 123, testdata.PickANumber.ID: 0, testdata.SingleMessage.ID: 0}, }, - { - label: "New Contact", + { // 10: new contact flowID: testdata.Favorites.ID, createContact: true, queue: queue.HandlerQueue, @@ -194,8 +174,7 @@ func TestStarts(t *testing.T) { expectedStatus: models.StartStatusComplete, expectedActiveRuns: map[models.FlowID]int{testdata.Favorites.ID: 124, testdata.PickANumber.ID: 0, testdata.SingleMessage.ID: 0}, }, - { - label: "Other messaging flow", + { // 11: other messaging flow flowID: testdata.PickANumber.ID, contactIDs: []models.ContactID{testdata.Bob.ID}, excludeInAFlow: false, @@ -207,8 +186,7 @@ func TestStarts(t *testing.T) { expectedStatus: models.StartStatusComplete, expectedActiveRuns: map[models.FlowID]int{testdata.Favorites.ID: 123, testdata.PickANumber.ID: 1, testdata.SingleMessage.ID: 0}, }, - { - label: "Background flow", + { // 12: background flow flowID: testdata.SingleMessage.ID, contactIDs: []models.ContactID{testdata.Bob.ID}, excludeInAFlow: false, @@ -220,8 +198,7 @@ func TestStarts(t *testing.T) { expectedStatus: models.StartStatusComplete, expectedActiveRuns: map[models.FlowID]int{testdata.Favorites.ID: 123, testdata.PickANumber.ID: 1, testdata.SingleMessage.ID: 0}, }, - { - label: "Exclude group", + { // 13: exclude group flowID: testdata.Favorites.ID, contactIDs: []models.ContactID{testdata.Cathy.ID, testdata.Bob.ID}, excludeGroupIDs: 
[]models.GroupID{testdata.DoctorsGroup.ID}, // should exclude Cathy @@ -236,10 +213,8 @@ func TestStarts(t *testing.T) { }, } - for _, tc := range tcs { - if tc.queryResult != nil { - mockES.AddResponse(tc.queryResult...) - } + for i, tc := range tcs { + testsuite.ReindexElastic(ctx) // handle our start task start := models.NewFlowStart(testdata.Org1.ID, models.StartTypeManual, models.FlowTypeMessaging, tc.flowID). @@ -251,53 +226,31 @@ func TestStarts(t *testing.T) { WithExcludeStartedPreviously(tc.excludeStartedPreviously). WithCreateContact(tc.createContact) - err := models.InsertFlowStarts(ctx, db, []*models.FlowStart{start}) + err := models.InsertFlowStarts(ctx, rt.DB, []*models.FlowStart{start}) assert.NoError(t, err) - startJSON, err := json.Marshal(start) - require.NoError(t, err) - - err = handleFlowStart(ctx, rt, &queue.Task{Type: queue.StartFlow, Task: startJSON}) + err = tasks.Queue(rc, tc.queue, testdata.Org1.ID, &starts.StartFlowTask{FlowStart: start}, queue.DefaultPriority) assert.NoError(t, err) - // pop all our tasks and execute them - var task *queue.Task - count := 0 - for { - task, err = queue.PopNextTask(rc, tc.queue) - assert.NoError(t, err) - if task == nil { - break - } - - count++ - assert.Equal(t, queue.StartFlowBatch, task.Type) - batch := &models.FlowStartBatch{} - err = json.Unmarshal(task.Task, batch) - assert.NoError(t, err) - - _, err = runner.StartFlowBatch(ctx, rt, batch) - assert.NoError(t, err) - } + taskCounts := testsuite.FlushTasks(t, rt) // assert our count of batches - assert.Equal(t, tc.expectedBatchCount, count, "unexpected batch count in '%s'", tc.label) + assert.Equal(t, tc.expectedBatchCount, taskCounts["start_flow_batch"], "%d: unexpected batch count", i) // assert our count of total flow runs created - assertdb.Query(t, db, `SELECT count(*) FROM flows_flowrun WHERE flow_id = $1 AND start_id = $2`, tc.flowID, start.ID()).Returns(tc.expectedTotalCount, "unexpected total run count in '%s'", tc.label) + assertdb.Query(t, 
rt.DB, `SELECT count(*) FROM flows_flowrun WHERE flow_id = $1 AND start_id = $2`, tc.flowID, start.ID).Returns(tc.expectedTotalCount, "%d: unexpected total run count", i) // assert final status - assertdb.Query(t, db, `SELECT count(*) FROM flows_flowstart where status = $2 AND id = $1`, start.ID(), tc.expectedStatus).Returns(1, "status mismatch in '%s'", tc.label) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM flows_flowstart where status = $2 AND id = $1`, start.ID, tc.expectedStatus).Returns(1, "%d: status mismatch", i) // assert final contact count if tc.expectedStatus != models.StartStatusFailed { - assertdb.Query(t, db, `SELECT count(*) FROM flows_flowstart where contact_count = $2 AND id = $1`, - []interface{}{start.ID(), tc.expectedContactCount}, 1, "contact count mismatch in '%s'", tc.label) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM flows_flowstart where contact_count = $2 AND id = $1`, []any{start.ID, tc.expectedContactCount}, 1, "%d: contact count mismatch", i) } // assert count of active runs by flow for flowID, activeRuns := range tc.expectedActiveRuns { - assertdb.Query(t, db, `SELECT count(*) FROM flows_flowrun WHERE status = 'W' AND flow_id = $1`, flowID).Returns(activeRuns, "active runs mismatch for flow #%d in '%s'", flowID, tc.label) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM flows_flowrun WHERE status = 'W' AND flow_id = $1`, flowID).Returns(activeRuns, "%d: active runs mismatch for flow #%d", i, flowID) } } } diff --git a/core/tasks/starts/worker.go b/core/tasks/starts/worker.go deleted file mode 100644 index a652ee147..000000000 --- a/core/tasks/starts/worker.go +++ /dev/null @@ -1,231 +0,0 @@ -package starts - -import ( - "context" - "encoding/json" - "time" - - "github.com/nyaruka/goflow/contactql" - "github.com/nyaruka/goflow/envs" - "github.com/nyaruka/mailroom" - "github.com/nyaruka/mailroom/core/models" - "github.com/nyaruka/mailroom/core/queue" - "github.com/nyaruka/mailroom/core/runner" - 
"github.com/nyaruka/mailroom/core/search" - "github.com/nyaruka/mailroom/runtime" - - "github.com/lib/pq" - "github.com/pkg/errors" - "github.com/sirupsen/logrus" -) - -const ( - startBatchSize = 100 -) - -func init() { - mailroom.AddTaskFunction(queue.StartFlow, handleFlowStart) - mailroom.AddTaskFunction(queue.StartFlowBatch, handleFlowStartBatch) -} - -// handleFlowStart creates all the batches of contacts to start in a flow -func handleFlowStart(ctx context.Context, rt *runtime.Runtime, task *queue.Task) error { - ctx, cancel := context.WithTimeout(ctx, time.Minute*60) - defer cancel() - - // decode our task body - if task.Type != queue.StartFlow { - return errors.Errorf("unknown event type passed to start worker: %s", task.Type) - } - startTask := &models.FlowStart{} - err := json.Unmarshal(task.Task, startTask) - if err != nil { - return errors.Wrapf(err, "error unmarshalling flow start task: %s", string(task.Task)) - } - - err = CreateFlowBatches(ctx, rt, startTask) - if err != nil { - models.MarkStartFailed(ctx, rt.DB, startTask.ID()) - - // if error is user created query error.. 
don't escalate error to sentry - isQueryError, _ := contactql.IsQueryError(err) - if !isQueryError { - return err - } - } - - return nil -} - -// CreateFlowBatches takes our master flow start and creates batches of flow starts for all the unique contacts -func CreateFlowBatches(ctx context.Context, rt *runtime.Runtime, start *models.FlowStart) error { - contactIDs := make(map[models.ContactID]bool) - createdContactIDs := make([]models.ContactID, 0) - - // we are building a set of contact ids, start with the explicit ones - for _, id := range start.ContactIDs() { - contactIDs[id] = true - } - - oa, err := models.GetOrgAssets(ctx, rt, start.OrgID()) - if err != nil { - return errors.Wrapf(err, "error loading org assets") - } - - // look up any contacts by URN - if len(start.URNs()) > 0 { - urnContactIDs, err := models.GetOrCreateContactIDsFromURNs(ctx, rt.DB, oa, start.URNs()) - if err != nil { - return errors.Wrapf(err, "error getting contact ids from urns") - } - for _, id := range urnContactIDs { - if !contactIDs[id] { - createdContactIDs = append(createdContactIDs, id) - } - contactIDs[id] = true - } - } - - // if we are meant to create a new contact, do so - if start.CreateContact() { - contact, _, err := models.CreateContact(ctx, rt.DB, oa, models.NilUserID, "", envs.NilLanguage, nil) - if err != nil { - return errors.Wrapf(err, "error creating new contact") - } - contactIDs[contact.ID()] = true - createdContactIDs = append(createdContactIDs, contact.ID()) - } - - // if we have inclusion groups, add all the contact ids from those groups - if len(start.GroupIDs()) > 0 { - rows, err := rt.DB.QueryxContext(ctx, `SELECT contact_id FROM contacts_contactgroup_contacts WHERE contactgroup_id = ANY($1)`, pq.Array(start.GroupIDs())) - if err != nil { - return errors.Wrapf(err, "error querying contacts from inclusion groups") - } - defer rows.Close() - - var contactID models.ContactID - for rows.Next() { - err := rows.Scan(&contactID) - if err != nil { - return 
errors.Wrapf(err, "error scanning contact id") - } - contactIDs[contactID] = true - } - } - - // if we have a query, add the contacts that match that as well - if start.Query() != "" { - // queries in start_session flow actions only match a single contact - limit := -1 - if start.Type() == models.StartTypeFlowAction { - limit = 1 - } - matches, err := search.GetContactIDsForQuery(ctx, rt.ES, oa, start.Query(), limit) - if err != nil { - return errors.Wrapf(err, "error performing search for start: %d", start.ID()) - } - - for _, contactID := range matches { - contactIDs[contactID] = true - } - } - - // finally, if we have exclusion groups, remove all the contact ids from those groups - if len(start.ExcludeGroupIDs()) > 0 { - rows, err := rt.DB.QueryxContext(ctx, `SELECT contact_id FROM contacts_contactgroup_contacts WHERE contactgroup_id = ANY($1)`, pq.Array(start.ExcludeGroupIDs())) - if err != nil { - return errors.Wrapf(err, "error querying contacts from exclusion groups") - } - defer rows.Close() - - var contactID models.ContactID - for rows.Next() { - err := rows.Scan(&contactID) - if err != nil { - return errors.Wrapf(err, "error scanning contact id") - } - delete(contactIDs, contactID) - } - } - - rc := rt.RP.Get() - defer rc.Close() - - // mark our start as starting, last task will mark as complete - err = models.MarkStartStarted(ctx, rt.DB, start.ID(), len(contactIDs), createdContactIDs) - if err != nil { - return errors.Wrapf(err, "error marking start as started") - } - - // if there are no contacts to start, mark our start as complete, we are done - if len(contactIDs) == 0 { - err = models.MarkStartComplete(ctx, rt.DB, start.ID()) - if err != nil { - return errors.Wrapf(err, "error marking start as complete") - } - return nil - } - - // by default we start in the batch queue unless we have two or fewer contacts - q := queue.BatchQueue - if len(contactIDs) <= 2 { - q = queue.HandlerQueue - } - - // task is different if we are an IVR flow - taskType := 
queue.StartFlowBatch - if start.FlowType() == models.FlowTypeVoice { - taskType = queue.StartIVRFlowBatch - } - - contacts := make([]models.ContactID, 0, 100) - queueBatch := func(last bool) { - batch := start.CreateBatch(contacts, last, len(contactIDs)) - err = queue.AddTask(rc, q, taskType, int(start.OrgID()), batch, queue.DefaultPriority) - if err != nil { - // TODO: is continuing the right thing here? what do we do if redis is down? (panic!) - logrus.WithError(err).WithField("start_id", start.ID()).Error("error while queuing start") - } - contacts = make([]models.ContactID, 0, 100) - } - - // build up batches of contacts to start - for c := range contactIDs { - if len(contacts) == startBatchSize { - queueBatch(false) - } - contacts = append(contacts, c) - } - - // queue our last batch - if len(contacts) > 0 { - queueBatch(true) - } - - return nil -} - -// HandleFlowStartBatch starts a batch of contacts in a flow -func handleFlowStartBatch(ctx context.Context, rt *runtime.Runtime, task *queue.Task) error { - ctx, cancel := context.WithTimeout(ctx, time.Minute*15) - defer cancel() - - // decode our task body - if task.Type != queue.StartFlowBatch { - return errors.Errorf("unknown event type passed to start worker: %s", task.Type) - } - startBatch := &models.FlowStartBatch{} - err := json.Unmarshal(task.Task, startBatch) - if err != nil { - return errors.Wrapf(err, "error unmarshalling flow start batch: %s", string(task.Task)) - } - - // start these contacts in our flow - _, err = runner.StartFlowBatch(ctx, rt, startBatch) - if err != nil { - return errors.Wrapf(err, "error starting flow batch: %s", string(task.Task)) - } - - return err -} diff --git a/core/tasks/timeouts/cron_test.go b/core/tasks/timeouts/cron_test.go index 8fffee14d..f0fca90d8 100644 --- a/core/tasks/timeouts/cron_test.go +++ b/core/tasks/timeouts/cron_test.go @@ -16,17 +16,17 @@ import ( ) func TestTimeouts(t *testing.T) { - ctx, rt, db, rp := testsuite.Get() - rc := rp.Get() + ctx, rt := 
testsuite.Runtime() + rc := rt.RP.Get() defer rc.Close() defer testsuite.Reset(testsuite.ResetData | testsuite.ResetRedis) // need to create a session that has an expired timeout s1TimeoutOn := time.Now() - testdata.InsertWaitingSession(db, testdata.Org1, testdata.Cathy, models.FlowTypeMessaging, testdata.Favorites, models.NilCallID, time.Now(), time.Now(), false, &s1TimeoutOn) + testdata.InsertWaitingSession(rt, testdata.Org1, testdata.Cathy, models.FlowTypeMessaging, testdata.Favorites, models.NilCallID, time.Now(), time.Now(), false, &s1TimeoutOn) s2TimeoutOn := time.Now().Add(time.Hour * 24) - testdata.InsertWaitingSession(db, testdata.Org1, testdata.George, models.FlowTypeMessaging, testdata.Favorites, models.NilCallID, time.Now(), time.Now(), false, &s2TimeoutOn) + testdata.InsertWaitingSession(rt, testdata.Org1, testdata.George, models.FlowTypeMessaging, testdata.Favorites, models.NilCallID, time.Now(), time.Now(), false, &s2TimeoutOn) time.Sleep(10 * time.Millisecond) @@ -40,7 +40,7 @@ func TestTimeouts(t *testing.T) { assert.NotNil(t, task) // decode the task - eventTask := &handler.HandleEventTask{} + eventTask := &handler.HandleContactEventTask{} err = json.Unmarshal(task.Task, eventTask) assert.NoError(t, err) diff --git a/go.mod b/go.mod index b6b6827a6..16691cd3d 100644 --- a/go.mod +++ b/go.mod @@ -4,62 +4,63 @@ go 1.19 require ( github.com/Masterminds/semver v1.5.0 - github.com/aws/aws-sdk-go v1.44.146 + github.com/aws/aws-sdk-go v1.44.305 github.com/buger/jsonparser v1.1.1 github.com/edganiukov/fcm v0.4.0 github.com/go-chi/chi v4.1.2+incompatible + github.com/go-playground/validator/v10 v10.14.1 github.com/golang-jwt/jwt v3.2.2+incompatible - github.com/golang/protobuf v1.5.2 + github.com/golang/protobuf v1.5.3 github.com/gomodule/redigo v1.8.9 github.com/gorilla/schema v1.2.0 github.com/jmoiron/sqlx v1.3.5 - github.com/lib/pq v1.10.7 + github.com/lib/pq v1.10.9 github.com/nyaruka/ezconf v0.2.1 - github.com/nyaruka/gocommon v1.33.1 - 
github.com/nyaruka/goflow v0.178.1 + github.com/nyaruka/gocommon v1.37.0 + github.com/nyaruka/goflow v0.187.0 github.com/nyaruka/logrus_sentry v0.8.2-0.20190129182604-c2962b80ba7d - github.com/nyaruka/null v1.2.0 - github.com/nyaruka/redisx v0.2.2 + github.com/nyaruka/null/v2 v2.0.3 + github.com/nyaruka/redisx v0.3.1 + github.com/nyaruka/rp-indexer/v8 v8.0.3 github.com/olivere/elastic/v7 v7.0.32 github.com/patrickmn/go-cache v2.1.0+incompatible github.com/pkg/errors v0.9.1 - github.com/prometheus/client_model v0.3.0 - github.com/prometheus/common v0.37.0 + github.com/prometheus/client_model v0.4.0 + github.com/prometheus/common v0.44.0 github.com/shopspring/decimal v1.3.1 - github.com/sirupsen/logrus v1.9.0 - github.com/stretchr/testify v1.8.1 - gopkg.in/go-playground/validator.v9 v9.31.0 + github.com/sirupsen/logrus v1.9.3 + github.com/stretchr/testify v1.8.4 + golang.org/x/exp v0.0.0-20230713183714-613f0c0eb8a1 ) require ( github.com/Shopify/gomail v0.0.0-20220729171026-0784ece65e69 // indirect - github.com/antlr/antlr4/runtime/Go/antlr v0.0.0-20220527190237-ee62e23da966 // indirect - github.com/blevesearch/segment v0.9.0 // indirect + github.com/antlr/antlr4/runtime/Go/antlr/v4 v4.0.0-20221202181307-76fa05c21b12 // indirect + github.com/blevesearch/segment v0.9.1 // indirect github.com/certifi/gocertifi v0.0.0-20210507211836-431795d63e8d // indirect github.com/davecgh/go-spew v1.1.1 // indirect github.com/fatih/structs v1.1.0 // indirect - github.com/gabriel-vasile/mimetype v1.4.1 // indirect - github.com/getsentry/raven-go v0.1.2-0.20190125112653-238ebd86338d // indirect - github.com/go-playground/locales v0.14.0 // indirect - github.com/go-playground/universal-translator v0.18.0 // indirect - github.com/gofrs/uuid v4.3.1+incompatible // indirect + github.com/gabriel-vasile/mimetype v1.4.2 // indirect + github.com/getsentry/raven-go v0.2.0 // indirect + github.com/go-playground/locales v0.14.1 // indirect + github.com/go-playground/universal-translator v0.18.1 
// indirect + github.com/gofrs/uuid v4.4.0+incompatible // indirect github.com/jmespath/go-jmespath v0.4.0 // indirect github.com/josharian/intern v1.0.0 // indirect - github.com/kylelemons/godebug v1.1.0 // indirect - github.com/leodido/go-urn v1.2.1 // indirect + github.com/leodido/go-urn v1.2.4 // indirect github.com/mailru/easyjson v0.7.7 // indirect github.com/matttproud/golang_protobuf_extensions v1.0.4 // indirect github.com/naoina/go-stringutil v0.1.0 // indirect github.com/naoina/toml v0.1.1 // indirect github.com/nyaruka/librato v1.0.0 // indirect - github.com/nyaruka/phonenumbers v1.1.4 // indirect + github.com/nyaruka/phonenumbers v1.1.7 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect - github.com/sergi/go-diff v1.2.0 // indirect - golang.org/x/exp v0.0.0-20221126150942-6ab00d035af9 // indirect - golang.org/x/net v0.2.0 // indirect - golang.org/x/sys v0.2.0 // indirect - golang.org/x/text v0.4.0 // indirect - google.golang.org/protobuf v1.28.1 // indirect + github.com/sergi/go-diff v1.3.1 // indirect + golang.org/x/crypto v0.11.0 // indirect + golang.org/x/net v0.12.0 // indirect + golang.org/x/sys v0.10.0 // indirect + golang.org/x/text v0.11.0 // indirect + google.golang.org/protobuf v1.31.0 // indirect gopkg.in/alexcesaro/quotedprintable.v3 v3.0.0-20150716171945-2caba252f4dc // indirect gopkg.in/yaml.v3 v3.0.1 // indirect ) diff --git a/go.sum b/go.sum index 0adbaf7ba..f945b0c60 100644 --- a/go.sum +++ b/go.sum @@ -1,170 +1,58 @@ -cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= -cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= -cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU= -cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU= -cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= -cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc= 
-cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0= -cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To= -cloud.google.com/go v0.52.0/go.mod h1:pXajvRH/6o3+F9jDHZWQ5PbGhn+o8w9qiu/CffaVdO4= -cloud.google.com/go v0.53.0/go.mod h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6M= -cloud.google.com/go v0.54.0/go.mod h1:1rq2OEkV3YMf6n/9ZvGWI3GWw0VoqH/1x2nd8Is/bPc= -cloud.google.com/go v0.56.0/go.mod h1:jr7tqZxxKOVYizybht9+26Z/gUq7tiRzu+ACVAMbKVk= -cloud.google.com/go v0.57.0/go.mod h1:oXiQ6Rzq3RAkkY7N6t3TcE6jE+CIBBbA36lwQ1JyzZs= -cloud.google.com/go v0.62.0/go.mod h1:jmCYTdRCQuc1PHIIJ/maLInMho30T/Y0M4hTdTShOYc= -cloud.google.com/go v0.65.0/go.mod h1:O5N8zS7uWy9vkA9vayVHs65eM1ubvY4h553ofrNHObY= -cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= -cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= -cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc= -cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg= -cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc= -cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ= -cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= -cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= -cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= -cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= -cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA= -cloud.google.com/go/pubsub v1.3.1/go.mod h1:i+ucay31+CNRpDW4Lu78I4xXG+O1r/MAHgjpRVR+TSU= -cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw= -cloud.google.com/go/storage v1.5.0/go.mod 
h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos= -cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk= -cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs= -cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= -dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= -github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= -github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= github.com/Masterminds/semver v1.5.0 h1:H65muMkzWKEuNDnfl9d70GUjFniHKHRbFPGBuZ3QEww= github.com/Masterminds/semver v1.5.0/go.mod h1:MB6lktGJrhw8PrUyiEoblNEGEQ+RzHPF078ddwwvV3Y= github.com/Shopify/gomail v0.0.0-20220729171026-0784ece65e69 h1:gPoXdwo3sKq8qcfMu/Nc/wkJMLKwe7kaG9Uo8tOj3cU= github.com/Shopify/gomail v0.0.0-20220729171026-0784ece65e69/go.mod h1:RS+Gaowa0M+gCuiFAiRMGBCMqxLrNA7TESTU/Wbblm8= -github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= -github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= -github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= -github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= -github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho= -github.com/antlr/antlr4/runtime/Go/antlr v0.0.0-20220527190237-ee62e23da966 h1:mEzJ8SH4M5wDL8C4a17yX2YeD/FIXV5w8FJekByaBi0= -github.com/antlr/antlr4/runtime/Go/antlr v0.0.0-20220527190237-ee62e23da966/go.mod h1:F7bn7fEU90QkQ3tnmaTx3LTKLEDqnwWODIYppRQ5hnY= -github.com/aws/aws-sdk-go v1.44.146 h1:7YdGgPxDPRJu/yYffzZp/H7yHzQ6AqmuNFZPYraaN8I= 
-github.com/aws/aws-sdk-go v1.44.146/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI= -github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= -github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= -github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= -github.com/blevesearch/segment v0.9.0 h1:5lG7yBCx98or7gK2cHMKPukPZ/31Kag7nONpoBt22Ac= -github.com/blevesearch/segment v0.9.0/go.mod h1:9PfHYUdQCgHktBgvtUOF4x+pc4/l8rdH0u5spnW85UQ= +github.com/antlr/antlr4/runtime/Go/antlr/v4 v4.0.0-20221202181307-76fa05c21b12 h1:npHgfD4Tl2WJS3AJaMUi5ynGDPUBfkg3U3fCzDyXZ+4= +github.com/antlr/antlr4/runtime/Go/antlr/v4 v4.0.0-20221202181307-76fa05c21b12/go.mod h1:pSwJ0fSY5KhvocuWSx4fz3BA8OrA1bQn+K1Eli3BRwM= +github.com/aws/aws-sdk-go v1.44.204 h1:7/tPUXfNOHB390A63t6fJIwmlwVQAkAwcbzKsU2/6OQ= +github.com/aws/aws-sdk-go v1.44.204/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI= +github.com/aws/aws-sdk-go v1.44.305 h1:fU/5lY3WyBjGU9fkmQYd8o4fZu+2RaOv/i+sPaJVvFg= +github.com/aws/aws-sdk-go v1.44.305/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI= +github.com/blevesearch/segment v0.9.1 h1:+dThDy+Lvgj5JMxhmOVlgFfkUtZV2kw49xax4+jTfSU= +github.com/blevesearch/segment v0.9.1/go.mod h1:zN21iLm7+GnBHWTao9I+Au/7MBiL8pPFtJBJTsk6kQw= github.com/buger/jsonparser v1.1.1 h1:2PnMjfWD7wBILjqQbt530v576A/cAbQvEW9gGIpYMUs= github.com/buger/jsonparser v1.1.1/go.mod h1:6RYKKt7H4d4+iWqouImQ9R2FZql3VbhNgx27UK13J/0= -github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= github.com/certifi/gocertifi v0.0.0-20210507211836-431795d63e8d h1:S2NE3iHSwP0XV47EEXL8mWmRdEfGscSJ+7EgePNgt0s= github.com/certifi/gocertifi v0.0.0-20210507211836-431795d63e8d/go.mod h1:sGbDF6GwGcLpkNXPUTkMRoywsNa/ol15pxFe6ERfguA= -github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= -github.com/cespare/xxhash/v2 
v2.1.2/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= -github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= -github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= -github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= -github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= -github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/edganiukov/fcm v0.4.0 h1:PAZamwbiW2AegM5hGqYNv+djE1xxLyH7zMN6MwWpvoQ= github.com/edganiukov/fcm v0.4.0/go.mod h1:3gL1BLvC3w05anUsF2Wbd1Sz+ZdCu8qsNCa1LyRfwFo= -github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= -github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= -github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= -github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= github.com/fatih/structs v1.0.0/go.mod h1:9NiDSp5zOcgEDl+j00MP/WkGVPOlPRLejGD8Ga6PJ7M= github.com/fatih/structs v1.1.0 h1:Q7juDM0QtcnhCpeyLGQKyg4TOIghuNXrkL32pHAUMxo= github.com/fatih/structs v1.1.0/go.mod h1:9NiDSp5zOcgEDl+j00MP/WkGVPOlPRLejGD8Ga6PJ7M= github.com/fortytw2/leaktest v1.3.0 h1:u8491cBMTQ8ft8aeV+adlcytMZylmA5nnwwkRZjI8vw= -github.com/gabriel-vasile/mimetype v1.4.1 h1:TRWk7se+TOjCYgRth7+1/OYLNiRNIotknkFtf/dnN7Q= -github.com/gabriel-vasile/mimetype v1.4.1/go.mod h1:05Vi0w3Y9c/lNvJOdmIwvrrAhX3rYhfQQCaf9VJcv7M= 
-github.com/getsentry/raven-go v0.1.2-0.20190125112653-238ebd86338d h1:CIp8WnfXz70wJVQ0ytr3dswFYGoJbAxWgNvaLpiu3sY= -github.com/getsentry/raven-go v0.1.2-0.20190125112653-238ebd86338d/go.mod h1:KungGk8q33+aIAZUIVWZDr2OfAEBsO49PX4NzFV5kcQ= +github.com/gabriel-vasile/mimetype v1.4.2 h1:w5qFW6JKBz9Y393Y4q372O9A7cUSequkh1Q7OhCmWKU= +github.com/gabriel-vasile/mimetype v1.4.2/go.mod h1:zApsH/mKG4w07erKIaJPFiX0Tsq9BFQgN3qGY5GnNgA= +github.com/getsentry/raven-go v0.2.0 h1:no+xWJRb5ZI7eE8TWgIq1jLulQiIoLG0IfYxv5JYMGs= +github.com/getsentry/raven-go v0.2.0/go.mod h1:KungGk8q33+aIAZUIVWZDr2OfAEBsO49PX4NzFV5kcQ= github.com/go-chi/chi v4.1.2+incompatible h1:fGFk2Gmi/YKXk0OmGfBh0WgmN3XB8lVnEyNz34tQRec= github.com/go-chi/chi v4.1.2+incompatible/go.mod h1:eB3wogJHnLi3x/kFX2A+IbTBlXxmMeXJVKy9tTv1XzQ= -github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= -github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= -github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= -github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= -github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= -github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY= -github.com/go-kit/log v0.2.0/go.mod h1:NwTd00d/i8cPZ3xOwwiv2PO5MOcx78fFErGNcVmBjv0= -github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= -github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk= -github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A= -github.com/go-logfmt/logfmt v0.5.1/go.mod h1:WYhtIu8zTZfxdn5+rREduYbwxfcBr/Vr6KEVveWlfTs= -github.com/go-playground/locales v0.14.0 h1:u50s323jtVGugKlcYeyzC0etD1HifMjqmJqb8WugfUU= -github.com/go-playground/locales v0.14.0/go.mod 
h1:sawfccIbzZTqEDETgFXqTho0QybSa7l++s0DH+LDiLs= -github.com/go-playground/universal-translator v0.18.0 h1:82dyy6p4OuJq4/CByFNOn/jYrnRPArHwAcmLoJZxyho= -github.com/go-playground/universal-translator v0.18.0/go.mod h1:UvRDBj+xPUEGrFYl+lu/H90nyDXpg0fqeB/AQUGNTVA= +github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s= +github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA= +github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY= +github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY= +github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY= +github.com/go-playground/validator/v10 v10.14.1 h1:9c50NUPC30zyuKprjL3vNZ0m5oG+jU0zvx4AqHGnv4k= +github.com/go-playground/validator/v10 v10.14.1/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU= github.com/go-sql-driver/mysql v1.6.0 h1:BCTh4TKNUYmOmMUcQ3IipzF5prigylS7XXjEkfCHuOE= github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= -github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= -github.com/gofrs/uuid v4.3.1+incompatible h1:0/KbAdpx3UXAx1kEOWHJeOkpbgRFGHVgv+CFIY7dBJI= -github.com/gofrs/uuid v4.3.1+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM= -github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= +github.com/gofrs/uuid v4.4.0+incompatible h1:3qXRTX8/NbyulANqlc0lchS1gqAVxRgsuW1YrTJupqA= +github.com/gofrs/uuid v4.4.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM= github.com/golang-jwt/jwt v3.2.2+incompatible h1:IfV12K8xAKAnZqdXVzCZ+TOjboZ2keLg81eXfW3O+oY= github.com/golang-jwt/jwt v3.2.2+incompatible/go.mod h1:8pz2t5EyA70fFQQSrl6XZXzqecmYZeUEB8OUGHkxJ+I= -github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= 
-github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= -github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= -github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= -github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= -github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= -github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= -github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= -github.com/golang/mock v1.4.1/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= -github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= -github.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4= github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= -github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= github.com/golang/protobuf v1.3.5/go.mod h1:6O5/vntMXwX2lRkT1hjjk0nAC1IDOTvTlVgjlRvqsdk= -github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= -github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= -github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= -github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= -github.com/golang/protobuf v1.4.0/go.mod 
h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= -github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8= -github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= -github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= -github.com/golang/protobuf v1.5.2 h1:ROPKBNFfQgOUMifHyP+KYbvpjbdoFNs+aK7DXlji0Tw= -github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= +github.com/golang/protobuf v1.5.3 h1:KhyjKVUg7Usr/dYsdSqoFveMYd5ko72D+zANwlG1mmg= +github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= github.com/gomodule/redigo v1.8.9 h1:Sl3u+2BI/kk+VEatbj0scLdrFhjPmbxOc1myhDP41ws= github.com/gomodule/redigo v1.8.9/go.mod h1:7ArFNvsTjH8GMMzB4uy1snslv2BwmginuMs06a1uzZE= -github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= -github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= -github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= -github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= -github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= -github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.4.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.8 h1:e6P7q2lk1O+qJJb4BtCQXlK8vWEO8V1ZeuEdJNOqZyg= 
-github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= -github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= -github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= -github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= -github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= -github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= -github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= -github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= github.com/gorilla/schema v1.2.0 h1:YufUaxZYCKGFuAq3c96BOhjgd5nmXiOY9NGzF247Tsc= github.com/gorilla/schema v1.2.0/go.mod h1:kgLaKoK1FELgZqMAVxx/5cbj0kT+57qxUrAlIO2eleU= -github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= -github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= -github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/jmespath/go-jmespath v0.4.0 h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9YPoQUg= github.com/jmespath/go-jmespath v0.4.0/go.mod 
h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo= github.com/jmespath/go-jmespath/internal/testify v1.5.1 h1:shLQSRRSCCPj3f2gpwzGwWFoC7ycTf1rcQZHOlsJ6N8= @@ -173,436 +61,141 @@ github.com/jmoiron/sqlx v1.3.5 h1:vFFPA71p1o5gAeqtEAwLU4dnX2napprKtHr7PYIcN3g= github.com/jmoiron/sqlx v1.3.5/go.mod h1:nRVWtLre0KfCLJvgxzCsLVMogSvQ1zNJtpYr2Ccp0mQ= github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY= github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y= -github.com/jpillora/backoff v1.0.0/go.mod h1:J/6gKK9jxlEcS3zixgDgUAsiuZ7yrSoa/FX5e0EB2j4= -github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= -github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= -github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= -github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= -github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= -github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= -github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= -github.com/julienschmidt/httprouter v1.3.0/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8IZAc4RVcycCCAKdM= -github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= -github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= -github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= github.com/kr/pty 
v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= -github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc= -github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= -github.com/leodido/go-urn v1.2.1 h1:BqpAaACuzVSgi/VLzGZIobT2z4v53pjosyNd9Yv6n/w= -github.com/leodido/go-urn v1.2.1/go.mod h1:zt4jvISO2HfUBqxjfIshjdMTYS56ZS/qv49ictyFfxY= -github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= +github.com/leodido/go-urn v1.2.4 h1:XlAE/cm/ms7TE/VMVoduSpNBoyc2dOxHs5MZSwAN63Q= +github.com/leodido/go-urn v1.2.4/go.mod h1:7ZrI8mTSeBSHl/UaRyKQW1qZeMgak41ANeCNaVckg+4= github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= github.com/lib/pq v1.10.7 h1:p7ZhMD+KsSRozJr34udlUrhboJwWAgCg34+/ZZNvZZw= github.com/lib/pq v1.10.7/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= +github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw= +github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0= github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= github.com/mattn/go-sqlite3 v1.14.6 h1:dNPt6NO46WmLVt2DLNpwczCmdV5boIZ6g/tlDrlRUbg= github.com/mattn/go-sqlite3 v1.14.6/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU= -github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= github.com/matttproud/golang_protobuf_extensions v1.0.4 h1:mmDVorXM7PCGKw94cs5zkfA9PSy5pEvNWRP0ET0TIVo= github.com/matttproud/golang_protobuf_extensions v1.0.4/go.mod h1:BSXmuO+STAnVfrANrmjBb36TMTDstsz7MSK+HVaYKv4= -github.com/modern-go/concurrent 
v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= -github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= -github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= -github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= -github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= -github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= -github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= github.com/naoina/go-stringutil v0.1.0 h1:rCUeRUHjBjGTSHl0VC00jUPLz8/F9dDzYI70Hzifhks= github.com/naoina/go-stringutil v0.1.0/go.mod h1:XJ2SJL9jCtBh+P9q5btrd/Ylo8XwT/h1USek5+NqSA0= github.com/naoina/toml v0.1.1 h1:PT/lllxVVN0gzzSqSlHEmP8MJB4MY2U7STGxiouV4X8= github.com/naoina/toml v0.1.1/go.mod h1:NBIhNtsFMo3G2szEBne+bO4gS192HuIYRqfvOWb4i1E= github.com/nyaruka/ezconf v0.2.1 h1:TDXWoqjqYya1uhou1mAJZg7rgFYL98EB0Tb3+BWtUh0= github.com/nyaruka/ezconf v0.2.1/go.mod h1:ey182kYkw2MIi4XiWe1FR/mzI33WCmTWuceDYYxgnQw= -github.com/nyaruka/gocommon v1.33.1 h1:RUy1O5Ly4tAaQDDpahds8z+4uewwsXg6SNCH0hYm7pE= -github.com/nyaruka/gocommon v1.33.1/go.mod h1:gusIA2aNC8EPB3ozlP4O0PaBiHUNq5+f1peRNvcn0DI= -github.com/nyaruka/goflow v0.178.1 h1:ubVQXcrlFIebDnfJOvDRMaGc3CyGpngrtJLiVDgsHDc= -github.com/nyaruka/goflow v0.178.1/go.mod h1:C3Hj+jvJ2RY6w/ANx4zjcbVjYzd8gzOcryyPW2OEa8E= +github.com/nyaruka/gocommon v1.37.0 h1:1wCSJtdjpBQ4FXWQy5zXBSiHENGEpL6sTy5IN79+0Iw= +github.com/nyaruka/gocommon v1.37.0/go.mod h1:HaUQmWPrZfKS9MLnXKQj28zF4KlJrzFou+DGuqT7RbE= +github.com/nyaruka/goflow v0.187.0 h1:HvMtTSLNdQwC2hCILkeA7jiffRuN7qpe5dLj4WDWMWY= +github.com/nyaruka/goflow v0.187.0/go.mod h1:8O8eCOZWBYYapaAQhRkdD9pYWcon4Mj1LQtxXV/GuXY= 
github.com/nyaruka/librato v1.0.0 h1:Vznj9WCeC1yZXbBYyYp40KnbmXLbEkjKmHesV/v2SR0= github.com/nyaruka/librato v1.0.0/go.mod h1:pkRNLFhFurOz0QqBz6/DuTFhHHxAubWxs4Jx+J7yUgg= github.com/nyaruka/logrus_sentry v0.8.2-0.20190129182604-c2962b80ba7d h1:hyp9u36KIwbTCo2JAJ+TuJcJBc+UZzEig7RI/S5Dvkc= github.com/nyaruka/logrus_sentry v0.8.2-0.20190129182604-c2962b80ba7d/go.mod h1:FGdPJVDTNqbRAD+2RvnK9YoO2HcEW7ogSMPzc90b638= -github.com/nyaruka/null v1.2.0 h1:uEbkyy4Z+zPB2Pr3ryQh/0N2965I9kEsXq/cGpyJ7PA= -github.com/nyaruka/null v1.2.0/go.mod h1:HSAFbLNOaEhHnoU0VCveCPz0GDtJ3GEtFWhvnBNkhPE= -github.com/nyaruka/phonenumbers v1.1.4 h1:de8exybd7+g9q+gXP04Ypt9ijFYXXm8wrgqPf+Ckk20= -github.com/nyaruka/phonenumbers v1.1.4/go.mod h1:yShPJHDSH3aTKzCbXyVxNpbl2kA+F+Ne5Pun/MvFRos= -github.com/nyaruka/redisx v0.2.2 h1:OAJ4g1So2opn6O5akDWEWiDWgEOvPMKU10EUCG/Nv9Y= -github.com/nyaruka/redisx v0.2.2/go.mod h1:cdbAm4y/+oFWu7qFzH2ERPeqRXJC2CtgRhwcBacM4Oc= +github.com/nyaruka/null/v2 v2.0.3 h1:rdmMRQyVzrOF3Jff/gpU/7BDR9mQX0lcLl4yImsA3kw= +github.com/nyaruka/null/v2 v2.0.3/go.mod h1:OCVeCkCXwrg5/qE6RU0c1oUVZBy+ZDrT+xYg1XSaIWA= +github.com/nyaruka/phonenumbers v1.1.7 h1:5UUI9hE79Kk0dymSquXbMYB7IlNDNhvu2aNlJpm9et8= +github.com/nyaruka/phonenumbers v1.1.7/go.mod h1:DC7jZd321FqUe+qWSNcHi10tyIyGNXGcNbfkPvdp1Vs= +github.com/nyaruka/redisx v0.3.1 h1:vnq1tHQwDh+7oG9BANyEVkqGjacgu8wpPxKBOx/exiw= +github.com/nyaruka/redisx v0.3.1/go.mod h1:v3PY8t0gyf/0E7S0Cxb1RpCCxYo9GUFAIQdF/RufsVw= +github.com/nyaruka/rp-indexer/v8 v8.0.3 h1:TDh4QP/8ytuqUXQwWPIr2waVnXUGBASkIPmkgDVNlxs= +github.com/nyaruka/rp-indexer/v8 v8.0.3/go.mod h1:u65K3Ssn60qMb8+XzMefYwz8gsuPhCwJSq+yR4iNHwQ= github.com/olivere/elastic/v7 v7.0.32 h1:R7CXvbu8Eq+WlsLgxmKVKPox0oOwAE/2T9Si5BnvK6E= github.com/olivere/elastic/v7 v7.0.32/go.mod h1:c7PVmLe3Fxq77PIfY/bZmxY/TAamBhCzZ8xDOE09a9k= github.com/patrickmn/go-cache v2.1.0+incompatible h1:HRMgzkcYKYpi3C8ajMPV8OFXaaRUnok+kx1WdO15EQc= github.com/patrickmn/go-cache v2.1.0+incompatible/go.mod 
h1:3Qf8kWWT7OJRJbdiICTKqZju1ZixQ/KpMGzzAfe6+WQ= -github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= -github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= -github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo= -github.com/prometheus/client_golang v1.7.1/go.mod h1:PY5Wy2awLA44sXw4AOSfFBetzPP4j5+D6mVACh+pe2M= -github.com/prometheus/client_golang v1.11.0/go.mod h1:Z6t4BnS23TR94PD6BsDNk8yVqroYurpAkEiz0P2BEV0= -github.com/prometheus/client_golang v1.12.1/go.mod h1:3Z9XVyYiZYEO+YQWt3RD2R3jrbd179Rt297l4aS6nDY= -github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= -github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= -github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= -github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/prometheus/client_model v0.3.0 h1:UBgGFHqYdG/TPFD1B1ogZywDqEkwp3fBMvqdiQ7Xew4= github.com/prometheus/client_model v0.3.0/go.mod h1:LDGWKZIo7rky3hgvBe+caln+Dr3dPggB5dvjtD7w9+w= -github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= -github.com/prometheus/common v0.10.0/go.mod h1:Tlit/dnDKsSWFlCLTWaA1cyBgKHSMdTB80sz/V91rCo= -github.com/prometheus/common v0.26.0/go.mod h1:M7rCNAaPfAosfx8veZJCuw84e35h3Cfd9VFqTh1DIvc= 
-github.com/prometheus/common v0.32.1/go.mod h1:vu+V0TpY+O6vW9J44gczi3Ap/oXXR10b+M/gUGO4Hls= -github.com/prometheus/common v0.37.0 h1:ccBbHCgIiT9uSoFY0vX8H3zsNR5eLt17/RQLUvn8pXE= -github.com/prometheus/common v0.37.0/go.mod h1:phzohg0JFMnBEFGxTDbfu3QyL5GI8gTQJFhYO5B3mfA= -github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= -github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= -github.com/prometheus/procfs v0.1.3/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU= -github.com/prometheus/procfs v0.6.0/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA= -github.com/prometheus/procfs v0.7.3/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA= -github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= -github.com/sergi/go-diff v1.2.0 h1:XU+rvMAioB0UC3q1MFrIQy4Vo5/4VsRDQQXHsEya6xQ= -github.com/sergi/go-diff v1.2.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM= +github.com/prometheus/client_model v0.4.0 h1:5lQXD3cAg1OXBf4Wq03gTrXHeaV0TQvGfUooCfx1yqY= +github.com/prometheus/client_model v0.4.0/go.mod h1:oMQmHW1/JoDwqLtg57MGgP/Fb1CJEYF2imWWhWtMkYU= +github.com/prometheus/common v0.39.0 h1:oOyhkDq05hPZKItWVBkJ6g6AtGxi+fy7F4JvUV8uhsI= +github.com/prometheus/common v0.39.0/go.mod h1:6XBZ7lYdLCbkAVhwRsWTZn+IN5AB9F/NXd5w0BbEX0Y= +github.com/prometheus/common v0.44.0 h1:+5BrQJwiBB9xsMygAB3TNvpQKOwlkc25LbISbrdOOfY= +github.com/prometheus/common v0.44.0/go.mod h1:ofAIvZbQ1e/nugmZGz4/qCb9Ap1VoSTIO7x0VV9VvuY= +github.com/sergi/go-diff v1.3.1 h1:xkr+Oxo4BOQKmkn/B9eMK0g5Kg/983T9DqqPHwYqD+8= +github.com/sergi/go-diff v1.3.1/go.mod h1:aMJSSKb2lpPvRNec0+w3fl7LP9IOFzdc9Pa4NFbPK1I= github.com/shopspring/decimal v1.3.1 h1:2Usl1nmF/WZucqkFZhnfFYxxxu8LG21F6nPQBE5gKV8= github.com/shopspring/decimal v1.3.1/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= -github.com/sirupsen/logrus v1.2.0/go.mod 
h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= -github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88= github.com/sirupsen/logrus v1.9.0 h1:trlNQbNUG3OdDrDil03MCb1H2o9nJ1x4/5LYw7byDE0= github.com/sirupsen/logrus v1.9.0/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= +github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ= +github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= -github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= -github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= -github.com/stretchr/testify v1.8.1 h1:w7B6lhMri9wdJUVmEZPGGhZzrYTPvgJArz7wNPgYKsk= -github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= -github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= -github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= -github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= 
+github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= +github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk= +github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= -go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= -go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= -go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= -go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= -go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= -golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= -golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= -golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= -golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= -golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek= -golang.org/x/exp 
v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY= -golang.org/x/exp v0.0.0-20191129062945-2f5052295587/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= -golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= -golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= -golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= -golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= -golang.org/x/exp v0.0.0-20221126150942-6ab00d035af9 h1:yZNXmy+j/JpX19vZkVktWqAo7Gny4PBWYYK3zskGpx4= -golang.org/x/exp v0.0.0-20221126150942-6ab00d035af9/go.mod h1:CxIveKay+FTh1D0yPZemJVgC/95VzuuOLq5Qi4xnoYc= -golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= -golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= -golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= -golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= -golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= -golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs= -golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod 
h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= -golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= -golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= -golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o= -golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= -golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY= -golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= -golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= -golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/crypto v0.11.0 h1:6Ewdq3tDic1mg5xRO4milcWCfMVQhI4NkqWWvqejpuA= +golang.org/x/crypto v0.11.0/go.mod h1:xgJhtzW8F9jGdVFWZESrid1U1bjeNy4zgy5cRr/CIio= +golang.org/x/exp v0.0.0-20230713183714-613f0c0eb8a1 h1:MGwJjxBy0HJshjDNfLsYO8xppfqWlA5ZT9OhtUUhTNw= +golang.org/x/exp v0.0.0-20230713183714-613f0c0eb8a1/go.mod h1:FXUEEKJgO7OQYeo8N01OfiKP8RXMtf6e8aTskBGqWdc= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= -golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net 
v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= -golang.org/x/net v0.0.0-20190613194153-d28f0bde5980/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200222125558-5a598a2470a0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod 
h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= -golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= -golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= -golang.org/x/net v0.0.0-20210525063256-abc453219eb5/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= -golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= -golang.org/x/net v0.0.0-20220225172249-27dd8689420f/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= -golang.org/x/net v0.0.0-20220624214902-1bab6f366d9e/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= golang.org/x/net v0.1.0/go.mod h1:Cx3nUiGt4eDBEyega/BKRp+/AlGL8hYe7U9odMt2Cco= -golang.org/x/net v0.2.0 h1:sZfSu1wtKLGlWI4ZZayP0ck9Y73K1ynO6gqzTdBVdPU= -golang.org/x/net v0.2.0/go.mod h1:KqCZLdyyvdV855qA2rE3GC2aiw5xGR5TEjj8smXukLY= -golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= -golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= -golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= -golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= -golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= -golang.org/x/oauth2 v0.0.0-20210514164344-f6687ab2804c/go.mod 
h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20220223155221-ee480838109b/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc= -golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/net v0.12.0 h1:cfawfvKITfUsFCeJIHJrbSxpeu/E81khclypR0GVT50= +golang.org/x/net v0.12.0/go.mod h1:zEVYFnQC7m/vmpQFELhcD1EWkZlX69l4oqgmer6hfKA= golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200106162015-b016eb3dc98e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200331124033-c3d80250170d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= 
-golang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200615200032-f1bc736245b1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200625212154-ddb9806d33ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210603081109-ebe580a85c40/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220114195835-da31bd327af9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.2.0 h1:ljd4t30dBnAvMZaQCevtY0xLLD0A+bRZXbgLMLU1F/A= 
-golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.10.0 h1:SqMFp9UcQJZa+pmYuAKjd9xq1f0j5rLcDIk0mj4qAsA= +golang.org/x/sys v0.10.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/term v0.1.0/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= -golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= -golang.org/x/text v0.4.0 h1:BrVqGRd7+k1DiOgtnFvAkoQEWQvBc25ouMJM6429SFg= golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= -golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/text v0.11.0 h1:LAntKIrcmeSKERyiOh0XMV39LXS8IE9UL2yP7+f5ij4= +golang.org/x/text v0.11.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod 
h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= -golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= -golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= -golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= -golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= -golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= -golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= -golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod 
h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200117161641-43d50277825c/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200122220014-bf1340f18c4a/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200204074204-1cc6d1ef6c74/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200207183749-b753a1ba74fa/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200212150539-ea181f53ac56/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200224181240-023911ca70b2/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200227222343-706bc42d1f0d/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200304193943-95d2e580d8eb/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= -golang.org/x/tools v0.0.0-20200312045724-11d5b4c81c7d/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= -golang.org/x/tools v0.0.0-20200331025713-a30bf2db82d4/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= -golang.org/x/tools v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod 
h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= -golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= -golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= -google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= -google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= -google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= -google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= -google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= -google.golang.org/api v0.15.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= -google.golang.org/api v0.17.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.18.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.19.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.20.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.22.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.24.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= -google.golang.org/api v0.28.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= 
-google.golang.org/api v0.29.0/go.mod h1:Lcubydp8VUV7KeIHD9z2Bys/sm/vGKnG1UHuDBSrHWM= -google.golang.org/api v0.30.0/go.mod h1:QGmEvQ87FHZNiUVJkT14jQNYJ4ZJjdRF23ZXz5138Fc= -google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= -google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= -google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= -google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0= -google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= -google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= -google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= -google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= -google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= -google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8= -google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20191115194625-c23dd37a84c9/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20191216164720-4f79533eabd1/go.mod 
h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20191230161307-f3c370f40bfb/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20200115191322-ca5a22157cba/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20200122232147-0452cf42e150/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20200204135345-fa8e72b47b90/go.mod h1:GmwEX6Z4W5gMy59cAlVYjN9JhxgbQH6Gn+gFDQe2lzA= -google.golang.org/genproto v0.0.0-20200212174721-66ed5ce911ce/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200224152610-e50cd9704f63/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200228133532-8c2c7df3a383/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200305110556-506484158171/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200312145019-da6875a35672/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200331122359-1ee6d9798940/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200430143042-b979b6f78d84/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U= -google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= -google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA= -google.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto 
v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= -google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= -google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= -google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= -google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= -google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= -google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= -google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= -google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKal+60= -google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= -google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= -google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= -google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= -google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= -google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= -google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= -google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= -google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= -google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= 
-google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= -google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4= -google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= -google.golang.org/protobuf v1.28.1 h1:d0NfwRgPtno5B1Wa6L2DAG+KivqkdutMf1UhdNx175w= -google.golang.org/protobuf v1.28.1/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= -gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= +google.golang.org/protobuf v1.31.0 h1:g0LDEJHgrBl9N9r17Ru3sqWhkIx2NB67okBHPwC7hs8= +google.golang.org/protobuf v1.31.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= gopkg.in/alexcesaro/quotedprintable.v3 v3.0.0-20150716171945-2caba252f4dc h1:2gGKlE2+asNV9m7xrywl36YYNnBG5ZQ0r/BOOxqPpmk= gopkg.in/alexcesaro/quotedprintable.v3 v3.0.0-20150716171945-2caba252f4dc/go.mod h1:m7x9LTH6d71AHyAX77c9yqWCCa3UKHcVEj9y7hAtKDk= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 h1:YR8cESwS4TdDjEe65xsg0ogRM/Nc3DYOhEAlW+xobZo= gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= -gopkg.in/go-playground/assert.v1 v1.2.1 h1:xoYuJVE7KT85PYWrN730RguIQO0ePzVRfFMXadIrXTM= -gopkg.in/go-playground/validator.v9 v9.31.0 h1:bmXmP2RSNtFES+bn4uYuHT7iJFJv7Vj+an+ZQdDaD1M= -gopkg.in/go-playground/validator.v9 v9.31.0/go.mod h1:+c9/zcJMFNgbLvly1L1V+PpxWdVbfP1avr/N00E2vyQ= +gopkg.in/check.v1 
v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= gopkg.in/mail.v2 v2.3.1 h1:WYFn/oANrAGP2C0dcV6/pbkPzv8yGzqTjPmTeO7qoXk= gopkg.in/mail.v2 v2.3.1/go.mod h1:htwXN1Qh09vZJ1NVKxQqHPBaCBbzKhp5GzuJEA4VJWw= -gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= -honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= -honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= -rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= -rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= 
-rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= diff --git a/goreleaser.yml b/goreleaser.yml index 787b50d7c..da134413d 100644 --- a/goreleaser.yml +++ b/goreleaser.yml @@ -8,9 +8,14 @@ build: - amd64 - arm64 +changelog: + filters: + exclude: + - "^Update CHANGELOG.md" + archives: - files: - - LICENSE - - README.md - - docs/* - - docs/**/* + - LICENSE + - README.md + - docs/* + - docs/**/* diff --git a/mailroom.go b/mailroom.go index 4d6d064c5..906be83e8 100644 --- a/mailroom.go +++ b/mailroom.go @@ -130,32 +130,30 @@ func (mr *Mailroom) Start() error { return err } mr.rt.AttachmentStorage = storage.NewS3(s3Client, mr.rt.Config.S3AttachmentsBucket, c.S3Region, s3.BucketCannedACLPublicRead, 32) - mr.rt.SessionStorage = storage.NewS3(s3Client, mr.rt.Config.S3SessionBucket, c.S3Region, s3.ObjectCannedACLPrivate, 32) + mr.rt.SessionStorage = storage.NewS3(s3Client, mr.rt.Config.S3SessionsBucket, c.S3Region, s3.ObjectCannedACLPrivate, 32) + mr.rt.LogStorage = storage.NewS3(s3Client, mr.rt.Config.S3LogsBucket, c.S3Region, s3.ObjectCannedACLPrivate, 32) } else { - mr.rt.AttachmentStorage = storage.NewFS("_storage", 0766) - mr.rt.SessionStorage = storage.NewFS("_storage", 0766) + mr.rt.AttachmentStorage = storage.NewFS("_storage/attachments", 0766) + mr.rt.SessionStorage = storage.NewFS("_storage/sessions", 0766) + mr.rt.LogStorage = storage.NewFS("_storage/logs", 0766) } - // test our attachment storage - ctx, cancel := context.WithTimeout(context.Background(), time.Second*10) - err = mr.rt.AttachmentStorage.Test(ctx) - cancel() - - if err != nil { + // check our storages + if err := checkStorage(mr.rt.AttachmentStorage); err != nil { log.WithError(err).Error(mr.rt.AttachmentStorage.Name() + " attachment storage not available") } else { log.Info(mr.rt.AttachmentStorage.Name() + " attachment storage ok") } - - ctx, cancel = context.WithTimeout(context.Background(), time.Second*10) - err = mr.rt.SessionStorage.Test(ctx) - cancel() - - if err != 
nil { - log.WithError(err).Warn(mr.rt.SessionStorage.Name() + " session storage not available") + if err := checkStorage(mr.rt.SessionStorage); err != nil { + log.WithError(err).Error(mr.rt.SessionStorage.Name() + " session storage not available") } else { log.Info(mr.rt.SessionStorage.Name() + " session storage ok") } + if err := checkStorage(mr.rt.LogStorage); err != nil { + log.WithError(err).Error(mr.rt.LogStorage.Name() + " log storage not available") + } else { + log.Info(mr.rt.LogStorage.Name() + " log storage ok") + } // initialize our elastic client mr.rt.ES, err = newElasticClient(c.Elastic, c.ElasticUsername, c.ElasticPassword) @@ -288,3 +286,10 @@ func newElasticClient(url string, username string, password string) (*elastic.Cl elastic.SetBasicAuth(username, password), ) } + +func checkStorage(s storage.Storage) error { + ctx, cancel := context.WithTimeout(context.Background(), time.Second*10) + err := s.Test(ctx) + cancel() + return err +} diff --git a/mailroom_test.dump b/mailroom_test.dump index 91d0cad12..89ca8b680 100644 Binary files a/mailroom_test.dump and b/mailroom_test.dump differ diff --git a/runtime/config.go b/runtime/config.go index 4af44ff87..64e51bafa 100644 --- a/runtime/config.go +++ b/runtime/config.go @@ -7,9 +7,9 @@ import ( "os" "strings" + "github.com/go-playground/validator/v10" "github.com/nyaruka/goflow/utils" "github.com/pkg/errors" - "gopkg.in/go-playground/validator.v9" ) func init() { @@ -48,16 +48,17 @@ type Config struct { MaxValueLength int `help:"the maximum size in characters for contact field values and run result values"` SessionStorage string `validate:"omitempty,session_storage" help:"where to store session output (s3|db)"` - Elastic string `validate:"url" help:"the URL of your ElasticSearch instance"` - ElasticUsername string `help:"the username for ElasticSearch if using basic auth"` - ElasticPassword string `help:"the password for ElasticSearch if using basic auth"` + Elastic string `validate:"url" help:"the URL 
of your ElasticSearch instance"` + ElasticUsername string `help:"the username for ElasticSearch if using basic auth"` + ElasticPassword string `help:"the password for ElasticSearch if using basic auth"` + ElasticContactsIndex string `help:"the name of index alias for contacts"` S3Endpoint string `help:"the S3 endpoint we will write attachments to"` S3Region string `help:"the S3 region we will write attachments to"` S3AttachmentsBucket string `help:"the S3 bucket we will write attachments to"` S3AttachmentsPrefix string `help:"the prefix that will be added to attachment filenames"` - S3SessionBucket string `help:"the S3 bucket we will write attachments to"` - S3SessionPrefix string `help:"the prefix that will be added to attachment filenames"` + S3SessionsBucket string `help:"the S3 bucket we will write attachments to"` + S3LogsBucket string `help:"the S3 bucket we will write logs to"` S3DisableSSL bool `help:"whether we disable SSL when accessing S3. Should always be set to False unless you're hosting an S3 compatible service within a secure internal network"` S3ForcePathStyle bool `help:"whether we force S3 path style. 
Should generally need to default to False unless you're hosting an S3 compatible service"` @@ -108,16 +109,17 @@ func NewDefaultConfig() *Config { MaxValueLength: 640, SessionStorage: "db", - Elastic: "http://localhost:9200", - ElasticUsername: "", - ElasticPassword: "", + Elastic: "http://localhost:9200", + ElasticUsername: "", + ElasticPassword: "", + ElasticContactsIndex: "contacts", S3Endpoint: "https://s3.amazonaws.com", S3Region: "us-east-1", - S3AttachmentsBucket: "mailroom-attachments", - S3AttachmentsPrefix: "/attachments/", - S3SessionBucket: "mailroom-sessions", - S3SessionPrefix: "/", + S3AttachmentsBucket: "attachments-bucket", + S3AttachmentsPrefix: "attachments/", + S3SessionsBucket: "sessions-bucket", + S3LogsBucket: "logs-bucket", S3DisableSSL: false, S3ForcePathStyle: false, diff --git a/runtime/runtime.go b/runtime/runtime.go index b29afbd75..bef20fc91 100644 --- a/runtime/runtime.go +++ b/runtime/runtime.go @@ -16,5 +16,6 @@ type Runtime struct { ES *elastic.Client AttachmentStorage storage.Storage SessionStorage storage.Storage + LogStorage storage.Storage Config *Config } diff --git a/services/ivr/twiml/service.go b/services/ivr/twiml/service.go index b2bf0e9de..7aa262405 100644 --- a/services/ivr/twiml/service.go +++ b/services/ivr/twiml/service.go @@ -62,7 +62,7 @@ const ( ) // https://www.twilio.com/docs/voice/twiml/say -var supportedSayLanguages = utils.StringSet([]string{ +var supportedSayLanguages = utils.Set([]string{ "da-DK", "de-DE", "en-AU", @@ -136,7 +136,9 @@ func NewService(httpClient *http.Client, accountSID string, authToken string) iv } func (s *service) DownloadMedia(url string) (*http.Response, error) { - return http.Get(url) + req, _ := http.NewRequest(http.MethodGet, url, nil) + req.SetBasicAuth(s.accountSID, s.authToken) + return http.DefaultClient.Do(req) } func (s *service) CheckStartRequest(r *http.Request) models.CallError { @@ -530,5 +532,8 @@ func ResponseForSprint(cfg *runtime.Config, urn urns.URN, resumeURL string, 
es [ } func (s *service) RedactValues(ch *models.Channel) []string { - return []string{ch.ConfigValue(authTokenConfig, "")} + return []string{ + httpx.BasicAuth(ch.ConfigValue(accountSIDConfig, ""), ch.ConfigValue(authTokenConfig, "")), + ch.ConfigValue(authTokenConfig, ""), + } } diff --git a/services/ivr/twiml/service_test.go b/services/ivr/twiml/service_test.go index abbf272fb..03e8dc87c 100644 --- a/services/ivr/twiml/service_test.go +++ b/services/ivr/twiml/service_test.go @@ -22,7 +22,7 @@ import ( ) func TestResponseForSprint(t *testing.T) { - _, rt, _, _ := testsuite.Get() + _, rt := testsuite.Runtime() urn := urns.URN("tel:+12067799294") expiresOn := time.Now().Add(time.Hour) @@ -138,11 +138,11 @@ func TestURNForRequest(t *testing.T) { } func TestRedactValues(t *testing.T) { - _, rt, _, _ := testsuite.Get() + _, rt := testsuite.Runtime() oa := testdata.Org1.Load(rt) ch := oa.ChannelByUUID(testdata.TwilioChannel.UUID) svc, _ := ivr.GetService(ch) - assert.Equal(t, []string{"sesame"}, svc.RedactValues(ch)) + assert.Equal(t, []string{"U0lEMTIzNDU2Nzg5OnNlc2FtZQ==", "sesame"}, svc.RedactValues(ch)) } diff --git a/services/ivr/vonage/client.go b/services/ivr/vonage/client.go index 9b1404c3d..33214b59e 100644 --- a/services/ivr/vonage/client.go +++ b/services/ivr/vonage/client.go @@ -23,7 +23,7 @@ type CallRequest struct { EventMethod string `json:"event_method"` NCCO []NCCO `json:"ncco,omitempty"` - MachineDetection string `json:"machine_detection"` + MachineDetection string `json:"machine_detection,omitempty"` LengthTimer int `json:"length_timer,omitempty"` RingingTimer int `json:"ringing_timer,omitempty"` } diff --git a/services/ivr/vonage/service_test.go b/services/ivr/vonage/service_test.go index 3ddd11129..b9af84449 100644 --- a/services/ivr/vonage/service_test.go +++ b/services/ivr/vonage/service_test.go @@ -23,8 +23,8 @@ import ( ) func TestResponseForSprint(t *testing.T) { - ctx, rt, db, rp := testsuite.Get() - rc := rp.Get() + ctx, rt := 
testsuite.Runtime() + rc := rt.RP.Get() defer rc.Close() defer testsuite.Reset(testsuite.ResetAll) @@ -45,10 +45,10 @@ func TestResponseForSprint(t *testing.T) { resumeURL := "http://temba.io/resume?session=1" // deactivate our twilio channel - db.MustExec(`UPDATE channels_channel SET is_active = FALSE WHERE id = $1`, testdata.TwilioChannel.ID) + rt.DB.MustExec(`UPDATE channels_channel SET is_active = FALSE WHERE id = $1`, testdata.TwilioChannel.ID) // update callback domain and roles for channel - db.MustExec(`UPDATE channels_channel SET config = config::jsonb || '{"callback_domain": "localhost:8090"}'::jsonb, role='SRCA' WHERE id = $1`, testdata.VonageChannel.ID) + rt.DB.MustExec(`UPDATE channels_channel SET config = config || '{"callback_domain": "localhost:8090"}'::jsonb, role='SRCA' WHERE id = $1`, testdata.VonageChannel.ID) // set our UUID generator uuids.SetGenerator(uuids.NewSeededGenerator(0)) @@ -64,7 +64,7 @@ func TestResponseForSprint(t *testing.T) { provider := p.(*service) - conn, err := models.InsertCall(ctx, db, testdata.Org1.ID, testdata.VonageChannel.ID, models.NilStartID, testdata.Bob.ID, testdata.Bob.URNID, models.CallDirectionOut, models.CallStatusInProgress, "EX123") + conn, err := models.InsertCall(ctx, rt.DB, testdata.Org1.ID, testdata.VonageChannel.ID, models.NilStartID, testdata.Bob.ID, testdata.Bob.URNID, models.CallDirectionOut, models.CallStatusInProgress, "EX123") require.NoError(t, err) indentMarshal = false @@ -128,7 +128,7 @@ func TestResponseForSprint(t *testing.T) { } for i, tc := range tcs { - response, err := provider.responseForSprint(ctx, rp, channel, conn, resumeURL, tc.events) + response, err := provider.responseForSprint(ctx, rt.RP, channel, conn, resumeURL, tc.events) assert.NoError(t, err, "%d: unexpected error") assert.Equal(t, tc.expected, response, "%d: unexpected response", i) } @@ -143,7 +143,7 @@ func TestResponseForSprint(t *testing.T) { } func TestRedactValues(t *testing.T) { - _, rt, _, _ := testsuite.Get() + _, 
rt := testsuite.Runtime() oa := testdata.Org1.Load(rt) ch := oa.ChannelByUUID(testdata.VonageChannel.UUID) diff --git a/services/tickets/intern/service_test.go b/services/tickets/intern/service_test.go index e97f01c22..640bbdbe6 100644 --- a/services/tickets/intern/service_test.go +++ b/services/tickets/intern/service_test.go @@ -14,13 +14,12 @@ import ( intern "github.com/nyaruka/mailroom/services/tickets/intern" "github.com/nyaruka/mailroom/testsuite" "github.com/nyaruka/mailroom/testsuite/testdata" - "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) func TestOpenAndForward(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer uuids.SetGenerator(uuids.DefaultGenerator) uuids.SetGenerator(uuids.NewSeededGenerator(12345)) @@ -43,7 +42,7 @@ func TestOpenAndForward(t *testing.T) { defaultTopic := oa.SessionAssets().Topics().FindByName("General") env := envs.NewBuilder().Build() - _, contact := testdata.Cathy.Load(db, oa) + _, contact := testdata.Cathy.Load(rt, oa) ticket, err := svc.Open(env, contact, defaultTopic, "Where are my cookies?", nil, logger.Log) assert.NoError(t, err) @@ -70,7 +69,7 @@ func TestOpenAndForward(t *testing.T) { } func TestCloseAndReopen(t *testing.T) { - _, rt, _, _ := testsuite.Get() + _, rt := testsuite.Runtime() defer uuids.SetGenerator(uuids.DefaultGenerator) uuids.SetGenerator(uuids.NewSeededGenerator(12345)) diff --git a/services/tickets/mailgun/service_test.go b/services/tickets/mailgun/service_test.go index 638f40f30..91f13f17d 100644 --- a/services/tickets/mailgun/service_test.go +++ b/services/tickets/mailgun/service_test.go @@ -24,7 +24,7 @@ import ( ) func TestOpenAndForward(t *testing.T) { - ctx, rt, _, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() session, _, err := test.CreateTestSession("", envs.RedactionPolicyNone) require.NoError(t, err) @@ -118,7 +118,7 @@ func TestOpenAndForward(t *testing.T) { } func TestCloseAndReopen(t *testing.T) { - _, rt, _, _ := 
testsuite.Get() + _, rt := testsuite.Runtime() defer uuids.SetGenerator(uuids.DefaultGenerator) defer httpx.SetRequestor(httpx.DefaultRequestor) diff --git a/services/tickets/mailgun/web.go b/services/tickets/mailgun/web.go index 8f3735bf1..c614d49dd 100644 --- a/services/tickets/mailgun/web.go +++ b/services/tickets/mailgun/web.go @@ -22,7 +22,7 @@ import ( func init() { base := "/mr/tickets/types/mailgun" - web.RegisterJSONRoute(http.MethodPost, base+"/receive", web.WithHTTPLogs(handleReceive)) + web.RegisterRoute(http.MethodPost, base+"/receive", web.MarshaledResponse(web.WithHTTPLogs(handleReceive))) } type receiveRequest struct { @@ -61,7 +61,7 @@ type receiveResponse struct { var addressRegex = regexp.MustCompile(`^ticket\+([0-9a-fA-F]{8}\-[0-9a-fA-F]{4}\-[0-9a-fA-F]{4}\-[0-9a-fA-F]{4}\-[0-9a-fA-F]{12})@.*$`) -func handleReceive(ctx context.Context, rt *runtime.Runtime, r *http.Request, l *models.HTTPLogger) (interface{}, int, error) { +func handleReceive(ctx context.Context, rt *runtime.Runtime, r *http.Request, l *models.HTTPLogger) (any, int, error) { request := &receiveRequest{} if err := web.DecodeAndValidateForm(request, r); err != nil { return errors.Wrapf(err, "error decoding form"), http.StatusBadRequest, nil diff --git a/services/tickets/mailgun/web_test.go b/services/tickets/mailgun/web_test.go index f831263f4..ceb278c99 100644 --- a/services/tickets/mailgun/web_test.go +++ b/services/tickets/mailgun/web_test.go @@ -6,16 +6,15 @@ import ( "github.com/nyaruka/mailroom/testsuite" "github.com/nyaruka/mailroom/testsuite/testdata" - "github.com/nyaruka/mailroom/web" ) func TestReceive(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetData | testsuite.ResetStorage) // create a mailgun ticket for Cathy - ticket := testdata.InsertOpenTicket(db, testdata.Org1, testdata.Cathy, testdata.Mailgun, testdata.DefaultTopic, "Have you seen my cookies?", "", time.Now(), nil) + ticket := 
testdata.InsertOpenTicket(rt, testdata.Org1, testdata.Cathy, testdata.Mailgun, testdata.DefaultTopic, "Have you seen my cookies?", "", time.Now(), nil) - web.RunWebTests(t, ctx, rt, "testdata/receive.json", map[string]string{"cathy_ticket_uuid": string(ticket.UUID)}) + testsuite.RunWebTests(t, ctx, rt, "testdata/receive.json", map[string]string{"cathy_ticket_uuid": string(ticket.UUID)}) } diff --git a/services/tickets/rocketchat/service_test.go b/services/tickets/rocketchat/service_test.go index 1420ab5ef..b93b1582e 100644 --- a/services/tickets/rocketchat/service_test.go +++ b/services/tickets/rocketchat/service_test.go @@ -24,7 +24,7 @@ import ( ) func TestOpenAndForward(t *testing.T) { - ctx, rt, _, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer dates.SetNowSource(dates.DefaultNowSource) dates.SetNowSource(dates.NewSequentialNowSource(time.Date(2019, 10, 7, 15, 21, 30, 0, time.UTC))) @@ -109,7 +109,7 @@ func TestOpenAndForward(t *testing.T) { } func TestCloseAndReopen(t *testing.T) { - _, rt, _, _ := testsuite.Get() + _, rt := testsuite.Runtime() defer uuids.SetGenerator(uuids.DefaultGenerator) defer httpx.SetRequestor(httpx.DefaultRequestor) diff --git a/services/tickets/rocketchat/web.go b/services/tickets/rocketchat/web.go index 68a99247d..239f827f7 100644 --- a/services/tickets/rocketchat/web.go +++ b/services/tickets/rocketchat/web.go @@ -20,7 +20,7 @@ import ( func init() { base := "/mr/tickets/types/rocketchat" - web.RegisterJSONRoute(http.MethodPost, base+"/event_callback/{ticketer:[a-f0-9\\-]+}", web.WithHTTPLogs(handleEventCallback)) + web.RegisterRoute(http.MethodPost, base+"/event_callback/{ticketer:[a-f0-9\\-]+}", web.MarshaledResponse(web.WithHTTPLogs(handleEventCallback))) } type eventCallbackRequest struct { @@ -37,7 +37,7 @@ type agentMessageData struct { } `json:"attachments"` } -func handleEventCallback(ctx context.Context, rt *runtime.Runtime, r *http.Request, l *models.HTTPLogger) (interface{}, int, error) { +func 
handleEventCallback(ctx context.Context, rt *runtime.Runtime, r *http.Request, l *models.HTTPLogger) (any, int, error) { ticketerUUID := assets.TicketerUUID(chi.URLParam(r, "ticketer")) // look up ticketer diff --git a/services/tickets/rocketchat/web_test.go b/services/tickets/rocketchat/web_test.go index b37567263..2c682e23b 100644 --- a/services/tickets/rocketchat/web_test.go +++ b/services/tickets/rocketchat/web_test.go @@ -6,16 +6,15 @@ import ( "github.com/nyaruka/mailroom/testsuite" "github.com/nyaruka/mailroom/testsuite/testdata" - "github.com/nyaruka/mailroom/web" ) func TestEventCallback(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetData | testsuite.ResetStorage) // create a rocketchat ticket for Cathy - ticket := testdata.InsertOpenTicket(db, testdata.Org1, testdata.Cathy, testdata.RocketChat, testdata.DefaultTopic, "Have you seen my cookies?", "1234", time.Now(), nil) + ticket := testdata.InsertOpenTicket(rt, testdata.Org1, testdata.Cathy, testdata.RocketChat, testdata.DefaultTopic, "Have you seen my cookies?", "1234", time.Now(), nil) - web.RunWebTests(t, ctx, rt, "testdata/event_callback.json", map[string]string{"cathy_ticket_uuid": string(ticket.UUID)}) + testsuite.RunWebTests(t, ctx, rt, "testdata/event_callback.json", map[string]string{"cathy_ticket_uuid": string(ticket.UUID)}) } diff --git a/services/tickets/utils.go b/services/tickets/utils.go index 805203c75..0d3366008 100644 --- a/services/tickets/utils.go +++ b/services/tickets/utils.go @@ -9,6 +9,7 @@ import ( "path/filepath" "time" + "github.com/nyaruka/gocommon/dates" "github.com/nyaruka/gocommon/httpx" "github.com/nyaruka/gocommon/uuids" "github.com/nyaruka/goflow/assets" @@ -19,7 +20,6 @@ import ( "github.com/nyaruka/mailroom/core/msgio" "github.com/nyaruka/mailroom/core/tasks/handler" "github.com/nyaruka/mailroom/runtime" - "github.com/pkg/errors" ) @@ -85,6 +85,17 @@ func SendReply(ctx context.Context, rt 
*runtime.Runtime, ticket *models.Ticket, return nil, errors.Wrapf(err, "error looking up org #%d", ticket.OrgID()) } + // load the contact and generate as a flow contact + c, err := models.LoadContact(ctx, rt.DB, oa, ticket.ContactID()) + if err != nil { + return nil, errors.Wrap(err, "error loading contact") + } + + contact, err := c.FlowContact(oa) + if err != nil { + return nil, errors.Wrap(err, "error creating flow contact") + } + // upload files to create message attachments attachments := make([]utils.Attachment, len(files)) for i, file := range files { @@ -96,20 +107,23 @@ func SendReply(ctx context.Context, rt *runtime.Runtime, ticket *models.Ticket, } } - // build a simple translation - base := &models.BroadcastTranslation{Text: text, Attachments: attachments} - translations := map[envs.Language]*models.BroadcastTranslation{envs.Language("base"): base} + out, ch := models.NewMsgOut(oa, contact, text, attachments, nil, contact.Locale(oa.Env())) + msg, err := models.NewOutgoingTicketMsg(rt, oa.Org(), ch, contact, out, dates.Now(), ticket.ID(), models.NilUserID) + if err != nil { + return nil, errors.Wrap(err, "error creating outgoing message") + } - // we'll use a broadcast to send this message - bcast := models.NewBroadcast(oa.OrgID(), models.NilBroadcastID, translations, models.TemplateStateEvaluated, envs.Language("base"), nil, nil, nil, ticket.ID(), models.NilUserID) - batch := bcast.CreateBatch([]models.ContactID{ticket.ContactID()}) - msgs, err := batch.CreateMessages(ctx, rt, oa) + err = models.InsertMessages(ctx, rt.DB, []*models.Msg{msg}) if err != nil { - return nil, errors.Wrapf(err, "error creating message batch") + return nil, errors.Wrap(err, "error inserting outgoing message") + } + + if err := models.RecordTicketReply(ctx, rt.DB, oa, ticket.ID(), models.NilUserID); err != nil { + return nil, errors.Wrap(err, "error recording ticket reply") } - msgio.SendMessages(ctx, rt, rt.DB, nil, msgs) - return msgs[0], nil + msgio.SendMessages(ctx, rt, 
rt.DB, nil, []*models.Msg{msg}) + return msg, nil } var retries = httpx.NewFixedRetries(time.Second*5, time.Second*10) diff --git a/services/tickets/utils_test.go b/services/tickets/utils_test.go index dfdfbc247..b7a2903ee 100644 --- a/services/tickets/utils_test.go +++ b/services/tickets/utils_test.go @@ -22,12 +22,12 @@ import ( ) func TestGetContactDisplay(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() oa, err := models.GetOrgAssets(ctx, rt, testdata.Org1.ID) require.NoError(t, err) - contact, err := models.LoadContact(ctx, db, oa, testdata.Cathy.ID) + contact, err := models.LoadContact(ctx, rt.DB, oa, testdata.Cathy.ID) require.NoError(t, err) flowContact, err := contact.FlowContact(oa) @@ -47,16 +47,16 @@ func TestGetContactDisplay(t *testing.T) { } func TestFromTicketUUID(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetAll) // create some tickets - ticket1 := testdata.InsertOpenTicket(db, testdata.Org1, testdata.Cathy, testdata.Mailgun, testdata.DefaultTopic, "Have you seen my cookies?", "", time.Now(), nil) - ticket2 := testdata.InsertOpenTicket(db, testdata.Org1, testdata.Cathy, testdata.Zendesk, testdata.DefaultTopic, "Have you seen my shoes?", "", time.Now(), nil) + ticket1 := testdata.InsertOpenTicket(rt, testdata.Org1, testdata.Cathy, testdata.Mailgun, testdata.DefaultTopic, "Have you seen my cookies?", "", time.Now(), nil) + ticket2 := testdata.InsertOpenTicket(rt, testdata.Org1, testdata.Cathy, testdata.Zendesk, testdata.DefaultTopic, "Have you seen my shoes?", "", time.Now(), nil) // break mailgun configuration - db.MustExec(`UPDATE tickets_ticketer SET config = '{"foo":"bar"}'::jsonb WHERE id = $1`, testdata.Mailgun.ID) + rt.DB.MustExec(`UPDATE tickets_ticketer SET config = '{"foo":"bar"}'::jsonb WHERE id = $1`, testdata.Mailgun.ID) models.FlushCache() @@ -82,12 +82,12 @@ func TestFromTicketUUID(t *testing.T) { } func 
TestFromTicketerUUID(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetAll) // break mailgun configuration - db.MustExec(`UPDATE tickets_ticketer SET config = '{"foo":"bar"}'::jsonb WHERE id = $1`, testdata.Mailgun.ID) + rt.DB.MustExec(`UPDATE tickets_ticketer SET config = '{"foo":"bar"}'::jsonb WHERE id = $1`, testdata.Mailgun.ID) // err if no ticketer with UUID _, _, err := tickets.FromTicketerUUID(ctx, rt, "33c54d0c-bd49-4edf-87a9-c391a75a630c", "mailgun") @@ -110,7 +110,7 @@ func TestFromTicketerUUID(t *testing.T) { } func TestSendReply(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetAll) @@ -123,8 +123,8 @@ func TestSendReply(t *testing.T) { image := &tickets.File{URL: "http://coolfiles.com/a.jpg", ContentType: "image/jpeg", Body: imageBody} // create a ticket - ticket := testdata.InsertOpenTicket(db, testdata.Org1, testdata.Cathy, testdata.Mailgun, testdata.DefaultTopic, "Have you seen my cookies?", "", time.Now(), nil) - modelTicket := ticket.Load(db) + ticket := testdata.InsertOpenTicket(rt, testdata.Org1, testdata.Cathy, testdata.Mailgun, testdata.DefaultTopic, "Have you seen my cookies?", "", time.Now(), nil) + modelTicket := ticket.Load(rt) msg, err := tickets.SendReply(ctx, rt, modelTicket, "I'll get back to you", []*tickets.File{image}) require.NoError(t, err) @@ -140,7 +140,7 @@ func TestSendReply(t *testing.T) { } func TestCloseTicket(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetAll) @@ -176,11 +176,11 @@ func TestCloseTicket(t *testing.T) { "contact-display": "Cathy", }, ) - err := models.InsertTickets(ctx, db, oa, []*models.Ticket{ticket1}) + err := models.InsertTickets(ctx, rt.DB, oa, []*models.Ticket{ticket1}) require.NoError(t, err) // create a close ticket trigger - testdata.InsertTicketClosedTrigger(db, testdata.Org1, 
testdata.Favorites) + testdata.InsertTicketClosedTrigger(rt, testdata.Org1, testdata.Favorites) logger := &models.HTTPLogger{} diff --git a/services/tickets/zendesk/service_test.go b/services/tickets/zendesk/service_test.go index bbf04dda3..69044d8fb 100644 --- a/services/tickets/zendesk/service_test.go +++ b/services/tickets/zendesk/service_test.go @@ -24,7 +24,7 @@ import ( ) func TestOpenAndForward(t *testing.T) { - ctx, rt, _, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() session, _, err := test.CreateTestSession("", envs.RedactionPolicyNone) require.NoError(t, err) @@ -124,7 +124,7 @@ func TestOpenAndForward(t *testing.T) { } func TestCloseAndReopen(t *testing.T) { - _, rt, _, _ := testsuite.Get() + _, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetData) diff --git a/services/tickets/zendesk/web.go b/services/tickets/zendesk/web.go index bf5c1ce64..363949576 100644 --- a/services/tickets/zendesk/web.go +++ b/services/tickets/zendesk/web.go @@ -16,7 +16,6 @@ import ( "github.com/nyaruka/mailroom/runtime" "github.com/nyaruka/mailroom/services/tickets" "github.com/nyaruka/mailroom/web" - "github.com/pkg/errors" "github.com/sirupsen/logrus" ) @@ -24,9 +23,9 @@ import ( func init() { base := "/mr/tickets/types/zendesk" - web.RegisterJSONRoute(http.MethodPost, base+"/channelback", handleChannelback) - web.RegisterJSONRoute(http.MethodPost, base+"/event_callback", web.WithHTTPLogs(handleEventCallback)) - web.RegisterJSONRoute(http.MethodPost, base+`/target/{ticketer:[a-f0-9\-]+}`, web.WithHTTPLogs(handleTicketerTarget)) + web.RegisterRoute(http.MethodPost, base+"/channelback", web.MarshaledResponse(handleChannelback)) + web.RegisterRoute(http.MethodPost, base+"/event_callback", web.MarshaledResponse(web.WithHTTPLogs(handleEventCallback))) + web.RegisterRoute(http.MethodPost, base+`/target/{ticketer:[a-f0-9\-]+}`, web.MarshaledResponse(web.WithHTTPLogs(handleTicketerTarget))) } type integrationMetadata struct { @@ -48,7 +47,7 @@ type 
channelbackResponse struct { AllowChannelback bool `json:"allow_channelback"` } -func handleChannelback(ctx context.Context, rt *runtime.Runtime, r *http.Request) (interface{}, int, error) { +func handleChannelback(ctx context.Context, rt *runtime.Runtime, r *http.Request) (any, int, error) { request := &channelbackRequest{} if err := web.DecodeAndValidateForm(request, r); err != nil { return errors.Wrapf(err, "error decoding form"), http.StatusBadRequest, nil @@ -131,7 +130,7 @@ type eventCallbackRequest struct { Events []*channelEvent `json:"events" validate:"required"` } -func handleEventCallback(ctx context.Context, rt *runtime.Runtime, r *http.Request, l *models.HTTPLogger) (interface{}, int, error) { +func handleEventCallback(ctx context.Context, rt *runtime.Runtime, r *http.Request, l *models.HTTPLogger) (any, int, error) { request := &eventCallbackRequest{} if err := web.ReadAndValidateJSON(r, request); err != nil { return err, http.StatusBadRequest, nil @@ -199,7 +198,7 @@ func processChannelEvent(ctx context.Context, rt *runtime.Runtime, event *channe } // delete config values that came from adding this account - remConfig := utils.StringSet([]string{configPushID, configPushToken, configTargetID, configTriggerID}) + remConfig := utils.Set([]string{configPushID, configPushToken, configTargetID, configTriggerID}) if err := ticketer.UpdateConfig(ctx, rt.DB, nil, remConfig); err != nil { return errors.Wrapf(err, "error updating config for ticketer %s", ticketer.UUID()) } @@ -252,7 +251,7 @@ type targetRequest struct { Status string `json:"status"` } -func handleTicketerTarget(ctx context.Context, rt *runtime.Runtime, r *http.Request, l *models.HTTPLogger) (interface{}, int, error) { +func handleTicketerTarget(ctx context.Context, rt *runtime.Runtime, r *http.Request, l *models.HTTPLogger) (any, int, error) { ticketerUUID := assets.TicketerUUID(chi.URLParam(r, "ticketer")) // look up our ticketer diff --git a/services/tickets/zendesk/web_test.go 
b/services/tickets/zendesk/web_test.go index c597b0da5..0ec384c4f 100644 --- a/services/tickets/zendesk/web_test.go +++ b/services/tickets/zendesk/web_test.go @@ -6,38 +6,37 @@ import ( "github.com/nyaruka/mailroom/testsuite" "github.com/nyaruka/mailroom/testsuite/testdata" - "github.com/nyaruka/mailroom/web" ) func TestChannelback(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetData) // create a zendesk ticket for Cathy - ticket := testdata.InsertOpenTicket(db, testdata.Org1, testdata.Cathy, testdata.Zendesk, testdata.DefaultTopic, "Have you seen my cookies?", "1234", time.Now(), nil) + ticket := testdata.InsertOpenTicket(rt, testdata.Org1, testdata.Cathy, testdata.Zendesk, testdata.DefaultTopic, "Have you seen my cookies?", "1234", time.Now(), nil) - web.RunWebTests(t, ctx, rt, "testdata/channelback.json", map[string]string{"cathy_ticket_uuid": string(ticket.UUID)}) + testsuite.RunWebTests(t, ctx, rt, "testdata/channelback.json", map[string]string{"cathy_ticket_uuid": string(ticket.UUID)}) } func TestEventCallback(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetAll) // tests include destroying ticketer // create a zendesk ticket for Cathy - ticket := testdata.InsertOpenTicket(db, testdata.Org1, testdata.Cathy, testdata.Zendesk, testdata.DefaultTopic, "Have you seen my cookies?", "1234", time.Now(), nil) + ticket := testdata.InsertOpenTicket(rt, testdata.Org1, testdata.Cathy, testdata.Zendesk, testdata.DefaultTopic, "Have you seen my cookies?", "1234", time.Now(), nil) - web.RunWebTests(t, ctx, rt, "testdata/event_callback.json", map[string]string{"cathy_ticket_uuid": string(ticket.UUID)}) + testsuite.RunWebTests(t, ctx, rt, "testdata/event_callback.json", map[string]string{"cathy_ticket_uuid": string(ticket.UUID)}) } func TestTarget(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() 
defer testsuite.Reset(testsuite.ResetData) // create a zendesk ticket for Cathy - ticket := testdata.InsertOpenTicket(db, testdata.Org1, testdata.Cathy, testdata.Zendesk, testdata.DefaultTopic, "Have you seen my cookies?", "1234", time.Now(), nil) + ticket := testdata.InsertOpenTicket(rt, testdata.Org1, testdata.Cathy, testdata.Zendesk, testdata.DefaultTopic, "Have you seen my cookies?", "1234", time.Now(), nil) - web.RunWebTests(t, ctx, rt, "testdata/target.json", map[string]string{"cathy_ticket_uuid": string(ticket.UUID)}) + testsuite.RunWebTests(t, ctx, rt, "testdata/target.json", map[string]string{"cathy_ticket_uuid": string(ticket.UUID)}) } diff --git a/testsuite/elastic.go b/testsuite/elastic.go deleted file mode 100644 index d87e9357d..000000000 --- a/testsuite/elastic.go +++ /dev/null @@ -1,115 +0,0 @@ -package testsuite - -import ( - "fmt" - "io" - "net/http" - "net/http/httptest" - - "github.com/nyaruka/gocommon/jsonx" - "github.com/nyaruka/mailroom/core/models" - "github.com/olivere/elastic/v7" -) - -// MockElasticServer is a mock HTTP server/endpoint that can be used to test elastic queries -type MockElasticServer struct { - Server *httptest.Server - LastRequestURL string - LastRequestBody string - Responses [][]byte -} - -// NewMockElasticServer creates a new mock elastic server -func NewMockElasticServer() *MockElasticServer { - m := &MockElasticServer{} - m.Server = httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - m.LastRequestURL = r.URL.String() - - // scrolling of results, we are always one page, so return empty hits - if r.URL.String() == "/_search/scroll" { - w.WriteHeader(200) - w.Write([]byte(` - { - "_scroll_id": "anything==", - "took": 7, - "timed_out": false, - "_shards": { - "total": 1, - "successful": 1, - "skipped": 0, - "failed": 0 - }, - "hits": { - "total": 1000, - "max_score": null, - "hits": [] - } - } - `)) - return - } - - // otherwise read our next body and return our next response - body, _ := 
io.ReadAll(r.Body) - m.LastRequestBody = string(body) - - if len(m.Responses) == 0 { - panic("mock elastic server has no more queued responses") - } - - var response []byte - response, m.Responses = m.Responses[0], m.Responses[1:] - - w.WriteHeader(200) - w.Write(response) - })) - return m -} - -func (m *MockElasticServer) Client() *elastic.Client { - c, _ := elastic.NewClient(elastic.SetURL(m.URL()), elastic.SetHealthcheck(false), elastic.SetSniff(false)) - return c -} - -// Close closes our HTTP server -func (m *MockElasticServer) Close() { - m.Server.Close() -} - -// URL returns the URL to call this server -func (m *MockElasticServer) URL() string { - return m.Server.URL -} - -// AddResponse adds a mock response to the server's queue -func (m *MockElasticServer) AddResponse(ids ...models.ContactID) { - hits := make([]map[string]interface{}, len(ids)) - for i := range ids { - hits[i] = map[string]interface{}{ - "_index": "contacts", - "_type": "_doc", - "_id": fmt.Sprintf("%d", ids[i]), - "_score": nil, - "_routing": "1", - "sort": []int{15124352}, - } - } - - response := jsonx.MustMarshal(map[string]interface{}{ - "_scroll_id": "DXF1ZXJ5QW5kRmV0Y2gBAAAAAAAbgc0WS1hqbHlfb01SM2lLTWJRMnVOSVZDdw==", - "took": 2, - "timed_out": false, - "_shards": map[string]interface{}{ - "total": 1, - "successful": 1, - "skipped": 0, - "failed": 0, - }, - "hits": map[string]interface{}{ - "total": len(ids), - "max_score": nil, - "hits": hits, - }, - }) - m.Responses = append(m.Responses, response) -} diff --git a/testsuite/tasks.go b/testsuite/tasks.go new file mode 100644 index 000000000..e4d84a353 --- /dev/null +++ b/testsuite/tasks.go @@ -0,0 +1,73 @@ +package testsuite + +import ( + "fmt" + "testing" + + "github.com/gomodule/redigo/redis" + "github.com/nyaruka/gocommon/jsonx" + "github.com/nyaruka/mailroom" + "github.com/nyaruka/mailroom/core/models" + "github.com/nyaruka/mailroom/core/queue" + "github.com/nyaruka/mailroom/runtime" + "github.com/stretchr/testify/assert" + 
"github.com/stretchr/testify/require" +) + +func CurrentTasks(t *testing.T, rt *runtime.Runtime) map[models.OrgID][]*queue.Task { + rc := rt.RP.Get() + defer rc.Close() + + // get all active org queues + active, err := redis.Ints(rc.Do("ZRANGE", "batch:active", 0, -1)) + require.NoError(t, err) + + tasks := make(map[models.OrgID][]*queue.Task) + for _, orgID := range active { + orgTasksEncoded, err := redis.Strings(rc.Do("ZRANGE", fmt.Sprintf("batch:%d", orgID), 0, -1)) + require.NoError(t, err) + + orgTasks := make([]*queue.Task, len(orgTasksEncoded)) + + for i := range orgTasksEncoded { + task := &queue.Task{} + jsonx.MustUnmarshal([]byte(orgTasksEncoded[i]), task) + orgTasks[i] = task + } + + tasks[models.OrgID(orgID)] = orgTasks + } + + return tasks +} + +func FlushTasks(t *testing.T, rt *runtime.Runtime) map[string]int { + rc := rt.RP.Get() + defer rc.Close() + + var task *queue.Task + var err error + counts := make(map[string]int) + + for { + // look for a task on the handler queue + task, err = queue.PopNextTask(rc, queue.HandlerQueue) + require.NoError(t, err) + + if task == nil { + // look for a task on the batch queue + task, err = queue.PopNextTask(rc, queue.BatchQueue) + require.NoError(t, err) + } + + if task == nil { // all done + break + } + + counts[task.Type]++ + + err = mailroom.PerformTask(rt, task) + assert.NoError(t, err) + } + return counts +} diff --git a/testsuite/testdata/campaigns.go b/testsuite/testdata/campaigns.go index 383380d17..f1e3a9ab2 100644 --- a/testsuite/testdata/campaigns.go +++ b/testsuite/testdata/campaigns.go @@ -3,9 +3,9 @@ package testdata import ( "time" - "github.com/jmoiron/sqlx" "github.com/nyaruka/gocommon/uuids" "github.com/nyaruka/mailroom/core/models" + "github.com/nyaruka/mailroom/runtime" ) type Campaign struct { @@ -18,20 +18,20 @@ type CampaignEvent struct { UUID models.CampaignEventUUID } -func InsertCampaign(db *sqlx.DB, org *Org, name string, group *Group) *Campaign { +func InsertCampaign(rt 
*runtime.Runtime, org *Org, name string, group *Group) *Campaign { uuid := models.CampaignUUID(uuids.New()) var id models.CampaignID - must(db.Get(&id, + must(rt.DB.Get(&id, `INSERT INTO campaigns_campaign(uuid, org_id, name, group_id, is_archived, is_system, is_active, created_on, modified_on, created_by_id, modified_by_id) VALUES($1, $2, $3, $4, FALSE, FALSE, TRUE, NOW(), NOW(), 1, 1) RETURNING id`, uuid, org.ID, name, group.ID, )) return &Campaign{id, uuid} } -func InsertCampaignFlowEvent(db *sqlx.DB, campaign *Campaign, flow *Flow, relativeTo *Field, offset int, unit string) *CampaignEvent { +func InsertCampaignFlowEvent(rt *runtime.Runtime, campaign *Campaign, flow *Flow, relativeTo *Field, offset int, unit string) *CampaignEvent { uuid := models.CampaignEventUUID(uuids.New()) var id models.CampaignEventID - must(db.Get(&id, + must(rt.DB.Get(&id, `INSERT INTO campaigns_campaignevent( uuid, campaign_id, event_type, flow_id, relative_to_id, "offset", unit, delivery_hour, start_mode, is_active, created_on, modified_on, created_by_id, modified_by_id @@ -44,9 +44,9 @@ func InsertCampaignFlowEvent(db *sqlx.DB, campaign *Campaign, flow *Flow, relati return &CampaignEvent{id, uuid} } -func InsertEventFire(db *sqlx.DB, contact *Contact, event *CampaignEvent, scheduled time.Time) models.FireID { +func InsertEventFire(rt *runtime.Runtime, contact *Contact, event *CampaignEvent, scheduled time.Time) models.FireID { var id models.FireID - must(db.Get(&id, + must(rt.DB.Get(&id, `INSERT INTO campaigns_eventfire(contact_id, event_id, scheduled) VALUES ($1, $2, $3) RETURNING id;`, contact.ID, event.ID, scheduled, )) return id diff --git a/testsuite/testdata/channels.go b/testsuite/testdata/channels.go index c63bbf5aa..621f2af7f 100644 --- a/testsuite/testdata/channels.go +++ b/testsuite/testdata/channels.go @@ -1,12 +1,11 @@ package testdata import ( - "github.com/jmoiron/sqlx" "github.com/lib/pq" "github.com/nyaruka/gocommon/uuids" "github.com/nyaruka/goflow/assets" 
"github.com/nyaruka/mailroom/core/models" - "github.com/nyaruka/null" + "github.com/nyaruka/mailroom/runtime" ) type Channel struct { @@ -16,20 +15,20 @@ type Channel struct { } // InsertChannel inserts a channel -func InsertChannel(db *sqlx.DB, org *Org, channelType models.ChannelType, name string, schemes []string, role string, config map[string]interface{}) *Channel { +func InsertChannel(rt *runtime.Runtime, org *Org, channelType models.ChannelType, name string, schemes []string, role string, config map[string]any) *Channel { uuid := assets.ChannelUUID(uuids.New()) var id models.ChannelID - must(db.Get(&id, - `INSERT INTO channels_channel(uuid, org_id, channel_type, name, schemes, role, config, last_seen, is_system, is_active, created_on, modified_on, created_by_id, modified_by_id) - VALUES($1, $2, $3, $4, $5, $6, $7, NOW(), FALSE, TRUE, NOW(), NOW(), 1, 1) RETURNING id`, uuid, org.ID, channelType, name, pq.Array(schemes), role, null.NewMap(config), + must(rt.DB.Get(&id, + `INSERT INTO channels_channel(uuid, org_id, channel_type, name, schemes, role, config, last_seen, is_system, log_policy, is_active, created_on, modified_on, created_by_id, modified_by_id) + VALUES($1, $2, $3, $4, $5, $6, $7, NOW(), FALSE, 'A', TRUE, NOW(), NOW(), 1, 1) RETURNING id`, uuid, org.ID, channelType, name, pq.Array(schemes), role, models.JSONMap(config), )) return &Channel{ID: id, UUID: uuid, Type: channelType} } // InsertCall inserts a call -func InsertCall(db *sqlx.DB, org *Org, channel *Channel, contact *Contact) models.CallID { +func InsertCall(rt *runtime.Runtime, org *Org, channel *Channel, contact *Contact) models.CallID { var id models.CallID - must(db.Get(&id, + must(rt.DB.Get(&id, `INSERT INTO ivr_call(created_on, modified_on, external_id, status, direction, error_count, org_id, channel_id, contact_id, contact_urn_id) VALUES(NOW(), NOW(), 'ext1', 'I', 'I', 0, $1, $2, $3, $4) RETURNING id`, org.ID, channel.ID, contact.ID, contact.URNID, )) diff --git 
a/testsuite/testdata/contacts.go b/testsuite/testdata/contacts.go index bbc742edc..b80b3841b 100644 --- a/testsuite/testdata/contacts.go +++ b/testsuite/testdata/contacts.go @@ -8,9 +8,8 @@ import ( "github.com/nyaruka/goflow/envs" "github.com/nyaruka/goflow/flows" "github.com/nyaruka/mailroom/core/models" - "github.com/nyaruka/null" - - "github.com/jmoiron/sqlx" + "github.com/nyaruka/mailroom/runtime" + "github.com/nyaruka/null/v2" ) type Contact struct { @@ -20,8 +19,8 @@ type Contact struct { URNID models.URNID } -func (c *Contact) Load(db *sqlx.DB, oa *models.OrgAssets) (*models.Contact, *flows.Contact) { - contacts, err := models.LoadContacts(context.Background(), db, oa, []models.ContactID{c.ID}) +func (c *Contact) Load(rt *runtime.Runtime, oa *models.OrgAssets) (*models.Contact, *flows.Contact) { + contacts, err := models.LoadContacts(context.Background(), rt.DB, oa, []models.ContactID{c.ID}) must(err, len(contacts) == 1) flowContact, err := contacts[0].FlowContact(oa) @@ -35,9 +34,9 @@ type Group struct { UUID assets.GroupUUID } -func (g *Group) Add(db *sqlx.DB, contacts ...*Contact) { +func (g *Group) Add(rt *runtime.Runtime, contacts ...*Contact) { for _, c := range contacts { - db.MustExec(`INSERT INTO contacts_contactgroup_contacts(contactgroup_id, contact_id) VALUES($1, $2)`, g.ID, c.ID) + rt.DB.MustExec(`INSERT INTO contacts_contactgroup_contacts(contactgroup_id, contact_id) VALUES($1, $2)`, g.ID, c.ID) } } @@ -47,9 +46,9 @@ type Field struct { } // InsertContact inserts a contact -func InsertContact(db *sqlx.DB, org *Org, uuid flows.ContactUUID, name string, language envs.Language, status models.ContactStatus) *Contact { +func InsertContact(rt *runtime.Runtime, org *Org, uuid flows.ContactUUID, name string, language envs.Language, status models.ContactStatus) *Contact { var id models.ContactID - must(db.Get(&id, + must(rt.DB.Get(&id, `INSERT INTO contacts_contact (org_id, is_active, ticket_count, uuid, name, language, status, created_on, modified_on, 
created_by_id, modified_by_id) VALUES($1, TRUE, 0, $2, $3, $4, $5, NOW(), NOW(), 1, 1) RETURNING id`, org.ID, uuid, name, language, status, )) @@ -57,23 +56,29 @@ func InsertContact(db *sqlx.DB, org *Org, uuid flows.ContactUUID, name string, l } // InsertContactGroup inserts a contact group -func InsertContactGroup(db *sqlx.DB, org *Org, uuid assets.GroupUUID, name, query string) *Group { +func InsertContactGroup(rt *runtime.Runtime, org *Org, uuid assets.GroupUUID, name, query string, contacts ...*Contact) *Group { groupType := "M" if query != "" { groupType = "Q" } var id models.GroupID - must(db.Get(&id, + must(rt.DB.Get(&id, `INSERT INTO contacts_contactgroup(uuid, org_id, group_type, name, query, status, is_system, is_active, created_by_id, created_on, modified_by_id, modified_on) VALUES($1, $2, $3, $4, $5, 'R', FALSE, TRUE, 1, NOW(), 1, NOW()) RETURNING id`, uuid, org.ID, groupType, name, null.String(query), )) + + for _, contact := range contacts { + rt.DB.MustExec(`INSERT INTO contacts_contactgroup_contacts(contactgroup_id, contact_id) VALUES($1, $2)`, id, contact.ID) + rt.DB.MustExec(`UPDATE contacts_contact SET modified_on = NOW() WHERE id = $1`, contact.ID) + } + return &Group{id, uuid} } // InsertContactURN inserts a contact URN -func InsertContactURN(db *sqlx.DB, org *Org, contact *Contact, urn urns.URN, priority int) models.URNID { - scheme, path, _, _ := urn.ToParts() +func InsertContactURN(rt *runtime.Runtime, org *Org, contact *Contact, urn urns.URN, priority int) models.URNID { + scheme, path, _, display := urn.ToParts() contactID := models.NilContactID if contact != nil { @@ -81,9 +86,9 @@ func InsertContactURN(db *sqlx.DB, org *Org, contact *Contact, urn urns.URN, pri } var id models.URNID - must(db.Get(&id, - `INSERT INTO contacts_contacturn(org_id, contact_id, scheme, path, identity, priority) - VALUES($1, $2, $3, $4, $5, $6) RETURNING id`, org.ID, contactID, scheme, path, urn.Identity(), priority, + must(rt.DB.Get(&id, + `INSERT INTO 
contacts_contacturn(org_id, contact_id, scheme, path, display, identity, priority) + VALUES($1, $2, $3, $4, $5, $6, $7) RETURNING id`, org.ID, contactID, scheme, path, display, urn.Identity(), priority, )) return id } diff --git a/testsuite/testdata/flows.go b/testsuite/testdata/flows.go index 55e35d148..cab77fe53 100644 --- a/testsuite/testdata/flows.go +++ b/testsuite/testdata/flows.go @@ -5,11 +5,10 @@ import ( "time" "github.com/buger/jsonparser" - "github.com/jmoiron/sqlx" "github.com/nyaruka/gocommon/uuids" "github.com/nyaruka/goflow/assets" "github.com/nyaruka/mailroom/core/models" - "github.com/nyaruka/null" + "github.com/nyaruka/mailroom/runtime" ) type Flow struct { @@ -17,12 +16,20 @@ type Flow struct { UUID assets.FlowUUID } +func (f *Flow) Load(rt *runtime.Runtime, oa *models.OrgAssets) *models.Flow { + flow, err := oa.FlowByID(f.ID) + if err != nil { + panic(err) + } + return flow +} + func (f *Flow) Reference() *assets.FlowReference { return &assets.FlowReference{UUID: f.UUID, Name: ""} } // InsertFlow inserts a flow -func InsertFlow(db *sqlx.DB, org *Org, definition []byte) *Flow { +func InsertFlow(rt *runtime.Runtime, org *Org, definition []byte) *Flow { uuid, err := jsonparser.GetString(definition, "uuid") if err != nil { panic(err) @@ -33,18 +40,18 @@ func InsertFlow(db *sqlx.DB, org *Org, definition []byte) *Flow { } var id models.FlowID - must(db.Get(&id, + must(rt.DB.Get(&id, `INSERT INTO flows_flow(org_id, uuid, name, flow_type, version_number, base_language, expires_after_minutes, ignore_triggers, has_issues, is_active, is_archived, is_system, created_by_id, created_on, modified_by_id, modified_on, saved_on, saved_by_id) VALUES($1, $2, $3, 'M', '13.1.0', 'eng', 10, FALSE, FALSE, TRUE, FALSE, FALSE, $4, NOW(), $4, NOW(), NOW(), $4) RETURNING id`, org.ID, uuid, name, Admin.ID, )) - db.MustExec(`INSERT INTO flows_flowrevision(flow_id, definition, spec_version, revision, is_active, created_by_id, created_on, modified_by_id, modified_on) + 
rt.DB.MustExec(`INSERT INTO flows_flowrevision(flow_id, definition, spec_version, revision, is_active, created_by_id, created_on, modified_by_id, modified_on) VALUES($1, $2, '13.1.0', 1, TRUE, $3, NOW(), $3, NOW())`, id, definition, Admin.ID) return &Flow{ID: id, UUID: assets.FlowUUID(uuid)} } -func ImportFlows(db *sqlx.DB, org *Org, path string) []*Flow { +func ImportFlows(rt *runtime.Runtime, org *Org, path string) []*Flow { assetsJSON, err := os.ReadFile(path) if err != nil { panic(err) @@ -58,7 +65,7 @@ func ImportFlows(db *sqlx.DB, org *Org, path string) []*Flow { flows := []*Flow{} _, err = jsonparser.ArrayEach(flowsJSON, func(flowJSON []byte, dataType jsonparser.ValueType, offset int, err error) { - flow := InsertFlow(db, org, flowJSON) + flow := InsertFlow(rt, org, flowJSON) flows = append(flows, flow) }) if err != nil { @@ -69,22 +76,22 @@ func ImportFlows(db *sqlx.DB, org *Org, path string) []*Flow { } // InsertFlowStart inserts a flow start -func InsertFlowStart(db *sqlx.DB, org *Org, flow *Flow, contacts []*Contact) models.StartID { +func InsertFlowStart(rt *runtime.Runtime, org *Org, flow *Flow, contacts []*Contact) models.StartID { var id models.StartID - must(db.Get(&id, - `INSERT INTO flows_flowstart(uuid, org_id, flow_id, start_type, created_on, modified_on, restart_participants, include_active, contact_count, status, created_by_id) - VALUES($1, $2, $3, 'M', NOW(), NOW(), TRUE, TRUE, 2, 'P', 1) RETURNING id`, uuids.New(), org.ID, flow.ID, + must(rt.DB.Get(&id, + `INSERT INTO flows_flowstart(uuid, org_id, flow_id, start_type, exclusions, created_on, modified_on, contact_count, status, created_by_id) + VALUES($1, $2, $3, 'M', '{}', NOW(), NOW(), 2, 'P', 1) RETURNING id`, uuids.New(), org.ID, flow.ID, )) for _, c := range contacts { - db.MustExec(`INSERT INTO flows_flowstart_contacts(flowstart_id, contact_id) VALUES($1, $2)`, id, c.ID) + rt.DB.MustExec(`INSERT INTO flows_flowstart_contacts(flowstart_id, contact_id) VALUES($1, $2)`, id, c.ID) } return 
id } // InsertFlowSession inserts a flow session -func InsertFlowSession(db *sqlx.DB, org *Org, contact *Contact, sessionType models.FlowType, status models.SessionStatus, currentFlow *Flow, callID models.CallID) models.SessionID { +func InsertFlowSession(rt *runtime.Runtime, org *Org, contact *Contact, sessionType models.FlowType, status models.SessionStatus, currentFlow *Flow, callID models.CallID) models.SessionID { now := time.Now() tomorrow := now.Add(time.Hour * 24) @@ -97,7 +104,7 @@ func InsertFlowSession(db *sqlx.DB, org *Org, contact *Contact, sessionType mode } var id models.SessionID - must(db.Get(&id, + must(rt.DB.Get(&id, `INSERT INTO flows_flowsession(uuid, org_id, contact_id, status, output, responded, created_on, session_type, current_flow_id, call_id, wait_started_on, wait_expires_on, wait_resume_on_expire, ended_on) VALUES($1, $2, $3, $4, '{}', TRUE, NOW(), $5, $6, $7, $8, $9, FALSE, $10) RETURNING id`, uuids.New(), org.ID, contact.ID, status, sessionType, currentFlow.ID, callID, waitStartedOn, waitExpiresOn, endedOn, )) @@ -105,9 +112,9 @@ func InsertFlowSession(db *sqlx.DB, org *Org, contact *Contact, sessionType mode } // InsertWaitingSession inserts a waiting flow session -func InsertWaitingSession(db *sqlx.DB, org *Org, contact *Contact, sessionType models.FlowType, currentFlow *Flow, callID models.CallID, waitStartedOn, waitExpiresOn time.Time, waitResumeOnExpire bool, waitTimeoutOn *time.Time) models.SessionID { +func InsertWaitingSession(rt *runtime.Runtime, org *Org, contact *Contact, sessionType models.FlowType, currentFlow *Flow, callID models.CallID, waitStartedOn, waitExpiresOn time.Time, waitResumeOnExpire bool, waitTimeoutOn *time.Time) models.SessionID { var id models.SessionID - must(db.Get(&id, + must(rt.DB.Get(&id, `INSERT INTO flows_flowsession(uuid, org_id, contact_id, status, output, responded, created_on, session_type, current_flow_id, call_id, wait_started_on, wait_expires_on, wait_resume_on_expire, timeout_on) VALUES($1, 
$2, $3, 'W', '{"status":"waiting"}', TRUE, NOW(), $4, $5, $6, $7, $8, $9, $10) RETURNING id`, uuids.New(), org.ID, contact.ID, sessionType, currentFlow.ID, callID, waitStartedOn, waitExpiresOn, waitResumeOnExpire, waitTimeoutOn, )) @@ -115,7 +122,7 @@ func InsertWaitingSession(db *sqlx.DB, org *Org, contact *Contact, sessionType m } // InsertFlowRun inserts a flow run -func InsertFlowRun(db *sqlx.DB, org *Org, sessionID models.SessionID, contact *Contact, flow *Flow, status models.RunStatus) models.FlowRunID { +func InsertFlowRun(rt *runtime.Runtime, org *Org, sessionID models.SessionID, contact *Contact, flow *Flow, status models.RunStatus) models.FlowRunID { now := time.Now() var exitedOn *time.Time @@ -124,9 +131,9 @@ func InsertFlowRun(db *sqlx.DB, org *Org, sessionID models.SessionID, contact *C } var id models.FlowRunID - must(db.Get(&id, + must(rt.DB.Get(&id, `INSERT INTO flows_flowrun(uuid, org_id, session_id, contact_id, flow_id, status, responded, created_on, modified_on, exited_on) - VALUES($1, $2, $3, $4, $5, $6, TRUE, NOW(), NOW(), $7) RETURNING id`, uuids.New(), org.ID, null.Int(sessionID), contact.ID, flow.ID, status, exitedOn, + VALUES($1, $2, $3, $4, $5, $6, TRUE, NOW(), NOW(), $7) RETURNING id`, uuids.New(), org.ID, sessionID, contact.ID, flow.ID, status, exitedOn, )) return id } diff --git a/testsuite/testdata/imports.go b/testsuite/testdata/imports.go index 2e8610a33..0f30a333a 100644 --- a/testsuite/testdata/imports.go +++ b/testsuite/testdata/imports.go @@ -6,26 +6,25 @@ import ( "github.com/nyaruka/gocommon/dates" "github.com/nyaruka/gocommon/jsonx" "github.com/nyaruka/mailroom/core/models" - - "github.com/jmoiron/sqlx" + "github.com/nyaruka/mailroom/runtime" ) // InsertContactImport inserts a contact import -func InsertContactImport(db *sqlx.DB, org *Org, createdBy *User) models.ContactImportID { +func InsertContactImport(rt *runtime.Runtime, org *Org, createdBy *User) models.ContactImportID { var importID models.ContactImportID - 
must(db.Get(&importID, `INSERT INTO contacts_contactimport(org_id, file, original_filename, mappings, num_records, group_id, started_on, status, created_on, created_by_id, modified_on, modified_by_id, is_active) + must(rt.DB.Get(&importID, `INSERT INTO contacts_contactimport(org_id, file, original_filename, mappings, num_records, group_id, started_on, status, created_on, created_by_id, modified_on, modified_by_id, is_active) VALUES($1, 'contact_imports/1234.xlsx', 'contacts.xlsx', '{}', 30, NULL, $2, 'O', $2, $3, $2, $3, TRUE) RETURNING id`, org.ID, dates.Now(), createdBy.ID, )) return importID } // InsertContactImportBatch inserts a contact import batch -func InsertContactImportBatch(db *sqlx.DB, importID models.ContactImportID, specs json.RawMessage) models.ContactImportBatchID { +func InsertContactImportBatch(rt *runtime.Runtime, importID models.ContactImportID, specs json.RawMessage) models.ContactImportBatchID { var splitSpecs []json.RawMessage must(jsonx.Unmarshal(specs, &splitSpecs)) var batchID models.ContactImportBatchID - must(db.Get(&batchID, `INSERT INTO contacts_contactimportbatch(contact_import_id, status, specs, record_start, record_end, num_created, num_updated, num_errored, errors, finished_on) + must(rt.DB.Get(&batchID, `INSERT INTO contacts_contactimportbatch(contact_import_id, status, specs, record_start, record_end, num_created, num_updated, num_errored, errors, finished_on) VALUES($1, 'P', $2, 0, $3, 0, 0, 0, '[]', NULL) RETURNING id`, importID, specs, len(splitSpecs), )) return batchID diff --git a/testsuite/testdata/msgs.go b/testsuite/testdata/msgs.go index 44d7069c4..02d054e9e 100644 --- a/testsuite/testdata/msgs.go +++ b/testsuite/testdata/msgs.go @@ -1,11 +1,9 @@ package testdata import ( - "database/sql" "time" "github.com/lib/pq" - "github.com/lib/pq/hstore" "github.com/nyaruka/gocommon/dates" "github.com/nyaruka/gocommon/uuids" "github.com/nyaruka/goflow/assets" @@ -13,8 +11,7 @@ import ( "github.com/nyaruka/goflow/flows" 
"github.com/nyaruka/goflow/utils" "github.com/nyaruka/mailroom/core/models" - - "github.com/jmoiron/sqlx" + "github.com/nyaruka/mailroom/runtime" ) type Label struct { @@ -22,13 +19,13 @@ type Label struct { UUID assets.LabelUUID } -// InsertIncomingMsg inserts an incoming message -func InsertIncomingMsg(db *sqlx.DB, org *Org, channel *Channel, contact *Contact, text string, status models.MsgStatus) *flows.MsgIn { +// InsertIncomingMsg inserts an incoming text message +func InsertIncomingMsg(rt *runtime.Runtime, org *Org, channel *Channel, contact *Contact, text string, status models.MsgStatus) *flows.MsgIn { msgUUID := flows.MsgUUID(uuids.New()) var id flows.MsgID - must(db.Get(&id, - `INSERT INTO msgs_msg(uuid, text, created_on, direction, status, visibility, msg_count, error_count, next_attempt, contact_id, contact_urn_id, org_id, channel_id) - VALUES($1, $2, NOW(), 'I', $3, 'V', 1, 0, NOW(), $4, $5, $6, $7) RETURNING id`, msgUUID, text, status, contact.ID, contact.URNID, org.ID, channel.ID, + must(rt.DB.Get(&id, + `INSERT INTO msgs_msg(uuid, text, created_on, direction, msg_type, status, visibility, msg_count, error_count, next_attempt, contact_id, contact_urn_id, org_id, channel_id) + VALUES($1, $2, NOW(), 'I', $3, $4, 'V', 1, 0, NOW(), $5, $6, $7, $8) RETURNING id`, msgUUID, text, models.MsgTypeText, status, contact.ID, contact.URNID, org.ID, channel.ID, )) msg := flows.NewMsgIn(msgUUID, contact.URN, assets.NewChannelReference(channel.UUID, ""), text, nil) @@ -36,17 +33,17 @@ func InsertIncomingMsg(db *sqlx.DB, org *Org, channel *Channel, contact *Contact return msg } -// InsertOutgoingMsg inserts an outgoing message -func InsertOutgoingMsg(db *sqlx.DB, org *Org, channel *Channel, contact *Contact, text string, attachments []utils.Attachment, status models.MsgStatus, highPriority bool) *flows.MsgOut { - return insertOutgoingMsg(db, org, channel, contact, text, attachments, status, highPriority, 0, nil) +// InsertOutgoingMsg inserts an outgoing text message 
+func InsertOutgoingMsg(rt *runtime.Runtime, org *Org, channel *Channel, contact *Contact, text string, attachments []utils.Attachment, status models.MsgStatus, highPriority bool) *flows.MsgOut { + return insertOutgoingMsg(rt, org, channel, contact, text, attachments, envs.Locale(`eng-US`), models.MsgTypeText, status, highPriority, 0, nil) } -// InsertErroredOutgoingMsg inserts an ERRORED(E) outgoing message -func InsertErroredOutgoingMsg(db *sqlx.DB, org *Org, channel *Channel, contact *Contact, text string, errorCount int, nextAttempt time.Time, highPriority bool) *flows.MsgOut { - return insertOutgoingMsg(db, org, channel, contact, text, nil, models.MsgStatusErrored, highPriority, errorCount, &nextAttempt) +// InsertErroredOutgoingMsg inserts an ERRORED(E) outgoing text message +func InsertErroredOutgoingMsg(rt *runtime.Runtime, org *Org, channel *Channel, contact *Contact, text string, errorCount int, nextAttempt time.Time, highPriority bool) *flows.MsgOut { + return insertOutgoingMsg(rt, org, channel, contact, text, nil, envs.NilLocale, models.MsgTypeText, models.MsgStatusErrored, highPriority, errorCount, &nextAttempt) } -func insertOutgoingMsg(db *sqlx.DB, org *Org, channel *Channel, contact *Contact, text string, attachments []utils.Attachment, status models.MsgStatus, highPriority bool, errorCount int, nextAttempt *time.Time) *flows.MsgOut { +func insertOutgoingMsg(rt *runtime.Runtime, org *Org, channel *Channel, contact *Contact, text string, attachments []utils.Attachment, locale envs.Locale, typ models.MsgType, status models.MsgStatus, highPriority bool, errorCount int, nextAttempt *time.Time) *flows.MsgOut { var channelRef *assets.ChannelReference var channelID models.ChannelID if channel != nil { @@ -63,32 +60,32 @@ func insertOutgoingMsg(db *sqlx.DB, org *Org, channel *Channel, contact *Contact } var id flows.MsgID - must(db.Get(&id, - `INSERT INTO msgs_msg(uuid, text, attachments, created_on, direction, status, visibility, contact_id, 
contact_urn_id, org_id, channel_id, sent_on, msg_count, error_count, next_attempt, high_priority) - VALUES($1, $2, $3, NOW(), 'O', $4, 'V', $5, $6, $7, $8, $9, 1, $10, $11, $12) RETURNING id`, - msg.UUID(), text, pq.Array(attachments), status, contact.ID, contact.URNID, org.ID, channelID, sentOn, errorCount, nextAttempt, highPriority, + must(rt.DB.Get(&id, + `INSERT INTO msgs_msg(uuid, text, attachments, locale, created_on, direction, msg_type, status, visibility, contact_id, contact_urn_id, org_id, channel_id, sent_on, msg_count, error_count, next_attempt, high_priority) + VALUES($1, $2, $3, $4, NOW(), 'O', $5, $6, 'V', $7, $8, $9, $10, $11, 1, $12, $13, $14) RETURNING id`, + msg.UUID(), text, pq.Array(attachments), locale, typ, status, contact.ID, contact.URNID, org.ID, channelID, sentOn, errorCount, nextAttempt, highPriority, )) msg.SetID(id) return msg } -func InsertBroadcast(db *sqlx.DB, org *Org, baseLanguage envs.Language, text map[envs.Language]string, schedID models.ScheduleID, contacts []*Contact, groups []*Group) models.BroadcastID { - textMap := make(map[string]sql.NullString, len(text)) +func InsertBroadcast(rt *runtime.Runtime, org *Org, baseLanguage envs.Language, text map[envs.Language]string, schedID models.ScheduleID, contacts []*Contact, groups []*Group) models.BroadcastID { + translations := make(flows.BroadcastTranslations) for lang, t := range text { - textMap[string(lang)] = sql.NullString{String: t, Valid: true} + translations[lang] = &flows.BroadcastTranslation{Text: t} } var id models.BroadcastID - must(db.Get(&id, - `INSERT INTO msgs_broadcast(org_id, base_language, text, schedule_id, status, send_all, created_on, modified_on, created_by_id, modified_by_id, is_active) - VALUES($1, $2, $3, $4, 'P', TRUE, NOW(), NOW(), 1, 1, TRUE) RETURNING id`, org.ID, baseLanguage, hstore.Hstore{Map: textMap}, schedID, + must(rt.DB.Get(&id, + `INSERT INTO msgs_broadcast(org_id, base_language, translations, schedule_id, status, created_on, modified_on, 
created_by_id, modified_by_id, is_active) + VALUES($1, $2, $3, $4, 'P', NOW(), NOW(), 1, 1, TRUE) RETURNING id`, org.ID, baseLanguage, translations, schedID, )) for _, contact := range contacts { - db.MustExec(`INSERT INTO msgs_broadcast_contacts(broadcast_id, contact_id) VALUES($1, $2)`, id, contact.ID) + rt.DB.MustExec(`INSERT INTO msgs_broadcast_contacts(broadcast_id, contact_id) VALUES($1, $2)`, id, contact.ID) } for _, group := range groups { - db.MustExec(`INSERT INTO msgs_broadcast_groups(broadcast_id, contactgroup_id) VALUES($1, $2)`, id, group.ID) + rt.DB.MustExec(`INSERT INTO msgs_broadcast_groups(broadcast_id, contactgroup_id) VALUES($1, $2)`, id, group.ID) } return id diff --git a/testsuite/testdata/tickets.go b/testsuite/testdata/tickets.go index e2173a3c7..b1c4f6f3b 100644 --- a/testsuite/testdata/tickets.go +++ b/testsuite/testdata/tickets.go @@ -9,8 +9,7 @@ import ( "github.com/nyaruka/goflow/assets" "github.com/nyaruka/goflow/flows" "github.com/nyaruka/mailroom/core/models" - - "github.com/jmoiron/sqlx" + "github.com/nyaruka/mailroom/runtime" ) type Topic struct { @@ -28,8 +27,8 @@ type Team struct { UUID models.TeamUUID } -func (k *Ticket) Load(db *sqlx.DB) *models.Ticket { - tickets, err := models.LoadTickets(context.Background(), db, []models.TicketID{k.ID}) +func (k *Ticket) Load(rt *runtime.Runtime) *models.Ticket { + tickets, err := models.LoadTickets(context.Background(), rt.DB, []models.TicketID{k.ID}) must(err, len(tickets) == 1) return tickets[0] } @@ -40,27 +39,30 @@ type Ticketer struct { } // InsertOpenTicket inserts an open ticket -func InsertOpenTicket(db *sqlx.DB, org *Org, contact *Contact, ticketer *Ticketer, topic *Topic, body, externalID string, openedOn time.Time, assignee *User) *Ticket { - return insertTicket(db, org, contact, ticketer, models.TicketStatusOpen, topic, body, externalID, openedOn, assignee) +func InsertOpenTicket(rt *runtime.Runtime, org *Org, contact *Contact, ticketer *Ticketer, topic *Topic, body, externalID 
string, openedOn time.Time, assignee *User) *Ticket { + return insertTicket(rt, org, contact, ticketer, models.TicketStatusOpen, topic, body, externalID, openedOn, assignee) } // InsertClosedTicket inserts a closed ticket -func InsertClosedTicket(db *sqlx.DB, org *Org, contact *Contact, ticketer *Ticketer, topic *Topic, body, externalID string, assignee *User) *Ticket { - return insertTicket(db, org, contact, ticketer, models.TicketStatusClosed, topic, body, externalID, dates.Now(), assignee) +func InsertClosedTicket(rt *runtime.Runtime, org *Org, contact *Contact, ticketer *Ticketer, topic *Topic, body, externalID string, assignee *User) *Ticket { + return insertTicket(rt, org, contact, ticketer, models.TicketStatusClosed, topic, body, externalID, dates.Now(), assignee) } -func insertTicket(db *sqlx.DB, org *Org, contact *Contact, ticketer *Ticketer, status models.TicketStatus, topic *Topic, body, externalID string, openedOn time.Time, assignee *User) *Ticket { +func insertTicket(rt *runtime.Runtime, org *Org, contact *Contact, ticketer *Ticketer, status models.TicketStatus, topic *Topic, body, externalID string, openedOn time.Time, assignee *User) *Ticket { uuid := flows.TicketUUID(uuids.New()) + + lastActivityOn := openedOn var closedOn *time.Time if status == models.TicketStatusClosed { t := dates.Now() + lastActivityOn = t closedOn = &t } var id models.TicketID - must(db.Get(&id, + must(rt.DB.Get(&id, `INSERT INTO tickets_ticket(uuid, org_id, contact_id, ticketer_id, status, topic_id, body, external_id, opened_on, modified_on, closed_on, last_activity_on, assignee_id) - VALUES($1, $2, $3, $4, $5, $6, $7, $8, $9, NOW(), $10, NOW(), $11) RETURNING id`, uuid, org.ID, contact.ID, ticketer.ID, status, topic.ID, body, externalID, openedOn, closedOn, assignee.SafeID(), + VALUES($1, $2, $3, $4, $5, $6, $7, $8, $9, NOW(), $10, $11, $12) RETURNING id`, uuid, org.ID, contact.ID, ticketer.ID, status, topic.ID, body, externalID, openedOn, closedOn, lastActivityOn, 
assignee.SafeID(), )) return &Ticket{id, uuid} } diff --git a/testsuite/testdata/triggers.go b/testsuite/testdata/triggers.go index a83b612d3..ff1414a32 100644 --- a/testsuite/testdata/triggers.go +++ b/testsuite/testdata/triggers.go @@ -2,50 +2,49 @@ package testdata import ( "github.com/nyaruka/mailroom/core/models" - - "github.com/jmoiron/sqlx" + "github.com/nyaruka/mailroom/runtime" ) -func InsertKeywordTrigger(db *sqlx.DB, org *Org, flow *Flow, keyword string, matchType models.MatchType, includeGroups []*Group, excludeGroups []*Group) models.TriggerID { - return insertTrigger(db, org, models.KeywordTriggerType, flow, keyword, matchType, includeGroups, excludeGroups, nil, "", nil) +func InsertKeywordTrigger(rt *runtime.Runtime, org *Org, flow *Flow, keyword string, matchType models.MatchType, includeGroups []*Group, excludeGroups []*Group) models.TriggerID { + return insertTrigger(rt, org, models.KeywordTriggerType, flow, keyword, matchType, includeGroups, excludeGroups, nil, "", nil) } -func InsertIncomingCallTrigger(db *sqlx.DB, org *Org, flow *Flow, includeGroups, excludeGroups []*Group) models.TriggerID { - return insertTrigger(db, org, models.IncomingCallTriggerType, flow, "", "", includeGroups, excludeGroups, nil, "", nil) +func InsertIncomingCallTrigger(rt *runtime.Runtime, org *Org, flow *Flow, includeGroups, excludeGroups []*Group) models.TriggerID { + return insertTrigger(rt, org, models.IncomingCallTriggerType, flow, "", "", includeGroups, excludeGroups, nil, "", nil) } -func InsertMissedCallTrigger(db *sqlx.DB, org *Org, flow *Flow) models.TriggerID { - return insertTrigger(db, org, models.MissedCallTriggerType, flow, "", "", nil, nil, nil, "", nil) +func InsertMissedCallTrigger(rt *runtime.Runtime, org *Org, flow *Flow) models.TriggerID { + return insertTrigger(rt, org, models.MissedCallTriggerType, flow, "", "", nil, nil, nil, "", nil) } -func InsertNewConversationTrigger(db *sqlx.DB, org *Org, flow *Flow, channel *Channel) models.TriggerID { - 
return insertTrigger(db, org, models.NewConversationTriggerType, flow, "", "", nil, nil, nil, "", channel) +func InsertNewConversationTrigger(rt *runtime.Runtime, org *Org, flow *Flow, channel *Channel) models.TriggerID { + return insertTrigger(rt, org, models.NewConversationTriggerType, flow, "", "", nil, nil, nil, "", channel) } -func InsertReferralTrigger(db *sqlx.DB, org *Org, flow *Flow, referrerID string, channel *Channel) models.TriggerID { - return insertTrigger(db, org, models.ReferralTriggerType, flow, "", "", nil, nil, nil, referrerID, channel) +func InsertReferralTrigger(rt *runtime.Runtime, org *Org, flow *Flow, referrerID string, channel *Channel) models.TriggerID { + return insertTrigger(rt, org, models.ReferralTriggerType, flow, "", "", nil, nil, nil, referrerID, channel) } -func InsertCatchallTrigger(db *sqlx.DB, org *Org, flow *Flow, includeGroups, excludeGroups []*Group) models.TriggerID { - return insertTrigger(db, org, models.CatchallTriggerType, flow, "", "", includeGroups, excludeGroups, nil, "", nil) +func InsertCatchallTrigger(rt *runtime.Runtime, org *Org, flow *Flow, includeGroups, excludeGroups []*Group) models.TriggerID { + return insertTrigger(rt, org, models.CatchallTriggerType, flow, "", "", includeGroups, excludeGroups, nil, "", nil) } -func InsertScheduledTrigger(db *sqlx.DB, org *Org, flow *Flow, includeGroups, excludeGroups []*Group, includeContacts []*Contact) models.TriggerID { - return insertTrigger(db, org, models.ScheduleTriggerType, flow, "", "", includeGroups, excludeGroups, includeContacts, "", nil) +func InsertScheduledTrigger(rt *runtime.Runtime, org *Org, flow *Flow, includeGroups, excludeGroups []*Group, includeContacts []*Contact) models.TriggerID { + return insertTrigger(rt, org, models.ScheduleTriggerType, flow, "", "", includeGroups, excludeGroups, includeContacts, "", nil) } -func InsertTicketClosedTrigger(db *sqlx.DB, org *Org, flow *Flow) models.TriggerID { - return insertTrigger(db, org, 
models.TicketClosedTriggerType, flow, "", "", nil, nil, nil, "", nil) +func InsertTicketClosedTrigger(rt *runtime.Runtime, org *Org, flow *Flow) models.TriggerID { + return insertTrigger(rt, org, models.TicketClosedTriggerType, flow, "", "", nil, nil, nil, "", nil) } -func insertTrigger(db *sqlx.DB, org *Org, triggerType models.TriggerType, flow *Flow, keyword string, matchType models.MatchType, includeGroups, excludeGroups []*Group, contactIDs []*Contact, referrerID string, channel *Channel) models.TriggerID { +func insertTrigger(rt *runtime.Runtime, org *Org, triggerType models.TriggerType, flow *Flow, keyword string, matchType models.MatchType, includeGroups, excludeGroups []*Group, contactIDs []*Contact, referrerID string, channel *Channel) models.TriggerID { channelID := models.NilChannelID if channel != nil { channelID = channel.ID } var id models.TriggerID - must(db.Get(&id, + must(rt.DB.Get(&id, `INSERT INTO triggers_trigger(is_active, created_on, modified_on, keyword, referrer_id, is_archived, flow_id, trigger_type, match_type, created_by_id, modified_by_id, org_id, channel_id) VALUES(TRUE, now(), now(), $1, $5, false, $2, $3, $4, 1, 1, $7, $6) RETURNING id`, keyword, flow.ID, triggerType, matchType, referrerID, channelID, org.ID, @@ -53,15 +52,15 @@ func insertTrigger(db *sqlx.DB, org *Org, triggerType models.TriggerType, flow * // insert group associations for _, g := range includeGroups { - db.MustExec(`INSERT INTO triggers_trigger_groups(trigger_id, contactgroup_id) VALUES($1, $2)`, id, g.ID) + rt.DB.MustExec(`INSERT INTO triggers_trigger_groups(trigger_id, contactgroup_id) VALUES($1, $2)`, id, g.ID) } for _, g := range excludeGroups { - db.MustExec(`INSERT INTO triggers_trigger_exclude_groups(trigger_id, contactgroup_id) VALUES($1, $2)`, id, g.ID) + rt.DB.MustExec(`INSERT INTO triggers_trigger_exclude_groups(trigger_id, contactgroup_id) VALUES($1, $2)`, id, g.ID) } // insert contact associations for _, c := range contactIDs { - db.MustExec(`INSERT 
INTO triggers_trigger_contacts(trigger_id, contact_id) VALUES($1, $2)`, id, c.ID) + rt.DB.MustExec(`INSERT INTO triggers_trigger_contacts(trigger_id, contact_id) VALUES($1, $2)`, id, c.ID) } return id diff --git a/testsuite/testsuite.go b/testsuite/testsuite.go index f5192fa47..728b0af48 100644 --- a/testsuite/testsuite.go +++ b/testsuite/testsuite.go @@ -6,32 +6,26 @@ import ( "os" "os/exec" "path" - "strings" - "testing" - "github.com/nyaruka/gocommon/jsonx" + "github.com/gomodule/redigo/redis" + "github.com/jmoiron/sqlx" "github.com/nyaruka/gocommon/storage" "github.com/nyaruka/mailroom/core/models" - "github.com/nyaruka/mailroom/core/queue" "github.com/nyaruka/mailroom/runtime" - "github.com/stretchr/testify/require" - - "github.com/gomodule/redigo/redis" - "github.com/jmoiron/sqlx" + "github.com/nyaruka/rp-indexer/v8/indexers" + "github.com/olivere/elastic/v7" "github.com/sirupsen/logrus" ) -/*var tableHashes = map[string]string{ - "channels_channel": "3587399bad341401f1880431c0bc772a", - "contacts_contact": "0382ef6e58e260c0c76dcc84550e6793", - "orgs_org": "0f650bf7b9fb77ffa3ff0992be98da53", - "tickets_ticketer": "6487a4aed61e16c3aa0d6cf117f58de3", -}*/ - var _db *sqlx.DB -const AttachmentStorageDir = "_test_attachments_storage" -const SessionStorageDir = "_test_session_storage" +const elasticURL = "http://localhost:9200" +const elasticContactsIndex = "test_contacts" +const postgresContainerName = "textit-postgres-1" + +const attachmentStorageDir = "_test_attachments_storage" +const sessionStorageDir = "_test_session_storage" +const logStorageDir = "_test_log_storage" // Refresh is our type for the pieces of org assets we want fresh (not cached) type ResetFlag int @@ -43,10 +37,13 @@ const ( ResetData = ResetFlag(1 << 2) ResetRedis = ResetFlag(1 << 3) ResetStorage = ResetFlag(1 << 4) + ResetElastic = ResetFlag(1 << 5) ) // Reset clears out both our database and redis DB func Reset(what ResetFlag) { + ctx := context.TODO() + if what&ResetDB > 0 { resetDB() } 
else if what&ResetData > 0 { @@ -58,35 +55,49 @@ func Reset(what ResetFlag) { if what&ResetStorage > 0 { resetStorage() } + if what&ResetElastic > 0 { + resetElastic(ctx) + } models.FlushCache() } -// Get returns the various runtime things a test might need -func Get() (context.Context, *runtime.Runtime, *sqlx.DB, *redis.Pool) { +// Runtime returns the various runtime things a test might need +func Runtime() (context.Context, *runtime.Runtime) { + es, err := elastic.NewSimpleClient(elastic.SetURL(elasticURL), elastic.SetSniff(false)) + if err != nil { + panic(err) + } + + cfg := runtime.NewDefaultConfig() + cfg.ElasticContactsIndex = elasticContactsIndex + db := getDB() - rp := getRP() rt := &runtime.Runtime{ DB: db, ReadonlyDB: db, - RP: rp, - ES: nil, - AttachmentStorage: storage.NewFS(AttachmentStorageDir, 0766), - SessionStorage: storage.NewFS(SessionStorageDir, 0766), - Config: runtime.NewDefaultConfig(), + RP: getRP(), + ES: es, + AttachmentStorage: storage.NewFS(attachmentStorageDir, 0766), + SessionStorage: storage.NewFS(sessionStorageDir, 0766), + LogStorage: storage.NewFS(logStorageDir, 0766), + Config: cfg, } logrus.SetLevel(logrus.DebugLevel) - /*for name, expected := range tableHashes { - var actual string - must(db.Get(&actual, fmt.Sprintf(`SELECT md5(array_to_string(array_agg(t.* order by id), '|', '')) FROM %s t`, name))) - if actual != expected { - panic(fmt.Sprintf("table has mismatch for %s, expected: %s, got %s", name, expected, actual)) - } - }*/ + return context.Background(), rt +} - return context.Background(), rt, db, rp +// reindexes data changes to Elastic +func ReindexElastic(ctx context.Context) { + db := getDB() + es := getES() + + contactsIndexer := indexers.NewContactIndexer(elasticURL, elasticContactsIndex, 1, 1, 100) + contactsIndexer.Index(db.DB, false, false) + + es.Refresh(elasticContactsIndex).Do(ctx) } // returns an open test database pool @@ -127,6 +138,13 @@ func getRC() redis.Conn { return conn } +// returns an Elastic 
client +func getES() *elastic.Client { + es, err := elastic.NewSimpleClient(elastic.SetURL(elasticURL), elastic.SetSniff(false)) + noError(err) + return es +} + // resets our database to our base state from our RapidPro dump // // mailroom_test.dump can be regenerated by running: @@ -144,15 +162,17 @@ func resetDB() { } func loadTestDump() { - dir, _ := os.Getwd() + dump, err := os.Open(absPath("./mailroom_test.dump")) + must(err) + defer dump.Close() - // our working directory is set to the directory of the module being tested, we want to get just - // the portion that points to the mailroom directory - for !strings.HasSuffix(dir, "mailroom") && dir != "/" { - dir = path.Dir(dir) - } + cmd := exec.Command("docker", "exec", "-i", postgresContainerName, "pg_restore", "-d", "mailroom_test", "-U", "mailroom_test") + cmd.Stdin = dump - mustExec("pg_restore", "-h", "localhost", "-d", "mailroom_test", "-U", "mailroom_test", path.Join(dir, "./mailroom_test.dump")) + output, err := cmd.CombinedOutput() + if err != nil { + panic(fmt.Sprintf("error restoring database: %s: %s", err, string(output))) + } // force re-connection if _db != nil { @@ -161,6 +181,20 @@ func loadTestDump() { } } +// Converts a project root relative path to an absolute path usable in any test. This is needed because go tests +// are run with a working directory set to the current module being tested. 
+func absPath(p string) string { + // start in working directory and go up until we are in a directory containing go.mod + dir, _ := os.Getwd() + for dir != "/" { + dir = path.Dir(dir) + if _, err := os.Stat(path.Join(dir, "go.mod")); err == nil { + break + } + } + return path.Join(dir, p) +} + // resets our redis database func resetRedis() { rc, err := redis.Dial("tcp", "localhost:6379") @@ -176,13 +210,38 @@ func resetRedis() { // clears our storage for tests func resetStorage() { - must(os.RemoveAll(AttachmentStorageDir)) - must(os.RemoveAll(SessionStorageDir)) + must(os.RemoveAll(attachmentStorageDir)) + must(os.RemoveAll(sessionStorageDir)) + must(os.RemoveAll(logStorageDir)) +} + +// clears indexed data in Elastic +func resetElastic(ctx context.Context) { + es := getES() + + exists, err := es.IndexExists(elasticContactsIndex).Do(ctx) + noError(err) + + if exists { + // get any indexes for the contacts alias + ar, err := es.Aliases().Index(elasticContactsIndex).Do(ctx) + noError(err) + + // and delete them + for _, index := range ar.IndicesByAlias(elasticContactsIndex) { + _, err := es.DeleteIndex(index).Do(ctx) + noError(err) + } + } + + ReindexElastic(ctx) } var sqlResetTestData = ` UPDATE contacts_contact SET current_flow_id = NULL; +DELETE FROM tickets_ticketdailycount; +DELETE FROM tickets_ticketdailytiming; DELETE FROM notifications_notification; DELETE FROM notifications_incident; DELETE FROM request_logs_httplog; @@ -199,11 +258,20 @@ DELETE FROM flows_flowpathcount; DELETE FROM flows_flownodecount; DELETE FROM flows_flowrunstatuscount; DELETE FROM flows_flowcategorycount; +DELETE FROM flows_flowstart_contacts; +DELETE FROM flows_flowstart_groups; +DELETE FROM flows_flowstart; DELETE FROM flows_flowsession; DELETE FROM flows_flowrevision WHERE flow_id >= 30000; DELETE FROM flows_flow WHERE id >= 30000; DELETE FROM ivr_call; DELETE FROM campaigns_eventfire; +DELETE FROM msgs_msg; +DELETE FROM msgs_broadcast_groups; +DELETE FROM msgs_broadcast_contacts; 
+DELETE FROM msgs_broadcastmsgcount; +DELETE FROM msgs_broadcast; +DELETE FROM schedules_schedule; DELETE FROM campaigns_campaignevent WHERE id >= 30000; DELETE FROM campaigns_campaign WHERE id >= 30000; DELETE FROM contacts_contactimportbatch; @@ -217,7 +285,9 @@ DELETE FROM contacts_contactgroup WHERE id >= 30000; ALTER SEQUENCE flows_flow_id_seq RESTART WITH 30000; ALTER SEQUENCE tickets_ticket_id_seq RESTART WITH 1; ALTER SEQUENCE msgs_msg_id_seq RESTART WITH 1; +ALTER SEQUENCE msgs_broadcast_id_seq RESTART WITH 1; ALTER SEQUENCE flows_flowrun_id_seq RESTART WITH 1; +ALTER SEQUENCE flows_flowstart_id_seq RESTART WITH 1; ALTER SEQUENCE flows_flowsession_id_seq RESTART WITH 1; ALTER SEQUENCE contacts_contact_id_seq RESTART WITH 30000; ALTER SEQUENCE contacts_contacturn_id_seq RESTART WITH 30000; @@ -235,15 +305,6 @@ func resetData() { models.FlushCache() } -// utility function for running a command panicking if there is any error -func mustExec(command string, args ...string) { - cmd := exec.Command(command, args...) - output, err := cmd.CombinedOutput() - if err != nil { - panic(fmt.Sprintf("error restoring database: %s: %s", err, string(output))) - } -} - // convenience way to call a func and panic if it errors, e.g. 
must(foo()) func must(err error) { if err != nil { @@ -259,30 +320,3 @@ func ReadFile(path string) []byte { noError(err) return d } - -func CurrentOrgTasks(t *testing.T, rp *redis.Pool) map[models.OrgID][]*queue.Task { - rc := rp.Get() - defer rc.Close() - - // get all active org queues - active, err := redis.Ints(rc.Do("ZRANGE", "batch:active", 0, -1)) - require.NoError(t, err) - - tasks := make(map[models.OrgID][]*queue.Task) - for _, orgID := range active { - orgTasksEncoded, err := redis.Strings(rc.Do("ZRANGE", fmt.Sprintf("batch:%d", orgID), 0, -1)) - require.NoError(t, err) - - orgTasks := make([]*queue.Task, len(orgTasksEncoded)) - - for i := range orgTasksEncoded { - task := &queue.Task{} - jsonx.MustUnmarshal([]byte(orgTasksEncoded[i]), task) - orgTasks[i] = task - } - - tasks[models.OrgID(orgID)] = orgTasks - } - - return tasks -} diff --git a/web/testing.go b/testsuite/web.go similarity index 82% rename from web/testing.go rename to testsuite/web.go index bcb21d4ac..203d9c643 100644 --- a/web/testing.go +++ b/testsuite/web.go @@ -1,4 +1,4 @@ -package web +package testsuite import ( "bytes" @@ -10,6 +10,7 @@ import ( "net/http" "net/textproto" "os" + "regexp" "strings" "sync" "testing" @@ -22,8 +23,7 @@ import ( "github.com/nyaruka/gocommon/uuids" "github.com/nyaruka/goflow/test" "github.com/nyaruka/mailroom/runtime" - "github.com/nyaruka/mailroom/testsuite" - + "github.com/nyaruka/mailroom/web" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -37,7 +37,7 @@ func RunWebTests(t *testing.T, ctx context.Context, rt *runtime.Runtime, truthFi defer dates.SetNowSource(dates.DefaultNowSource) - server := NewServer(ctx, rt, wg) + server := web.NewServer(ctx, rt, wg) server.Start() defer server.Stop() @@ -63,7 +63,7 @@ func RunWebTests(t *testing.T, ctx context.Context, rt *runtime.Runtime, truthFi actualResponse []byte } tcs := make([]TestCase, 0, 20) - tcJSON := testsuite.ReadFile(truthFile) + tcJSON := ReadFile(truthFile) for key, 
value := range substitutions { tcJSON = bytes.ReplaceAll(tcJSON, []byte("$"+key+"$"), []byte(value)) @@ -77,6 +77,7 @@ func RunWebTests(t *testing.T, ctx context.Context, rt *runtime.Runtime, truthFi var clonedMocks *httpx.MockRequestor if tc.HTTPMocks != nil { + tc.HTTPMocks.SetIgnoreLocal(true) httpx.SetRequestor(tc.HTTPMocks) clonedMocks = tc.HTTPMocks.Clone() } else { @@ -119,11 +120,13 @@ func RunWebTests(t *testing.T, ctx context.Context, rt *runtime.Runtime, truthFi actual := tc actual.Status = resp.StatusCode actual.HTTPMocks = clonedMocks + actual.actualResponse, err = io.ReadAll(resp.Body) - tc.HTTPMocks = clonedMocks - tc.actualResponse, err = io.ReadAll(resp.Body) assert.NoError(t, err, "%s: error reading body", tc.Label) + // some timestamps come from db NOW() which we can't mock, so we replace them with $recent_timestamp$ + actual.actualResponse = overwriteRecentTimestamps(actual.actualResponse) + if !test.UpdateSnapshots { assert.Equal(t, tc.Status, actual.Status, "%s: unexpected status", tc.Label) @@ -131,7 +134,7 @@ func RunWebTests(t *testing.T, ctx context.Context, rt *runtime.Runtime, truthFi expectedIsJSON := false if tc.ResponseFile != "" { - expectedResponse = testsuite.ReadFile(tc.ResponseFile) + expectedResponse = ReadFile(tc.ResponseFile) expectedIsJSON = strings.HasSuffix(tc.ResponseFile, ".json") } else { @@ -140,9 +143,9 @@ func RunWebTests(t *testing.T, ctx context.Context, rt *runtime.Runtime, truthFi } if expectedIsJSON { - test.AssertEqualJSON(t, expectedResponse, tc.actualResponse, "%s: unexpected JSON response", tc.Label) + test.AssertEqualJSON(t, expectedResponse, actual.actualResponse, "%s: unexpected JSON response", tc.Label) } else { - assert.Equal(t, string(expectedResponse), string(tc.actualResponse), "%s: unexpected response", tc.Label) + assert.Equal(t, string(expectedResponse), string(actual.actualResponse), "%s: unexpected response", tc.Label) } for _, dba := range tc.DBAssertions { @@ -156,12 +159,12 @@ func 
RunWebTests(t *testing.T, ctx context.Context, rt *runtime.Runtime, truthFi // update if we are meant to if test.UpdateSnapshots { - for _, tc := range tcs { - if tc.ResponseFile != "" { - err = os.WriteFile(tc.ResponseFile, tc.actualResponse, 0644) + for i := range tcs { + if tcs[i].ResponseFile != "" { + err = os.WriteFile(tcs[i].ResponseFile, tcs[i].actualResponse, 0644) require.NoError(t, err, "failed to update response file") } else { - tc.Response = tc.actualResponse + tcs[i].Response = tcs[i].actualResponse } } @@ -173,6 +176,18 @@ func RunWebTests(t *testing.T, ctx context.Context, rt *runtime.Runtime, truthFi } } +var isoTimestampRegex = regexp.MustCompile(`\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{1,9}Z`) + +func overwriteRecentTimestamps(resp []byte) []byte { + return isoTimestampRegex.ReplaceAllFunc(resp, func(b []byte) []byte { + t, _ := time.Parse(time.RFC3339, string(b)) + if time.Since(t) < time.Second*10 { + return []byte(`$recent_timestamp$`) + } + return b + }) +} + // MultiPartPart is a single part in a multipart encoded request type MultiPartPart struct { Name string `json:"name"` diff --git a/utils/cron/cron_test.go b/utils/cron/cron_test.go index 45460451a..8410b3ee5 100644 --- a/utils/cron/cron_test.go +++ b/utils/cron/cron_test.go @@ -14,7 +14,7 @@ import ( ) func TestCron(t *testing.T) { - _, rt, _, _ := testsuite.Get() + _, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetRedis) diff --git a/web/contact/utils.go b/web/contact/base.go similarity index 99% rename from web/contact/utils.go rename to web/contact/base.go index 88d817d93..3f9c23b85 100644 --- a/web/contact/utils.go +++ b/web/contact/base.go @@ -6,7 +6,6 @@ import ( "github.com/nyaruka/goflow/flows" "github.com/nyaruka/goflow/flows/modifiers" "github.com/nyaruka/mailroom/core/models" - "github.com/pkg/errors" ) diff --git a/web/contact/base_test.go b/web/contact/base_test.go new file mode 100644 index 000000000..6f4f3acd6 --- /dev/null +++ b/web/contact/base_test.go 
@@ -0,0 +1,304 @@ +package contact_test + +import ( + "bytes" + "encoding/json" + "fmt" + "io" + "net/http" + "sync" + "testing" + "time" + + "github.com/nyaruka/gocommon/urns" + "github.com/nyaruka/goflow/assets" + "github.com/nyaruka/goflow/envs" + _ "github.com/nyaruka/mailroom/core/handlers" + "github.com/nyaruka/mailroom/core/models" + _ "github.com/nyaruka/mailroom/services/tickets/intern" + "github.com/nyaruka/mailroom/testsuite" + "github.com/nyaruka/mailroom/testsuite/testdata" + "github.com/nyaruka/mailroom/web" + "github.com/nyaruka/mailroom/web/contact" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestCreate(t *testing.T) { + ctx, rt := testsuite.Runtime() + + defer testsuite.Reset(testsuite.ResetAll) + + // detach Cathy's tel URN + rt.DB.MustExec(`UPDATE contacts_contacturn SET contact_id = NULL WHERE contact_id = $1`, testdata.Cathy.ID) + + rt.DB.MustExec(`ALTER SEQUENCE contacts_contact_id_seq RESTART WITH 30000`) + + testsuite.RunWebTests(t, ctx, rt, "testdata/create.json", nil) +} + +func TestBulkCreate(t *testing.T) { + ctx, rt := testsuite.Runtime() + + defer testsuite.Reset(testsuite.ResetData) + + rt.DB.MustExec(`ALTER SEQUENCE contacts_contact_id_seq RESTART WITH 30000`) + + testsuite.RunWebTests(t, ctx, rt, "testdata/bulk_create.json", nil) +} + +func TestInspect(t *testing.T) { + ctx, rt := testsuite.Runtime() + + defer testsuite.Reset(testsuite.ResetData) + + // give cathy an unsendable twitterid URN with a display value + testdata.InsertContactURN(rt, testdata.Org1, testdata.Cathy, urns.URN("twitterid:23145325#cathy"), 20000) + + testsuite.RunWebTests(t, ctx, rt, "testdata/inspect.json", nil) +} + +func TestModify(t *testing.T) { + ctx, rt := testsuite.Runtime() + + defer testsuite.Reset(testsuite.ResetAll) + + // to be deterministic, update the creation date on cathy + rt.DB.MustExec(`UPDATE contacts_contact SET created_on = $1 WHERE id = $2`, time.Date(2018, 7, 6, 12, 30, 0, 123456789, 
time.UTC), testdata.Cathy.ID) + + // make our campaign group dynamic + rt.DB.MustExec(`UPDATE contacts_contactgroup SET query = 'age > 18' WHERE id = $1`, testdata.DoctorsGroup.ID) + + // insert an event on our campaign that is based on created on + testdata.InsertCampaignFlowEvent(rt, testdata.RemindersCampaign, testdata.Favorites, testdata.CreatedOnField, 1000, "W") + + // for simpler tests we clear out cathy's fields and groups to start + rt.DB.MustExec(`UPDATE contacts_contact SET fields = NULL WHERE id = $1`, testdata.Cathy.ID) + rt.DB.MustExec(`DELETE FROM contacts_contactgroup_contacts WHERE contact_id = $1`, testdata.Cathy.ID) + rt.DB.MustExec(`UPDATE contacts_contacturn SET contact_id = NULL WHERE contact_id = $1`, testdata.Cathy.ID) + + // because we made changes to a group above, need to make sure we don't use stale org assets + models.FlushCache() + + // lock a contact to test skipping them + models.LockContacts(ctx, rt, testdata.Org1.ID, []models.ContactID{testdata.Alexandria.ID}, time.Second) + + testsuite.RunWebTests(t, ctx, rt, "testdata/modify.json", nil) +} + +func TestResolve(t *testing.T) { + ctx, rt := testsuite.Runtime() + + defer testsuite.Reset(testsuite.ResetAll) + + // detach Cathy's tel URN + rt.DB.MustExec(`UPDATE contacts_contacturn SET contact_id = NULL WHERE contact_id = $1`, testdata.Cathy.ID) + + rt.DB.MustExec(`ALTER SEQUENCE contacts_contact_id_seq RESTART WITH 30000`) + + testsuite.RunWebTests(t, ctx, rt, "testdata/resolve.json", nil) +} + +func TestInterrupt(t *testing.T) { + ctx, rt := testsuite.Runtime() + + defer testsuite.Reset(testsuite.ResetData) + + // give Cathy a completed and a waiting session + testdata.InsertFlowSession(rt, testdata.Org1, testdata.Cathy, models.FlowTypeMessaging, models.SessionStatusCompleted, testdata.Favorites, models.NilCallID) + testdata.InsertWaitingSession(rt, testdata.Org1, testdata.Cathy, models.FlowTypeMessaging, testdata.Favorites, models.NilCallID, time.Now(), time.Now().Add(time.Hour),
true, nil) + + // give Bob a waiting session + testdata.InsertWaitingSession(rt, testdata.Org1, testdata.Bob, models.FlowTypeMessaging, testdata.PickANumber, models.NilCallID, time.Now(), time.Now().Add(time.Hour), true, nil) + + testsuite.RunWebTests(t, ctx, rt, "testdata/interrupt.json", nil) +} + +func TestSearch(t *testing.T) { + ctx, rt := testsuite.Runtime() + + defer testsuite.Reset(testsuite.ResetElastic) + + wg := &sync.WaitGroup{} + + server := web.NewServer(ctx, rt, wg) + server.Start() + + // give our server time to start + time.Sleep(time.Second) + + defer server.Stop() + + tcs := []struct { + method string + url string + body string + expectedStatus int + expectedError string + expectedHits []models.ContactID + expectedQuery string + expectedAttributes []string + expectedFields []*assets.FieldReference + expectedSchemes []string + expectedAllowAsGroup bool + }{ + { // 0 + method: "GET", + url: "/mr/contact/search", + expectedStatus: 405, + expectedError: "illegal method: GET", + }, + { // 1 + method: "POST", + url: "/mr/contact/search", + body: fmt.Sprintf(`{"org_id": 1, "query": "birthday = tomorrow", "group_id": %d}`, testdata.ActiveGroup.ID), + expectedStatus: 400, + expectedError: "can't resolve 'birthday' to attribute, scheme or field", + }, + { // 2 + method: "POST", + url: "/mr/contact/search", + body: fmt.Sprintf(`{"org_id": 1, "query": "age > tomorrow", "group_id": %d}`, testdata.ActiveGroup.ID), + expectedStatus: 400, + expectedError: "can't convert 'tomorrow' to a number", + }, + { // 3 + method: "POST", + url: "/mr/contact/search", + body: fmt.Sprintf(`{"org_id": 1, "query": "Cathy", "group_id": %d}`, testdata.ActiveGroup.ID), + expectedStatus: 200, + expectedHits: []models.ContactID{testdata.Cathy.ID}, + expectedQuery: `name ~ "Cathy"`, + expectedAttributes: []string{"name"}, + expectedFields: []*assets.FieldReference{}, + expectedSchemes: []string{}, + expectedAllowAsGroup: true, + }, + { // 4 + method: "POST", + url: 
"/mr/contact/search", + body: fmt.Sprintf(`{"org_id": 1, "query": "Cathy OR George", "group_id": %d, "exclude_ids": [%d, %d]}`, testdata.ActiveGroup.ID, testdata.Bob.ID, testdata.George.ID), + expectedStatus: 200, + expectedHits: []models.ContactID{testdata.Cathy.ID}, + expectedQuery: `name ~ "Cathy" OR name ~ "George"`, + expectedAttributes: []string{"name"}, + expectedFields: []*assets.FieldReference{}, + expectedSchemes: []string{}, + expectedAllowAsGroup: true, + }, + { // 5 + method: "POST", + url: "/mr/contact/search", + body: fmt.Sprintf(`{"org_id": 1, "query": "AGE = 10 and gender = M", "group_id": %d}`, testdata.ActiveGroup.ID), + expectedStatus: 200, + expectedHits: []models.ContactID{}, + expectedQuery: `age = 10 AND gender = "M"`, + expectedAttributes: []string{}, + expectedFields: []*assets.FieldReference{ + assets.NewFieldReference("age", "Age"), + assets.NewFieldReference("gender", "Gender"), + }, + expectedSchemes: []string{}, + expectedAllowAsGroup: true, + }, + { // 6 + method: "POST", + url: "/mr/contact/search", + body: fmt.Sprintf(`{"org_id": 1, "query": "", "group_id": %d}`, testdata.TestersGroup.ID), + expectedStatus: 200, + expectedHits: []models.ContactID{10013, 10012, 10011, 10010, 10009, 10008, 10007, 10006, 10005, 10004}, + expectedQuery: ``, + expectedAttributes: []string{}, + expectedFields: []*assets.FieldReference{}, + expectedSchemes: []string{}, + expectedAllowAsGroup: true, + }, + } + + for i, tc := range tcs { + var body io.Reader + if tc.body != "" { + body = bytes.NewReader([]byte(tc.body)) + } + + req, err := http.NewRequest(tc.method, "http://localhost:8090"+tc.url, body) + assert.NoError(t, err, "%d: error creating request", i) + + resp, err := http.DefaultClient.Do(req) + assert.NoError(t, err, "%d: error making request", i) + + assert.Equal(t, tc.expectedStatus, resp.StatusCode, "%d: unexpected status", i) + + content, err := io.ReadAll(resp.Body) + assert.NoError(t, err, "%d: error reading body", i) + + // on 200 
responses parse them + if resp.StatusCode == 200 { + r := &contact.SearchResponse{} + err = json.Unmarshal(content, r) + assert.NoError(t, err) + assert.Equal(t, tc.expectedHits, r.ContactIDs, "%d: hits mismatch", i) + assert.Equal(t, tc.expectedQuery, r.Query, "%d: query mismatch", i) + + if len(tc.expectedAttributes) > 0 || len(tc.expectedFields) > 0 || len(tc.expectedSchemes) > 0 { + assert.Equal(t, tc.expectedAttributes, r.Metadata.Attributes) + assert.Equal(t, tc.expectedFields, r.Metadata.Fields) + assert.Equal(t, tc.expectedSchemes, r.Metadata.Schemes) + assert.Equal(t, tc.expectedAllowAsGroup, r.Metadata.AllowAsGroup) + } + } else { + r := &web.ErrorResponse{} + err = json.Unmarshal(content, r) + assert.NoError(t, err) + assert.Equal(t, tc.expectedError, r.Error) + } + } +} + +func TestParseQuery(t *testing.T) { + ctx, rt := testsuite.Runtime() + + defer testsuite.Reset(testsuite.ResetAll) + + testsuite.RunWebTests(t, ctx, rt, "testdata/parse_query.json", nil) +} + +func TestSpecToCreation(t *testing.T) { + ctx, rt := testsuite.Runtime() + + oa, err := models.GetOrgAssets(ctx, rt, testdata.Org1.ID) + require.NoError(t, err) + + sa := oa.SessionAssets() + env := envs.NewBuilder().Build() + + // empty spec is valid + s := &models.ContactSpec{} + c, err := contact.SpecToCreation(s, env, sa) + assert.NoError(t, err) + assert.Equal(t, "", c.Name) + assert.Equal(t, envs.NilLanguage, c.Language) + assert.Equal(t, 0, len(c.URNs)) + assert.Equal(t, 0, len(c.Mods)) + + // try to set invalid language + lang := "xyzd" + s = &models.ContactSpec{Language: &lang} + _, err = contact.SpecToCreation(s, env, sa) + assert.EqualError(t, err, "invalid language: iso-639-3 codes must be 3 characters, got: xyzd") + + // try to set non-existent contact field + s = &models.ContactSpec{Fields: map[string]string{"goats": "7"}} + _, err = contact.SpecToCreation(s, env, sa) + assert.EqualError(t, err, "unknown contact field 'goats'") + + // try to add to non-existent group + s = 
&models.ContactSpec{Groups: []assets.GroupUUID{"52f6c50e-f9a8-4f24-bb80-5c9f144ed27f"}} + _, err = contact.SpecToCreation(s, env, sa) + assert.EqualError(t, err, "unknown contact group '52f6c50e-f9a8-4f24-bb80-5c9f144ed27f'") + + // try to add to dynamic group + s = &models.ContactSpec{Groups: []assets.GroupUUID{"52f6c50e-f9a8-4f24-bb80-5c9f144ed27f"}} + _, err = contact.SpecToCreation(s, env, sa) + assert.EqualError(t, err, "unknown contact group '52f6c50e-f9a8-4f24-bb80-5c9f144ed27f'") +} diff --git a/web/contact/bulk_create.go b/web/contact/bulk_create.go new file mode 100644 index 000000000..ab5e75f48 --- /dev/null +++ b/web/contact/bulk_create.go @@ -0,0 +1,85 @@ +package contact + +import ( + "context" + "net/http" + + "github.com/nyaruka/goflow/flows" + "github.com/nyaruka/mailroom/core/models" + "github.com/nyaruka/mailroom/runtime" + "github.com/nyaruka/mailroom/web" + "github.com/pkg/errors" +) + +func init() { + web.RegisterRoute(http.MethodPost, "/mr/contact/bulk_create", web.RequireAuthToken(web.JSONPayload(handleBulkCreate))) +} + +// Request to create new contacts. 
+// +// { +// "org_id": 1, +// "user_id": 1, +// "specs": [ +// { +// "name": "Joe Blow", +// "language": "eng", +// "urns": ["tel:+250788123123"], +// "fields": {"age": "39"}, +// "groups": ["b0b778db-6657-430b-9272-989ad43a10db"] +// }, +// { +// "name": "Frank", +// "language": "spa", +// "urns": ["tel:+250788124124"], +// "fields": {}, +// "groups": [] +// } +// ] +// } +type bulkCreateRequest struct { + OrgID models.OrgID `json:"org_id" validate:"required"` + UserID models.UserID `json:"user_id" validate:"required"` + Specs []*models.ContactSpec `json:"specs" validate:"required"` +} + +// handles a request to create the given contact +func handleBulkCreate(ctx context.Context, rt *runtime.Runtime, r *bulkCreateRequest) (any, int, error) { + oa, err := models.GetOrgAssets(ctx, rt, r.OrgID) + if err != nil { + return nil, 0, errors.Wrapf(err, "unable to load org assets") + } + + creations := make([]*Creation, len(r.Specs)) + for i, spec := range r.Specs { + c, err := SpecToCreation(spec, oa.Env(), oa.SessionAssets()) + if err != nil { + return err, http.StatusBadRequest, nil + } + creations[i] = c + } + + modifiersByContact := make(map[*flows.Contact][]flows.Modifier, len(r.Specs)) + created := make(map[int]*flows.Contact, len(r.Specs)) + errored := make(map[int]string, len(r.Specs)) + status := http.StatusOK + + for i, c := range creations { + _, contact, err := models.CreateContact(ctx, rt.DB, oa, r.UserID, c.Name, c.Language, c.URNs) + if err != nil { + errored[i] = err.Error() + status = http.StatusMultiStatus + continue + } + + created[i] = contact + modifiersByContact[contact] = c.Mods + } + + _, err = models.ApplyModifiers(ctx, rt, oa, r.UserID, modifiersByContact) + if err != nil { + return nil, 0, errors.Wrap(err, "error modifying new contacts") + } + + return map[string]any{"created": created, "errored": errored}, status, nil +} diff --git a/web/contact/contact.go b/web/contact/contact.go deleted file mode 100644 index c67f35cb6..000000000 --- 
a/web/contact/contact.go +++ /dev/null @@ -1,257 +0,0 @@ -package contact - -import ( - "context" - "encoding/json" - "net/http" - - "github.com/nyaruka/gocommon/urns" - "github.com/nyaruka/goflow/flows" - "github.com/nyaruka/mailroom/core/goflow" - "github.com/nyaruka/mailroom/core/models" - "github.com/nyaruka/mailroom/runtime" - "github.com/nyaruka/mailroom/web" - - "github.com/pkg/errors" -) - -func init() { - web.RegisterJSONRoute(http.MethodPost, "/mr/contact/create", web.RequireAuthToken(handleCreate)) - web.RegisterJSONRoute(http.MethodPost, "/mr/contact/modify", web.RequireAuthToken(handleModify)) - web.RegisterJSONRoute(http.MethodPost, "/mr/contact/resolve", web.RequireAuthToken(handleResolve)) - web.RegisterJSONRoute(http.MethodPost, "/mr/contact/interrupt", web.RequireAuthToken(handleInterrupt)) -} - -// Request to create a new contact. -// -// { -// "org_id": 1, -// "user_id": 1, -// "contact": { -// "name": "Joe Blow", -// "language": "eng", -// "urns": ["tel:+250788123123"], -// "fields": {"age": "39"}, -// "groups": ["b0b778db-6657-430b-9272-989ad43a10db"] -// } -// } -type createRequest struct { - OrgID models.OrgID `json:"org_id" validate:"required"` - UserID models.UserID `json:"user_id" validate:"required"` - Contact *models.ContactSpec `json:"contact" validate:"required"` -} - -// handles a request to create the given contact -func handleCreate(ctx context.Context, rt *runtime.Runtime, r *http.Request) (interface{}, int, error) { - request := &createRequest{} - if err := web.ReadAndValidateJSON(r, request); err != nil { - return errors.Wrapf(err, "request failed validation"), http.StatusBadRequest, nil - } - - // grab our org - oa, err := models.GetOrgAssets(ctx, rt, request.OrgID) - if err != nil { - return nil, http.StatusInternalServerError, errors.Wrapf(err, "unable to load org assets") - } - - c, err := SpecToCreation(request.Contact, oa.Env(), oa.SessionAssets()) - if err != nil { - return err, http.StatusBadRequest, nil - } - - _, 
contact, err := models.CreateContact(ctx, rt.DB, oa, request.UserID, c.Name, c.Language, c.URNs) - if err != nil { - return err, http.StatusBadRequest, nil - } - - modifiersByContact := map[*flows.Contact][]flows.Modifier{contact: c.Mods} - _, err = models.ApplyModifiers(ctx, rt, oa, request.UserID, modifiersByContact) - if err != nil { - return nil, http.StatusInternalServerError, errors.Wrap(err, "error modifying new contact") - } - - return map[string]interface{}{"contact": contact}, http.StatusOK, nil -} - -// Request that a set of contacts is modified. -// -// { -// "org_id": 1, -// "user_id": 1, -// "contact_ids": [15,235], -// "modifiers": [{ -// "type": "groups", -// "modification": "add", -// "groups": [{ -// "uuid": "a8e8efdb-78ee-46e7-9eb0-6a578da3b02d", -// "name": "Doctors" -// }] -// }] -// } -type modifyRequest struct { - OrgID models.OrgID `json:"org_id" validate:"required"` - UserID models.UserID `json:"user_id" validate:"required"` - ContactIDs []models.ContactID `json:"contact_ids" validate:"required"` - Modifiers []json.RawMessage `json:"modifiers" validate:"required"` -} - -// Response for a contact update. Will return the full contact state and any errors -// -// { -// "1000": { -// "contact": { -// "id": 123, -// "contact_uuid": "559d4cf7-8ed3-43db-9bbb-2be85345f87e", -// "name": "Joe", -// "language": "eng", -// ... -// }], -// "events": [{ -// .... -// }] -// }, ... 
-// } -type modifyResult struct { - Contact *flows.Contact `json:"contact"` - Events []flows.Event `json:"events"` -} - -// handles a request to apply the passed in actions -func handleModify(ctx context.Context, rt *runtime.Runtime, r *http.Request) (interface{}, int, error) { - request := &modifyRequest{} - if err := web.ReadAndValidateJSON(r, request); err != nil { - return errors.Wrapf(err, "request failed validation"), http.StatusBadRequest, nil - } - - // grab our org assets - oa, err := models.GetOrgAssets(ctx, rt, request.OrgID) - if err != nil { - return nil, http.StatusInternalServerError, errors.Wrapf(err, "unable to load org assets") - } - - // read the modifiers from the request - mods, err := goflow.ReadModifiers(oa.SessionAssets(), request.Modifiers, goflow.ErrorOnMissing) - if err != nil { - return nil, http.StatusBadRequest, err - } - - // load our contacts - contacts, err := models.LoadContacts(ctx, rt.DB, oa, request.ContactIDs) - if err != nil { - return nil, http.StatusBadRequest, errors.Wrapf(err, "unable to load contact") - } - - // convert to map of flow contacts to modifiers - modifiersByContact := make(map[*flows.Contact][]flows.Modifier, len(contacts)) - for _, contact := range contacts { - flowContact, err := contact.FlowContact(oa) - if err != nil { - return nil, http.StatusBadRequest, errors.Wrapf(err, "error creating flow contact for contact: %d", contact.ID()) - } - - modifiersByContact[flowContact] = mods - } - - eventsByContact, err := models.ApplyModifiers(ctx, rt, oa, request.UserID, modifiersByContact) - if err != nil { - return nil, http.StatusBadRequest, err - } - - // create our results - results := make(map[flows.ContactID]modifyResult, len(contacts)) - for flowContact := range modifiersByContact { - results[flowContact.ID()] = modifyResult{ - Contact: flowContact, - Events: eventsByContact[flowContact], - } - } - - return results, http.StatusOK, nil -} - -// Request to resolve a contact based on a channel and URN -// -// { 
-// "org_id": 1, -// "channel_id": 234, -// "urn": "tel:+250788123123" -// } -type resolveRequest struct { - OrgID models.OrgID `json:"org_id" validate:"required"` - ChannelID models.ChannelID `json:"channel_id" validate:"required"` - URN urns.URN `json:"urn" validate:"required"` -} - -// handles a request to resolve a contact -func handleResolve(ctx context.Context, rt *runtime.Runtime, r *http.Request) (interface{}, int, error) { - request := &resolveRequest{} - if err := web.ReadAndValidateJSON(r, request); err != nil { - return errors.Wrapf(err, "request failed validation"), http.StatusBadRequest, nil - } - - // grab our org - oa, err := models.GetOrgAssets(ctx, rt, request.OrgID) - if err != nil { - return nil, http.StatusInternalServerError, errors.Wrapf(err, "unable to load org assets") - } - - urn := request.URN.Normalize(string(oa.Env().DefaultCountry())) - - // TODO rework normalization to be idempotent because an invalid number like +2621112222 normalizes to - // 2621112222 (invalid) and then normalizes to +12621112222 (valid) - urn = urn.Normalize(string(oa.Env().DefaultCountry())) - - if err := urn.Validate(); err != nil { - return errors.Wrapf(err, "URN failed validation"), http.StatusBadRequest, nil - } - - _, contact, created, err := models.GetOrCreateContact(ctx, rt.DB, oa, []urns.URN{urn}, request.ChannelID) - if err != nil { - return nil, http.StatusInternalServerError, errors.Wrapf(err, "error getting or creating contact") - } - - // find the URN on the contact - for _, u := range contact.URNs() { - if urn.Identity() == u.URN().Identity() { - urn = u.URN() - break - } - } - - return map[string]interface{}{ - "contact": contact, - "urn": map[string]interface{}{ - "id": models.GetURNInt(urn, "id"), - "identity": urn.Identity(), - }, - "created": created, - }, http.StatusOK, nil -} - -// Request that a single contact is interrupted. Multiple contacts should be interrupted via the task. 
-// -// { -// "org_id": 1, -// "user_id": 3, -// "contact_id": 235 -// } -type interruptRequest struct { - OrgID models.OrgID `json:"org_id" validate:"required"` - UserID models.UserID `json:"user_id" validate:"required"` - ContactID models.ContactID `json:"contact_id" validate:"required"` -} - -// handles a request to interrupt a contact -func handleInterrupt(ctx context.Context, rt *runtime.Runtime, r *http.Request) (interface{}, int, error) { - request := &interruptRequest{} - if err := web.ReadAndValidateJSON(r, request); err != nil { - return errors.Wrapf(err, "request failed validation"), http.StatusBadRequest, nil - } - - count, err := models.InterruptSessionsForContacts(ctx, rt.DB, []models.ContactID{request.ContactID}) - if err != nil { - return nil, http.StatusInternalServerError, errors.Wrapf(err, "unable to interrupt contact") - } - - return map[string]interface{}{"sessions": count}, http.StatusOK, nil -} diff --git a/web/contact/contact_test.go b/web/contact/contact_test.go deleted file mode 100644 index b5b42334b..000000000 --- a/web/contact/contact_test.go +++ /dev/null @@ -1,81 +0,0 @@ -package contact - -import ( - "testing" - "time" - - _ "github.com/nyaruka/mailroom/core/handlers" - "github.com/nyaruka/mailroom/core/models" - _ "github.com/nyaruka/mailroom/services/tickets/intern" - "github.com/nyaruka/mailroom/testsuite" - "github.com/nyaruka/mailroom/testsuite/testdata" - "github.com/nyaruka/mailroom/web" -) - -func TestCreateContacts(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() - - defer testsuite.Reset(testsuite.ResetAll) - - // detach Cathy's tel URN - db.MustExec(`UPDATE contacts_contacturn SET contact_id = NULL WHERE contact_id = $1`, testdata.Cathy.ID) - - db.MustExec(`ALTER SEQUENCE contacts_contact_id_seq RESTART WITH 30000`) - - web.RunWebTests(t, ctx, rt, "testdata/create.json", nil) -} - -func TestModifyContacts(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() - - defer testsuite.Reset(testsuite.ResetAll) - - // to be 
deterministic, update the creation date on cathy - db.MustExec(`UPDATE contacts_contact SET created_on = $1 WHERE id = $2`, time.Date(2018, 7, 6, 12, 30, 0, 123456789, time.UTC), testdata.Cathy.ID) - - // make our campaign group dynamic - db.MustExec(`UPDATE contacts_contactgroup SET query = 'age > 18' WHERE id = $1`, testdata.DoctorsGroup.ID) - - // insert an event on our campaign that is based on created on - testdata.InsertCampaignFlowEvent(db, testdata.RemindersCampaign, testdata.Favorites, testdata.CreatedOnField, 1000, "W") - - // for simpler tests we clear out cathy's fields and groups to start - db.MustExec(`UPDATE contacts_contact SET fields = NULL WHERE id = $1`, testdata.Cathy.ID) - db.MustExec(`DELETE FROM contacts_contactgroup_contacts WHERE contact_id = $1`, testdata.Cathy.ID) - db.MustExec(`UPDATE contacts_contacturn SET contact_id = NULL WHERE contact_id = $1`, testdata.Cathy.ID) - - // because we made changes to a group above, need to make sure we don't use stale org assets - models.FlushCache() - - web.RunWebTests(t, ctx, rt, "testdata/modify.json", nil) - - models.FlushCache() -} - -func TestResolveContacts(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() - - defer testsuite.Reset(testsuite.ResetAll) - - // detach Cathy's tel URN - db.MustExec(`UPDATE contacts_contacturn SET contact_id = NULL WHERE contact_id = $1`, testdata.Cathy.ID) - - db.MustExec(`ALTER SEQUENCE contacts_contact_id_seq RESTART WITH 30000`) - - web.RunWebTests(t, ctx, rt, "testdata/resolve.json", nil) -} - -func TestInterruptContact(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() - - defer testsuite.Reset(testsuite.ResetData) - - // give Cathy an completed and a waiting session - testdata.InsertFlowSession(db, testdata.Org1, testdata.Cathy, models.FlowTypeMessaging, models.SessionStatusCompleted, testdata.Favorites, models.NilCallID) - testdata.InsertWaitingSession(db, testdata.Org1, testdata.Cathy, models.FlowTypeMessaging, testdata.Favorites, models.NilCallID, 
time.Now(), time.Now().Add(time.Hour), true, nil) - - // give Bob a waiting session - testdata.InsertWaitingSession(db, testdata.Org1, testdata.Bob, models.FlowTypeMessaging, testdata.PickANumber, models.NilCallID, time.Now(), time.Now().Add(time.Hour), true, nil) - - web.RunWebTests(t, ctx, rt, "testdata/interrupt.json", nil) -} diff --git a/web/contact/create.go b/web/contact/create.go new file mode 100644 index 000000000..09f960cf4 --- /dev/null +++ b/web/contact/create.go @@ -0,0 +1,61 @@ +package contact + +import ( + "context" + "net/http" + + "github.com/nyaruka/goflow/flows" + "github.com/nyaruka/mailroom/core/models" + "github.com/nyaruka/mailroom/runtime" + "github.com/nyaruka/mailroom/web" + "github.com/pkg/errors" +) + +func init() { + web.RegisterRoute(http.MethodPost, "/mr/contact/create", web.RequireAuthToken(web.JSONPayload(handleCreate))) +} + +// Request to create a new contact. +// +// { +// "org_id": 1, +// "user_id": 1, +// "contact": { +// "name": "Joe Blow", +// "language": "eng", +// "urns": ["tel:+250788123123"], +// "fields": {"age": "39"}, +// "groups": ["b0b778db-6657-430b-9272-989ad43a10db"] +// } +// } +type createRequest struct { + OrgID models.OrgID `json:"org_id" validate:"required"` + UserID models.UserID `json:"user_id" validate:"required"` + Contact *models.ContactSpec `json:"contact" validate:"required"` +} + +// handles a request to create the given contact +func handleCreate(ctx context.Context, rt *runtime.Runtime, r *createRequest) (any, int, error) { + oa, err := models.GetOrgAssets(ctx, rt, r.OrgID) + if err != nil { + return nil, 0, errors.Wrapf(err, "unable to load org assets") + } + + c, err := SpecToCreation(r.Contact, oa.Env(), oa.SessionAssets()) + if err != nil { + return err, http.StatusBadRequest, nil + } + + _, contact, err := models.CreateContact(ctx, rt.DB, oa, r.UserID, c.Name, c.Language, c.URNs) + if err != nil { + return err, http.StatusBadRequest, nil + } + + modifiersByContact := 
map[*flows.Contact][]flows.Modifier{contact: c.Mods} + _, err = models.ApplyModifiers(ctx, rt, oa, r.UserID, modifiersByContact) + if err != nil { + return nil, 0, errors.Wrap(err, "error modifying new contact") + } + + return map[string]any{"contact": contact}, http.StatusOK, nil +} diff --git a/web/contact/inspect.go b/web/contact/inspect.go new file mode 100644 index 000000000..673015ca5 --- /dev/null +++ b/web/contact/inspect.go @@ -0,0 +1,111 @@ +package contact + +import ( + "context" + "net/http" + + "github.com/nyaruka/goflow/assets" + "github.com/nyaruka/goflow/flows" + "github.com/nyaruka/mailroom/core/models" + "github.com/nyaruka/mailroom/runtime" + "github.com/nyaruka/mailroom/web" + "github.com/pkg/errors" +) + +func init() { + web.RegisterRoute(http.MethodPost, "/mr/contact/inspect", web.RequireAuthToken(web.JSONPayload(handleInspect))) +} + +// Inspects contacts. +// +// { +// "org_id": 1, +// "contact_ids": [10000, 10001] +// } +type inspectRequest struct { + OrgID models.OrgID `json:"org_id" validate:"required"` + ContactIDs []models.ContactID `json:"contact_ids" validate:"required"` +} + +// { +// "10000": { +// "urns": [ +// { +// "channel": {"uuid": "5a1ae059-df67-4345-922c-2fad8a2376f2", "name": "Telegram"}, +// "scheme": "telegram", +// "path": "1234567876543", +// "display": "" +// }, +// { +// "channel": {"uuid": "b7aa1c23-b989-4e33-bd4c-1a8511259683", "name": "Vonage"}, +// "scheme": "tel", +// "path": "+1234567890", +// "display": "" +// }, +// { +// "channel": null, +// "scheme": "twitterid", +// "path": "45754875854", +// "display": "bobby" +// } +// ] +// } +// "10001": { +// "urns": [] +// } +// } +type urnInfo struct { + Channel *assets.ChannelReference `json:"channel"` + Scheme string `json:"scheme"` + Path string `json:"path"` + Display string `json:"display"` +} + +type contactInfo struct { + URNs []urnInfo `json:"urns"` +} + +func handleInspect(ctx context.Context, rt *runtime.Runtime, r *inspectRequest) (any, int, error) { + oa, 
err := models.GetOrgAssets(ctx, rt, r.OrgID) + if err != nil { + return nil, 0, errors.Wrapf(err, "error loading org assets") + } + + // load our contacts + contacts, err := models.LoadContacts(ctx, rt.DB, oa, r.ContactIDs) + if err != nil { + return nil, 0, errors.Wrapf(err, "error loading contact") + } + + response := make(map[flows.ContactID]*contactInfo, len(contacts)) + + for _, c := range contacts { + flowContact, err := c.FlowContact(oa) + if err != nil { + return nil, 0, errors.Wrapf(err, "error creating flow contact") + } + + // first add the URNs which have a corresponding channel (engine considers these destinations) + dests := flowContact.ResolveDestinations(true) + urnsSeen := make(map[string]bool, len(dests)) + urnInfos := make([]urnInfo, 0, len(flowContact.URNs())) + + for _, d := range dests { + scheme, path, _, display := d.URN.URN().ToParts() + urnInfos = append(urnInfos, urnInfo{Channel: d.Channel.Reference(), Scheme: scheme, Path: path, Display: display}) + urnsSeen[scheme+":"+path] = true + } + + // then the rest of the unsendable URNs + for _, u := range flowContact.URNs() { + scheme, path, _, display := u.URN().ToParts() + if !urnsSeen[scheme+":"+path] { + urnInfos = append(urnInfos, urnInfo{Channel: nil, Scheme: scheme, Path: path, Display: display}) + } + } + + response[flowContact.ID()] = &contactInfo{URNs: urnInfos} + } + + return response, http.StatusOK, nil +} diff --git a/web/contact/interrupt.go b/web/contact/interrupt.go new file mode 100644 index 000000000..ae5ff2efa --- /dev/null +++ b/web/contact/interrupt.go @@ -0,0 +1,38 @@ +package contact + +import ( + "context" + "net/http" + + "github.com/nyaruka/mailroom/core/models" + "github.com/nyaruka/mailroom/runtime" + "github.com/nyaruka/mailroom/web" + "github.com/pkg/errors" +) + +func init() { + web.RegisterRoute(http.MethodPost, "/mr/contact/interrupt", web.RequireAuthToken(web.JSONPayload(handleInterrupt))) +} + +// Request that a single contact is interrupted. 
Multiple contacts should be interrupted via the task. +// +// { +// "org_id": 1, +// "user_id": 3, +// "contact_id": 235 +// } +type interruptRequest struct { + OrgID models.OrgID `json:"org_id" validate:"required"` + UserID models.UserID `json:"user_id" validate:"required"` + ContactID models.ContactID `json:"contact_id" validate:"required"` +} + +// handles a request to interrupt a contact +func handleInterrupt(ctx context.Context, rt *runtime.Runtime, r *interruptRequest) (any, int, error) { + count, err := models.InterruptSessionsForContacts(ctx, rt.DB, []models.ContactID{r.ContactID}) + if err != nil { + return nil, 0, errors.Wrapf(err, "unable to interrupt contact") + } + + return map[string]any{"sessions": count}, http.StatusOK, nil +} diff --git a/web/contact/modify.go b/web/contact/modify.go new file mode 100644 index 000000000..69a6fdd03 --- /dev/null +++ b/web/contact/modify.go @@ -0,0 +1,141 @@ +package contact + +import ( + "context" + "encoding/json" + "net/http" + "time" + + "github.com/nyaruka/goflow/flows" + "github.com/nyaruka/mailroom/core/goflow" + "github.com/nyaruka/mailroom/core/models" + "github.com/nyaruka/mailroom/runtime" + "github.com/nyaruka/mailroom/web" + "github.com/pkg/errors" + "golang.org/x/exp/maps" +) + +func init() { + web.RegisterRoute(http.MethodPost, "/mr/contact/modify", web.RequireAuthToken(web.JSONPayload(handleModify))) +} + +// Request that a set of contacts is modified. 
+// +// { +// "org_id": 1, +// "user_id": 1, +// "contact_ids": [15,235], +// "modifiers": [{ +// "type": "groups", +// "modification": "add", +// "groups": [{ +// "uuid": "a8e8efdb-78ee-46e7-9eb0-6a578da3b02d", +// "name": "Doctors" +// }] +// }] +// } +type modifyRequest struct { + OrgID models.OrgID `json:"org_id" validate:"required"` + UserID models.UserID `json:"user_id" validate:"required"` + ContactIDs []models.ContactID `json:"contact_ids" validate:"required"` + Modifiers []json.RawMessage `json:"modifiers" validate:"required"` +} + +// Response for contact modify. Will return the full contact state and the events generated. Contacts that we couldn't +// get a lock for are returned in skipped. +// +// { +// "modified": { +// "1001": { +// "contact": { +// "id": 123, +// "contact_uuid": "559d4cf7-8ed3-43db-9bbb-2be85345f87e", +// "name": "Joe", +// "language": "eng", +// ... +// }, +// "events": [ +// ... +// ] +// }, +// ... +// }, +// "skipped": [1006, 1007] +// } +type modifyResult struct { + Contact *flows.Contact `json:"contact"` + Events []flows.Event `json:"events"` +} + +type modifyResponse struct { + Modified map[flows.ContactID]modifyResult `json:"modified"` + Skipped []models.ContactID `json:"skipped"` +} + +// handles a request to apply the passed in actions +func handleModify(ctx context.Context, rt *runtime.Runtime, r *modifyRequest) (any, int, error) { + oa, err := models.GetOrgAssets(ctx, rt, r.OrgID) + if err != nil { + return nil, 0, errors.Wrapf(err, "unable to load org assets") + } + + // read the modifiers from the request + mods, err := goflow.ReadModifiers(oa.SessionAssets(), r.Modifiers, goflow.ErrorOnMissing) + if err != nil { + return nil, 0, err + } + + results := make(map[flows.ContactID]modifyResult, len(r.ContactIDs)) + remaining := r.ContactIDs + start := time.Now() + + for len(remaining) > 0 && time.Since(start) < time.Second*10 { + eventsByContact, skipped, err := tryToLockAndModify(ctx, rt, oa, remaining, mods, r.UserID) + 
if err != nil { + return nil, 0, err + } + + for flowContact, contactEvents := range eventsByContact { + results[flowContact.ID()] = modifyResult{Contact: flowContact, Events: contactEvents} + } + + remaining = skipped + } + + return &modifyResponse{Modified: results, Skipped: remaining}, http.StatusOK, nil +} + +func tryToLockAndModify(ctx context.Context, rt *runtime.Runtime, oa *models.OrgAssets, ids []models.ContactID, mods []flows.Modifier, userID models.UserID) (map[*flows.Contact][]flows.Event, []models.ContactID, error) { + locks, skipped, err := models.LockContacts(ctx, rt, oa.OrgID(), ids, time.Second) + if err != nil { + return nil, nil, err + } + + locked := maps.Keys(locks) + + defer models.UnlockContacts(rt, oa.OrgID(), locks) + + // load our contacts + contacts, err := models.LoadContacts(ctx, rt.DB, oa, locked) + if err != nil { + return nil, nil, errors.Wrap(err, "unable to load contacts") + } + + // convert to map of flow contacts to modifiers + modifiersByContact := make(map[*flows.Contact][]flows.Modifier, len(contacts)) + for _, contact := range contacts { + flowContact, err := contact.FlowContact(oa) + if err != nil { + return nil, nil, errors.Wrap(err, "error creating flow contact") + } + + modifiersByContact[flowContact] = mods + } + + eventsByContact, err := models.ApplyModifiers(ctx, rt, oa, userID, modifiersByContact) + if err != nil { + return nil, nil, err + } + + return eventsByContact, skipped, nil +} diff --git a/web/contact/parse_query.go b/web/contact/parse_query.go new file mode 100644 index 000000000..f6c49e849 --- /dev/null +++ b/web/contact/parse_query.go @@ -0,0 +1,103 @@ +package contact + +import ( + "context" + "net/http" + + "github.com/nyaruka/goflow/assets" + "github.com/nyaruka/goflow/contactql" + "github.com/nyaruka/mailroom/core/models" + "github.com/nyaruka/mailroom/core/search" + "github.com/nyaruka/mailroom/runtime" + "github.com/nyaruka/mailroom/web" + "github.com/pkg/errors" +) + +func init() { + 
web.RegisterRoute(http.MethodPost, "/mr/contact/parse_query", web.RequireAuthToken(web.JSONPayload(handleParseQuery))) +} + +// Request to parse the passed in query +// +// { +// "org_id": 1, +// "query": "age > 10", +// "group_id": 234 +// } +type parseRequest struct { + OrgID models.OrgID `json:"org_id" validate:"required"` + Query string `json:"query" validate:"required"` + ParseOnly bool `json:"parse_only"` + GroupID models.GroupID `json:"group_id"` + GroupUUID assets.GroupUUID `json:"group_uuid"` // deprecated +} + +// Response for a parse query request +// +// { +// "query": "age > 10", +// "elastic_query": { .. }, +// "metadata": { +// "fields": [ +// {"key": "age", "name": "Age"} +// ], +// "allow_as_group": true +// } +// } +type parseResponse struct { + Query string `json:"query"` + ElasticQuery interface{} `json:"elastic_query"` + Metadata *contactql.Inspection `json:"metadata,omitempty"` +} + +// handles a query parsing request +func handleParseQuery(ctx context.Context, rt *runtime.Runtime, r *parseRequest) (any, int, error) { + oa, err := models.GetOrgAssetsWithRefresh(ctx, rt, r.OrgID, models.RefreshFields|models.RefreshGroups) + if err != nil { + return nil, 0, errors.Wrapf(err, "unable to load org assets") + } + + var group *models.Group + if r.GroupID != 0 { + group = oa.GroupByID(r.GroupID) + } else if r.GroupUUID != "" { + group = oa.GroupByUUID(r.GroupUUID) + } + + env := oa.Env() + var resolver contactql.Resolver + if !r.ParseOnly { + resolver = oa.SessionAssets() + } + + parsed, err := contactql.ParseQuery(env, r.Query, resolver) + if err != nil { + isQueryError, qerr := contactql.IsQueryError(err) + if isQueryError { + return qerr, http.StatusBadRequest, nil + } + return nil, 0, err + } + + // normalize and inspect the query + normalized := parsed.String() + metadata := contactql.Inspect(parsed) + + var elasticSource interface{} + if !r.ParseOnly { + eq := search.BuildElasticQuery(oa, group, models.NilContactStatus, nil, parsed) + 
elasticSource, err = eq.Source() + if err != nil { + return nil, 0, errors.Wrap(err, "error getting elastic source") + } + } + + // build our response + response := &parseResponse{ + Query: normalized, + ElasticQuery: elasticSource, + Metadata: metadata, + } + + return response, http.StatusOK, nil +} diff --git a/web/contact/resolve.go b/web/contact/resolve.go new file mode 100644 index 000000000..30daedee9 --- /dev/null +++ b/web/contact/resolve.go @@ -0,0 +1,69 @@ +package contact + +import ( + "context" + "net/http" + + "github.com/nyaruka/gocommon/urns" + "github.com/nyaruka/mailroom/core/models" + "github.com/nyaruka/mailroom/runtime" + "github.com/nyaruka/mailroom/web" + "github.com/pkg/errors" +) + +func init() { + web.RegisterRoute(http.MethodPost, "/mr/contact/resolve", web.RequireAuthToken(web.JSONPayload(handleResolve))) +} + +// Request to resolve a contact based on a channel and URN +// +// { +// "org_id": 1, +// "channel_id": 234, +// "urn": "tel:+250788123123" +// } +type resolveRequest struct { + OrgID models.OrgID `json:"org_id" validate:"required"` + ChannelID models.ChannelID `json:"channel_id" validate:"required"` + URN urns.URN `json:"urn" validate:"required"` +} + +// handles a request to resolve a contact +func handleResolve(ctx context.Context, rt *runtime.Runtime, r *resolveRequest) (any, int, error) { + oa, err := models.GetOrgAssets(ctx, rt, r.OrgID) + if err != nil { + return nil, 0, errors.Wrapf(err, "unable to load org assets") + } + + urn := r.URN.Normalize(string(oa.Env().DefaultCountry())) + + // TODO rework normalization to be idempotent because an invalid number like +2621112222 normalizes to + // 2621112222 (invalid) and then normalizes to +12621112222 (valid) + urn = urn.Normalize(string(oa.Env().DefaultCountry())) + + if err := urn.Validate(); err != nil { + return errors.Wrap(err, "URN failed validation"), http.StatusBadRequest, nil + } + + _, contact, created, err := models.GetOrCreateContact(ctx, rt.DB, oa, []urns.URN{urn}, 
r.ChannelID) + if err != nil { + return nil, 0, errors.Wrapf(err, "error getting or creating contact") + } + + // find the URN on the contact + for _, u := range contact.URNs() { + if urn.Identity() == u.URN().Identity() { + urn = u.URN() + break + } + } + + return map[string]any{ + "contact": contact, + "urn": map[string]interface{}{ + "id": models.GetURNInt(urn, "id"), + "identity": urn.Identity(), + }, + "created": created, + }, http.StatusOK, nil +} diff --git a/web/contact/search.go b/web/contact/search.go index d411a4a78..684722196 100644 --- a/web/contact/search.go +++ b/web/contact/search.go @@ -10,13 +10,11 @@ import ( "github.com/nyaruka/mailroom/core/search" "github.com/nyaruka/mailroom/runtime" "github.com/nyaruka/mailroom/web" - "github.com/pkg/errors" ) func init() { - web.RegisterJSONRoute(http.MethodPost, "/mr/contact/search", web.RequireAuthToken(handleSearch)) - web.RegisterJSONRoute(http.MethodPost, "/mr/contact/parse_query", web.RequireAuthToken(handleParseQuery)) + web.RegisterRoute(http.MethodPost, "/mr/contact/search", web.RequireAuthToken(web.JSONPayload(handleSearch))) } // Searches the contacts for an org @@ -52,7 +50,7 @@ type searchRequest struct { // "allow_as_group": true // } // } -type searchResponse struct { +type SearchResponse struct { Query string `json:"query"` ContactIDs []models.ContactID `json:"contact_ids"` Total int64 `json:"total"` @@ -62,38 +60,23 @@ type searchResponse struct { } // handles a contact search request -func handleSearch(ctx context.Context, rt *runtime.Runtime, r *http.Request) (interface{}, int, error) { - request := &searchRequest{ - Offset: 0, - PageSize: 50, - Sort: "-id", - } - if err := web.ReadAndValidateJSON(r, request); err != nil { - return errors.Wrapf(err, "request failed validation"), http.StatusBadRequest, nil - } - - // grab our org assets - oa, err := models.GetOrgAssetsWithRefresh(ctx, rt, request.OrgID, models.RefreshFields|models.RefreshGroups) +func handleSearch(ctx context.Context, rt 
*runtime.Runtime, r *searchRequest) (any, int, error) { + oa, err := models.GetOrgAssetsWithRefresh(ctx, rt, r.OrgID, models.RefreshFields|models.RefreshGroups) if err != nil { - return nil, http.StatusInternalServerError, errors.Wrapf(err, "unable to load org assets") + return nil, 0, errors.Wrapf(err, "unable to load org assets") } - var group *models.Group - if request.GroupID != 0 { - group = oa.GroupByID(request.GroupID) - } else if request.GroupUUID != "" { - group = oa.GroupByUUID(request.GroupUUID) - } + group := oa.GroupByID(r.GroupID) // perform our search - parsed, hits, total, err := search.GetContactIDsForQueryPage(ctx, rt.ES, oa, group, request.ExcludeIDs, request.Query, request.Sort, request.Offset, request.PageSize) + parsed, hits, total, err := search.GetContactIDsForQueryPage(ctx, rt, oa, group, r.ExcludeIDs, r.Query, r.Sort, r.Offset, 50) if err != nil { isQueryError, qerr := contactql.IsQueryError(err) if isQueryError { return qerr, http.StatusBadRequest, nil } - return nil, http.StatusInternalServerError, err + return nil, 0, err } // normalize and inspect the query @@ -106,105 +89,14 @@ func handleSearch(ctx context.Context, rt *runtime.Runtime, r *http.Request) (in } // build our response - response := &searchResponse{ + response := &SearchResponse{ Query: normalized, ContactIDs: hits, Total: total, - Offset: request.Offset, - Sort: request.Sort, + Offset: r.Offset, + Sort: r.Sort, Metadata: metadata, } return response, http.StatusOK, nil } - -// Request to parse the passed in query -// -// { -// "org_id": 1, -// "query": "age > 10", -// "group_id": 234 -// } -type parseRequest struct { - OrgID models.OrgID `json:"org_id" validate:"required"` - Query string `json:"query" validate:"required"` - ParseOnly bool `json:"parse_only"` - GroupID models.GroupID `json:"group_id"` - GroupUUID assets.GroupUUID `json:"group_uuid"` // deprecated -} - -// Response for a parse query request -// -// { -// "query": "age > 10", -// "elastic_query": { .. 
}, -// "metadata": { -// "fields": [ -// {"key": "age", "name": "Age"} -// ], -// "allow_as_group": true -// } -// } -type parseResponse struct { - Query string `json:"query"` - ElasticQuery interface{} `json:"elastic_query"` - Metadata *contactql.Inspection `json:"metadata,omitempty"` -} - -// handles a query parsing request -func handleParseQuery(ctx context.Context, rt *runtime.Runtime, r *http.Request) (interface{}, int, error) { - request := &parseRequest{} - if err := web.ReadAndValidateJSON(r, request); err != nil { - return errors.Wrapf(err, "request failed validation"), http.StatusBadRequest, nil - } - - // grab our org assets - oa, err := models.GetOrgAssetsWithRefresh(ctx, rt, request.OrgID, models.RefreshFields|models.RefreshGroups) - if err != nil { - return nil, http.StatusInternalServerError, errors.Wrapf(err, "unable to load org assets") - } - - var group *models.Group - if request.GroupID != 0 { - group = oa.GroupByID(request.GroupID) - } else if request.GroupUUID != "" { - group = oa.GroupByUUID(request.GroupUUID) - } - - env := oa.Env() - var resolver contactql.Resolver - if !request.ParseOnly { - resolver = oa.SessionAssets() - } - - parsed, err := contactql.ParseQuery(env, request.Query, resolver) - if err != nil { - isQueryError, qerr := contactql.IsQueryError(err) - if isQueryError { - return qerr, http.StatusBadRequest, nil - } - return nil, http.StatusInternalServerError, err - } - - // normalize and inspect the query - normalized := parsed.String() - metadata := contactql.Inspect(parsed) - - var elasticSource interface{} - if !request.ParseOnly { - eq := search.BuildElasticQuery(oa, group, models.NilContactStatus, nil, parsed) - elasticSource, err = eq.Source() - if err != nil { - return nil, http.StatusInternalServerError, errors.Wrap(err, "error getting elastic source") - } - } - - // build our response - response := &parseResponse{ - Query: normalized, - ElasticQuery: elasticSource, - Metadata: metadata, - } - - return response, 
http.StatusOK, nil -} diff --git a/web/contact/search_test.go b/web/contact/search_test.go deleted file mode 100644 index 1713b0dfe..000000000 --- a/web/contact/search_test.go +++ /dev/null @@ -1,237 +0,0 @@ -package contact - -import ( - "bytes" - "encoding/json" - "fmt" - "io" - "net/http" - "sync" - "testing" - "time" - - "github.com/nyaruka/goflow/assets" - "github.com/nyaruka/goflow/test" - _ "github.com/nyaruka/mailroom/core/handlers" - "github.com/nyaruka/mailroom/core/models" - "github.com/nyaruka/mailroom/testsuite" - "github.com/nyaruka/mailroom/testsuite/testdata" - "github.com/nyaruka/mailroom/web" - - "github.com/stretchr/testify/assert" -) - -func TestContactSearch(t *testing.T) { - ctx, rt, _, _ := testsuite.Get() - - wg := &sync.WaitGroup{} - - mockES := testsuite.NewMockElasticServer() - defer mockES.Close() - - rt.ES = mockES.Client() - - server := web.NewServer(ctx, rt, wg) - server.Start() - - // give our server time to start - time.Sleep(time.Second) - - defer server.Stop() - - tcs := []struct { - method string - url string - body string - mockResult []models.ContactID - expectedStatus int - expectedError string - expectedHits []models.ContactID - expectedQuery string - expectedAttributes []string - expectedFields []*assets.FieldReference - expectedSchemes []string - expectedAllowAsGroup bool - expectedESRequest string - }{ - { - method: "GET", - url: "/mr/contact/search", - expectedStatus: 405, - expectedError: "illegal method: GET", - }, - { - method: "POST", - url: "/mr/contact/search", - body: fmt.Sprintf(`{"org_id": 1, "query": "birthday = tomorrow", "group_uuid": "%s"}`, testdata.ActiveGroup.UUID), - expectedStatus: 400, - expectedError: "can't resolve 'birthday' to attribute, scheme or field", - }, - { - method: "POST", - url: "/mr/contact/search", - body: fmt.Sprintf(`{"org_id": 1, "query": "age > tomorrow", "group_uuid": "%s"}`, testdata.ActiveGroup.UUID), - expectedStatus: 400, - expectedError: "can't convert 'tomorrow' to a number", 
- }, - { - method: "POST", - url: "/mr/contact/search", - body: fmt.Sprintf(`{"org_id": 1, "query": "Cathy", "group_uuid": "%s"}`, testdata.ActiveGroup.UUID), - mockResult: []models.ContactID{testdata.Cathy.ID}, - expectedStatus: 200, - expectedHits: []models.ContactID{testdata.Cathy.ID}, - expectedQuery: `name ~ "Cathy"`, - expectedAttributes: []string{"name"}, - expectedFields: []*assets.FieldReference{}, - expectedSchemes: []string{}, - expectedAllowAsGroup: true, - }, - { - method: "POST", - url: "/mr/contact/search", - body: fmt.Sprintf(`{"org_id": 1, "query": "Cathy", "group_uuid": "%s", "exclude_ids": [%d, %d]}`, testdata.ActiveGroup.UUID, testdata.Bob.ID, testdata.George.ID), - mockResult: []models.ContactID{testdata.George.ID}, - expectedStatus: 200, - expectedHits: []models.ContactID{testdata.George.ID}, - expectedQuery: `name ~ "Cathy"`, - expectedAttributes: []string{"name"}, - expectedFields: []*assets.FieldReference{}, - expectedSchemes: []string{}, - expectedAllowAsGroup: true, - expectedESRequest: `{ - "_source": false, - "from": 0, - "query": { - "bool": { - "must": [ - { - "term": { - "org_id": 1 - } - }, - { - "term": { - "is_active": true - } - }, - { - "term": { - "group_ids": 1 - } - }, - { - "match": { - "name": { - "query": "cathy" - } - } - } - ], - "must_not": { - "ids": { - "type": "_doc", - "values": [ - "10001", "10002" - ] - } - } - } - }, - "size": 50, - "sort": [ - { - "id": { - "order": "desc" - } - } - ], - "track_total_hits": true - }`, - }, - { - method: "POST", - url: "/mr/contact/search", - body: fmt.Sprintf(`{"org_id": 1, "query": "AGE = 10 and gender = M", "group_uuid": "%s"}`, testdata.ActiveGroup.UUID), - mockResult: []models.ContactID{testdata.Cathy.ID}, - expectedStatus: 200, - expectedHits: []models.ContactID{testdata.Cathy.ID}, - expectedQuery: `age = 10 AND gender = "M"`, - expectedAttributes: []string{}, - expectedFields: []*assets.FieldReference{ - assets.NewFieldReference("age", "Age"), - 
assets.NewFieldReference("gender", "Gender"), - }, - expectedSchemes: []string{}, - expectedAllowAsGroup: true, - }, - { - method: "POST", - url: "/mr/contact/search", - body: fmt.Sprintf(`{"org_id": 1, "query": "", "group_uuid": "%s"}`, testdata.ActiveGroup.UUID), - mockResult: []models.ContactID{testdata.Cathy.ID}, - expectedStatus: 200, - expectedHits: []models.ContactID{testdata.Cathy.ID}, - expectedQuery: ``, - expectedAttributes: []string{}, - expectedFields: []*assets.FieldReference{}, - expectedSchemes: []string{}, - expectedAllowAsGroup: true, - }, - } - - for i, tc := range tcs { - if tc.mockResult != nil { - mockES.AddResponse(tc.mockResult...) - } - - var body io.Reader - if tc.body != "" { - body = bytes.NewReader([]byte(tc.body)) - } - - req, err := http.NewRequest(tc.method, "http://localhost:8090"+tc.url, body) - assert.NoError(t, err, "%d: error creating request", i) - - resp, err := http.DefaultClient.Do(req) - assert.NoError(t, err, "%d: error making request", i) - - assert.Equal(t, tc.expectedStatus, resp.StatusCode, "%d: unexpected status", i) - - content, err := io.ReadAll(resp.Body) - assert.NoError(t, err, "%d: error reading body", i) - - // on 200 responses parse them - if resp.StatusCode == 200 { - r := &searchResponse{} - err = json.Unmarshal(content, r) - assert.NoError(t, err) - assert.Equal(t, tc.expectedHits, r.ContactIDs) - assert.Equal(t, tc.expectedQuery, r.Query) - - if len(tc.expectedAttributes) > 0 || len(tc.expectedFields) > 0 || len(tc.expectedSchemes) > 0 { - assert.Equal(t, tc.expectedAttributes, r.Metadata.Attributes) - assert.Equal(t, tc.expectedFields, r.Metadata.Fields) - assert.Equal(t, tc.expectedSchemes, r.Metadata.Schemes) - assert.Equal(t, tc.expectedAllowAsGroup, r.Metadata.AllowAsGroup) - } - - if tc.expectedESRequest != "" { - test.AssertEqualJSON(t, []byte(tc.expectedESRequest), []byte(mockES.LastRequestBody), "elastic request mismatch") - } - } else { - r := &web.ErrorResponse{} - err = json.Unmarshal(content, 
r) - assert.NoError(t, err) - assert.Equal(t, tc.expectedError, r.Error) - } - } -} - -func TestParseQuery(t *testing.T) { - ctx, rt, _, _ := testsuite.Get() - - defer testsuite.Reset(testsuite.ResetAll) - - web.RunWebTests(t, ctx, rt, "testdata/parse_query.json", nil) -} diff --git a/web/contact/testdata/bulk_create.json b/web/contact/testdata/bulk_create.json new file mode 100644 index 000000000..1555f0b78 --- /dev/null +++ b/web/contact/testdata/bulk_create.json @@ -0,0 +1,161 @@ +[ + { + "label": "error if specs not provided", + "method": "POST", + "path": "/mr/contact/bulk_create", + "body": { + "org_id": 1, + "user_id": 3 + }, + "status": 400, + "response": { + "error": "request failed validation: field 'specs' is required" + }, + "db_assertions": [ + { + "query": "SELECT count(*) FROM contacts_contact WHERE created_by_id != 2", + "count": 0 + } + ] + }, + { + "label": "create 2 empty contacs", + "method": "POST", + "path": "/mr/contact/bulk_create", + "body": { + "org_id": 1, + "user_id": 1, + "specs": [ + {}, + {} + ] + }, + "status": 200, + "response": { + "created": { + "0": { + "uuid": "d2f852ec-7b4e-457f-ae7f-f8b243c49ff5", + "id": 30000, + "status": "active", + "timezone": "America/Los_Angeles", + "created_on": "2018-07-06T12:30:01.123457Z" + }, + "1": { + "uuid": "692926ea-09d6-4942-bd38-d266ec8d3716", + "id": 30001, + "status": "active", + "timezone": "America/Los_Angeles", + "created_on": "2018-07-06T12:30:02.123457Z" + } + }, + "errored": {} + }, + "db_assertions": [ + { + "query": "SELECT count(*) FROM contacts_contact WHERE name IS NULL AND language IS NULL AND created_by_id != 2", + "count": 2 + } + ] + }, + { + "label": "create a contact with all properties", + "method": "POST", + "path": "/mr/contact/bulk_create", + "body": { + "org_id": 1, + "user_id": 3, + "specs": [ + { + "name": "José", + "language": "spa", + "urns": [ + "tel:+16055700001" + ], + "fields": { + "gender": "M", + "age": "39" + }, + "groups": [ + 
"c153e265-f7c9-4539-9dbc-9b358714b638" + ] + } + ] + }, + "status": 200, + "response": { + "created": { + "0": { + "uuid": "8720f157-ca1c-432f-9c0b-2014ddc77094", + "id": 30002, + "name": "José", + "language": "spa", + "status": "active", + "timezone": "America/Los_Angeles", + "created_on": "2018-07-06T12:30:00.123457Z", + "urns": [ + "tel:+16055700001?id=30000&priority=1000" + ], + "groups": [ + { + "uuid": "c153e265-f7c9-4539-9dbc-9b358714b638", + "name": "Doctors" + } + ], + "fields": { + "age": { + "text": "39", + "number": 39 + }, + "gender": { + "text": "M" + } + } + } + }, + "errored": {} + } + }, + { + "label": "error if try to create contact with invalid language", + "method": "POST", + "path": "/mr/contact/bulk_create", + "body": { + "org_id": 1, + "user_id": 3, + "specs": [ + { + "name": "María", + "language": "xyz" + } + ] + }, + "status": 400, + "response": { + "error": "invalid language: unrecognized language code: xyz" + } + }, + { + "label": "error if try to create contact with taken URN", + "method": "POST", + "path": "/mr/contact/bulk_create", + "body": { + "org_id": 1, + "user_id": 3, + "specs": [ + { + "name": "María", + "urns": [ + "tel:+16055700001" + ] + } + ] + }, + "status": 207, + "response": { + "created": {}, + "errored": { + "0": "URNs in use by other contacts" + } + } + } +] \ No newline at end of file diff --git a/web/contact/testdata/inspect.json b/web/contact/testdata/inspect.json new file mode 100644 index 000000000..68067c0bf --- /dev/null +++ b/web/contact/testdata/inspect.json @@ -0,0 +1,59 @@ +[ + { + "label": "error if fields not provided", + "method": "POST", + "path": "/mr/contact/inspect", + "body": {}, + "status": 400, + "response": { + "error": "request failed validation: field 'org_id' is required, field 'contact_ids' is required" + } + }, + { + "label": "return info by id", + "method": "POST", + "path": "/mr/contact/inspect", + "body": { + "org_id": 1, + "contact_ids": [ + 10000, + 10001 + ] + }, + "status": 200, + 
"response": { + "10000": { + "urns": [ + { + "channel": { + "name": "Twilio", + "uuid": "74729f45-7f29-4868-9dc4-90e491e3c7d8" + }, + "scheme": "tel", + "path": "+16055741111", + "display": "" + }, + { + "channel": null, + "scheme": "twitterid", + "path": "23145325", + "display": "cathy" + } + ] + }, + "10001": { + "urns": [ + { + "channel": { + "name": "Twilio", + "uuid": "74729f45-7f29-4868-9dc4-90e491e3c7d8" + }, + "scheme": "tel", + "path": "+16055742222", + "display": "" + } + ] + } + } + } +] \ No newline at end of file diff --git a/web/contact/testdata/modify.json b/web/contact/testdata/modify.json index 235edf044..7da19cc0a 100644 --- a/web/contact/testdata/modify.json +++ b/web/contact/testdata/modify.json @@ -13,17 +13,20 @@ }, "status": 200, "response": { - "10000": { - "contact": { - "uuid": "6393abc0-283d-4c9b-a1b3-641a035c34bf", - "id": 10000, - "name": "Cathy", - "status": "active", - "timezone": "America/Los_Angeles", - "created_on": "2018-07-06T12:30:00.123457Z" - }, - "events": [] - } + "modified": { + "10000": { + "contact": { + "uuid": "6393abc0-283d-4c9b-a1b3-641a035c34bf", + "id": 10000, + "name": "Cathy", + "status": "active", + "timezone": "America/Los_Angeles", + "created_on": "2018-07-06T12:30:00.123457Z" + }, + "events": [] + } + }, + "skipped": [] }, "db_assertions": [ { @@ -51,23 +54,26 @@ }, "status": 200, "response": { - "10000": { - "contact": { - "uuid": "6393abc0-283d-4c9b-a1b3-641a035c34bf", - "id": 10000, - "name": "Kathy", - "status": "active", - "timezone": "America/Los_Angeles", - "created_on": "2018-07-06T12:30:00.123457Z" - }, - "events": [ - { - "type": "contact_name_changed", - "created_on": "2018-07-06T12:30:00.123456789Z", - "name": "Kathy" - } - ] - } + "modified": { + "10000": { + "contact": { + "uuid": "6393abc0-283d-4c9b-a1b3-641a035c34bf", + "id": 10000, + "name": "Kathy", + "status": "active", + "timezone": "America/Los_Angeles", + "created_on": "2018-07-06T12:30:00.123457Z" + }, + "events": [ + { + "type": 
"contact_name_changed", + "created_on": "2018-07-06T12:30:00.123456789Z", + "name": "Kathy" + } + ] + } + }, + "skipped": [] }, "db_assertions": [ { @@ -95,23 +101,26 @@ }, "status": 200, "response": { - "10000": { - "contact": { - "uuid": "6393abc0-283d-4c9b-a1b3-641a035c34bf", - "id": 10000, - "name": "Nate", - "status": "active", - "timezone": "America/Los_Angeles", - "created_on": "2018-07-06T12:30:00.123457Z" - }, - "events": [ - { - "type": "contact_name_changed", - "created_on": "2018-07-06T12:30:00.123456789Z", - "name": "Nate" - } - ] - } + "modified": { + "10000": { + "contact": { + "uuid": "6393abc0-283d-4c9b-a1b3-641a035c34bf", + "id": 10000, + "name": "Nate", + "status": "active", + "timezone": "America/Los_Angeles", + "created_on": "2018-07-06T12:30:00.123457Z" + }, + "events": [ + { + "type": "contact_name_changed", + "created_on": "2018-07-06T12:30:00.123456789Z", + "name": "Nate" + } + ] + } + }, + "skipped": [] }, "db_assertions": [ { @@ -139,22 +148,25 @@ }, "status": 200, "response": { - "10000": { - "contact": { - "uuid": "6393abc0-283d-4c9b-a1b3-641a035c34bf", - "id": 10000, - "status": "active", - "timezone": "America/Los_Angeles", - "created_on": "2018-07-06T12:30:00.123457Z" - }, - "events": [ - { - "type": "contact_name_changed", - "created_on": "2018-07-06T12:30:00.123456789Z", - "name": "" - } - ] - } + "modified": { + "10000": { + "contact": { + "uuid": "6393abc0-283d-4c9b-a1b3-641a035c34bf", + "id": 10000, + "status": "active", + "timezone": "America/Los_Angeles", + "created_on": "2018-07-06T12:30:00.123457Z" + }, + "events": [ + { + "type": "contact_name_changed", + "created_on": "2018-07-06T12:30:00.123456789Z", + "name": "" + } + ] + } + }, + "skipped": [] }, "db_assertions": [ { @@ -182,22 +194,25 @@ }, "status": 200, "response": { - "10000": { - "contact": { - "uuid": "6393abc0-283d-4c9b-a1b3-641a035c34bf", - "id": 10000, - "status": "blocked", - "timezone": "America/Los_Angeles", - "created_on": "2018-07-06T12:30:00.123457Z" - }, 
- "events": [ - { - "type": "contact_status_changed", - "created_on": "2018-07-06T12:30:00.123456789Z", - "status": "blocked" - } - ] - } + "modified": { + "10000": { + "contact": { + "uuid": "6393abc0-283d-4c9b-a1b3-641a035c34bf", + "id": 10000, + "status": "blocked", + "timezone": "America/Los_Angeles", + "created_on": "2018-07-06T12:30:00.123457Z" + }, + "events": [ + { + "type": "contact_status_changed", + "created_on": "2018-07-06T12:30:00.123456789Z", + "status": "blocked" + } + ] + } + }, + "skipped": [] }, "db_assertions": [ { @@ -225,22 +240,25 @@ }, "status": 200, "response": { - "10000": { - "contact": { - "uuid": "6393abc0-283d-4c9b-a1b3-641a035c34bf", - "id": 10000, - "status": "archived", - "timezone": "America/Los_Angeles", - "created_on": "2018-07-06T12:30:00.123457Z" - }, - "events": [ - { - "type": "contact_status_changed", - "created_on": "2018-07-06T12:30:00.123456789Z", - "status": "archived" - } - ] - } + "modified": { + "10000": { + "contact": { + "uuid": "6393abc0-283d-4c9b-a1b3-641a035c34bf", + "id": 10000, + "status": "archived", + "timezone": "America/Los_Angeles", + "created_on": "2018-07-06T12:30:00.123457Z" + }, + "events": [ + { + "type": "contact_status_changed", + "created_on": "2018-07-06T12:30:00.123456789Z", + "status": "archived" + } + ] + } + }, + "skipped": [] }, "db_assertions": [ { @@ -268,22 +286,25 @@ }, "status": 200, "response": { - "10000": { - "contact": { - "uuid": "6393abc0-283d-4c9b-a1b3-641a035c34bf", - "id": 10000, - "status": "active", - "timezone": "America/Los_Angeles", - "created_on": "2018-07-06T12:30:00.123457Z" - }, - "events": [ - { - "type": "contact_status_changed", - "created_on": "2018-07-06T12:30:00.123456789Z", - "status": "active" - } - ] - } + "modified": { + "10000": { + "contact": { + "uuid": "6393abc0-283d-4c9b-a1b3-641a035c34bf", + "id": 10000, + "status": "active", + "timezone": "America/Los_Angeles", + "created_on": "2018-07-06T12:30:00.123457Z" + }, + "events": [ + { + "type": 
"contact_status_changed", + "created_on": "2018-07-06T12:30:00.123456789Z", + "status": "active" + } + ] + } + }, + "skipped": [] }, "db_assertions": [ { @@ -315,33 +336,36 @@ }, "status": 200, "response": { - "10000": { - "contact": { - "uuid": "6393abc0-283d-4c9b-a1b3-641a035c34bf", - "id": 10000, - "status": "active", - "timezone": "America/Los_Angeles", - "created_on": "2018-07-06T12:30:00.123457Z", - "fields": { - "gender": { - "text": "M" + "modified": { + "10000": { + "contact": { + "uuid": "6393abc0-283d-4c9b-a1b3-641a035c34bf", + "id": 10000, + "status": "active", + "timezone": "America/Los_Angeles", + "created_on": "2018-07-06T12:30:00.123457Z", + "fields": { + "gender": { + "text": "M" + } } - } - }, - "events": [ - { - "type": "contact_field_changed", - "created_on": "2018-07-06T12:30:01.123456789Z", - "field": { - "key": "gender", - "name": "Gender" - }, - "value": { - "text": "M" + }, + "events": [ + { + "type": "contact_field_changed", + "created_on": "2018-07-06T12:30:01.123456789Z", + "field": { + "key": "gender", + "name": "Gender" + }, + "value": { + "text": "M" + } } - } - ] - } + ] + } + }, + "skipped": [] }, "db_assertions": [ { @@ -373,54 +397,57 @@ }, "status": 200, "response": { - "10000": { - "contact": { - "uuid": "6393abc0-283d-4c9b-a1b3-641a035c34bf", - "id": 10000, - "status": "active", - "timezone": "America/Los_Angeles", - "created_on": "2018-07-06T12:30:00.123457Z", - "groups": [ - { - "uuid": "c153e265-f7c9-4539-9dbc-9b358714b638", - "name": "Doctors" - } - ], - "fields": { - "age": { - "text": "24", - "number": 24 - }, - "gender": { - "text": "M" - } - } - }, - "events": [ - { - "type": "contact_field_changed", - "created_on": "2018-07-06T12:30:01.123456789Z", - "field": { - "key": "age", - "name": "Age" - }, - "value": { - "text": "24", - "number": 24 - } - }, - { - "type": "contact_groups_changed", - "created_on": "2018-07-06T12:30:02.123456789Z", - "groups_added": [ + "modified": { + "10000": { + "contact": { + "uuid": 
"6393abc0-283d-4c9b-a1b3-641a035c34bf", + "id": 10000, + "status": "active", + "timezone": "America/Los_Angeles", + "created_on": "2018-07-06T12:30:00.123457Z", + "groups": [ { "uuid": "c153e265-f7c9-4539-9dbc-9b358714b638", "name": "Doctors" } - ] - } - ] - } + ], + "fields": { + "age": { + "text": "24", + "number": 24 + }, + "gender": { + "text": "M" + } + } + }, + "events": [ + { + "type": "contact_field_changed", + "created_on": "2018-07-06T12:30:01.123456789Z", + "field": { + "key": "age", + "name": "Age" + }, + "value": { + "text": "24", + "number": 24 + } + }, + { + "type": "contact_groups_changed", + "created_on": "2018-07-06T12:30:02.123456789Z", + "groups_added": [ + { + "uuid": "c153e265-f7c9-4539-9dbc-9b358714b638", + "name": "Doctors" + } + ] + } + ] + } + }, + "skipped": [] }, "db_assertions": [ { @@ -460,48 +487,51 @@ }, "status": 200, "response": { - "10000": { - "contact": { - "uuid": "6393abc0-283d-4c9b-a1b3-641a035c34bf", - "id": 10000, - "status": "active", - "timezone": "America/Los_Angeles", - "created_on": "2018-07-06T12:30:00.123457Z", - "groups": [ - { - "uuid": "c153e265-f7c9-4539-9dbc-9b358714b638", - "name": "Doctors" - } - ], - "fields": { - "age": { - "text": "24", - "number": 24 - }, - "gender": { - "text": "M" - }, - "joined": { - "text": "26/06/2020", - "datetime": "2020-06-26T05:30:01.123456-07:00" + "modified": { + "10000": { + "contact": { + "uuid": "6393abc0-283d-4c9b-a1b3-641a035c34bf", + "id": 10000, + "status": "active", + "timezone": "America/Los_Angeles", + "created_on": "2018-07-06T12:30:00.123457Z", + "groups": [ + { + "uuid": "c153e265-f7c9-4539-9dbc-9b358714b638", + "name": "Doctors" + } + ], + "fields": { + "age": { + "text": "24", + "number": 24 + }, + "gender": { + "text": "M" + }, + "joined": { + "text": "26/06/2020", + "datetime": "2020-06-26T05:30:01.123456-07:00" + } } - } - }, - "events": [ - { - "type": "contact_field_changed", - "created_on": "2018-07-06T12:30:02.123456789Z", - "field": { - "key": "joined", - 
"name": "Joined" - }, - "value": { - "text": "26/06/2020", - "datetime": "2020-06-26T05:30:01.123456-07:00" + }, + "events": [ + { + "type": "contact_field_changed", + "created_on": "2018-07-06T12:30:02.123456789Z", + "field": { + "key": "joined", + "name": "Joined" + }, + "value": { + "text": "26/06/2020", + "datetime": "2020-06-26T05:30:01.123456-07:00" + } } - } - ] - } + ] + } + }, + "skipped": [] } }, { @@ -527,52 +557,55 @@ }, "status": 200, "response": { - "10000": { - "contact": { - "uuid": "6393abc0-283d-4c9b-a1b3-641a035c34bf", - "id": 10000, - "status": "active", - "timezone": "America/Los_Angeles", - "created_on": "2018-07-06T12:30:00.123457Z", - "groups": [ - { - "uuid": "c153e265-f7c9-4539-9dbc-9b358714b638", - "name": "Doctors" - } - ], - "fields": { - "age": { - "text": "24", - "number": 24 - }, - "gender": { - "text": "M" - }, - "joined": { - "text": "26/06/2020", - "datetime": "2020-06-26T05:30:01.123456-07:00" - }, - "state": { - "text": "BORNO", - "state": "Nigeria > Borno" + "modified": { + "10000": { + "contact": { + "uuid": "6393abc0-283d-4c9b-a1b3-641a035c34bf", + "id": 10000, + "status": "active", + "timezone": "America/Los_Angeles", + "created_on": "2018-07-06T12:30:00.123457Z", + "groups": [ + { + "uuid": "c153e265-f7c9-4539-9dbc-9b358714b638", + "name": "Doctors" + } + ], + "fields": { + "age": { + "text": "24", + "number": 24 + }, + "gender": { + "text": "M" + }, + "joined": { + "text": "26/06/2020", + "datetime": "2020-06-26T05:30:01.123456-07:00" + }, + "state": { + "text": "BORNO", + "state": "Nigeria > Borno" + } } - } - }, - "events": [ - { - "type": "contact_field_changed", - "created_on": "2018-07-06T12:30:01.123456789Z", - "field": { - "key": "state", - "name": "State" - }, - "value": { - "text": "BORNO", - "state": "Nigeria > Borno" + }, + "events": [ + { + "type": "contact_field_changed", + "created_on": "2018-07-06T12:30:01.123456789Z", + "field": { + "key": "state", + "name": "State" + }, + "value": { + "text": "BORNO", + 
"state": "Nigeria > Borno" + } } - } - ] - } + ] + } + }, + "skipped": [] } }, { @@ -622,63 +655,66 @@ }, "status": 200, "response": { - "10000": { - "contact": { - "uuid": "6393abc0-283d-4c9b-a1b3-641a035c34bf", - "id": 10000, - "status": "active", - "timezone": "America/Los_Angeles", - "created_on": "2018-07-06T12:30:00.123457Z" - }, - "events": [ - { - "type": "contact_field_changed", - "created_on": "2018-07-06T12:30:00.123456789Z", - "field": { - "key": "gender", - "name": "Gender" - }, - "value": null + "modified": { + "10000": { + "contact": { + "uuid": "6393abc0-283d-4c9b-a1b3-641a035c34bf", + "id": 10000, + "status": "active", + "timezone": "America/Los_Angeles", + "created_on": "2018-07-06T12:30:00.123457Z" }, - { - "type": "contact_field_changed", - "created_on": "2018-07-06T12:30:01.123456789Z", - "field": { - "key": "age", - "name": "Age" + "events": [ + { + "type": "contact_field_changed", + "created_on": "2018-07-06T12:30:00.123456789Z", + "field": { + "key": "gender", + "name": "Gender" + }, + "value": null }, - "value": null - }, - { - "type": "contact_groups_changed", - "created_on": "2018-07-06T12:30:02.123456789Z", - "groups_removed": [ - { - "uuid": "c153e265-f7c9-4539-9dbc-9b358714b638", - "name": "Doctors" - } - ] - }, - { - "type": "contact_field_changed", - "created_on": "2018-07-06T12:30:03.123456789Z", - "field": { - "key": "joined", - "name": "Joined" + { + "type": "contact_field_changed", + "created_on": "2018-07-06T12:30:01.123456789Z", + "field": { + "key": "age", + "name": "Age" + }, + "value": null }, - "value": null - }, - { - "type": "contact_field_changed", - "created_on": "2018-07-06T12:30:04.123456789Z", - "field": { - "key": "state", - "name": "State" + { + "type": "contact_groups_changed", + "created_on": "2018-07-06T12:30:02.123456789Z", + "groups_removed": [ + { + "uuid": "c153e265-f7c9-4539-9dbc-9b358714b638", + "name": "Doctors" + } + ] }, - "value": null - } - ] - } + { + "type": "contact_field_changed", + "created_on": 
"2018-07-06T12:30:03.123456789Z", + "field": { + "key": "joined", + "name": "Joined" + }, + "value": null + }, + { + "type": "contact_field_changed", + "created_on": "2018-07-06T12:30:04.123456789Z", + "field": { + "key": "state", + "name": "State" + }, + "value": null + } + ] + } + }, + "skipped": [] }, "db_assertions": [ { @@ -720,33 +756,36 @@ }, "status": 200, "response": { - "10000": { - "contact": { - "uuid": "6393abc0-283d-4c9b-a1b3-641a035c34bf", - "id": 10000, - "status": "active", - "timezone": "America/Los_Angeles", - "created_on": "2018-07-06T12:30:00.123457Z", - "groups": [ - { - "uuid": "5e9d8fab-5e7e-4f51-b533-261af5dea70d", - "name": "Testers" - } - ] - }, - "events": [ - { - "type": "contact_groups_changed", - "created_on": "2018-07-06T12:30:00.123456789Z", - "groups_added": [ + "modified": { + "10000": { + "contact": { + "uuid": "6393abc0-283d-4c9b-a1b3-641a035c34bf", + "id": 10000, + "status": "active", + "timezone": "America/Los_Angeles", + "created_on": "2018-07-06T12:30:00.123457Z", + "groups": [ { "uuid": "5e9d8fab-5e7e-4f51-b533-261af5dea70d", "name": "Testers" } ] - } - ] - } + }, + "events": [ + { + "type": "contact_groups_changed", + "created_on": "2018-07-06T12:30:00.123456789Z", + "groups_added": [ + { + "uuid": "5e9d8fab-5e7e-4f51-b533-261af5dea70d", + "name": "Testers" + } + ] + } + ] + } + }, + "skipped": [] }, "db_assertions": [ { @@ -780,22 +819,25 @@ }, "status": 200, "response": { - "10000": { - "contact": { - "uuid": "6393abc0-283d-4c9b-a1b3-641a035c34bf", - "id": 10000, - "status": "active", - "timezone": "America/Los_Angeles", - "created_on": "2018-07-06T12:30:00.123457Z", - "groups": [ - { - "uuid": "5e9d8fab-5e7e-4f51-b533-261af5dea70d", - "name": "Testers" - } - ] - }, - "events": [] - } + "modified": { + "10000": { + "contact": { + "uuid": "6393abc0-283d-4c9b-a1b3-641a035c34bf", + "id": 10000, + "status": "active", + "timezone": "America/Los_Angeles", + "created_on": "2018-07-06T12:30:00.123457Z", + "groups": [ + { + 
"uuid": "5e9d8fab-5e7e-4f51-b533-261af5dea70d", + "name": "Testers" + } + ] + }, + "events": [] + } + }, + "skipped": [] }, "db_assertions": [ { @@ -829,27 +871,30 @@ }, "status": 200, "response": { - "10000": { - "contact": { - "uuid": "6393abc0-283d-4c9b-a1b3-641a035c34bf", - "id": 10000, - "status": "active", - "timezone": "America/Los_Angeles", - "created_on": "2018-07-06T12:30:00.123457Z" - }, - "events": [ - { - "type": "contact_groups_changed", - "created_on": "2018-07-06T12:30:00.123456789Z", - "groups_removed": [ - { - "uuid": "5e9d8fab-5e7e-4f51-b533-261af5dea70d", - "name": "Testers" - } - ] - } - ] - } + "modified": { + "10000": { + "contact": { + "uuid": "6393abc0-283d-4c9b-a1b3-641a035c34bf", + "id": 10000, + "status": "active", + "timezone": "America/Los_Angeles", + "created_on": "2018-07-06T12:30:00.123457Z" + }, + "events": [ + { + "type": "contact_groups_changed", + "created_on": "2018-07-06T12:30:00.123456789Z", + "groups_removed": [ + { + "uuid": "5e9d8fab-5e7e-4f51-b533-261af5dea70d", + "name": "Testers" + } + ] + } + ] + } + }, + "skipped": [] }, "db_assertions": [ { @@ -883,16 +928,19 @@ }, "status": 200, "response": { - "10000": { - "contact": { - "uuid": "6393abc0-283d-4c9b-a1b3-641a035c34bf", - "id": 10000, - "status": "active", - "timezone": "America/Los_Angeles", - "created_on": "2018-07-06T12:30:00.123457Z" - }, - "events": [] - } + "modified": { + "10000": { + "contact": { + "uuid": "6393abc0-283d-4c9b-a1b3-641a035c34bf", + "id": 10000, + "status": "active", + "timezone": "America/Los_Angeles", + "created_on": "2018-07-06T12:30:00.123457Z" + }, + "events": [] + } + }, + "skipped": [] }, "db_assertions": [ { @@ -920,23 +968,26 @@ }, "status": 200, "response": { - "10000": { - "contact": { - "uuid": "6393abc0-283d-4c9b-a1b3-641a035c34bf", - "id": 10000, - "language": "fra", - "status": "active", - "timezone": "America/Los_Angeles", - "created_on": "2018-07-06T12:30:00.123457Z" - }, - "events": [ - { - "type": "contact_language_changed", 
- "created_on": "2018-07-06T12:30:00.123456789Z", - "language": "fra" - } - ] - } + "modified": { + "10000": { + "contact": { + "uuid": "6393abc0-283d-4c9b-a1b3-641a035c34bf", + "id": 10000, + "language": "fra", + "status": "active", + "timezone": "America/Los_Angeles", + "created_on": "2018-07-06T12:30:00.123457Z" + }, + "events": [ + { + "type": "contact_language_changed", + "created_on": "2018-07-06T12:30:00.123456789Z", + "language": "fra" + } + ] + } + }, + "skipped": [] }, "db_assertions": [ { @@ -967,28 +1018,31 @@ }, "status": 200, "response": { - "10000": { - "contact": { - "uuid": "6393abc0-283d-4c9b-a1b3-641a035c34bf", - "id": 10000, - "language": "fra", - "status": "active", - "timezone": "America/Los_Angeles", - "created_on": "2018-07-06T12:30:00.123457Z", - "urns": [ - "tel:+255788555111" - ] - }, - "events": [ - { - "type": "contact_urns_changed", - "created_on": "2018-07-06T12:30:00.123456789Z", + "modified": { + "10000": { + "contact": { + "uuid": "6393abc0-283d-4c9b-a1b3-641a035c34bf", + "id": 10000, + "language": "fra", + "status": "active", + "timezone": "America/Los_Angeles", + "created_on": "2018-07-06T12:30:00.123457Z", "urns": [ "tel:+255788555111" ] - } - ] - } + }, + "events": [ + { + "type": "contact_urns_changed", + "created_on": "2018-07-06T12:30:00.123456789Z", + "urns": [ + "tel:+255788555111" + ] + } + ] + } + }, + "skipped": [] }, "db_assertions": [ { @@ -1019,26 +1073,29 @@ }, "status": 200, "response": { - "10000": { - "contact": { - "uuid": "6393abc0-283d-4c9b-a1b3-641a035c34bf", - "id": 10000, - "language": "fra", - "status": "active", - "timezone": "America/Los_Angeles", - "created_on": "2018-07-06T12:30:00.123457Z", - "urns": [ - "tel:+255788555111?id=30000&priority=1000" + "modified": { + "10000": { + "contact": { + "uuid": "6393abc0-283d-4c9b-a1b3-641a035c34bf", + "id": 10000, + "language": "fra", + "status": "active", + "timezone": "America/Los_Angeles", + "created_on": "2018-07-06T12:30:00.123457Z", + "urns": [ + 
"tel:+255788555111?id=30000&priority=1000" + ] + }, + "events": [ + { + "type": "error", + "created_on": "2018-07-06T12:30:00.123456789Z", + "text": "'xyz:12345' is not valid URN" + } ] - }, - "events": [ - { - "type": "error", - "created_on": "2018-07-06T12:30:00.123456789Z", - "text": "'xyz:12345' is not valid URN" - } - ] - } + } + }, + "skipped": [] }, "db_assertions": [ { @@ -1067,30 +1124,33 @@ }, "status": 200, "response": { - "10000": { - "contact": { - "uuid": "6393abc0-283d-4c9b-a1b3-641a035c34bf", - "id": 10000, - "language": "fra", - "status": "active", - "timezone": "America/Los_Angeles", - "created_on": "2018-07-06T12:30:00.123457Z", - "urns": [ - "tel:+255788555111?id=30000&priority=1000", - "tel:+255788555222" - ] - }, - "events": [ - { - "type": "contact_urns_changed", - "created_on": "2018-07-06T12:30:00.123456789Z", + "modified": { + "10000": { + "contact": { + "uuid": "6393abc0-283d-4c9b-a1b3-641a035c34bf", + "id": 10000, + "language": "fra", + "status": "active", + "timezone": "America/Los_Angeles", + "created_on": "2018-07-06T12:30:00.123457Z", "urns": [ "tel:+255788555111?id=30000&priority=1000", "tel:+255788555222" ] - } - ] - } + }, + "events": [ + { + "type": "contact_urns_changed", + "created_on": "2018-07-06T12:30:00.123456789Z", + "urns": [ + "tel:+255788555111?id=30000&priority=1000", + "tel:+255788555222" + ] + } + ] + } + }, + "skipped": [] }, "db_assertions": [ { @@ -1121,21 +1181,24 @@ }, "status": 200, "response": { - "10000": { - "contact": { - "uuid": "6393abc0-283d-4c9b-a1b3-641a035c34bf", - "id": 10000, - "language": "fra", - "status": "active", - "timezone": "America/Los_Angeles", - "created_on": "2018-07-06T12:30:00.123457Z", - "urns": [ - "tel:+255788555111?id=30000&priority=1000", - "tel:+255788555222?id=30001&priority=999" - ] - }, - "events": [] - } + "modified": { + "10000": { + "contact": { + "uuid": "6393abc0-283d-4c9b-a1b3-641a035c34bf", + "id": 10000, + "language": "fra", + "status": "active", + "timezone": 
"America/Los_Angeles", + "created_on": "2018-07-06T12:30:00.123457Z", + "urns": [ + "tel:+255788555111?id=30000&priority=1000", + "tel:+255788555222?id=30001&priority=999" + ] + }, + "events": [] + } + }, + "skipped": [] }, "db_assertions": [ { @@ -1166,28 +1229,31 @@ }, "status": 200, "response": { - "10000": { - "contact": { - "uuid": "6393abc0-283d-4c9b-a1b3-641a035c34bf", - "id": 10000, - "language": "fra", - "status": "active", - "timezone": "America/Los_Angeles", - "created_on": "2018-07-06T12:30:00.123457Z", - "urns": [ - "tel:+255788555111?id=30000&priority=1000" - ] - }, - "events": [ - { - "type": "contact_urns_changed", - "created_on": "2018-07-06T12:30:00.123456789Z", + "modified": { + "10000": { + "contact": { + "uuid": "6393abc0-283d-4c9b-a1b3-641a035c34bf", + "id": 10000, + "language": "fra", + "status": "active", + "timezone": "America/Los_Angeles", + "created_on": "2018-07-06T12:30:00.123457Z", "urns": [ "tel:+255788555111?id=30000&priority=1000" ] - } - ] - } + }, + "events": [ + { + "type": "contact_urns_changed", + "created_on": "2018-07-06T12:30:00.123456789Z", + "urns": [ + "tel:+255788555111?id=30000&priority=1000" + ] + } + ] + } + }, + "skipped": [] }, "db_assertions": [ { @@ -1223,30 +1289,33 @@ }, "status": 200, "response": { - "10000": { - "contact": { - "uuid": "6393abc0-283d-4c9b-a1b3-641a035c34bf", - "id": 10000, - "language": "fra", - "status": "active", - "timezone": "America/Los_Angeles", - "created_on": "2018-07-06T12:30:00.123457Z", - "urns": [ - "tel:+255788555111", - "tel:+255788555333" - ] - }, - "events": [ - { - "type": "contact_urns_changed", - "created_on": "2018-07-06T12:30:00.123456789Z", + "modified": { + "10000": { + "contact": { + "uuid": "6393abc0-283d-4c9b-a1b3-641a035c34bf", + "id": 10000, + "language": "fra", + "status": "active", + "timezone": "America/Los_Angeles", + "created_on": "2018-07-06T12:30:00.123457Z", "urns": [ "tel:+255788555111", "tel:+255788555333" ] - } - ] - } + }, + "events": [ + { + "type": 
"contact_urns_changed", + "created_on": "2018-07-06T12:30:00.123456789Z", + "urns": [ + "tel:+255788555111", + "tel:+255788555333" + ] + } + ] + } + }, + "skipped": [] }, "db_assertions": [ { @@ -1287,23 +1356,26 @@ }, "status": 200, "response": { - "10000": { - "contact": { - "uuid": "6393abc0-283d-4c9b-a1b3-641a035c34bf", - "id": 10000, - "language": "fra", - "status": "active", - "timezone": "America/Los_Angeles", - "created_on": "2018-07-06T12:30:00.123457Z" - }, - "events": [ - { - "type": "contact_urns_changed", - "created_on": "2018-07-06T12:30:00.123456789Z", - "urns": [] - } - ] - } + "modified": { + "10000": { + "contact": { + "uuid": "6393abc0-283d-4c9b-a1b3-641a035c34bf", + "id": 10000, + "language": "fra", + "status": "active", + "timezone": "America/Los_Angeles", + "created_on": "2018-07-06T12:30:00.123457Z" + }, + "events": [ + { + "type": "contact_urns_changed", + "created_on": "2018-07-06T12:30:00.123456789Z", + "urns": [] + } + ] + } + }, + "skipped": [] }, "db_assertions": [ { @@ -1355,72 +1427,73 @@ }, "status": 200, "response": { - "10000": { - "contact": { - "created_on": "2018-07-06T12:30:00.123457Z", - "groups": [ - { - "name": "Open Tickets", - "uuid": "361838c4-2866-495a-8990-9f3c222a7604" - } - ], - "id": 10000, - "language": "fra", - "status": "active", - "tickets": [ - { - "assignee": { - "email": "admin1@nyaruka.com", - "name": "Andy Admin" - }, - "body": "Need help", + "modified": { + "10000": { + "contact": { + "uuid": "6393abc0-283d-4c9b-a1b3-641a035c34bf", + "id": 10000, + "language": "fra", + "status": "active", + "timezone": "America/Los_Angeles", + "created_on": "2018-07-06T12:30:00.123457Z", + "groups": [ + { + "uuid": "361838c4-2866-495a-8990-9f3c222a7604", + "name": "Open Tickets" + } + ], + "ticket": { + "uuid": "d2f852ec-7b4e-457f-ae7f-f8b243c49ff5", "ticketer": { - "name": "RapidPro Tickets", - "uuid": "ffc903f7-8cbb-443f-9627-87106842d1aa" + "uuid": "ffc903f7-8cbb-443f-9627-87106842d1aa", + "name": "RapidPro Tickets" }, 
"topic": { - "name": "Support", - "uuid": "0a8f2e00-fef6-402c-bd79-d789446ec0e0" + "uuid": "0a8f2e00-fef6-402c-bd79-d789446ec0e0", + "name": "Support" }, - "uuid": "d2f852ec-7b4e-457f-ae7f-f8b243c49ff5" - } - ], - "timezone": "America/Los_Angeles", - "uuid": "6393abc0-283d-4c9b-a1b3-641a035c34bf" - }, - "events": [ - { - "created_on": "2018-07-06T12:30:00.123456789Z", - "ticket": { + "body": "Need help", "assignee": { "email": "admin1@nyaruka.com", "name": "Andy Admin" - }, - "body": "Need help", - "ticketer": { - "name": "RapidPro Tickets", - "uuid": "ffc903f7-8cbb-443f-9627-87106842d1aa" - }, - "topic": { - "name": "Support", - "uuid": "0a8f2e00-fef6-402c-bd79-d789446ec0e0" - }, - "uuid": "d2f852ec-7b4e-457f-ae7f-f8b243c49ff5" - }, - "type": "ticket_opened" + } + } }, - { - "created_on": "2018-07-06T12:30:01.123456789Z", - "groups_added": [ - { - "name": "Open Tickets", - "uuid": "361838c4-2866-495a-8990-9f3c222a7604" + "events": [ + { + "type": "ticket_opened", + "created_on": "2018-07-06T12:30:00.123456789Z", + "ticket": { + "uuid": "d2f852ec-7b4e-457f-ae7f-f8b243c49ff5", + "ticketer": { + "uuid": "ffc903f7-8cbb-443f-9627-87106842d1aa", + "name": "RapidPro Tickets" + }, + "topic": { + "uuid": "0a8f2e00-fef6-402c-bd79-d789446ec0e0", + "name": "Support" + }, + "body": "Need help", + "assignee": { + "email": "admin1@nyaruka.com", + "name": "Andy Admin" + } } - ], - "type": "contact_groups_changed" - } - ] - } + }, + { + "type": "contact_groups_changed", + "created_on": "2018-07-06T12:30:01.123456789Z", + "groups_added": [ + { + "uuid": "361838c4-2866-495a-8990-9f3c222a7604", + "name": "Open Tickets" + } + ] + } + ] + } + }, + "skipped": [] } }, { @@ -1463,77 +1536,152 @@ }, "status": 200, "response": { - "10000": { - "contact": { - "uuid": "6393abc0-283d-4c9b-a1b3-641a035c34bf", - "id": 10000, - "name": "Juan", - "language": "spa", - "status": "active", - "tickets": [ - { - "assignee": { - "email": "admin1@nyaruka.com", - "name": "Andy Admin" + "modified": { + 
"10000": { + "contact": { + "uuid": "6393abc0-283d-4c9b-a1b3-641a035c34bf", + "id": 10000, + "name": "Juan", + "language": "spa", + "status": "active", + "timezone": "America/Los_Angeles", + "created_on": "2018-07-06T12:30:00.123457Z", + "urns": [ + "tel:+255788555111" + ], + "groups": [ + { + "uuid": "361838c4-2866-495a-8990-9f3c222a7604", + "name": "Open Tickets" }, - "body": "Need help", + { + "uuid": "5e9d8fab-5e7e-4f51-b533-261af5dea70d", + "name": "Testers" + } + ], + "ticket": { + "uuid": "d2f852ec-7b4e-457f-ae7f-f8b243c49ff5", "ticketer": { - "name": "RapidPro Tickets", - "uuid": "ffc903f7-8cbb-443f-9627-87106842d1aa" + "uuid": "ffc903f7-8cbb-443f-9627-87106842d1aa", + "name": "RapidPro Tickets" }, "topic": { - "name": "Support", - "uuid": "0a8f2e00-fef6-402c-bd79-d789446ec0e0" + "uuid": "0a8f2e00-fef6-402c-bd79-d789446ec0e0", + "name": "Support" }, - "uuid": "d2f852ec-7b4e-457f-ae7f-f8b243c49ff5" + "body": "Need help", + "assignee": { + "email": "admin1@nyaruka.com", + "name": "Andy Admin" + } } - ], - "timezone": "America/Los_Angeles", - "created_on": "2018-07-06T12:30:00.123457Z", - "urns": [ - "tel:+255788555111" - ], - "groups": [ + }, + "events": [ { - "name": "Open Tickets", - "uuid": "361838c4-2866-495a-8990-9f3c222a7604" + "type": "contact_name_changed", + "created_on": "2018-07-06T12:30:00.123456789Z", + "name": "Juan" }, { - "uuid": "5e9d8fab-5e7e-4f51-b533-261af5dea70d", - "name": "Testers" + "type": "contact_language_changed", + "created_on": "2018-07-06T12:30:01.123456789Z", + "language": "spa" + }, + { + "type": "contact_groups_changed", + "created_on": "2018-07-06T12:30:02.123456789Z", + "groups_added": [ + { + "uuid": "5e9d8fab-5e7e-4f51-b533-261af5dea70d", + "name": "Testers" + } + ] + }, + { + "type": "contact_urns_changed", + "created_on": "2018-07-06T12:30:03.123456789Z", + "urns": [ + "tel:+255788555111" + ] } ] - }, - "events": [ - { - "type": "contact_name_changed", - "created_on": "2018-07-06T12:30:00.123456789Z", - "name": "Juan" - 
}, - { - "type": "contact_language_changed", - "created_on": "2018-07-06T12:30:01.123456789Z", - "language": "spa" - }, - { - "type": "contact_groups_changed", - "created_on": "2018-07-06T12:30:02.123456789Z", - "groups_added": [ + } + }, + "skipped": [] + } + }, + { + "label": "locked contacts are skipped", + "method": "POST", + "path": "/mr/contact/modify", + "body": { + "org_id": 1, + "user_id": 3, + "contact_ids": [ + 10000, + 10003 + ], + "modifiers": [ + { + "type": "language", + "language": "eng" + } + ] + }, + "status": 200, + "response": { + "modified": { + "10000": { + "contact": { + "uuid": "6393abc0-283d-4c9b-a1b3-641a035c34bf", + "id": 10000, + "name": "Juan", + "language": "eng", + "status": "active", + "timezone": "America/Los_Angeles", + "created_on": "2018-07-06T12:30:00.123457Z", + "urns": [ + "tel:+255788555111?id=30000&priority=1000" + ], + "groups": [ + { + "uuid": "361838c4-2866-495a-8990-9f3c222a7604", + "name": "Open Tickets" + }, { "uuid": "5e9d8fab-5e7e-4f51-b533-261af5dea70d", "name": "Testers" } - ] + ], + "ticket": { + "uuid": "d2f852ec-7b4e-457f-ae7f-f8b243c49ff5", + "ticketer": { + "uuid": "ffc903f7-8cbb-443f-9627-87106842d1aa", + "name": "RapidPro Tickets" + }, + "topic": { + "uuid": "0a8f2e00-fef6-402c-bd79-d789446ec0e0", + "name": "Support" + }, + "body": "Need help", + "assignee": { + "email": "admin1@nyaruka.com", + "name": "Andy Admin" + } + } }, - { - "type": "contact_urns_changed", - "created_on": "2018-07-06T12:30:03.123456789Z", - "urns": [ - "tel:+255788555111" - ] - } - ] - } + "events": [ + { + "type": "contact_language_changed", + "created_on": "2018-07-06T12:30:00.123456789Z", + "language": "eng" + } + ] + } + }, + "skipped": [ + 10003 + ] } } ] \ No newline at end of file diff --git a/web/contact/utils_test.go b/web/contact/utils_test.go deleted file mode 100644 index e764fcd9a..000000000 --- a/web/contact/utils_test.go +++ /dev/null @@ -1,55 +0,0 @@ -package contact_test - -import ( - "testing" - - 
"github.com/nyaruka/goflow/assets" - "github.com/nyaruka/goflow/envs" - "github.com/nyaruka/mailroom/core/models" - "github.com/nyaruka/mailroom/testsuite" - "github.com/nyaruka/mailroom/testsuite/testdata" - "github.com/nyaruka/mailroom/web/contact" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestSpecToCreation(t *testing.T) { - ctx, rt, _, _ := testsuite.Get() - - oa, err := models.GetOrgAssets(ctx, rt, testdata.Org1.ID) - require.NoError(t, err) - - sa := oa.SessionAssets() - env := envs.NewBuilder().Build() - - // empty spec is valid - s := &models.ContactSpec{} - c, err := contact.SpecToCreation(s, env, sa) - assert.NoError(t, err) - assert.Equal(t, "", c.Name) - assert.Equal(t, envs.NilLanguage, c.Language) - assert.Equal(t, 0, len(c.URNs)) - assert.Equal(t, 0, len(c.Mods)) - - // try to set invalid language - lang := "xyzd" - s = &models.ContactSpec{Language: &lang} - _, err = contact.SpecToCreation(s, env, sa) - assert.EqualError(t, err, "invalid language: iso-639-3 codes must be 3 characters, got: xyzd") - - // try to set non-existent contact field - s = &models.ContactSpec{Fields: map[string]string{"goats": "7"}} - _, err = contact.SpecToCreation(s, env, sa) - assert.EqualError(t, err, "unknown contact field 'goats'") - - // try to add to non-existent group - s = &models.ContactSpec{Groups: []assets.GroupUUID{"52f6c50e-f9a8-4f24-bb80-5c9f144ed27f"}} - _, err = contact.SpecToCreation(s, env, sa) - assert.EqualError(t, err, "unknown contact group '52f6c50e-f9a8-4f24-bb80-5c9f144ed27f'") - - // try to add to dynamic group - s = &models.ContactSpec{Groups: []assets.GroupUUID{"52f6c50e-f9a8-4f24-bb80-5c9f144ed27f"}} - _, err = contact.SpecToCreation(s, env, sa) - assert.EqualError(t, err, "unknown contact group '52f6c50e-f9a8-4f24-bb80-5c9f144ed27f'") -} diff --git a/web/expression/expression.go b/web/expression/expression.go deleted file mode 100644 index e6c9fc767..000000000 --- a/web/expression/expression.go 
+++ /dev/null @@ -1,43 +0,0 @@ -package expression - -import ( - "context" - "net/http" - - "github.com/nyaruka/goflow/flows/definition/legacy/expressions" - "github.com/nyaruka/mailroom/runtime" - "github.com/nyaruka/mailroom/web" - - "github.com/pkg/errors" -) - -func init() { - web.RegisterJSONRoute(http.MethodPost, "/mr/expression/migrate", web.RequireAuthToken(handleMigrate)) -} - -// Migrates a legacy expression to the new flow definition specification -// -// { -// "expression": "@contact.age" -// } -type migrateRequest struct { - Expression string `json:"expression" validate:"required"` -} - -type migrateResponse struct { - Migrated string `json:"migrated"` -} - -func handleMigrate(ctx context.Context, rt *runtime.Runtime, r *http.Request) (interface{}, int, error) { - request := &migrateRequest{} - if err := web.ReadAndValidateJSON(r, request); err != nil { - return errors.Wrapf(err, "request failed validation"), http.StatusBadRequest, nil - } - - migrated, err := expressions.MigrateTemplate(request.Expression, nil) - if err != nil { - return errors.Wrapf(err, "unable to migrate expression"), http.StatusUnprocessableEntity, nil - } - - return &migrateResponse{migrated}, http.StatusOK, nil -} diff --git a/web/expression/expression_test.go b/web/expression/expression_test.go deleted file mode 100644 index 604f2b10b..000000000 --- a/web/expression/expression_test.go +++ /dev/null @@ -1,14 +0,0 @@ -package expression_test - -import ( - "testing" - - "github.com/nyaruka/mailroom/testsuite" - "github.com/nyaruka/mailroom/web" -) - -func TestServer(t *testing.T) { - ctx, rt, _, _ := testsuite.Get() - - web.RunWebTests(t, ctx, rt, "testdata/migrate.json", nil) -} diff --git a/web/expression/testdata/migrate.json b/web/expression/testdata/migrate.json deleted file mode 100644 index fc21d7e37..000000000 --- a/web/expression/testdata/migrate.json +++ /dev/null @@ -1,48 +0,0 @@ -[ - { - "label": "illegal method", - "method": "GET", - "path": "/mr/expression/migrate", 
- "body": null, - "status": 405, - "response": { - "error": "illegal method: GET" - } - }, - { - "label": "migrate valid expression", - "method": "POST", - "path": "/mr/expression/migrate", - "body": { - "expression": "@contact.age" - }, - "status": 200, - "response": { - "migrated": "@fields.age" - } - }, - { - "label": "migrate another valid expression", - "method": "POST", - "path": "/mr/expression/migrate", - "body": { - "expression": "@(UPPER(contact.tel))" - }, - "status": 200, - "response": { - "migrated": "@(upper(format_urn(urns.tel)))" - } - }, - { - "label": "migrate invalid expression", - "method": "POST", - "path": "/mr/expression/migrate", - "body": { - "expression": "@(+)" - }, - "status": 422, - "response": { - "error": "unable to migrate expression: error evaluating @(+): syntax error at +" - } - } -] \ No newline at end of file diff --git a/web/flow/base_test.go b/web/flow/base_test.go new file mode 100644 index 000000000..220951098 --- /dev/null +++ b/web/flow/base_test.go @@ -0,0 +1,22 @@ +package flow_test + +import ( + "testing" + + "github.com/nyaruka/mailroom/testsuite" +) + +func TestServer(t *testing.T) { + ctx, rt := testsuite.Runtime() + + testsuite.RunWebTests(t, ctx, rt, "testdata/change_language.json", nil) + testsuite.RunWebTests(t, ctx, rt, "testdata/clone.json", nil) + testsuite.RunWebTests(t, ctx, rt, "testdata/inspect.json", nil) + testsuite.RunWebTests(t, ctx, rt, "testdata/migrate.json", nil) +} + +func TestPreviewStart(t *testing.T) { + ctx, rt := testsuite.Runtime() + + testsuite.RunWebTests(t, ctx, rt, "testdata/preview_start.json", nil) +} diff --git a/web/flow/change_language.go b/web/flow/change_language.go new file mode 100644 index 000000000..ac9f1cf01 --- /dev/null +++ b/web/flow/change_language.go @@ -0,0 +1,42 @@ +package flow + +import ( + "context" + "encoding/json" + "net/http" + + "github.com/nyaruka/goflow/envs" + "github.com/nyaruka/mailroom/core/goflow" + "github.com/nyaruka/mailroom/runtime" + 
"github.com/nyaruka/mailroom/web" + "github.com/pkg/errors" +) + +func init() { + web.RegisterRoute(http.MethodPost, "/mr/flow/change_language", web.RequireAuthToken(web.JSONPayload(handleChangeLanguage))) +} + +// Changes the language of a flow by replacing the text with a translation. +// +// { +// "language": "spa", +// "flow": { "uuid": "468621a8-32e6-4cd2-afc1-04416f7151f0", "nodes": [...]} +// } +type changeLanguageRequest struct { + Language envs.Language `json:"language" validate:"required"` + Flow json.RawMessage `json:"flow" validate:"required"` +} + +func handleChangeLanguage(ctx context.Context, rt *runtime.Runtime, r *changeLanguageRequest) (any, int, error) { + flow, err := goflow.ReadFlow(rt.Config, r.Flow) + if err != nil { + return errors.Wrapf(err, "unable to read flow"), http.StatusUnprocessableEntity, nil + } + + copy, err := flow.ChangeLanguage(r.Language) + if err != nil { + return errors.Wrapf(err, "unable to change flow language"), http.StatusUnprocessableEntity, nil + } + + return copy, http.StatusOK, nil +} diff --git a/web/flow/clone.go b/web/flow/clone.go new file mode 100644 index 000000000..37dc28bb9 --- /dev/null +++ b/web/flow/clone.go @@ -0,0 +1,47 @@ +package flow + +import ( + "context" + "encoding/json" + "net/http" + + "github.com/nyaruka/gocommon/uuids" + "github.com/nyaruka/mailroom/core/goflow" + "github.com/nyaruka/mailroom/runtime" + "github.com/nyaruka/mailroom/web" + "github.com/pkg/errors" +) + +func init() { + web.RegisterRoute(http.MethodPost, "/mr/flow/clone", web.RequireAuthToken(web.JSONPayload(handleClone))) +} + +// Clones a flow, replacing all UUIDs with either the given mapping or new random UUIDs. 
+// +// { +// "dependency_mapping": { +// "4ee4189e-0c06-4b00-b54f-5621329de947": "db31d23f-65b8-4518-b0f6-45638bfbbbf2", +// "723e62d8-a544-448f-8590-1dfd0fccfcd4": "f1fd861c-9e75-4376-a829-dcf76db6e721" +// }, +// "flow": { "uuid": "468621a8-32e6-4cd2-afc1-04416f7151f0", "nodes": [...]} +// } +type cloneRequest struct { + DependencyMapping map[uuids.UUID]uuids.UUID `json:"dependency_mapping"` + Flow json.RawMessage `json:"flow" validate:"required"` +} + +func handleClone(ctx context.Context, rt *runtime.Runtime, r *cloneRequest) (any, int, error) { + // try to clone the flow definition + cloneJSON, err := goflow.CloneDefinition(r.Flow, r.DependencyMapping) + if err != nil { + return errors.Wrapf(err, "unable to read flow"), http.StatusUnprocessableEntity, nil + } + + // read flow to check that cloning produced something valid + _, err = goflow.ReadFlow(rt.Config, cloneJSON) + if err != nil { + return errors.Wrapf(err, "unable to clone flow"), http.StatusUnprocessableEntity, nil + } + + return cloneJSON, http.StatusOK, nil +} diff --git a/web/flow/flow.go b/web/flow/flow.go deleted file mode 100644 index 17ace6fc4..000000000 --- a/web/flow/flow.go +++ /dev/null @@ -1,159 +0,0 @@ -package flow - -import ( - "context" - "encoding/json" - "net/http" - - "github.com/nyaruka/gocommon/uuids" - "github.com/nyaruka/goflow/envs" - "github.com/nyaruka/goflow/flows" - "github.com/nyaruka/mailroom/core/goflow" - "github.com/nyaruka/mailroom/core/models" - "github.com/nyaruka/mailroom/runtime" - "github.com/nyaruka/mailroom/web" - - "github.com/Masterminds/semver" - "github.com/pkg/errors" -) - -func init() { - web.RegisterJSONRoute(http.MethodPost, "/mr/flow/migrate", web.RequireAuthToken(handleMigrate)) - web.RegisterJSONRoute(http.MethodPost, "/mr/flow/inspect", web.RequireAuthToken(handleInspect)) - web.RegisterJSONRoute(http.MethodPost, "/mr/flow/clone", web.RequireAuthToken(handleClone)) - web.RegisterJSONRoute(http.MethodPost, "/mr/flow/change_language", 
web.RequireAuthToken(handleChangeLanguage)) -} - -// Migrates a flow to the latest flow specification -// -// { -// "flow": {"uuid": "468621a8-32e6-4cd2-afc1-04416f7151f0", "action_sets": [], ...}, -// "to_version": "13.0.0" -// } -type migrateRequest struct { - Flow json.RawMessage `json:"flow" validate:"required"` - ToVersion *semver.Version `json:"to_version"` -} - -func handleMigrate(ctx context.Context, rt *runtime.Runtime, r *http.Request) (interface{}, int, error) { - request := &migrateRequest{} - if err := web.ReadAndValidateJSON(r, request); err != nil { - return errors.Wrapf(err, "request failed validation"), http.StatusBadRequest, nil - } - - // do a JSON to JSON migration of the definition - migrated, err := goflow.MigrateDefinition(rt.Config, request.Flow, request.ToVersion) - if err != nil { - return errors.Wrapf(err, "unable to migrate flow"), http.StatusUnprocessableEntity, nil - } - - // try to read result to check that it's valid - _, err = goflow.ReadFlow(rt.Config, migrated) - if err != nil { - return errors.Wrapf(err, "unable to read migrated flow"), http.StatusUnprocessableEntity, nil - } - - return migrated, http.StatusOK, nil -} - -// Inspects a flow, and returns metadata including the possible results generated by the flow, -// and dependencies in the flow. If `org_id` is specified then the dependencies will be checked -// to see if they exist in the org assets. 
-// -// { -// "flow": { "uuid": "468621a8-32e6-4cd2-afc1-04416f7151f0", "nodes": [...]}, -// "org_id": 1 -// } -type inspectRequest struct { - Flow json.RawMessage `json:"flow" validate:"required"` - OrgID models.OrgID `json:"org_id"` -} - -func handleInspect(ctx context.Context, rt *runtime.Runtime, r *http.Request) (interface{}, int, error) { - request := &inspectRequest{} - if err := web.ReadAndValidateJSON(r, request); err != nil { - return errors.Wrapf(err, "request failed validation"), http.StatusBadRequest, nil - } - - flow, err := goflow.ReadFlow(rt.Config, request.Flow) - if err != nil { - return errors.Wrapf(err, "unable to read flow"), http.StatusUnprocessableEntity, nil - } - - var sa flows.SessionAssets - // if we have an org ID, create session assets to look for missing dependencies - if request.OrgID != models.NilOrgID { - oa, err := models.GetOrgAssetsWithRefresh(ctx, rt, request.OrgID, models.RefreshFields|models.RefreshGroups|models.RefreshFlows) - if err != nil { - return nil, 0, err - } - sa = oa.SessionAssets() - } - - return flow.Inspect(sa), http.StatusOK, nil -} - -// Clones a flow, replacing all UUIDs with either the given mapping or new random UUIDs. 
-// -// { -// "dependency_mapping": { -// "4ee4189e-0c06-4b00-b54f-5621329de947": "db31d23f-65b8-4518-b0f6-45638bfbbbf2", -// "723e62d8-a544-448f-8590-1dfd0fccfcd4": "f1fd861c-9e75-4376-a829-dcf76db6e721" -// }, -// "flow": { "uuid": "468621a8-32e6-4cd2-afc1-04416f7151f0", "nodes": [...]} -// } -type cloneRequest struct { - DependencyMapping map[uuids.UUID]uuids.UUID `json:"dependency_mapping"` - Flow json.RawMessage `json:"flow" validate:"required"` -} - -func handleClone(ctx context.Context, rt *runtime.Runtime, r *http.Request) (interface{}, int, error) { - request := &cloneRequest{} - if err := web.ReadAndValidateJSON(r, request); err != nil { - return errors.Wrapf(err, "request failed validation"), http.StatusBadRequest, nil - } - - // try to clone the flow definition - cloneJSON, err := goflow.CloneDefinition(request.Flow, request.DependencyMapping) - if err != nil { - return errors.Wrapf(err, "unable to read flow"), http.StatusUnprocessableEntity, nil - } - - // read flow to check that cloning produced something valid - _, err = goflow.ReadFlow(rt.Config, cloneJSON) - if err != nil { - return errors.Wrapf(err, "unable to clone flow"), http.StatusUnprocessableEntity, nil - } - - return cloneJSON, http.StatusOK, nil -} - -// Changes the language of a flow by replacing the text with a translation. 
-// -// { -// "language": "spa", -// "flow": { "uuid": "468621a8-32e6-4cd2-afc1-04416f7151f0", "nodes": [...]} -// } -type changeLanguageRequest struct { - Language envs.Language `json:"language" validate:"required"` - Flow json.RawMessage `json:"flow" validate:"required"` -} - -func handleChangeLanguage(ctx context.Context, rt *runtime.Runtime, r *http.Request) (interface{}, int, error) { - request := &changeLanguageRequest{} - if err := web.ReadAndValidateJSON(r, request); err != nil { - return errors.Wrapf(err, "request failed validation"), http.StatusBadRequest, nil - } - - flow, err := goflow.ReadFlow(rt.Config, request.Flow) - if err != nil { - return errors.Wrapf(err, "unable to read flow"), http.StatusUnprocessableEntity, nil - } - - copy, err := flow.ChangeLanguage(request.Language) - if err != nil { - return errors.Wrapf(err, "unable to change flow language"), http.StatusUnprocessableEntity, nil - } - - return copy, http.StatusOK, nil -} diff --git a/web/flow/flow_test.go b/web/flow/flow_test.go deleted file mode 100644 index d5dc966b0..000000000 --- a/web/flow/flow_test.go +++ /dev/null @@ -1,17 +0,0 @@ -package flow_test - -import ( - "testing" - - "github.com/nyaruka/mailroom/testsuite" - "github.com/nyaruka/mailroom/web" -) - -func TestServer(t *testing.T) { - ctx, rt, _, _ := testsuite.Get() - - web.RunWebTests(t, ctx, rt, "testdata/change_language.json", nil) - web.RunWebTests(t, ctx, rt, "testdata/clone.json", nil) - web.RunWebTests(t, ctx, rt, "testdata/inspect.json", nil) - web.RunWebTests(t, ctx, rt, "testdata/migrate.json", nil) -} diff --git a/web/flow/inspect.go b/web/flow/inspect.go new file mode 100644 index 000000000..a3e612997 --- /dev/null +++ b/web/flow/inspect.go @@ -0,0 +1,50 @@ +package flow + +import ( + "context" + "encoding/json" + "net/http" + + "github.com/nyaruka/goflow/flows" + "github.com/nyaruka/mailroom/core/goflow" + "github.com/nyaruka/mailroom/core/models" + "github.com/nyaruka/mailroom/runtime" + 
"github.com/nyaruka/mailroom/web" + "github.com/pkg/errors" +) + +func init() { + web.RegisterRoute(http.MethodPost, "/mr/flow/inspect", web.RequireAuthToken(web.JSONPayload(handleInspect))) +} + +// Inspects a flow, and returns metadata including the possible results generated by the flow, +// and dependencies in the flow. If `org_id` is specified then the dependencies will be checked +// to see if they exist in the org assets. +// +// { +// "flow": { "uuid": "468621a8-32e6-4cd2-afc1-04416f7151f0", "nodes": [...]}, +// "org_id": 1 +// } +type inspectRequest struct { + Flow json.RawMessage `json:"flow" validate:"required"` + OrgID models.OrgID `json:"org_id"` +} + +func handleInspect(ctx context.Context, rt *runtime.Runtime, r *inspectRequest) (any, int, error) { + flow, err := goflow.ReadFlow(rt.Config, r.Flow) + if err != nil { + return errors.Wrapf(err, "unable to read flow"), http.StatusUnprocessableEntity, nil + } + + var sa flows.SessionAssets + // if we have an org ID, create session assets to look for missing dependencies + if r.OrgID != models.NilOrgID { + oa, err := models.GetOrgAssetsWithRefresh(ctx, rt, r.OrgID, models.RefreshFields|models.RefreshGroups|models.RefreshFlows) + if err != nil { + return nil, 0, err + } + sa = oa.SessionAssets() + } + + return flow.Inspect(sa), http.StatusOK, nil +} diff --git a/web/flow/migrate.go b/web/flow/migrate.go new file mode 100644 index 000000000..f6b547ca8 --- /dev/null +++ b/web/flow/migrate.go @@ -0,0 +1,44 @@ +package flow + +import ( + "context" + "encoding/json" + "net/http" + + "github.com/Masterminds/semver" + "github.com/nyaruka/mailroom/core/goflow" + "github.com/nyaruka/mailroom/runtime" + "github.com/nyaruka/mailroom/web" + "github.com/pkg/errors" +) + +func init() { + web.RegisterRoute(http.MethodPost, "/mr/flow/migrate", web.RequireAuthToken(web.JSONPayload(handleMigrate))) +} + +// Migrates a flow to the latest flow specification +// +// { +// "flow": {"uuid": "468621a8-32e6-4cd2-afc1-04416f7151f0", 
"action_sets": [], ...}, +// "to_version": "13.0.0" +// } +type migrateRequest struct { + Flow json.RawMessage `json:"flow" validate:"required"` + ToVersion *semver.Version `json:"to_version"` +} + +func handleMigrate(ctx context.Context, rt *runtime.Runtime, r *migrateRequest) (any, int, error) { + // do a JSON to JSON migration of the definition + migrated, err := goflow.MigrateDefinition(rt.Config, r.Flow, r.ToVersion) + if err != nil { + return errors.Wrapf(err, "unable to migrate flow"), http.StatusUnprocessableEntity, nil + } + + // try to read result to check that it's valid + _, err = goflow.ReadFlow(rt.Config, migrated) + if err != nil { + return errors.Wrapf(err, "unable to read migrated flow"), http.StatusUnprocessableEntity, nil + } + + return migrated, http.StatusOK, nil +} diff --git a/web/flow/preview_start.go b/web/flow/preview_start.go new file mode 100644 index 000000000..912a1e75e --- /dev/null +++ b/web/flow/preview_start.go @@ -0,0 +1,96 @@ +package flow + +import ( + "context" + "net/http" + + "github.com/nyaruka/goflow/assets" + "github.com/nyaruka/goflow/contactql" + "github.com/nyaruka/goflow/flows" + "github.com/nyaruka/mailroom/core/models" + "github.com/nyaruka/mailroom/core/search" + "github.com/nyaruka/mailroom/runtime" + "github.com/nyaruka/mailroom/web" + "github.com/pkg/errors" +) + +func init() { + web.RegisterRoute(http.MethodPost, "/mr/flow/preview_start", web.RequireAuthToken(web.JSONPayload(handlePreviewStart))) +} + +// Generates a preview of which contacts will be started in the given flow. 
+// +// { +// "org_id": 1, +// "flow_id": 2, +// "include": { +// "group_uuids": ["5fa925e4-edd8-4e2a-ab24-b3dbb5932ddd", "2912b95f-5b89-4d39-a2a8-5292602f357f"], +// "contact_uuids": ["e5bb9e6f-7703-4ba1-afba-0b12791de38b"], +// "query": "" +// }, +// "exclude": { +// "non_active": false, +// "in_a_flow": false, +// "started_previously": true, +// "not_seen_since_days": 90 +// } +// } +// +// { +// "query": "(group = \"No Age\" OR group = \"No Name\" OR uuid = \"e5bb9e6f-7703-4ba1-afba-0b12791de38b\") AND history != \"Registration\"", +// "total": 567 +// } +type previewRequest struct { + OrgID models.OrgID `json:"org_id" validate:"required"` + FlowID models.FlowID `json:"flow_id" validate:"required"` + Include struct { + GroupUUIDs []assets.GroupUUID `json:"group_uuids"` + ContactUUIDs []flows.ContactUUID `json:"contact_uuids"` + Query string `json:"query"` + } `json:"include" validate:"required"` + Exclude models.Exclusions `json:"exclude"` +} + +type previewResponse struct { + Query string `json:"query"` + Total int `json:"total"` +} + +func handlePreviewStart(ctx context.Context, rt *runtime.Runtime, r *previewRequest) (any, int, error) { + oa, err := models.GetOrgAssets(ctx, rt, r.OrgID) + if err != nil { + return nil, 0, errors.Wrapf(err, "unable to load org assets") + } + + flow, err := oa.FlowByID(r.FlowID) + if err != nil { + return nil, 0, errors.Wrapf(err, "unable to load flow") + } + + groups := make([]*models.Group, 0, len(r.Include.GroupUUIDs)) + for _, groupUUID := range r.Include.GroupUUIDs { + g := oa.GroupByUUID(groupUUID) + if g != nil { + groups = append(groups, g) + } + } + + query, err := search.BuildRecipientsQuery(oa, flow, groups, r.Include.ContactUUIDs, r.Include.Query, r.Exclude, nil) + if err != nil { + isQueryError, qerr := contactql.IsQueryError(err) + if isQueryError { + return qerr, http.StatusBadRequest, nil + } + return nil, 0, err + } + if query == "" { + return &previewResponse{Query: "", Total: 0}, http.StatusOK, nil + } + + 
parsedQuery, total, err := search.GetContactTotal(ctx, rt, oa, query) + if err != nil { + return nil, 0, errors.Wrap(err, "error querying preview") + } + + return &previewResponse{Query: parsedQuery.String(), Total: int(total)}, http.StatusOK, nil +} diff --git a/web/flow/start.go b/web/flow/start.go deleted file mode 100644 index 2766e7530..000000000 --- a/web/flow/start.go +++ /dev/null @@ -1,122 +0,0 @@ -package flow - -import ( - "context" - "net/http" - - "github.com/nyaruka/gocommon/urns" - "github.com/nyaruka/goflow/assets" - "github.com/nyaruka/goflow/contactql" - "github.com/nyaruka/goflow/flows" - "github.com/nyaruka/mailroom/core/models" - "github.com/nyaruka/mailroom/core/search" - "github.com/nyaruka/mailroom/runtime" - "github.com/nyaruka/mailroom/web" - "github.com/pkg/errors" -) - -func init() { - web.RegisterJSONRoute(http.MethodPost, "/mr/flow/preview_start", web.RequireAuthToken(handlePreviewStart)) -} - -// Generates a preview of which contacts will be started in the given flow. 
-// -// { -// "org_id": 1, -// "flow_id": 2, -// "include": { -// "group_uuids": ["5fa925e4-edd8-4e2a-ab24-b3dbb5932ddd", "2912b95f-5b89-4d39-a2a8-5292602f357f"], -// "contact_uuids": ["e5bb9e6f-7703-4ba1-afba-0b12791de38b"], -// "urns": ["tel:+1234567890"], -// "user_query": "" -// }, -// "exclude": { -// "non_active": false, -// "in_a_flow": false, -// "started_previously": true, -// "not_seen_recently": false -// }, -// "sample_size": 5 -// } -// -// { -// "query": "(group = "No Age" OR group = "No Name" OR uuid = "e5bb9e6f-7703-4ba1-afba-0b12791de38b" OR tel = "+1234567890") AND history != \"Registration\"", -// "total": 567, -// "sample": [12, 34, 56, 67, 78], -// "metadata": { -// "fields": [ -// {"key": "age", "name": "Age"} -// ], -// "allow_as_group": true -// } -// } -type previewStartRequest struct { - OrgID models.OrgID `json:"org_id" validate:"required"` - FlowID models.FlowID `json:"flow_id" validate:"required"` - Include struct { - GroupUUIDs []assets.GroupUUID `json:"group_uuids"` - ContactUUIDs []flows.ContactUUID `json:"contact_uuids"` - URNs []urns.URN `json:"urns"` - Query string `json:"query"` - } `json:"include" validate:"required"` - Exclude search.Exclusions `json:"exclude"` - SampleSize int `json:"sample_size" validate:"required"` -} - -type previewStartResponse struct { - Query string `json:"query"` - Total int `json:"total"` - SampleIDs []models.ContactID `json:"sample_ids"` - Metadata *contactql.Inspection `json:"metadata,omitempty"` -} - -func handlePreviewStart(ctx context.Context, rt *runtime.Runtime, r *http.Request) (interface{}, int, error) { - request := &previewStartRequest{} - if err := web.ReadAndValidateJSON(r, request); err != nil { - return errors.Wrapf(err, "request failed validation"), http.StatusBadRequest, nil - } - - oa, err := models.GetOrgAssets(ctx, rt, request.OrgID) - if err != nil { - return nil, http.StatusInternalServerError, errors.Wrapf(err, "unable to load org assets") - } - - flow, err := 
oa.FlowByID(request.FlowID) - if err != nil { - return nil, http.StatusInternalServerError, errors.Wrapf(err, "unable to load flow") - } - - groups := make([]*models.Group, 0, len(request.Include.GroupUUIDs)) - for _, groupUUID := range request.Include.GroupUUIDs { - g := oa.GroupByUUID(groupUUID) - if g != nil { - groups = append(groups, g) - } - } - - query, err := search.BuildStartQuery(oa, flow, groups, request.Include.ContactUUIDs, request.Include.URNs, request.Include.Query, request.Exclude) - if err != nil { - isQueryError, qerr := contactql.IsQueryError(err) - if isQueryError { - return qerr, http.StatusBadRequest, nil - } - return nil, http.StatusInternalServerError, err - } - if query == "" { - return &previewStartResponse{SampleIDs: []models.ContactID{}}, http.StatusOK, nil - } - - parsedQuery, sampleIDs, total, err := search.GetContactIDsForQueryPage(ctx, rt.ES, oa, nil, nil, query, "", 0, request.SampleSize) - if err != nil { - return nil, http.StatusInternalServerError, errors.Wrapf(err, "error querying preview") - } - - inspection := contactql.Inspect(parsedQuery) - - return &previewStartResponse{ - Query: parsedQuery.String(), - Total: int(total), - SampleIDs: sampleIDs, - Metadata: inspection, - }, http.StatusOK, nil -} diff --git a/web/flow/start_test.go b/web/flow/start_test.go deleted file mode 100644 index 6e2fac073..000000000 --- a/web/flow/start_test.go +++ /dev/null @@ -1,25 +0,0 @@ -package flow_test - -import ( - "testing" - - "github.com/nyaruka/mailroom/testsuite" - "github.com/nyaruka/mailroom/testsuite/testdata" - "github.com/nyaruka/mailroom/web" -) - -func TestPreviewStart(t *testing.T) { - ctx, rt, _, _ := testsuite.Get() - - mockES := testsuite.NewMockElasticServer() - defer mockES.Close() - - rt.ES = mockES.Client() - - mockES.AddResponse(testdata.Cathy.ID) - mockES.AddResponse(testdata.Bob.ID) - mockES.AddResponse(testdata.George.ID) - mockES.AddResponse(testdata.Alexandria.ID) - - web.RunWebTests(t, ctx, rt, 
"testdata/preview_start.json", nil) -} diff --git a/web/flow/testdata/change_language.json b/web/flow/testdata/change_language.json index 3d881fb3d..9fd5fa215 100644 --- a/web/flow/testdata/change_language.json +++ b/web/flow/testdata/change_language.json @@ -171,7 +171,7 @@ "response": { "uuid": "19cad1f2-9110-4271-98d4-1b968bf19410", "name": "Change Language", - "spec_version": "13.1.0", + "spec_version": "13.2.0", "language": "kin", "type": "messaging", "revision": 16, @@ -517,7 +517,7 @@ "response": { "uuid": "19cad1f2-9110-4271-98d4-1b968bf19410", "name": "Change Language", - "spec_version": "13.1.0", + "spec_version": "13.2.0", "language": "ara", "type": "messaging", "revision": 16, @@ -842,7 +842,7 @@ "response": { "uuid": "19cad1f2-9110-4271-98d4-1b968bf19410", "name": "Change Language", - "spec_version": "13.1.0", + "spec_version": "13.2.0", "language": "spa", "type": "messaging", "revision": 16, diff --git a/web/flow/testdata/inspect.json b/web/flow/testdata/inspect.json index 8d6446995..6b1ac9337 100644 --- a/web/flow/testdata/inspect.json +++ b/web/flow/testdata/inspect.json @@ -338,5 +338,29 @@ "waiting_exits": [], "parent_refs": [] } + }, + { + "label": "inspect flow with invalid base language", + "method": "POST", + "path": "/mr/flow/inspect", + "body": { + "org_id": 1, + "dependency_mapping": {}, + "flow": { + "uuid": "8f107d42-7416-4cf2-9a51-9490361ad517", + "name": "Invalid Flow", + "spec_version": "13.2.0", + "language": "base", + "type": "messaging", + "revision": 1, + "expire_after_minutes": 10080, + "localization": {}, + "nodes": [] + } + }, + "status": 422, + "response": { + "error": "unable to read flow: field 'language' is not a valid language code" + } } ] \ No newline at end of file diff --git a/web/flow/testdata/migrate.json b/web/flow/testdata/migrate.json index 63c39a473..03e16f55a 100644 --- a/web/flow/testdata/migrate.json +++ b/web/flow/testdata/migrate.json @@ -83,7 +83,7 @@ } ], "revision": 1, - "spec_version": "13.1.0", + 
"spec_version": "13.2.0", "type": "messaging", "uuid": "42362831-f376-4df1-b6d9-a80b102821d9" } @@ -169,7 +169,7 @@ } ], "revision": 1, - "spec_version": "13.1.0", + "spec_version": "13.2.0", "type": "messaging", "uuid": "42362831-f376-4df1-b6d9-a80b102821d9" } diff --git a/web/flow/testdata/preview_start.json b/web/flow/testdata/preview_start.json index 8f037d917..1ae397369 100644 --- a/web/flow/testdata/preview_start.json +++ b/web/flow/testdata/preview_start.json @@ -15,7 +15,7 @@ "body": {}, "status": 400, "response": { - "error": "request failed validation: field 'org_id' is required, field 'flow_id' is required, field 'sample_size' is required" + "error": "request failed validation: field 'org_id' is required, field 'flow_id' is required" } }, { @@ -25,14 +25,12 @@ "body": { "org_id": 1, "flow_id": 10001, - "include": {}, - "sample_size": 3 + "include": {} }, "status": 200, "response": { "query": "", - "total": 0, - "sample_ids": [] + "total": 0 } }, { @@ -51,43 +49,13 @@ "5a8345c1-514a-4d1b-aee5-6f39b2f53cfa", "bd2aab59-5e28-4db4-b6e8-bbdb75fd7a0a" ], - "urns": [ - "tel:+1234567890", - "facebook:9876543210" - ], "query": "" - }, - "sample_size": 3 + } }, "status": 200, "response": { - "query": "group = \"Doctors\" OR group = \"Testers\" OR uuid = \"5a8345c1-514a-4d1b-aee5-6f39b2f53cfa\" OR uuid = \"bd2aab59-5e28-4db4-b6e8-bbdb75fd7a0a\" OR tel = \"+1234567890\" OR facebook = 9876543210", - "total": 1, - "sample_ids": [ - 10000 - ], - "metadata": { - "attributes": [ - "group", - "uuid" - ], - "fields": [], - "groups": [ - { - "name": "Doctors", - "uuid": "c153e265-f7c9-4539-9dbc-9b358714b638" - }, - { - "name": "Testers", - "uuid": "5e9d8fab-5e7e-4f51-b533-261af5dea70d" - } - ], - "schemes": [ - "facebook", - "tel" - ], - "allow_as_group": false - } + "query": "group = \"Doctors\" OR group = \"Testers\" OR uuid = \"5a8345c1-514a-4d1b-aee5-6f39b2f53cfa\" OR uuid = \"bd2aab59-5e28-4db4-b6e8-bbdb75fd7a0a\"", + "total": 121 } }, { @@ -100,30 +68,13 @@ "include": 
{ "group_ids": [], "contact_ids": [], - "urns": [], "query": "gender = M" - }, - "sample_size": 3 + } }, "status": 200, "response": { "query": "gender = \"M\"", - "total": 1, - "sample_ids": [ - 10001 - ], - "metadata": { - "attributes": [], - "fields": [ - { - "key": "gender", - "name": "Gender" - } - ], - "groups": [], - "schemes": [], - "allow_as_group": true - } + "total": 0 } }, { @@ -142,10 +93,6 @@ "5a8345c1-514a-4d1b-aee5-6f39b2f53cfa", "bd2aab59-5e28-4db4-b6e8-bbdb75fd7a0a" ], - "urns": [ - "tel:+1234567890", - "facebook:9876543210" - ], "query": "" }, "exclude": { @@ -153,42 +100,12 @@ "in_a_flow": true, "started_previously": true, "not_seen_since_days": 90 - }, - "sample_size": 3 + } }, "status": 200, "response": { - "query": "(group = \"Doctors\" OR group = \"Testers\" OR uuid = \"5a8345c1-514a-4d1b-aee5-6f39b2f53cfa\" OR uuid = \"bd2aab59-5e28-4db4-b6e8-bbdb75fd7a0a\" OR tel = \"+1234567890\" OR facebook = 9876543210) AND status = \"active\" AND flow = \"\" AND history != \"Pick a Number\" AND last_seen_on > \"07-04-2018\"", - "total": 1, - "sample_ids": [ - 10002 - ], - "metadata": { - "attributes": [ - "flow", - "group", - "history", - "last_seen_on", - "status", - "uuid" - ], - "fields": [], - "groups": [ - { - "name": "Doctors", - "uuid": "c153e265-f7c9-4539-9dbc-9b358714b638" - }, - { - "name": "Testers", - "uuid": "5e9d8fab-5e7e-4f51-b533-261af5dea70d" - } - ], - "schemes": [ - "facebook", - "tel" - ], - "allow_as_group": false - } + "query": "(group = \"Doctors\" OR group = \"Testers\" OR uuid = \"5a8345c1-514a-4d1b-aee5-6f39b2f53cfa\" OR uuid = \"bd2aab59-5e28-4db4-b6e8-bbdb75fd7a0a\") AND status = \"active\" AND flow = \"\" AND history != \"Pick a Number\" AND last_seen_on > \"07-04-2018\"", + "total": 0 } }, { @@ -206,33 +123,12 @@ "in_a_flow": true, "started_previously": true, "not_seen_since_days": 90 - }, - "sample_size": 3 + } }, "status": 200, "response": { "query": "gender = \"M\" AND status = \"active\" AND flow = \"\" AND history != 
\"Pick a Number\" AND last_seen_on > \"07-04-2018\"", - "total": 1, - "sample_ids": [ - 10003 - ], - "metadata": { - "attributes": [ - "flow", - "history", - "last_seen_on", - "status" - ], - "fields": [ - { - "key": "gender", - "name": "Gender" - } - ], - "groups": [], - "schemes": [], - "allow_as_group": false - } + "total": 0 } }, { @@ -245,13 +141,12 @@ "include": { "query": "gender =" }, - "exclude": {}, - "sample_size": 3 + "exclude": {} }, "status": 400, "response": { - "code": "unexpected_token", "error": "mismatched input '' expecting {TEXT, STRING}", + "code": "unexpected_token", "extra": { "token": "" } @@ -267,13 +162,12 @@ "include": { "query": "goats > 10" }, - "exclude": {}, - "sample_size": 3 + "exclude": {} }, "status": 400, "response": { - "code": "unknown_property", "error": "can't resolve 'goats' to attribute, scheme or field", + "code": "unknown_property", "extra": { "property": "goats" } diff --git a/web/forms.go b/web/forms.go index a83af4804..833fde8f2 100644 --- a/web/forms.go +++ b/web/forms.go @@ -4,9 +4,9 @@ import ( "mime" "net/http" + "github.com/go-playground/validator/v10" "github.com/gorilla/schema" "github.com/nyaruka/goflow/utils" - validator "gopkg.in/go-playground/validator.v9" ) var ( diff --git a/web/forms_test.go b/web/forms_test.go index e7e53f5a8..2e6e15423 100644 --- a/web/forms_test.go +++ b/web/forms_test.go @@ -6,8 +6,8 @@ import ( "strings" "testing" + "github.com/nyaruka/mailroom/testsuite" "github.com/nyaruka/mailroom/web" - "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -49,7 +49,7 @@ func TestDecodeAndValidateForm(t *testing.T) { func TestDecodeAndValidateMultipartForm(t *testing.T) { // make a request with valid form data - request, err := web.MakeMultipartRequest("POST", "http://temba.io", []web.MultiPartPart{ + request, err := testsuite.MakeMultipartRequest("POST", "http://temba.io", []testsuite.MultiPartPart{ {Name: "foo", Data: "a"}, {Name: "foo", Data: "b"}, {Name: "bar", 
Data: "x"}, @@ -65,7 +65,7 @@ func TestDecodeAndValidateMultipartForm(t *testing.T) { assert.Equal(t, "x", form.Bar) // make a request that's missing required data - request, err = web.MakeMultipartRequest("POST", "http://temba.io", []web.MultiPartPart{ + request, err = testsuite.MakeMultipartRequest("POST", "http://temba.io", []testsuite.MultiPartPart{ {Name: "foo", Data: "a"}, {Name: "foo", Data: "b"}, }, nil) diff --git a/web/ivr/ivr.go b/web/ivr/ivr.go index adf88bfc7..f174e93e3 100644 --- a/web/ivr/ivr.go +++ b/web/ivr/ivr.go @@ -10,7 +10,6 @@ import ( "github.com/go-chi/chi" "github.com/nyaruka/gocommon/httpx" - "github.com/nyaruka/gocommon/jsonx" "github.com/nyaruka/gocommon/urns" "github.com/nyaruka/goflow/assets" "github.com/nyaruka/mailroom/core/ivr" @@ -58,22 +57,21 @@ func newIVRHandler(handler ivrHandlerFn, logType models.ChannelLogType) web.Hand return writeGenericErrorResponse(w, errors.Wrapf(err, "unable to get service for channel: %s", ch.UUID())) } - // validate this request's signature - err = svc.ValidateRequestSignature(r) + recorder, err := httpx.NewRecorder(r, w, true) if err != nil { - return svc.WriteErrorResponse(w, errors.Wrapf(err, "request failed signature validation")) + return svc.WriteErrorResponse(w, errors.Wrapf(err, "error reading request body")) } - recorder, err := httpx.NewRecorder(r, w, true) + // validate this request's signature + err = svc.ValidateRequestSignature(r) if err != nil { - return errors.Wrapf(err, "error reading request body") + return svc.WriteErrorResponse(w, errors.Wrapf(err, "request failed signature validation")) } clog := models.NewChannelLogForIncoming(logType, ch, recorder, svc.RedactValues(ch)) call, rerr := handler(ctx, rt, oa, ch, svc, r, recorder.ResponseWriter) if call != nil { - clog.SetCall(call) if err := call.AttachLog(ctx, rt.DB, clog); err != nil { logrus.WithError(err).WithField("http_request", r).Error("error attaching ivr channel log") } @@ -85,7 +83,7 @@ func newIVRHandler(handler 
ivrHandlerFn, logType models.ChannelLogType) web.Hand clog.End() - if err := models.InsertChannelLogs(ctx, rt.DB, []*models.ChannelLog{clog}); err != nil { + if err := models.InsertChannelLogs(ctx, rt, []*models.ChannelLog{clog}); err != nil { logrus.WithError(err).WithField("http_request", r).Error("error writing ivr channel log") } @@ -176,10 +174,7 @@ type IVRRequest struct { // writeGenericErrorResponse is just a small utility method to write out a simple JSON error when we don't have a client yet func writeGenericErrorResponse(w http.ResponseWriter, err error) error { - w.Header().Set("Content-type", "application/json") - w.WriteHeader(http.StatusBadRequest) - _, err = w.Write(jsonx.MustMarshal(map[string]string{"error": err.Error()})) - return err + return web.WriteMarshalled(w, http.StatusBadRequest, map[string]string{"error": err.Error()}) } func buildResumeURL(cfg *runtime.Config, channel *models.Channel, call *models.Call, urn urns.URN) string { diff --git a/web/ivr/ivr_test.go b/web/ivr/ivr_test.go index 2ee986197..802a3c2ef 100644 --- a/web/ivr/ivr_test.go +++ b/web/ivr/ivr_test.go @@ -1,6 +1,7 @@ package ivr import ( + "context" "encoding/json" "fmt" "io" @@ -12,13 +13,14 @@ import ( "testing" "github.com/nyaruka/gocommon/dbutil/assertdb" - "github.com/nyaruka/gocommon/jsonx" + "github.com/nyaruka/goflow/assets" "github.com/nyaruka/goflow/test" _ "github.com/nyaruka/mailroom/core/handlers" "github.com/nyaruka/mailroom/core/models" "github.com/nyaruka/mailroom/core/queue" - ivr_tasks "github.com/nyaruka/mailroom/core/tasks/ivr" + "github.com/nyaruka/mailroom/core/tasks" "github.com/nyaruka/mailroom/core/tasks/starts" + "github.com/nyaruka/mailroom/runtime" "github.com/nyaruka/mailroom/services/ivr/twiml" "github.com/nyaruka/mailroom/services/ivr/vonage" "github.com/nyaruka/mailroom/testsuite" @@ -54,8 +56,8 @@ func mockTwilioHandler(w http.ResponseWriter, r *http.Request) { } func TestTwilioIVR(t *testing.T) { - ctx, rt, db, rp := testsuite.Get() - rc 
:= rp.Get() + ctx, rt := testsuite.Runtime() + rc := rt.RP.Get() defer rc.Close() defer testsuite.Reset(testsuite.ResetAll) @@ -73,7 +75,7 @@ func TestTwilioIVR(t *testing.T) { defer server.Stop() // set callback domain and enable machine detection - db.MustExec(`UPDATE channels_channel SET config = config::jsonb || '{"callback_domain": "localhost:8090", "machine_detection": true}'::jsonb WHERE id = $1`, testdata.TwilioChannel.ID) + rt.DB.MustExec(`UPDATE channels_channel SET config = config || '{"callback_domain": "localhost:8090", "machine_detection": true}'::jsonb WHERE id = $1`, testdata.TwilioChannel.ID) // create a flow start for cathy bob, and george parentSummary := json.RawMessage(`{ @@ -96,29 +98,20 @@ func TestTwilioIVR(t *testing.T) { WithContactIDs([]models.ContactID{testdata.Cathy.ID, testdata.Bob.ID, testdata.George.ID}). WithParentSummary(parentSummary) - err := models.InsertFlowStarts(ctx, db, []*models.FlowStart{start}) + err := models.InsertFlowStarts(ctx, rt.DB, []*models.FlowStart{start}) require.NoError(t, err) - // call our master starter - err = starts.CreateFlowBatches(ctx, rt, start) + err = tasks.Queue(rc, queue.BatchQueue, testdata.Org1.ID, &starts.StartFlowTask{FlowStart: start}, queue.DefaultPriority) require.NoError(t, err) - // start our task - task, err := queue.PopNextTask(rc, queue.BatchQueue) - require.NoError(t, err) - batch := &models.FlowStartBatch{} - jsonx.MustUnmarshal(task.Task, batch) - - // request our calls to start - err = ivr_tasks.HandleFlowStartBatch(ctx, rt, batch) - require.NoError(t, err) + testsuite.FlushTasks(t, rt) // check our 3 contacts have 3 wired calls - assertdb.Query(t, db, `SELECT COUNT(*) FROM ivr_call WHERE contact_id = $1 AND status = $2 AND external_id = $3`, + assertdb.Query(t, rt.DB, `SELECT COUNT(*) FROM ivr_call WHERE contact_id = $1 AND status = $2 AND external_id = $3`, testdata.Cathy.ID, models.CallStatusWired, "Call1").Returns(1) - assertdb.Query(t, db, `SELECT COUNT(*) FROM ivr_call WHERE 
contact_id = $1 AND status = $2 AND external_id = $3`, + assertdb.Query(t, rt.DB, `SELECT COUNT(*) FROM ivr_call WHERE contact_id = $1 AND status = $2 AND external_id = $3`, testdata.Bob.ID, models.CallStatusWired, "Call2").Returns(1) - assertdb.Query(t, db, `SELECT COUNT(*) FROM ivr_call WHERE contact_id = $1 AND status = $2 AND external_id = $3`, + assertdb.Query(t, rt.DB, `SELECT COUNT(*) FROM ivr_call WHERE contact_id = $1 AND status = $2 AND external_id = $3`, testdata.George.ID, models.CallStatusWired, "Call3").Returns(1) tcs := []struct { @@ -318,25 +311,29 @@ func TestTwilioIVR(t *testing.T) { } for connExtID, expStatus := range tc.expectedConnStatus { - assertdb.Query(t, db, `SELECT status FROM ivr_call WHERE external_id = $1`, connExtID). + assertdb.Query(t, rt.DB, `SELECT status FROM ivr_call WHERE external_id = $1`, connExtID). Columns(map[string]interface{}{"status": expStatus}, "status mismatch for connection '%s' in test '%s'", connExtID, tc.label) } } // check our final state of sessions, runs, msgs, calls - assertdb.Query(t, db, `SELECT count(*) FROM flows_flowsession WHERE contact_id = $1 AND status = 'C'`, testdata.Cathy.ID).Returns(1) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM flows_flowsession WHERE contact_id = $1 AND status = 'C'`, testdata.Cathy.ID).Returns(1) - assertdb.Query(t, db, `SELECT count(*) FROM flows_flowrun WHERE contact_id = $1 AND status = 'C'`, testdata.Cathy.ID).Returns(1) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM flows_flowrun WHERE contact_id = $1 AND status = 'C'`, testdata.Cathy.ID).Returns(1) - assertdb.Query(t, db, `SELECT count(*) FROM msgs_msg WHERE contact_id = $1 AND msg_type = 'V' AND status = 'W' AND direction = 'O'`, testdata.Cathy.ID).Returns(8) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM msgs_msg WHERE contact_id = $1 AND msg_type = 'V' AND status = 'W' AND direction = 'O'`, testdata.Cathy.ID).Returns(8) - assertdb.Query(t, db, `SELECT count(*) FROM msgs_msg WHERE contact_id = $1 AND msg_type 
= 'V' AND status = 'H' AND direction = 'I'`, testdata.Cathy.ID).Returns(5) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM msgs_msg WHERE contact_id = $1 AND msg_type = 'V' AND status = 'H' AND direction = 'I'`, testdata.Cathy.ID).Returns(5) - assertdb.Query(t, db, `SELECT count(*) FROM channels_channellog WHERE call_id = 1 AND channel_id = $1`, testdata.TwilioChannel.ID).Returns(9) - assertdb.Query(t, db, `SELECT count(*) FROM channels_channellog WHERE http_logs::text LIKE '%sesame%'`).Returns(0) // auth token redacted - - assertdb.Query(t, db, `SELECT count(*) FROM msgs_msg WHERE contact_id = $1 AND msg_type = 'V' + assertdb.Query(t, rt.DB, `SELECT count(*) FROM msgs_msg WHERE contact_id = $1 AND msg_type = 'V' AND ((status = 'H' AND direction = 'I') OR (status = 'W' AND direction = 'O'))`, testdata.Bob.ID).Returns(2) + + // check the generated channel logs + logs := getCallLogs(t, rt, testdata.TwilioChannel.UUID) + assert.Len(t, logs, 17) + for _, log := range logs { + assert.NotContains(t, string(log), "sesame") // auth token redacted + } } func mockVonageHandler(w http.ResponseWriter, r *http.Request) { @@ -375,17 +372,17 @@ func mockVonageHandler(w http.ResponseWriter, r *http.Request) { } func TestVonageIVR(t *testing.T) { - ctx, rt, db, rp := testsuite.Get() - rc := rp.Get() + ctx, rt := testsuite.Runtime() + rc := rt.RP.Get() defer rc.Close() defer testsuite.Reset(testsuite.ResetAll) // deactivate our twilio channel - db.MustExec(`UPDATE channels_channel SET is_active = FALSE WHERE id = $1`, testdata.TwilioChannel.ID) + rt.DB.MustExec(`UPDATE channels_channel SET is_active = FALSE WHERE id = $1`, testdata.TwilioChannel.ID) // update callback domain and role - db.MustExec(`UPDATE channels_channel SET config = config::jsonb || '{"callback_domain": "localhost:8090"}'::jsonb, role='SRCA' WHERE id = $1`, testdata.VonageChannel.ID) + rt.DB.MustExec(`UPDATE channels_channel SET config = config || '{"callback_domain": "localhost:8090"}'::jsonb, role='SRCA' WHERE 
id = $1`, testdata.VonageChannel.ID) // start test server ts := httptest.NewServer(http.HandlerFunc(mockVonageHandler)) @@ -400,31 +397,25 @@ func TestVonageIVR(t *testing.T) { vonage.IgnoreSignatures = true // create a flow start for cathy and george - extra := json.RawMessage(`{"ref_id":"123"}`) start := models.NewFlowStart(testdata.Org1.ID, models.StartTypeTrigger, models.FlowTypeVoice, testdata.IVRFlow.ID). WithContactIDs([]models.ContactID{testdata.Cathy.ID, testdata.George.ID}). - WithExtra(extra) - models.InsertFlowStarts(ctx, db, []*models.FlowStart{start}) + WithParams(json.RawMessage(`{"ref_id":"123"}`)) + + err := models.InsertFlowStarts(ctx, rt.DB, []*models.FlowStart{start}) + require.NoError(t, err) - // call our master starter - err := starts.CreateFlowBatches(ctx, rt, start) - assert.NoError(t, err) + assertdb.Query(t, rt.DB, `SELECT COUNT(*) FROM flows_flowstart`).Returns(1) + assertdb.Query(t, rt.DB, `SELECT COUNT(*) FROM flows_flowstart WHERE params ->> 'ref_id' = '123'`).Returns(1) - // start our task - task, err := queue.PopNextTask(rc, queue.HandlerQueue) - assert.NoError(t, err) - batch := &models.FlowStartBatch{} - err = json.Unmarshal(task.Task, batch) - assert.NoError(t, err) + err = tasks.Queue(rc, queue.BatchQueue, testdata.Org1.ID, &starts.StartFlowTask{FlowStart: start}, queue.DefaultPriority) + require.NoError(t, err) - // request our call to start - err = ivr_tasks.HandleFlowStartBatch(ctx, rt, batch) - assert.NoError(t, err) + testsuite.FlushTasks(t, rt) - assertdb.Query(t, db, `SELECT COUNT(*) FROM ivr_call WHERE contact_id = $1 AND status = $2 AND external_id = $3`, + assertdb.Query(t, rt.DB, `SELECT COUNT(*) FROM ivr_call WHERE contact_id = $1 AND status = $2 AND external_id = $3`, testdata.Cathy.ID, models.CallStatusWired, "Call1").Returns(1) - assertdb.Query(t, db, `SELECT COUNT(*) FROM ivr_call WHERE contact_id = $1 AND status = $2 AND external_id = $3`, + assertdb.Query(t, rt.DB, `SELECT COUNT(*) FROM ivr_call WHERE 
contact_id = $1 AND status = $2 AND external_id = $3`, testdata.George.ID, models.CallStatusWired, "Call2").Returns(1) tcs := []struct { @@ -621,21 +612,44 @@ func TestVonageIVR(t *testing.T) { } // check our final state of sessions, runs, msgs, calls - assertdb.Query(t, db, `SELECT count(*) FROM flows_flowsession WHERE contact_id = $1 AND status = 'C'`, testdata.Cathy.ID).Returns(1) - assertdb.Query(t, db, `SELECT count(*) FROM flows_flowrun WHERE contact_id = $1 AND status = 'C'`, testdata.Cathy.ID).Returns(1) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM flows_flowsession WHERE contact_id = $1 AND status = 'C'`, testdata.Cathy.ID).Returns(1) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM flows_flowrun WHERE contact_id = $1 AND status = 'C'`, testdata.Cathy.ID).Returns(1) - assertdb.Query(t, db, `SELECT count(*) FROM ivr_call WHERE contact_id = $1 AND status = 'D' AND duration = 50`, testdata.Cathy.ID).Returns(1) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM ivr_call WHERE contact_id = $1 AND status = 'D' AND duration = 50`, testdata.Cathy.ID).Returns(1) - assertdb.Query(t, db, `SELECT count(*) FROM msgs_msg WHERE contact_id = $1 AND msg_type = 'V' AND status = 'W' AND direction = 'O'`, testdata.Cathy.ID).Returns(9) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM msgs_msg WHERE contact_id = $1 AND msg_type = 'V' AND status = 'W' AND direction = 'O'`, testdata.Cathy.ID).Returns(9) - assertdb.Query(t, db, `SELECT count(*) FROM ivr_call WHERE status = 'F' AND direction = 'I'`).Returns(1) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM ivr_call WHERE status = 'F' AND direction = 'I'`).Returns(1) - assertdb.Query(t, db, `SELECT count(*) FROM msgs_msg WHERE contact_id = $1 AND msg_type = 'V' AND status = 'H' AND direction = 'I'`, testdata.Cathy.ID).Returns(5) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM msgs_msg WHERE contact_id = $1 AND msg_type = 'V' AND status = 'H' AND direction = 'I'`, testdata.Cathy.ID).Returns(5) - assertdb.Query(t, db, `SELECT 
count(*) FROM channels_channellog WHERE call_id = 1 AND channel_id = $1`, testdata.VonageChannel.ID).Returns(10) - assertdb.Query(t, db, `SELECT count(*) FROM channels_channellog WHERE http_logs::text LIKE '%BEGIN PRIVATE KEY%'`).Returns(0) // private key redacted + assertdb.Query(t, rt.DB, `SELECT count(*) FROM msgs_msg WHERE contact_id = $1 AND msg_type = 'V' AND ((status = 'H' AND direction = 'I') OR (status = 'W' AND direction = 'O'))`, testdata.George.ID).Returns(3) - assertdb.Query(t, db, `SELECT count(*) FROM msgs_msg WHERE contact_id = $1 AND msg_type = 'V' AND ((status = 'H' AND direction = 'I') OR (status = 'W' AND direction = 'O'))`, testdata.George.ID).Returns(3) + assertdb.Query(t, rt.DB, `SELECT count(*) FROM ivr_call WHERE status = 'D' AND contact_id = $1`, testdata.George.ID).Returns(1) - assertdb.Query(t, db, `SELECT count(*) FROM ivr_call WHERE status = 'D' AND contact_id = $1`, testdata.George.ID).Returns(1) + // check the generated channel logs + logs := getCallLogs(t, rt, testdata.VonageChannel.UUID) + assert.Len(t, logs, 16) + for _, log := range logs { + assert.NotContains(t, string(log), "BEGIN PRIVATE KEY") // private key redacted + } + + // and 2 unattached logs in the database + assertdb.Query(t, rt.DB, `SELECT count(*) FROM channels_channellog WHERE channel_id = $1`, testdata.VonageChannel.ID).Returns(2) + assertdb.Query(t, rt.DB, `SELECT array_agg(log_type ORDER BY id) FROM channels_channellog WHERE channel_id = $1`, testdata.VonageChannel.ID).Returns([]byte(`{ivr_status,ivr_status}`)) +} + +func getCallLogs(t *testing.T, rt *runtime.Runtime, channelUUID assets.ChannelUUID) [][]byte { + var logUUIDs []models.ChannelLogUUID + err := rt.DB.Select(&logUUIDs, `SELECT unnest(log_uuids) FROM ivr_call ORDER BY id`) + require.NoError(t, err) + + logs := make([][]byte, len(logUUIDs)) + + for i, logUUID := range logUUIDs { + _, body, err := rt.LogStorage.Get(context.Background(), fmt.Sprintf("channels/%s/%s/%s.json", channelUUID, logUUID[0:4], 
logUUID)) + require.NoError(t, err) + logs[i] = body + } + return logs } diff --git a/web/msg/base_test.go b/web/msg/base_test.go new file mode 100644 index 000000000..dd11092b2 --- /dev/null +++ b/web/msg/base_test.go @@ -0,0 +1,50 @@ +package msg_test + +import ( + "fmt" + "testing" + "time" + + "github.com/nyaruka/mailroom/core/models" + "github.com/nyaruka/mailroom/testsuite" + "github.com/nyaruka/mailroom/testsuite/testdata" +) + +func TestSend(t *testing.T) { + ctx, rt := testsuite.Runtime() + + defer testsuite.Reset(testsuite.ResetData | testsuite.ResetRedis) + + cathyTicket := testdata.InsertOpenTicket(rt, testdata.Org1, testdata.Cathy, testdata.Internal, testdata.DefaultTopic, "help", "", time.Date(2015, 1, 1, 12, 30, 45, 0, time.UTC), nil) + + testsuite.RunWebTests(t, ctx, rt, "testdata/send.json", map[string]string{ + "cathy_ticket_id": fmt.Sprintf("%d", cathyTicket.ID), + }) + + testsuite.AssertCourierQueues(t, map[string][]int{"msgs:74729f45-7f29-4868-9dc4-90e491e3c7d8|10/1": {1, 1, 1}}) +} + +func TestResend(t *testing.T) { + ctx, rt := testsuite.Runtime() + + defer testsuite.Reset(testsuite.ResetData) + + cathyIn := testdata.InsertIncomingMsg(rt, testdata.Org1, testdata.TwilioChannel, testdata.Cathy, "hello", models.MsgStatusHandled) + cathyOut := testdata.InsertOutgoingMsg(rt, testdata.Org1, testdata.TwilioChannel, testdata.Cathy, "how can we help", nil, models.MsgStatusSent, false) + bobOut := testdata.InsertOutgoingMsg(rt, testdata.Org1, testdata.VonageChannel, testdata.Bob, "this failed", nil, models.MsgStatusFailed, false) + georgeOut := testdata.InsertOutgoingMsg(rt, testdata.Org1, testdata.VonageChannel, testdata.George, "no URN", nil, models.MsgStatusFailed, false) + rt.DB.MustExec(`UPDATE msgs_msg SET contact_urn_id = NULL WHERE id = $1`, georgeOut.ID()) + + testsuite.RunWebTests(t, ctx, rt, "testdata/resend.json", map[string]string{ + "cathy_msgin_id": fmt.Sprintf("%d", cathyIn.ID()), + "cathy_msgout_id": fmt.Sprintf("%d", cathyOut.ID()), + 
"bob_msgout_id": fmt.Sprintf("%d", bobOut.ID()), + "george_msgout_id": fmt.Sprintf("%d", georgeOut.ID()), + }) +} + +func TestPreviewBroadcast(t *testing.T) { + ctx, rt := testsuite.Runtime() + + testsuite.RunWebTests(t, ctx, rt, "testdata/preview_broadcast.json", nil) +} diff --git a/web/msg/msg_test.go b/web/msg/msg_test.go deleted file mode 100644 index fb59e23a3..000000000 --- a/web/msg/msg_test.go +++ /dev/null @@ -1,30 +0,0 @@ -package msg_test - -import ( - "fmt" - "testing" - - "github.com/nyaruka/mailroom/core/models" - "github.com/nyaruka/mailroom/testsuite" - "github.com/nyaruka/mailroom/testsuite/testdata" - "github.com/nyaruka/mailroom/web" -) - -func TestServer(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() - - defer testsuite.Reset(testsuite.ResetData) - - cathyIn := testdata.InsertIncomingMsg(db, testdata.Org1, testdata.TwilioChannel, testdata.Cathy, "hello", models.MsgStatusHandled) - cathyOut := testdata.InsertOutgoingMsg(db, testdata.Org1, testdata.TwilioChannel, testdata.Cathy, "how can we help", nil, models.MsgStatusSent, false) - bobOut := testdata.InsertOutgoingMsg(db, testdata.Org1, testdata.VonageChannel, testdata.Bob, "this failed", nil, models.MsgStatusFailed, false) - georgeOut := testdata.InsertOutgoingMsg(db, testdata.Org1, testdata.VonageChannel, testdata.George, "no URN", nil, models.MsgStatusFailed, false) - db.MustExec(`UPDATE msgs_msg SET contact_urn_id = NULL WHERE id = $1`, georgeOut.ID()) - - web.RunWebTests(t, ctx, rt, "testdata/resend.json", map[string]string{ - "cathy_msgin_id": fmt.Sprintf("%d", cathyIn.ID()), - "cathy_msgout_id": fmt.Sprintf("%d", cathyOut.ID()), - "bob_msgout_id": fmt.Sprintf("%d", bobOut.ID()), - "george_msgout_id": fmt.Sprintf("%d", georgeOut.ID()), - }) -} diff --git a/web/msg/preview_broadcast.go b/web/msg/preview_broadcast.go new file mode 100644 index 000000000..28b863c71 --- /dev/null +++ b/web/msg/preview_broadcast.go @@ -0,0 +1,88 @@ +package msg + +import ( + "context" + "net/http" + + 
"github.com/nyaruka/goflow/assets" + "github.com/nyaruka/goflow/contactql" + "github.com/nyaruka/goflow/flows" + "github.com/nyaruka/mailroom/core/models" + "github.com/nyaruka/mailroom/core/search" + "github.com/nyaruka/mailroom/runtime" + "github.com/nyaruka/mailroom/web" + "github.com/pkg/errors" +) + +func init() { + web.RegisterRoute(http.MethodPost, "/mr/msg/preview_broadcast", web.RequireAuthToken(web.JSONPayload(handlePreviewBroadcast))) +} + +// Generates a preview of which contacts will receive a broadcast. +// +// { +// "org_id": 1, +// "include": { +// "group_uuids": ["5fa925e4-edd8-4e2a-ab24-b3dbb5932ddd", "2912b95f-5b89-4d39-a2a8-5292602f357f"], +// "contact_uuids": ["e5bb9e6f-7703-4ba1-afba-0b12791de38b"], +// "query": "" +// }, +// "exclude": { +// "non_active": false, +// "in_a_flow": false, +// "not_seen_recently": false +// } +// } +// +// { +// "query": "(group = \"No Age\" OR group = \"No Name\" OR uuid = \"e5bb9e6f-7703-4ba1-afba-0b12791de38b\") AND history != \"Registration\"", +// "total": 567 +// } +type previewRequest struct { + OrgID models.OrgID `json:"org_id" validate:"required"` + Include struct { + GroupUUIDs []assets.GroupUUID `json:"group_uuids"` + ContactUUIDs []flows.ContactUUID `json:"contact_uuids"` + Query string `json:"query"` + } `json:"include" validate:"required"` + Exclude models.Exclusions `json:"exclude"` +} + +type previewResponse struct { + Query string `json:"query"` + Total int `json:"total"` +} + +func handlePreviewBroadcast(ctx context.Context, rt *runtime.Runtime, r *previewRequest) (any, int, error) { + oa, err := models.GetOrgAssets(ctx, rt, r.OrgID) + if err != nil { + return nil, 0, errors.Wrapf(err, "unable to load org assets") + } + + groups := make([]*models.Group, 0, len(r.Include.GroupUUIDs)) + for _, groupUUID := range r.Include.GroupUUIDs { + g := oa.GroupByUUID(groupUUID) + if g != nil { + groups = append(groups, g) + } + } + + query, err := search.BuildRecipientsQuery(oa, nil, groups, 
r.Include.ContactUUIDs, r.Include.Query, r.Exclude, nil) + if err != nil { + isQueryError, qerr := contactql.IsQueryError(err) + if isQueryError { + return qerr, http.StatusBadRequest, nil + } + return nil, 0, err + } + if query == "" { + return &previewResponse{Query: "", Total: 0}, http.StatusOK, nil + } + + parsedQuery, total, err := search.GetContactTotal(ctx, rt, oa, query) + if err != nil { + return nil, 0, errors.Wrap(err, "error querying preview") + } + + return &previewResponse{Query: parsedQuery.String(), Total: int(total)}, http.StatusOK, nil +} diff --git a/web/msg/msg.go b/web/msg/resend.go similarity index 51% rename from web/msg/msg.go rename to web/msg/resend.go index b55cd7008..22a643259 100644 --- a/web/msg/msg.go +++ b/web/msg/resend.go @@ -9,12 +9,11 @@ import ( "github.com/nyaruka/mailroom/core/msgio" "github.com/nyaruka/mailroom/runtime" "github.com/nyaruka/mailroom/web" - "github.com/pkg/errors" ) func init() { - web.RegisterJSONRoute(http.MethodPost, "/mr/msg/resend", web.RequireAuthToken(handleResend)) + web.RegisterRoute(http.MethodPost, "/mr/msg/resend", web.RequireAuthToken(web.JSONPayload(handleResend))) } // Request to resend failed messages. 
@@ -29,26 +28,21 @@ type resendRequest struct { } // handles a request to resend the given messages -func handleResend(ctx context.Context, rt *runtime.Runtime, r *http.Request) (interface{}, int, error) { - request := &resendRequest{} - if err := web.ReadAndValidateJSON(r, request); err != nil { - return errors.Wrapf(err, "request failed validation"), http.StatusBadRequest, nil - } - +func handleResend(ctx context.Context, rt *runtime.Runtime, r *resendRequest) (any, int, error) { // grab our org - oa, err := models.GetOrgAssets(ctx, rt, request.OrgID) + oa, err := models.GetOrgAssets(ctx, rt, r.OrgID) if err != nil { - return nil, http.StatusInternalServerError, errors.Wrapf(err, "unable to load org assets") + return nil, 0, errors.Wrap(err, "unable to load org assets") } - msgs, err := models.GetMessagesByID(ctx, rt.DB, request.OrgID, models.DirectionOut, request.MsgIDs) + msgs, err := models.GetMessagesByID(ctx, rt.DB, r.OrgID, models.DirectionOut, r.MsgIDs) if err != nil { - return nil, http.StatusInternalServerError, errors.Wrap(err, "error loading messages to resend") + return nil, 0, errors.Wrap(err, "error loading messages to resend") } resends, err := models.ResendMessages(ctx, rt.DB, rt.RP, oa, msgs) if err != nil { - return nil, http.StatusInternalServerError, errors.Wrap(err, "error resending messages") + return nil, 0, errors.Wrap(err, "error resending messages") } msgio.SendMessages(ctx, rt, rt.DB, nil, resends) @@ -58,5 +52,5 @@ func handleResend(ctx context.Context, rt *runtime.Runtime, r *http.Request) (in for i, m := range resends { resentMsgIDs[i] = m.ID() } - return map[string]interface{}{"msg_ids": resentMsgIDs}, http.StatusOK, nil + return map[string]any{"msg_ids": resentMsgIDs}, http.StatusOK, nil } diff --git a/web/msg/send.go b/web/msg/send.go new file mode 100644 index 000000000..300273e7a --- /dev/null +++ b/web/msg/send.go @@ -0,0 +1,94 @@ +package msg + +import ( + "context" + "net/http" + + "github.com/nyaruka/gocommon/dates" + 
"github.com/nyaruka/goflow/utils" + "github.com/nyaruka/mailroom/core/models" + "github.com/nyaruka/mailroom/core/msgio" + "github.com/nyaruka/mailroom/runtime" + "github.com/nyaruka/mailroom/web" + "github.com/pkg/errors" +) + +func init() { + web.RegisterRoute(http.MethodPost, "/mr/msg/send", web.RequireAuthToken(web.JSONPayload(handleSend))) +} + +// Request to send a message. +// +// { +// "org_id": 1, +// "contact_id": 123456, +// "user_id": 56, +// "text": "hi there" +// } +type sendRequest struct { + OrgID models.OrgID `json:"org_id" validate:"required"` + UserID models.UserID `json:"user_id" validate:"required"` + ContactID models.ContactID `json:"contact_id" validate:"required"` + Text string `json:"text"` + Attachments []utils.Attachment `json:"attachments"` + TicketID models.TicketID `json:"ticket_id"` +} + +// handles a request to resend the given messages +func handleSend(ctx context.Context, rt *runtime.Runtime, r *sendRequest) (any, int, error) { + // grab our org + oa, err := models.GetOrgAssets(ctx, rt, r.OrgID) + if err != nil { + return nil, 0, errors.Wrap(err, "unable to load org assets") + } + + // load the contact and generate as a flow contact + c, err := models.LoadContact(ctx, rt.DB, oa, r.ContactID) + if err != nil { + return nil, 0, errors.Wrap(err, "error loading contact") + } + + contact, err := c.FlowContact(oa) + if err != nil { + return nil, 0, errors.Wrap(err, "error creating flow contact") + } + + out, ch := models.NewMsgOut(oa, contact, r.Text, r.Attachments, nil, contact.Locale(oa.Env())) + var msg *models.Msg + + if r.TicketID != models.NilTicketID { + msg, err = models.NewOutgoingTicketMsg(rt, oa.Org(), ch, contact, out, dates.Now(), r.TicketID, r.UserID) + } else { + msg, err = models.NewOutgoingChatMsg(rt, oa.Org(), ch, contact, out, dates.Now(), r.UserID) + } + + if err != nil { + return nil, 0, errors.Wrap(err, "error creating outgoing message") + } + + err = models.InsertMessages(ctx, rt.DB, []*models.Msg{msg}) + if err != 
nil { + return nil, 0, errors.Wrap(err, "error inserting outgoing message") + } + + // if message was a ticket reply, update the ticket + if r.TicketID != models.NilTicketID { + if err := models.RecordTicketReply(ctx, rt.DB, oa, r.TicketID, r.UserID); err != nil { + return nil, 0, errors.Wrap(err, "error recording ticket reply") + } + } + + msgio.SendMessages(ctx, rt, rt.DB, nil, []*models.Msg{msg}) + + return map[string]any{ + "id": msg.ID(), + "channel": out.Channel(), + "contact": contact.Reference(), + "urn": msg.URN(), + "text": msg.Text(), + "attachments": msg.Attachments(), + "status": msg.Status(), + "created_on": msg.CreatedOn(), + "modified_on": msg.ModifiedOn(), + }, http.StatusOK, nil +} diff --git a/web/msg/testdata/preview_broadcast.json b/web/msg/testdata/preview_broadcast.json new file mode 100644 index 000000000..4b6fbcbad --- /dev/null +++ b/web/msg/testdata/preview_broadcast.json @@ -0,0 +1,171 @@ +[ + { + "label": "illegal method", + "method": "GET", + "path": "/mr/msg/preview_broadcast", + "status": 405, + "response": { + "error": "illegal method: GET" + } + }, + { + "label": "missing org id", + "method": "POST", + "path": "/mr/msg/preview_broadcast", + "body": {}, + "status": 400, + "response": { + "error": "request failed validation: field 'org_id' is required" + } + }, + { + "label": "no inclusions or exclusions", + "method": "POST", + "path": "/mr/msg/preview_broadcast", + "body": { + "org_id": 1, + "include": {} + }, + "status": 200, + "response": { + "query": "", + "total": 0 + } + }, + { + "label": "manual inclusions, no exclusions", + "method": "POST", + "path": "/mr/msg/preview_broadcast", + "body": { + "org_id": 1, + "include": { + "group_uuids": [ + "c153e265-f7c9-4539-9dbc-9b358714b638", + "5e9d8fab-5e7e-4f51-b533-261af5dea70d" + ], + "contact_uuids": [ + "5a8345c1-514a-4d1b-aee5-6f39b2f53cfa", + "bd2aab59-5e28-4db4-b6e8-bbdb75fd7a0a" + ], + "query": "" + } + }, + "status": 200, + "response": { + "query": "group = \"Doctors\" OR 
group = \"Testers\" OR uuid = \"5a8345c1-514a-4d1b-aee5-6f39b2f53cfa\" OR uuid = \"bd2aab59-5e28-4db4-b6e8-bbdb75fd7a0a\"", + "total": 121 + } + }, + { + "label": "query inclusion, no exclusions", + "method": "POST", + "path": "/mr/msg/preview_broadcast", + "body": { + "org_id": 1, + "include": { + "group_uuids": [], + "contact_uuids": [], + "query": "gender = M" + } + }, + "status": 200, + "response": { + "query": "gender = \"M\"", + "total": 0 + } + }, + { + "label": "manual inclusions, all exclusions", + "method": "POST", + "path": "/mr/msg/preview_broadcast", + "body": { + "org_id": 1, + "include": { + "group_uuids": [ + "c153e265-f7c9-4539-9dbc-9b358714b638", + "5e9d8fab-5e7e-4f51-b533-261af5dea70d" + ], + "contact_uuids": [ + "5a8345c1-514a-4d1b-aee5-6f39b2f53cfa", + "bd2aab59-5e28-4db4-b6e8-bbdb75fd7a0a" + ], + "urns": [ + "tel:+1234567890", + "facebook:9876543210" + ], + "query": "" + }, + "exclude": { + "non_active": true, + "in_a_flow": true, + "not_seen_since_days": 90 + } + }, + "status": 200, + "response": { + "query": "(group = \"Doctors\" OR group = \"Testers\" OR uuid = \"5a8345c1-514a-4d1b-aee5-6f39b2f53cfa\" OR uuid = \"bd2aab59-5e28-4db4-b6e8-bbdb75fd7a0a\") AND status = \"active\" AND flow = \"\" AND last_seen_on > \"07-04-2018\"", + "total": 0 + } + }, + { + "label": "query inclusion, all exclusions", + "method": "POST", + "path": "/mr/msg/preview_broadcast", + "body": { + "org_id": 1, + "include": { + "query": "gender = M" + }, + "exclude": { + "non_active": true, + "in_a_flow": true, + "not_seen_since_days": 90 + } + }, + "status": 200, + "response": { + "query": "gender = \"M\" AND status = \"active\" AND flow = \"\" AND last_seen_on > \"07-04-2018\"", + "total": 0 + } + }, + { + "label": "invalid query inclusion (bad syntax)", + "method": "POST", + "path": "/mr/msg/preview_broadcast", + "body": { + "org_id": 1, + "include": { + "query": "gender =" + }, + "exclude": {} + }, + "status": 400, + "response": { + "error": "mismatched input '' 
expecting {TEXT, STRING}", + "code": "unexpected_token", + "extra": { + "token": "" + } + } + }, + { + "label": "invalid query inclusion (missing field)", + "method": "POST", + "path": "/mr/msg/preview_broadcast", + "body": { + "org_id": 1, + "include": { + "query": "goats > 10" + }, + "exclude": {} + }, + "status": 400, + "response": { + "error": "can't resolve 'goats' to attribute, scheme or field", + "code": "unknown_property", + "extra": { + "property": "goats" + } + } + } +] \ No newline at end of file diff --git a/web/msg/testdata/resend.json b/web/msg/testdata/resend.json index 8665cadd6..925735e7d 100644 --- a/web/msg/testdata/resend.json +++ b/web/msg/testdata/resend.json @@ -45,7 +45,7 @@ }, "db_assertions": [ { - "query": "SELECT count(*) FROM msgs_msg WHERE status = 'P'", + "query": "SELECT count(*) FROM msgs_msg WHERE status = 'Q'", "count": 2 } ] diff --git a/web/msg/testdata/send.json b/web/msg/testdata/send.json new file mode 100644 index 000000000..ee56ce97c --- /dev/null +++ b/web/msg/testdata/send.json @@ -0,0 +1,159 @@ +[ + { + "label": "illegal method", + "method": "GET", + "path": "/mr/msg/send", + "status": 405, + "response": { + "error": "illegal method: GET" + } + }, + { + "label": "invalid org_id", + "method": "POST", + "path": "/mr/msg/send", + "body": { + "org_id": 1234, + "user_id": 12, + "contact_id": 4, + "text": "hello" + }, + "status": 500, + "response": { + "error": "unable to load org assets: error loading environment for org 1234: no org with id: 1234" + } + }, + { + "label": "invalid contact_id", + "method": "POST", + "path": "/mr/msg/send", + "body": { + "org_id": 1, + "user_id": 3, + "contact_id": 123456789, + "text": "hello" + }, + "status": 500, + "response": { + "error": "error loading contact: no such contact #123456789 in org #1" + } + }, + { + "label": "text only message", + "method": "POST", + "path": "/mr/msg/send", + "body": { + "org_id": 1, + "user_id": 3, + "contact_id": 10000, + "text": "hello" + }, + "status": 
200, + "response": { + "id": 1, + "contact": { + "name": "Cathy", + "uuid": "6393abc0-283d-4c9b-a1b3-641a035c34bf" + }, + "channel": { + "name": "Twilio", + "uuid": "74729f45-7f29-4868-9dc4-90e491e3c7d8" + }, + "urn": "tel:+16055741111?id=10000&priority=1000", + "text": "hello", + "attachments": [], + "status": "Q", + "created_on": "2018-07-06T12:30:00.123456789Z", + "modified_on": "$recent_timestamp$" + }, + "db_assertions": [ + { + "query": "SELECT count(*) FROM msgs_msg WHERE direction = 'O' AND text = 'hello' AND status = 'Q' AND created_by_id = 3", + "count": 1 + } + ] + }, + { + "label": "attachments only message", + "method": "POST", + "path": "/mr/msg/send", + "body": { + "org_id": 1, + "user_id": 3, + "contact_id": 10000, + "attachments": [ + "image/jpeg:https://aws.com/test/test.jpg", + "audio/mp3:https://aws.com/test/test.mp3" + ] + }, + "status": 200, + "response": { + "id": 2, + "contact": { + "name": "Cathy", + "uuid": "6393abc0-283d-4c9b-a1b3-641a035c34bf" + }, + "channel": { + "name": "Twilio", + "uuid": "74729f45-7f29-4868-9dc4-90e491e3c7d8" + }, + "urn": "tel:+16055741111?id=10000&priority=1000", + "text": "", + "attachments": [ + "image/jpeg:https://aws.com/test/test.jpg", + "audio/mp3:https://aws.com/test/test.mp3" + ], + "status": "Q", + "created_on": "2018-07-06T12:30:00.123456789Z", + "modified_on": "$recent_timestamp$" + } + }, + { + "label": "ticket reply", + "method": "POST", + "path": "/mr/msg/send", + "body": { + "org_id": 1, + "user_id": 6, + "contact_id": 10000, + "text": "we can help", + "ticket_id": $cathy_ticket_id$ + }, + "status": 200, + "response": { + "id": 3, + "contact": { + "name": "Cathy", + "uuid": "6393abc0-283d-4c9b-a1b3-641a035c34bf" + }, + "channel": { + "name": "Twilio", + "uuid": "74729f45-7f29-4868-9dc4-90e491e3c7d8" + }, + "urn": "tel:+16055741111?id=10000&priority=1000", + "text": "we can help", + "attachments": [], + "status": "Q", + "created_on": "2018-07-06T12:30:00.123456789Z", + "modified_on": 
"$recent_timestamp$" + }, + "db_assertions": [ + { + "query": "SELECT count(*) FROM tickets_ticket WHERE id = $cathy_ticket_id$ AND replied_on IS NOT NULL AND last_activity_on > '2015-01-01T12:30:45Z'", + "count": 1 + }, + { + "query": "SELECT SUM(count) FROM tickets_ticketdailycount WHERE count_type = 'R' AND scope = 'o:1'", + "count": 1 + }, + { + "query": "SELECT SUM(count) FROM tickets_ticketdailycount WHERE count_type = 'R' AND scope = 'o:1:u:6'", + "count": 1 + }, + { + "query": "SELECT SUM(count) FROM tickets_ticketdailytiming WHERE count_type = 'R' AND scope = 'o:1'", + "count": 1 + } + ] + } +] \ No newline at end of file diff --git a/web/org/metrics_test.go b/web/org/metrics_test.go index a9f80d0f1..3bb9d7c99 100644 --- a/web/org/metrics_test.go +++ b/web/org/metrics_test.go @@ -11,20 +11,19 @@ import ( "github.com/nyaruka/mailroom/testsuite" "github.com/nyaruka/mailroom/testsuite/testdata" "github.com/nyaruka/mailroom/web" - "github.com/stretchr/testify/assert" ) func TestMetrics(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetAll) promToken := "2d26a50841ff48237238bbdd021150f6a33a4196" - db.MustExec(`INSERT INTO api_apitoken(is_active, org_id, created, key, role_id, user_id) VALUES(TRUE, $1, NOW(), $2, 12, 1);`, testdata.Org1.ID, promToken) + rt.DB.MustExec(`INSERT INTO api_apitoken(is_active, org_id, created, key, role_id, user_id) VALUES(TRUE, $1, NOW(), $2, 12, 1);`, testdata.Org1.ID, promToken) adminToken := "5c26a50841ff48237238bbdd021150f6a33a4199" - db.MustExec(`INSERT INTO api_apitoken(is_active, org_id, created, key, role_id, user_id) VALUES(TRUE, $1, NOW(), $2, 8, 1);`, testdata.Org1.ID, adminToken) + rt.DB.MustExec(`INSERT INTO api_apitoken(is_active, org_id, created, key, role_id, user_id) VALUES(TRUE, $1, NOW(), $2, 8, 1);`, testdata.Org1.ID, adminToken) wg := &sync.WaitGroup{} server := web.NewServer(ctx, rt, wg) diff --git a/web/po/base.go b/web/po/base.go new file 
mode 100644 index 000000000..4db58018f --- /dev/null +++ b/web/po/base.go @@ -0,0 +1,36 @@ +package po + +import ( + "context" + + "github.com/nyaruka/goflow/flows" + "github.com/nyaruka/mailroom/core/models" + "github.com/nyaruka/mailroom/runtime" + "github.com/pkg/errors" +) + +var excludeProperties = []string{"arguments"} + +func loadFlows(ctx context.Context, rt *runtime.Runtime, orgID models.OrgID, flowIDs []models.FlowID) ([]flows.Flow, error) { + // grab our org assets + oa, err := models.GetOrgAssets(ctx, rt, orgID) + if err != nil { + return nil, errors.Wrapf(err, "unable to load org assets") + } + + flows := make([]flows.Flow, len(flowIDs)) + for i, flowID := range flowIDs { + dbFlow, err := oa.FlowByID(flowID) + if err != nil { + return nil, errors.Wrapf(err, "unable to load flow with ID %d", flowID) + } + + flow, err := oa.SessionAssets().Flows().Get(dbFlow.UUID()) + if err != nil { + return nil, errors.Wrapf(err, "unable to read flow with UUID %s", string(dbFlow.UUID())) + } + + flows[i] = flow + } + return flows, nil +} diff --git a/web/po/base_test.go b/web/po/base_test.go new file mode 100644 index 000000000..288b783d4 --- /dev/null +++ b/web/po/base_test.go @@ -0,0 +1,14 @@ +package po_test + +import ( + "testing" + + "github.com/nyaruka/mailroom/testsuite" +) + +func TestServer(t *testing.T) { + ctx, rt := testsuite.Runtime() + + testsuite.RunWebTests(t, ctx, rt, "testdata/export.json", nil) + testsuite.RunWebTests(t, ctx, rt, "testdata/import.json", nil) +} diff --git a/web/po/export.go b/web/po/export.go new file mode 100644 index 000000000..4b4a1b2a2 --- /dev/null +++ b/web/po/export.go @@ -0,0 +1,55 @@ +package po + +import ( + "context" + "net/http" + + "github.com/go-chi/chi/middleware" + "github.com/nyaruka/goflow/envs" + "github.com/nyaruka/goflow/flows/translation" + "github.com/nyaruka/mailroom/core/models" + "github.com/nyaruka/mailroom/runtime" + "github.com/nyaruka/mailroom/web" + "github.com/pkg/errors" +) + +func init() { + 
web.RegisterRoute(http.MethodPost, "/mr/po/export", web.RequireAuthToken(handleExport)) +} + +// Exports a PO file from the given set of flows. +// +// { +// "org_id": 123, +// "flow_ids": [123, 354, 456], +// "language": "spa" +// } +type exportRequest struct { + OrgID models.OrgID `json:"org_id" validate:"required"` + FlowIDs []models.FlowID `json:"flow_ids" validate:"required"` + Language envs.Language `json:"language" validate:"omitempty,language"` +} + +func handleExport(ctx context.Context, rt *runtime.Runtime, r *http.Request, rawW http.ResponseWriter) error { + request := &exportRequest{} + if err := web.ReadAndValidateJSON(r, request); err != nil { + return errors.Wrapf(err, "request failed validation") + } + + flows, err := loadFlows(ctx, rt, request.OrgID, request.FlowIDs) + if err != nil { + return err + } + + // extract everything the engine considers localizable except router arguments + po, err := translation.ExtractFromFlows("Generated by mailroom", request.Language, excludeProperties, flows...) + if err != nil { + return errors.Wrapf(err, "unable to extract PO from flows") + } + + w := middleware.NewWrapResponseWriter(rawW, r.ProtoMajor) + w.Header().Set("Content-type", "text/x-gettext-translation") + w.WriteHeader(http.StatusOK) + po.Write(w) + return nil +} diff --git a/web/po/import.go b/web/po/import.go new file mode 100644 index 000000000..6c8d89803 --- /dev/null +++ b/web/po/import.go @@ -0,0 +1,60 @@ +package po + +import ( + "context" + "net/http" + + "github.com/nyaruka/goflow/envs" + "github.com/nyaruka/goflow/flows/translation" + "github.com/nyaruka/goflow/utils/i18n" + "github.com/nyaruka/mailroom/core/models" + "github.com/nyaruka/mailroom/runtime" + "github.com/nyaruka/mailroom/web" + "github.com/pkg/errors" +) + +func init() { + web.RegisterRoute(http.MethodPost, "/mr/po/import", web.RequireAuthToken(web.MarshaledResponse(handleImport))) +} + +// Imports translations from a PO file into the given set of flows. 
+// +// { +// "org_id": 123, +// "flow_ids": [123, 354, 456], +// "language": "spa" +// } +type importForm struct { + OrgID models.OrgID `form:"org_id" validate:"required"` + FlowIDs []models.FlowID `form:"flow_ids" validate:"required"` + Language envs.Language `form:"language" validate:"required"` +} + +func handleImport(ctx context.Context, rt *runtime.Runtime, r *http.Request) (any, int, error) { + form := &importForm{} + if err := web.DecodeAndValidateForm(form, r); err != nil { + return err, http.StatusBadRequest, nil + } + + poFile, _, err := r.FormFile("po") + if err != nil { + return errors.Wrapf(err, "missing po file on request"), http.StatusBadRequest, nil + } + + po, err := i18n.ReadPO(poFile) + if err != nil { + return errors.Wrapf(err, "invalid po file"), http.StatusBadRequest, nil + } + + flows, err := loadFlows(ctx, rt, form.OrgID, form.FlowIDs) + if err != nil { + return err, http.StatusBadRequest, nil + } + + err = translation.ImportIntoFlows(po, form.Language, excludeProperties, flows...) + if err != nil { + return err, http.StatusBadRequest, nil + } + + return map[string]interface{}{"flows": flows}, http.StatusOK, nil +} diff --git a/web/po/po.go b/web/po/po.go deleted file mode 100644 index 7521e4a9b..000000000 --- a/web/po/po.go +++ /dev/null @@ -1,125 +0,0 @@ -package flow - -import ( - "context" - "net/http" - - "github.com/nyaruka/goflow/envs" - "github.com/nyaruka/goflow/flows" - "github.com/nyaruka/goflow/flows/translation" - "github.com/nyaruka/goflow/utils/i18n" - "github.com/nyaruka/mailroom/core/models" - "github.com/nyaruka/mailroom/runtime" - "github.com/nyaruka/mailroom/web" - - "github.com/go-chi/chi/middleware" - "github.com/pkg/errors" -) - -func init() { - web.RegisterRoute(http.MethodPost, "/mr/po/export", handleExport) - web.RegisterJSONRoute(http.MethodPost, "/mr/po/import", handleImport) -} - -// Exports a PO file from the given set of flows. 
-// -// { -// "org_id": 123, -// "flow_ids": [123, 354, 456], -// "language": "spa" -// } -type exportRequest struct { - OrgID models.OrgID `json:"org_id" validate:"required"` - FlowIDs []models.FlowID `json:"flow_ids" validate:"required"` - Language envs.Language `json:"language" validate:"omitempty,language"` -} - -func handleExport(ctx context.Context, rt *runtime.Runtime, r *http.Request, rawW http.ResponseWriter) error { - request := &exportRequest{} - if err := web.ReadAndValidateJSON(r, request); err != nil { - return errors.Wrapf(err, "request failed validation") - } - - flows, err := loadFlows(ctx, rt, request.OrgID, request.FlowIDs) - if err != nil { - return err - } - - // extract everything the engine considers localizable except router arguments - po, err := translation.ExtractFromFlows("Generated by mailroom", request.Language, []string{"arguments"}, flows...) - if err != nil { - return errors.Wrapf(err, "unable to extract PO from flows") - } - - w := middleware.NewWrapResponseWriter(rawW, r.ProtoMajor) - w.Header().Set("Content-type", "text/x-gettext-translation") - w.WriteHeader(http.StatusOK) - po.Write(w) - return nil -} - -// Imports translations from a PO file into the given set of flows. 
-// -// { -// "org_id": 123, -// "flow_ids": [123, 354, 456], -// "language": "spa" -// } -type importForm struct { - OrgID models.OrgID `form:"org_id" validate:"required"` - FlowIDs []models.FlowID `form:"flow_ids" validate:"required"` - Language envs.Language `form:"language" validate:"required"` -} - -func handleImport(ctx context.Context, rt *runtime.Runtime, r *http.Request) (interface{}, int, error) { - form := &importForm{} - if err := web.DecodeAndValidateForm(form, r); err != nil { - return err, http.StatusBadRequest, nil - } - - poFile, _, err := r.FormFile("po") - if err != nil { - return errors.Wrapf(err, "missing po file on request"), http.StatusBadRequest, nil - } - - po, err := i18n.ReadPO(poFile) - if err != nil { - return errors.Wrapf(err, "invalid po file"), http.StatusBadRequest, nil - } - - flows, err := loadFlows(ctx, rt, form.OrgID, form.FlowIDs) - if err != nil { - return err, http.StatusBadRequest, nil - } - - err = translation.ImportIntoFlows(po, form.Language, flows...) 
- if err != nil { - return err, http.StatusBadRequest, nil - } - - return map[string]interface{}{"flows": flows}, http.StatusOK, nil -} - -func loadFlows(ctx context.Context, rt *runtime.Runtime, orgID models.OrgID, flowIDs []models.FlowID) ([]flows.Flow, error) { - // grab our org assets - oa, err := models.GetOrgAssets(ctx, rt, orgID) - if err != nil { - return nil, errors.Wrapf(err, "unable to load org assets") - } - - flows := make([]flows.Flow, len(flowIDs)) - for i, flowID := range flowIDs { - dbFlow, err := oa.FlowByID(flowID) - if err != nil { - return nil, errors.Wrapf(err, "unable to load flow with ID %d", flowID) - } - - flow, err := oa.SessionAssets().Flows().Get(dbFlow.UUID()) - if err != nil { - return nil, errors.Wrapf(err, "unable to read flow with UUID %s", string(dbFlow.UUID())) - } - - flows[i] = flow - } - return flows, nil -} diff --git a/web/po/po_test.go b/web/po/po_test.go deleted file mode 100644 index 374acc87a..000000000 --- a/web/po/po_test.go +++ /dev/null @@ -1,15 +0,0 @@ -package flow_test - -import ( - "testing" - - "github.com/nyaruka/mailroom/testsuite" - "github.com/nyaruka/mailroom/web" -) - -func TestServer(t *testing.T) { - ctx, rt, _, _ := testsuite.Get() - - web.RunWebTests(t, ctx, rt, "testdata/export.json", nil) - web.RunWebTests(t, ctx, rt, "testdata/import.json", nil) -} diff --git a/web/po/testdata/import.json b/web/po/testdata/import.json index a0826ed51..a70e78605 100644 --- a/web/po/testdata/import.json +++ b/web/po/testdata/import.json @@ -38,18 +38,13 @@ { "uuid": "9de3663f-c5c5-4c92-9f45-ecbc09abcc85", "name": "Favorites", - "spec_version": "13.1.0", - "language": "base", + "spec_version": "13.2.0", + "language": "und", "type": "messaging", "revision": 1, "expire_after_minutes": 720, "localization": { "spa": { - "8d2e259c-bc3c-464f-8c15-985bc736e212": { - "arguments": [ - "Azul" - ] - }, "baf07ebb-8a2a-4e63-aa08-d19aa408cd45": { "name": [ "Azul" diff --git a/web/server.go b/web/server.go index 95fe82813..1c7facbfa 
100644 --- a/web/server.go +++ b/web/server.go @@ -27,33 +27,29 @@ const ( maxRequestBytes int64 = 1048576 * 50 // 50MB ) -type JSONHandler func(ctx context.Context, rt *runtime.Runtime, r *http.Request) (interface{}, int, error) type Handler func(ctx context.Context, rt *runtime.Runtime, r *http.Request, w http.ResponseWriter) error -type jsonRoute struct { - method string - pattern string - handler JSONHandler -} - -var jsonRoutes = make([]*jsonRoute, 0) - type route struct { method string pattern string handler Handler } -var routes = make([]*route, 0) - -func RegisterJSONRoute(method string, pattern string, handler JSONHandler) { - jsonRoutes = append(jsonRoutes, &jsonRoute{method, pattern, handler}) -} +var routes []*route func RegisterRoute(method string, pattern string, handler Handler) { routes = append(routes, &route{method, pattern, handler}) } +type Server struct { + ctx context.Context + rt *runtime.Runtime + + wg *sync.WaitGroup + + httpServer *http.Server +} + // NewServer creates a new web server, it will need to be started after being created func NewServer(ctx context.Context, rt *runtime.Runtime, wg *sync.WaitGroup) *Server { s := &Server{ctx: ctx, rt: rt, wg: wg} @@ -69,17 +65,12 @@ func NewServer(ctx context.Context, rt *runtime.Runtime, wg *sync.WaitGroup) *Se router.Use(requestLogger) // wire up our main pages - router.NotFound(s.WrapJSONHandler(handle404)) - router.MethodNotAllowed(s.WrapJSONHandler(handle405)) - router.Get("/", s.WrapJSONHandler(handleIndex)) - router.Get("/mr/", s.WrapJSONHandler(handleIndex)) - - // add any registered json routes - for _, route := range jsonRoutes { - router.Method(route.method, route.pattern, s.WrapJSONHandler(route.handler)) - } + router.NotFound(handle404) + router.MethodNotAllowed(handle405) + router.Get("/", s.WrapHandler(handleIndex)) + router.Get("/mr/", s.WrapHandler(handleIndex)) - // and any normal routes + // and all registered routes for _, route := range routes { router.Method(route.method, 
route.pattern, s.WrapHandler(route.handler)) } @@ -96,45 +87,9 @@ func NewServer(ctx context.Context, rt *runtime.Runtime, wg *sync.WaitGroup) *Se return s } -// WrapJSONHandler wraps a simple JSONHandler -func (s *Server) WrapJSONHandler(handler JSONHandler) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - w.Header().Set("Content-type", "application/json") - - value, status, err := handler(r.Context(), s.rt, r) - - // handler errored (a hard error) - if err != nil { - value = NewErrorResponse(err) - } else { - // handler returned an error to use as a the response - asError, isError := value.(error) - if isError { - value = NewErrorResponse(asError) - } - } - - serialized, serr := jsonx.MarshalPretty(value) - if serr != nil { - logrus.WithError(err).WithField("http_request", r).Error("error serializing handler response") - w.WriteHeader(http.StatusInternalServerError) - w.Write([]byte(`{"error": "error serializing handler response"}`)) - return - } - - if err != nil { - logrus.WithError(err).WithField("http_request", r).Error("error handling request") - w.WriteHeader(http.StatusInternalServerError) - w.Write(serialized) - return - } - - w.WriteHeader(status) - w.Write(serialized) - } -} - -// WrapHandler wraps a simple Handler, taking care of passing down server and handling errors +// WrapHandler wraps a simple handler and +// 1. adds server runtime to the handler func +// 2. 
allows an error return value to be logged and returned as a 500 func (s *Server) WrapHandler(handler Handler) http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { err := handler(r.Context(), s.rt, r, w) @@ -143,9 +98,8 @@ func (s *Server) WrapHandler(handler Handler) http.HandlerFunc { } logrus.WithError(err).WithField("http_request", r).Error("error handling request") - w.WriteHeader(http.StatusInternalServerError) - serialized := jsonx.MustMarshal(NewErrorResponse(err)) - w.Write(serialized) + + WriteMarshalled(w, http.StatusInternalServerError, NewErrorResponse(err)) } } @@ -174,28 +128,31 @@ func (s *Server) Stop() { } } -func handleIndex(ctx context.Context, rt *runtime.Runtime, r *http.Request) (interface{}, int, error) { - response := map[string]string{ +func handleIndex(ctx context.Context, rt *runtime.Runtime, r *http.Request, w http.ResponseWriter) error { + return WriteMarshalled(w, http.StatusOK, map[string]string{ "url": r.URL.String(), "component": "mailroom", "version": rt.Config.Version, - } - return response, http.StatusOK, nil + }) } -func handle404(ctx context.Context, rt *runtime.Runtime, r *http.Request) (interface{}, int, error) { - return errors.Errorf("not found: %s", r.URL.String()), http.StatusNotFound, nil +func handle404(w http.ResponseWriter, r *http.Request) { + WriteMarshalled(w, http.StatusNotFound, NewErrorResponse(errors.Errorf("not found: %s", r.URL.String()))) } -func handle405(ctx context.Context, rt *runtime.Runtime, r *http.Request) (interface{}, int, error) { - return errors.Errorf("illegal method: %s", r.Method), http.StatusMethodNotAllowed, nil +func handle405(w http.ResponseWriter, r *http.Request) { + WriteMarshalled(w, http.StatusMethodNotAllowed, NewErrorResponse(errors.Errorf("illegal method: %s", r.Method))) } -type Server struct { - ctx context.Context - rt *runtime.Runtime +func WriteMarshalled(w http.ResponseWriter, status int, value any) error { + w.Header().Set("Content-type", 
"application/json") + w.WriteHeader(status) - wg *sync.WaitGroup + marshaled, err := jsonx.MarshalPretty(value) + if err != nil { + return err + } - httpServer *http.Server + w.Write(marshaled) + return nil } diff --git a/web/server_test.go b/web/server_test.go index 75c42a6b5..3c0627052 100644 --- a/web/server_test.go +++ b/web/server_test.go @@ -1,4 +1,4 @@ -package web +package web_test import ( "testing" @@ -7,7 +7,7 @@ import ( ) func TestServer(t *testing.T) { - ctx, rt, _, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() - RunWebTests(t, ctx, rt, "testdata/server.json", nil) + testsuite.RunWebTests(t, ctx, rt, "testdata/server.json", nil) } diff --git a/web/simulation/simulation.go b/web/simulation/simulation.go index 65506e9da..53ade1077 100644 --- a/web/simulation/simulation.go +++ b/web/simulation/simulation.go @@ -25,8 +25,8 @@ var testChannel = assets.NewChannelReference("440099cf-200c-4d45-a8e7-4a564f4a0e var testURN = urns.URN("tel:+12065551212") func init() { - web.RegisterJSONRoute(http.MethodPost, "/mr/sim/start", web.RequireAuthToken(handleStart)) - web.RegisterJSONRoute(http.MethodPost, "/mr/sim/resume", web.RequireAuthToken(handleResume)) + web.RegisterRoute(http.MethodPost, "/mr/sim/start", web.RequireAuthToken(web.JSONPayload(handleStart))) + web.RegisterRoute(http.MethodPost, "/mr/sim/resume", web.RequireAuthToken(web.JSONPayload(handleResume))) } type flowDefinition struct { @@ -117,26 +117,20 @@ func handleSimulationEvents(ctx context.Context, db models.Queryer, oa *models.O } // handles a request to /start -func handleStart(ctx context.Context, rt *runtime.Runtime, r *http.Request) (interface{}, int, error) { - request := &startRequest{} - if err := web.ReadAndValidateJSON(r, request); err != nil { - return nil, http.StatusBadRequest, errors.Wrapf(err, "request failed validation") - } - - // grab our org assets - oa, err := models.GetOrgAssets(ctx, rt, request.OrgID) +func handleStart(ctx context.Context, rt *runtime.Runtime, r 
*startRequest) (any, int, error) { + oa, err := models.GetOrgAssets(ctx, rt, r.OrgID) if err != nil { return nil, http.StatusBadRequest, errors.Wrapf(err, "unable to load org assets") } // create clone of assets for simulation - oa, err = oa.CloneForSimulation(ctx, rt, request.flows(), request.channels()) + oa, err = oa.CloneForSimulation(ctx, rt, r.flows(), r.channels()) if err != nil { return nil, http.StatusBadRequest, errors.Wrapf(err, "unable to clone org") } // read our trigger - trigger, err := triggers.ReadTrigger(oa.SessionAssets(), request.Trigger, assets.IgnoreMissing) + trigger, err := triggers.ReadTrigger(oa.SessionAssets(), r.Trigger, assets.IgnoreMissing) if err != nil { return nil, http.StatusBadRequest, errors.Wrapf(err, "unable to read trigger") } @@ -145,16 +139,16 @@ func handleStart(ctx context.Context, rt *runtime.Runtime, r *http.Request) (int } // triggerFlow creates a new session with the passed in trigger, returning our standard response -func triggerFlow(ctx context.Context, rt *runtime.Runtime, oa *models.OrgAssets, trigger flows.Trigger) (interface{}, int, error) { +func triggerFlow(ctx context.Context, rt *runtime.Runtime, oa *models.OrgAssets, trigger flows.Trigger) (any, int, error) { // start our flow session session, sprint, err := goflow.Simulator(rt.Config).NewSession(oa.SessionAssets(), trigger) if err != nil { - return nil, http.StatusInternalServerError, errors.Wrapf(err, "error starting session") + return nil, 0, errors.Wrapf(err, "error starting session") } err = handleSimulationEvents(ctx, rt.DB, oa, sprint.Events()) if err != nil { - return nil, http.StatusInternalServerError, errors.Wrapf(err, "error handling simulation events") + return nil, 0, errors.Wrapf(err, "error handling simulation events") } return newSimulationResponse(session, sprint), http.StatusOK, nil @@ -179,31 +173,25 @@ type resumeRequest struct { Resume json.RawMessage `json:"resume" validate:"required"` } -func handleResume(ctx context.Context, rt 
*runtime.Runtime, r *http.Request) (interface{}, int, error) { - request := &resumeRequest{} - if err := web.ReadAndValidateJSON(r, request); err != nil { - return nil, http.StatusBadRequest, err - } - - // grab our org assets - oa, err := models.GetOrgAssets(ctx, rt, request.OrgID) +func handleResume(ctx context.Context, rt *runtime.Runtime, r *resumeRequest) (any, int, error) { + oa, err := models.GetOrgAssets(ctx, rt, r.OrgID) if err != nil { return nil, http.StatusBadRequest, err } // create clone of assets for simulation - oa, err = oa.CloneForSimulation(ctx, rt, request.flows(), request.channels()) + oa, err = oa.CloneForSimulation(ctx, rt, r.flows(), r.channels()) if err != nil { return nil, http.StatusBadRequest, err } - session, err := goflow.Simulator(rt.Config).ReadSession(oa.SessionAssets(), request.Session, assets.IgnoreMissing) + session, err := goflow.Simulator(rt.Config).ReadSession(oa.SessionAssets(), r.Session, assets.IgnoreMissing) if err != nil { return nil, http.StatusBadRequest, err } // read our resume - resume, err := resumes.ReadResume(oa.SessionAssets(), request.Resume, assets.IgnoreMissing) + resume, err := resumes.ReadResume(oa.SessionAssets(), r.Resume, assets.IgnoreMissing) if err != nil { return nil, http.StatusBadRequest, err } @@ -228,7 +216,7 @@ func handleResume(ctx context.Context, rt *runtime.Runtime, r *http.Request) (in if flow == nil || (!flow.IgnoreTriggers() && trigger.TriggerType() == models.KeywordTriggerType) { triggeredFlow, err := oa.FlowByID(trigger.FlowID()) if err != nil && err != models.ErrNotFound { - return nil, http.StatusInternalServerError, errors.Wrapf(err, "unable to load triggered flow") + return nil, 0, errors.Wrapf(err, "unable to load triggered flow") } if triggeredFlow != nil { @@ -257,12 +245,12 @@ func handleResume(ctx context.Context, rt *runtime.Runtime, r *http.Request) (in // resume our session sprint, err := session.Resume(resume) if err != nil { - return nil, http.StatusInternalServerError, err 
+ return nil, 0, err } err = handleSimulationEvents(ctx, rt.DB, oa, sprint.Events()) if err != nil { - return nil, http.StatusInternalServerError, errors.Wrapf(err, "error handling simulation events") + return nil, 0, errors.Wrapf(err, "error handling simulation events") } return newSimulationResponse(session, sprint), http.StatusOK, nil diff --git a/web/simulation/simulation_test.go b/web/simulation/simulation_test.go index 0c3da1a51..ef28309f1 100644 --- a/web/simulation/simulation_test.go +++ b/web/simulation/simulation_test.go @@ -197,7 +197,7 @@ const ( ) func TestServer(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetData) @@ -214,13 +214,13 @@ func TestServer(t *testing.T) { var session json.RawMessage // add a trigger for our campaign flow with 'trigger' - testdata.InsertKeywordTrigger(db, testdata.Org1, testdata.CampaignFlow, "trigger", models.MatchOnly, nil, nil) + testdata.InsertKeywordTrigger(rt, testdata.Org1, testdata.CampaignFlow, "trigger", models.MatchOnly, nil, nil) // and a trigger which will trigger an IVR flow - testdata.InsertKeywordTrigger(db, testdata.Org1, testdata.IVRFlow, "ivr", models.MatchOnly, nil, nil) + testdata.InsertKeywordTrigger(rt, testdata.Org1, testdata.IVRFlow, "ivr", models.MatchOnly, nil, nil) // also add a catch all - testdata.InsertCatchallTrigger(db, testdata.Org1, testdata.CampaignFlow, nil, nil) + testdata.InsertCatchallTrigger(rt, testdata.Org1, testdata.CampaignFlow, nil, nil) tcs := []struct { URL string diff --git a/web/surveyor/surveyor.go b/web/surveyor/submit.go similarity index 74% rename from web/surveyor/surveyor.go rename to web/surveyor/submit.go index 9a33d9f81..0fab85283 100644 --- a/web/surveyor/surveyor.go +++ b/web/surveyor/submit.go @@ -4,7 +4,9 @@ import ( "context" "encoding/json" "net/http" + "time" + "github.com/nyaruka/gocommon/dates" "github.com/nyaruka/gocommon/urns" "github.com/nyaruka/goflow/assets" 
"github.com/nyaruka/goflow/envs" @@ -16,12 +18,15 @@ import ( "github.com/nyaruka/mailroom/core/models" "github.com/nyaruka/mailroom/runtime" "github.com/nyaruka/mailroom/web" - "github.com/pkg/errors" ) +const ( + maxSubmissionAge = time.Hour * 24 * 90 +) + func init() { - web.RegisterJSONRoute(http.MethodPost, "/mr/surveyor/submit", web.RequireUserToken(handleSubmit)) + web.RegisterRoute(http.MethodPost, "/mr/surveyor/submit", web.RequireUserToken(web.MarshaledResponse(handleSubmit))) } // Represents a surveyor submission @@ -49,28 +54,33 @@ type submitResponse struct { } // handles a surveyor request -func handleSubmit(ctx context.Context, rt *runtime.Runtime, r *http.Request) (interface{}, int, error) { +func handleSubmit(ctx context.Context, rt *runtime.Runtime, r *http.Request) (any, int, error) { request := &submitRequest{} if err := web.ReadAndValidateJSON(r, request); err != nil { - return nil, http.StatusBadRequest, errors.Wrapf(err, "request failed validation") + return nil, 0, errors.Wrapf(err, "request failed validation") } // grab our org assets orgID := ctx.Value(web.OrgIDKey).(models.OrgID) oa, err := models.GetOrgAssets(ctx, rt, orgID) if err != nil { - return nil, http.StatusBadRequest, errors.Wrapf(err, "unable to load org assets") + return nil, 0, errors.Wrapf(err, "unable to load org assets") } // and our user id _, valid := ctx.Value(web.UserIDKey).(int64) if !valid { - return nil, http.StatusInternalServerError, errors.Errorf("missing request user") + return nil, 0, errors.Errorf("missing request user") } fs, err := goflow.Engine(rt.Config).ReadSession(oa.SessionAssets(), request.Session, assets.IgnoreMissing) if err != nil { - return nil, http.StatusBadRequest, errors.Wrapf(err, "error reading session") + return nil, 0, errors.Wrapf(err, "error reading session") + } + + // reject any really old sessions as this could create messages/runs outside of the archival period + if dates.Since(fs.Trigger().TriggeredOn()) > maxSubmissionAge { + return 
nil, 0, errors.New("session too old to be submitted") } // and our events @@ -78,7 +88,7 @@ func handleSubmit(ctx context.Context, rt *runtime.Runtime, r *http.Request) (in for _, e := range request.Events { event, err := events.ReadEvent(e) if err != nil { - return nil, http.StatusBadRequest, errors.Wrapf(err, "error unmarshalling event: %s", string(e)) + return nil, 0, errors.Wrapf(err, "error unmarshalling event: %s", string(e)) } sessionEvents = append(sessionEvents, event) } @@ -86,7 +96,7 @@ func handleSubmit(ctx context.Context, rt *runtime.Runtime, r *http.Request) (in // and our modifiers mods, err := goflow.ReadModifiers(oa.SessionAssets(), request.Modifiers, goflow.IgnoreMissing) if err != nil { - return nil, http.StatusBadRequest, err + return nil, 0, err } // get the current version of this contact from the database @@ -99,12 +109,12 @@ func handleSubmit(ctx context.Context, rt *runtime.Runtime, r *http.Request) (in modelContact, flowContact, _, err = models.GetOrCreateContact(ctx, rt.DB, oa, []urns.URN{urn}, models.NilChannelID) if err != nil { - return nil, http.StatusInternalServerError, errors.Wrapf(err, "unable to look up contact") + return nil, 0, errors.Wrapf(err, "unable to look up contact") } } else { modelContact, flowContact, err = models.CreateContact(ctx, rt.DB, oa, models.NilUserID, "", envs.NilLanguage, nil) if err != nil { - return nil, http.StatusInternalServerError, errors.Wrapf(err, "unable to create contact") + return nil, 0, errors.Wrapf(err, "unable to create contact") } } @@ -130,7 +140,7 @@ func handleSubmit(ctx context.Context, rt *runtime.Runtime, r *http.Request) (in // write our session out tx, err := rt.DB.BeginTxx(ctx, nil) if err != nil { - return nil, http.StatusInternalServerError, errors.Wrapf(err, "error starting transaction for session write") + return nil, 0, errors.Wrap(err, "error starting transaction for session write") } sessions, err := models.InsertSessions(ctx, rt, tx, oa, []flows.Session{fs}, 
[]flows.Sprint{sprint}, []*models.Contact{modelContact}, nil) if err == nil && len(sessions) == 0 { @@ -138,27 +148,27 @@ func handleSubmit(ctx context.Context, rt *runtime.Runtime, r *http.Request) (in } if err != nil { tx.Rollback() - return nil, http.StatusInternalServerError, errors.Wrapf(err, "error writing session") + return nil, 0, errors.Wrap(err, "error writing session") } err = tx.Commit() if err != nil { - return nil, http.StatusInternalServerError, errors.Wrapf(err, "error committing sessions") + return nil, 0, errors.Wrap(err, "error committing sessions") } tx, err = rt.DB.BeginTxx(ctx, nil) if err != nil { - return nil, http.StatusInternalServerError, errors.Wrapf(err, "error starting transaction for post commit hooks") + return nil, 0, errors.Wrap(err, "error starting transaction for post commit hooks") } // write our post commit hooks err = models.ApplyEventPostCommitHooks(ctx, rt, tx, oa, []*models.Scene{sessions[0].Scene()}) if err != nil { tx.Rollback() - return nil, http.StatusInternalServerError, errors.Wrapf(err, "error applying post commit hooks") + return nil, 0, errors.Wrap(err, "error applying post commit hooks") } err = tx.Commit() if err != nil { - return nil, http.StatusInternalServerError, errors.Wrapf(err, "error committing post commit hooks") + return nil, 0, errors.Wrap(err, "error committing post commit hooks") } response := &submitResponse{} diff --git a/web/surveyor/submit_test.go b/web/surveyor/submit_test.go new file mode 100644 index 000000000..149e8abb1 --- /dev/null +++ b/web/surveyor/submit_test.go @@ -0,0 +1,204 @@ +package surveyor + +import ( + "bytes" + "fmt" + "io" + "net/http" + "path/filepath" + "sync" + "testing" + "time" + + "github.com/buger/jsonparser" + "github.com/nyaruka/gocommon/dates" + "github.com/nyaruka/goflow/assets" + "github.com/nyaruka/goflow/flows" + _ "github.com/nyaruka/mailroom/core/handlers" + "github.com/nyaruka/mailroom/core/models" + "github.com/nyaruka/mailroom/testsuite" + 
"github.com/nyaruka/mailroom/testsuite/testdata" + "github.com/nyaruka/mailroom/web" + "github.com/stretchr/testify/assert" +) + +func TestSurveyor(t *testing.T) { + ctx, rt := testsuite.Runtime() + rc := rt.RP.Get() + defer rc.Close() + + defer testsuite.Reset(testsuite.ResetAll) + + wg := &sync.WaitGroup{} + server := web.NewServer(ctx, rt, wg) + server.Start() + defer server.Stop() + + // insert an auth token for user 1 for org 1 + rt.DB.MustExec(`INSERT INTO api_apitoken(is_active, key, created, org_id, role_id, user_id) VALUES(TRUE, 'sesame', NOW(), 1, 5, 1)`) + + type Assertion struct { + Query string + Count int + } + + tcs := []struct { + file string + token string + expectedStatus int + expectedContains string + assertions []Assertion + }{ + { + file: "valid_submission1.json", + token: "", + expectedStatus: 401, + expectedContains: "missing authorization", + }, + { + file: "valid_submission1.json", + token: "invalid", + expectedStatus: 401, + expectedContains: "invalid authorization", + assertions: []Assertion{ + {`SELECT count(*) FROM flows_flowrun WHERE flow_id = :flow_id`, 0}, + }, + }, + // new contact is created (our test db already has a bob, he should be unaffected) + { + file: "valid_submission1.json", + token: "sesame", + expectedStatus: 201, + expectedContains: `"status": "C"`, + assertions: []Assertion{ + {`SELECT count(*) FROM flows_flowrun WHERE flow_id = :flow_id AND contact_id = :contact_id AND status = 'C'`, 1}, + {`SELECT count(*) FROM contacts_contact WHERE name = 'Bob' AND org_id = 1`, 2}, + {`SELECT count(*) FROM contacts_contact WHERE uuid = 'bdfe862c-84f8-422e-8fdc-ebfaaae0697a'`, 0}, + {`SELECT count(*) FROM contacts_contact WHERE name = 'Bob' AND fields -> :age_field_uuid = jsonb_build_object('text', '37', 'number', 37)`, 1}, + {`SELECT count(*) FROM contacts_contacturn WHERE identity = 'tel::+593979123456' AND contact_id = :contact_id`, 1}, + {`SELECT count(*) FROM contacts_contactgroup_contacts WHERE contact_id = :contact_id and 
contactgroup_id = :testers_group_id`, 1}, + {`SELECT count(*) FROM msgs_msg WHERE contact_id = :contact_id AND contact_urn_id IS NULL AND direction = 'O' AND org_id = :org_id`, 4}, + {`SELECT count(*) FROM msgs_msg WHERE contact_id = :contact_id AND contact_urn_id IS NULL AND direction = 'I' AND org_id = :org_id`, 3}, + }, + }, + // dupe submission should fail due to run UUIDs being duplicated + { + file: "valid_submission1.json", + token: "sesame", + expectedStatus: 500, + expectedContains: `error writing runs`, + assertions: []Assertion{ + {`SELECT count(*) FROM flows_flowrun WHERE flow_id = :flow_id`, 1}, + }, + }, + // but submission with new UUIDs should succeed, new run is created but not contact + { + file: "valid_submission2.json", + token: "sesame", + expectedStatus: 201, + expectedContains: `"status": "C"`, + assertions: []Assertion{ + {`SELECT count(*) FROM flows_flowrun WHERE flow_id = :flow_id AND contact_id = :contact_id`, 2}, + {`SELECT count(*) FROM contacts_contact WHERE uuid = 'bdfe862c-84f8-422e-8fdc-ebfaaae0697a'`, 0}, + {`SELECT count(*) FROM contacts_contact WHERE name = 'Bob' AND fields -> :age_field_uuid = jsonb_build_object('text', '37', 'number', 37)`, 1}, + {`SELECT count(*) FROM contacts_contacturn WHERE identity = 'tel::+593979123456' AND contact_id = :contact_id`, 1}, + {`SELECT count(*) FROM contacts_contactgroup_contacts WHERE contact_id = :contact_id and contactgroup_id = :testers_group_id`, 1}, + {`SELECT count(*) FROM msgs_msg WHERE contact_id = :contact_id AND contact_urn_id IS NULL AND direction = 'O' AND org_id = :org_id`, 8}, + {`SELECT count(*) FROM msgs_msg WHERE contact_id = :contact_id AND contact_urn_id IS NULL AND direction = 'I' AND org_id = :org_id`, 6}, + }}, + // group removal is ONLY in the modifier + { + file: "remove_group.json", + token: "sesame", + expectedStatus: 201, + expectedContains: `"status": "C"`, + assertions: []Assertion{ + {`SELECT count(*) FROM flows_flowrun WHERE flow_id = :flow_id AND contact_id = 
:contact_id`, 3}, + {`SELECT count(*) FROM contacts_contact WHERE uuid = 'bdfe862c-84f8-422e-8fdc-ebfaaae0697a'`, 0}, + {`SELECT count(*) FROM contacts_contact WHERE name = 'Bob' AND fields -> :age_field_uuid = jsonb_build_object('text', '37', 'number', 37)`, 1}, + {`SELECT count(*) FROM contacts_contacturn WHERE identity = 'tel::+593979123456' AND contact_id = :contact_id`, 1}, + {`SELECT count(*) FROM contacts_contactgroup_contacts WHERE contact_id = :contact_id and contactgroup_id = :testers_group_id`, 0}, + }, + }, + // new contact, new session, group and field no longer exist + { + file: "missing_group_field.json", + token: "sesame", + expectedStatus: 201, + expectedContains: `"status": "C"`, + assertions: []Assertion{ + {`SELECT count(*) FROM flows_flowrun WHERE flow_id = :flow_id AND contact_id = :contact_id`, 1}, + {`SELECT count(*) FROM contacts_contact WHERE uuid = 'c7fa24ca-48f9-45bf-b923-f95aa49c3cd2'`, 0}, + {`SELECT count(*) FROM contacts_contact WHERE name = 'Fred' AND fields = jsonb_build_object()`, 1}, + {`SELECT count(*) FROM contacts_contacturn WHERE identity = 'tel::+593979123488' AND contact_id = :contact_id`, 1}, + {`SELECT count(*) FROM contacts_contactgroup_contacts WHERE contact_id = :contact_id and contactgroup_id = :testers_group_id`, 0}, + }, + }, + // submission that is too old should fail + { + file: "too_old.json", + token: "sesame", + expectedStatus: 500, + expectedContains: `"error": "session too old to be submitted"`, + assertions: []Assertion{ + {`SELECT count(*) FROM flows_flowrun WHERE flow_id = :flow_id AND contact_id = :contact_id`, 0}, + }, + }, + } + + type AssertionArgs struct { + FlowID models.FlowID `db:"flow_id"` + ContactID flows.ContactID `db:"contact_id"` + OrgID models.OrgID `db:"org_id"` + AgeFieldUUID assets.FieldUUID `db:"age_field_uuid"` + TestersGroupID models.GroupID `db:"testers_group_id"` + } + + args := &AssertionArgs{ + FlowID: testdata.SurveyorFlow.ID, + OrgID: testdata.Org1.ID, + AgeFieldUUID: 
testdata.AgeField.UUID, + TestersGroupID: testdata.TestersGroup.ID, + } + + dates.SetNowSource(dates.NewSequentialNowSource(time.Date(2018, 12, 21, 12, 0, 0, 0, time.UTC))) + defer dates.SetNowSource(dates.DefaultNowSource) + + for i, tc := range tcs { + testID := fmt.Sprintf("%s[token=%s]", tc.file, tc.token) + path := filepath.Join("testdata", tc.file) + submission := testsuite.ReadFile(path) + + url := "http://localhost:8090/mr/surveyor/submit" + req, err := http.NewRequest(http.MethodPost, url, bytes.NewReader(submission)) + assert.NoError(t, err) + req.Header.Set("Content-Type", "application/json") + + if tc.token != "" { + req.Header.Set("Authorization", "Token "+tc.token) + } + + resp, err := http.DefaultClient.Do(req) + assert.NoError(t, err) + assert.Equal(t, tc.expectedStatus, resp.StatusCode, "unexpected status code for %s", testID) + + body, _ := io.ReadAll(resp.Body) + assert.Containsf(t, string(body), tc.expectedContains, "%s does not contain expected body", testID) + + id, _ := jsonparser.GetInt(body, "contact", "id") + args.ContactID = flows.ContactID(id) + + // if we have assertions, check them + for ii, assertion := range tc.assertions { + rows, err := rt.DB.NamedQuery(assertion.Query, args) + assert.NoError(t, err, "%d:%d error with named query", i, ii) + + count := 0 + assert.True(t, rows.Next()) + err = rows.Scan(&count) + assert.NoError(t, err) + + assert.Equal(t, assertion.Count, count, "%d:%d mismatched counts", i, ii) + } + } +} diff --git a/web/surveyor/surveyor_test.go b/web/surveyor/surveyor_test.go deleted file mode 100644 index cd3d53413..000000000 --- a/web/surveyor/surveyor_test.go +++ /dev/null @@ -1,150 +0,0 @@ -package surveyor - -import ( - "bytes" - "fmt" - "io" - "net/http" - "path/filepath" - "sync" - "testing" - - "github.com/nyaruka/goflow/assets" - "github.com/nyaruka/goflow/flows" - "github.com/nyaruka/mailroom/core/models" - "github.com/nyaruka/mailroom/testsuite" - "github.com/nyaruka/mailroom/testsuite/testdata" - 
"github.com/nyaruka/mailroom/web" - - "github.com/buger/jsonparser" - _ "github.com/nyaruka/mailroom/core/handlers" - "github.com/stretchr/testify/assert" -) - -func TestSurveyor(t *testing.T) { - ctx, rt, db, rp := testsuite.Get() - rc := rp.Get() - defer rc.Close() - - defer testsuite.Reset(testsuite.ResetAll) - - wg := &sync.WaitGroup{} - server := web.NewServer(ctx, rt, wg) - server.Start() - defer server.Stop() - - // insert an auth token for user 1 for org 1 - db.MustExec(`INSERT INTO api_apitoken(is_active, key, created, org_id, role_id, user_id) VALUES(TRUE, 'sesame', NOW(), 1, 5, 1)`) - - type Assertion struct { - Query string - Count int - } - - tcs := []struct { - File string - Token string - StatusCode int - Contains string - Assertions []Assertion - }{ - {"contact_surveyor_submission.json", "", 401, "missing authorization", nil}, - {"contact_surveyor_submission.json", "invalid", 401, "invalid authorization", []Assertion{ - {`SELECT count(*) FROM flows_flowrun WHERE flow_id = :flow_id`, 0}, - }}, - // new contact is created (our test db already has a bob, he should be unaffected) - {"contact_surveyor_submission.json", "sesame", 201, `"status": "C"`, []Assertion{ - {`SELECT count(*) FROM flows_flowrun WHERE flow_id = :flow_id AND contact_id = :contact_id AND status = 'C'`, 1}, - {`SELECT count(*) FROM contacts_contact WHERE name = 'Bob' AND org_id = 1`, 2}, - {`SELECT count(*) FROM contacts_contact WHERE uuid = 'bdfe862c-84f8-422e-8fdc-ebfaaae0697a'`, 0}, - {`SELECT count(*) FROM contacts_contact WHERE name = 'Bob' AND fields -> :age_field_uuid = jsonb_build_object('text', '37', 'number', 37)`, 1}, - {`SELECT count(*) FROM contacts_contacturn WHERE identity = 'tel::+593979123456' AND contact_id = :contact_id`, 1}, - {`SELECT count(*) FROM contacts_contactgroup_contacts WHERE contact_id = :contact_id and contactgroup_id = :testers_group_id`, 1}, - {`SELECT count(*) FROM msgs_msg WHERE contact_id = :contact_id AND contact_urn_id IS NULL AND direction = 'O' 
AND org_id = :org_id`, 4}, - {`SELECT count(*) FROM msgs_msg WHERE contact_id = :contact_id AND contact_urn_id IS NULL AND direction = 'I' AND org_id = :org_id`, 3}, - }}, - // dupe submission should fail due to run UUIDs being duplicated - {"contact_surveyor_submission.json", "sesame", 500, `error writing runs`, []Assertion{ - {`SELECT count(*) FROM flows_flowrun WHERE flow_id = :flow_id`, 1}, - }}, - // but submission with new UUIDs should succeed, new run is created but not contact - {"contact_surveyor_submission2.json", "sesame", 201, `"status": "C"`, []Assertion{ - {`SELECT count(*) FROM flows_flowrun WHERE flow_id = :flow_id AND contact_id = :contact_id`, 2}, - {`SELECT count(*) FROM contacts_contact WHERE uuid = 'bdfe862c-84f8-422e-8fdc-ebfaaae0697a'`, 0}, - {`SELECT count(*) FROM contacts_contact WHERE name = 'Bob' AND fields -> :age_field_uuid = jsonb_build_object('text', '37', 'number', 37)`, 1}, - {`SELECT count(*) FROM contacts_contacturn WHERE identity = 'tel::+593979123456' AND contact_id = :contact_id`, 1}, - {`SELECT count(*) FROM contacts_contactgroup_contacts WHERE contact_id = :contact_id and contactgroup_id = :testers_group_id`, 1}, - {`SELECT count(*) FROM msgs_msg WHERE contact_id = :contact_id AND contact_urn_id IS NULL AND direction = 'O' AND org_id = :org_id`, 8}, - {`SELECT count(*) FROM msgs_msg WHERE contact_id = :contact_id AND contact_urn_id IS NULL AND direction = 'I' AND org_id = :org_id`, 6}, - }}, - // group removal is ONLY in the modifier - {"remove_group.json", "sesame", 201, `"status": "C"`, []Assertion{ - {`SELECT count(*) FROM flows_flowrun WHERE flow_id = :flow_id AND contact_id = :contact_id`, 3}, - {`SELECT count(*) FROM contacts_contact WHERE uuid = 'bdfe862c-84f8-422e-8fdc-ebfaaae0697a'`, 0}, - {`SELECT count(*) FROM contacts_contact WHERE name = 'Bob' AND fields -> :age_field_uuid = jsonb_build_object('text', '37', 'number', 37)`, 1}, - {`SELECT count(*) FROM contacts_contacturn WHERE identity = 'tel::+593979123456' AND 
contact_id = :contact_id`, 1}, - {`SELECT count(*) FROM contacts_contactgroup_contacts WHERE contact_id = :contact_id and contactgroup_id = :testers_group_id`, 0}, - }}, - // new contact, new session, group and field no longer exist - {"missing_group_field.json", "sesame", 201, `"status": "C"`, []Assertion{ - {`SELECT count(*) FROM flows_flowrun WHERE flow_id = :flow_id AND contact_id = :contact_id`, 1}, - {`SELECT count(*) FROM contacts_contact WHERE uuid = 'c7fa24ca-48f9-45bf-b923-f95aa49c3cd2'`, 0}, - {`SELECT count(*) FROM contacts_contact WHERE name = 'Fred' AND fields = jsonb_build_object()`, 1}, - {`SELECT count(*) FROM contacts_contacturn WHERE identity = 'tel::+593979123488' AND contact_id = :contact_id`, 1}, - {`SELECT count(*) FROM contacts_contactgroup_contacts WHERE contact_id = :contact_id and contactgroup_id = :testers_group_id`, 0}, - }}, - } - - type AssertionArgs struct { - FlowID models.FlowID `db:"flow_id"` - ContactID flows.ContactID `db:"contact_id"` - OrgID models.OrgID `db:"org_id"` - AgeFieldUUID assets.FieldUUID `db:"age_field_uuid"` - TestersGroupID models.GroupID `db:"testers_group_id"` - } - - args := &AssertionArgs{ - FlowID: testdata.SurveyorFlow.ID, - OrgID: testdata.Org1.ID, - AgeFieldUUID: testdata.AgeField.UUID, - TestersGroupID: testdata.TestersGroup.ID, - } - - for i, tc := range tcs { - testID := fmt.Sprintf("%s[token=%s]", tc.File, tc.Token) - path := filepath.Join("testdata", tc.File) - submission := testsuite.ReadFile(path) - - url := "http://localhost:8090/mr/surveyor/submit" - req, err := http.NewRequest(http.MethodPost, url, bytes.NewReader(submission)) - assert.NoError(t, err) - req.Header.Set("Content-Type", "application/json") - - if tc.Token != "" { - req.Header.Set("Authorization", "Token "+tc.Token) - } - - resp, err := http.DefaultClient.Do(req) - assert.NoError(t, err) - assert.Equal(t, tc.StatusCode, resp.StatusCode, "unexpected status code for %s", testID) - - body, _ := io.ReadAll(resp.Body) - 
assert.Containsf(t, string(body), tc.Contains, "%s does not contain expected body", testID) - - id, _ := jsonparser.GetInt(body, "contact", "id") - args.ContactID = flows.ContactID(id) - - // if we have assertions, check them - for ii, assertion := range tc.Assertions { - rows, err := db.NamedQuery(assertion.Query, args) - assert.NoError(t, err, "%d:%d error with named query", i, ii) - - count := 0 - assert.True(t, rows.Next()) - err = rows.Scan(&count) - assert.NoError(t, err) - - assert.Equal(t, assertion.Count, count, "%d:%d mismatched counts", i, ii) - } - } -} diff --git a/web/surveyor/testdata/too_old.json b/web/surveyor/testdata/too_old.json new file mode 100644 index 000000000..d50de289a --- /dev/null +++ b/web/surveyor/testdata/too_old.json @@ -0,0 +1,473 @@ +{ + "session": { + "type": "messaging_offline", + "environment": { + "date_format": "DD-MM-YYYY", + "time_format": "tt:mm", + "timezone": "Africa/Kigali", + "default_language": "eng", + "allowed_languages": [ + "eng", + "fra" + ], + "redaction_policy": "none" + }, + "trigger": { + "type": "manual", + "environment": { + "date_format": "DD-MM-YYYY", + "time_format": "tt:mm", + "timezone": "Africa/Kigali", + "default_language": "eng", + "allowed_languages": [ + "eng", + "fra" + ], + "redaction_policy": "none" + }, + "flow": { + "uuid": "ed8cf8d4-a42c-4ce1-a7e3-44a2918e3cec", + "name": "Contact Details" + }, + "contact": { + "uuid": "bdfe862c-84f8-422e-8fdc-ebfaaae0697a", + "created_on": "2017-12-18T19:14:53.707395Z" + }, + "triggered_on": "2017-12-18T19:14:53.70753Z" + }, + "contact": { + "uuid": "bdfe862c-84f8-422e-8fdc-ebfaaae0697a", + "name": "Bob", + "created_on": "2017-12-18T19:14:53.707395Z", + "urns": [ + "tel:+593979123456" + ], + "groups": [ + { + "uuid": "5e9d8fab-5e7e-4f51-b533-261af5dea70d", + "name": "Testers" + } + ], + "fields": { + "age": { + "text": "37", + "number": 37 + } + } + }, + "runs": [ + { + "uuid": "1d4f5427-a7fa-4d4c-9c70-93a73fca59da", + "flow": { + "uuid": 
"ed8cf8d4-a42c-4ce1-a7e3-44a2918e3cec", + "name": "Contact Details" + }, + "path": [ + { + "uuid": "a6c7ff4d-b23f-450a-ac16-da870f6f73be", + "node_uuid": "036901e0-abb8-4979-92cb-f0d43aeb5b68", + "exit_uuid": "706853c2-831b-4dd8-8073-cd51b21d94d6", + "arrived_on": "2017-12-18T19:14:53.711805Z" + }, + { + "uuid": "dd2c2662-437f-480c-93e5-a5150ca67d1f", + "node_uuid": "39fe1ce0-7dee-445e-9945-48c72a05cef5", + "exit_uuid": "bb999ff8-5eb3-45f6-bec6-a0430105b0ca", + "arrived_on": "2017-12-18T19:14:53.712041Z" + }, + { + "uuid": "1d0cc3ea-f5c6-409f-af1f-a9bd26c1a367", + "node_uuid": "73dda1a7-9152-45f1-993a-e7d01eb028db", + "exit_uuid": "5e7a398e-eebe-4b32-8600-374659f56d9e", + "arrived_on": "2017-12-18T19:14:55.289318Z" + }, + { + "uuid": "16c63d0d-2eaa-4a60-bc9c-b9358b8739f0", + "node_uuid": "1a7612b5-777d-4af3-a657-077c46f242d9", + "exit_uuid": "0c047d03-3b61-4ff2-8bc8-43a89cf1087b", + "arrived_on": "2017-12-18T19:14:55.289954Z" + }, + { + "uuid": "dac7fb6e-5e9d-4793-be36-b5e8756a6f6b", + "node_uuid": "2d55c61f-384c-4a07-a17e-1e42fc543dd9", + "exit_uuid": "4e483159-af9f-48a4-907f-c875fde66c70", + "arrived_on": "2017-12-18T19:14:56.721324Z" + }, + { + "uuid": "a034eccc-581a-4b84-8a6f-37ed513b53d0", + "node_uuid": "52a6784b-f51f-42c7-8c6a-3e5ec42603bb", + "exit_uuid": "34d09b52-ac85-44a4-b4f4-c7a3b489fcf8", + "arrived_on": "2017-12-18T19:14:56.730637Z" + }, + { + "uuid": "94106b99-9e4d-4bfa-9feb-dcd780d7bf89", + "node_uuid": "6d5703f9-938c-4c2f-9cc7-7d1bbe328095", + "exit_uuid": "6bab242d-85d5-4afe-b6e7-5fe7c98f187e", + "arrived_on": "2017-12-18T19:14:57.973998Z" + } + ], + "events": [ + { + "type": "msg_created", + "created_on": "2017-12-18T19:14:53.711924Z", + "step_uuid": "a6c7ff4d-b23f-450a-ac16-da870f6f73be", + "msg": { + "uuid": "c7c3e345-dcb9-487d-9102-b1cbc7deacf1", + "text": "Hi there. What's your name?" 
+ } + }, + { + "type": "msg_wait", + "created_on": "2017-12-18T19:14:53.712575Z", + "step_uuid": "dd2c2662-437f-480c-93e5-a5150ca67d1f" + }, + { + "type": "msg_received", + "created_on": "2017-12-18T19:14:55.288681Z", + "step_uuid": "dd2c2662-437f-480c-93e5-a5150ca67d1f", + "msg": { + "uuid": "2dcb9bbc-46f7-4bc6-afe9-3887209402cc", + "text": "Bob" + } + }, + { + "type": "run_result_changed", + "created_on": "2017-12-18T19:14:55.289221Z", + "step_uuid": "dd2c2662-437f-480c-93e5-a5150ca67d1f", + "name": "Name", + "value": "Bob", + "category": "All Responses", + "input": "Bob" + }, + { + "type": "contact_name_changed", + "created_on": "2017-12-18T19:14:55.289471Z", + "step_uuid": "1d0cc3ea-f5c6-409f-af1f-a9bd26c1a367", + "name": "Bob" + }, + { + "type": "msg_created", + "created_on": "2017-12-18T19:14:55.289645Z", + "step_uuid": "1d0cc3ea-f5c6-409f-af1f-a9bd26c1a367", + "msg": { + "uuid": "b6fbd0e6-35ce-495a-b3f0-8c5f9852452f", + "text": "Thanks Bob. What's your phone number?" + } + }, + { + "type": "msg_wait", + "created_on": "2017-12-18T19:14:55.290128Z", + "step_uuid": "16c63d0d-2eaa-4a60-bc9c-b9358b8739f0" + }, + { + "type": "msg_received", + "created_on": "2017-12-18T19:14:56.720739Z", + "step_uuid": "16c63d0d-2eaa-4a60-bc9c-b9358b8739f0", + "msg": { + "uuid": "ca87bcce-f113-4ad2-9c5b-96efbc0d6248", + "text": "+593979123456" + } + }, + { + "type": "run_result_changed", + "created_on": "2017-12-18T19:14:56.721229Z", + "step_uuid": "16c63d0d-2eaa-4a60-bc9c-b9358b8739f0", + "name": "Phone", + "value": "+593979123456", + "category": "phone", + "input": "+593979123456" + }, + { + "type": "contact_urns_changed", + "created_on": "2017-12-18T19:14:56.730115Z", + "step_uuid": "dac7fb6e-5e9d-4793-be36-b5e8756a6f6b", + "urns": [ + "tel:+593979123456" + ] + }, + { + "type": "contact_groups_changed", + "created_on": "2017-12-18T19:14:56.730233Z", + "step_uuid": "dac7fb6e-5e9d-4793-be36-b5e8756a6f6b", + "groups_added": [ + { + "uuid": "5e9d8fab-5e7e-4f51-b533-261af5dea70d", + 
"name": "Testers" + } + ] + }, + { + "type": "msg_created", + "created_on": "2017-12-18T19:14:56.730535Z", + "step_uuid": "dac7fb6e-5e9d-4793-be36-b5e8756a6f6b", + "msg": { + "uuid": "0c578642-8236-4b71-94a8-5f1db5bd3764", + "text": "Finally, what is your age?" + } + }, + { + "type": "msg_wait", + "created_on": "2017-12-18T19:14:56.73068Z", + "step_uuid": "a034eccc-581a-4b84-8a6f-37ed513b53d0" + }, + { + "type": "msg_received", + "created_on": "2017-12-18T19:14:57.97173Z", + "step_uuid": "a034eccc-581a-4b84-8a6f-37ed513b53d0", + "msg": { + "uuid": "fc7f9830-ec40-4a43-b92e-6eb04750c119", + "text": "37" + } + }, + { + "type": "run_result_changed", + "created_on": "2017-12-18T19:14:57.973844Z", + "step_uuid": "a034eccc-581a-4b84-8a6f-37ed513b53d0", + "name": "Age", + "value": "37", + "category": "numeric", + "input": "37" + }, + { + "type": "contact_field_changed", + "created_on": "2017-12-18T19:14:57.974455Z", + "step_uuid": "94106b99-9e4d-4bfa-9feb-dcd780d7bf89", + "field": { + "key": "age", + "name": "Age" + }, + "value": { + "text": "37", + "number": 37 + } + }, + { + "type": "msg_created", + "created_on": "2017-12-18T19:14:57.975302Z", + "step_uuid": "94106b99-9e4d-4bfa-9feb-dcd780d7bf89", + "msg": { + "uuid": "388e5968-57c8-4795-b4bf-d73916aa5f45", + "text": "Thanks Bob. 
You are 37 and your phone number is 097 912 3456" + } + } + ], + "results": { + "age": { + "name": "Age", + "value": "37", + "category": "numeric", + "node_uuid": "52a6784b-f51f-42c7-8c6a-3e5ec42603bb", + "input": "37", + "created_on": "2017-12-18T19:14:57.972203Z" + }, + "name": { + "name": "Name", + "value": "Bob", + "category": "All Responses", + "node_uuid": "39fe1ce0-7dee-445e-9945-48c72a05cef5", + "input": "Bob", + "created_on": "2017-12-18T19:14:55.288827Z" + }, + "phone": { + "name": "Phone", + "value": "+593979123456", + "category": "phone", + "node_uuid": "1a7612b5-777d-4af3-a657-077c46f242d9", + "input": "+593979123456", + "created_on": "2017-12-18T19:14:56.721077Z" + } + }, + "status": "completed", + "created_on": "2017-12-18T19:14:53.711676Z", + "modified_on": "2017-12-18T19:14:57.975537Z", + "expires_on": "2017-12-25T19:14:57.97142Z", + "exited_on": "2017-12-18T19:14:57.975537Z" + } + ], + "status": "completed", + "input": { + "type": "msg", + "uuid": "fc7f9830-ec40-4a43-b92e-6eb04750c119", + "created_on": "2017-12-18T19:14:57.971277Z", + "urn": "", + "text": "37" + } + }, + "events": [ + { + "type": "msg_created", + "created_on": "2017-12-18T19:14:53.711924Z", + "step_uuid": "a6c7ff4d-b23f-450a-ac16-da870f6f73be", + "msg": { + "uuid": "c7c3e345-dcb9-487d-9102-b1cbc7deacf1", + "text": "Hi there. What's your name?" 
+ } + }, + { + "type": "msg_wait", + "created_on": "2017-12-18T19:14:53.712575Z", + "step_uuid": "dd2c2662-437f-480c-93e5-a5150ca67d1f" + }, + { + "type": "msg_received", + "created_on": "2017-12-18T19:14:55.288681Z", + "step_uuid": "dd2c2662-437f-480c-93e5-a5150ca67d1f", + "msg": { + "uuid": "2dcb9bbc-46f7-4bc6-afe9-3887209402cc", + "text": "Bob" + } + }, + { + "type": "run_result_changed", + "created_on": "2017-12-18T19:14:55.289221Z", + "step_uuid": "dd2c2662-437f-480c-93e5-a5150ca67d1f", + "name": "Name", + "value": "Bob", + "category": "All Responses", + "input": "Bob" + }, + { + "type": "contact_name_changed", + "created_on": "2017-12-18T19:14:55.289471Z", + "step_uuid": "1d0cc3ea-f5c6-409f-af1f-a9bd26c1a367", + "name": "Bob" + }, + { + "type": "msg_created", + "created_on": "2017-12-18T19:14:55.289645Z", + "step_uuid": "1d0cc3ea-f5c6-409f-af1f-a9bd26c1a367", + "msg": { + "uuid": "b6fbd0e6-35ce-495a-b3f0-8c5f9852452f", + "text": "Thanks Bob. What's your phone number?" + } + }, + { + "type": "msg_wait", + "created_on": "2017-12-18T19:14:55.290128Z", + "step_uuid": "16c63d0d-2eaa-4a60-bc9c-b9358b8739f0" + }, + { + "type": "msg_received", + "created_on": "2017-12-18T19:14:56.720739Z", + "step_uuid": "16c63d0d-2eaa-4a60-bc9c-b9358b8739f0", + "msg": { + "uuid": "ca87bcce-f113-4ad2-9c5b-96efbc0d6248", + "text": "+593979123456" + } + }, + { + "type": "run_result_changed", + "created_on": "2017-12-18T19:14:56.721229Z", + "step_uuid": "16c63d0d-2eaa-4a60-bc9c-b9358b8739f0", + "name": "Phone", + "value": "+593979123456", + "category": "phone", + "input": "+593979123456" + }, + { + "type": "contact_urns_changed", + "created_on": "2017-12-18T19:14:56.730115Z", + "step_uuid": "dac7fb6e-5e9d-4793-be36-b5e8756a6f6b", + "urns": [ + "tel:+593979123456" + ] + }, + { + "type": "contact_groups_changed", + "created_on": "2017-12-18T19:14:56.730233Z", + "step_uuid": "dac7fb6e-5e9d-4793-be36-b5e8756a6f6b", + "groups_added": [ + { + "uuid": "5e9d8fab-5e7e-4f51-b533-261af5dea70d", + 
"name": "Testers" + } + ] + }, + { + "type": "msg_created", + "created_on": "2017-12-18T19:14:56.730535Z", + "step_uuid": "dac7fb6e-5e9d-4793-be36-b5e8756a6f6b", + "msg": { + "uuid": "0c578642-8236-4b71-94a8-5f1db5bd3764", + "text": "Finally, what is your age?" + } + }, + { + "type": "msg_wait", + "created_on": "2017-12-18T19:14:56.73068Z", + "step_uuid": "a034eccc-581a-4b84-8a6f-37ed513b53d0" + }, + { + "type": "msg_received", + "created_on": "2017-12-18T19:14:57.97173Z", + "step_uuid": "a034eccc-581a-4b84-8a6f-37ed513b53d0", + "msg": { + "uuid": "fc7f9830-ec40-4a43-b92e-6eb04750c119", + "text": "37" + } + }, + { + "type": "run_result_changed", + "created_on": "2017-12-18T19:14:57.973844Z", + "step_uuid": "a034eccc-581a-4b84-8a6f-37ed513b53d0", + "name": "Age", + "value": "37", + "category": "numeric", + "input": "37" + }, + { + "type": "contact_field_changed", + "created_on": "2017-12-18T19:14:57.974455Z", + "step_uuid": "94106b99-9e4d-4bfa-9feb-dcd780d7bf89", + "field": { + "key": "age", + "name": "Age" + }, + "value": { + "text": "37", + "number": 37 + } + }, + { + "type": "msg_created", + "created_on": "2017-12-18T19:14:57.975302Z", + "step_uuid": "94106b99-9e4d-4bfa-9feb-dcd780d7bf89", + "msg": { + "uuid": "388e5968-57c8-4795-b4bf-d73916aa5f45", + "text": "Thanks Bob. 
You are 37 and your phone number is 097 912 3456" + } + } + ], + "modifiers": [ + { + "type": "name", + "name": "Bob" + }, + { + "type": "urn", + "urn": "tel:+593979123456", + "modification": "append" + }, + { + "type": "groups", + "groups": [ + { + "uuid": "5e9d8fab-5e7e-4f51-b533-261af5dea70d", + "name": "Testers" + } + ], + "modification": "add" + }, + { + "type": "field", + "field": { + "key": "age", + "name": "Age" + }, + "value": { + "text": "37", + "number": 37 + } + } + ] +} \ No newline at end of file diff --git a/web/surveyor/testdata/contact_surveyor_submission.json b/web/surveyor/testdata/valid_submission1.json similarity index 100% rename from web/surveyor/testdata/contact_surveyor_submission.json rename to web/surveyor/testdata/valid_submission1.json diff --git a/web/surveyor/testdata/contact_surveyor_submission2.json b/web/surveyor/testdata/valid_submission2.json similarity index 100% rename from web/surveyor/testdata/contact_surveyor_submission2.json rename to web/surveyor/testdata/valid_submission2.json diff --git a/web/ticket/add_note.go b/web/ticket/add_note.go index b784dd5a0..c47fd27c5 100644 --- a/web/ticket/add_note.go +++ b/web/ticket/add_note.go @@ -11,8 +11,7 @@ import ( ) func init() { - web.RegisterJSONRoute(http.MethodPost, "/mr/ticket/note", web.RequireAuthToken(handleAddNote)) // deprecated - web.RegisterJSONRoute(http.MethodPost, "/mr/ticket/add_note", web.RequireAuthToken(handleAddNote)) + web.RegisterRoute(http.MethodPost, "/mr/ticket/add_note", web.RequireAuthToken(web.JSONPayload(handleAddNote))) } type addNoteRequest struct { @@ -29,26 +28,20 @@ type addNoteRequest struct { // "ticket_ids": [1234, 2345], // "note": "spam" // } -func handleAddNote(ctx context.Context, rt *runtime.Runtime, r *http.Request) (interface{}, int, error) { - request := &addNoteRequest{} - if err := web.ReadAndValidateJSON(r, request); err != nil { - return errors.Wrapf(err, "request failed validation"), http.StatusBadRequest, nil - } - - // grab our org 
assets - oa, err := models.GetOrgAssets(ctx, rt, request.OrgID) +func handleAddNote(ctx context.Context, rt *runtime.Runtime, r *addNoteRequest) (any, int, error) { + oa, err := models.GetOrgAssets(ctx, rt, r.OrgID) if err != nil { - return nil, http.StatusInternalServerError, errors.Wrapf(err, "unable to load org assets") + return nil, 0, errors.Wrap(err, "unable to load org assets") } - tickets, err := models.LoadTickets(ctx, rt.DB, request.TicketIDs) + tickets, err := models.LoadTickets(ctx, rt.DB, r.TicketIDs) if err != nil { - return nil, http.StatusBadRequest, errors.Wrapf(err, "error loading tickets for org: %d", request.OrgID) + return nil, 0, errors.Wrapf(err, "error loading tickets for org: %d", r.OrgID) } - evts, err := models.TicketsAddNote(ctx, rt.DB, oa, request.UserID, tickets, request.Note) + evts, err := models.TicketsAddNote(ctx, rt.DB, oa, r.UserID, tickets, r.Note) if err != nil { - return nil, http.StatusInternalServerError, errors.Wrap(err, "error adding notes to tickets") + return nil, 0, errors.Wrap(err, "error adding notes to tickets") } return newBulkResponse(evts), http.StatusOK, nil diff --git a/web/ticket/assign.go b/web/ticket/assign.go index 1a03def5f..6f51cbde0 100644 --- a/web/ticket/assign.go +++ b/web/ticket/assign.go @@ -11,14 +11,13 @@ import ( ) func init() { - web.RegisterJSONRoute(http.MethodPost, "/mr/ticket/assign", web.RequireAuthToken(handleAssign)) + web.RegisterRoute(http.MethodPost, "/mr/ticket/assign", web.RequireAuthToken(web.JSONPayload(handleAssign))) } type assignRequest struct { bulkTicketRequest AssigneeID models.UserID `json:"assignee_id"` - Note string `json:"note"` } // Assigns the tickets with the given ids to the given user @@ -27,29 +26,22 @@ type assignRequest struct { // "org_id": 123, // "user_id": 234, // "ticket_ids": [1234, 2345], -// "assignee_id": 567, -// "note": "please look at these" +// "assignee_id": 567 // } -func handleAssign(ctx context.Context, rt *runtime.Runtime, r *http.Request) 
(interface{}, int, error) { - request := &assignRequest{} - if err := web.ReadAndValidateJSON(r, request); err != nil { - return errors.Wrapf(err, "request failed validation"), http.StatusBadRequest, nil - } - - // grab our org assets - oa, err := models.GetOrgAssets(ctx, rt, request.OrgID) +func handleAssign(ctx context.Context, rt *runtime.Runtime, r *assignRequest) (any, int, error) { + oa, err := models.GetOrgAssets(ctx, rt, r.OrgID) if err != nil { - return nil, http.StatusInternalServerError, errors.Wrapf(err, "unable to load org assets") + return nil, 0, errors.Wrap(err, "unable to load org assets") } - tickets, err := models.LoadTickets(ctx, rt.DB, request.TicketIDs) + tickets, err := models.LoadTickets(ctx, rt.DB, r.TicketIDs) if err != nil { - return nil, http.StatusBadRequest, errors.Wrapf(err, "error loading tickets for org: %d", request.OrgID) + return nil, 0, errors.Wrapf(err, "error loading tickets for org: %d", r.OrgID) } - evts, err := models.TicketsAssign(ctx, rt.DB, oa, request.UserID, tickets, request.AssigneeID, request.Note) + evts, err := models.TicketsAssign(ctx, rt.DB, oa, r.UserID, tickets, r.AssigneeID) if err != nil { - return nil, http.StatusInternalServerError, errors.Wrap(err, "error assigning tickets") + return nil, 0, errors.Wrap(err, "error assigning tickets") } return newBulkResponse(evts), http.StatusOK, nil diff --git a/web/ticket/base_test.go b/web/ticket/base_test.go index c25ae085d..2e2f9d0f6 100644 --- a/web/ticket/base_test.go +++ b/web/ticket/base_test.go @@ -4,73 +4,77 @@ import ( "testing" "time" + _ "github.com/nyaruka/mailroom/services/tickets/intern" _ "github.com/nyaruka/mailroom/services/tickets/mailgun" _ "github.com/nyaruka/mailroom/services/tickets/zendesk" "github.com/nyaruka/mailroom/testsuite" "github.com/nyaruka/mailroom/testsuite/testdata" - "github.com/nyaruka/mailroom/web" ) func TestTicketAssign(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer 
testsuite.Reset(testsuite.ResetData) - testdata.InsertOpenTicket(db, testdata.Org1, testdata.Cathy, testdata.Internal, testdata.DefaultTopic, "Have you seen my cookies?", "17", time.Now(), testdata.Admin) - testdata.InsertOpenTicket(db, testdata.Org1, testdata.Cathy, testdata.Internal, testdata.DefaultTopic, "Have you seen my cookies?", "21", time.Now(), testdata.Agent) - testdata.InsertClosedTicket(db, testdata.Org1, testdata.Cathy, testdata.Internal, testdata.DefaultTopic, "Have you seen my cookies?", "34", nil) - testdata.InsertClosedTicket(db, testdata.Org1, testdata.Bob, testdata.Internal, testdata.DefaultTopic, "", "", nil) + testdata.InsertOpenTicket(rt, testdata.Org1, testdata.Cathy, testdata.Internal, testdata.DefaultTopic, "Have you seen my cookies?", "17", time.Now(), testdata.Admin) + testdata.InsertOpenTicket(rt, testdata.Org1, testdata.Cathy, testdata.Internal, testdata.DefaultTopic, "Have you seen my cookies?", "21", time.Now(), testdata.Agent) + testdata.InsertClosedTicket(rt, testdata.Org1, testdata.Cathy, testdata.Internal, testdata.DefaultTopic, "Have you seen my cookies?", "34", nil) + testdata.InsertClosedTicket(rt, testdata.Org1, testdata.Bob, testdata.Internal, testdata.DefaultTopic, "", "", nil) - web.RunWebTests(t, ctx, rt, "testdata/assign.json", nil) + testsuite.RunWebTests(t, ctx, rt, "testdata/assign.json", nil) } func TestTicketAddNote(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetData) - testdata.InsertOpenTicket(db, testdata.Org1, testdata.Cathy, testdata.Internal, testdata.DefaultTopic, "Have you seen my cookies?", "17", time.Now(), testdata.Admin) - testdata.InsertOpenTicket(db, testdata.Org1, testdata.Cathy, testdata.Internal, testdata.DefaultTopic, "Have you seen my cookies?", "21", time.Now(), testdata.Agent) - testdata.InsertClosedTicket(db, testdata.Org1, testdata.Cathy, testdata.Internal, testdata.DefaultTopic, "Have you seen my cookies?", "34", nil) 
+ testdata.InsertOpenTicket(rt, testdata.Org1, testdata.Cathy, testdata.Internal, testdata.DefaultTopic, "Have you seen my cookies?", "17", time.Now(), testdata.Admin) + testdata.InsertOpenTicket(rt, testdata.Org1, testdata.Cathy, testdata.Internal, testdata.DefaultTopic, "Have you seen my cookies?", "21", time.Now(), testdata.Agent) + testdata.InsertClosedTicket(rt, testdata.Org1, testdata.Cathy, testdata.Internal, testdata.DefaultTopic, "Have you seen my cookies?", "34", nil) - web.RunWebTests(t, ctx, rt, "testdata/add_note.json", nil) + testsuite.RunWebTests(t, ctx, rt, "testdata/add_note.json", nil) } func TestTicketChangeTopic(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer testsuite.Reset(testsuite.ResetData) - testdata.InsertOpenTicket(db, testdata.Org1, testdata.Cathy, testdata.Internal, testdata.DefaultTopic, "Have you seen my cookies?", "17", time.Now(), testdata.Admin) - testdata.InsertOpenTicket(db, testdata.Org1, testdata.Cathy, testdata.Internal, testdata.SupportTopic, "Have you seen my cookies?", "21", time.Now(), testdata.Agent) - testdata.InsertClosedTicket(db, testdata.Org1, testdata.Cathy, testdata.Internal, testdata.SalesTopic, "Have you seen my cookies?", "34", nil) + testdata.InsertOpenTicket(rt, testdata.Org1, testdata.Cathy, testdata.Internal, testdata.DefaultTopic, "Have you seen my cookies?", "17", time.Now(), testdata.Admin) + testdata.InsertOpenTicket(rt, testdata.Org1, testdata.Cathy, testdata.Internal, testdata.SupportTopic, "Have you seen my cookies?", "21", time.Now(), testdata.Agent) + testdata.InsertClosedTicket(rt, testdata.Org1, testdata.Cathy, testdata.Internal, testdata.SalesTopic, "Have you seen my cookies?", "34", nil) - web.RunWebTests(t, ctx, rt, "testdata/change_topic.json", nil) + testsuite.RunWebTests(t, ctx, rt, "testdata/change_topic.json", nil) } func TestTicketClose(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() defer 
testsuite.Reset(testsuite.ResetData) // create 2 open tickets and 1 closed one for Cathy across two different ticketers - testdata.InsertOpenTicket(db, testdata.Org1, testdata.Cathy, testdata.Mailgun, testdata.DefaultTopic, "Have you seen my cookies?", "17", time.Now(), testdata.Admin) - testdata.InsertOpenTicket(db, testdata.Org1, testdata.Cathy, testdata.Zendesk, testdata.DefaultTopic, "Have you seen my cookies?", "21", time.Now(), nil) - testdata.InsertClosedTicket(db, testdata.Org1, testdata.Cathy, testdata.Zendesk, testdata.DefaultTopic, "Have you seen my cookies?", "34", testdata.Editor) - testdata.InsertOpenTicket(db, testdata.Org1, testdata.Cathy, testdata.Zendesk, testdata.DefaultTopic, "Have you seen my cookies?", "21", time.Now(), nil) + testdata.InsertOpenTicket(rt, testdata.Org1, testdata.Cathy, testdata.Mailgun, testdata.DefaultTopic, "Have you seen my cookies?", "17", time.Now(), testdata.Admin) + testdata.InsertOpenTicket(rt, testdata.Org1, testdata.Cathy, testdata.Zendesk, testdata.DefaultTopic, "Have you seen my cookies?", "21", time.Now(), nil) + testdata.InsertClosedTicket(rt, testdata.Org1, testdata.Cathy, testdata.Zendesk, testdata.DefaultTopic, "Have you seen my cookies?", "34", testdata.Editor) + testdata.InsertOpenTicket(rt, testdata.Org1, testdata.Cathy, testdata.Zendesk, testdata.DefaultTopic, "Have you seen my cookies?", "21", time.Now(), nil) - web.RunWebTests(t, ctx, rt, "testdata/close.json", nil) + testsuite.RunWebTests(t, ctx, rt, "testdata/close.json", nil) } func TestTicketReopen(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() - defer testsuite.Reset(testsuite.ResetData) + defer testsuite.Reset(testsuite.ResetData | testsuite.ResetRedis) + + // we should be able to reopen ticket #1 because Cathy has no other tickets open + testdata.InsertClosedTicket(rt, testdata.Org1, testdata.Cathy, testdata.Mailgun, testdata.DefaultTopic, "Have you seen my cookies?", "17", testdata.Admin) + + // but then we 
won't be able to open ticket #2 + testdata.InsertClosedTicket(rt, testdata.Org1, testdata.Cathy, testdata.Zendesk, testdata.DefaultTopic, "Have you seen my cookies?", "21", nil) - // create 2 closed tickets and 1 open one for Cathy - testdata.InsertClosedTicket(db, testdata.Org1, testdata.Cathy, testdata.Mailgun, testdata.DefaultTopic, "Have you seen my cookies?", "17", testdata.Admin) - testdata.InsertClosedTicket(db, testdata.Org1, testdata.Cathy, testdata.Zendesk, testdata.DefaultTopic, "Have you seen my cookies?", "21", nil) - testdata.InsertOpenTicket(db, testdata.Org1, testdata.Cathy, testdata.Zendesk, testdata.DefaultTopic, "Have you seen my cookies?", "34", time.Now(), testdata.Editor) + testdata.InsertClosedTicket(rt, testdata.Org1, testdata.Bob, testdata.Zendesk, testdata.DefaultTopic, "Have you seen my cookies?", "27", testdata.Editor) + testdata.InsertClosedTicket(rt, testdata.Org1, testdata.Alexandria, testdata.Internal, testdata.DefaultTopic, "Have you seen my cookies?", "", testdata.Editor) - web.RunWebTests(t, ctx, rt, "testdata/reopen.json", nil) + testsuite.RunWebTests(t, ctx, rt, "testdata/reopen.json", nil) } diff --git a/web/ticket/change_topic.go b/web/ticket/change_topic.go index 7abbde879..5b1a577a3 100644 --- a/web/ticket/change_topic.go +++ b/web/ticket/change_topic.go @@ -11,7 +11,7 @@ import ( ) func init() { - web.RegisterJSONRoute(http.MethodPost, "/mr/ticket/change_topic", web.RequireAuthToken(handleChangeTopic)) + web.RegisterRoute(http.MethodPost, "/mr/ticket/change_topic", web.RequireAuthToken(web.JSONPayload(handleChangeTopic))) } type changeTopicRequest struct { @@ -28,26 +28,20 @@ type changeTopicRequest struct { // "ticket_ids": [1234, 2345], // "topic_id": 345 // } -func handleChangeTopic(ctx context.Context, rt *runtime.Runtime, r *http.Request) (interface{}, int, error) { - request := &changeTopicRequest{} - if err := web.ReadAndValidateJSON(r, request); err != nil { - return errors.Wrapf(err, "request failed validation"), 
http.StatusBadRequest, nil - } - - // grab our org assets - oa, err := models.GetOrgAssets(ctx, rt, request.OrgID) +func handleChangeTopic(ctx context.Context, rt *runtime.Runtime, r *changeTopicRequest) (any, int, error) { + oa, err := models.GetOrgAssets(ctx, rt, r.OrgID) if err != nil { - return nil, http.StatusInternalServerError, errors.Wrapf(err, "unable to load org assets") + return nil, 0, errors.Wrap(err, "unable to load org assets") } - tickets, err := models.LoadTickets(ctx, rt.DB, request.TicketIDs) + tickets, err := models.LoadTickets(ctx, rt.DB, r.TicketIDs) if err != nil { - return nil, http.StatusBadRequest, errors.Wrapf(err, "error loading tickets for org: %d", request.OrgID) + return nil, 0, errors.Wrapf(err, "error loading tickets for org: %d", r.OrgID) } - evts, err := models.TicketsChangeTopic(ctx, rt.DB, oa, request.UserID, tickets, request.TopicID) + evts, err := models.TicketsChangeTopic(ctx, rt.DB, oa, r.UserID, tickets, r.TopicID) if err != nil { - return nil, http.StatusInternalServerError, errors.Wrap(err, "error changing topic of tickets") + return nil, 0, errors.Wrap(err, "error changing topic of tickets") } return newBulkResponse(evts), http.StatusOK, nil diff --git a/web/ticket/close.go b/web/ticket/close.go index 753f32f61..a6792da58 100644 --- a/web/ticket/close.go +++ b/web/ticket/close.go @@ -12,7 +12,7 @@ import ( ) func init() { - web.RegisterJSONRoute(http.MethodPost, "/mr/ticket/close", web.RequireAuthToken(web.WithHTTPLogs(handleClose))) + web.RegisterRoute(http.MethodPost, "/mr/ticket/close", web.RequireAuthToken(web.MarshaledResponse(web.WithHTTPLogs(handleClose)))) } // Closes any open tickets with the given ids. 
If force=true then even if tickets can't be closed on external service, @@ -24,7 +24,7 @@ func init() { // "ticket_ids": [1234, 2345], // "force": false // } -func handleClose(ctx context.Context, rt *runtime.Runtime, r *http.Request, l *models.HTTPLogger) (interface{}, int, error) { +func handleClose(ctx context.Context, rt *runtime.Runtime, r *http.Request, l *models.HTTPLogger) (any, int, error) { request := &bulkTicketRequest{} if err := web.ReadAndValidateJSON(r, request); err != nil { return errors.Wrapf(err, "request failed validation"), http.StatusBadRequest, nil @@ -33,17 +33,17 @@ func handleClose(ctx context.Context, rt *runtime.Runtime, r *http.Request, l *m // grab our org assets oa, err := models.GetOrgAssets(ctx, rt, request.OrgID) if err != nil { - return nil, http.StatusInternalServerError, errors.Wrapf(err, "unable to load org assets") + return nil, 0, errors.Wrap(err, "unable to load org assets") } tickets, err := models.LoadTickets(ctx, rt.DB, request.TicketIDs) if err != nil { - return nil, http.StatusBadRequest, errors.Wrapf(err, "error loading tickets for org: %d", request.OrgID) + return nil, 0, errors.Wrapf(err, "error loading tickets for org: %d", request.OrgID) } evts, err := models.CloseTickets(ctx, rt, oa, request.UserID, tickets, true, request.Force, l) if err != nil { - return nil, http.StatusInternalServerError, errors.Wrap(err, "error closing tickets") + return nil, 0, errors.Wrap(err, "error closing tickets") } rc := rt.RP.Get() @@ -52,7 +52,7 @@ func handleClose(ctx context.Context, rt *runtime.Runtime, r *http.Request, l *m for t, e := range evts { err = handler.QueueTicketEvent(rc, t.ContactID(), e) if err != nil { - return nil, http.StatusInternalServerError, errors.Wrapf(err, "error queueing ticket event for ticket %d", t.ID()) + return nil, 0, errors.Wrapf(err, "error queueing ticket event for ticket %d", t.ID()) } } diff --git a/web/ticket/reopen.go b/web/ticket/reopen.go index 18b46d489..812c0a3b0 100644 --- 
a/web/ticket/reopen.go +++ b/web/ticket/reopen.go @@ -3,16 +3,17 @@ package ticket import ( "context" "net/http" + "time" "github.com/nyaruka/mailroom/core/models" "github.com/nyaruka/mailroom/runtime" "github.com/nyaruka/mailroom/web" - "github.com/pkg/errors" + "golang.org/x/exp/maps" ) func init() { - web.RegisterJSONRoute(http.MethodPost, "/mr/ticket/reopen", web.RequireAuthToken(web.WithHTTPLogs(handleReopen))) + web.RegisterRoute(http.MethodPost, "/mr/ticket/reopen", web.RequireAuthToken(web.MarshaledResponse(web.WithHTTPLogs(handleReopen)))) } // Reopens any closed tickets with the given ids @@ -22,27 +23,83 @@ func init() { // "user_id": 234, // "ticket_ids": [1234, 2345] // } -func handleReopen(ctx context.Context, rt *runtime.Runtime, r *http.Request, l *models.HTTPLogger) (interface{}, int, error) { +func handleReopen(ctx context.Context, rt *runtime.Runtime, r *http.Request, l *models.HTTPLogger) (any, int, error) { request := &bulkTicketRequest{} if err := web.ReadAndValidateJSON(r, request); err != nil { - return errors.Wrapf(err, "request failed validation"), http.StatusBadRequest, nil + return errors.Wrap(err, "request failed validation"), http.StatusBadRequest, nil } // grab our org assets oa, err := models.GetOrgAssets(ctx, rt, request.OrgID) if err != nil { - return nil, http.StatusInternalServerError, errors.Wrapf(err, "unable to load org assets") + return nil, 0, errors.Wrap(err, "unable to load org assets") } tickets, err := models.LoadTickets(ctx, rt.DB, request.TicketIDs) if err != nil { - return nil, http.StatusBadRequest, errors.Wrapf(err, "error loading tickets for org: %d", request.OrgID) + return nil, 0, errors.Wrapf(err, "error loading tickets for org: %d", request.OrgID) + } + + // organize last opened ticket by contact (we know we can't open more than one ticket per contact) + ticketByContact := make(map[models.ContactID]*models.Ticket, len(request.TicketIDs)) + for _, t := range tickets { + if ticketByContact[t.ContactID()] == nil { 
+ ticketByContact[t.ContactID()] = t + } + } + + results := make(map[*models.Ticket]*models.TicketEvent, len(tickets)) + remaining := ticketByContact + start := time.Now() + + for len(remaining) > 0 && time.Since(start) < time.Second*10 { + evts, skipped, err := tryToLockAndReopen(ctx, rt, oa, remaining, request.UserID, l) + if err != nil { + return nil, 0, err + } + + maps.Copy(results, evts) + + remaining = skipped } - evts, err := models.ReopenTickets(ctx, rt, oa, request.UserID, tickets, true, l) + return newBulkResponse(results), http.StatusOK, nil +} + +func tryToLockAndReopen(ctx context.Context, rt *runtime.Runtime, oa *models.OrgAssets, tickets map[models.ContactID]*models.Ticket, userID models.UserID, l *models.HTTPLogger) (map[*models.Ticket]*models.TicketEvent, map[models.ContactID]*models.Ticket, error) { + locks, skipped, err := models.LockContacts(ctx, rt, oa.OrgID(), maps.Keys(tickets), time.Second) if err != nil { - return nil, http.StatusBadRequest, errors.Wrapf(err, "error reopening tickets for org: %d", request.OrgID) + return nil, nil, err } - return newBulkResponse(evts), http.StatusOK, nil + locked := maps.Keys(locks) + + defer models.UnlockContacts(rt, oa.OrgID(), locks) + + // load our contacts + contacts, err := models.LoadContacts(ctx, rt.DB, oa, locked) + if err != nil { + return nil, nil, errors.Wrap(err, "unable to load contacts") + } + + // filter tickets to those belonging to contacts without an open ticket + reopenable := make([]*models.Ticket, 0, len(contacts)) + for _, c := range contacts { + if c.Ticket() == nil { + reopenable = append(reopenable, tickets[c.ID()]) + } + } + + evts, err := models.ReopenTickets(ctx, rt, oa, userID, reopenable, true, l) + if err != nil { + return nil, nil, errors.Wrap(err, "error reopening tickets") + } + + skippedTickets := make(map[models.ContactID]*models.Ticket, len(skipped)) + for _, c := range skipped { + skippedTickets[c] = tickets[c] + } + + return evts, skippedTickets, nil + } diff --git 
a/web/ticket/testdata/assign.json b/web/ticket/testdata/assign.json index 2cde0f7f5..32bd9624e 100644 --- a/web/ticket/testdata/assign.json +++ b/web/ticket/testdata/assign.json @@ -11,8 +11,7 @@ 2, 3 ], - "assignee_id": 6, - "note": "please handle" + "assignee_id": 6 }, "status": 200, "response": { @@ -27,7 +26,7 @@ "count": 3 }, { - "query": "SELECT count(*) FROM tickets_ticketevent WHERE event_type = 'A' AND created_by_id = 3 AND note = 'please handle'", + "query": "SELECT count(*) FROM tickets_ticketevent WHERE event_type = 'A' AND created_by_id = 3", "count": 2 } ] diff --git a/web/ticket/testdata/reopen.json b/web/ticket/testdata/reopen.json index c995abecc..b1498c4a9 100644 --- a/web/ticket/testdata/reopen.json +++ b/web/ticket/testdata/reopen.json @@ -1,6 +1,6 @@ [ { - "label": "reopens the given mailgun tickets", + "label": "reopens the given mailgun ticket", "http_mocks": { "https://api.mailgun.net/v3/tickets.rapidpro.io/messages": [ { @@ -15,8 +15,7 @@ "org_id": 1, "user_id": 3, "ticket_ids": [ - 1, - 3 + 1 ] }, "status": 200, @@ -28,11 +27,11 @@ "db_assertions": [ { "query": "SELECT count(*) FROM tickets_ticket WHERE status = 'C'", - "count": 1 + "count": 3 }, { "query": "SELECT count(*) FROM tickets_ticket WHERE status = 'O'", - "count": 2 + "count": 1 }, { "query": "SELECT count(*) FROM tickets_ticketevent WHERE event_type = 'R' AND created_by_id = 3", @@ -43,7 +42,7 @@ { "label": "reopens the given zendesk tickets", "http_mocks": { - "https://nyaruka.zendesk.com/api/v2/tickets/update_many.json?ids=21": [ + "https://nyaruka.zendesk.com/api/v2/tickets/update_many.json?ids=27": [ { "status": 200, "body": "{\"job_status\":{\"id\":\"1234\",\"status\":\"queued\"}}" @@ -56,29 +55,58 @@ "org_id": 1, "user_id": 3, "ticket_ids": [ - 1, - 2 + 2, + 3 ] }, "status": 200, "response": { "changed_ids": [ - 2 + 3 ] }, "db_assertions": [ { "query": "SELECT count(*) FROM tickets_ticket WHERE status = 'C'", - "count": 0 + "count": 2 }, { "query": "SELECT count(*) FROM 
tickets_ticket WHERE status = 'O'", - "count": 3 + "count": 2 }, { "query": "SELECT count(*) FROM tickets_ticketevent WHERE event_type = 'R' AND created_by_id = 3", "count": 2 } ] + }, + { + "label": "reopens the given internal ticket", + "http_mocks": {}, + "method": "POST", + "path": "/mr/ticket/reopen", + "body": { + "org_id": 1, + "user_id": 3, + "ticket_ids": [ + 4 + ] + }, + "status": 200, + "response": { + "changed_ids": [ + 4 + ] + }, + "db_assertions": [ + { + "query": "SELECT count(*) FROM tickets_ticket WHERE status = 'C'", + "count": 1 + }, + { + "query": "SELECT count(*) FROM tickets_ticket WHERE status = 'O'", + "count": 3 + } + ] } ] \ No newline at end of file diff --git a/web/wrappers.go b/web/wrappers.go index f5619ac1c..12376d707 100644 --- a/web/wrappers.go +++ b/web/wrappers.go @@ -8,86 +8,115 @@ import ( "github.com/nyaruka/mailroom/core/models" "github.com/nyaruka/mailroom/runtime" - "github.com/pkg/errors" ) -// RequireUserToken wraps a JSON handler to require passing of an API token via the authorization header -func RequireUserToken(handler JSONHandler) JSONHandler { - return func(ctx context.Context, rt *runtime.Runtime, r *http.Request) (interface{}, int, error) { +type JSONHandler[T any] func(ctx context.Context, rt *runtime.Runtime, request *T) (any, int, error) + +func JSONPayload[T any](handler JSONHandler[T]) Handler { + return MarshaledResponse(func(ctx context.Context, rt *runtime.Runtime, r *http.Request) (any, int, error) { + payload := new(T) + + if err := ReadAndValidateJSON(r, payload); err != nil { + return errors.Wrap(err, "request failed validation"), http.StatusBadRequest, nil + } + + return handler(ctx, rt, payload) + }) +} + +type MarshaledHandler func(ctx context.Context, rt *runtime.Runtime, r *http.Request) (any, int, error) + +// MarshaledResponse wraps a handler to change the signature so that the return value is marshaled as the response +func MarshaledResponse(handler MarshaledHandler) Handler { + return func(ctx 
context.Context, rt *runtime.Runtime, r *http.Request, w http.ResponseWriter) error { + value, status, err := handler(ctx, rt, r) + if err != nil { + return err + } + + // handler returned an error to use as the response + asError, isError := value.(error) + if isError { + value = NewErrorResponse(asError) + } + + return WriteMarshalled(w, status, value) + } +} + +var sqlLookupAPIToken = ` +SELECT user_id, org_id + FROM api_apitoken t + JOIN orgs_org o ON t.org_id = o.id + JOIN auth_group g ON t.role_id = g.id + JOIN auth_user u ON t.user_id = u.id + WHERE key = $1 AND g.name IN ('Administrators', 'Editors', 'Surveyors') AND t.is_active AND o.is_active AND u.is_active` + +// RequireUserToken wraps a handler to require passing of an API token via the authorization header +func RequireUserToken(handler Handler) Handler { + return func(ctx context.Context, rt *runtime.Runtime, r *http.Request, w http.ResponseWriter) error { token := r.Header.Get("authorization") + if !strings.HasPrefix(token, "Token ") { - return errors.New("missing authorization header"), http.StatusUnauthorized, nil + return WriteMarshalled(w, http.StatusUnauthorized, NewErrorResponse(errors.New("missing authorization token"))) } - // pull out the actual token - token = token[6:] + token = token[6:] // pull out the actual token // try to look it up - rows, err := rt.DB.QueryContext(ctx, ` - SELECT - user_id, - org_id - FROM - api_apitoken t - JOIN orgs_org o ON t.org_id = o.id - JOIN auth_group g ON t.role_id = g.id - JOIN auth_user u ON t.user_id = u.id - WHERE - key = $1 AND - g.name IN ('Administrators', 'Editors', 'Surveyors') AND - t.is_active = TRUE AND - o.is_active = TRUE AND - u.is_active = TRUE - `, token) + rows, err := rt.DB.QueryContext(ctx, sqlLookupAPIToken, token) if err != nil { - return errors.Wrapf(err, "error looking up authorization header"), http.StatusUnauthorized, nil + return errors.Wrap(err, "error querying API token") } + defer rows.Close() if !rows.Next() { - return 
errors.Errorf("invalid authorization header"), http.StatusUnauthorized, nil + return WriteMarshalled(w, http.StatusUnauthorized, NewErrorResponse(errors.New("invalid authorization token"))) } var userID int64 var orgID models.OrgID err = rows.Scan(&userID, &orgID) if err != nil { - return nil, 0, errors.Wrapf(err, "error scanning auth row") + return errors.Wrap(err, "error scanning auth row") } - // we are authenticated set our user id ang org id on our context and call our sub handler + // we are authenticated, set our user id and org id on our context and continue ctx = context.WithValue(ctx, UserIDKey, userID) ctx = context.WithValue(ctx, OrgIDKey, orgID) - return handler(ctx, rt, r) + + return handler(ctx, rt, r, w) } } // RequireAuthToken wraps a handler to require that our request to have our global authorization header -func RequireAuthToken(handler JSONHandler) JSONHandler { - return func(ctx context.Context, rt *runtime.Runtime, r *http.Request) (interface{}, int, error) { +func RequireAuthToken(handler Handler) Handler { + return func(ctx context.Context, rt *runtime.Runtime, r *http.Request, w http.ResponseWriter) error { auth := r.Header.Get("authorization") + if rt.Config.AuthToken != "" && fmt.Sprintf("Token %s", rt.Config.AuthToken) != auth { - return fmt.Errorf("invalid or missing authorization header, denying"), http.StatusUnauthorized, nil + return WriteMarshalled(w, http.StatusUnauthorized, NewErrorResponse(errors.New("invalid or missing authorization header"))) } // we are authenticated, call our chain - return handler(ctx, rt, r) + return handler(ctx, rt, r, w) } } // LoggingJSONHandler is a JSON web handler which logs HTTP logs -type LoggingJSONHandler func(ctx context.Context, rt *runtime.Runtime, r *http.Request, l *models.HTTPLogger) (interface{}, int, error) +type LoggingJSONHandler func(ctx context.Context, rt *runtime.Runtime, r *http.Request, l *models.HTTPLogger) (any, int, error) // WithHTTPLogs wraps a handler to create a handler
which can record and save HTTP logs -func WithHTTPLogs(handler LoggingJSONHandler) JSONHandler { - return func(ctx context.Context, rt *runtime.Runtime, r *http.Request) (interface{}, int, error) { +func WithHTTPLogs(handler LoggingJSONHandler) MarshaledHandler { + return func(ctx context.Context, rt *runtime.Runtime, r *http.Request) (any, int, error) { logger := &models.HTTPLogger{} response, status, err := handler(ctx, rt, r, logger) if err := logger.Insert(ctx, rt.DB); err != nil { - return nil, http.StatusInternalServerError, errors.Wrap(err, "error writing HTTP logs") + return nil, 0, errors.Wrap(err, "error writing HTTP logs") } return response, status, err diff --git a/web/wrappers_test.go b/web/wrappers_test.go index ae91cb1eb..7b609526b 100644 --- a/web/wrappers_test.go +++ b/web/wrappers_test.go @@ -18,9 +18,9 @@ import ( ) func TestWithHTTPLogs(t *testing.T) { - ctx, rt, db, _ := testsuite.Get() + ctx, rt := testsuite.Runtime() - defer db.MustExec(`DELETE FROM request_logs_httplog`) + defer rt.DB.MustExec(`DELETE FROM request_logs_httplog`) defer httpx.SetRequestor(httpx.DefaultRequestor) httpx.SetRequestor(httpx.NewMockRequestor(map[string][]*httpx.MockResponse{ @@ -30,7 +30,7 @@ func TestWithHTTPLogs(t *testing.T) { }, })) - handler := func(ctx context.Context, rt *runtime.Runtime, r *http.Request, l *models.HTTPLogger) (interface{}, int, error) { + handler := func(ctx context.Context, rt *runtime.Runtime, r *http.Request, l *models.HTTPLogger) (any, int, error) { ticketer, _ := models.LookupTicketerByUUID(ctx, rt.DB, testdata.Mailgun.UUID) logger := l.Ticketer(ticketer) @@ -61,5 +61,5 @@ func TestWithHTTPLogs(t *testing.T) { assert.NoError(t, err) // check HTTP logs were created - assertdb.Query(t, db, `select count(*) from request_logs_httplog where ticketer_id = $1;`, testdata.Mailgun.ID).Returns(2) + assertdb.Query(t, rt.DB, `select count(*) from request_logs_httplog where ticketer_id = $1;`, testdata.Mailgun.ID).Returns(2) } diff --git 
a/workers.go b/workers.go index e684ab070..2a4074de8 100644 --- a/workers.go +++ b/workers.go @@ -8,6 +8,7 @@ import ( "github.com/nyaruka/mailroom/core/queue" "github.com/nyaruka/mailroom/runtime" + "github.com/pkg/errors" "github.com/sirupsen/logrus" ) @@ -181,18 +182,11 @@ func (w *Worker) handleTask(task *queue.Task) { log.Info("starting handling of task") start := time.Now() - taskFunc, found := taskFunctions[task.Type] - if found { - err := taskFunc(context.Background(), w.foreman.rt, task) - if err != nil { - log.WithError(err).WithField("task", string(task.Task)).Error("error running task") - } - } else { - log.Error("unable to find function for task type") + if err := PerformTask(w.foreman.rt, task); err != nil { + log.WithError(err).WithField("task", string(task.Task)).Error("error running task") } elapsed := time.Since(start) - log.WithField("elapsed", elapsed).Info("task complete") // additionally if any task took longer than 1 minute, log as warning @@ -200,3 +194,12 @@ func (w *Worker) handleTask(task *queue.Task) { log.WithField("task", string(task.Task)).WithField("elapsed", elapsed).Warn("long running task") } } + +func PerformTask(rt *runtime.Runtime, t *queue.Task) error { + taskFunc, found := taskFunctions[t.Type] + if !found { + return errors.Errorf("unable to find handler for task type %s", t.Type) + } + + return taskFunc(context.Background(), rt, t) +}