Mirror of https://github.com/mattermost/mattermost.git, synced 2026-02-20 08:21:09 -05:00.

Compare commits: 56 commits against master.
Commits in this comparison (SHA1 only; author, message, and date columns were not preserved):
e8ac030ed9, 9518d25e51, 032b60bb97, c6bd44ab20, 15364790cc, c910ee3467, feb598ed2b, ab131e5163,
644022c3e1, 7ccb62db79, 15a0d50f36, c33f6a4d81, 2537285371, 3624526af1, 7977e7e6da, 733e878e03,
6b61997b1c, 60cdd5b389, 8b86719a0a, 331757c34d, 4904019771, f05a75c26a, daf9812043, a08f86c370,
bf8672bb95, 547c6be541, 70dcdd0449, 045e6daae3, 577206545b, bc5654ae20, 6f4f5d264d, d90bb094b0,
ea5128a818, a5e251eb3e, 93fe3c49c1, c4cc139c22, 97efc2a3bb, c430783209, 91a9c815a4, 88aa3c868f,
36442d62a4, 339afff28a, aaf9811353, 2e2a9dcb1b, 1764775bfe, bdf211ac7b, bf86cdeed3, 408f24bf13,
dea962c372, 12eab585e7, ff251c72b8, 371783ee09, 3d85e9cec9, bced819eb8, 3f9c029c85, 62058fecfc
224 changed files with 10758 additions and 2455 deletions
.github/workflows/api.yml — 2 changes

@@ -11,7 +11,7 @@ permissions:
 
 jobs:
   build:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-24.04
     defaults:
       run:
         working-directory: ./api
.github/workflows/claude.yml — 2 changes

@@ -17,7 +17,7 @@ jobs:
       (github.event_name == 'pull_request_review_comment' && contains(github.event.comment.body, '@claude')) ||
       (github.event_name == 'pull_request_review' && contains(github.event.review.body, '@claude')) ||
       (github.event_name == 'issues' && (contains(github.event.issue.body, '@claude') || contains(github.event.issue.title, '@claude')))
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-24.04
     permissions:
       contents: read
       pull-requests: read
.github/workflows/codeql-analysis.yml — 2 changes

@@ -16,7 +16,7 @@ jobs:
       security-events: write # for github/codeql-action/autobuild to send a status report
     name: Analyze
     if: github.repository_owner == 'mattermost'
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-24.04
 
     strategy:
       fail-fast: false
(file name not preserved) — 2 hunks

@@ -21,7 +21,7 @@ permissions:
 
 jobs:
   build-and-push:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-24.04
     env:
       IMAGE_TAG: ${{ github.event.inputs.tag }}
 
@@ -66,7 +66,7 @@ jobs:
             -f server/build/Dockerfile.buildenv .
 
   build-and-push-fips:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-24.04
 
     steps:
       - uses: chainguard-dev/setup-chainctl@f4ed65b781b048c44d4f033ae854c025c5531c19 # v0.3.2
.github/workflows/docker-push-mirrored.yml — 2 changes

@@ -11,7 +11,7 @@ jobs:
   build-docker:
     name: cd/Push mirrored docker images
     if: github.repository_owner == 'mattermost'
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-24.04
     steps:
       - name: Checkout mattermost project
         uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
.github/workflows/e2e-fulltests-ci.yml — 4 changes

@@ -53,7 +53,7 @@ concurrency:
 
 jobs:
   generate-test-variables:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-24.04
     permissions:
       issues: write
       pull-requests: write
@@ -307,7 +307,7 @@ jobs:
       AWS_SECRET_ACCESS_KEY: "${{ secrets.CYPRESS_AWS_SECRET_ACCESS_KEY }}"
 
   notify-user:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-24.04
     if: always()
     needs:
       - generate-test-variables
.github/workflows/e2e-tests-ci-template.yml — 22 changes

@@ -105,7 +105,7 @@ on:
 
 jobs:
   update-initial-status:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-24.04
     steps:
       - uses: mattermost/actions/delivery/update-commit-status@main
         env:
@@ -118,7 +118,7 @@ jobs:
           status: pending
 
   cypress-check:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-24.04
     needs:
       - update-initial-status
     defaults:
@@ -149,7 +149,7 @@ jobs:
           npm run check
 
   playwright-check:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-24.04
     needs:
       - update-initial-status
     defaults:
@@ -186,7 +186,7 @@ jobs:
           npm run check
 
   shell-check:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-24.04
     needs:
       - update-initial-status
     defaults:
@@ -204,7 +204,7 @@ jobs:
         run: make check-shell
 
   generate-build-variables:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-24.04
     needs:
      - update-initial-status
     defaults:
@@ -230,7 +230,7 @@ jobs:
          echo "node-cache-dependency-path=e2e-tests/${TEST}/package-lock.json" >> $GITHUB_OUTPUT
 
   generate-test-cycle:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-24.04
     needs:
       - generate-build-variables
     defaults:
@@ -284,8 +284,8 @@ jobs:
        # - For MacOS: works on developer machines, but uses too many resources to be able to run on Github Actions
        # - for Windows: cannot currently run on Github Actions, since the runners do not support running linux containers, at the moment
        #
-       #os: [ubuntu-latest, windows-2022, macos-12-xl]
-       os: [ubuntu-latest]
+       #os: [ubuntu-24.04, windows-2022, macos-12-xl]
+       os: [ubuntu-24.04]
        worker_index: ${{ fromJSON(needs.generate-build-variables.outputs.workers) }} # https://docs.github.com/en/actions/learn-github-actions/expressions#example-returning-a-json-object
     runs-on: "${{ matrix.os }}"
     timeout-minutes: 120
@@ -376,7 +376,7 @@ jobs:
          retention-days: 1
 
   report:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-24.04
     needs:
       - test
       - generate-build-variables
@@ -527,7 +527,7 @@ jobs:
          [ "${{ steps.calculate-results.outputs.failed }}" = "0" ]
 
   update-failure-final-status:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-24.04
     if: failure() || cancelled()
     needs:
       - generate-test-cycle
@@ -550,7 +550,7 @@ jobs:
 
 
   update-success-final-status:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-24.04
     if: success()
     needs:
       - generate-test-cycle
.github/workflows/e2e-tests-ci.yml — 2 changes

@@ -11,7 +11,7 @@ on:
 
 jobs:
   generate-test-variables:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-24.04
     outputs:
       BRANCH: "${{ steps.generate.outputs.BRANCH }}"
       BUILD_ID: "${{ steps.generate.outputs.BUILD_ID }}"
.github/workflows/mmctl-test-template.yml — 32 changes

@@ -17,6 +17,10 @@ on:
       go-version:
         required: true
         type: string
+      fips-enabled:
+        required: false
+        default: false
+        type: boolean
 
 jobs:
   test:
@@ -25,8 +29,25 @@ jobs:
     env:
       COMPOSE_PROJECT_NAME: ghactions
     steps:
+      - name: buildenv/docker-login
+        uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+
       - name: Checkout mattermost project
         uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - name: Setup BUILD_IMAGE
+        id: build
+        run: |
+          if [[ ${{ inputs.fips-enabled }} == 'true' ]]; then
+            echo "BUILD_IMAGE=mattermost/mattermost-build-server-fips:${{ inputs.go-version }}" >> "${GITHUB_OUTPUT}"
+            echo "LOG_ARTIFACT_NAME=${{ inputs.logsartifact }}-fips" >> "${GITHUB_OUTPUT}"
+          else
+            echo "BUILD_IMAGE=mattermostdevelopment/mattermost-build-server:${{ inputs.go-version }}" >> "${GITHUB_OUTPUT}"
+            echo "LOG_ARTIFACT_NAME=${{ inputs.logsartifact }}" >> "${GITHUB_OUTPUT}"
+          fi
+
       - name: Store required variables for publishing results
         run: |
           echo "${{ inputs.name }}" > server/test-name
@@ -35,6 +56,7 @@ jobs:
         run: |
           cd server
           make prepackaged-plugins PLUGIN_PACKAGES=mattermost-plugin-jira-v3.2.5
 
       - name: Run docker compose
         run: |
           cd server/build
@@ -44,9 +66,10 @@ jobs:
           cat ../tests/test-data.ldif | docker compose --ansi never exec -T openldap bash -c 'ldapadd -x -D "cn=admin,dc=mm,dc=test,dc=com" -w mostest';
           docker compose --ansi never exec -T minio sh -c 'mkdir -p /data/mattermost-test';
           docker compose --ansi never ps
 
       - name: Run mmctl Tests
         env:
-          BUILD_IMAGE: mattermostdevelopment/mattermost-build-server:${{ inputs.go-version }}
+          BUILD_IMAGE: ${{ steps.build.outputs.BUILD_IMAGE }}
         run: |
           if [[ ${{ github.ref_name }} == 'master' ]]; then
             export TESTFLAGS="-timeout 90m -race"
@@ -58,21 +81,22 @@ jobs:
             --env-file=server/build/dotenv/test.env \
             --env MM_SQLSETTINGS_DATASOURCE="${{ inputs.datasource }}" \
             --env MMCTL_TESTFLAGS="$TESTFLAGS" \
             -v $(go env GOCACHE):/go/cache \
             -e GOCACHE=/go/cache \
+            --env FIPS_ENABLED="${{ inputs.fips-enabled }}" \
             -v $PWD:/mattermost \
             -w /mattermost/server \
             $BUILD_IMAGE \
             make test-mmctl BUILD_NUMBER=$GITHUB_HEAD_REF-$GITHUB_RUN_ID
 
       - name: Stop docker compose
         run: |
           cd server/build
           docker compose --ansi never stop
 
       - name: Archive logs
         if: ${{ always() }}
         uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         with:
-          name: ${{ inputs.logsartifact }}
+          name: ${{ steps.build.outputs.LOG_ARTIFACT_NAME }}
           path: |
             server/gotestsum.json
             server/report.xml
.github/workflows/scorecards-analysis.yml — 2 changes

@@ -12,7 +12,7 @@ jobs:
   analysis:
     name: Scorecard analysis
     if: github.repository_owner == 'mattermost'
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-24.04
     permissions:
       # Needed to upload the results to code-scanning dashboard.
       security-events: write
.github/workflows/server-ci-report.yml — 49 changes

@@ -22,14 +22,37 @@ jobs:
           pattern: "*-test-logs"
           path: reports
 
+      - name: report/validate-and-prepare-data
+        id: validate
+        run: |
+          # Create validated data file
+          > /tmp/validated-tests.json
+
+          find "reports" -type f -name "test-name" | while read -r test_file; do
+            folder=$(basename "$(dirname "$test_file")")
+            test_name_raw=$(cat "$test_file" | tr -d '\n\r')
+
+            # Validate test name: allow alphanumeric, spaces, hyphens, underscores, parentheses, and dots
+            if [[ "$test_name_raw" =~ ^[a-zA-Z0-9\ \(\)_.-]+$ ]] && [[ ${#test_name_raw} -le 100 ]]; then
+              # Use jq to safely escape the test name as JSON
+              test_name_escaped=$(echo -n "$test_name_raw" | jq -R .)
+              echo "{\"artifact\": \"$folder\", \"name\": $test_name_escaped}" >> /tmp/validated-tests.json
+            else
+              echo "Warning: Skipping invalid test name in $test_file: '$test_name_raw'" >&2
+            fi
+          done
+
+          # Verify we have at least some valid tests
+          if [[ ! -s /tmp/validated-tests.json ]]; then
+            echo "Error: No valid test names found" >&2
+            exit 1
+          fi
+
       - name: report/generate-report-matrix
         id: report
         run: |
-          find "reports" -type f -name "test-name" | while read -r test_file; do
-            folder=$(basename "$(dirname "$test_file")")
-            test_name=$(cat "$test_file")
-            echo "{\"artifact\": \"$folder\", \"name\": \"$test_name\"}"
-          done | jq -s '{ "test": . }' | tee /tmp/report-matrix
+          # Convert validated JSON objects to matrix format
+          jq -s '{ "test": . }' /tmp/validated-tests.json | tee /tmp/report-matrix
          echo REPORT_MATRIX=$(cat /tmp/report-matrix | jq --compact-output --monochrome-output) >> ${GITHUB_OUTPUT}
 
   publish-report:
@@ -54,7 +77,21 @@ jobs:
       - name: report/fetch-pr-number
         if: github.event.workflow_run.name == 'Server CI PR'
         id: incoming-pr
-        run: echo "NUMBER=$(cat ${{ matrix.test.artifact }}/pr-number)" >> ${GITHUB_OUTPUT}
+        env:
+          ARTIFACT: "${{ matrix.test.artifact }}"
+        run: |
+          if [[ -f "$ARTIFACT/pr-number" ]]; then
+            pr_number=$(cat "$ARTIFACT/pr-number" | tr -d '\n\r' | grep -E '^[0-9]+$')
+            if [[ -n "$pr_number" ]] && [[ ${#pr_number} -le 10 ]]; then
+              echo "NUMBER=$pr_number" >> ${GITHUB_OUTPUT}
+            else
+              echo "Invalid PR number format" >&2
+              exit 1
+            fi
+          else
+            echo "PR number file not found" >&2
+            exit 1
+          fi
       - name: Publish test report
         id: report
         uses: mikepenz/action-junit-report@cf701569b05ccdd861a76b8607a66d76f6fd4857 # v5.5.1
.github/workflows/server-ci.yml — 87 changes

@@ -11,6 +11,8 @@ on:
       - ".github/workflows/server-ci.yml"
       - ".github/workflows/server-test-template.yml"
+      - ".github/workflows/mmctl-test-template.yml"
       - "!server/build/Dockerfile.buildenv"
+      - "!server/build/Dockerfile.buildenv-fips"
 
 concurrency:
   group: ${{ github.event_name == 'pull_request' && format('{0}-{1}', github.workflow, github.ref) || github.run_id }}
@@ -63,8 +65,8 @@ jobs:
         run: make modules-tidy
       - name: Check modules
         run: if [[ -n $(git status --porcelain) ]]; then echo "Please tidy up the Go modules using make modules-tidy"; git diff; exit 1; fi
-  golangci:
-    name: golangci-lint
+  check-style:
+    name: check-style
     needs: go
     runs-on: ubuntu-22.04
     container: mattermostdevelopment/mattermost-build-server:${{ needs.go.outputs.version }}
@@ -79,7 +81,7 @@ jobs:
       - name: Run setup-go-work
         run: make setup-go-work
       - name: Run golangci
-        run: make golangci-lint
+        run: make check-style
   check-gen-serialized:
     name: Check serialization methods for hot structs
     needs: go
@@ -97,25 +99,6 @@ jobs:
         run: make gen-serialized
       - name: Check serialized
         run: if [[ -n $(git status --porcelain) ]]; then echo "Please update the serialized files using 'make gen-serialized'"; exit 1; fi
-  check-mattermost-vet:
-    name: Check style
-    needs: go
-    runs-on: ubuntu-22.04
-    container: mattermostdevelopment/mattermost-build-server:${{ needs.go.outputs.version }}
-    defaults:
-      run:
-        working-directory: server
-    steps:
-      - name: Checkout mattermost project
-        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-      - name: Run setup-go-work
-        run: make setup-go-work
-      - name: Reset config
-        run: make config-reset
-      - name: Run plugin-checker
-        run: make plugin-checker
-      - name: Run mattermost-vet
-        run: make vet BUILD_NUMBER='${GITHUB_HEAD_REF}'
   check-mattermost-vet-api:
     name: Vet API
     needs: go
@@ -201,9 +184,7 @@ jobs:
   test-postgres-binary:
     if: github.event_name == 'push' # Only run postgres binary tests on master/release pushes: odds are low this regresses, so save the cycles for pull requests.
     name: Postgres with binary parameters
-    needs:
-      - go
-      - check-mattermost-vet
+    needs: go
     uses: ./.github/workflows/server-test-template.yml
     secrets: inherit
     with:
@@ -212,11 +193,10 @@ jobs:
       drivername: postgres
       logsartifact: postgres-binary-server-test-logs
       go-version: ${{ needs.go.outputs.version }}
+      fips-enabled: false
 
   test-postgres-normal:
     name: Postgres
-    needs:
-      - go
-      - check-mattermost-vet
+    needs: go
     uses: ./.github/workflows/server-test-template.yml
     secrets: inherit
     with:
@@ -225,13 +205,24 @@ jobs:
       drivername: postgres
       logsartifact: postgres-server-test-logs
       go-version: ${{ needs.go.outputs.version }}
+      fips-enabled: false
+
+  test-postgres-normal-fips:
+    name: Postgres (FIPS)
+    needs: go
+    uses: ./.github/workflows/server-test-template.yml
+    secrets: inherit
+    with:
+      name: Postgres
+      datasource: postgres://mmuser:mostest@postgres:5432/mattermost_test?sslmode=disable&connect_timeout=10
+      drivername: postgres
+      logsartifact: postgres-server-test-logs
+      go-version: ${{ needs.go.outputs.version }}
+      fips-enabled: true
 
   test-coverage:
     # Skip coverage generation for cherry-pick PRs into release branches.
     if: ${{ github.event_name != 'pull_request' || !startsWith(github.event.pull_request.base.ref, 'release-') }}
     name: Generate Test Coverage
-    needs:
-      - go
-      - check-mattermost-vet
+    needs: go
     uses: ./.github/workflows/server-test-template.yml
     secrets: inherit
     with:
@@ -244,9 +235,7 @@ jobs:
       go-version: ${{ needs.go.outputs.version }}
 
   test-mmctl:
     name: Run mmctl tests
-    needs:
-      - check-mattermost-vet
-      - go
+    needs: go
     uses: ./.github/workflows/mmctl-test-template.yml
     secrets: inherit
     with:
@@ -255,11 +244,22 @@ jobs:
       drivername: postgres
       logsartifact: mmctl-test-logs
       go-version: ${{ needs.go.outputs.version }}
+      fips-enabled: false
+
+  test-mmctl-fips:
+    name: Run mmctl tests (FIPS)
+    needs: go
+    uses: ./.github/workflows/mmctl-test-template.yml
+    secrets: inherit
+    with:
+      name: mmctl
+      datasource: postgres://mmuser:mostest@postgres:5432/mattermost_test?sslmode=disable&connect_timeout=10
+      drivername: postgres
+      logsartifact: mmctl-test-logs
+      go-version: ${{ needs.go.outputs.version }}
+      fips-enabled: true
 
   build-mattermost-server:
     name: Build mattermost server app
-    needs:
-      - go
-      - check-mattermost-vet
+    needs: go
     runs-on: ubuntu-22.04
     container: mattermostdevelopment/mattermost-build-server:${{ needs.go.outputs.version }}
     defaults:
@@ -267,23 +267,18 @@ jobs:
         working-directory: server
     env:
       GOFLAGS: -buildvcs=false # TODO: work around "error obtaining VCS status: exit status 128" in a container
+      BUILD_NUMBER: "${GITHUB_HEAD_REF}-${GITHUB_RUN_ID}"
+      FIPS_ENABLED: false
     steps:
       - name: Checkout mattermost project
         uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
-      - name: ci/setup-node
-        uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
-        id: setup_node
-        with:
-          node-version-file: ".nvmrc"
-          cache: npm
-          cache-dependency-path: "webapp/package-lock.json"
       - name: Run setup-go-work
         run: make setup-go-work
       - name: Build
         run: |
           make config-reset
-          make build-cmd BUILD_NUMBER='${GITHUB_HEAD_REF}-${GITHUB_RUN_ID}'
-          make package BUILD_NUMBER='${GITHUB_HEAD_REF}-${GITHUB_RUN_ID}'
+          make build-cmd
+          make package
       - name: Persist dist artifacts
         uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         with:
.github/workflows/server-test-template.yml — 37 changes

@@ -25,6 +25,14 @@ on:
       go-version:
         required: true
         type: string
+      fips-enabled:
+        required: false
+        default: false
+        type: boolean
+
+permissions:
+  id-token: write
+  contents: read
 
 jobs:
   test:
@@ -34,12 +42,30 @@ jobs:
     env:
       COMPOSE_PROJECT_NAME: ghactions
     steps:
+      - name: buildenv/docker-login
+        uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+
       - name: Checkout mattermost project
         uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - name: Setup BUILD_IMAGE
+        id: build
+        run: |
+          if [[ ${{ inputs.fips-enabled }} == 'true' ]]; then
+            echo "BUILD_IMAGE=mattermost/mattermost-build-server-fips:${{ inputs.go-version }}" >> "${GITHUB_OUTPUT}"
+            echo "LOG_ARTIFACT_NAME=${{ inputs.logsartifact }}-fips" >> "${GITHUB_OUTPUT}"
+          else
+            echo "BUILD_IMAGE=mattermostdevelopment/mattermost-build-server:${{ inputs.go-version }}" >> "${GITHUB_OUTPUT}"
+            echo "LOG_ARTIFACT_NAME=${{ inputs.logsartifact }}" >> "${GITHUB_OUTPUT}"
+          fi
+
       - name: Store required variables for publishing results
         run: |
           echo "${{ inputs.name }}" > server/test-name
           echo "${{ github.event.pull_request.number }}" > server/pr-number
 
       - name: Run docker compose
         run: |
           cd server/build
@@ -49,9 +75,10 @@ jobs:
           cat ../tests/test-data.ldif | docker compose --ansi never exec -T openldap bash -c 'ldapadd -x -D "cn=admin,dc=mm,dc=test,dc=com" -w mostest';
           docker compose --ansi never exec -T minio sh -c 'mkdir -p /data/mattermost-test';
           docker compose --ansi never ps
 
       - name: Run Tests
         env:
-          BUILD_IMAGE: mattermostdevelopment/mattermost-build-server:${{ inputs.go-version }}
+          BUILD_IMAGE: ${{ steps.build.outputs.BUILD_IMAGE }}
         run: |
           if [[ ${{ github.ref_name }} == 'master' && ${{ inputs.fullyparallel }} != true ]]; then
             export RACE_MODE="-race"
@@ -61,12 +88,10 @@ jobs:
             --env-file=server/build/dotenv/test.env \
             --env MM_SQLSETTINGS_DRIVERNAME="${{ inputs.drivername }}" \
             --env MM_SQLSETTINGS_DATASOURCE="${{ inputs.datasource }}" \
-            --env TEST_DATABASE_MYSQL_DSN="${{ inputs.datasource }}" \
-            --env TEST_DATABASE_POSTGRESQL_DSN="${{ inputs.datasource }}" \
             --env ENABLE_FULLY_PARALLEL_TESTS="${{ inputs.fullyparallel }}" \
             --env ENABLE_COVERAGE="${{ inputs.enablecoverage }}" \
             -v $(go env GOCACHE):/go/cache \
             -e GOCACHE=/go/cache \
+            --env FIPS_ENABLED="${{ inputs.fips-enabled }}" \
             -v $PWD:/mattermost \
             -w /mattermost/server \
             $BUILD_IMAGE \
@@ -78,15 +103,17 @@ jobs:
           token: ${{ secrets.CODECOV_TOKEN }}
           disable_search: true
           files: server/cover.out
 
       - name: Stop docker compose
         run: |
           cd server/build
           docker compose --ansi never stop
 
       - name: Archive logs
         if: ${{ always() }}
         uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         with:
-          name: ${{ inputs.logsartifact }}
+          name: ${{ steps.build.outputs.LOG_ARTIFACT_NAME }}
           path: |
             server/gotestsum.json
             server/report.xml
(file name not preserved) — access control policies OpenAPI spec

@@ -66,6 +66,53 @@
         "403":
           $ref: "#/components/responses/Forbidden"
         "500":
           $ref: "#/components/responses/InternalServerError"
+  /api/v4/access_control_policies/cel/validate_requester:
+    post:
+      tags:
+        - access control
+      summary: Validate if the current user matches a CEL expression
+      description: |
+        Validates whether the current authenticated user matches the given CEL expression.
+        This is used to determine if a channel admin can test expressions they match.
+        ##### Permissions
+        Must have `manage_system` permission OR be a channel admin for the specified channel (channelId required for channel admins).
+      operationId: ValidateExpressionAgainstRequester
+      requestBody:
+        required: true
+        content:
+          application/json:
+            schema:
+              type: object
+              properties:
+                expression:
+                  type: string
+                  description: The CEL expression to validate against the current user.
+                channelId:
+                  type: string
+                  description: The channel ID for channel-specific permission checks (required for channel admins).
+              required:
+                - expression
+      responses:
+        "200":
+          description: Validation result returned successfully.
+          content:
+            application/json:
+              schema:
+                type: object
+                properties:
+                  requester_matches:
+                    type: boolean
+                    description: Whether the current user matches the expression.
+                required:
+                  - requester_matches
+        "400":
+          $ref: "#/components/responses/BadRequest"
+        "401":
+          $ref: "#/components/responses/Unauthorized"
+        "403":
+          $ref: "#/components/responses/Forbidden"
+        "500":
+          $ref: "#/components/responses/InternalServerError"
   /api/v4/access_control_policies/cel/test:
     post:
       tags:
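For reference, the new `validate_requester` operation can be exercised with a plain HTTP POST. The sketch below is a minimal Go client call; the server URL, session token, channel ID, and CEL expression are hypothetical, and only the request and response shapes documented in the spec above are assumed.

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

func main() {
	// Hypothetical server URL and session token.
	serverURL := "https://mattermost.example.com"
	token := "hypothetical-session-token"

	// Request body per the spec: "expression" is required, "channelId" is
	// needed for channel admins. Both values here are illustrative.
	body, _ := json.Marshal(map[string]string{
		"expression": `user.attributes.department == "Engineering"`,
		"channelId":  "hypothetical-channel-id",
	})

	req, err := http.NewRequest(http.MethodPost,
		serverURL+"/api/v4/access_control_policies/cel/validate_requester",
		bytes.NewReader(body))
	if err != nil {
		panic(err)
	}
	req.Header.Set("Authorization", "Bearer "+token)
	req.Header.Set("Content-Type", "application/json")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	// Response shape per the spec: {"requester_matches": bool}.
	var out struct {
		RequesterMatches bool `json:"requester_matches"`
	}
	if err := json.NewDecoder(resp.Body).Decode(&out); err != nil {
		panic(err)
	}
	fmt.Println("requester matches:", out.RequesterMatches)
}
```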
(file name not preserved) — channels OpenAPI spec

@@ -1149,64 +1149,6 @@
           $ref: "#/components/responses/Forbidden"
         "404":
           $ref: "#/components/responses/NotFound"
-  "/api/v4/teams/{team_id}/channels/search_archived":
-    post:
-      tags:
-        - channels
-      summary: Search archived channels
-      description: >
-        Search archived channels on a team based on the search term provided in
-        the request body.
-
-
-        __Minimum server version__: 5.18
-
-
-        ##### Permissions
-
-        Must have the `list_team_channels` permission.
-
-
-        In server version 5.18 and later, a user without the `list_team_channels` permission will be able to use this endpoint, with the search results limited to the channels that the user is a member of.
-      operationId: SearchArchivedChannels
-      parameters:
-        - name: team_id
-          in: path
-          description: Team GUID
-          required: true
-          schema:
-            type: string
-      requestBody:
-        content:
-          application/json:
-            schema:
-              type: object
-              required:
-                - term
-              properties:
-                term:
-                  description: The search term to match against the name or display name of
-                    archived channels
-                  type: string
-        description: Search criteria
-        required: true
-      responses:
-        "201":
-          description: Channels search successful
-          content:
-            application/json:
-              schema:
-                type: array
-                items:
-                  $ref: "#/components/schemas/Channel"
-        "400":
-          $ref: "#/components/responses/BadRequest"
-        "401":
-          $ref: "#/components/responses/Unauthorized"
-        "403":
-          $ref: "#/components/responses/Forbidden"
-        "404":
-          $ref: "#/components/responses/NotFound"
   "/api/v4/teams/{team_id}/channels/name/{channel_name}":
     get:
       tags:
(file name not preserved) — custom profile attributes OpenAPI spec

@@ -250,7 +250,7 @@
                     type: string
                   value:
                     oneOf:
                       - type: string
                       - type: string
                       - type: array
                         items:
                           type: string
@@ -349,6 +349,65 @@
           $ref: "#/components/responses/Unauthorized"
         "403":
           $ref: "#/components/responses/Forbidden"
+    patch:
+      tags:
+        - custom profile attributes
+      summary: Update custom profile attribute values for a user
+      description: |
+        Update Custom Profile Attribute field values for a specific user.
+
+        _This endpoint is experimental._
+
+        __Minimum server version__: 11
+
+        ##### Permissions
+        Must have permission to edit the user. Users can only edit their own CPA values unless they are system administrators.
+      parameters:
+        - name: user_id
+          in: path
+          description: User GUID
+          required: true
+          schema:
+            type: string
+      requestBody:
+        description: Custom Profile Attribute values that are to be updated
+        required: true
+        content:
+          application/json:
+            schema:
+              type: array
+              items:
+                type: object
+                properties:
+                  id:
+                    type: string
+                  value:
+                    oneOf:
+                      - type: string
+                      - type: array
+                        items:
+                          type: string
+      responses:
+        '200':
+          description: Custom profile attribute values updated successfully
+          content:
+            application/json:
+              schema:
+                type: array
+                items:
+                  type: object
+                  properties:
+                    id:
+                      type: string
+                    value:
+                      oneOf:
+                        - type: string
+                        - type: array
+                          items:
+                            type: string
+        '400':
+          $ref: '#/components/responses/BadRequest'
+        '403':
+          $ref: '#/components/responses/Forbidden'
+        '404':
+          $ref: '#/components/responses/NotFound'
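The PATCH operation above accepts an array of `{id, value}` items, where each `value` may be either a single string or an array of strings. A minimal Go sketch of building such a request follows; the server URL, token, user ID, field IDs, and the URL path the operation is attached to are all assumptions (the enclosing path key is not visible in this hunk).

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

// cpaValue mirrors the request/response items documented above:
// a custom profile attribute field id plus a value that may be a
// string or a list of strings (hence the `any` type).
type cpaValue struct {
	ID    string `json:"id"`
	Value any    `json:"value"`
}

func main() {
	// Hypothetical server URL, token, user id, and field ids.
	serverURL := "https://mattermost.example.com"
	token := "hypothetical-session-token"
	userID := "hypothetical-user-id"

	payload := []cpaValue{
		{ID: "hypothetical-field-id-1", Value: "Engineering"},
		{ID: "hypothetical-field-id-2", Value: []string{"go", "kubernetes"}},
	}
	body, _ := json.Marshal(payload)

	// Assumed path; the path prefix is not shown in the hunk above.
	endpoint := fmt.Sprintf("%s/api/v4/users/%s/custom_profile_attributes/values", serverURL, userID)

	req, err := http.NewRequest(http.MethodPatch, endpoint, bytes.NewReader(body))
	if err != nil {
		panic(err)
	}
	req.Header.Set("Authorization", "Bearer "+token)
	req.Header.Set("Content-Type", "application/json")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println("status:", resp.Status)
}
```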
(file name not preserved) — groups OpenAPI spec

@@ -1200,40 +1200,3 @@
           $ref: "#/components/responses/BadRequest"
         "501":
           $ref: "#/components/responses/NotImplemented"
-  "/api/v4/groups/names":
-    post:
-      tags:
-        - groups
-      summary: Get groups by name
-      description: |
-        Get a list of groups based on a provided list of names.
-
-        ##### Permissions
-        Requires an active session but no other permissions.
-
-        __Minimum server version__: 11.0
-      operationId: GetGroupsByNames
-      requestBody:
-        content:
-          application/json:
-            schema:
-              type: array
-              items:
-                type: string
-        description: List of group names
-        required: true
-      responses:
-        "200":
-          description: Group list retrieval successfully
-          content:
-            application/json:
-              schema:
-                type: array
-                items:
-                  $ref: "#/components/schemas/Group"
-        "400":
-          $ref: "#/components/responses/BadRequest"
-        "401":
-          $ref: "#/components/responses/Unauthorized"
-        "501":
-          $ref: "#/components/responses/NotImplemented"
(file name not preserved) — users OpenAPI spec

@@ -74,6 +74,59 @@
           $ref: "#/components/responses/Unauthorized"
         "403":
           $ref: "#/components/responses/Forbidden"
+  /api/v4/users/login/sso/code-exchange:
+    post:
+      tags:
+        - users
+      summary: Exchange SSO login code for session tokens
+      description: >
+        Exchange a short-lived login_code for session tokens using SAML code exchange (mobile SSO flow).
+        This endpoint is part of the mobile SSO code-exchange flow to prevent tokens
+        from appearing in deep links.
+
+        ##### Permissions
+
+        No permission required.
+      operationId: LoginSSOCodeExchange
+      requestBody:
+        content:
+          application/json:
+            schema:
+              type: object
+              required:
+                - login_code
+                - code_verifier
+                - state
+              properties:
+                login_code:
+                  description: Short-lived one-time code from SSO callback
+                  type: string
+                code_verifier:
+                  description: SAML verifier to prove code possession
+                  type: string
+                state:
+                  description: State parameter to prevent CSRF attacks
+                  type: string
+        description: SSO code exchange object
+        required: true
+      responses:
+        "200":
+          description: Code exchange successful
+          content:
+            application/json:
+              schema:
+                type: object
+                properties:
+                  token:
+                    description: Session token for authentication
+                    type: string
+                  csrf:
+                    description: CSRF token for request validation
+                    type: string
+        "400":
+          $ref: "#/components/responses/BadRequest"
+        "403":
+          $ref: "#/components/responses/Forbidden"
   /api/v4/users/logout:
     post:
       tags:
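The code-exchange endpoint above trades a one-time `login_code`, together with the `code_verifier` and `state` from the mobile SSO flow, for a session token and a CSRF token. A minimal Go sketch of the call, with all input values hypothetical and only the documented request/response shapes assumed:

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

func main() {
	// Hypothetical server URL and values received from the SSO callback deep link.
	serverURL := "https://mattermost.example.com"

	body, _ := json.Marshal(map[string]string{
		"login_code":    "hypothetical-one-time-code",
		"code_verifier": "hypothetical-verifier",
		"state":         "hypothetical-state",
	})

	// No authentication header is needed; the spec states no permission is required.
	resp, err := http.Post(serverURL+"/api/v4/users/login/sso/code-exchange",
		"application/json", bytes.NewReader(body))
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	// Response shape per the spec: {"token": "...", "csrf": "..."}.
	var out struct {
		Token string `json:"token"`
		CSRF  string `json:"csrf"`
	}
	if err := json.NewDecoder(resp.Body).Decode(&out); err != nil {
		panic(err)
	}
	fmt.Printf("received session token (%d chars) and csrf token (%d chars)\n", len(out.Token), len(out.CSRF))
}
```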
(file name not preserved) — e2e docker services script

@@ -81,7 +81,7 @@ $(for service in $ENABLED_DOCKER_SERVICES; do
 $(if mme2e_is_token_in_list "postgres" "$ENABLED_DOCKER_SERVICES"; then
   echo '
   postgres:
-    image: mattermostdevelopment/mirrored-postgres:13
+    image: mattermostdevelopment/mirrored-postgres:14
     restart: "no"
     network_mode: host
     networks: !reset []
(file name not preserved) — Cypress keyboard shortcuts spec

@@ -21,7 +21,7 @@ describe('Keyboard Shortcuts', () => {
 
     it('MM-T1277 SHIFT+UP', () => {
         // # Press shift+up to open the latest thread in the channel in the RHS
-        cy.uiGetPostTextBox().type('{shift}{uparrow}');
+        cy.uiGetPostTextBox().type('{shift+uparrow}', {delay: 50});
 
         // * RHS Opens up
         cy.get('.sidebar--right__header').should('be.visible');
@@ -33,7 +33,28 @@ describe('Keyboard Shortcuts', () => {
         cy.uiGetPostTextBox().click();
 
         // # Press shift+up again
-        cy.uiGetPostTextBox().type('{shift}{uparrow}');
+        cy.uiGetPostTextBox().type('{shift+uparrow}', {delay: 50});
 
         // * RHS textbox should be focused
         cy.uiGetReplyTextBox().should('be.focused');
+
+        // # Post a reply in the thread
+        cy.uiGetReplyTextBox().type('This is a reply{enter}');
+
+        // # Close the RHS by clicking the X button
+        cy.get('#rhsCloseButton').click();
+
+        // * Verify RHS is closed
+        cy.get('.sidebar--right__header').should('not.exist');
+
+        // # Click into the center channel post textbox
+        cy.uiGetPostTextBox().click();
+
+        // # Press shift+up to open the thread with replies
+        cy.uiGetPostTextBox().type('{shift+uparrow}', {delay: 50});
+
+        // * RHS Opens up
+        cy.get('.sidebar--right__header').should('be.visible');
+
+        // * RHS textbox should be focused
+        cy.uiGetReplyTextBox().should('be.focused');
(file name not preserved) — Cypress messaging focus spec

@@ -15,12 +15,14 @@ import * as TIMEOUTS from '../../../fixtures/timeouts';
 describe('Messaging', () => {
     let offTopicUrl;
     let testChannelName;
+    let user;
 
     before(() => {
         // # Login as test user
         cy.apiInitSetup({loginAfter: true}).then((out) => {
             offTopicUrl = out.offTopicUrl;
             testChannelName = out.channel.display_name;
+            user = out.user;
         });
     });
 
@@ -97,6 +99,43 @@ describe('Messaging', () => {
         cy.uiGetReplyTextBox().should('be.focused');
     });
 
+    it('MM-T205 Focus to remain in RHS textbox when replying to reply post in center channel (CRT disabled)', () => {
+        // # Ensure collapsed reply threads is disabled
+        cy.apiSaveCRTPreference(user.id, 'off');
+
+        // # Post a thread root message
+        cy.postMessage('Thread root message');
+
+        // # Open RHS and post a reply
+        cy.clickPostCommentIcon();
+        cy.uiGetReplyTextBox().type('First reply{enter}');
+
+        // # Close RHS
+        cy.get('#rhsCloseButton').click();
+
+        // * Verify RHS is closed
+        cy.get('.sidebar--right__header').should('not.exist');
+
+        // # Get the reply post ID and click its comment icon
+        cy.getLastPostId().then((postId) => {
+            // # Click the reply arrow on the reply post
+            cy.clickPostCommentIcon(postId);
+
+            // * Verify RHS opens and textbox is focused
+            cy.get('.sidebar--right__header').should('be.visible');
+            cy.uiGetReplyTextBox().should('be.focused');
+
+            // # Focus away from RHS textbox
+            cy.get('#rhsContainer .post-right__content').click();
+
+            // # Click reply arrow on the same reply post again
+            cy.clickPostCommentIcon(postId);
+        });
+
+        // * Verify RHS textbox is focused again
+        cy.uiGetReplyTextBox().should('be.focused');
+    });
+
     it('MM-T203 Focus does not move when it has already been set elsewhere', () => {
         // # Verify Focus in add channel member modal
         verifyFocusInAddChannelMemberModal();
@@ -157,8 +196,8 @@ function verifyFocusInAddChannelMemberModal() {
     // * Check that input box has character A
     cy.get('#selectItems input').should('have.value', 'A');
 
-    // # Click anywhere in the modal that is not on a field that can take focus
-    cy.get('#deletePostModalLabel > span').click();
+    // # Remove the focus from the input box
+    cy.get('#selectItems input').blur();
 
     // * Note the focus has been removed from the search box
     cy.get('#selectItems input').should('not.be.focused');
(file name not preserved) — config JSON

@@ -79,7 +79,7 @@
     "EnableAPITriggerAdminNotifications": false,
     "EnableAPIUserDeletion": false,
     "ExperimentalEnableHardenedMode": false,
-    "StrictCSRFEnforcement": false,
+    "ExperimentalStrictCSRFEnforcement": false,
     "EnableEmailInvitations": true,
     "DisableBotsWhenOwnerIsDeactivated": true,
     "EnableBotAccountCreation": true,
(file name not preserved) — config JSON

@@ -81,7 +81,7 @@
     "EnableAPITriggerAdminNotifications": false,
     "EnableAPIUserDeletion": false,
     "ExperimentalEnableHardenedMode": false,
-    "StrictCSRFEnforcement": false,
+    "ExperimentalStrictCSRFEnforcement": false,
     "EnableEmailInvitations": true,
     "DisableBotsWhenOwnerIsDeactivated": true,
    "EnableBotAccountCreation": true,
(file name not preserved) — default server config (TypeScript)

@@ -170,7 +170,7 @@ const defaultServerConfig: AdminConfig = {
         EnableAPIPostDeletion: false,
         EnableDesktopLandingPage: true,
         ExperimentalEnableHardenedMode: false,
-        StrictCSRFEnforcement: true,
+        ExperimentalStrictCSRFEnforcement: false,
         EnableEmailInvitations: false,
         DisableBotsWhenOwnerIsDeactivated: true,
         EnableBotAccountCreation: false,
@@ -754,7 +754,6 @@ const defaultServerConfig: AdminConfig = {
         AttributeBasedAccessControl: true,
         ContentFlagging: false,
         InteractiveDialogAppsForm: true,
-        ChannelAdminManageABACRules: false,
     },
     ImportSettings: {
         Directory: './import',
(file name not preserved) — Go version file

@@ -1 +1 @@
-1.24.5
+1.24.6
(file name not preserved) — server Makefile

@@ -78,6 +78,11 @@ else
   BUILD_TYPE_NAME = team
 endif
 
+FIPS_ENABLED ?= false
+ifeq ($(FIPS_ENABLED),true)
+  BUILD_TYPE_NAME := $(BUILD_TYPE_NAME)-fips
+endif
+
 # Clean up the old means of importing enterprise source, if it exists
 ifneq ($(wildcard channels/imports/imports.go),)
   IGNORE := $(shell rm -f channels/imports/imports.go)
@@ -106,6 +111,11 @@ GOFLAGS ?= $(GOFLAGS:)
 # for processes spawned from the Makefile
 export GOBIN ?= $(PWD)/bin
 GO ?= go
 
+ifeq ($(FIPS_ENABLED),true)
+  BUILD_TAGS += requirefips
+endif
+
 DELVE ?= dlv
 LDFLAGS += -X "github.com/mattermost/mattermost/server/public/model.BuildNumber=$(BUILD_NUMBER)"
 LDFLAGS += -X "github.com/mattermost/mattermost/server/public/model.BuildDate=$(BUILD_DATE)"
@@ -146,18 +156,28 @@ PLUGIN_PACKAGES += mattermost-plugin-calls-v1.10.0
 PLUGIN_PACKAGES += mattermost-plugin-github-v2.4.0
 PLUGIN_PACKAGES += mattermost-plugin-gitlab-v1.10.0
 PLUGIN_PACKAGES += mattermost-plugin-jira-v4.3.0
-PLUGIN_PACKAGES += mattermost-plugin-playbooks-v2.4.1
+PLUGIN_PACKAGES += mattermost-plugin-playbooks-v2.4.2
 PLUGIN_PACKAGES += mattermost-plugin-servicenow-v2.3.4
 PLUGIN_PACKAGES += mattermost-plugin-zoom-v1.8.0
 PLUGIN_PACKAGES += mattermost-plugin-agents-v1.3.1
-PLUGIN_PACKAGES += mattermost-plugin-boards-v9.1.5
-PLUGIN_PACKAGES += mattermost-plugin-msteams-v2.2.1
+PLUGIN_PACKAGES += mattermost-plugin-boards-v9.1.7
+PLUGIN_PACKAGES += mattermost-plugin-msteams-v2.2.2
 PLUGIN_PACKAGES += mattermost-plugin-user-survey-v1.1.1
 PLUGIN_PACKAGES += mattermost-plugin-mscalendar-v1.3.4
 PLUGIN_PACKAGES += mattermost-plugin-msteams-meetings-v2.2.0
 PLUGIN_PACKAGES += mattermost-plugin-metrics-v0.7.0
 PLUGIN_PACKAGES += mattermost-plugin-channel-export-v1.2.1
 
+# Overwrite the definition of PLUGIN_PACKAGES with the list of FIPS-ready plugins
+# Note that the '+' in the file name is encoded as %2B for the URL we use to
+# download the package from to work. This will no longer be needed when we unify
+# the way we pre-package FIPS and non-FIPS plugins.
+ifeq ($(FIPS_ENABLED),true)
+PLUGIN_PACKAGES = mattermost-plugin-playbooks-v2.4.2%2B4a22550-fips
+PLUGIN_PACKAGES += mattermost-plugin-agents-v1.3.1%2B6e1b6eb-fips
+PLUGIN_PACKAGES += mattermost-plugin-boards-v9.1.6%2B2b0e66a-fips
+endif
+
 EE_PACKAGES=$(shell $(GO) list $(BUILD_ENTERPRISE_DIR)/...)
 
 ifeq ($(BUILD_ENTERPRISE_READY),true)
@@ -209,9 +229,6 @@ ifneq ($(DOCKER_SERVICES_OVERRIDE),true)
   ifeq (,$(findstring openldap,$(ENABLED_DOCKER_SERVICES)))
     TEMP_DOCKER_SERVICES:=$(TEMP_DOCKER_SERVICES) openldap
   endif
-  ifeq (,$(findstring elasticsearch,$(ENABLED_DOCKER_SERVICES)))
-    TEMP_DOCKER_SERVICES:=$(TEMP_DOCKER_SERVICES) elasticsearch
-  endif
   endif
   ENABLED_DOCKER_SERVICES:=$(ENABLED_DOCKER_SERVICES) $(TEMP_DOCKER_SERVICES)
 endif
@@ -757,11 +774,9 @@ ifeq ($(BUILD_ENTERPRISE_READY),true)
 endif
 
 vet: ## Run mattermost go vet specific checks
 ## Note that it is pinned to a specific commit, rather than a branch. This is to prevent
 ## having to backport the fix to multiple release branches for any new change.
 	$(GO) install github.com/mattermost/mattermost-govet/v2@7d8db289e508999dfcac47b97c9490a0fec12d66
 	$(GO) vet -vettool=$(GOBIN)/mattermost-govet -structuredLogging -inconsistentReceiverName -emptyStrCmp -tFatal -configtelemetry -errorAssertions -requestCtxNaming -license -inconsistentReceiverName.ignore=session_serial_gen.go,team_member_serial_gen.go,user_serial_gen.go,utils_serial_gen.go ./...
 ifeq ($(BUILD_ENTERPRISE_READY),true)
 ifeq ($(BUILD_ENTERPRISE_READY),true)
 ifneq ($(MM_NO_ENTERPRISE_LINT),true)
 	$(GO) vet -vettool=$(GOBIN)/mattermost-govet -structuredLogging -inconsistentReceiverName -emptyStrCmp -tFatal -configtelemetry -errorAssertions -requestCtxNaming -enterpriseLicense $(BUILD_ENTERPRISE_DIR)/...
 endif
(file name not preserved) — server Dockerfile

@@ -7,7 +7,7 @@ SHELL ["/bin/bash", "-o", "pipefail", "-c"]
 ARG PUID=2000
 ARG PGID=2000
 # MM_PACKAGE build arguments controls which version of mattermost to install, defaults to latest stable enterprise
-# i.e. https://releases.mattermost.com/9.7.1/mattermost-9.7.1-linux-amd64.tar.gz
+# e.g. https://releases.mattermost.com/9.7.1/mattermost-9.7.1-linux-amd64.tar.gz
 ARG MM_PACKAGE="https://latest.mattermost.com/mattermost-enterprise-linux"
 
 # Install needed packages and indirect dependencies
(file name not preserved) — build environment Dockerfile

@@ -1,4 +1,4 @@
-FROM golang:1.24.5-bullseye@sha256:62ba6b19de03e891f7fa1001326bd48411f2626ff35e7ba5b9d890711ce581d9
+FROM golang:1.24.6-bullseye@sha256:cf78ce8205287fdb2ca403aac77d68965c75734749e560c577c00e20ecb11954
 ARG NODE_VERSION=20.11.1
 
 RUN apt-get update && apt-get install -y make git apt-transport-https ca-certificates curl software-properties-common build-essential zip xmlsec1 jq pgloader gnupg
(file name not preserved) — FIPS build environment Dockerfile

@@ -1,4 +1,4 @@
-FROM cgr.dev/mattermost.com/go-msft-fips:1.24.5-dev@sha256:d7b2872c129277c01447903b7fde7a186fe211b59613172a7e40a3cc0dc5f126
+FROM cgr.dev/mattermost.com/go-msft-fips:1.24.6-dev@sha256:53d076b1cfa53f8189c4723d813d711d92107c2e8b140805c71e39f4a06dc9cc
 ARG NODE_VERSION=20.11.1
 
 RUN apk add curl ca-certificates mailcap unrtf wv poppler-utils tzdata gpg xmlsec
server/build/Dockerfile.fips — 90 lines (new file)

@@ -0,0 +1,90 @@
# First stage - FIPS dev image with dependencies for building
FROM cgr.dev/mattermost.com/glibc-openssl-fips:15-dev@sha256:9223f9245fb026a3c255ce9b7028a069fe11432aa7710713a331eaa36f44851c AS builder
# Setting bash as our shell, and enabling pipefail option
SHELL ["/bin/bash", "-o", "pipefail", "-c"]

# Build Arguments
ARG PUID=2000
ARG PGID=2000
# MM_PACKAGE build arguments controls which version of mattermost to install, defaults to latest stable enterprise
# e.g. https://releases.mattermost.com/9.7.1/mattermost-9.7.1-linux-amd64.tar.gz
ARG MM_PACKAGE="https://latest.mattermost.com/mattermost-enterprise-linux"

# Install needed packages and indirect dependencies
USER 0:0
RUN apk add \
    curl \
    ca-certificates \
    mailcap \
    unrtf \
    wv \
    poppler-utils \
    tzdata

# Set mattermost group/user and download Mattermost
RUN mkdir -p /mattermost/data /mattermost/plugins /mattermost/client/plugins \
    && addgroup -g ${PGID} mattermost \
    && adduser -D -u ${PUID} -G mattermost -g "" -s /bin/sh -h /mattermost mattermost \
    && curl -L $MM_PACKAGE | tar -xvz \
    && chown -R mattermost:mattermost /mattermost /mattermost/data /mattermost/plugins /mattermost/client/plugins

# Create PostgreSQL client SSL directory structure for ssl_mode=require
RUN mkdir -p /mattermost/.postgresql \
    && chmod 700 /mattermost/.postgresql

# Create /var/tmp directory needed for local socket files
RUN mkdir -p /var/tmp \
    && chmod 755 /var/tmp

# Final stage using FIPS runtime image
FROM cgr.dev/mattermost.com/glibc-openssl-fips:15@sha256:7947eecc0d82fa3bc661aaca039bcd86d55fdf3ee581c8ecdef1b3c6f63fa83a

# Some ENV variables
ENV PATH="/mattermost/bin:${PATH}"
ENV MM_SERVICESETTINGS_ENABLELOCALMODE="true"

# Copy over metadata files needed by runtime
COPY --from=builder /etc/mime.types /etc

# Copy CA certificates for SSL/TLS validation with proper ownership
COPY --from=builder --chown=2000:2000 /etc/ssl/certs /etc/ssl/certs

# Copy document processing utilities and necessary support files
COPY --from=builder /usr/bin/pdftotext /usr/bin/pdftotext
COPY --from=builder /usr/bin/wvText /usr/bin/wvText
COPY --from=builder /usr/bin/wvWare /usr/bin/wvWare
COPY --from=builder /usr/bin/unrtf /usr/bin/unrtf
COPY --from=builder /usr/share/wv /usr/share/wv

# Copy necessary libraries for document processing utilities
COPY --from=builder /usr/lib/libpoppler.so* /usr/lib/
COPY --from=builder /usr/lib/libfreetype.so* /usr/lib/
COPY --from=builder /usr/lib/libpng16.so* /usr/lib/
COPY --from=builder /usr/lib/libwv.so* /usr/lib/
COPY --from=builder /usr/lib/libfontconfig.so* /usr/lib/

# Copy mattermost from builder stage
COPY --from=builder --chown=2000:2000 /mattermost /mattermost

# Copy group and passwd files including mattermost user
COPY --from=builder /etc/passwd /etc/passwd
COPY --from=builder /etc/group /etc/group

# Copy /var/tmp directory needed for local socket files
COPY --from=builder --chown=2000:2000 /var/tmp /var/tmp

# We should refrain from running as privileged user
USER mattermost

# Healthcheck to make sure container is ready - using mmctl instead of curl for distroless compatibility
HEALTHCHECK --interval=30s --timeout=10s \
    CMD ["/mattermost/bin/mmctl", "system", "status", "--local"]

# Configure entrypoint and command with proper permissions
WORKDIR /mattermost
CMD ["/mattermost/bin/mattermost"]

EXPOSE 8065 8067 8074 8075

# Declare volumes for mount point directories
VOLUME ["/mattermost/data", "/mattermost/logs", "/mattermost/config", "/mattermost/plugins", "/mattermost/client/plugins"]
(file name not preserved) — docker compose for tests

@@ -1,6 +1,6 @@
 services:
   postgres:
-    image: "postgres:13"
+    image: "postgres:14"
     restart: always
     networks:
       - mm-test
@@ -8,6 +8,7 @@ services:
       POSTGRES_USER: mmuser
       POSTGRES_PASSWORD: mostest
       POSTGRES_DB: mattermost_test
+      POSTGRES_INITDB_ARGS: "--auth-host=scram-sha-256 --auth-local=scram-sha-256"
     command: postgres -c 'config_file=/etc/postgresql/postgresql.conf'
     volumes:
       - "./docker/postgres.conf:/etc/postgresql/postgresql.conf"
(file name not preserved) — test Postgres Dockerfile

@@ -1,6 +1,6 @@
 # Copyright (c) 2016-present Mattermost, Inc. All Rights Reserved.
 # See License.txt for license information.
-FROM postgres:13
+FROM postgres:14
 
 RUN apt-get update && apt-get install -y ca-certificates
 
(file name not preserved) — test postgres.conf

@@ -5,3 +5,4 @@ full_page_writes = off
 default_text_search_config = 'pg_catalog.english'
 commit_delay=1000
 logging_collector=off
+password_encryption = 'scram-sha-256'
(file name not preserved) — build/release Makefile

@@ -10,8 +10,20 @@ else
 	mkdir -p $(GOBIN)/linux_amd64
 	env GOOS=linux GOARCH=amd64 $(GO) build -o $(GOBIN)/linux_amd64 $(GOFLAGS) -trimpath -tags '$(BUILD_TAGS) production' -ldflags '$(LDFLAGS)' ./...
 endif
+ifeq ($(FIPS_ENABLED),true)
+	@echo Verifying Build Linux amd64 for FIPS
+	$(GO) version -m $(GOBIN)/$(MM_BIN_NAME) | grep -q "GOEXPERIMENT=systemcrypto" || (echo "ERROR: FIPS mattermost binary missing GOEXPERIMENT=systemcrypto" && exit 1)
+	$(GO) version -m $(GOBIN)/$(MM_BIN_NAME) | grep "\-tags" | grep -q "requirefips" || (echo "ERROR: FIPS mattermost binary missing -tags=requirefips" && exit 1)
+	$(GO) tool nm $(GOBIN)/$(MM_BIN_NAME) | grep -q "func_go_openssl_OpenSSL_version" || (echo "ERROR: FIPS mattermost binary missing OpenSSL integration" && exit 1)
+	$(GO) version -m $(GOBIN)/$(MMCTL_BIN_NAME) | grep -q "GOEXPERIMENT=systemcrypto" || (echo "ERROR: FIPS mmctl binary missing GOEXPERIMENT=systemcrypto" && exit 1)
+	$(GO) version -m $(GOBIN)/$(MMCTL_BIN_NAME) | grep "\-tags" | grep -q "requirefips" || (echo "ERROR: FIPS mmctl binary missing -tags=requirefips" && exit 1)
+	$(GO) tool nm $(GOBIN)/$(MMCTL_BIN_NAME) | grep -q "func_go_openssl_OpenSSL_version" || (echo "ERROR: FIPS mmctl binary missing OpenSSL integration" && exit 1)
+endif
 
 build-linux-arm64:
+ifeq ($(FIPS_ENABLED),true)
+	@echo Skipping Build Linux arm64 for FIPS
+else
 	@echo Build Linux arm64
 ifeq ($(BUILDER_GOOS_GOARCH),"linux_arm64")
 	env GOOS=linux GOARCH=arm64 $(GO) build -o $(GOBIN) $(GOFLAGS) -trimpath -tags '$(BUILD_TAGS) production' -ldflags '$(LDFLAGS)' ./...
@@ -19,6 +31,7 @@ else
 	mkdir -p $(GOBIN)/linux_arm64
 	env GOOS=linux GOARCH=arm64 $(GO) build -o $(GOBIN)/linux_arm64 $(GOFLAGS) -trimpath -tags '$(BUILD_TAGS) production' -ldflags '$(LDFLAGS)' ./...
 endif
+endif
 
 build-osx:
 	@echo Build OSX amd64
@@ -53,6 +66,18 @@ else
 	mkdir -p $(GOBIN)/linux_amd64
 	env GOOS=linux GOARCH=amd64 $(GO) build -o $(GOBIN)/linux_amd64 $(GOFLAGS) -trimpath -tags '$(BUILD_TAGS) production' -ldflags '$(LDFLAGS)' ./cmd/...
 endif
+ifeq ($(FIPS_ENABLED),true)
+	@echo Verifying Build Linux amd64 for FIPS
+	$(GO) version -m $(GOBIN)/mattermost | grep -q "GOEXPERIMENT=systemcrypto" || (echo "ERROR: FIPS mattermost binary missing GOEXPERIMENT=systemcrypto" && exit 1)
+	$(GO) version -m $(GOBIN)/mattermost | grep "\-tags" | grep -q "requirefips" || (echo "ERROR: FIPS mattermost binary missing -tags=requirefips" && exit 1)
+	$(GO) tool nm $(GOBIN)/mattermost | grep -q "func_go_openssl_OpenSSL_version" || (echo "ERROR: FIPS mattermost binary missing OpenSSL integration" && exit 1)
+	$(GO) version -m $(GOBIN)/mmctl | grep -q "GOEXPERIMENT=systemcrypto" || (echo "ERROR: FIPS mmctl binary missing GOEXPERIMENT=systemcrypto" && exit 1)
+	$(GO) version -m $(GOBIN)/mmctl | grep "\-tags" | grep -q "requirefips" || (echo "ERROR: FIPS mmctl binary missing -tags=requirefips" && exit 1)
+	$(GO) tool nm $(GOBIN)/mmctl | grep -q "func_go_openssl_OpenSSL_version" || (echo "ERROR: FIPS mmctl binary missing OpenSSL integration" && exit 1)
+endif
+ifeq ($(FIPS_ENABLED),true)
+	@echo Skipping Build Linux arm64 for FIPS
+else
 	@echo Build CMD Linux arm64
 ifeq ($(BUILDER_GOOS_GOARCH),"linux_arm64")
 	env GOOS=linux GOARCH=arm64 $(GO) build -o $(GOBIN) $(GOFLAGS) -trimpath -tags '$(BUILD_TAGS) production' -ldflags '$(LDFLAGS)' ./cmd/...
@@ -60,6 +85,7 @@ else
 	mkdir -p $(GOBIN)/linux_arm64
 	env GOOS=linux GOARCH=arm64 $(GO) build -o $(GOBIN)/linux_arm64 $(GOFLAGS) -trimpath -tags '$(BUILD_TAGS) production' -ldflags '$(LDFLAGS)' ./cmd/...
 endif
+endif
 
 build-cmd-osx:
 	@echo Build CMD OSX amd64
@@ -141,8 +167,9 @@ endif
 	fi
 
 fetch-prepackaged-plugins:
-	@# Import Mattermost plugin public key
-	gpg --import build/plugin-production-public-key.gpg
+	@# Import Mattermost plugin public key, ignoring errors. In FIPS mode, GPG fails to start
+	@# the gpg-agent, but still imports the key. If it really fails, it will fail validation later.
+	-gpg --import build/plugin-production-public-key.gpg
 	@# Download prepackaged plugins
 	mkdir -p tmpprepackaged
 	@echo "Downloading prepackaged plugins ... "
@@ -205,11 +232,15 @@ package-linux-amd64: package-prep
 	rm -rf $(DIST_ROOT)/linux_amd64
 
 package-linux-arm64: package-prep
+ifeq ($(FIPS_ENABLED),true)
+	@echo Skipping package linux arm64 for FIPS
+else
 	DIST_PATH_GENERIC=$(DIST_PATH_LIN_ARM64) CURRENT_PACKAGE_ARCH=linux_arm64 MM_BIN_NAME=mattermost MMCTL_BIN_NAME=mmctl $(MAKE) package-general
 	@# Package
 	tar -C $(DIST_PATH_LIN_ARM64)/.. -czf $(DIST_PATH)-$(BUILD_TYPE_NAME)-linux-arm64.tar.gz mattermost ../mattermost
 	@# Cleanup
 	rm -rf $(DIST_ROOT)/linux_arm64
+endif
 
 package-linux: package-linux-amd64 package-linux-arm64
|
|
@ -22,6 +22,7 @@ func (api *API) InitAccessControlPolicy() {
|
|||
|
||||
api.BaseRoutes.AccessControlPolicies.Handle("/cel/check", api.APISessionRequired(checkExpression)).Methods(http.MethodPost)
|
||||
api.BaseRoutes.AccessControlPolicies.Handle("/cel/test", api.APISessionRequired(testExpression)).Methods(http.MethodPost)
|
||||
api.BaseRoutes.AccessControlPolicies.Handle("/cel/validate_requester", api.APISessionRequired(validateExpressionAgainstRequester)).Methods(http.MethodPost)
|
||||
api.BaseRoutes.AccessControlPolicies.Handle("/cel/autocomplete/fields", api.APISessionRequired(getFieldsAutocomplete)).Methods(http.MethodGet)
|
||||
api.BaseRoutes.AccessControlPolicies.Handle("/cel/visual_ast", api.APISessionRequired(convertToVisualAST)).Methods(http.MethodPost)
|
||||
|
||||
|
|
@ -56,13 +57,6 @@ func createAccessControlPolicy(c *Context, w http.ResponseWriter, r *http.Reques
|
|||
hasManageSystemPermission := c.App.SessionHasPermissionTo(*c.AppContext.Session(), model.PermissionManageSystem)
|
||||
|
||||
if !hasManageSystemPermission {
|
||||
// FEATURE_FLAG_REMOVAL: ChannelAdminManageABACRules - Remove this check when feature is GA
|
||||
if !c.App.Config().FeatureFlags.ChannelAdminManageABACRules {
|
||||
c.Err = model.NewAppError("createAccessControlPolicy", "api.not_implemented", nil, "", http.StatusNotImplemented)
|
||||
return
|
||||
}
|
||||
// END FEATURE_FLAG_REMOVAL: ChannelAdminManageABACRules
|
||||
|
||||
// For non-system admins, check channel-specific permission
|
||||
if !model.IsValidId(policy.ID) {
|
||||
c.SetInvalidParam("policy.id")
|
||||
|
|
@ -120,13 +114,6 @@ func getAccessControlPolicy(c *Context, w http.ResponseWriter, r *http.Request)
|
|||
// Check if user has system admin permission OR channel-specific permission
|
||||
hasManageSystemPermission := c.App.SessionHasPermissionTo(*c.AppContext.Session(), model.PermissionManageSystem)
|
||||
if !hasManageSystemPermission {
|
||||
// FEATURE_FLAG_REMOVAL: ChannelAdminManageABACRules - Remove this check when feature is GA
|
||||
if !c.App.Config().FeatureFlags.ChannelAdminManageABACRules {
|
||||
c.Err = model.NewAppError("getAccessControlPolicy", "api.not_implemented", nil, "", http.StatusNotImplemented)
|
||||
return
|
||||
}
|
||||
// END FEATURE_FLAG_REMOVAL: ChannelAdminManageABACRules
|
||||
|
||||
// For non-system admins, validate policy access permission (read-only access for GET requests)
|
||||
if appErr := c.App.ValidateAccessControlPolicyPermissionWithChannelContext(c.AppContext, c.AppContext.Session().UserId, policyID, true, channelID); appErr != nil {
|
||||
c.SetPermissionError(model.PermissionManageSystem)
|
||||
|
|
@ -165,13 +152,6 @@ func deleteAccessControlPolicy(c *Context, w http.ResponseWriter, r *http.Reques
|
|||
// Check if user has system admin permission OR channel-specific permission
|
||||
hasManageSystemPermission := c.App.SessionHasPermissionTo(*c.AppContext.Session(), model.PermissionManageSystem)
|
||||
if !hasManageSystemPermission {
|
||||
// FEATURE_FLAG_REMOVAL: ChannelAdminManageABACRules - Remove this check when feature is GA
|
||||
if !c.App.Config().FeatureFlags.ChannelAdminManageABACRules {
|
||||
c.Err = model.NewAppError("deleteAccessControlPolicy", "api.not_implemented", nil, "", http.StatusNotImplemented)
|
||||
return
|
||||
}
|
||||
// END FEATURE_FLAG_REMOVAL: ChannelAdminManageABACRules
|
||||
|
||||
// For non-system admins, validate policy access permission
|
||||
if appErr := c.App.ValidateAccessControlPolicyPermission(c.AppContext, c.AppContext.Session().UserId, policyID); appErr != nil {
|
||||
c.SetPermissionError(model.PermissionManageSystem)
|
||||
|
|
@ -215,12 +195,6 @@ func checkExpression(c *Context, w http.ResponseWriter, r *http.Request) {
|
|||
return
|
||||
}
|
||||
|
||||
// FEATURE_FLAG_REMOVAL: ChannelAdminManageABACRules - Remove this check when feature is GA
|
||||
if !c.App.Config().FeatureFlags.ChannelAdminManageABACRules {
|
||||
c.Err = model.NewAppError("checkExpression", "api.not_implemented", nil, "", http.StatusNotImplemented)
|
||||
return
|
||||
}
|
||||
|
||||
// SECURE: Check specific channel permission
|
||||
hasChannelPermission := c.App.HasPermissionToChannel(c.AppContext, c.AppContext.Session().UserId, channelId, model.PermissionManageChannelAccessRules)
|
||||
if !hasChannelPermission {
|
||||
|
|
@ -269,12 +243,6 @@ func testExpression(c *Context, w http.ResponseWriter, r *http.Request) {
|
|||
return
|
||||
}
|
||||
|
||||
// FEATURE_FLAG_REMOVAL: ChannelAdminManageABACRules - Remove this check when feature is GA
|
||||
if !c.App.Config().FeatureFlags.ChannelAdminManageABACRules {
|
||||
c.Err = model.NewAppError("testExpression", "api.not_implemented", nil, "", http.StatusNotImplemented)
|
||||
return
|
||||
}
|
||||
|
||||
// SECURE: Check specific channel permission
|
||||
hasChannelPermission := c.App.HasPermissionToChannel(c.AppContext, c.AppContext.Session().UserId, channelId, model.PermissionManageChannelAccessRules)
|
||||
if !hasChannelPermission {
|
||||
|
|
@@ -283,13 +251,26 @@ func testExpression(c *Context, w http.ResponseWriter, r *http.Request) {
		}
	}

	users, count, appErr := c.App.TestExpression(c.AppContext, checkExpressionRequest.Expression, model.SubjectSearchOptions{
	var users []*model.User
	var count int64
	var appErr *model.AppError

	searchOpts := model.SubjectSearchOptions{
		Term:  checkExpressionRequest.Term,
		Limit: checkExpressionRequest.Limit,
		Cursor: model.SubjectCursor{
			TargetID: checkExpressionRequest.After,
		},
	})
	}

	if hasSystemPermission {
		// SYSTEM ADMIN: Can see ALL users (no restrictions)
		users, count, appErr = c.App.TestExpression(c.AppContext, checkExpressionRequest.Expression, searchOpts)
	} else {
		// CHANNEL ADMIN: Only see users matching expressions with attributes they possess
		users, count, appErr = c.App.TestExpressionWithChannelContext(c.AppContext, checkExpressionRequest.Expression, searchOpts)
	}

	if appErr != nil {
		c.Err = appErr
		return
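Both branches above evaluate the same expression with the same model.SubjectSearchOptions value; only the application call differs. A hedged sketch of that dispatch as a single function, assuming the api4 package context from this file (runTestExpression is a hypothetical name, not in the diff):

```go
// Hypothetical helper, illustration only: mirrors the branch in testExpression.
func runTestExpression(c *Context, expr string, opts model.SubjectSearchOptions, isSystemAdmin bool) ([]*model.User, int64, *model.AppError) {
	if isSystemAdmin {
		// System admins evaluate the expression against all users.
		return c.App.TestExpression(c.AppContext, expr, opts)
	}
	// Channel admins only see users whose attributes they are allowed to read.
	return c.App.TestExpressionWithChannelContext(c.AppContext, expr, opts)
}
```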
@ -311,6 +292,60 @@ func testExpression(c *Context, w http.ResponseWriter, r *http.Request) {
|
|||
}
|
||||
}
|
||||
|
||||
func validateExpressionAgainstRequester(c *Context, w http.ResponseWriter, r *http.Request) {
|
||||
var request struct {
|
||||
Expression string `json:"expression"`
|
||||
ChannelId string `json:"channelId,omitempty"`
|
||||
}
|
||||
|
||||
if jsonErr := json.NewDecoder(r.Body).Decode(&request); jsonErr != nil {
|
||||
c.SetInvalidParamWithErr("request", jsonErr)
|
||||
return
|
||||
}
|
||||
|
||||
// Get channelId from request body (required for channel-specific permission check)
|
||||
channelId := request.ChannelId
|
||||
if channelId != "" && !model.IsValidId(channelId) {
|
||||
c.SetInvalidParam("channelId")
|
||||
return
|
||||
}
|
||||
|
||||
// Check permissions: system admin OR channel-specific permission
|
||||
hasSystemPermission := c.App.SessionHasPermissionTo(*c.AppContext.Session(), model.PermissionManageSystem)
|
||||
if !hasSystemPermission {
|
||||
// For channel admins, channelId is required
|
||||
if channelId == "" {
|
||||
c.SetPermissionError(model.PermissionManageSystem)
|
||||
return
|
||||
}
|
||||
|
||||
// SECURE: Check specific channel permission
|
||||
hasChannelPermission := c.App.HasPermissionToChannel(c.AppContext, c.AppContext.Session().UserId, channelId, model.PermissionManageChannelAccessRules)
|
||||
if !hasChannelPermission {
|
||||
c.SetPermissionError(model.PermissionManageChannelAccessRules)
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
// Direct validation against requester
|
||||
matches, appErr := c.App.ValidateExpressionAgainstRequester(c.AppContext, request.Expression, c.AppContext.Session().UserId)
|
||||
if appErr != nil {
|
||||
c.Err = appErr
|
||||
return
|
||||
}
|
||||
|
||||
response := struct {
|
||||
RequesterMatches bool `json:"requester_matches"`
|
||||
}{
|
||||
RequesterMatches: matches,
|
||||
}
|
||||
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
if err := json.NewEncoder(w).Encode(response); err != nil {
|
||||
c.Logger.Warn("Error while writing response", mlog.Err(err))
|
||||
}
|
||||
}
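For reference, the new /cel/validate_requester endpoint above accepts `expression` and an optional `channelId` and answers with `requester_matches`. A rough client-side sketch, not part of the diff; the `/api/v4/access_control_policies` base path is an assumption about where BaseRoutes.AccessControlPolicies is mounted, so adjust to the real route:

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

// validateRequester posts an expression to the (assumed) validate_requester route
// and reports whether the calling user matches it.
func validateRequester(serverURL, token, expression, channelID string) (bool, error) {
	payload, err := json.Marshal(map[string]string{
		"expression": expression,
		"channelId":  channelID, // optional for system admins, required for channel admins
	})
	if err != nil {
		return false, err
	}

	req, err := http.NewRequest(http.MethodPost,
		serverURL+"/api/v4/access_control_policies/cel/validate_requester", bytes.NewReader(payload))
	if err != nil {
		return false, err
	}
	req.Header.Set("Authorization", "Bearer "+token)
	req.Header.Set("Content-Type", "application/json")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		return false, err
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		return false, fmt.Errorf("unexpected status: %d", resp.StatusCode)
	}

	var out struct {
		RequesterMatches bool `json:"requester_matches"`
	}
	if err := json.NewDecoder(resp.Body).Decode(&out); err != nil {
		return false, err
	}
	return out.RequesterMatches, nil
}
```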
|
||||
|
||||
func searchAccessControlPolicies(c *Context, w http.ResponseWriter, r *http.Request) {
|
||||
if !c.App.SessionHasPermissionTo(*c.AppContext.Session(), model.PermissionManageSystem) {
|
||||
c.SetPermissionError(model.PermissionManageSystem)
|
||||
|
|
@ -357,13 +392,6 @@ func updateActiveStatus(c *Context, w http.ResponseWriter, r *http.Request) {
|
|||
// Check if user has system admin permission OR channel-specific permission for this policy
|
||||
hasManageSystemPermission := c.App.SessionHasPermissionTo(*c.AppContext.Session(), model.PermissionManageSystem)
|
||||
if !hasManageSystemPermission {
|
||||
// FEATURE_FLAG_REMOVAL: ChannelAdminManageABACRules - Remove this check when feature is GA
|
||||
if !c.App.Config().FeatureFlags.ChannelAdminManageABACRules {
|
||||
c.Err = model.NewAppError("updateActiveStatus", "api.not_implemented", nil, "", http.StatusNotImplemented)
|
||||
return
|
||||
}
|
||||
// END FEATURE_FLAG_REMOVAL: ChannelAdminManageABACRules
|
||||
|
||||
// For non-system admins, validate policy access permission
|
||||
if appErr := c.App.ValidateAccessControlPolicyPermission(c.AppContext, c.AppContext.Session().UserId, policyID); appErr != nil {
|
||||
c.SetPermissionError(model.PermissionManageSystem)
|
||||
|
|
@ -394,6 +422,16 @@ func updateActiveStatus(c *Context, w http.ResponseWriter, r *http.Request) {
|
|||
}
|
||||
|
||||
auditRec.Success()
|
||||
|
||||
// Return success response
|
||||
response := map[string]any{
|
||||
"status": "OK",
|
||||
}
|
||||
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
if err := json.NewEncoder(w).Encode(response); err != nil {
|
||||
c.Logger.Warn("Error while writing response", mlog.Err(err))
|
||||
}
|
||||
}
|
||||
|
||||
func assignAccessPolicy(c *Context, w http.ResponseWriter, r *http.Request) {
|
||||
|
|
@ -585,12 +623,6 @@ func getFieldsAutocomplete(c *Context, w http.ResponseWriter, r *http.Request) {
|
|||
return
|
||||
}
|
||||
|
||||
// FEATURE_FLAG_REMOVAL: ChannelAdminManageABACRules - Remove this check when feature is GA
|
||||
if !c.App.Config().FeatureFlags.ChannelAdminManageABACRules {
|
||||
c.Err = model.NewAppError("getFieldsAutocomplete", "api.not_implemented", nil, "", http.StatusNotImplemented)
|
||||
return
|
||||
}
|
||||
|
||||
// SECURE: Check specific channel permission
|
||||
hasChannelPermission := c.App.HasPermissionToChannel(c.AppContext, c.AppContext.Session().UserId, channelId, model.PermissionManageChannelAccessRules)
|
||||
if !hasChannelPermission {
|
||||
|
|
@ -618,7 +650,11 @@ func getFieldsAutocomplete(c *Context, w http.ResponseWriter, r *http.Request) {
|
|||
return
|
||||
}
|
||||
|
||||
ac, appErr := c.App.GetAccessControlFieldsAutocomplete(c.AppContext, after, limit)
|
||||
var ac []*model.PropertyField
|
||||
var appErr *model.AppError
|
||||
|
||||
ac, appErr = c.App.GetAccessControlFieldsAutocomplete(c.AppContext, after, limit)
|
||||
|
||||
if appErr != nil {
|
||||
c.Err = appErr
|
||||
return
|
||||
|
|
@ -661,12 +697,6 @@ func convertToVisualAST(c *Context, w http.ResponseWriter, r *http.Request) {
|
|||
return
|
||||
}
|
||||
|
||||
// FEATURE_FLAG_REMOVAL: ChannelAdminManageABACRules - Remove this check when feature is GA
|
||||
if !c.App.Config().FeatureFlags.ChannelAdminManageABACRules {
|
||||
c.Err = model.NewAppError("convertToVisualAST", "api.not_implemented", nil, "", http.StatusNotImplemented)
|
||||
return
|
||||
}
|
||||
|
||||
// SECURE: Check specific channel permission
|
||||
hasChannelPermission := c.App.HasPermissionToChannel(c.AppContext, c.AppContext.Session().UserId, channelId, model.PermissionManageChannelAccessRules)
|
||||
if !hasChannelPermission {
|
@ -14,6 +14,7 @@ func (api *API) InitAccessControlPolicyLocal() {
|
|||
|
||||
api.BaseRoutes.AccessControlPolicies.Handle("/cel/check", api.APILocal(checkExpression)).Methods(http.MethodPost)
|
||||
api.BaseRoutes.AccessControlPolicies.Handle("/cel/test", api.APILocal(testExpression)).Methods(http.MethodPost)
|
||||
api.BaseRoutes.AccessControlPolicies.Handle("/cel/validate_requester", api.APILocal(validateExpressionAgainstRequester)).Methods(http.MethodPost)
|
||||
api.BaseRoutes.AccessControlPolicies.Handle("/cel/autocomplete/fields", api.APILocal(getFieldsAutocomplete)).Methods(http.MethodGet)
|
||||
api.BaseRoutes.AccessControlPolicies.Handle("/cel/visual_ast", api.APILocal(convertToVisualAST)).Methods(http.MethodPost)
@ -16,14 +16,10 @@ import (
|
|||
|
||||
func TestCreateAccessControlPolicy(t *testing.T) {
|
||||
os.Setenv("MM_FEATUREFLAGS_ATTRIBUTEBASEDACCESSCONTROL", "true")
|
||||
// FEATURE_FLAG_REMOVAL: ChannelAdminManageABACRules - Remove this env var when feature is GA
|
||||
os.Setenv("MM_FEATUREFLAGS_CHANNELADMINMANAGEABACRULES", "true")
|
||||
th := Setup(t).InitBasic()
|
||||
t.Cleanup(func() {
|
||||
th.TearDown()
|
||||
os.Unsetenv("MM_FEATUREFLAGS_ATTRIBUTEBASEDACCESSCONTROL")
|
||||
// FEATURE_FLAG_REMOVAL: ChannelAdminManageABACRules - Remove this unsetenv when feature is GA
|
||||
os.Unsetenv("MM_FEATUREFLAGS_CHANNELADMINMANAGEABACRULES")
|
||||
})
|
||||
|
||||
samplePolicy := &model.AccessControlPolicy{
|
||||
|
|
@ -319,7 +315,7 @@ func TestGetAccessControlPolicy(t *testing.T) {
|
|||
|
||||
_, resp, err := th.Client.GetAccessControlPolicy(context.Background(), samplePolicy.ID)
|
||||
require.Error(t, err)
|
||||
CheckNotImplementedStatus(t, resp)
|
||||
CheckForbiddenStatus(t, resp)
|
||||
})
|
||||
|
||||
th.TestForSystemAdminAndLocal(t, func(t *testing.T, client *model.Client4) {
|
||||
|
|
@ -385,7 +381,7 @@ func TestDeleteAccessControlPolicy(t *testing.T) {
|
|||
|
||||
resp, err := th.Client.DeleteAccessControlPolicy(context.Background(), samplePolicyID)
|
||||
require.Error(t, err)
|
||||
CheckNotImplementedStatus(t, resp)
|
||||
CheckForbiddenStatus(t, resp)
|
||||
})
|
||||
|
||||
th.TestForSystemAdminAndLocal(t, func(t *testing.T, client *model.Client4) {
|
||||
|
|
@ -479,10 +475,6 @@ func TestCheckExpression(t *testing.T) {
|
|||
}, "CheckExpression with system admin errors returned")
|
||||
|
||||
t.Run("CheckExpression with channel admin for their channel", func(t *testing.T) {
|
||||
// FEATURE_FLAG_REMOVAL: ChannelAdminManageABACRules - Remove this env var when feature is GA
|
||||
os.Setenv("MM_FEATUREFLAGS_CHANNELADMINMANAGEABACRULES", "true")
|
||||
defer os.Unsetenv("MM_FEATUREFLAGS_CHANNELADMINMANAGEABACRULES")
|
||||
|
||||
// Reload config to pick up the feature flag
|
||||
err := th.App.ReloadConfig()
|
||||
require.NoError(t, err)
@ -34,7 +34,6 @@ func (api *API) InitChannel() {
|
|||
api.BaseRoutes.ChannelsForTeam.Handle("/private", api.APISessionRequired(getPrivateChannelsForTeam)).Methods(http.MethodGet)
|
||||
api.BaseRoutes.ChannelsForTeam.Handle("/ids", api.APISessionRequired(getPublicChannelsByIdsForTeam)).Methods(http.MethodPost)
|
||||
api.BaseRoutes.ChannelsForTeam.Handle("/search", api.APISessionRequiredDisableWhenBusy(searchChannelsForTeam)).Methods(http.MethodPost)
|
||||
api.BaseRoutes.ChannelsForTeam.Handle("/search_archived", api.APISessionRequiredDisableWhenBusy(searchArchivedChannelsForTeam)).Methods(http.MethodPost)
|
||||
api.BaseRoutes.ChannelsForTeam.Handle("/autocomplete", api.APISessionRequired(autocompleteChannelsForTeam)).Methods(http.MethodGet)
|
||||
api.BaseRoutes.ChannelsForTeam.Handle("/search_autocomplete", api.APISessionRequired(autocompleteChannelsForTeamForSearch)).Methods(http.MethodGet)
|
||||
api.BaseRoutes.User.Handle("/teams/{team_id:[A-Za-z0-9]+}/channels", api.APISessionRequired(getChannelsForTeamForUser)).Methods(http.MethodGet)
|
||||
|
|
@ -1235,45 +1234,6 @@ func searchChannelsForTeam(c *Context, w http.ResponseWriter, r *http.Request) {
|
|||
}
|
||||
}
|
||||
|
||||
func searchArchivedChannelsForTeam(c *Context, w http.ResponseWriter, r *http.Request) {
|
||||
c.RequireTeamId()
|
||||
if c.Err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
var props *model.ChannelSearch
|
||||
err := json.NewDecoder(r.Body).Decode(&props)
|
||||
if err != nil || props == nil {
|
||||
c.SetInvalidParamWithErr("channel_search", err)
|
||||
return
|
||||
}
|
||||
|
||||
var channels model.ChannelList
|
||||
var appErr *model.AppError
|
||||
if c.App.SessionHasPermissionToTeam(*c.AppContext.Session(), c.Params.TeamId, model.PermissionListTeamChannels) {
|
||||
channels, appErr = c.App.SearchArchivedChannels(c.AppContext, c.Params.TeamId, props.Term, c.AppContext.Session().UserId)
|
||||
} else {
|
||||
// If the user is not a team member, return a 404
|
||||
if _, appErr = c.App.GetTeamMember(c.AppContext, c.Params.TeamId, c.AppContext.Session().UserId); appErr != nil {
|
||||
c.Err = appErr
|
||||
return
|
||||
}
|
||||
|
||||
channels, appErr = c.App.SearchArchivedChannels(c.AppContext, c.Params.TeamId, props.Term, c.AppContext.Session().UserId)
|
||||
}
|
||||
|
||||
if appErr != nil {
|
||||
c.Err = appErr
|
||||
return
|
||||
}
|
||||
|
||||
// Don't fill in channels props, since unused by client and potentially expensive.
|
||||
|
||||
if err := json.NewEncoder(w).Encode(channels); err != nil {
|
||||
c.Logger.Warn("Error while writing response", mlog.Err(err))
|
||||
}
|
||||
}
|
||||
|
||||
func searchAllChannels(c *Context, w http.ResponseWriter, r *http.Request) {
|
||||
var props *model.ChannelSearch
|
||||
err := json.NewDecoder(r.Body).Decode(&props)
|
||||
|
|
@ -1972,16 +1932,15 @@ func addChannelMember(c *Context, w http.ResponseWriter, r *http.Request) {
|
|||
lastError = err
|
||||
continue
|
||||
}
|
||||
} else {
|
||||
// user is already a member, go to next
|
||||
c.Logger.Warn("User is already a channel member, skipping", mlog.String("UserId", userId), mlog.String("ChannelId", channel.Id))
|
||||
newChannelMembers = append(newChannelMembers, *existingMember)
|
||||
continue
|
||||
}
|
||||
|
||||
if channel.Type == model.ChannelTypeOpen {
|
||||
isSelfAdd := member.UserId == c.AppContext.Session().UserId
|
||||
if isSelfAdd && !canAddSelf {
|
||||
if isSelfAdd && existingMember != nil {
|
||||
// users should be able to add themselves if they're already a member, even if they don't have permissions
|
||||
newChannelMembers = append(newChannelMembers, *existingMember)
|
||||
continue
|
||||
} else if isSelfAdd && !canAddSelf {
|
||||
c.Logger.Warn("Error adding channel member, Invalid Permission to add self", mlog.String("UserId", userId), mlog.String("ChannelId", channel.Id))
|
||||
c.SetPermissionError(model.PermissionJoinPublicChannels)
|
||||
lastError = c.Err
|
||||
|
|
@ -1994,6 +1953,13 @@ func addChannelMember(c *Context, w http.ResponseWriter, r *http.Request) {
|
|||
}
|
||||
}
|
||||
|
||||
if existingMember != nil {
|
||||
// user is already a member, go to next
|
||||
c.Logger.Warn("User is already a channel member, skipping", mlog.String("UserId", userId), mlog.String("ChannelId", channel.Id))
|
||||
newChannelMembers = append(newChannelMembers, *existingMember)
|
||||
continue
|
||||
}
|
||||
|
||||
cm, err := c.App.AddChannelMember(c.AppContext, member.UserId, channel, app.ChannelMemberOpts{
|
||||
UserRequestorID: c.AppContext.Session().UserId,
|
||||
PostRootID: postRootId,
|
||||
|
|
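The reordering above changes how addChannelMember treats a user who is already in the channel: for a public channel, a self-add by an existing member now succeeds before any join-permission check, and only afterwards are other existing members skipped with a warning. A simplified sketch of that decision order (decideAdd is a hypothetical helper, not the real handler, and it models only the open-channel path):

```go
// decideAdd summarizes, for a public (open) channel, the order of checks the
// reworked handler applies to each requested member. Illustration only.
func decideAdd(isSelfAdd, alreadyMember, canAddSelf bool) string {
	switch {
	case isSelfAdd && alreadyMember:
		// Members may "re-add" themselves even without join permission.
		return "keep existing membership"
	case isSelfAdd && !canAddSelf:
		return "reject: missing join_public_channels permission"
	case alreadyMember:
		// Any other existing member is skipped with a warning.
		return "skip: already a member"
	default:
		return "add member"
	}
}
```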
@ -2461,7 +2427,7 @@ func getGroupMessageMembersCommonTeams(c *Context, w http.ResponseWriter, r *htt
|
|||
return
|
||||
}
|
||||
|
||||
if err := json.NewEncoder(w).Encode(teams); err != nil {
|
||||
if err := json.NewEncoder(w).Encode(c.App.SanitizeTeams(*c.AppContext.Session(), teams)); err != nil {
|
||||
c.Logger.Warn("Error while writing response from getGroupMessageMembersCommonTeams", mlog.Err(err))
|
||||
}
|
||||
}
@ -2474,100 +2474,6 @@ func TestSearchChannels(t *testing.T) {
|
|||
})
|
||||
}
|
||||
|
||||
func TestSearchArchivedChannels(t *testing.T) {
|
||||
mainHelper.Parallel(t)
|
||||
th := Setup(t).InitBasic()
|
||||
defer th.TearDown()
|
||||
client := th.Client
|
||||
|
||||
search := &model.ChannelSearch{Term: th.BasicChannel.Name}
|
||||
|
||||
_, err := client.DeleteChannel(context.Background(), th.BasicChannel.Id)
|
||||
require.NoError(t, err)
|
||||
|
||||
channels, _, err := client.SearchArchivedChannels(context.Background(), th.BasicTeam.Id, search)
|
||||
require.NoError(t, err)
|
||||
|
||||
found := false
|
||||
for _, c := range channels {
|
||||
require.Equal(t, model.ChannelTypeOpen, c.Type)
|
||||
|
||||
if c.Id == th.BasicChannel.Id {
|
||||
found = true
|
||||
}
|
||||
}
|
||||
|
||||
require.True(t, found)
|
||||
|
||||
search.Term = th.BasicPrivateChannel.Name
|
||||
_, err = client.DeleteChannel(context.Background(), th.BasicPrivateChannel.Id)
|
||||
require.NoError(t, err)
|
||||
|
||||
channels, _, err = client.SearchArchivedChannels(context.Background(), th.BasicTeam.Id, search)
|
||||
require.NoError(t, err)
|
||||
|
||||
found = false
|
||||
for _, c := range channels {
|
||||
if c.Id == th.BasicPrivateChannel.Id {
|
||||
found = true
|
||||
}
|
||||
}
|
||||
|
||||
require.True(t, found)
|
||||
|
||||
search.Term = ""
|
||||
_, _, err = client.SearchArchivedChannels(context.Background(), th.BasicTeam.Id, search)
|
||||
require.NoError(t, err)
|
||||
|
||||
search.Term = th.BasicDeletedChannel.Name
|
||||
_, resp, err := client.SearchArchivedChannels(context.Background(), model.NewId(), search)
|
||||
require.Error(t, err)
|
||||
CheckNotFoundStatus(t, resp)
|
||||
|
||||
_, resp, err = client.SearchArchivedChannels(context.Background(), "junk", search)
|
||||
require.Error(t, err)
|
||||
CheckBadRequestStatus(t, resp)
|
||||
|
||||
_, _, err = th.SystemAdminClient.SearchArchivedChannels(context.Background(), th.BasicTeam.Id, search)
|
||||
require.NoError(t, err)
|
||||
|
||||
// Check the appropriate permissions are enforced.
|
||||
defaultRolePermissions := th.SaveDefaultRolePermissions()
|
||||
defer func() {
|
||||
th.RestoreDefaultRolePermissions(defaultRolePermissions)
|
||||
}()
|
||||
|
||||
// Remove list channels permission from the user
|
||||
th.RemovePermissionFromRole(model.PermissionListTeamChannels.Id, model.TeamUserRoleId)
|
||||
|
||||
t.Run("Search for a BasicDeletedChannel, which the user is a member of", func(t *testing.T) {
|
||||
search.Term = th.BasicDeletedChannel.Name
|
||||
channelList, _, err := client.SearchArchivedChannels(context.Background(), th.BasicTeam.Id, search)
|
||||
require.NoError(t, err)
|
||||
|
||||
channelNames := []string{}
|
||||
for _, c := range channelList {
|
||||
channelNames = append(channelNames, c.Name)
|
||||
}
|
||||
require.Contains(t, channelNames, th.BasicDeletedChannel.Name)
|
||||
})
|
||||
|
||||
t.Run("Remove the user from BasicDeletedChannel and search again, should still return", func(t *testing.T) {
|
||||
appErr := th.App.RemoveUserFromChannel(th.Context, th.BasicUser.Id, th.BasicUser.Id, th.BasicDeletedChannel)
|
||||
require.Nil(t, appErr)
|
||||
|
||||
search.Term = th.BasicDeletedChannel.Name
|
||||
channelList, _, err := client.SearchArchivedChannels(context.Background(), th.BasicTeam.Id, search)
|
||||
require.NoError(t, err)
|
||||
|
||||
channelNames := []string{}
|
||||
for _, c := range channelList {
|
||||
channelNames = append(channelNames, c.Name)
|
||||
}
|
||||
require.Contains(t, channelNames, th.BasicDeletedChannel.Name)
|
||||
})
|
||||
}
|
||||
|
||||
func TestSearchAllChannels(t *testing.T) {
|
||||
mainHelper.Parallel(t)
|
||||
th := setupForSharedChannels(t).InitBasic()
|
||||
|
|
@ -4465,6 +4371,37 @@ func TestAddChannelMember(t *testing.T) {
|
|||
require.NoError(t, err)
|
||||
})
|
||||
|
||||
t.Run("requester is not a member of the team and tries to add a user to a channel where it is already a member", func(t *testing.T) {
|
||||
// Create two teams using SystemAdminClient
|
||||
t1 := th.CreateTeamWithClient(th.SystemAdminClient)
|
||||
t2 := th.CreateTeamWithClient(th.SystemAdminClient)
|
||||
|
||||
// Use existing users - user will be BasicUser, user2 will be BasicUser2
|
||||
u1 := th.BasicUser
|
||||
u2 := th.BasicUser2
|
||||
|
||||
// Add user1 to team1 and user2 to team2 (they're already on BasicTeam)
|
||||
th.LinkUserToTeam(u1, t1)
|
||||
th.LinkUserToTeam(u2, t2)
|
||||
|
||||
// Create a public channel in team1
|
||||
pubChannel := th.CreateChannelWithClientAndTeam(th.SystemAdminClient, model.ChannelTypeOpen, t1.Id)
|
||||
|
||||
// Add user1 to the public channel
|
||||
th.AddUserToChannel(u1, pubChannel)
|
||||
|
||||
// Create client for user2
|
||||
client2 := th.CreateClient()
|
||||
_, _, err := client2.Login(context.Background(), u2.Email, u2.Password)
|
||||
require.NoError(t, err)
|
||||
|
||||
// Try to add user1 to the public channel using user2's credentials
|
||||
// This should fail with 403 since user2 is not a member of the team
|
||||
_, resp, err := client2.AddChannelMember(context.Background(), pubChannel.Id, u1.Id)
|
||||
CheckForbiddenStatus(t, resp)
|
||||
require.Error(t, err)
|
||||
})
|
||||
|
||||
t.Run("invalid request data", func(t *testing.T) {
|
||||
th.TestForSystemAdminAndLocal(t, func(t *testing.T, client *model.Client4) {
|
||||
// correct type for user ids (string) but invalid value.
@ -21,6 +21,7 @@ func (api *API) InitCustomProfileAttributes() {
|
|||
api.BaseRoutes.User.Handle("/custom_profile_attributes", api.APISessionRequired(listCPAValues)).Methods(http.MethodGet)
|
||||
api.BaseRoutes.CustomProfileAttributesValues.Handle("", api.APISessionRequired(patchCPAValues)).Methods(http.MethodPatch)
|
||||
api.BaseRoutes.CustomProfileAttributes.Handle("/group", api.APISessionRequired(getCPAGroup)).Methods(http.MethodGet)
|
||||
api.BaseRoutes.User.Handle("/custom_profile_attributes", api.APISessionRequired(patchCPAValuesForUser)).Methods(http.MethodPatch)
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -277,10 +278,13 @@ func listCPAValues(c *Context, w http.ResponseWriter, r *http.Request) {
|
|||
}
|
||||
|
||||
userID := c.Params.UserId
|
||||
canSee, err := c.App.UserCanSeeOtherUser(c.AppContext, c.AppContext.Session().UserId, userID)
|
||||
if err != nil || !canSee {
|
||||
c.SetPermissionError(model.PermissionViewMembers)
|
||||
return
|
||||
// we check unrestricted sessions to allow local mode requests to go through
|
||||
if !c.AppContext.Session().IsUnrestricted() {
|
||||
canSee, err := c.App.UserCanSeeOtherUser(c.AppContext, c.AppContext.Session().UserId, userID)
|
||||
if err != nil || !canSee {
|
||||
c.SetPermissionError(model.PermissionViewMembers)
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
values, appErr := c.App.ListCPAValues(userID)
|
||||
|
|
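The guard above skips the per-user visibility check for unrestricted sessions so that local-mode requests can list another user's attributes. A hedged sketch of that guard's shape, assuming the api4 package context from this file (canListCPAValues and the error ID chosen here are illustrative, not part of the diff):

```go
// Illustrative only: unrestricted sessions (e.g. local mode) skip the visibility check.
func canListCPAValues(c *Context, targetUserID string) *model.AppError {
	if c.AppContext.Session().IsUnrestricted() {
		return nil
	}
	canSee, err := c.App.UserCanSeeOtherUser(c.AppContext, c.AppContext.Session().UserId, targetUserID)
	if err != nil || !canSee {
		return model.NewAppError("listCPAValues", "api.context.permissions.app_error", nil, "", http.StatusForbidden)
	}
	return nil
}
```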
@ -297,3 +301,75 @@ func listCPAValues(c *Context, w http.ResponseWriter, r *http.Request) {
|
|||
c.Logger.Warn("Error while writing response", mlog.Err(err))
|
||||
}
|
||||
}
|
||||
|
||||
func patchCPAValuesForUser(c *Context, w http.ResponseWriter, r *http.Request) {
|
||||
if !model.MinimumEnterpriseLicense(c.App.Channels().License()) {
|
||||
c.Err = model.NewAppError("Api4.patchCPAValues", "api.custom_profile_attributes.license_error", nil, "", http.StatusForbidden)
|
||||
return
|
||||
}
|
||||
|
||||
// Get userID from URL
|
||||
c.RequireUserId()
|
||||
if c.Err != nil {
|
||||
return
|
||||
}
|
||||
userID := c.Params.UserId
|
||||
|
||||
if !c.App.SessionHasPermissionToUser(*c.AppContext.Session(), userID) {
|
||||
c.SetPermissionError(model.PermissionEditOtherUsers)
|
||||
return
|
||||
}
|
||||
|
||||
var updates map[string]json.RawMessage
|
||||
if err := json.NewDecoder(r.Body).Decode(&updates); err != nil {
|
||||
c.SetInvalidParamWithErr("value", err)
|
||||
return
|
||||
}
|
||||
|
||||
auditRec := c.MakeAuditRecord(model.AuditEventPatchCPAValues, model.AuditStatusFail)
|
||||
defer c.LogAuditRec(auditRec)
|
||||
model.AddEventParameterToAuditRec(auditRec, "user_id", userID)
|
||||
|
||||
// if the user is not an admin, we need to check that none of the
// fields being updated are admin-managed
|
||||
if !c.App.SessionHasPermissionTo(*c.AppContext.Session(), model.PermissionManageSystem) {
|
||||
fields, appErr := c.App.ListCPAFields()
|
||||
if appErr != nil {
|
||||
c.Err = appErr
|
||||
return
|
||||
}
|
||||
|
||||
// Check if any of the fields being updated are admin-managed
|
||||
for _, field := range fields {
|
||||
if _, isBeingUpdated := updates[field.ID]; isBeingUpdated {
|
||||
// Convert to CPAField to check if managed
|
||||
cpaField, fErr := model.NewCPAFieldFromPropertyField(field)
|
||||
if fErr != nil {
|
||||
c.Err = model.NewAppError("Api4.patchCPAValues", "app.custom_profile_attributes.property_field_conversion.app_error", nil, "", http.StatusInternalServerError).Wrap(fErr)
|
||||
return
|
||||
}
|
||||
if cpaField.IsAdminManaged() {
|
||||
c.Err = model.NewAppError("Api4.patchCPAValues", "app.custom_profile_attributes.property_field_is_managed.app_error", nil, "", http.StatusForbidden)
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
results := make(map[string]json.RawMessage, len(updates))
|
||||
for fieldID, rawValue := range updates {
|
||||
patchedValue, appErr := c.App.PatchCPAValue(userID, fieldID, rawValue, false)
|
||||
if appErr != nil {
|
||||
c.Err = appErr
|
||||
return
|
||||
}
|
||||
results[fieldID] = patchedValue.Value
|
||||
}
|
||||
|
||||
auditRec.Success()
|
||||
auditRec.AddEventObjectType("patchCPAValues")
|
||||
|
||||
if err := json.NewEncoder(w).Encode(results); err != nil {
|
||||
c.Logger.Warn("Error while writing response", mlog.Err(err))
|
||||
}
|
||||
}
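A usage sketch of the Client4 method exercised by the tests further down: the method name and its return shape are taken from this diff, while the wrapper name and the model import path are assumptions.

```go
package main

import (
	"context"
	"encoding/json"
	"fmt"

	"github.com/mattermost/mattermost/server/public/model"
)

// patchOwnCPAValue is a hypothetical wrapper around Client4.PatchCPAValuesForUser.
// It patches a single text value for the given user and returns the value the
// server stored (the server sanitizes surrounding whitespace).
func patchOwnCPAValue(client *model.Client4, userID, fieldID, value string) (string, error) {
	values := map[string]json.RawMessage{
		fieldID: json.RawMessage(fmt.Sprintf("%q", value)),
	}

	patched, _, err := client.PatchCPAValuesForUser(context.Background(), userID, values)
	if err != nil {
		// Non-admins get a 403 if any field in the batch is admin-managed or
		// LDAP/SAML synced; in that case the whole batch is rejected.
		return "", err
	}

	var got string
	if err := json.Unmarshal(patched[fieldID], &got); err != nil {
		return "", err
	}
	return got, nil
}
```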
@ -11,7 +11,8 @@ func (api *API) InitCustomProfileAttributesLocal() {
|
|||
api.BaseRoutes.CustomProfileAttributesFields.Handle("", api.APILocal(createCPAField)).Methods(http.MethodPost)
|
||||
api.BaseRoutes.CustomProfileAttributesField.Handle("", api.APILocal(patchCPAField)).Methods(http.MethodPatch)
|
||||
api.BaseRoutes.CustomProfileAttributesField.Handle("", api.APILocal(deleteCPAField)).Methods(http.MethodDelete)
|
||||
api.BaseRoutes.User.Handle("/custom_profile_attributes", api.APISessionRequired(listCPAValues)).Methods(http.MethodGet)
|
||||
api.BaseRoutes.CustomProfileAttributesValues.Handle("", api.APISessionRequired(patchCPAValues)).Methods(http.MethodPatch)
|
||||
api.BaseRoutes.User.Handle("/custom_profile_attributes", api.APILocal(listCPAValues)).Methods(http.MethodGet)
|
||||
api.BaseRoutes.CustomProfileAttributesValues.Handle("", api.APILocal(patchCPAValues)).Methods(http.MethodPatch)
|
||||
api.BaseRoutes.User.Handle("/custom_profile_attributes", api.APILocal(patchCPAValuesForUser)).Methods(http.MethodPatch)
|
||||
}
|
||||
}
@ -872,3 +872,410 @@ func TestPatchCPAValues(t *testing.T) {
|
|||
})
|
||||
})
|
||||
}
|
||||
|
||||
func TestPatchCPAValuesForUser(t *testing.T) {
|
||||
mainHelper.Parallel(t)
|
||||
|
||||
th := SetupConfig(t, func(cfg *model.Config) {
|
||||
cfg.FeatureFlags.CustomProfileAttributes = true
|
||||
}).InitBasic()
|
||||
defer th.TearDown()
|
||||
|
||||
field, err := model.NewCPAFieldFromPropertyField(&model.PropertyField{
|
||||
Name: model.NewId(),
|
||||
Type: model.PropertyFieldTypeText,
|
||||
})
|
||||
require.NoError(t, err)
|
||||
|
||||
createdField, appErr := th.App.CreateCPAField(field)
|
||||
require.Nil(t, appErr)
|
||||
require.NotNil(t, createdField)
|
||||
|
||||
t.Run("endpoint should not work if no valid license is present", func(t *testing.T) {
|
||||
values := map[string]json.RawMessage{createdField.ID: json.RawMessage(`"Field Value"`)}
|
||||
patchedValues, resp, err := th.Client.PatchCPAValuesForUser(context.Background(), th.BasicUser.Id, values)
|
||||
CheckForbiddenStatus(t, resp)
|
||||
require.Error(t, err)
|
||||
CheckErrorID(t, err, "api.custom_profile_attributes.license_error")
|
||||
require.Empty(t, patchedValues)
|
||||
})
|
||||
|
||||
// add a valid license
|
||||
th.App.Srv().SetLicense(model.NewTestLicenseSKU(model.LicenseShortSkuEnterprise))
|
||||
|
||||
t.Run("any team member should be able to create their own values", func(t *testing.T) {
|
||||
webSocketClient := th.CreateConnectedWebSocketClient(t)
|
||||
|
||||
values := map[string]json.RawMessage{}
|
||||
value := "Field Value"
|
||||
values[createdField.ID] = json.RawMessage(fmt.Sprintf(`" %s "`, value)) // value should be sanitized
|
||||
patchedValues, resp, err := th.Client.PatchCPAValuesForUser(context.Background(), th.BasicUser.Id, values)
|
||||
CheckOKStatus(t, resp)
|
||||
require.NoError(t, err)
|
||||
require.NotEmpty(t, patchedValues)
|
||||
require.Len(t, patchedValues, 1)
|
||||
var actualValue string
|
||||
require.NoError(t, json.Unmarshal(patchedValues[createdField.ID], &actualValue))
|
||||
require.Equal(t, value, actualValue)
|
||||
|
||||
values, resp, err = th.Client.ListCPAValues(context.Background(), th.BasicUser.Id)
|
||||
CheckOKStatus(t, resp)
|
||||
require.NoError(t, err)
|
||||
require.NotEmpty(t, values)
|
||||
require.Len(t, values, 1)
|
||||
actualValue = ""
|
||||
require.NoError(t, json.Unmarshal(values[createdField.ID], &actualValue))
|
||||
require.Equal(t, value, actualValue)
|
||||
|
||||
t.Run("a websocket event should be fired as part of the value changes", func(t *testing.T) {
|
||||
var wsValues map[string]json.RawMessage
|
||||
require.Eventually(t, func() bool {
|
||||
select {
|
||||
case event := <-webSocketClient.EventChannel:
|
||||
if event.EventType() == model.WebsocketEventCPAValuesUpdated {
|
||||
valuesData, err := json.Marshal(event.GetData()["values"])
|
||||
require.NoError(t, err)
|
||||
require.NoError(t, json.Unmarshal(valuesData, &wsValues))
|
||||
return true
|
||||
}
|
||||
default:
|
||||
return false
|
||||
}
|
||||
return false
|
||||
}, 5*time.Second, 100*time.Millisecond)
|
||||
|
||||
require.NotEmpty(t, wsValues)
|
||||
require.Equal(t, patchedValues, wsValues)
|
||||
})
|
||||
})
|
||||
|
||||
t.Run("any team member should be able to patch their own values", func(t *testing.T) {
|
||||
values, resp, err := th.Client.ListCPAValues(context.Background(), th.BasicUser.Id)
|
||||
CheckOKStatus(t, resp)
|
||||
require.NoError(t, err)
|
||||
require.NotEmpty(t, values)
|
||||
require.Len(t, values, 1)
|
||||
|
||||
value := "Updated Field Value"
|
||||
values[createdField.ID] = json.RawMessage(fmt.Sprintf(`" %s \t"`, value)) // value should be sanitized
|
||||
patchedValues, resp, err := th.Client.PatchCPAValuesForUser(context.Background(), th.BasicUser.Id, values)
|
||||
CheckOKStatus(t, resp)
|
||||
require.NoError(t, err)
|
||||
var actualValue string
|
||||
require.NoError(t, json.Unmarshal(patchedValues[createdField.ID], &actualValue))
|
||||
require.Equal(t, value, actualValue)
|
||||
|
||||
values, resp, err = th.Client.ListCPAValues(context.Background(), th.BasicUser.Id)
|
||||
CheckOKStatus(t, resp)
|
||||
require.NoError(t, err)
|
||||
actualValue = ""
|
||||
require.NoError(t, json.Unmarshal(values[createdField.ID], &actualValue))
|
||||
require.Equal(t, value, actualValue)
|
||||
})
|
||||
|
||||
t.Run("should handle array values correctly", func(t *testing.T) {
|
||||
optionsID := []string{model.NewId(), model.NewId(), model.NewId(), model.NewId()}
|
||||
|
||||
arrayField, err := model.NewCPAFieldFromPropertyField(&model.PropertyField{
|
||||
Name: model.NewId(),
|
||||
Type: model.PropertyFieldTypeMultiselect,
|
||||
Attrs: model.StringInterface{
|
||||
"options": []map[string]any{
|
||||
{"id": optionsID[0], "name": "option1"},
|
||||
{"id": optionsID[1], "name": "option2"},
|
||||
{"id": optionsID[2], "name": "option3"},
|
||||
{"id": optionsID[3], "name": "option4"},
|
||||
},
|
||||
},
|
||||
})
|
||||
require.NoError(t, err)
|
||||
|
||||
createdArrayField, appErr := th.App.CreateCPAField(arrayField)
|
||||
require.Nil(t, appErr)
|
||||
require.NotNil(t, createdArrayField)
|
||||
|
||||
values := map[string]json.RawMessage{
|
||||
createdArrayField.ID: json.RawMessage(fmt.Sprintf(`["%s", "%s", "%s"]`, optionsID[0], optionsID[1], optionsID[2])),
|
||||
}
|
||||
patchedValues, resp, err := th.Client.PatchCPAValuesForUser(context.Background(), th.BasicUser.Id, values)
|
||||
CheckOKStatus(t, resp)
|
||||
require.NoError(t, err)
|
||||
require.NotEmpty(t, patchedValues)
|
||||
|
||||
var actualValues []string
|
||||
require.NoError(t, json.Unmarshal(patchedValues[createdArrayField.ID], &actualValues))
|
||||
require.Equal(t, optionsID[:3], actualValues)
|
||||
|
||||
// Test updating array values
|
||||
values[createdArrayField.ID] = json.RawMessage(fmt.Sprintf(`["%s", "%s"]`, optionsID[2], optionsID[3]))
|
||||
patchedValues, resp, err = th.Client.PatchCPAValuesForUser(context.Background(), th.BasicUser.Id, values)
|
||||
CheckOKStatus(t, resp)
|
||||
require.NoError(t, err)
|
||||
|
||||
actualValues = nil
|
||||
require.NoError(t, json.Unmarshal(patchedValues[createdArrayField.ID], &actualValues))
|
||||
require.Equal(t, optionsID[2:4], actualValues)
|
||||
})
|
||||
|
||||
t.Run("should fail if any of the values belongs to a field that is LDAP/SAML synced", func(t *testing.T) {
|
||||
// Create a field with LDAP attribute
|
||||
ldapField, err := model.NewCPAFieldFromPropertyField(&model.PropertyField{
|
||||
Name: model.NewId(),
|
||||
Type: model.PropertyFieldTypeText,
|
||||
Attrs: model.StringInterface{
|
||||
model.CustomProfileAttributesPropertyAttrsLDAP: "ldap_attr",
|
||||
},
|
||||
})
|
||||
require.NoError(t, err)
|
||||
|
||||
createdLDAPField, appErr := th.App.CreateCPAField(ldapField)
|
||||
require.Nil(t, appErr)
|
||||
require.NotNil(t, createdLDAPField)
|
||||
|
||||
// Create a field with SAML attribute
|
||||
samlField, err := model.NewCPAFieldFromPropertyField(&model.PropertyField{
|
||||
Name: model.NewId(),
|
||||
Type: model.PropertyFieldTypeText,
|
||||
Attrs: model.StringInterface{
|
||||
model.CustomProfileAttributesPropertyAttrsSAML: "saml_attr",
|
||||
},
|
||||
})
|
||||
require.NoError(t, err)
|
||||
|
||||
createdSAMLField, appErr := th.App.CreateCPAField(samlField)
|
||||
require.Nil(t, appErr)
|
||||
require.NotNil(t, createdSAMLField)
|
||||
|
||||
// Test LDAP field
|
||||
values := map[string]json.RawMessage{
|
||||
createdLDAPField.ID: json.RawMessage(`"LDAP Value"`),
|
||||
}
|
||||
_, resp, err := th.Client.PatchCPAValuesForUser(context.Background(), th.BasicUser.Id, values)
|
||||
CheckBadRequestStatus(t, resp)
|
||||
require.Error(t, err)
|
||||
CheckErrorID(t, err, "app.custom_profile_attributes.property_field_is_synced.app_error")
|
||||
|
||||
// Test SAML field
|
||||
values = map[string]json.RawMessage{
|
||||
createdSAMLField.ID: json.RawMessage(`"SAML Value"`),
|
||||
}
|
||||
_, resp, err = th.Client.PatchCPAValuesForUser(context.Background(), th.BasicUser.Id, values)
|
||||
CheckBadRequestStatus(t, resp)
|
||||
require.Error(t, err)
|
||||
CheckErrorID(t, err, "app.custom_profile_attributes.property_field_is_synced.app_error")
|
||||
|
||||
// Test multiple fields with one being LDAP synced
|
||||
values = map[string]json.RawMessage{
|
||||
createdField.ID: json.RawMessage(`"Regular Value"`),
|
||||
createdLDAPField.ID: json.RawMessage(`"LDAP Value"`),
|
||||
}
|
||||
_, resp, err = th.Client.PatchCPAValuesForUser(context.Background(), th.BasicUser.Id, values)
|
||||
CheckBadRequestStatus(t, resp)
|
||||
require.Error(t, err)
|
||||
CheckErrorID(t, err, "app.custom_profile_attributes.property_field_is_synced.app_error")
|
||||
})
|
||||
|
||||
t.Run("an invalid patch should be rejected", func(t *testing.T) {
|
||||
field, err := model.NewCPAFieldFromPropertyField(&model.PropertyField{
|
||||
Name: model.NewId(),
|
||||
Type: model.PropertyFieldTypeText,
|
||||
})
|
||||
require.NoError(t, err)
|
||||
|
||||
createdField, appErr := th.App.CreateCPAField(field)
|
||||
require.Nil(t, appErr)
|
||||
require.NotNil(t, createdField)
|
||||
|
||||
// Create a value that's too long (over 64 characters)
|
||||
tooLongValue := strings.Repeat("a", model.CPAValueTypeTextMaxLength+1)
|
||||
values := map[string]json.RawMessage{
|
||||
createdField.ID: json.RawMessage(fmt.Sprintf(`"%s"`, tooLongValue)),
|
||||
}
|
||||
|
||||
_, resp, err := th.Client.PatchCPAValuesForUser(context.Background(), th.BasicUser.Id, values)
|
||||
CheckBadRequestStatus(t, resp)
|
||||
require.Error(t, err)
|
||||
require.Contains(t, err.Error(), "Failed to validate property value")
|
||||
})
|
||||
|
||||
t.Run("admin-managed fields", func(t *testing.T) {
|
||||
// Create a managed field (only admins can create fields)
|
||||
managedField := &model.PropertyField{
|
||||
Name: "Managed Field",
|
||||
Type: model.PropertyFieldTypeText,
|
||||
Attrs: model.StringInterface{
|
||||
model.CustomProfileAttributesPropertyAttrsManaged: "admin",
|
||||
},
|
||||
}
|
||||
|
||||
createdManagedField, resp, err := th.SystemAdminClient.CreateCPAField(context.Background(), managedField)
|
||||
CheckCreatedStatus(t, resp)
|
||||
require.NoError(t, err)
|
||||
require.NotNil(t, createdManagedField)
|
||||
|
||||
// Create a non-managed field for comparison
|
||||
regularField := &model.PropertyField{
|
||||
Name: "Regular Field",
|
||||
Type: model.PropertyFieldTypeText,
|
||||
}
|
||||
|
||||
createdRegularField, resp, err := th.SystemAdminClient.CreateCPAField(context.Background(), regularField)
|
||||
CheckCreatedStatus(t, resp)
|
||||
require.NoError(t, err)
|
||||
require.NotNil(t, createdRegularField)
|
||||
|
||||
t.Run("regular user cannot update managed field", func(t *testing.T) {
|
||||
values := map[string]json.RawMessage{
|
||||
createdManagedField.ID: json.RawMessage(`"Managed Value"`),
|
||||
}
|
||||
|
||||
_, resp, err := th.Client.PatchCPAValuesForUser(context.Background(), th.BasicUser.Id, values)
|
||||
CheckForbiddenStatus(t, resp)
|
||||
require.Error(t, err)
|
||||
CheckErrorID(t, err, "app.custom_profile_attributes.property_field_is_managed.app_error")
|
||||
})
|
||||
|
||||
t.Run("regular user can update non-managed field", func(t *testing.T) {
|
||||
values := map[string]json.RawMessage{
|
||||
createdRegularField.ID: json.RawMessage(`"Regular Value"`),
|
||||
}
|
||||
|
||||
patchedValues, resp, err := th.Client.PatchCPAValuesForUser(context.Background(), th.BasicUser.Id, values)
|
||||
CheckOKStatus(t, resp)
|
||||
require.NoError(t, err)
|
||||
require.NotEmpty(t, patchedValues)
|
||||
|
||||
var actualValue string
|
||||
require.NoError(t, json.Unmarshal(patchedValues[createdRegularField.ID], &actualValue))
|
||||
require.Equal(t, "Regular Value", actualValue)
|
||||
})
|
||||
|
||||
th.TestForSystemAdminAndLocal(t, func(t *testing.T, client *model.Client4) {
|
||||
// Set initial value through the app layer that we will be replacing during the test
|
||||
_, appErr := th.App.PatchCPAValue(th.SystemAdminUser.Id, createdManagedField.ID, json.RawMessage(`"Initial Admin Value"`), true)
|
||||
require.Nil(t, appErr)
|
||||
|
||||
values := map[string]json.RawMessage{
|
||||
createdManagedField.ID: json.RawMessage(`"Admin Updated Value"`),
|
||||
}
|
||||
|
||||
patchedValues, resp, err := client.PatchCPAValuesForUser(context.Background(), th.SystemAdminUser.Id, values)
|
||||
CheckOKStatus(t, resp)
|
||||
require.NoError(t, err)
|
||||
require.NotEmpty(t, patchedValues)
|
||||
|
||||
var actualValue string
|
||||
require.NoError(t, json.Unmarshal(patchedValues[createdManagedField.ID], &actualValue))
|
||||
require.Equal(t, "Admin Updated Value", actualValue)
|
||||
}, "system admin can update managed field")
|
||||
|
||||
th.TestForSystemAdminAndLocal(t, func(t *testing.T, client *model.Client4) {
|
||||
values := map[string]json.RawMessage{
|
||||
createdManagedField.ID: json.RawMessage(`"Admin Updated Managed Value For Other User"`),
|
||||
}
|
||||
|
||||
patchedValues, resp, err := th.SystemAdminClient.PatchCPAValuesForUser(context.Background(), th.BasicUser.Id, values)
|
||||
CheckOKStatus(t, resp)
|
||||
require.NoError(t, err)
|
||||
require.NotEmpty(t, patchedValues)
|
||||
|
||||
var actualValue string
|
||||
require.NoError(t, json.Unmarshal(patchedValues[createdManagedField.ID], &actualValue))
|
||||
require.Equal(t, "Admin Updated Managed Value For Other User", actualValue)
|
||||
|
||||
// Verify the value was actually set for the target user
|
||||
userValues, resp, err := th.SystemAdminClient.ListCPAValues(context.Background(), th.BasicUser.Id)
|
||||
CheckOKStatus(t, resp)
|
||||
require.NoError(t, err)
|
||||
require.NotEmpty(t, userValues)
|
||||
|
||||
var storedValue string
|
||||
require.NoError(t, json.Unmarshal(userValues[createdManagedField.ID], &storedValue))
|
||||
require.Equal(t, "Admin Updated Managed Value For Other User", storedValue)
|
||||
}, "system admin can update managed field values for other users")
|
||||
|
||||
t.Run("a user should not be able to update other user's field values", func(t *testing.T) {
|
||||
values := map[string]json.RawMessage{
|
||||
createdRegularField.ID: json.RawMessage(`"Attempted Value For Other User"`),
|
||||
}
|
||||
|
||||
// th.Client (BasicUser) trying to update th.BasicUser2's values should fail
|
||||
_, resp, err := th.Client.PatchCPAValuesForUser(context.Background(), th.BasicUser2.Id, values)
|
||||
CheckForbiddenStatus(t, resp)
|
||||
require.Error(t, err)
|
||||
CheckErrorID(t, err, "api.context.permissions.app_error")
|
||||
})
|
||||
|
||||
t.Run("batch update with managed fields fails for regular user", func(t *testing.T) {
|
||||
// First set some initial values to ensure we can verify they don't change
|
||||
// Set initial values for both fields using th.App (admins can set managed field values)
|
||||
_, appErr := th.App.PatchCPAValue(th.BasicUser.Id, createdRegularField.ID, json.RawMessage(`"Initial Regular Value"`), false)
|
||||
require.Nil(t, appErr)
|
||||
|
||||
_, appErr = th.App.PatchCPAValue(th.BasicUser.Id, createdManagedField.ID, json.RawMessage(`"Initial Managed Value"`), true)
|
||||
require.Nil(t, appErr)
|
||||
|
||||
// Try to batch update both managed and regular fields - this should fail
|
||||
attemptedValues := map[string]json.RawMessage{
|
||||
createdManagedField.ID: json.RawMessage(`"Managed Batch Value"`),
|
||||
createdRegularField.ID: json.RawMessage(`"Regular Batch Value"`),
|
||||
}
|
||||
|
||||
_, resp, err := th.Client.PatchCPAValuesForUser(context.Background(), th.BasicUser.Id, attemptedValues)
|
||||
CheckForbiddenStatus(t, resp)
|
||||
require.Error(t, err)
|
||||
CheckErrorID(t, err, "app.custom_profile_attributes.property_field_is_managed.app_error")
|
||||
|
||||
// Verify that no values were updated when the batch operation failed
|
||||
currentValues, appErr := th.App.ListCPAValues(th.BasicUser.Id)
|
||||
require.Nil(t, appErr)
|
||||
|
||||
// Check that values remain unchanged - both fields should retain their initial values
|
||||
regularFieldHasOriginalValue := false
|
||||
managedFieldHasOriginalValue := false
|
||||
|
||||
for _, value := range currentValues {
|
||||
if value.FieldID == createdManagedField.ID {
|
||||
var currentValue string
|
||||
require.NoError(t, json.Unmarshal(value.Value, ¤tValue))
|
||||
if currentValue == "Initial Managed Value" {
|
||||
managedFieldHasOriginalValue = true
|
||||
}
|
||||
// Verify it's not the attempted update value
|
||||
require.NotEqual(t, "Managed Batch Value", currentValue, "Managed field should not have been updated in failed batch operation")
|
||||
}
|
||||
if value.FieldID == createdRegularField.ID {
|
||||
var currentValue string
|
||||
require.NoError(t, json.Unmarshal(value.Value, ¤tValue))
|
||||
if currentValue == "Initial Regular Value" {
|
||||
regularFieldHasOriginalValue = true
|
||||
}
|
||||
// Verify it's not the attempted update value
|
||||
require.NotEqual(t, "Regular Batch Value", currentValue, "Regular field should not have been updated in failed batch operation")
|
||||
}
|
||||
}
|
||||
|
||||
// Both fields should retain their original values after the failed batch operation
|
||||
require.True(t, regularFieldHasOriginalValue, "Regular field should retain its original value")
|
||||
require.True(t, managedFieldHasOriginalValue, "Managed field should retain its original value")
|
||||
})
|
||||
|
||||
th.TestForSystemAdminAndLocal(t, func(t *testing.T, client *model.Client4) {
|
||||
values := map[string]json.RawMessage{
|
||||
createdManagedField.ID: json.RawMessage(`"Admin Managed Batch"`),
|
||||
createdRegularField.ID: json.RawMessage(`"Admin Regular Batch"`),
|
||||
}
|
||||
|
||||
patchedValues, resp, err := th.SystemAdminClient.PatchCPAValuesForUser(context.Background(), th.BasicUser.Id, values)
|
||||
CheckOKStatus(t, resp)
|
||||
require.NoError(t, err)
|
||||
require.Len(t, patchedValues, 2)
|
||||
|
||||
var managedValue, regularValue string
|
||||
require.NoError(t, json.Unmarshal(patchedValues[createdManagedField.ID], &managedValue))
|
||||
require.NoError(t, json.Unmarshal(patchedValues[createdRegularField.ID], ®ularValue))
|
||||
require.Equal(t, "Admin Managed Batch", managedValue)
|
||||
require.Equal(t, "Admin Regular Batch", regularValue)
|
||||
}, "batch update with managed fields succeeds for admin")
|
||||
})
|
||||
}
@ -75,10 +75,6 @@ func (api *API) InitGroup() {
|
|||
api.BaseRoutes.Channels.Handle("/{channel_id:[A-Za-z0-9]+}/groups",
|
||||
api.APISessionRequired(getGroupsByChannel)).Methods(http.MethodGet)
|
||||
|
||||
// POST
|
||||
api.BaseRoutes.Groups.Handle("/names",
|
||||
api.APISessionRequired(getGroupsByNames)).Methods(http.MethodPost)
|
||||
|
||||
// GET /api/v4/teams/:team_id/groups
|
||||
api.BaseRoutes.Teams.Handle("/{team_id:[A-Za-z0-9]+}/groups",
|
||||
api.APISessionRequired(getGroupsByTeam)).Methods(http.MethodGet)
|
||||
|
|
@ -855,45 +851,6 @@ func getGroupsByChannel(c *Context, w http.ResponseWriter, r *http.Request) {
|
|||
}
|
||||
}
|
||||
|
||||
func getGroupsByNames(c *Context, w http.ResponseWriter, r *http.Request) {
|
||||
permissionErr := requireLicense(c)
|
||||
if permissionErr != nil {
|
||||
c.Err = permissionErr
|
||||
return
|
||||
}
|
||||
|
||||
groupNames, err := model.SortedArrayFromJSON(r.Body)
|
||||
if err != nil {
|
||||
c.Err = model.NewAppError("getGroupsByNames", model.PayloadParseError, nil, "", http.StatusBadRequest).Wrap(err)
|
||||
return
|
||||
} else if len(groupNames) == 0 {
|
||||
c.SetInvalidParam("group_names")
|
||||
return
|
||||
}
|
||||
|
||||
restrictions, appErr := c.App.GetViewUsersRestrictions(c.AppContext, c.AppContext.Session().UserId)
|
||||
if appErr != nil {
|
||||
c.Err = appErr
|
||||
return
|
||||
}
|
||||
|
||||
groups, appErr := c.App.GetGroupsByNames(groupNames, restrictions)
|
||||
if appErr != nil {
|
||||
c.Err = appErr
|
||||
return
|
||||
}
|
||||
|
||||
js, err := json.Marshal(groups)
|
||||
if err != nil {
|
||||
c.Err = model.NewAppError("getGroupsByNames", "api.marshal_error", nil, "", http.StatusInternalServerError).Wrap(err)
|
||||
return
|
||||
}
|
||||
|
||||
if _, err := w.Write(js); err != nil {
|
||||
c.Logger.Warn("Error while writing response", mlog.Err(err))
|
||||
}
|
||||
}
|
||||
|
||||
func getGroupsByTeam(c *Context, w http.ResponseWriter, r *http.Request) {
|
||||
permissionErr := requireLicense(c)
|
||||
if permissionErr != nil {
|
||||
|
|
|
|||
|
|
@ -2274,118 +2274,6 @@ func TestGetGroups(t *testing.T) {
|
|||
})
|
||||
}
|
||||
|
||||
func TestGetGroupsByNames(t *testing.T) {
|
||||
mainHelper.Parallel(t)
|
||||
th := Setup(t).InitBasic()
|
||||
defer th.TearDown()
|
||||
|
||||
// make sure "createdDate" for next group is after one created in InitBasic()
|
||||
time.Sleep(2 * time.Millisecond)
|
||||
id := model.NewId()
|
||||
groupName := model.NewPointer("name" + id)
|
||||
group, appErr := th.App.CreateGroup(&model.Group{
|
||||
DisplayName: "dn-foo_" + id,
|
||||
Name: groupName,
|
||||
Source: model.GroupSourceLdap,
|
||||
Description: "description_" + id,
|
||||
RemoteId: model.NewPointer(model.NewId()),
|
||||
})
|
||||
assert.Nil(t, appErr)
|
||||
|
||||
id2 := model.NewId()
|
||||
group2Name := model.NewPointer("name" + id2)
|
||||
group2, appErr := th.App.CreateGroup(&model.Group{
|
||||
DisplayName: "dn-foo_" + id2,
|
||||
Name: group2Name,
|
||||
Source: model.GroupSourceLdap,
|
||||
Description: "description_" + id2,
|
||||
RemoteId: model.NewPointer(model.NewId()),
|
||||
})
|
||||
assert.Nil(t, appErr)
|
||||
|
||||
// Create a group with AllowReference=false
|
||||
id3 := model.NewId()
|
||||
group3Name := model.NewPointer("name" + id3)
|
||||
group3, appErr := th.App.CreateGroup(&model.Group{
|
||||
DisplayName: "dn-foo_" + id3,
|
||||
Name: group3Name,
|
||||
Source: model.GroupSourceLdap,
|
||||
Description: "description_" + id3,
|
||||
RemoteId: model.NewPointer(model.NewId()),
|
||||
})
|
||||
assert.Nil(t, appErr)
|
||||
|
||||
t.Run("without license", func(t *testing.T) {
|
||||
th.App.Srv().SetLicense(nil)
|
||||
groups, resp, err := th.SystemAdminClient.GetGroupsByNames(context.Background(), []string{*groupName})
|
||||
require.Error(t, err)
|
||||
CheckNotImplementedStatus(t, resp)
|
||||
assert.Nil(t, groups)
|
||||
})
|
||||
|
||||
th.App.Srv().SetLicense(model.NewTestLicenseSKU(model.LicenseShortSkuProfessional))
|
||||
|
||||
t.Run("search for one group", func(t *testing.T) {
|
||||
groups, resp, err := th.SystemAdminClient.GetGroupsByNames(context.Background(), []string{*groupName})
|
||||
require.NoError(t, err)
|
||||
CheckOKStatus(t, resp)
|
||||
assert.ElementsMatch(t, []*model.Group{group}, groups)
|
||||
assert.Nil(t, groups[0].MemberCount)
|
||||
})
|
||||
|
||||
t.Run("search for multiple groups only finding one", func(t *testing.T) {
|
||||
searchTerms := []string{*group2Name, "fakename", "fakename2"}
|
||||
groups, resp, err := th.SystemAdminClient.GetGroupsByNames(context.Background(), searchTerms)
|
||||
require.NoError(t, err)
|
||||
CheckOKStatus(t, resp)
|
||||
assert.ElementsMatch(t, []*model.Group{group2}, groups)
|
||||
assert.Nil(t, groups[0].MemberCount)
|
||||
})
|
||||
|
||||
t.Run("search for multiple groups returning all three", func(t *testing.T) {
|
||||
searchTerms := []string{*groupName, *group2Name, *group3Name}
|
||||
groups, resp, err := th.SystemAdminClient.GetGroupsByNames(context.Background(), searchTerms)
|
||||
require.NoError(t, err)
|
||||
CheckOKStatus(t, resp)
|
||||
assert.ElementsMatch(t, []*model.Group{group, group2, group3}, groups)
|
||||
assert.Nil(t, groups[0].MemberCount)
|
||||
})
|
||||
|
||||
t.Run("search for more groups than existing returning existing", func(t *testing.T) {
|
||||
searchTerms := []string{*groupName, *group2Name, *group3Name, "fakename", "fakename2"}
|
||||
groups, resp, err := th.SystemAdminClient.GetGroupsByNames(context.Background(), searchTerms)
|
||||
require.NoError(t, err)
|
||||
CheckOKStatus(t, resp)
|
||||
assert.ElementsMatch(t, []*model.Group{group, group2, group3}, groups)
|
||||
assert.Nil(t, groups[0].MemberCount)
|
||||
})
|
||||
|
||||
t.Run("search for groups with invalid names", func(t *testing.T) {
|
||||
searchTerms := []string{"fakename", "fakename2"}
|
||||
groups, resp, err := th.SystemAdminClient.GetGroupsByNames(context.Background(), searchTerms)
|
||||
require.NoError(t, err)
|
||||
require.Empty(t, groups, "no groups should be returned")
|
||||
CheckOKStatus(t, resp)
|
||||
})
|
||||
|
||||
t.Run("search for groups is empty", func(t *testing.T) {
|
||||
searchTerms := []string{}
|
||||
groups, resp, err := th.SystemAdminClient.GetGroupsByNames(context.Background(), searchTerms)
|
||||
require.Error(t, err)
|
||||
CheckBadRequestStatus(t, resp)
|
||||
assert.Nil(t, groups)
|
||||
})
|
||||
|
||||
t.Run("attempt search without session", func(t *testing.T) {
|
||||
_, err := th.Client.Logout(context.Background())
|
||||
require.NoError(t, err)
|
||||
searchTerms := []string{*groupName}
|
||||
_, resp, err := th.Client.GetGroupsByNames(context.Background(), searchTerms)
|
||||
require.Error(t, err)
|
||||
CheckUnauthorizedStatus(t, resp)
|
||||
})
|
||||
}
|
||||
|
||||
func TestGetGroupsByUserId(t *testing.T) {
|
||||
mainHelper.Parallel(t)
|
||||
th := Setup(t).InitBasic()
@ -29,8 +29,8 @@ func TestGetServerLimits(t *testing.T) {
|
|||
|
||||
// Should have full access to all limits data
|
||||
require.Greater(t, serverLimits.ActiveUserCount, int64(0))
|
||||
require.Equal(t, int64(2500), serverLimits.MaxUsersLimit)
|
||||
require.Equal(t, int64(5000), serverLimits.MaxUsersHardLimit)
|
||||
require.Equal(t, int64(200), serverLimits.MaxUsersLimit)
|
||||
require.Equal(t, int64(250), serverLimits.MaxUsersHardLimit)
|
||||
require.Equal(t, int64(0), serverLimits.PostHistoryLimit)
|
||||
require.Equal(t, int64(0), serverLimits.LastAccessiblePostTime)
|
||||
})
@ -925,13 +925,6 @@ func TestServerBusy503(t *testing.T) {
|
|||
CheckServiceUnavailableStatus(t, resp)
|
||||
})
|
||||
|
||||
t.Run("search archived channels while busy", func(t *testing.T) {
|
||||
cs := &model.ChannelSearch{}
|
||||
_, resp, err := th.SystemAdminClient.SearchArchivedChannels(context.Background(), "foo", cs)
|
||||
require.Error(t, err)
|
||||
CheckServiceUnavailableStatus(t, resp)
|
||||
})
|
||||
|
||||
th.App.Srv().Platform().Busy.Clear()
|
||||
|
||||
t.Run("search users while not busy", func(t *testing.T) {
@ -4,6 +4,8 @@
|
|||
package api4
|
||||
|
||||
import (
|
||||
"crypto/sha256"
|
||||
"encoding/base64"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
|
|
@ -63,6 +65,7 @@ func (api *API) InitUser() {
|
|||
api.BaseRoutes.User.Handle("/mfa/generate", api.APISessionRequiredMfa(generateMfaSecret)).Methods(http.MethodPost)
|
||||
|
||||
api.BaseRoutes.Users.Handle("/login", api.APIHandler(login)).Methods(http.MethodPost)
|
||||
api.BaseRoutes.Users.Handle("/login/sso/code-exchange", api.APIHandler(loginSSOCodeExchange)).Methods(http.MethodPost)
|
||||
api.BaseRoutes.Users.Handle("/login/desktop_token", api.RateLimitedHandler(api.APIHandler(loginWithDesktopToken), model.RateLimitSettings{PerSec: model.NewPointer(2), MaxBurst: model.NewPointer(1)})).Methods(http.MethodPost)
|
||||
api.BaseRoutes.Users.Handle("/login/switch", api.APIHandler(switchAccountType)).Methods(http.MethodPost)
|
||||
api.BaseRoutes.Users.Handle("/login/cws", api.APIHandlerTrustRequester(loginCWS)).Methods(http.MethodPost)
|
||||
|
|
@ -110,6 +113,102 @@ func (api *API) InitUser() {
|
|||
api.BaseRoutes.Users.Handle("/trigger-notify-admin-posts", api.APISessionRequired(handleTriggerNotifyAdminPosts)).Methods(http.MethodPost)
|
||||
}
|
||||
|
||||
// loginSSOCodeExchange exchanges a short-lived login_code for session tokens (mobile SAML code exchange)
|
||||
func loginSSOCodeExchange(c *Context, w http.ResponseWriter, r *http.Request) {
|
||||
if !c.App.Config().FeatureFlags.MobileSSOCodeExchange {
|
||||
c.Err = model.NewAppError("loginSSOCodeExchange", "api.oauth.get_access_token.bad_request.app_error", nil, "feature disabled", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
props := model.MapFromJSON(r.Body)
|
||||
loginCode := props["login_code"]
|
||||
codeVerifier := props["code_verifier"]
|
||||
state := props["state"]
|
||||
|
||||
if loginCode == "" || codeVerifier == "" || state == "" {
|
||||
c.SetInvalidParam("login_code | code_verifier | state")
|
||||
return
|
||||
}
|
||||
|
||||
// Consume one-time code atomically
|
||||
token, appErr := c.App.ConsumeTokenOnce(model.TokenTypeSSOCodeExchange, loginCode)
|
||||
if appErr != nil {
|
||||
c.Err = appErr
|
||||
return
|
||||
}
|
||||
|
||||
// Check token expiration as a fallback to the cleanup process
|
||||
if token.IsExpired() {
|
||||
c.Err = model.NewAppError("loginSSOCodeExchange", "api.oauth.get_access_token.bad_request.app_error", nil, "token expired", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
// Parse extra JSON
|
||||
extra := model.MapFromJSON(strings.NewReader(token.Extra))
|
||||
userID := extra["user_id"]
|
||||
codeChallenge := extra["code_challenge"]
|
||||
method := strings.ToUpper(extra["code_challenge_method"])
|
||||
expectedState := extra["state"]
|
||||
|
||||
if userID == "" || codeChallenge == "" || expectedState == "" {
|
||||
c.Err = model.NewAppError("loginSSOCodeExchange", "api.oauth.get_access_token.bad_request.app_error", nil, "", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
if state != expectedState {
|
||||
c.Err = model.NewAppError("loginSSOCodeExchange", "api.oauth.get_access_token.bad_request.app_error", nil, "state mismatch", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
// Verify the code challenge (PKCE-style S256) supplied for the SAML SSO exchange
|
||||
var computed string
|
||||
switch strings.ToUpper(method) {
|
||||
case "S256":
|
||||
sum := sha256.Sum256([]byte(codeVerifier))
|
||||
computed = base64.RawURLEncoding.EncodeToString(sum[:])
|
||||
case "":
|
||||
computed = codeVerifier
|
||||
case "PLAIN":
|
||||
// Explicitly reject plain method for security
|
||||
c.Err = model.NewAppError("loginSSOCodeExchange", "api.oauth.get_access_token.bad_request.app_error", nil, "plain SAML challenge method not supported",
|
||||
http.StatusBadRequest)
|
||||
return
|
||||
default:
|
||||
// Reject unknown methods
|
||||
c.Err = model.NewAppError("loginSSOCodeExchange", "api.oauth.get_access_token.bad_request.app_error", nil, "unsupported SAML challenge method", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
if computed != codeChallenge {
|
||||
c.Err = model.NewAppError("loginSSOCodeExchange", "api.oauth.get_access_token.bad_request.app_error", nil, "SAML challenge mismatch", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
// Create session for this user
|
||||
user, err := c.App.GetUser(userID)
|
||||
if err != nil {
|
||||
c.Err = err
|
||||
return
|
||||
}
|
||||
|
||||
isMobile := utils.IsMobileRequest(r)
|
||||
session, err2 := c.App.DoLogin(c.AppContext, w, r, user, "", isMobile, false, true)
|
||||
if err2 != nil {
|
||||
c.Err = err2
|
||||
return
|
||||
}
|
||||
c.AppContext = c.AppContext.WithSession(session)
|
||||
c.App.AttachSessionCookies(c.AppContext, w, r)
|
||||
|
||||
// Respond with tokens for mobile client to set
|
||||
resp := map[string]string{
|
||||
"token": session.Token,
|
||||
"csrf": session.GetCSRF(),
|
||||
}
|
||||
if err := json.NewEncoder(w).Encode(resp); err != nil {
|
||||
c.Logger.Warn("Error while writing response", mlog.Err(err))
|
||||
}
|
||||
}
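// Illustrative sketch only (not part of this changeset): how a mobile client could produce
// the verifier/challenge pair checked by loginSSOCodeExchange above and then redeem the
// one-time login_code. Helper names and serverURL are hypothetical; the snippet reuses the
// standard-library packages this file already imports, plus crypto/rand and bytes.
func newCodeVerifierAndChallenge() (verifier, challenge string, err error) {
	raw := make([]byte, 32)
	if _, err = rand.Read(raw); err != nil {
		return "", "", err
	}
	verifier = base64.RawURLEncoding.EncodeToString(raw)
	// S256: challenge = base64url(sha256(verifier)), matching the server-side switch branch above.
	sum := sha256.Sum256([]byte(verifier))
	challenge = base64.RawURLEncoding.EncodeToString(sum[:])
	return verifier, challenge, nil
}

func redeemSSOLoginCode(serverURL, loginCode, state, verifier string) (map[string]string, error) {
	body, err := json.Marshal(map[string]string{
		"login_code":    loginCode,
		"code_verifier": verifier,
		"state":         state,
	})
	if err != nil {
		return nil, err
	}
	// Path follows the /login/sso/code-exchange route registered in InitUser.
	resp, err := http.Post(serverURL+"/api/v4/users/login/sso/code-exchange", "application/json", bytes.NewReader(body))
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()
	// On success the handler responds with {"token": ..., "csrf": ...}.
	tokens := map[string]string{}
	if err := json.NewDecoder(resp.Body).Decode(&tokens); err != nil {
		return nil, err
	}
	return tokens, nil
}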
|
||||
|
||||
func createUser(c *Context, w http.ResponseWriter, r *http.Request) {
|
||||
var user model.User
|
||||
if jsonErr := json.NewDecoder(r.Body).Decode(&user); jsonErr != nil {
|
||||
|
|
@ -2990,6 +3089,7 @@ func verifyUserEmailWithoutToken(c *Context, w http.ResponseWriter, r *http.Requ
|
|||
auditRec.Success()
|
||||
c.LogAudit("user verified")
|
||||
|
||||
c.App.SanitizeProfile(user, true)
|
||||
if err := json.NewEncoder(w).Encode(user); err != nil {
|
||||
c.Logger.Warn("Error while writing response", mlog.Err(err))
|
||||
}
|
||||
|
|
|
|||
|
|
@ -6,6 +6,8 @@ package api4
|
|||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"crypto/sha256"
|
||||
"encoding/base64"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"image/png"
|
||||
|
|
@ -6419,6 +6421,51 @@ func TestVerifyUserEmailWithoutToken(t *testing.T) {
|
|||
require.Equal(t, ruser.Id, vuser.Id)
|
||||
}, "Should verify a new user")
|
||||
|
||||
th.TestForSystemAdminAndLocal(t, func(t *testing.T, client *model.Client4) {
|
||||
// Enable MFA for this test
|
||||
th.App.Srv().SetLicense(model.NewTestLicense("mfa"))
|
||||
th.App.UpdateConfig(func(cfg *model.Config) { *cfg.ServiceSettings.EnableMultifactorAuthentication = true })
|
||||
|
||||
email := th.GenerateTestEmail()
|
||||
user := model.User{Email: email, Nickname: "Test User", Password: "password123", Username: GenerateTestUsername(), Roles: model.SystemUserRoleId}
|
||||
ruser, _, _ := th.Client.CreateUser(context.Background(), &user)
|
||||
|
||||
// Set some NotifyProps to ensure we have data to verify is preserved
|
||||
ruser.NotifyProps = map[string]string{
|
||||
"email": "true",
|
||||
"push": "mention",
|
||||
"desktop": "mention",
|
||||
"channel": "true",
|
||||
}
|
||||
_, appErr := th.App.UpdateUser(th.Context, ruser, false)
|
||||
require.Nil(t, appErr)
|
||||
|
||||
// Set up MFA secret for the user
|
||||
secret, appErr := th.App.GenerateMfaSecret(ruser.Id)
|
||||
require.Nil(t, appErr)
|
||||
err := th.Server.Store().User().UpdateMfaSecret(ruser.Id, secret.Secret)
|
||||
require.NoError(t, err)
|
||||
|
||||
// Verify the user has a password hash and MFA secret in the database
|
||||
dbUser, appErr := th.App.GetUser(ruser.Id)
|
||||
require.Nil(t, appErr)
|
||||
require.NotEmpty(t, dbUser.Password, "User should have a password hash in database")
|
||||
require.NotEmpty(t, dbUser.MfaSecret, "User should have MFA secret in database")
|
||||
|
||||
// Call the API endpoint
|
||||
vuser, _, err := client.VerifyUserEmailWithoutToken(context.Background(), ruser.Id)
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, ruser.Id, vuser.Id)
|
||||
|
||||
// Verify sensitive fields are sanitized in the response
|
||||
require.Empty(t, vuser.Password, "Password hash should be sanitized from response")
|
||||
require.Empty(t, vuser.MfaSecret, "MFA secret should be sanitized from response")
|
||||
|
||||
// Verify admin-level fields like NotifyProps are preserved for system admin
|
||||
require.NotEmpty(t, vuser.NotifyProps, "NotifyProps should be preserved for system admin")
|
||||
require.Equal(t, "true", vuser.NotifyProps["email"], "NotifyProps data should be preserved for system admin")
|
||||
}, "Should sanitize password hash and MFA secret from response")
|
||||
|
||||
th.TestForSystemAdminAndLocal(t, func(t *testing.T, client *model.Client4) {
|
||||
vuser, _, err := client.VerifyUserEmailWithoutToken(context.Background(), "randomId")
|
||||
require.Error(t, err)
|
||||
|
|
@ -8445,6 +8492,85 @@ func TestLoginWithDesktopToken(t *testing.T) {
|
|||
})
|
||||
}
|
||||
|
||||
func TestLoginSSOCodeExchange(t *testing.T) {
|
||||
mainHelper.Parallel(t)
|
||||
th := Setup(t).InitBasic()
|
||||
defer th.TearDown()
|
||||
|
||||
t.Run("wrong token type cannot be used for code exchange", func(t *testing.T) {
|
||||
th.App.UpdateConfig(func(cfg *model.Config) {
|
||||
cfg.FeatureFlags.MobileSSOCodeExchange = true
|
||||
})
|
||||
|
||||
token := model.NewToken(model.TokenTypeOAuth, "extra-data")
|
||||
require.NoError(t, th.App.Srv().Store().Token().Save(token))
|
||||
defer func() {
|
||||
_ = th.App.Srv().Store().Token().Delete(token.Token)
|
||||
}()
|
||||
|
||||
props := map[string]string{
|
||||
"login_code": token.Token,
|
||||
"code_verifier": "test_verifier",
|
||||
"state": "test_state",
|
||||
}
|
||||
|
||||
resp, err := th.Client.DoAPIPost(context.Background(), "/users/login/sso/code-exchange", model.MapToJSON(props))
|
||||
require.Error(t, err)
|
||||
require.Equal(t, http.StatusNotFound, resp.StatusCode)
|
||||
})
|
||||
|
||||
t.Run("successful code exchange with S256 challenge", func(t *testing.T) {
|
||||
th.App.UpdateConfig(func(cfg *model.Config) {
|
||||
cfg.FeatureFlags.MobileSSOCodeExchange = true
|
||||
})
|
||||
|
||||
samlUser := th.CreateUserWithAuth(model.UserAuthServiceSaml)
|
||||
|
||||
codeVerifier := "test_code_verifier_123456789"
|
||||
state := "test_state_value"
|
||||
|
||||
sum := sha256.Sum256([]byte(codeVerifier))
|
||||
codeChallenge := base64.RawURLEncoding.EncodeToString(sum[:])
|
||||
|
||||
extra := map[string]string{
|
||||
"user_id": samlUser.Id,
|
||||
"code_challenge": codeChallenge,
|
||||
"code_challenge_method": "S256",
|
||||
"state": state,
|
||||
}
|
||||
|
||||
token := model.NewToken(model.TokenTypeSSOCodeExchange, model.MapToJSON(extra))
|
||||
require.NoError(t, th.App.Srv().Store().Token().Save(token))
|
||||
|
||||
props := map[string]string{
|
||||
"login_code": token.Token,
|
||||
"code_verifier": codeVerifier,
|
||||
"state": state,
|
||||
}
|
||||
|
||||
resp, err := th.Client.DoAPIPost(context.Background(), "/users/login/sso/code-exchange", model.MapToJSON(props))
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, http.StatusOK, resp.StatusCode)
|
||||
|
||||
var result map[string]string
|
||||
require.NoError(t, json.NewDecoder(resp.Body).Decode(&result))
|
||||
assert.NotEmpty(t, result["token"])
|
||||
assert.NotEmpty(t, result["csrf"])
|
||||
|
||||
_, err = th.App.Srv().Store().Token().GetByToken(token.Token)
|
||||
require.Error(t, err)
|
||||
|
||||
authenticatedClient := model.NewAPIv4Client(th.Client.URL)
|
||||
authenticatedClient.SetToken(result["token"])
|
||||
|
||||
user, _, err := authenticatedClient.GetMe(context.Background(), "")
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, samlUser.Id, user.Id)
|
||||
assert.Equal(t, samlUser.Email, user.Email)
|
||||
assert.Equal(t, samlUser.Username, user.Username)
|
||||
})
|
||||
}
|
||||
|
||||
func TestGetUsersByNames(t *testing.T) {
|
||||
mainHelper.Parallel(t)
|
||||
th := Setup(t).InitBasic()
|
||||
|
|
|
|||
|
|
@ -12,6 +12,7 @@ import (
|
|||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/dgryski/dgoogauth"
|
||||
"github.com/gorilla/websocket"
|
||||
"github.com/stretchr/testify/require"
|
||||
|
||||
|
|
@ -536,3 +537,147 @@ func TestValidateDisconnectErrCode(t *testing.T) {
|
|||
})
|
||||
}
|
||||
}
|
||||
|
||||
// Helper function to enable MFA enforcement in config
func enableMFAEnforcement(th *TestHelper) {
th.App.UpdateConfig(func(cfg *model.Config) {
*cfg.ServiceSettings.EnableMultifactorAuthentication = true
*cfg.ServiceSettings.EnforceMultifactorAuthentication = true
})
}

// Helper function to set up MFA for a user
func setupUserWithMFA(t *testing.T, th *TestHelper, user *model.User) string {
// Setup MFA properly - following authentication_test.go pattern
secret, appErr := th.App.GenerateMfaSecret(user.Id)
require.Nil(t, appErr)
err := th.Server.Store().User().UpdateMfaActive(user.Id, true)
require.NoError(t, err)
err = th.Server.Store().User().UpdateMfaSecret(user.Id, secret.Secret)
require.NoError(t, err)
return secret.Secret
}
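// Illustrative helper sketch (not part of this changeset): turning the secret returned by
// setupUserWithMFA into the one-time code LoginWithMFA expects, exactly as the
// "user has MFA active" subtest below does with dgoogauth.
func currentTOTPCode(secret string) string {
	// 30-second time step, matching the dgoogauth.ComputeCode usage in this file.
	code := dgoogauth.ComputeCode(secret, time.Now().UTC().Unix()/30)
	return fmt.Sprintf("%06d", code)
}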
|
||||
|
||||
func TestWebSocketMFAEnforcement(t *testing.T) {
|
||||
mainHelper.Parallel(t)
|
||||
|
||||
t.Run("WebSocket works when MFA enforcement is disabled", func(t *testing.T) {
|
||||
th := Setup(t).InitBasic()
|
||||
defer th.TearDown()
|
||||
|
||||
// MFA enforcement disabled - should work normally
|
||||
webSocketClient := th.CreateConnectedWebSocketClient(t)
|
||||
defer webSocketClient.Close()
|
||||
|
||||
webSocketClient.GetStatuses()
|
||||
|
||||
select {
|
||||
case resp := <-webSocketClient.ResponseChannel:
|
||||
require.Nil(t, resp.Error, "WebSocket should work when MFA enforcement is disabled")
|
||||
require.Equal(t, resp.Status, model.StatusOk)
|
||||
case <-time.After(3 * time.Second):
|
||||
require.Fail(t, "Expected WebSocket response but got timeout")
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("WebSocket blocked when MFA required but user has no MFA", func(t *testing.T) {
|
||||
th := SetupEnterprise(t).InitBasic()
|
||||
defer th.TearDown()
|
||||
|
||||
// Enable MFA enforcement in config
|
||||
enableMFAEnforcement(th)
|
||||
// Defer the teardown to reset the config after the test
|
||||
defer func() {
|
||||
th.App.UpdateConfig(func(cfg *model.Config) {
|
||||
*cfg.ServiceSettings.EnforceMultifactorAuthentication = false
|
||||
})
|
||||
}()
|
||||
|
||||
// Create user without MFA using existing basic user to avoid license timing issues
|
||||
user := th.BasicUser
|
||||
|
||||
// Login user (this should work for initial authentication)
|
||||
client := th.CreateClient()
|
||||
_, _, err := client.Login(context.Background(), user.Email, "Pa$$word11")
|
||||
require.NoError(t, err)
|
||||
|
||||
// Create WebSocket client - initial connection succeeds, but subsequent API requests require completed MFA
|
||||
webSocketClient, err := th.CreateWebSocketClientWithClient(client)
|
||||
require.NoError(t, err)
|
||||
require.NotNil(t, webSocketClient, "webSocketClient should not be nil")
|
||||
webSocketClient.Listen()
|
||||
defer webSocketClient.Close()
|
||||
|
||||
// First, consume the successful authentication challenge response
|
||||
authResp := <-webSocketClient.ResponseChannel
|
||||
require.Nil(t, authResp.Error, "Authentication challenge should succeed")
|
||||
require.Equal(t, authResp.Status, model.StatusOk)
|
||||
|
||||
// Individual WebSocket requests should be blocked due to MFA requirement
|
||||
webSocketClient.GetStatuses()
|
||||
|
||||
// Should get authentication error due to MFA requirement on the second request
|
||||
select {
|
||||
case resp := <-webSocketClient.ResponseChannel:
|
||||
t.Logf("Received response: Error=%v, Status=%s, SeqReply=%d", resp.Error, resp.Status, resp.SeqReply)
|
||||
require.NotNil(t, resp.Error, "Should get authentication error due to MFA requirement")
|
||||
require.Equal(t, "api.web_socket_router.not_authenticated.app_error", resp.Error.Id,
|
||||
"Should get specific 'not authenticated' error ID due to MFA requirement")
|
||||
case <-time.After(3 * time.Second):
|
||||
require.Fail(t, "Expected WebSocket error response but got timeout")
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("WebSocket connection allowed when user has MFA active", func(t *testing.T) {
|
||||
th := SetupEnterprise(t).InitBasic()
|
||||
defer th.TearDown()
|
||||
|
||||
// Enable MFA enforcement in config
|
||||
enableMFAEnforcement(th)
|
||||
// Defer the teardown to reset the config after the test
|
||||
defer func() {
|
||||
th.App.UpdateConfig(func(cfg *model.Config) {
|
||||
*cfg.ServiceSettings.EnforceMultifactorAuthentication = false
|
||||
})
|
||||
}()
|
||||
|
||||
// Create user and set up MFA
|
||||
user := &model.User{
|
||||
Email: th.GenerateTestEmail(),
|
||||
Username: model.NewUsername(),
|
||||
Password: "password123",
|
||||
}
|
||||
ruser, _, err := th.Client.CreateUser(context.Background(), user)
|
||||
require.NoError(t, err)
|
||||
|
||||
th.LinkUserToTeam(ruser, th.BasicTeam)
|
||||
_, err = th.App.Srv().Store().User().VerifyEmail(ruser.Id, ruser.Email)
|
||||
require.NoError(t, err)
|
||||
|
||||
// Setup MFA for the user and get the secret
|
||||
secretString := setupUserWithMFA(t, th, ruser)
|
||||
|
||||
// Generate TOTP token from the user's MFA secret
|
||||
code := dgoogauth.ComputeCode(secretString, time.Now().UTC().Unix()/30)
|
||||
token := fmt.Sprintf("%06d", code)
|
||||
|
||||
client := th.CreateClient()
|
||||
_, _, err = client.LoginWithMFA(context.Background(), user.Email, user.Password, token)
|
||||
require.NoError(t, err)
|
||||
|
||||
// WebSocket connection should work
|
||||
webSocketClient := th.CreateConnectedWebSocketClientWithClient(t, client)
|
||||
defer webSocketClient.Close()
|
||||
|
||||
// Should be able to get statuses
|
||||
webSocketClient.GetStatuses()
|
||||
|
||||
select {
|
||||
case resp := <-webSocketClient.ResponseChannel:
|
||||
require.Nil(t, resp.Error, "WebSocket should work when MFA is properly set up")
|
||||
require.Equal(t, resp.Status, model.StatusOk)
|
||||
case <-time.After(5 * time.Second):
|
||||
require.Fail(t, "Expected WebSocket response but got timeout")
|
||||
}
|
||||
})
|
||||
}
|
||||
|
|
|
|||
|
|
@ -445,3 +445,60 @@ func (a *App) ValidateChannelAccessControlPolicyCreation(rctx request.CTX, userI
|
|||
// For channel-type policies, validate channel-specific permission (policy ID equals channel ID)
|
||||
return a.ValidateChannelAccessControlPermission(rctx, userID, policy.ID)
|
||||
}
|
||||
|
||||
// TestExpressionWithChannelContext tests expressions for channel admins with attribute validation
|
||||
// Channel admins can only see users that match expressions they themselves would match
|
||||
func (a *App) TestExpressionWithChannelContext(rctx request.CTX, expression string, opts model.SubjectSearchOptions) ([]*model.User, int64, *model.AppError) {
|
||||
// Get the current user (channel admin)
|
||||
session := rctx.Session()
|
||||
if session == nil {
|
||||
return nil, 0, model.NewAppError("TestExpressionWithChannelContext", "api.context.session_expired.app_error", nil, "", http.StatusUnauthorized)
|
||||
}
|
||||
|
||||
currentUserID := session.UserId
|
||||
|
||||
// SECURITY: First check if the channel admin themselves matches this expression
|
||||
// If they don't match, they shouldn't be able to see users who do
|
||||
adminMatches, appErr := a.ValidateExpressionAgainstRequester(rctx, expression, currentUserID)
|
||||
if appErr != nil {
|
||||
return nil, 0, appErr
|
||||
}
|
||||
|
||||
if !adminMatches {
|
||||
// Channel admin doesn't match the expression, so return empty results
|
||||
return []*model.User{}, 0, nil
|
||||
}
|
||||
|
||||
// If the channel admin matches the expression, run it against all users
|
||||
acs := a.Srv().ch.AccessControl
|
||||
if acs == nil {
|
||||
return nil, 0, model.NewAppError("TestExpressionWithChannelContext", "app.pap.check_expression.app_error", nil, "Policy Administration Point is not initialized", http.StatusNotImplemented)
|
||||
}
|
||||
|
||||
return a.TestExpression(rctx, expression, opts)
|
||||
}
|
||||
|
||||
// ValidateExpressionAgainstRequester validates an expression directly against a specific user
|
||||
func (a *App) ValidateExpressionAgainstRequester(rctx request.CTX, expression string, requesterID string) (bool, *model.AppError) {
|
||||
// Self-exclusion validation should work with any attribute
|
||||
// Channel admins should be able to validate any expression they're testing
|
||||
|
||||
// Use access control service to evaluate expression
|
||||
acs := a.Srv().ch.AccessControl
|
||||
if acs == nil {
|
||||
return false, model.NewAppError("ValidateExpressionAgainstRequester", "app.pap.check_expression.app_error", nil, "Policy Administration Point is not initialized", http.StatusNotImplemented)
|
||||
}
|
||||
|
||||
// Search only for the specific requester user ID
|
||||
users, _, appErr := acs.QueryUsersForExpression(rctx, expression, model.SubjectSearchOptions{
|
||||
SubjectID: requesterID, // Only check this specific user
|
||||
Limit: 1, // Maximum 1 result expected
|
||||
})
|
||||
if appErr != nil {
|
||||
return false, appErr
|
||||
}
|
||||
if len(users) == 1 && users[0].Id == requesterID {
|
||||
return true, nil
|
||||
}
|
||||
return false, nil
|
||||
}
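// Hypothetical usage sketch (not part of this changeset): a guard that only lets a requester
// save an expression they themselves match, reusing the self-match check above. The function
// name is invented for illustration, and the error id is reused from the checks above.
func requireSelfMatch(a *App, rctx request.CTX, expression, requesterID string) *model.AppError {
	matches, appErr := a.ValidateExpressionAgainstRequester(rctx, expression, requesterID)
	if appErr != nil {
		return appErr
	}
	if !matches {
		return model.NewAppError("requireSelfMatch", "app.pap.check_expression.app_error", nil,
			"requester does not match the expression", http.StatusForbidden)
	}
	return nil
}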
|
||||
|
|
|
|||
|
|
@ -827,6 +827,321 @@ func TestValidateChannelAccessControlPolicyCreation(t *testing.T) {
|
|||
})
|
||||
}
|
||||
|
||||
func TestTestExpressionWithChannelContext(t *testing.T) {
|
||||
th := Setup(t).InitBasic()
|
||||
defer th.TearDown()
|
||||
|
||||
// Create test session with user
|
||||
session := model.Session{
|
||||
UserId: th.BasicUser.Id,
|
||||
Id: model.NewId(),
|
||||
}
|
||||
|
||||
// Setup test context with session
|
||||
rctx := request.TestContext(t).WithSession(&session)
|
||||
|
||||
t.Run("should allow channel admin to test expression they match", func(t *testing.T) {
|
||||
// Setup mock access control service
|
||||
mockAccessControlService := &mocks.AccessControlServiceInterface{}
|
||||
th.App.Srv().ch.AccessControl = mockAccessControlService
|
||||
|
||||
expression := "user.attributes.department == 'engineering'"
|
||||
opts := model.SubjectSearchOptions{Limit: 50}
|
||||
|
||||
// Mock that admin matches the expression (for validation)
|
||||
mockAccessControlService.On(
|
||||
"QueryUsersForExpression",
|
||||
rctx,
|
||||
expression,
|
||||
model.SubjectSearchOptions{SubjectID: th.BasicUser.Id, Limit: 1},
|
||||
).Return([]*model.User{th.BasicUser}, int64(1), nil) // Admin matches
|
||||
|
||||
// Mock the actual search results
|
||||
expectedUsers := []*model.User{th.BasicUser, th.BasicUser2}
|
||||
expectedCount := int64(2)
|
||||
mockAccessControlService.On(
|
||||
"QueryUsersForExpression",
|
||||
rctx,
|
||||
expression,
|
||||
opts,
|
||||
).Return(expectedUsers, expectedCount, nil)
|
||||
|
||||
// Call the function
|
||||
users, count, appErr := th.App.TestExpressionWithChannelContext(rctx, expression, opts)
|
||||
|
||||
require.Nil(t, appErr)
|
||||
require.Equal(t, expectedUsers, users)
|
||||
require.Equal(t, expectedCount, count)
|
||||
mockAccessControlService.AssertExpectations(t)
|
||||
})
|
||||
|
||||
t.Run("should deny channel admin testing expression they don't match", func(t *testing.T) {
|
||||
// Setup mock access control service
|
||||
mockAccessControlService := &mocks.AccessControlServiceInterface{}
|
||||
th.App.Srv().ch.AccessControl = mockAccessControlService
|
||||
|
||||
expression := "user.attributes.department == 'sales'"
|
||||
opts := model.SubjectSearchOptions{Limit: 50}
|
||||
|
||||
// Mock that admin does NOT match the expression (for validation)
|
||||
mockAccessControlService.On(
|
||||
"QueryUsersForExpression",
|
||||
rctx,
|
||||
expression,
|
||||
model.SubjectSearchOptions{SubjectID: th.BasicUser.Id, Limit: 1},
|
||||
).Return([]*model.User{}, int64(0), nil) // Admin doesn't match
|
||||
|
||||
// Call the function
|
||||
users, count, appErr := th.App.TestExpressionWithChannelContext(rctx, expression, opts)
|
||||
|
||||
require.Nil(t, appErr)
|
||||
require.Empty(t, users) // Should return empty results
|
||||
require.Equal(t, int64(0), count)
|
||||
mockAccessControlService.AssertExpectations(t)
|
||||
})
|
||||
|
||||
t.Run("should handle complex expression with multiple attributes", func(t *testing.T) {
|
||||
// Setup mock access control service
|
||||
mockAccessControlService := &mocks.AccessControlServiceInterface{}
|
||||
th.App.Srv().ch.AccessControl = mockAccessControlService
|
||||
|
||||
// Complex expression with multiple conditions
|
||||
expression := "user.attributes.department == 'engineering' && user.attributes.team == 'backend'"
|
||||
opts := model.SubjectSearchOptions{Limit: 50}
|
||||
|
||||
// Mock that admin matches the expression (for validation)
|
||||
mockAccessControlService.On(
|
||||
"QueryUsersForExpression",
|
||||
rctx,
|
||||
expression,
|
||||
model.SubjectSearchOptions{SubjectID: th.BasicUser.Id, Limit: 1},
|
||||
).Return([]*model.User{th.BasicUser}, int64(1), nil) // Admin matches
|
||||
|
||||
// Mock the actual search results
|
||||
expectedUsers := []*model.User{th.BasicUser, th.BasicUser2}
|
||||
expectedCount := int64(2)
|
||||
mockAccessControlService.On(
|
||||
"QueryUsersForExpression",
|
||||
rctx,
|
||||
expression,
|
||||
opts,
|
||||
).Return(expectedUsers, expectedCount, nil)
|
||||
|
||||
// Call the function
|
||||
users, count, appErr := th.App.TestExpressionWithChannelContext(rctx, expression, opts)
|
||||
|
||||
require.Nil(t, appErr)
|
||||
require.Equal(t, expectedUsers, users)
|
||||
require.Equal(t, expectedCount, count)
|
||||
mockAccessControlService.AssertExpectations(t)
|
||||
})
|
||||
|
||||
t.Run("should deny when admin partially matches expression", func(t *testing.T) {
|
||||
// Setup mock access control service
|
||||
mockAccessControlService := &mocks.AccessControlServiceInterface{}
|
||||
th.App.Srv().ch.AccessControl = mockAccessControlService
|
||||
|
||||
// Expression that admin only partially matches (has department but not team)
|
||||
expression := "user.attributes.department == 'engineering' && user.attributes.team == 'frontend'"
|
||||
opts := model.SubjectSearchOptions{Limit: 50}
|
||||
|
||||
// Mock that admin does NOT match the full expression (for validation)
|
||||
mockAccessControlService.On(
|
||||
"QueryUsersForExpression",
|
||||
rctx,
|
||||
expression,
|
||||
model.SubjectSearchOptions{SubjectID: th.BasicUser.Id, Limit: 1},
|
||||
).Return([]*model.User{}, int64(0), nil) // Admin doesn't match full expression
|
||||
|
||||
// Call the function
|
||||
users, count, appErr := th.App.TestExpressionWithChannelContext(rctx, expression, opts)
|
||||
|
||||
require.Nil(t, appErr)
|
||||
require.Empty(t, users) // Should return empty results
|
||||
require.Equal(t, int64(0), count)
|
||||
mockAccessControlService.AssertExpectations(t)
|
||||
})
|
||||
|
||||
t.Run("should allow expressions with different operators", func(t *testing.T) {
|
||||
// Setup mock access control service
|
||||
mockAccessControlService := &mocks.AccessControlServiceInterface{}
|
||||
th.App.Srv().ch.AccessControl = mockAccessControlService
|
||||
|
||||
// Expression with != operator
|
||||
expression := "user.attributes.department != 'sales'"
|
||||
opts := model.SubjectSearchOptions{Limit: 50}
|
||||
|
||||
// Mock that admin matches the expression (admin has department='engineering')
|
||||
mockAccessControlService.On(
|
||||
"QueryUsersForExpression",
|
||||
rctx,
|
||||
expression,
|
||||
model.SubjectSearchOptions{SubjectID: th.BasicUser.Id, Limit: 1},
|
||||
).Return([]*model.User{th.BasicUser}, int64(1), nil) // Admin matches
|
||||
|
||||
// Mock the actual search results
|
||||
expectedUsers := []*model.User{th.BasicUser, th.BasicUser2}
|
||||
expectedCount := int64(2)
|
||||
mockAccessControlService.On(
|
||||
"QueryUsersForExpression",
|
||||
rctx,
|
||||
expression,
|
||||
opts,
|
||||
).Return(expectedUsers, expectedCount, nil)
|
||||
|
||||
// Call the function
|
||||
users, count, appErr := th.App.TestExpressionWithChannelContext(rctx, expression, opts)
|
||||
|
||||
require.Nil(t, appErr)
|
||||
require.Equal(t, expectedUsers, users)
|
||||
require.Equal(t, expectedCount, count)
|
||||
mockAccessControlService.AssertExpectations(t)
|
||||
})
|
||||
|
||||
t.Run("should handle error in validation step", func(t *testing.T) {
|
||||
// Setup mock access control service
|
||||
mockAccessControlService := &mocks.AccessControlServiceInterface{}
|
||||
th.App.Srv().ch.AccessControl = mockAccessControlService
|
||||
|
||||
expression := "user.attributes.department == 'engineering'"
|
||||
opts := model.SubjectSearchOptions{Limit: 50}
|
||||
|
||||
// Mock that validation step fails
|
||||
mockAccessControlService.On(
|
||||
"QueryUsersForExpression",
|
||||
rctx,
|
||||
expression,
|
||||
model.SubjectSearchOptions{SubjectID: th.BasicUser.Id, Limit: 1},
|
||||
).Return([]*model.User{}, int64(0), model.NewAppError("TestExpressionWithChannelContext", "app.access_control.query.app_error", nil, "validation error", http.StatusInternalServerError))
|
||||
|
||||
// Call the function
|
||||
_, _, appErr := th.App.TestExpressionWithChannelContext(rctx, expression, opts)
|
||||
|
||||
require.NotNil(t, appErr)
|
||||
require.Equal(t, "TestExpressionWithChannelContext", appErr.Where)
|
||||
mockAccessControlService.AssertExpectations(t)
|
||||
})
|
||||
}
|
||||
|
||||
func TestValidateExpressionAgainstRequester(t *testing.T) {
|
||||
th := Setup(t).InitBasic()
|
||||
defer th.TearDown()
|
||||
|
||||
rctx := request.TestContext(t)
|
||||
|
||||
t.Run("should return true when requester matches expression", func(t *testing.T) {
|
||||
// Setup mock access control service
|
||||
mockAccessControlService := &mocks.AccessControlServiceInterface{}
|
||||
th.App.Srv().ch.AccessControl = mockAccessControlService
|
||||
|
||||
expression := "user.attributes.team == 'engineering'"
|
||||
requesterID := th.BasicUser.Id
|
||||
|
||||
// Mock that the requester is found in the results (optimized query)
|
||||
mockUsers := []*model.User{th.BasicUser}
|
||||
mockAccessControlService.On(
|
||||
"QueryUsersForExpression",
|
||||
rctx,
|
||||
expression,
|
||||
model.SubjectSearchOptions{SubjectID: requesterID, Limit: 1},
|
||||
).Return(mockUsers, int64(1), nil)
|
||||
|
||||
// Call the function
|
||||
matches, appErr := th.App.ValidateExpressionAgainstRequester(rctx, expression, requesterID)
|
||||
|
||||
require.Nil(t, appErr)
|
||||
require.True(t, matches)
|
||||
mockAccessControlService.AssertExpectations(t)
|
||||
})
|
||||
|
||||
t.Run("should return false when requester does not match expression", func(t *testing.T) {
|
||||
// Setup mock access control service
|
||||
mockAccessControlService := &mocks.AccessControlServiceInterface{}
|
||||
th.App.Srv().ch.AccessControl = mockAccessControlService
|
||||
|
||||
expression := "user.attributes.team == 'engineering'"
|
||||
requesterID := th.BasicUser.Id
|
||||
|
||||
// Mock that the requester is NOT found in the results (optimized query)
|
||||
mockUsers := []*model.User{} // Empty results - requester doesn't match
|
||||
mockAccessControlService.On(
|
||||
"QueryUsersForExpression",
|
||||
rctx,
|
||||
expression,
|
||||
model.SubjectSearchOptions{SubjectID: requesterID, Limit: 1},
|
||||
).Return(mockUsers, int64(0), nil)
|
||||
|
||||
// Call the function
|
||||
matches, appErr := th.App.ValidateExpressionAgainstRequester(rctx, expression, requesterID)
|
||||
|
||||
require.Nil(t, appErr)
|
||||
require.False(t, matches)
|
||||
mockAccessControlService.AssertExpectations(t)
|
||||
})
|
||||
|
||||
t.Run("should return false when no users match expression", func(t *testing.T) {
|
||||
// Setup mock access control service
|
||||
mockAccessControlService := &mocks.AccessControlServiceInterface{}
|
||||
th.App.Srv().ch.AccessControl = mockAccessControlService
|
||||
|
||||
expression := "user.attributes.team == 'nonexistent'"
|
||||
requesterID := th.BasicUser.Id
|
||||
|
||||
// Mock that no users match the expression (optimized query)
|
||||
mockUsers := []*model.User{}
|
||||
mockAccessControlService.On(
|
||||
"QueryUsersForExpression",
|
||||
rctx,
|
||||
expression,
|
||||
model.SubjectSearchOptions{SubjectID: requesterID, Limit: 1},
|
||||
).Return(mockUsers, int64(0), nil)
|
||||
|
||||
// Call the function
|
||||
matches, appErr := th.App.ValidateExpressionAgainstRequester(rctx, expression, requesterID)
|
||||
|
||||
require.Nil(t, appErr)
|
||||
require.False(t, matches)
|
||||
mockAccessControlService.AssertExpectations(t)
|
||||
})
|
||||
|
||||
t.Run("should handle access control service error", func(t *testing.T) {
|
||||
// Setup mock access control service
|
||||
mockAccessControlService := &mocks.AccessControlServiceInterface{}
|
||||
th.App.Srv().ch.AccessControl = mockAccessControlService
|
||||
|
||||
expression := "invalid expression"
|
||||
requesterID := th.BasicUser.Id
|
||||
|
||||
// Mock that the service returns an error (optimized query)
|
||||
mockAccessControlService.On(
|
||||
"QueryUsersForExpression",
|
||||
rctx,
|
||||
expression,
|
||||
model.SubjectSearchOptions{SubjectID: requesterID, Limit: 1},
|
||||
).Return([]*model.User{}, int64(0), model.NewAppError("ValidateExpressionAgainstRequester", "app.access_control.validate_requester.app_error", nil, "expression parsing error", http.StatusInternalServerError))
|
||||
|
||||
// Call the function
|
||||
matches, appErr := th.App.ValidateExpressionAgainstRequester(rctx, expression, requesterID)
|
||||
|
||||
require.NotNil(t, appErr)
|
||||
require.False(t, matches)
|
||||
require.Equal(t, "ValidateExpressionAgainstRequester", appErr.Where)
|
||||
require.Contains(t, appErr.DetailedError, "expression parsing error")
|
||||
mockAccessControlService.AssertExpectations(t)
|
||||
})
|
||||
|
||||
t.Run("should handle missing access control service", func(t *testing.T) {
|
||||
th.App.Srv().ch.AccessControl = nil
|
||||
|
||||
matches, appErr := th.App.ValidateExpressionAgainstRequester(rctx, "true", th.BasicUser.Id)
|
||||
|
||||
require.NotNil(t, appErr)
|
||||
require.False(t, matches)
|
||||
require.Equal(t, "ValidateExpressionAgainstRequester", appErr.Where)
|
||||
require.Contains(t, appErr.Message, "Could not check expression")
|
||||
})
|
||||
}
|
||||
|
||||
func TestIsSystemPolicyAppliedToChannel(t *testing.T) {
|
||||
th := Setup(t).InitBasic()
|
||||
defer th.TearDown()
|
||||
|
|
@ -834,7 +1149,6 @@ func TestIsSystemPolicyAppliedToChannel(t *testing.T) {
|
|||
rctx := request.TestContext(t)
|
||||
channelID := model.NewId()
|
||||
systemPolicyID := model.NewId()
|
||||
|
||||
t.Run("should return false when channel has no policy", func(t *testing.T) {
|
||||
// Mock access control service to return error (no policy)
|
||||
mockAccessControl := &mocks.AccessControlServiceInterface{}
|
||||
|
|
|
|||
|
|
@ -3113,17 +3113,6 @@ func (a *App) SearchChannels(rctx request.CTX, teamID string, term string) (mode
|
|||
return channelList, nil
|
||||
}
|
||||
|
||||
func (a *App) SearchArchivedChannels(rctx request.CTX, teamID string, term string, userID string) (model.ChannelList, *model.AppError) {
|
||||
term = strings.TrimSpace(term)
|
||||
|
||||
channelList, err := a.Srv().Store().Channel().SearchArchivedInTeam(teamID, term, userID)
|
||||
if err != nil {
|
||||
return nil, model.NewAppError("SearchArchivedChannels", "app.channel.search.app_error", nil, "", http.StatusInternalServerError).Wrap(err)
|
||||
}
|
||||
|
||||
return channelList, nil
|
||||
}
|
||||
|
||||
func (a *App) SearchChannelsForUser(rctx request.CTX, userID, teamID, term string) (model.ChannelList, *model.AppError) {
|
||||
includeDeleted := true
|
||||
|
||||
|
|
|
|||
|
|
@ -2863,69 +2863,61 @@ func TestIsCRTEnabledForUser(t *testing.T) {
|
|||
|
||||
func TestGetGroupMessageMembersCommonTeams(t *testing.T) {
|
||||
mainHelper.Parallel(t)
|
||||
th := SetupWithStoreMock(t)
|
||||
th := Setup(t).InitBasic()
|
||||
defer th.TearDown()
|
||||
|
||||
mockStore := th.App.Srv().Store().(*mocks.Store)
|
||||
|
||||
mockChannelStore := mocks.ChannelStore{}
|
||||
mockStore.On("Channel").Return(&mockChannelStore)
|
||||
mockChannelStore.On("Get", "gm_channel_id", true).Return(&model.Channel{Type: model.ChannelTypeGroup}, nil)
|
||||
|
||||
mockTeamStore := mocks.TeamStore{}
|
||||
mockStore.On("Team").Return(&mockTeamStore)
|
||||
|
||||
th.App.Srv().Store().Team()
|
||||
|
||||
mockTeamStore.On("GetCommonTeamIDsForMultipleUsers", []string{"user_id_1", "user_id_2"}).Return([]string{"team_id_1", "team_id_2", "team_id_3"}, nil).Times(1)
|
||||
mockTeamStore.On("GetMany", []string{"team_id_1", "team_id_2", "team_id_3"}).Return(
|
||||
[]*model.Team{
|
||||
{DisplayName: "Team 1"},
|
||||
{DisplayName: "Team 2"},
|
||||
{DisplayName: "Team 3"},
|
||||
},
|
||||
nil,
|
||||
)
|
||||
|
||||
mockUserStore := mocks.UserStore{}
|
||||
mockStore.On("User").Return(&mockUserStore)
|
||||
options := &model.UserGetOptions{
|
||||
PerPage: model.ChannelGroupMaxUsers,
|
||||
Page: 0,
|
||||
InChannelId: "gm_channel_id",
|
||||
Inactive: false,
|
||||
Active: true,
|
||||
teamsToCreate := 2
|
||||
usersToCreate := 4 // at least 3 users to create a GM channel, last user is not in any team
|
||||
teams := make([]string, 0, teamsToCreate)
|
||||
for i := 0; i < cap(teams); i++ {
|
||||
team := th.CreateTeam()
|
||||
defer func(team *model.Team) {
|
||||
appErr := th.App.PermanentDeleteTeam(th.Context, team)
|
||||
require.Nil(t, appErr)
|
||||
}(team)
|
||||
teams = append(teams, team.Id)
|
||||
}
|
||||
mockUserStore.On("GetProfilesInChannel", options).Return([]*model.User{
|
||||
{
|
||||
Id: "user_id_1",
|
||||
},
|
||||
{
|
||||
Id: "user_id_2",
|
||||
},
|
||||
}, nil)
|
||||
|
||||
var err error
|
||||
th.App.ch.srv.teamService, err = teams.New(teams.ServiceConfig{
|
||||
TeamStore: &mockTeamStore,
|
||||
ChannelStore: &mockChannelStore,
|
||||
GroupStore: &mocks.GroupStore{},
|
||||
Users: th.App.ch.srv.userService,
|
||||
WebHub: th.App.ch.srv.platform,
|
||||
ConfigFn: th.App.ch.srv.platform.Config,
|
||||
LicenseFn: th.App.ch.srv.License,
|
||||
users := make([]string, 0, usersToCreate)
|
||||
for i := 0; i < cap(users); i++ {
|
||||
user := th.CreateUser()
|
||||
defer func(user *model.User) {
|
||||
appErr := th.App.PermanentDeleteUser(th.Context, user)
|
||||
require.Nil(t, appErr)
|
||||
}(user)
|
||||
users = append(users, user.Id)
|
||||
}
|
||||
|
||||
for _, teamId := range teams {
|
||||
// add first 3 users to each team, last user is not in any team
|
||||
for i := range 3 {
|
||||
_, _, appErr := th.App.AddUserToTeam(th.Context, teamId, users[i], "")
|
||||
require.Nil(t, appErr)
|
||||
}
|
||||
}
|
||||
|
||||
// create GM channel with first 3 users who share common teams
|
||||
gmChannel, appErr := th.App.createGroupChannel(th.Context, users[:3], users[0])
|
||||
require.Nil(t, appErr)
|
||||
require.NotNil(t, gmChannel)
|
||||
|
||||
// normally you can't create a GM channel with users that don't share any teams, but we do it here to test the edge case
|
||||
// create GM channel with last 3 users, where last member is not in any team
|
||||
otherGMChannel, appErr := th.App.createGroupChannel(th.Context, users[1:], users[0])
|
||||
require.Nil(t, appErr)
|
||||
require.NotNil(t, otherGMChannel)
|
||||
|
||||
t.Run("Get teams for GM channel", func(t *testing.T) {
|
||||
commonTeams, appErr := th.App.GetGroupMessageMembersCommonTeams(th.Context, gmChannel.Id)
|
||||
require.Nil(t, appErr)
|
||||
require.Equal(t, 2, len(commonTeams))
|
||||
})
|
||||
require.NoError(t, err)
|
||||
|
||||
commonTeams, appErr := th.App.GetGroupMessageMembersCommonTeams(th.Context, "gm_channel_id")
|
||||
require.Nil(t, appErr)
|
||||
require.Equal(t, 3, len(commonTeams))
|
||||
|
||||
// case of no common teams
|
||||
mockTeamStore.On("GetCommonTeamIDsForMultipleUsers", []string{"user_id_1", "user_id_2"}).Return([]string{}, nil)
|
||||
commonTeams, appErr = th.App.GetGroupMessageMembersCommonTeams(th.Context, "gm_channel_id")
|
||||
require.Nil(t, appErr)
|
||||
require.Equal(t, 0, len(commonTeams))
|
||||
t.Run("No common teams", func(t *testing.T) {
|
||||
commonTeams, appErr := th.App.GetGroupMessageMembersCommonTeams(th.Context, otherGMChannel.Id)
|
||||
require.Nil(t, appErr)
|
||||
require.Equal(t, 0, len(commonTeams))
|
||||
})
|
||||
}
|
||||
|
||||
func TestConvertGroupMessageToChannel(t *testing.T) {
|
||||
|
|
|
|||
|
|
@ -94,14 +94,6 @@ func (a *App) GetGroupsByUserId(userID string, opts model.GroupSearchOpts) ([]*m
|
|||
return groups, nil
|
||||
}
|
||||
|
||||
func (a *App) GetGroupsByNames(names []string, restrictions *model.ViewUsersRestrictions) ([]*model.Group, *model.AppError) {
|
||||
groups, err := a.Srv().Store().Group().GetByNames(names, restrictions)
|
||||
if err != nil {
|
||||
return nil, model.NewAppError("GetGroupsByNames", "app.select_error", nil, "", http.StatusInternalServerError).Wrap(err)
|
||||
}
|
||||
return groups, nil
|
||||
}
|
||||
|
||||
func (a *App) CreateGroup(group *model.Group) (*model.Group, *model.AppError) {
|
||||
if err := a.isUniqueToUsernames(group.GetName()); err != nil {
|
||||
err.Where = "CreateGroup"
|
||||
|
|
|
|||
|
|
@ -8,6 +8,7 @@ import (
|
|||
"net/http"
|
||||
|
||||
"github.com/mattermost/mattermost/server/public/model"
|
||||
"github.com/mattermost/mattermost/server/public/shared/mlog"
|
||||
"github.com/mattermost/mattermost/server/public/shared/request"
|
||||
"github.com/mattermost/mattermost/server/v8/channels/store"
|
||||
)
|
||||
|
|
@ -15,8 +16,8 @@ import (
|
|||
// getChannelIDFromJobData extracts channel ID from access control sync job data.
|
||||
// Returns channel ID if the job is for a specific channel, empty string if it's a system-wide job.
|
||||
func (a *App) getChannelIDFromJobData(jobData model.StringMap) string {
|
||||
parentID, ok := jobData["parent_id"]
|
||||
if !ok || parentID == "" {
|
||||
policyID, ok := jobData["policy_id"]
|
||||
if !ok || policyID == "" {
|
||||
return ""
|
||||
}
|
||||
|
||||
|
|
@ -24,14 +25,14 @@ func (a *App) getChannelIDFromJobData(jobData model.StringMap) string {
|
|||
// - Channel policies have ID == channelID
|
||||
// - Parent policies have their own system-wide ID
|
||||
//
|
||||
// For channel admin jobs: parent_id is channelID (since channel policy ID equals channel ID)
|
||||
// For system admin jobs: parent_id could be either channel policy ID or parent policy ID
|
||||
// For channel admin jobs: policy_id is channelID (since channel policy ID equals channel ID)
|
||||
// For system admin jobs: policy_id could be either channel policy ID or parent policy ID
|
||||
//
|
||||
// We return the parent_id as channelID because:
|
||||
// 1. If it's a channel policy ID, it equals the channel ID
|
||||
// 2. If it's a parent policy ID, the permission check will fail safely
|
||||
// 3. This maintains security: only users with permission to that specific ID can create the job
|
||||
return parentID
|
||||
return policyID
|
||||
}
|
||||
|
||||
func (a *App) GetJob(rctx request.CTX, id string) (*model.Job, *model.AppError) {
|
||||
|
|
@ -74,7 +75,48 @@ func (a *App) GetJobsByTypesAndStatuses(rctx request.CTX, jobTypes []string, sta
|
|||
}
|
||||
|
||||
func (a *App) CreateJob(rctx request.CTX, job *model.Job) (*model.Job, *model.AppError) {
|
||||
return a.Srv().Jobs.CreateJob(rctx, job.Type, job.Data)
|
||||
switch job.Type {
|
||||
case model.JobTypeAccessControlSync:
|
||||
// Route ABAC jobs to specialized deduplication handler
|
||||
return a.CreateAccessControlSyncJob(rctx, job.Data)
|
||||
default:
|
||||
return a.Srv().Jobs.CreateJob(rctx, job.Type, job.Data)
|
||||
}
|
||||
}
|
||||
|
||||
func (a *App) CreateAccessControlSyncJob(rctx request.CTX, jobData map[string]string) (*model.Job, *model.AppError) {
|
||||
// Get the policy_id (channel ID) from job data to scope the deduplication
|
||||
policyID, exists := jobData["policy_id"]
|
||||
|
||||
// If policy_id is provided, this is a channel-specific job that needs deduplication
|
||||
if exists && policyID != "" {
|
||||
// Find existing pending or in-progress jobs for this specific policy/channel
|
||||
existingJobs, err := a.Srv().Store().Job().GetByTypeAndData(rctx, model.JobTypeAccessControlSync, map[string]string{
|
||||
"policy_id": policyID,
|
||||
}, true, model.JobStatusPending, model.JobStatusInProgress)
|
||||
if err != nil {
|
||||
return nil, model.NewAppError("CreateAccessControlSyncJob", "app.job.get_existing_jobs.error", nil, "", http.StatusInternalServerError).Wrap(err)
|
||||
}
|
||||
|
||||
// Cancel any existing active jobs for this policy (all returned jobs are already active)
|
||||
for _, job := range existingJobs {
|
||||
rctx.Logger().Info("Canceling existing access control sync job before creating new one",
|
||||
mlog.String("job_id", job.Id),
|
||||
mlog.String("policy_id", policyID),
|
||||
mlog.String("status", job.Status))
|
||||
|
||||
// directly cancel jobs for deduplication
|
||||
if err := a.Srv().Jobs.SetJobCanceled(job); err != nil {
|
||||
rctx.Logger().Warn("Failed to cancel existing access control sync job",
|
||||
mlog.String("job_id", job.Id),
|
||||
mlog.String("policy_id", policyID),
|
||||
mlog.Err(err))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Create the new job
|
||||
return a.Srv().Jobs.CreateJob(rctx, model.JobTypeAccessControlSync, jobData)
|
||||
}
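// Illustrative sketch (not part of this changeset): queueing a sync for one channel through
// CreateJob, which routes JobTypeAccessControlSync to the deduplicating helper above so an
// older pending/in-progress job for the same policy_id is canceled first. The channelID
// value and function name are placeholders.
func queueChannelPolicySync(a *App, rctx request.CTX, channelID string) (*model.Job, *model.AppError) {
	job := &model.Job{
		Type: model.JobTypeAccessControlSync,
		// Channel-scoped policies use the channel ID as the policy ID.
		Data: map[string]string{"policy_id": channelID},
	}
	return a.CreateJob(rctx, job)
}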
|
||||
|
||||
func (a *App) CancelJob(rctx request.CTX, jobId string) *model.AppError {
|
||||
|
|
|
|||
|
|
@@ -153,7 +153,7 @@ func TestSessionHasPermissionToCreateAccessControlSyncJob(t *testing.T) {
Id: model.NewId(),
Type: model.JobTypeAccessControlSync,
Data: model.StringMap{
"parent_id": privateChannel.Id, // Channel admin jobs have parent_id = channelID
"policy_id": privateChannel.Id, // Channel admin jobs have policy_id = channelID
},
}
|
||||
|
||||
|
|
@@ -189,7 +189,7 @@ func TestSessionHasPermissionToCreateAccessControlSyncJob(t *testing.T) {
Id: model.NewId(),
Type: model.JobTypeAccessControlSync,
Data: model.StringMap{
"parent_id": otherChannel.Id,
"policy_id": otherChannel.Id,
},
}
|
||||
|
||||
|
|
@@ -211,7 +211,7 @@ func TestSessionHasPermissionToCreateAccessControlSyncJob(t *testing.T) {
Id: model.NewId(),
Type: model.JobTypeAccessControlSync,
Data: model.StringMap{
"parent_id": privateChannel.Id,
"policy_id": privateChannel.Id,
},
}
|
||||
|
||||
|
|
@ -222,6 +222,179 @@ func TestSessionHasPermissionToCreateAccessControlSyncJob(t *testing.T) {
|
|||
})
|
||||
}
|
||||
|
||||
func TestCreateAccessControlSyncJob(t *testing.T) {
|
||||
mainHelper.Parallel(t)
|
||||
th := Setup(t).InitBasic()
|
||||
defer th.TearDown()
|
||||
|
||||
t.Run("cancels pending job and creates new one", func(t *testing.T) {
|
||||
// Create an existing pending job manually in the store
|
||||
existingJob := &model.Job{
|
||||
Id: model.NewId(),
|
||||
Type: model.JobTypeAccessControlSync,
|
||||
Status: model.JobStatusPending,
|
||||
Data: map[string]string{
|
||||
"policy_id": "channel456",
|
||||
},
|
||||
}
|
||||
_, err := th.App.Srv().Store().Job().Save(existingJob)
|
||||
require.NoError(t, err)
|
||||
t.Cleanup(func() {
|
||||
_, stErr := th.App.Srv().Store().Job().Delete(existingJob.Id)
|
||||
require.NoError(t, stErr)
|
||||
})
|
||||
|
||||
// Test the cancellation logic by calling the method directly
|
||||
existingJobs, storeErr := th.App.Srv().Store().Job().GetByTypeAndData(th.Context, model.JobTypeAccessControlSync, map[string]string{
|
||||
"policy_id": "channel456",
|
||||
}, false, model.JobStatusPending, model.JobStatusInProgress)
|
||||
require.NoError(t, storeErr)
|
||||
require.Len(t, existingJobs, 1)
|
||||
|
||||
// Verify that the store method finds the job
|
||||
assert.Equal(t, existingJob.Id, existingJobs[0].Id)
|
||||
assert.Equal(t, model.JobStatusPending, existingJobs[0].Status)
|
||||
|
||||
// Test the cancellation logic directly
|
||||
for _, job := range existingJobs {
|
||||
if job.Status == model.JobStatusPending || job.Status == model.JobStatusInProgress {
|
||||
appErr := th.App.CancelJob(th.Context, job.Id)
|
||||
require.Nil(t, appErr)
|
||||
}
|
||||
}
|
||||
|
||||
// Verify that the job was cancelled
|
||||
updatedJob, getErr := th.App.Srv().Store().Job().Get(th.Context, existingJob.Id)
|
||||
require.NoError(t, getErr)
|
||||
// Job should be either cancel_requested or canceled (async process)
|
||||
assert.Contains(t, []string{model.JobStatusCancelRequested, model.JobStatusCanceled}, updatedJob.Status)
|
||||
})
|
||||
|
||||
t.Run("cancels in-progress job and creates new one", func(t *testing.T) {
|
||||
// Create an existing in-progress job
|
||||
existingJob := &model.Job{
|
||||
Id: model.NewId(),
|
||||
Type: model.JobTypeAccessControlSync,
|
||||
Status: model.JobStatusInProgress,
|
||||
Data: map[string]string{
|
||||
"policy_id": "channel789",
|
||||
},
|
||||
}
|
||||
_, err := th.App.Srv().Store().Job().Save(existingJob)
|
||||
require.NoError(t, err)
|
||||
t.Cleanup(func() {
|
||||
_, stErr := th.App.Srv().Store().Job().Delete(existingJob.Id)
|
||||
require.NoError(t, stErr)
|
||||
})
|
||||
|
||||
// Test that GetByTypeAndData finds the in-progress job
|
||||
existingJobs, storeErr := th.App.Srv().Store().Job().GetByTypeAndData(th.Context, model.JobTypeAccessControlSync, map[string]string{
|
||||
"policy_id": "channel789",
|
||||
}, false, model.JobStatusPending, model.JobStatusInProgress)
|
||||
require.NoError(t, storeErr)
|
||||
require.Len(t, existingJobs, 1)
|
||||
assert.Equal(t, model.JobStatusInProgress, existingJobs[0].Status)
|
||||
|
||||
// Test cancellation of in-progress job
|
||||
appErr := th.App.CancelJob(th.Context, existingJob.Id)
|
||||
require.Nil(t, appErr)
|
||||
|
||||
// Verify cancellation was requested (job cancellation is asynchronous)
|
||||
updatedJob, getErr := th.App.Srv().Store().Job().Get(th.Context, existingJob.Id)
|
||||
require.NoError(t, getErr)
|
||||
// Job should be either cancel_requested or canceled (async process)
|
||||
assert.Contains(t, []string{model.JobStatusCancelRequested, model.JobStatusCanceled}, updatedJob.Status)
|
||||
})
|
||||
|
||||
t.Run("leaves completed jobs alone", func(t *testing.T) {
|
||||
// Create an existing completed job
|
||||
existingJob := &model.Job{
|
||||
Id: model.NewId(),
|
||||
Type: model.JobTypeAccessControlSync,
|
||||
Status: model.JobStatusSuccess,
|
||||
Data: map[string]string{
|
||||
"policy_id": "channel101",
|
||||
},
|
||||
}
|
||||
_, err := th.App.Srv().Store().Job().Save(existingJob)
|
||||
require.NoError(t, err)
|
||||
t.Cleanup(func() {
|
||||
_, stErr := th.App.Srv().Store().Job().Delete(existingJob.Id)
|
||||
require.NoError(t, stErr)
|
||||
})
|
||||
|
||||
// Test that GetByTypeAndData finds the completed job
|
||||
existingJobs, storeErr := th.App.Srv().Store().Job().GetByTypeAndData(th.Context, model.JobTypeAccessControlSync, map[string]string{
|
||||
"policy_id": "channel101",
|
||||
}, false)
|
||||
require.NoError(t, storeErr)
|
||||
require.Len(t, existingJobs, 1)
|
||||
assert.Equal(t, model.JobStatusSuccess, existingJobs[0].Status)
|
||||
|
||||
// Test that we don't cancel completed jobs (logic test)
|
||||
shouldCancel := existingJob.Status == model.JobStatusPending || existingJob.Status == model.JobStatusInProgress
|
||||
assert.False(t, shouldCancel, "Should not cancel completed jobs")
|
||||
|
||||
// Verify the job status is unchanged
|
||||
updatedJob, getErr := th.App.Srv().Store().Job().Get(th.Context, existingJob.Id)
|
||||
require.NoError(t, getErr)
|
||||
assert.Equal(t, model.JobStatusSuccess, updatedJob.Status)
|
||||
})
|
||||
|
||||
// Test deduplication logic with status filtering to ensure database optimization works correctly
|
||||
|
||||
t.Run("deduplication respects status filtering", func(t *testing.T) {
|
||||
// Create jobs with different statuses
|
||||
pendingJob := &model.Job{
|
||||
Id: model.NewId(),
|
||||
Type: model.JobTypeAccessControlSync,
|
||||
Status: model.JobStatusPending,
|
||||
Data: map[string]string{"policy_id": "channel999"},
|
||||
}
|
||||
|
||||
completedJob := &model.Job{
|
||||
Id: model.NewId(),
|
||||
Type: model.JobTypeAccessControlSync,
|
||||
Status: model.JobStatusSuccess,
|
||||
Data: map[string]string{"policy_id": "channel999"},
|
||||
}
|
||||
|
||||
for _, job := range []*model.Job{pendingJob, completedJob} {
|
||||
_, err := th.App.Srv().Store().Job().Save(job)
|
||||
require.NoError(t, err)
|
||||
|
||||
// Capture job ID to avoid closure variable capture issue
|
||||
jobID := job.Id
|
||||
t.Cleanup(func() {
|
||||
_, stErr := th.App.Srv().Store().Job().Delete(jobID)
|
||||
require.NoError(t, stErr)
|
||||
})
|
||||
}
|
||||
|
||||
// Verify status filtering returns only active jobs
|
||||
activeJobs, err := th.App.Srv().Store().Job().GetByTypeAndData(
|
||||
th.Context,
|
||||
model.JobTypeAccessControlSync,
|
||||
map[string]string{"policy_id": "channel999"},
|
||||
false,
|
||||
model.JobStatusPending, model.JobStatusInProgress, // Only active statuses
|
||||
)
|
||||
require.NoError(t, err)
|
||||
require.Len(t, activeJobs, 1, "Should only find active jobs (pending/in-progress)")
|
||||
assert.Equal(t, pendingJob.Id, activeJobs[0].Id, "Should find the pending job")
|
||||
|
||||
// Verify all jobs are returned when no status filter is provided
|
||||
allJobs, err := th.App.Srv().Store().Job().GetByTypeAndData(
|
||||
th.Context,
|
||||
model.JobTypeAccessControlSync,
|
||||
map[string]string{"policy_id": "channel999"},
|
||||
false, // No status filter
|
||||
)
|
||||
require.NoError(t, err)
|
||||
require.Len(t, allJobs, 2, "Should find all jobs when no status filter")
|
||||
})
|
||||
}
|
||||
|
||||
func TestSessionHasPermissionToReadJob(t *testing.T) {
|
||||
mainHelper.Parallel(t)
|
||||
th := Setup(t)
|
||||
|
|
|
|||
|
|
@@ -10,8 +10,8 @@ import (
)

const (
maxUsersLimit = 2_500
maxUsersHardLimit = 5_000
maxUsersLimit = 200
maxUsersHardLimit = 250
)

func (a *App) GetServerLimits() (*model.ServerLimits, *model.AppError) {
|
||||
|
|
|
|||
|
|
@@ -28,8 +28,8 @@ func TestGetServerLimits(t *testing.T) {

// InitBasic creates 3 users by default
require.Equal(t, int64(3), serverLimits.ActiveUserCount)
require.Equal(t, int64(2500), serverLimits.MaxUsersLimit)
require.Equal(t, int64(5000), serverLimits.MaxUsersHardLimit)
require.Equal(t, int64(200), serverLimits.MaxUsersLimit)
require.Equal(t, int64(250), serverLimits.MaxUsersHardLimit)
})
|
||||
|
||||
t.Run("user count should increase on creating new user and decrease on permanently deleting", func(t *testing.T) {
|
||||
|
|
@@ -279,7 +279,7 @@ func TestIsAtUserLimit(t *testing.T) {
th.App.Srv().SetLicense(nil)

mockUserStore := storemocks.UserStore{}
mockUserStore.On("Count", mock.Anything).Return(int64(4000), nil) // Under hard limit of 5000
mockUserStore.On("Count", mock.Anything).Return(int64(200), nil) // Under hard limit of 250
mockStore := th.App.Srv().Store().(*storemocks.Store)
mockStore.On("User").Return(&mockUserStore)
|
||||
|
||||
|
|
@@ -295,7 +295,7 @@ func TestIsAtUserLimit(t *testing.T) {
th.App.Srv().SetLicense(nil)

mockUserStore := storemocks.UserStore{}
mockUserStore.On("Count", mock.Anything).Return(int64(5000), nil) // At hard limit of 5000
mockUserStore.On("Count", mock.Anything).Return(int64(250), nil) // At hard limit of 250
mockStore := th.App.Srv().Store().(*storemocks.Store)
mockStore.On("User").Return(&mockUserStore)
|
||||
|
||||
|
|
@@ -311,7 +311,7 @@ func TestIsAtUserLimit(t *testing.T) {
th.App.Srv().SetLicense(nil)

mockUserStore := storemocks.UserStore{}
mockUserStore.On("Count", mock.Anything).Return(int64(6000), nil) // Over hard limit of 5000
mockUserStore.On("Count", mock.Anything).Return(int64(300), nil) // Over hard limit of 250
mockStore := th.App.Srv().Store().(*storemocks.Store)
mockStore.On("User").Return(&mockUserStore)
|
||||
|
||||
|
|
@@ -547,8 +547,8 @@ func TestExtraUsersBehavior(t *testing.T) {
require.Nil(t, appErr)

// Unlicensed servers use hard-coded limits without extra users
require.Equal(t, int64(2500), serverLimits.MaxUsersLimit)
require.Equal(t, int64(5000), serverLimits.MaxUsersHardLimit)
require.Equal(t, int64(200), serverLimits.MaxUsersLimit)
require.Equal(t, int64(250), serverLimits.MaxUsersHardLimit)
})
}
|
||||
|
||||
|
|
|
|||
|
|
@ -850,10 +850,13 @@ func (a *App) AuthorizeOAuthUser(rctx request.CTX, w http.ResponseWriter, r *htt
return nil, stateProps, nil, model.NewAppError("AuthorizeOAuthUser", "api.user.authorize_oauth_user.invalid_state.app_error", nil, "", http.StatusBadRequest).Wrap(cookieErr)
}

expectedTokenExtra := generateOAuthStateTokenExtra(stateEmail, stateAction, cookie.Value)
if expectedTokenExtra != expectedToken.Extra {
err := errors.New("Extra token value does not match token generated from state")
return nil, stateProps, nil, model.NewAppError("AuthorizeOAuthUser", "api.user.authorize_oauth_user.invalid_state.app_error", nil, "", http.StatusBadRequest).Wrap(err)
tokenEmail, tokenAction, tokenCookie, parseErr := parseOAuthStateTokenExtra(expectedToken.Extra)
if parseErr != nil {
return nil, stateProps, nil, model.NewAppError("AuthorizeOAuthUser", "api.user.authorize_oauth_user.invalid_state.app_error", nil, "", http.StatusBadRequest).Wrap(parseErr)
}

if tokenEmail != stateEmail || tokenAction != stateAction || tokenCookie != cookie.Value {
return nil, stateProps, nil, model.NewAppError("AuthorizeOAuthUser", "api.user.authorize_oauth_user.invalid_state.app_error", nil, "", http.StatusBadRequest).Wrap(errors.New("invalid state token"))
}

appErr = a.DeleteToken(expectedToken)

@ -977,7 +980,7 @@ func (a *App) SwitchEmailToOAuth(rctx request.CTX, w http.ResponseWriter, r *htt
stateProps["email"] = email

if service == model.UserAuthServiceSaml {
samlToken, samlErr := a.CreateSamlRelayToken(email)
samlToken, samlErr := a.CreateSamlRelayToken(model.TokenTypeSaml, email)
if samlErr != nil {
return "", samlErr
}

@ -1037,3 +1040,18 @@ func (a *App) SwitchOAuthToEmail(rctx request.CTX, email, password, requesterId
func generateOAuthStateTokenExtra(email, action, cookie string) string {
return email + ":" + action + ":" + cookie
}

// parseOAuthStateTokenExtra parses a token extra string in the format "email:action:cookie".
// Returns an error if the token does not contain exactly 3 colon-separated parts.
func parseOAuthStateTokenExtra(tokenExtra string) (email, action, cookie string, err error) {
parts := strings.Split(tokenExtra, ":")
if len(parts) != 3 {
return "", "", "", fmt.Errorf("invalid token format: expected exactly 3 parts separated by ':', got %d", len(parts))
}

email = parts[0]
action = parts[1]
cookie = parts[2]

return email, action, cookie, nil
}
@ -534,6 +534,7 @@ func TestAuthorizeOAuthUser(t *testing.T) {
recorder := httptest.ResponseRecorder{}
body, receivedStateProps, _, err := th.App.AuthorizeOAuthUser(th.Context, &recorder, request, model.ServiceGitlab, "", state, "")

require.Nil(t, err)
require.NotNil(t, body)
bodyBytes, bodyErr := io.ReadAll(body)
require.NoError(t, bodyErr)

@ -695,3 +696,190 @@ func TestDeactivatedUserOAuthApp(t *testing.T) {
require.Equal(t, http.StatusBadRequest, appErr.StatusCode)
assert.Equal(t, "api.oauth.get_access_token.expired_code.app_error", appErr.Id)
}

func TestParseOAuthStateTokenExtra(t *testing.T) {
t.Run("valid token with normal values", func(t *testing.T) {
email, action, cookie, err := parseOAuthStateTokenExtra("user@example.com:email_to_sso:randomcookie123")
require.NoError(t, err)
assert.Equal(t, "user@example.com", email)
assert.Equal(t, "email_to_sso", action)
assert.Equal(t, "randomcookie123", cookie)
})

t.Run("valid token with empty email and action", func(t *testing.T) {
email, action, cookie, err := parseOAuthStateTokenExtra("::randomcookie123")
require.NoError(t, err)
assert.Equal(t, "", email)
assert.Equal(t, "", action)
assert.Equal(t, "randomcookie123", cookie)
})

t.Run("token with too many colons", func(t *testing.T) {
_, _, _, err := parseOAuthStateTokenExtra("user@example.com:action:value:extra")
require.Error(t, err)
assert.Contains(t, err.Error(), "expected exactly 3 parts")
assert.Contains(t, err.Error(), "got 4")
})

t.Run("token with too few colons", func(t *testing.T) {
_, _, _, err := parseOAuthStateTokenExtra("user@example.com:email_to_sso")
require.Error(t, err)
assert.Contains(t, err.Error(), "expected exactly 3 parts")
assert.Contains(t, err.Error(), "got 2")
})

t.Run("token with no colons", func(t *testing.T) {
_, _, _, err := parseOAuthStateTokenExtra("invalidtoken")
require.Error(t, err)
assert.Contains(t, err.Error(), "expected exactly 3 parts")
assert.Contains(t, err.Error(), "got 1")
})

t.Run("empty token string", func(t *testing.T) {
_, _, _, err := parseOAuthStateTokenExtra("")
require.Error(t, err)
assert.Contains(t, err.Error(), "expected exactly 3 parts")
})
}

func TestAuthorizeOAuthUser_InvalidToken(t *testing.T) {
mainHelper.Parallel(t)
th := Setup(t)
defer th.TearDown()

mockProvider := &mocks.OAuthProvider{}
einterfaces.RegisterOAuthProvider(model.ServiceOpenid, mockProvider)

service := model.ServiceOpenid
th.App.UpdateConfig(func(cfg *model.Config) {
*cfg.ServiceSettings.EnableOAuthServiceProvider = true
cfg.OpenIdSettings.Enable = model.NewPointer(true)
cfg.OpenIdSettings.Id = model.NewPointer("test-client-id")
cfg.OpenIdSettings.Secret = model.NewPointer("test-secret")
cfg.OpenIdSettings.Scope = model.NewPointer(OpenIDScope)
})

mockProvider.On("GetSSOSettings", mock.Anything, mock.Anything, service).Return(&model.SSOSettings{
Enable: model.NewPointer(true),
Id: model.NewPointer("test-client-id"),
Secret: model.NewPointer("test-secret"),
}, nil)

t.Run("rejects token with extra delimiters in email field", func(t *testing.T) {
cookieValue := model.NewId()

invalidEmail := "user@example.com:action"
action := "email_to_sso"

tokenExtra := generateOAuthStateTokenExtra(invalidEmail, action, cookieValue)
token, err := th.App.CreateOAuthStateToken(tokenExtra)
require.Nil(t, err)

stateProps := map[string]string{
"token": token.Token,
"email": "user@example.com",
"action": action,
}
state := base64.StdEncoding.EncodeToString([]byte(model.MapToJSON(stateProps)))

w := httptest.NewRecorder()
r := httptest.NewRequest("GET", "/", nil)
r.AddCookie(&http.Cookie{
Name: CookieOAuth,
Value: "action:" + cookieValue,
})

_, _, _, appErr := th.App.AuthorizeOAuthUser(th.Context, w, r, service, "auth-code", state, "http://localhost/callback")

require.NotNil(t, appErr)
assert.Equal(t, http.StatusBadRequest, appErr.StatusCode)
assert.Equal(t, "api.user.authorize_oauth_user.invalid_state.app_error", appErr.Id)
})

t.Run("rejects token with mismatched email", func(t *testing.T) {
cookieValue := model.NewId()
action := "email_to_sso"

tokenExtra := generateOAuthStateTokenExtra("token@example.com", action, cookieValue)
token, err := th.App.CreateOAuthStateToken(tokenExtra)
require.Nil(t, err)

stateProps := map[string]string{
"token": token.Token,
"email": "state@example.com",
"action": action,
}
state := base64.StdEncoding.EncodeToString([]byte(model.MapToJSON(stateProps)))

w := httptest.NewRecorder()
r := httptest.NewRequest("GET", "/", nil)
r.AddCookie(&http.Cookie{
Name: CookieOAuth,
Value: cookieValue,
})

_, _, _, appErr := th.App.AuthorizeOAuthUser(th.Context, w, r, service, "auth-code", state, "http://localhost/callback")

require.NotNil(t, appErr)
assert.Equal(t, http.StatusBadRequest, appErr.StatusCode)
assert.Equal(t, "api.user.authorize_oauth_user.invalid_state.app_error", appErr.Id)
})

t.Run("rejects token with mismatched action", func(t *testing.T) {
cookieValue := model.NewId()
email := "user@example.com"

tokenExtra := generateOAuthStateTokenExtra(email, "email_to_sso", cookieValue)
token, err := th.App.CreateOAuthStateToken(tokenExtra)
require.Nil(t, err)

stateProps := map[string]string{
"token": token.Token,
"email": email,
"action": "sso_to_email",
}
state := base64.StdEncoding.EncodeToString([]byte(model.MapToJSON(stateProps)))

w := httptest.NewRecorder()
r := httptest.NewRequest("GET", "/", nil)
r.AddCookie(&http.Cookie{
Name: CookieOAuth,
Value: cookieValue,
})

_, _, _, appErr := th.App.AuthorizeOAuthUser(th.Context, w, r, service, "auth-code", state, "http://localhost/callback")

require.NotNil(t, appErr)
assert.Equal(t, http.StatusBadRequest, appErr.StatusCode)
assert.Equal(t, "api.user.authorize_oauth_user.invalid_state.app_error", appErr.Id)
})

t.Run("rejects token with mismatched cookie", func(t *testing.T) {
email := "user@example.com"
action := "email_to_sso"

tokenExtra := generateOAuthStateTokenExtra(email, action, "token-cookie-value")
token, err := th.App.CreateOAuthStateToken(tokenExtra)
require.Nil(t, err)

stateProps := map[string]string{
"token": token.Token,
"email": email,
"action": action,
}
state := base64.StdEncoding.EncodeToString([]byte(model.MapToJSON(stateProps)))

w := httptest.NewRecorder()
r := httptest.NewRequest("GET", "/", nil)
r.AddCookie(&http.Cookie{
Name: CookieOAuth,
Value: "different-cookie-value",
})

_, _, _, appErr := th.App.AuthorizeOAuthUser(th.Context, w, r, service, "auth-code", state, "http://localhost/callback")

require.NotNil(t, appErr)
assert.Equal(t, http.StatusBadRequest, appErr.StatusCode)
assert.Equal(t, "api.user.authorize_oauth_user.invalid_state.app_error", appErr.Id)
})
}
@ -6,8 +6,8 @@ package platform
import (
"crypto/ecdsa"
"crypto/elliptic"
"crypto/md5"
"crypto/rand"
"crypto/sha256"
"crypto/x509"
"encoding/base64"
"encoding/json"

@ -237,7 +237,7 @@ func (ps *PlatformService) regenerateClientConfig() {
clientConfigJSON, _ := json.Marshal(clientConfig)
ps.clientConfig.Store(clientConfig)
ps.limitedClientConfig.Store(limitedClientConfig)
ps.clientConfigHash.Store(fmt.Sprintf("%x", md5.Sum(clientConfigJSON)))
ps.clientConfigHash.Store(fmt.Sprintf("%x", sha256.Sum256(clientConfigJSON)))
}

// AsymmetricSigningKey will return a private key that can be used for asymmetric signing.
@ -65,6 +65,10 @@ func (ms *mockSuite) HasPermissionToReadChannel(rctx request.CTX, userID string,
return true
}

func (ms *mockSuite) MFARequired(rctx request.CTX) *model.AppError {
return nil
}

func setupDBStore(tb testing.TB) (store.Store, *model.SqlSettings) {
var dbStore store.Store
var dbSettings *model.SqlSettings
@ -66,6 +66,26 @@ func (_m *SuiteIFace) HasPermissionToReadChannel(rctx request.CTX, userID string
return r0
}

// MFARequired provides a mock function with given fields: rctx
func (_m *SuiteIFace) MFARequired(rctx request.CTX) *model.AppError {
ret := _m.Called(rctx)

if len(ret) == 0 {
panic("no return value specified for MFARequired")
}

var r0 *model.AppError
if rf, ok := ret.Get(0).(func(request.CTX) *model.AppError); ok {
r0 = rf(rctx)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).(*model.AppError)
}
}

return r0
}

// RolesGrantPermission provides a mock function with given fields: roleNames, permissionId
func (_m *SuiteIFace) RolesGrantPermission(roleNames []string, permissionId string) bool {
ret := _m.Called(roleNames, permissionId)
@ -453,7 +453,7 @@ func (wc *WebConn) readPump() {
if err := wc.WebSocket.SetReadDeadline(time.Now().Add(pongWaitTime)); err != nil {
return err
}
if wc.IsAuthenticated() {
if wc.IsBasicAuthenticated() {
userID := wc.UserId
wc.Platform.Go(func() {
wc.Platform.SetStatusAwayIfNeeded(userID, false)

@ -770,8 +770,8 @@ func (wc *WebConn) InvalidateCache() {
wc.SetSessionExpiresAt(0)
}

// IsAuthenticated returns whether the given WebConn is authenticated or not.
func (wc *WebConn) IsAuthenticated() bool {
// IsBasicAuthenticated returns whether the given WebConn has a valid session.
func (wc *WebConn) IsBasicAuthenticated() bool {
// Check the expiry to see if we need to check for a new session
if wc.GetSessionExpiresAt() < model.GetMillis() {
if wc.GetSessionToken() == "" {

@ -799,6 +799,24 @@ func (wc *WebConn) IsAuthenticated() bool {
return true
}

// IsMFAAuthenticated returns whether the user has completed MFA when required.
func (wc *WebConn) IsMFAAuthenticated() bool {
session := wc.GetSession()
c := request.EmptyContext(wc.Platform.logger).WithSession(session)

// Check if MFA is required and user has NOT completed MFA
if appErr := wc.Suite.MFARequired(c); appErr != nil {
return false
}

return true
}

// IsAuthenticated returns whether the given WebConn is fully authenticated (session + MFA).
func (wc *WebConn) IsAuthenticated() bool {
return wc.IsBasicAuthenticated() && wc.IsMFAAuthenticated()
}

func (wc *WebConn) createHelloMessage() *model.WebSocketEvent {
ee := wc.Platform.LicenseManager() != nil

@ -856,7 +874,7 @@ func (wc *WebConn) ShouldSendEventToGuest(msg *model.WebSocketEvent) bool {

// ShouldSendEvent returns whether the message should be sent or not.
func (wc *WebConn) ShouldSendEvent(msg *model.WebSocketEvent) bool {
// IMPORTANT: Do not send event if WebConn does not have a session
// IMPORTANT: Do not send event if WebConn does not have a session and completed MFA
if !wc.IsAuthenticated() {
return false
}
@ -30,6 +30,7 @@ type SuiteIFace interface {
RolesGrantPermission(roleNames []string, permissionId string) bool
HasPermissionToReadChannel(rctx request.CTX, userID string, channel *model.Channel) bool
UserCanSeeOtherUser(rctx request.CTX, userID string, otherUserId string) (bool, *model.AppError)
MFARequired(rctx request.CTX) *model.AppError
}

type webConnActivityMessage struct {

@ -572,7 +573,7 @@ func (h *Hub) Start() {
}
atomic.StoreInt64(&h.connectionCount, int64(connIndex.AllActive()))

if webConnReg.conn.IsAuthenticated() && webConnReg.conn.reuseCount == 0 {
if webConnReg.conn.IsBasicAuthenticated() && webConnReg.conn.reuseCount == 0 {
// The hello message should only be sent when the reuseCount is 0.
// i.e in server restart, or long timeout, or fresh connection case.
// In case of seq number not found in dead queue, it is handled by
@ -18,6 +18,7 @@ import (

"github.com/gorilla/websocket"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/mock"
"github.com/stretchr/testify/require"

"github.com/mattermost/mattermost/server/public/model"

@ -598,6 +599,7 @@ func TestHubIsRegistered(t *testing.T) {

mockSuite := &platform_mocks.SuiteIFace{}
mockSuite.On("GetSession", session.Token).Return(session, nil)
mockSuite.On("MFARequired", mock.Anything).Return(nil)
th.Suite = mockSuite

s := httptest.NewServer(dummyWebsocketHandler(t))

@ -633,6 +635,7 @@ func TestHubWebConnCount(t *testing.T) {

mockSuite := &platform_mocks.SuiteIFace{}
mockSuite.On("GetSession", session.Token).Return(session, nil)
mockSuite.On("MFARequired", mock.Anything).Return(nil)
th.Suite = mockSuite

s := httptest.NewServer(dummyWebsocketHandler(t))
@ -27,6 +27,7 @@ import (
"github.com/mattermost/mattermost/server/public/plugin/plugintest"
"github.com/mattermost/mattermost/server/public/plugin/utils"
"github.com/mattermost/mattermost/server/public/shared/request"
"github.com/mattermost/mattermost/server/v8/channels/utils/testutils"
"github.com/mattermost/mattermost/server/v8/einterfaces/mocks"
)

@ -1262,7 +1263,6 @@ func TestHookReactionHasBeenAdded(t *testing.T) {
defer th.TearDown()

var mockAPI plugintest.API
mockAPI.On("LoadPluginConfiguration", mock.Anything).Return(nil)
mockAPI.On("LogDebug", "smile").Return(nil)

tearDown, _, _ := SetAppEnvironmentWithPlugins(t,

@ -1298,6 +1298,10 @@ func TestHookReactionHasBeenAdded(t *testing.T) {
}
_, err := th.App.SaveReactionForPost(th.Context, reaction)
require.Nil(t, err)

assert.EventuallyWithT(t, func(c *assert.CollectT) {
mockAPI.AssertExpectations(&testutils.CollectTWithLogf{CollectT: c})
}, 5*time.Second, 100*time.Millisecond)
}

func TestHookReactionHasBeenRemoved(t *testing.T) {

@ -1306,7 +1310,6 @@ func TestHookReactionHasBeenRemoved(t *testing.T) {
defer th.TearDown()

var mockAPI plugintest.API
mockAPI.On("LoadPluginConfiguration", mock.Anything).Return(nil)
mockAPI.On("LogDebug", "star").Return(nil)

tearDown, _, _ := SetAppEnvironmentWithPlugins(t,

@ -1345,9 +1348,9 @@ func TestHookReactionHasBeenRemoved(t *testing.T) {

require.Nil(t, err)

require.Eventually(t, func() bool {
return mockAPI.AssertCalled(t, "LogDebug", "star")
}, 2*time.Second, 100*time.Millisecond)
assert.EventuallyWithT(t, func(c *assert.CollectT) {
mockAPI.AssertExpectations(&testutils.CollectTWithLogf{CollectT: c})
}, 5*time.Second, 100*time.Millisecond)
}

func TestHookRunDataRetention(t *testing.T) {

@ -1886,15 +1889,15 @@ func TestHookPreferencesHaveChanged(t *testing.T) {

mockAPI.On("LogDebug", "category=test_category name=test_name_1 value=test_value_1")
mockAPI.On("LogDebug", "category=test_category name=test_name_2 value=test_value_2")
defer mockAPI.AssertExpectations(t)

// Run test
err := th.App.UpdatePreferences(th.Context, th.BasicUser.Id, preferences)

require.Nil(t, err)

// Hooks are run in a goroutine, so wait for those to complete
time.Sleep(2 * time.Second)
assert.EventuallyWithT(t, func(c *assert.CollectT) {
mockAPI.AssertExpectations(&testutils.CollectTWithLogf{CollectT: c})
}, 5*time.Second, 100*time.Millisecond)
})

t.Run("should be called when preferences are changed by plugin code", func(t *testing.T) {

@ -2044,11 +2047,12 @@ func TestChannelHasBeenCreated(t *testing.T) {
posts, appErr := th.App.GetPosts(channel.Id, 0, 1)

require.Nil(t, appErr)
assert.True(t, len(posts.Order) > 0)

post := posts.Posts[posts.Order[0]]
assert.Equal(t, channel.Id, post.ChannelId)
assert.Equal(t, "ChannelHasBeenCreated has been called for "+channel.Id, post.Message)
if assert.NotEmpty(t, posts.Order) {
post := posts.Posts[posts.Order[0]]
assert.Equal(t, channel.Id, post.ChannelId)
assert.Equal(t, "ChannelHasBeenCreated has been called for "+channel.Id, post.Message)
}
}, 5*time.Second, 100*time.Millisecond)
})

@ -2071,10 +2075,11 @@ func TestChannelHasBeenCreated(t *testing.T) {
posts, appErr := th.App.GetPosts(channel.Id, 0, 1)

require.Nil(t, appErr)
assert.True(t, len(posts.Order) > 0)
post := posts.Posts[posts.Order[0]]
assert.Equal(t, channel.Id, post.ChannelId)
assert.Equal(t, "ChannelHasBeenCreated has been called for "+channel.Id, post.Message)
if assert.NotEmpty(t, posts.Order) {
post := posts.Posts[posts.Order[0]]
assert.Equal(t, channel.Id, post.ChannelId)
assert.Equal(t, "ChannelHasBeenCreated has been called for "+channel.Id, post.Message)
}
}, 5*time.Second, 100*time.Millisecond)
})

@ -2098,10 +2103,11 @@ func TestChannelHasBeenCreated(t *testing.T) {
posts, appErr := th.App.GetPosts(channel.Id, 0, 1)

require.Nil(t, appErr)
assert.True(t, len(posts.Order) > 0)
post := posts.Posts[posts.Order[0]]
assert.Equal(t, channel.Id, post.ChannelId)
assert.Equal(t, "ChannelHasBeenCreated has been called for "+channel.Id, post.Message)
if assert.NotEmpty(t, posts.Order) {
post := posts.Posts[posts.Order[0]]
assert.Equal(t, channel.Id, post.ChannelId)
assert.Equal(t, "ChannelHasBeenCreated has been called for "+channel.Id, post.Message)
}
}, 5*time.Second, 100*time.Millisecond)
})
}
@ -236,7 +236,7 @@ func (ch *Channels) servePluginRequest(w http.ResponseWriter, r *http.Request, h
return
}

if validateCSRFForPluginRequest(rctx, r, session, cookieAuth, *ch.cfgSvc.Config().ServiceSettings.StrictCSRFEnforcement) {
if validateCSRFForPluginRequest(rctx, r, session, cookieAuth, *ch.cfgSvc.Config().ServiceSettings.ExperimentalStrictCSRFEnforcement) {
r.Header.Set("Mattermost-User-Id", session.UserId)
context.SessionId = session.Id
} else {
@ -577,7 +577,7 @@ func TestValidateCSRFForPluginRequest(t *testing.T) {

t.Run("XMLHttpRequest with strict enforcement disabled", func(t *testing.T) {
th.App.UpdateConfig(func(cfg *model.Config) {
*cfg.ServiceSettings.StrictCSRFEnforcement = false
*cfg.ServiceSettings.ExperimentalStrictCSRFEnforcement = false
})

session := &model.Session{Id: "sessionid", UserId: "userid", Token: "token"}

@ -591,7 +591,7 @@ func TestValidateCSRFForPluginRequest(t *testing.T) {

t.Run("XMLHttpRequest with strict enforcement enabled", func(t *testing.T) {
th.App.UpdateConfig(func(cfg *model.Config) {
*cfg.ServiceSettings.StrictCSRFEnforcement = true
*cfg.ServiceSettings.ExperimentalStrictCSRFEnforcement = true
})

session := &model.Session{Id: "sessionid", UserId: "userid", Token: "token"}
@ -298,8 +298,8 @@ func (a *App) ResetSamlAuthDataToEmail(includeDeleted bool, dryRun bool, userIDs
return
}

func (a *App) CreateSamlRelayToken(extra string) (*model.Token, *model.AppError) {
token := model.NewToken(model.TokenTypeSaml, extra)
func (a *App) CreateSamlRelayToken(tokenType string, extra string) (*model.Token, *model.AppError) {
token := model.NewToken(tokenType, extra)

if err := a.Srv().Store().Token().Save(token); err != nil {
var appErr *model.AppError
@ -1751,6 +1751,21 @@ func (a *App) GetTokenById(token string) (*model.Token, *model.AppError) {
return rtoken, nil
}

func (a *App) ConsumeTokenOnce(tokenType, tokenStr string) (*model.Token, *model.AppError) {
token, err := a.Srv().Store().Token().ConsumeOnce(tokenType, tokenStr)
if err != nil {
var status int
switch err.(type) {
case *store.ErrNotFound:
status = http.StatusNotFound
default:
status = http.StatusInternalServerError
}
return nil, model.NewAppError("ConsumeTokenOnce", "api.user.create_user.signup_link_invalid.app_error", nil, "", status).Wrap(err)
}
return token, nil
}

func (a *App) DeleteToken(token *model.Token) *model.AppError {
err := a.Srv().Store().Token().Delete(token.Token)
if err != nil {
@ -9,6 +9,7 @@ import (
"database/sql"
"encoding/json"
"errors"
"net/http"
"os"
"path/filepath"
"strings"

@ -2484,3 +2485,84 @@ func TestRemoteUserDirectChannelCreation(t *testing.T) {
assert.Equal(t, model.ChannelTypeDirect, channel.Type)
})
}

func TestConsumeTokenOnce(t *testing.T) {
mainHelper.Parallel(t)
th := Setup(t).InitBasic()
defer th.TearDown()

t.Run("successfully consume valid token", func(t *testing.T) {
token := model.NewToken(model.TokenTypeOAuth, "extra-data")
require.NoError(t, th.App.Srv().Store().Token().Save(token))

consumedToken, appErr := th.App.ConsumeTokenOnce(model.TokenTypeOAuth, token.Token)
require.Nil(t, appErr)
require.NotNil(t, consumedToken)
assert.Equal(t, token.Token, consumedToken.Token)
assert.Equal(t, model.TokenTypeOAuth, consumedToken.Type)
assert.Equal(t, "extra-data", consumedToken.Extra)

_, err := th.App.Srv().Store().Token().GetByToken(token.Token)
require.Error(t, err)
})

t.Run("token not found returns 404", func(t *testing.T) {
nonExistentToken := model.NewRandomString(model.TokenSize)

consumedToken, appErr := th.App.ConsumeTokenOnce(model.TokenTypeOAuth, nonExistentToken)
require.NotNil(t, appErr)
require.Nil(t, consumedToken)
assert.Equal(t, http.StatusNotFound, appErr.StatusCode)
assert.Equal(t, "ConsumeTokenOnce", appErr.Where)
})

t.Run("wrong token type returns not found", func(t *testing.T) {
token := model.NewToken(model.TokenTypeOAuth, "extra-data")
require.NoError(t, th.App.Srv().Store().Token().Save(token))
defer func() {
_ = th.App.Srv().Store().Token().Delete(token.Token)
}()

consumedToken, appErr := th.App.ConsumeTokenOnce(model.TokenTypeSaml, token.Token)
require.NotNil(t, appErr)
require.Nil(t, consumedToken)
assert.Equal(t, http.StatusNotFound, appErr.StatusCode)

_, err := th.App.Srv().Store().Token().GetByToken(token.Token)
require.NoError(t, err)
})

t.Run("token can only be consumed once", func(t *testing.T) {
token := model.NewToken(model.TokenTypeSSOCodeExchange, "extra-data")
require.NoError(t, th.App.Srv().Store().Token().Save(token))

consumedToken1, appErr := th.App.ConsumeTokenOnce(model.TokenTypeSSOCodeExchange, token.Token)
require.Nil(t, appErr)
require.NotNil(t, consumedToken1)

consumedToken2, appErr := th.App.ConsumeTokenOnce(model.TokenTypeSSOCodeExchange, token.Token)
require.NotNil(t, appErr)
require.Nil(t, consumedToken2)
assert.Equal(t, http.StatusNotFound, appErr.StatusCode)
})

t.Run("empty token string returns not found", func(t *testing.T) {
consumedToken, appErr := th.App.ConsumeTokenOnce(model.TokenTypeOAuth, "")
require.NotNil(t, appErr)
require.Nil(t, consumedToken)
assert.Equal(t, http.StatusNotFound, appErr.StatusCode)
})

t.Run("empty token type returns not found", func(t *testing.T) {
token := model.NewToken(model.TokenTypeOAuth, "extra-data")
require.NoError(t, th.App.Srv().Store().Token().Save(token))
defer func() {
_ = th.App.Srv().Store().Token().Delete(token.Token)
}()

consumedToken, appErr := th.App.ConsumeTokenOnce("", token.Token)
require.NotNil(t, appErr)
require.Nil(t, consumedToken)
assert.Equal(t, http.StatusNotFound, appErr.StatusCode)
})
}
@ -75,11 +75,14 @@ func TestBatchMigrationWorker(t *testing.T) {
waitDone(t, stopped, "worker did not stop")
}

assertJobReset := func(t *testing.T, th *TestHelper, job *model.Job) {
actualJob, appErr := th.Server.Jobs.GetJob(th.Context, job.Id)
require.Nil(t, appErr)
assert.Empty(t, actualJob.Progress)
assert.Empty(t, actualJob.Data)
assertJobResetEventually := func(t *testing.T, th *TestHelper, job *model.Job) {
t.Helper()
assert.EventuallyWithT(t, func(t *assert.CollectT) {
actualJob, appErr := th.Server.Jobs.GetJob(th.Context, job.Id)
require.Nil(t, appErr)
assert.Empty(t, actualJob.Progress, "expected no job progress")
assert.Empty(t, actualJob.Data, "expected no job data")
}, 5*time.Second, 250*time.Millisecond, "job never reset")
}

getBatchNumberFromData := func(t *testing.T, data model.StringMap) int {

@ -142,14 +145,11 @@ func TestBatchMigrationWorker(t *testing.T) {
return nil, false, nil
})

// Give the worker time to start running
time.Sleep(500 * time.Millisecond)

// Queue the work to be done
worker.JobChannel() <- *job

th.WaitForJobStatus(t, job, model.JobStatusPending)
assertJobReset(t, th, job)
assertJobResetEventually(t, th, job)

stopWorker(t, worker)
})

@ -171,14 +171,11 @@ func TestBatchMigrationWorker(t *testing.T) {
return getDataFromBatchNumber(batchNumber), false, nil
})

// Give the worker time to start running
time.Sleep(500 * time.Millisecond)

// Queue the work to be done
worker.JobChannel() <- *job

th.WaitForJobStatus(t, job, model.JobStatusPending)
assertJobReset(t, th, job)
assertJobResetEventually(t, th, job)

stopWorker(t, worker)
})
@ -2955,27 +2955,6 @@ func (s *RetryLayerChannelStore) SearchAllChannels(term string, opts store.Chann

}

func (s *RetryLayerChannelStore) SearchArchivedInTeam(teamID string, term string, userID string) (model.ChannelList, error) {

tries := 0
for {
result, err := s.ChannelStore.SearchArchivedInTeam(teamID, term, userID)
if err == nil {
return result, nil
}
if !isRepeatableError(err) {
return result, err
}
tries++
if tries >= 3 {
err = errors.Wrap(err, "giving up after 3 consecutive repeatable transaction failures")
return result, err
}
timepkg.Sleep(100 * timepkg.Millisecond)
}

}

func (s *RetryLayerChannelStore) SearchForUserInTeam(userID string, teamID string, term string, includeDeleted bool) (model.ChannelList, error) {

tries := 0

@ -5598,27 +5577,6 @@ func (s *RetryLayerGroupStore) GetByName(name string, opts model.GroupSearchOpts

}

func (s *RetryLayerGroupStore) GetByNames(names []string, viewRestrictions *model.ViewUsersRestrictions) ([]*model.Group, error) {

tries := 0
for {
result, err := s.GroupStore.GetByNames(names, viewRestrictions)
if err == nil {
return result, nil
}
if !isRepeatableError(err) {
return result, err
}
tries++
if tries >= 3 {
err = errors.Wrap(err, "giving up after 3 consecutive repeatable transaction failures")
return result, err
}
timepkg.Sleep(100 * timepkg.Millisecond)
}

}

func (s *RetryLayerGroupStore) GetByRemoteID(remoteID string, groupSource model.GroupSource) (*model.Group, error) {

tries := 0

@ -6480,6 +6438,27 @@ func (s *RetryLayerJobStore) GetAllByTypesPage(rctx request.CTX, jobTypes []stri

}

func (s *RetryLayerJobStore) GetByTypeAndData(rctx request.CTX, jobType string, data map[string]string, useMaster bool, statuses ...string) ([]*model.Job, error) {

tries := 0
for {
result, err := s.JobStore.GetByTypeAndData(rctx, jobType, data, useMaster, statuses...)
if err == nil {
return result, nil
}
if !isRepeatableError(err) {
return result, err
}
tries++
if tries >= 3 {
err = errors.Wrap(err, "giving up after 3 consecutive repeatable transaction failures")
return result, err
}
timepkg.Sleep(100 * timepkg.Millisecond)
}

}

func (s *RetryLayerJobStore) GetCountByStatusAndType(status string, jobType string) (int64, error) {

tries := 0

@ -9360,6 +9339,27 @@ func (s *RetryLayerPropertyFieldStore) CountForGroup(groupID string, includeDele

}

func (s *RetryLayerPropertyFieldStore) CountForTarget(groupID string, targetType string, targetID string, includeDeleted bool) (int64, error) {

tries := 0
for {
result, err := s.PropertyFieldStore.CountForTarget(groupID, targetType, targetID, includeDeleted)
if err == nil {
return result, nil
}
if !isRepeatableError(err) {
return result, err
}
tries++
if tries >= 3 {
err = errors.Wrap(err, "giving up after 3 consecutive repeatable transaction failures")
return result, err
}
timepkg.Sleep(100 * timepkg.Millisecond)
}

}

func (s *RetryLayerPropertyFieldStore) Create(field *model.PropertyField) (*model.PropertyField, error) {

tries := 0

@ -14151,6 +14151,27 @@ func (s *RetryLayerTokenStore) Cleanup(expiryTime int64) {

}

func (s *RetryLayerTokenStore) ConsumeOnce(tokenType string, tokenStr string) (*model.Token, error) {

tries := 0
for {
result, err := s.TokenStore.ConsumeOnce(tokenType, tokenStr)
if err == nil {
return result, nil
}
if !isRepeatableError(err) {
return result, err
}
tries++
if tries >= 3 {
err = errors.Wrap(err, "giving up after 3 consecutive repeatable transaction failures")
return result, err
}
timepkg.Sleep(100 * timepkg.Millisecond)
}

}

func (s *RetryLayerTokenStore) Delete(token string) error {

tries := 0
@ -126,6 +126,12 @@ func (s *SqlAttributesStore) SearchUsers(rctx request.CTX, opts model.SubjectSea
query = query.Where(sq.Expr(fmt.Sprintf("NOT EXISTS (SELECT 1 FROM ChannelMembers WHERE ChannelMembers.UserId = Users.Id AND ChannelMembers.ChannelId = $%d)", argCount), opts.ExcludeChannelMembers))
}

if opts.SubjectID != "" {
argCount++
query = query.Where(sq.Expr(fmt.Sprintf("Users.Id = $%d", argCount), opts.SubjectID))
count = count.Where(sq.Expr(fmt.Sprintf("Users.Id = $%d", argCount), opts.SubjectID))
}

if opts.Cursor.TargetID != "" {
argCount++
query = query.Where(sq.Expr(fmt.Sprintf("TargetID > $%d", argCount), opts.Cursor.TargetID))
@ -3315,50 +3315,6 @@ func (s SqlChannelStore) SearchInTeam(teamId string, term string, includeDeleted
return s.performSearch(query, term)
}

func (s SqlChannelStore) SearchArchivedInTeam(teamId string, term string, userId string) (model.ChannelList, error) {
queryBase := s.getQueryBuilder().Select(channelSliceColumns(true, "Channels")...).
From("Channels").
Join("Channels c ON (c.Id = Channels.Id)").
Where(sq.And{
sq.Eq{"c.TeamId": teamId},
sq.NotEq{"c.DeleteAt": 0},
}).
OrderBy("c.DisplayName").
Limit(100)

searchClause := s.searchClause(term)
if searchClause != nil {
queryBase = queryBase.Where(searchClause)
}

publicQuery := queryBase.
Where(sq.NotEq{"c.Type": model.ChannelTypePrivate})

privateQuery := queryBase.
Where(
sq.And{
sq.Eq{"c.Type": model.ChannelTypePrivate},
sq.Expr("c.Id IN (?)", sq.Select("ChannelId").
From("ChannelMembers").
Where(sq.Eq{"UserId": userId})),
})

publicChannels, err := s.performSearch(publicQuery, term)
if err != nil {
return nil, err
}

privateChannels, err := s.performSearch(privateQuery, term)
if err != nil {
return nil, err
}

output := publicChannels
output = append(output, privateChannels...)

return output, nil
}

func (s SqlChannelStore) SearchForUserInTeam(userId string, teamId string, term string, includeDeleted bool) (model.ChannelList, error) {
query := s.getQueryBuilder().Select(channelSliceColumns(true, "Channels")...).
From("Channels").
@ -296,16 +296,6 @@ func (s *SqlGroupStore) GetByName(name string, opts model.GroupSearchOpts) (*mod
return &group, nil
}

func (s *SqlGroupStore) GetByNames(names []string, viewRestrictions *model.ViewUsersRestrictions) ([]*model.Group, error) {
groups := []*model.Group{}
query := s.userGroupsSelectQuery.Where(sq.Eq{"Name": names})
query = applyViewRestrictionsFilter(query, viewRestrictions, true)
if err := s.GetReplica().SelectBuilder(&groups, query); err != nil {
return nil, errors.Wrap(err, "failed to find Groups by names")
}
return groups, nil
}

func (s *SqlGroupStore) GetByIDs(groupIDs []string) ([]*model.Group, error) {
groups := []*model.Group{}
query := s.userGroupsSelectQuery.Where(sq.Eq{"Id": groupIDs})
@ -387,6 +387,38 @@ func (jss SqlJobStore) GetCountByStatusAndType(status string, jobType string) (i
return count, nil
}

func (jss SqlJobStore) GetByTypeAndData(rctx request.CTX, jobType string, data map[string]string, useMaster bool, statuses ...string) ([]*model.Job, error) {
query := jss.jobQuery.Where(sq.Eq{"Type": jobType})

// Add status filtering if provided - enables full usage of idx_jobs_status_type index
if len(statuses) > 0 {
query = query.Where(sq.Eq{"Status": statuses})
}

// Add JSON data filtering for each key-value pair
for key, value := range data {
query = query.Where(sq.Expr("Data->? = ?", key, fmt.Sprintf(`"%s"`, value)))
}

queryString, args, err := query.ToSql()
if err != nil {
return nil, errors.Wrap(err, "get_by_type_and_data_tosql")
}

var jobs []*model.Job
// For consistency-critical operations (like job deduplication), use master
db := jss.GetReplica()
if useMaster {
db = jss.GetMaster()
}

if err := db.Select(&jobs, queryString, args...); err != nil {
return nil, errors.Wrap(err, "failed to get Jobs by type and data")
}

return jobs, nil
}

func (jss SqlJobStore) Delete(id string) (string, error) {
query, args, err := jss.getQueryBuilder().
Delete("Jobs").
@ -49,7 +49,7 @@ const (
// After 10, it's major and minor only.
// 10.1 would be 100001.
// 9.6.3 would be 90603.
minimumRequiredPostgresVersion = 130000
minimumRequiredPostgresVersion = 140000

migrationsDirectionUp migrationDirection = "up"
migrationsDirectionDown migrationDirection = "down"
@ -543,6 +543,24 @@ func TestEnsureMinimumDBVersion(t *testing.T) {
{
driver: model.DatabaseDriverPostgres,
ver: "130001",
ok: false,
err: "",
},
{
driver: model.DatabaseDriverPostgres,
ver: "140000",
ok: true,
err: "",
},
{
driver: model.DatabaseDriverPostgres,
ver: "141900",
ok: true,
err: "",
},
{
driver: model.DatabaseDriverPostgres,
ver: "150000",
ok: true,
err: "",
},
@ -78,6 +78,21 @@ func (s SqlTokenStore) GetByToken(tokenString string) (*model.Token, error) {
return &token, nil
}

func (s SqlTokenStore) ConsumeOnce(tokenType, tokenStr string) (*model.Token, error) {
var token model.Token

query := `DELETE FROM Tokens WHERE Type = ? AND Token = ? RETURNING *`

if err := s.GetMaster().Get(&token, query, tokenType, tokenStr); err != nil {
if err == sql.ErrNoRows {
return nil, store.NewErrNotFound("Token", tokenStr)
}
return nil, errors.Wrapf(err, "failed to consume token with type %s", tokenType)
}

return &token, nil
}

func (s SqlTokenStore) Cleanup(expiryTime int64) {
if _, err := s.GetMaster().Exec("DELETE FROM Tokens WHERE CreateAt < ?", expiryTime); err != nil {
mlog.Error("Unable to cleanup token store.")
@ -274,7 +274,6 @@ type ChannelStore interface {
AutocompleteInTeamForSearch(teamID string, userID string, term string, includeDeleted bool) (model.ChannelList, error)
SearchAllChannels(term string, opts ChannelSearchOpts) (model.ChannelListWithTeamData, int64, error)
SearchInTeam(teamID string, term string, includeDeleted bool) (model.ChannelList, error)
SearchArchivedInTeam(teamID string, term string, userID string) (model.ChannelList, error)
SearchForUserInTeam(userID string, teamID string, term string, includeDeleted bool) (model.ChannelList, error)
SearchMore(userID string, teamID string, term string) (model.ChannelList, error)
SearchGroupChannels(userID, term string) (model.ChannelList, error)

@ -693,6 +692,7 @@ type TokenStore interface {
Save(recovery *model.Token) error
Delete(token string) error
GetByToken(token string) (*model.Token, error)
ConsumeOnce(tokenType, tokenStr string) (*model.Token, error)
Cleanup(expiryTime int64)
GetAllTokensByType(tokenType string) ([]*model.Token, error)
RemoveAllTokensByType(tokenType string) error

@ -800,6 +800,7 @@ type JobStore interface {
GetNewestJobByStatusAndType(status string, jobType string) (*model.Job, error)
GetNewestJobByStatusesAndType(statuses []string, jobType string) (*model.Job, error)
GetCountByStatusAndType(status string, jobType string) (int64, error)
GetByTypeAndData(rctx request.CTX, jobType string, data map[string]string, useMaster bool, statuses ...string) ([]*model.Job, error)
Delete(id string) (string, error)
Cleanup(expiryTime int64, batchSize int) error
}

@ -885,7 +886,6 @@ type GroupStore interface {
CreateWithUserIds(group *model.GroupWithUserIds) (*model.Group, error)
Get(groupID string) (*model.Group, error)
GetByName(name string, opts model.GroupSearchOpts) (*model.Group, error)
GetByNames(names []string, viewRestrictions *model.ViewUsersRestrictions) ([]*model.Group, error)
GetByIDs(groupIDs []string) ([]*model.Group, error)
GetByRemoteID(remoteID string, groupSource model.GroupSource) (*model.Group, error)
GetAllBySource(groupSource model.GroupSource) ([]*model.Group, error)
@ -33,6 +33,7 @@ var (
func TestAttributesStore(t *testing.T, rctx request.CTX, ss store.Store, s SqlStore) {
t.Run("RefreshAndGet", func(t *testing.T) { testAttributesStoreRefresh(t, rctx, ss) })
t.Run("SearchUsers", func(t *testing.T) { testAttributesStoreSearchUsers(t, rctx, ss, s) })
t.Run("SearchUsersBySubjectID", func(t *testing.T) { testAttributesStoreSearchUsersBySubjectID(t, rctx, ss, s) })
t.Run("GetChannelMembersToRemove", func(t *testing.T) { testAttributesStoreGetChannelMembersToRemove(t, rctx, ss, s) })
}

@ -392,3 +393,59 @@ func testAttributesStoreGetChannelMembersToRemove(t *testing.T, rctx request.CTX
require.Len(t, members, 2, "expected 2 channel member to remove")
})
}

func testAttributesStoreSearchUsersBySubjectID(t *testing.T, rctx request.CTX, ss store.Store, s SqlStore) {
users, _, cleanup := createTestUsers(t, rctx, ss)
t.Cleanup(cleanup)
require.Len(t, users, 3, "expected 3 users")

err := ss.Attributes().RefreshAttributes()
require.NoError(t, err, "couldn't refresh attributes")

t.Run("Search users by specific SubjectID", func(t *testing.T) {
// Test searching for the first user by their ID
subjects, count, err := ss.Attributes().SearchUsers(rctx, model.SubjectSearchOptions{
SubjectID: users[0].Id,
})
require.NoError(t, err, "couldn't search users by SubjectID")
require.Len(t, subjects, 1, "expected 1 user")
require.Equal(t, int64(1), count, "expected count 1")
require.Equal(t, users[0].Id, subjects[0].Id, "expected the specific user")
})

t.Run("Search users by non-existent SubjectID", func(t *testing.T) {
// Test with a non-existent user ID
nonExistentID := model.NewId()
subjects, count, err := ss.Attributes().SearchUsers(rctx, model.SubjectSearchOptions{
SubjectID: nonExistentID,
})
require.NoError(t, err, "couldn't search users by non-existent SubjectID")
require.Len(t, subjects, 0, "expected 0 users for non-existent ID")
require.Equal(t, int64(0), count, "expected count 0 for non-existent ID")
})

t.Run("Search users by SubjectID with query filter", func(t *testing.T) {
// Test combining SubjectID with a query filter
subjects, count, err := ss.Attributes().SearchUsers(rctx, model.SubjectSearchOptions{
SubjectID: users[0].Id,
Query: "Attributes ->> '" + testPropertyA + "' = $1::text",
Args: []any{testPropertyValueA1},
})
require.NoError(t, err, "couldn't search users by SubjectID with query")
require.Len(t, subjects, 1, "expected 1 user matching both SubjectID and query")
require.Equal(t, int64(1), count, "expected count 1")
require.Equal(t, users[0].Id, subjects[0].Id, "expected the specific user")
})

t.Run("Search users by SubjectID with non-matching query filter", func(t *testing.T) {
// Test SubjectID with a query that doesn't match that user
subjects, count, err := ss.Attributes().SearchUsers(rctx, model.SubjectSearchOptions{
SubjectID: users[0].Id,
Query: "Attributes ->> '" + testPropertyA + "' = $1::text",
Args: []any{"non_matching_value"},
})
require.NoError(t, err, "couldn't search users by SubjectID with non-matching query")
require.Len(t, subjects, 0, "expected 0 users when query doesn't match SubjectID")
require.Equal(t, int64(0), count, "expected count 0")
})
}
@ -123,7 +123,6 @@ func TestChannelStore(t *testing.T, rctx request.CTX, ss store.Store, s SqlStore
t.Run("SearchMore", func(t *testing.T) { testChannelStoreSearchMore(t, rctx, ss) })
t.Run("SearchInTeam", func(t *testing.T) { testChannelStoreSearchInTeam(t, rctx, ss) })
t.Run("Autocomplete", func(t *testing.T) { testAutocomplete(t, rctx, ss, s) })
t.Run("SearchArchivedInTeam", func(t *testing.T) { testChannelStoreSearchArchivedInTeam(t, rctx, ss, s) })
t.Run("SearchForUserInTeam", func(t *testing.T) { testChannelStoreSearchForUserInTeam(t, rctx, ss) })
t.Run("SearchAllChannels", func(t *testing.T) { testChannelStoreSearchAllChannels(t, rctx, ss) })
t.Run("GetMembersByIds", func(t *testing.T) { testChannelStoreGetMembersByIds(t, rctx, ss) })

@ -6011,47 +6010,6 @@ func (s ByChannelDisplayName) Less(i, j int) bool {
return s[i].Id < s[j].Id
}

func testChannelStoreSearchArchivedInTeam(t *testing.T, rctx request.CTX, ss store.Store, s SqlStore) {
teamID := model.NewId()
userID := model.NewId()
o1 := model.Channel{}
o1.TeamId = teamID
o1.DisplayName = "Channel1"
o1.Name = NewTestID()
o1.Type = model.ChannelTypeOpen
_, nErr := ss.Channel().Save(rctx, &o1, -1)
require.NoError(t, nErr)
o1.DeleteAt = model.GetMillis()
o1.UpdateAt = o1.DeleteAt
nErr = ss.Channel().Delete(o1.Id, o1.DeleteAt)
require.NoError(t, nErr)

t.Run("empty result", func(t *testing.T) {
list, err := ss.Channel().SearchArchivedInTeam(teamID, "term", userID)
require.NoError(t, err)
require.NotNil(t, list)
require.Empty(t, list)
})

t.Run("error", func(t *testing.T) {
// trigger a SQL error
s.GetMaster().Exec("ALTER TABLE Channels RENAME TO Channels_renamed")
defer s.GetMaster().Exec("ALTER TABLE Channels_renamed RENAME TO Channels")

list, err := ss.Channel().SearchArchivedInTeam(teamID, "term", userID)
require.Error(t, err)
require.Nil(t, list)
})

t.Run("find term", func(t *testing.T) {
list, err := ss.Channel().SearchArchivedInTeam(teamID, "Channel", userID)
require.NoError(t, err)
require.NotNil(t, list)
require.Equal(t, len(list), 1)
require.Equal(t, "Channel1", list[0].DisplayName)
})
}

func testChannelStoreSearchInTeam(t *testing.T, rctx request.CTX, ss store.Store) {
teamID := model.NewId()
otherTeamID := model.NewId()
@ -32,6 +32,7 @@ func TestJobStore(t *testing.T, rctx request.CTX, ss store.Store) {
t.Run("GetCountByStatusAndType", func(t *testing.T) { testJobStoreGetCountByStatusAndType(t, rctx, ss) })
t.Run("JobUpdateOptimistically", func(t *testing.T) { testJobUpdateOptimistically(t, rctx, ss) })
t.Run("JobUpdateStatusUpdateStatusOptimistically", func(t *testing.T) { testJobUpdateStatusUpdateStatusOptimistically(t, rctx, ss) })
t.Run("JobGetByTypeAndData", func(t *testing.T) { testJobGetByTypeAndData(t, rctx, ss) })
t.Run("JobDelete", func(t *testing.T) { testJobDelete(t, rctx, ss) })
t.Run("JobCleanup", func(t *testing.T) { testJobCleanup(t, rctx, ss) })
}

@ -792,3 +793,162 @@ func testJobCleanup(t *testing.T, rctx request.CTX, ss store.Store) {
require.NoError(t, err)
assert.Len(t, jobs, 0)
}

func testJobGetByTypeAndData(t *testing.T, rctx request.CTX, ss store.Store) {
// Test setup - create test jobs with different types and data
jobType := model.JobTypeAccessControlSync
otherJobType := model.JobTypeDataRetention

// Job 1: Access control sync job with policy_id = "channel1"
job1 := &model.Job{
Id: model.NewId(),
Type: jobType,
Status: model.JobStatusPending,
Data: map[string]string{
"policy_id": "channel1",
"extra": "data1",
},
}

// Job 2: Access control sync job with policy_id = "channel2"
job2 := &model.Job{
Id: model.NewId(),
Type: jobType,
Status: model.JobStatusInProgress,
Data: map[string]string{
"policy_id": "channel2",
"extra": "data2",
},
}

// Job 3: Access control sync job with policy_id = "channel1" (same as job1)
job3 := &model.Job{
Id: model.NewId(),
Type: jobType,
Status: model.JobStatusSuccess,
Data: map[string]string{
"policy_id": "channel1",
"extra": "data3",
},
}

// Job 4: Different job type with same policy_id
job4 := &model.Job{
Id: model.NewId(),
Type: otherJobType,
Status: model.JobStatusPending,
Data: map[string]string{
"policy_id": "channel1",
},
}

// Save all jobs
_, err := ss.Job().Save(job1)
require.NoError(t, err)
defer func() { _, _ = ss.Job().Delete(job1.Id) }()

_, err = ss.Job().Save(job2)
require.NoError(t, err)
defer func() { _, _ = ss.Job().Delete(job2.Id) }()

_, err = ss.Job().Save(job3)
require.NoError(t, err)
defer func() { _, _ = ss.Job().Delete(job3.Id) }()

_, err = ss.Job().Save(job4)
require.NoError(t, err)
defer func() { _, _ = ss.Job().Delete(job4.Id) }()

t.Run("finds jobs by type and single data field", func(t *testing.T) {
// Should find job1 and job3 (both have policy_id = "channel1" and correct type)
jobs, err := ss.Job().GetByTypeAndData(rctx, jobType, map[string]string{
"policy_id": "channel1",
}, false)
require.NoError(t, err)
require.Len(t, jobs, 2)

// Should contain job1 and job3
jobIds := []string{jobs[0].Id, jobs[1].Id}
assert.Contains(t, jobIds, job1.Id)
assert.Contains(t, jobIds, job3.Id)
})

t.Run("finds jobs by type and multiple data fields", func(t *testing.T) {
// Should find only job1 (has both policy_id = "channel1" AND extra = "data1")
jobs, err := ss.Job().GetByTypeAndData(rctx, jobType, map[string]string{
"policy_id": "channel1",
"extra": "data1",
}, false)
require.NoError(t, err)
require.Len(t, jobs, 1)
assert.Equal(t, job1.Id, jobs[0].Id)
})

t.Run("returns empty slice when no matches", func(t *testing.T) {
// Should find nothing (no jobs with policy_id = "nonexistent")
jobs, err := ss.Job().GetByTypeAndData(rctx, jobType, map[string]string{
"policy_id": "nonexistent",
}, false)
require.NoError(t, err)
assert.Len(t, jobs, 0)
})

t.Run("filters by job type correctly", func(t *testing.T) {
// Should find only job4 (different job type with same policy_id)
jobs, err := ss.Job().GetByTypeAndData(rctx, otherJobType, map[string]string{
"policy_id": "channel1",
}, false)
require.NoError(t, err)
require.Len(t, jobs, 1)
assert.Equal(t, job4.Id, jobs[0].Id)
})

// Test status parameter filtering
t.Run("filters by single status", func(t *testing.T) {
// Filter by single status should return only matching jobs
jobs, err := ss.Job().GetByTypeAndData(rctx, jobType, map[string]string{
"policy_id": "channel1",
}, false, model.JobStatusPending)
require.NoError(t, err)
require.Len(t, jobs, 1)
assert.Equal(t, job1.Id, jobs[0].Id)
assert.Equal(t, model.JobStatusPending, jobs[0].Status)
})

t.Run("filters by multiple statuses", func(t *testing.T) {
// Filter by multiple statuses should return jobs matching any status
jobs, err := ss.Job().GetByTypeAndData(rctx, jobType, map[string]string{
"policy_id": "channel1",
}, false, model.JobStatusPending, model.JobStatusSuccess)
require.NoError(t, err)
require.Len(t, jobs, 2)

// Verify both statuses are represented
statuses := []string{jobs[0].Status, jobs[1].Status}
assert.Contains(t, statuses, model.JobStatusPending)
assert.Contains(t, statuses, model.JobStatusSuccess)
})

t.Run("no status filter returns all statuses", func(t *testing.T) {
// No status filter should return all jobs regardless of status
jobs, err := ss.Job().GetByTypeAndData(rctx, jobType, map[string]string{
"policy_id": "channel1",
}, false) // No status parameters
require.NoError(t, err)
require.Len(t, jobs, 2) // job1 (pending), job3 (success) - both have policy_id=channel1

// Verify both statuses are present
statuses := []string{jobs[0].Status, jobs[1].Status}
assert.Contains(t, statuses, model.JobStatusPending)
assert.Contains(t, statuses, model.JobStatusSuccess)
})

t.Run("filters by non-existent status returns empty", func(t *testing.T) {
// Invalid status filter should return empty result
jobs, err := ss.Job().GetByTypeAndData(rctx, jobType, map[string]string{
"policy_id": "channel1",
}, false, model.JobStatusError)
require.NoError(t, err)
require.Len(t, jobs, 0)
})
}
@ -2701,36 +2701,6 @@ func (_m *ChannelStore) SearchAllChannels(term string, opts store.ChannelSearchO
return r0, r1, r2
}

// SearchArchivedInTeam provides a mock function with given fields: teamID, term, userID
func (_m *ChannelStore) SearchArchivedInTeam(teamID string, term string, userID string) (model.ChannelList, error) {
ret := _m.Called(teamID, term, userID)

if len(ret) == 0 {
panic("no return value specified for SearchArchivedInTeam")
}

var r0 model.ChannelList
var r1 error
if rf, ok := ret.Get(0).(func(string, string, string) (model.ChannelList, error)); ok {
return rf(teamID, term, userID)
}
if rf, ok := ret.Get(0).(func(string, string, string) model.ChannelList); ok {
r0 = rf(teamID, term, userID)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).(model.ChannelList)
}
}

if rf, ok := ret.Get(1).(func(string, string, string) error); ok {
r1 = rf(teamID, term, userID)
} else {
r1 = ret.Error(1)
}

return r0, r1
}

// SearchForUserInTeam provides a mock function with given fields: userID, teamID, term, includeDeleted
func (_m *ChannelStore) SearchForUserInTeam(userID string, teamID string, term string, includeDeleted bool) (model.ChannelList, error) {
ret := _m.Called(userID, teamID, term, includeDeleted)
@ -662,36 +662,6 @@ func (_m *GroupStore) GetByName(name string, opts model.GroupSearchOpts) (*model
|
|||
return r0, r1
|
||||
}
|
||||
|
||||
// GetByNames provides a mock function with given fields: names, viewRestrictions
|
||||
func (_m *GroupStore) GetByNames(names []string, viewRestrictions *model.ViewUsersRestrictions) ([]*model.Group, error) {
|
||||
ret := _m.Called(names, viewRestrictions)
|
||||
|
||||
if len(ret) == 0 {
|
||||
panic("no return value specified for GetByNames")
|
||||
}
|
||||
|
||||
var r0 []*model.Group
|
||||
var r1 error
|
||||
if rf, ok := ret.Get(0).(func([]string, *model.ViewUsersRestrictions) ([]*model.Group, error)); ok {
|
||||
return rf(names, viewRestrictions)
|
||||
}
|
||||
if rf, ok := ret.Get(0).(func([]string, *model.ViewUsersRestrictions) []*model.Group); ok {
|
||||
r0 = rf(names, viewRestrictions)
|
||||
} else {
|
||||
if ret.Get(0) != nil {
|
||||
r0 = ret.Get(0).([]*model.Group)
|
||||
}
|
||||
}
|
||||
|
||||
if rf, ok := ret.Get(1).(func([]string, *model.ViewUsersRestrictions) error); ok {
|
||||
r1 = rf(names, viewRestrictions)
|
||||
} else {
|
||||
r1 = ret.Error(1)
|
||||
}
|
||||
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
// GetByRemoteID provides a mock function with given fields: remoteID, groupSource
|
||||
func (_m *GroupStore) GetByRemoteID(remoteID string, groupSource model.GroupSource) (*model.Group, error) {
|
||||
ret := _m.Called(remoteID, groupSource)
|
||||
|
|
|
|||
|
|
@ -271,6 +271,43 @@ func (_m *JobStore) GetAllByTypesPage(rctx request.CTX, jobTypes []string, offse
|
|||
return r0, r1
|
||||
}
|
||||
|
||||
// GetByTypeAndData provides a mock function with given fields: rctx, jobType, data, useMaster, statuses
|
||||
func (_m *JobStore) GetByTypeAndData(rctx request.CTX, jobType string, data map[string]string, useMaster bool, statuses ...string) ([]*model.Job, error) {
|
||||
_va := make([]interface{}, len(statuses))
|
||||
for _i := range statuses {
|
||||
_va[_i] = statuses[_i]
|
||||
}
|
||||
var _ca []interface{}
|
||||
_ca = append(_ca, rctx, jobType, data, useMaster)
|
||||
_ca = append(_ca, _va...)
|
||||
ret := _m.Called(_ca...)
|
||||
|
||||
if len(ret) == 0 {
|
||||
panic("no return value specified for GetByTypeAndData")
|
||||
}
|
||||
|
||||
var r0 []*model.Job
|
||||
var r1 error
|
||||
if rf, ok := ret.Get(0).(func(request.CTX, string, map[string]string, bool, ...string) ([]*model.Job, error)); ok {
|
||||
return rf(rctx, jobType, data, useMaster, statuses...)
|
||||
}
|
||||
if rf, ok := ret.Get(0).(func(request.CTX, string, map[string]string, bool, ...string) []*model.Job); ok {
|
||||
r0 = rf(rctx, jobType, data, useMaster, statuses...)
|
||||
} else {
|
||||
if ret.Get(0) != nil {
|
||||
r0 = ret.Get(0).([]*model.Job)
|
||||
}
|
||||
}
|
||||
|
||||
if rf, ok := ret.Get(1).(func(request.CTX, string, map[string]string, bool, ...string) error); ok {
|
||||
r1 = rf(rctx, jobType, data, useMaster, statuses...)
|
||||
} else {
|
||||
r1 = ret.Error(1)
|
||||
}
|
||||
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
// GetCountByStatusAndType provides a mock function with given fields: status, jobType
|
||||
func (_m *JobStore) GetCountByStatusAndType(status string, jobType string) (int64, error) {
|
||||
ret := _m.Called(status, jobType)
|
||||
|
|
|
|||
|
|
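Because the generated JobStore mock above flattens the variadic statuses into individual call arguments, a test expectation lists each status as its own argument after rctx, jobType, data, and useMaster. A minimal sketch, with package paths assumed from the usual repository layout:

package example_test

import (
	"testing"

	"github.com/stretchr/testify/mock"

	"github.com/mattermost/mattermost/server/public/model"
	"github.com/mattermost/mattermost/server/v8/channels/store/storetest/mocks"
)

func TestGetByTypeAndDataExpectation(t *testing.T) {
	jobStore := &mocks.JobStore{}

	// Each variadic status is matched as a separate argument.
	jobStore.On("GetByTypeAndData",
		mock.Anything, // rctx
		"some_job_type",
		map[string]string{"policy_id": "channel1"},
		false,
		model.JobStatusPending,
	).Return([]*model.Job{}, nil)

	jobs, err := jobStore.GetByTypeAndData(nil, "some_job_type",
		map[string]string{"policy_id": "channel1"}, false, model.JobStatusPending)
	if err != nil || len(jobs) != 0 {
		t.Fatalf("unexpected result: %v %v", jobs, err)
	}
}
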
@ -19,6 +19,36 @@ func (_m *TokenStore) Cleanup(expiryTime int64) {
|
|||
_m.Called(expiryTime)
|
||||
}
|
||||
|
||||
// ConsumeOnce provides a mock function with given fields: tokenType, tokenStr
|
||||
func (_m *TokenStore) ConsumeOnce(tokenType string, tokenStr string) (*model.Token, error) {
|
||||
ret := _m.Called(tokenType, tokenStr)
|
||||
|
||||
if len(ret) == 0 {
|
||||
panic("no return value specified for ConsumeOnce")
|
||||
}
|
||||
|
||||
var r0 *model.Token
|
||||
var r1 error
|
||||
if rf, ok := ret.Get(0).(func(string, string) (*model.Token, error)); ok {
|
||||
return rf(tokenType, tokenStr)
|
||||
}
|
||||
if rf, ok := ret.Get(0).(func(string, string) *model.Token); ok {
|
||||
r0 = rf(tokenType, tokenStr)
|
||||
} else {
|
||||
if ret.Get(0) != nil {
|
||||
r0 = ret.Get(0).(*model.Token)
|
||||
}
|
||||
}
|
||||
|
||||
if rf, ok := ret.Get(1).(func(string, string) error); ok {
|
||||
r1 = rf(tokenType, tokenStr)
|
||||
} else {
|
||||
r1 = ret.Error(1)
|
||||
}
|
||||
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
// Delete provides a mock function with given fields: token
|
||||
func (_m *TokenStore) Delete(token string) error {
|
||||
ret := _m.Called(token)
|
||||
|
|
|
|||
|
|
@ -16,6 +16,7 @@ import (
|
|||
|
||||
func TestTokensStore(t *testing.T, rctx request.CTX, ss store.Store) {
|
||||
t.Run("TokensCleanup", func(t *testing.T) { testTokensCleanup(t, rctx, ss) })
|
||||
t.Run("ConsumeOnce", func(t *testing.T) { testConsumeOnce(t, rctx, ss) })
|
||||
}
|
||||
|
||||
func testTokensCleanup(t *testing.T, rctx request.CTX, ss store.Store) {
|
||||
|
|
@ -41,3 +42,130 @@ func testTokensCleanup(t *testing.T, rctx request.CTX, ss store.Store) {
|
|||
require.NoError(t, err)
|
||||
assert.Len(t, tokens, 0)
|
||||
}
|
||||
|
||||
func testConsumeOnce(t *testing.T, rctx request.CTX, ss store.Store) {
|
||||
t.Run("successfully consume token once", func(t *testing.T) {
|
||||
token := &model.Token{
|
||||
Token: model.NewRandomString(model.TokenSize),
|
||||
CreateAt: model.GetMillis(),
|
||||
Type: model.TokenTypeOAuth,
|
||||
Extra: "test-extra",
|
||||
}
|
||||
err := ss.Token().Save(token)
|
||||
require.NoError(t, err)
|
||||
|
||||
consumedToken, err := ss.Token().ConsumeOnce(model.TokenTypeOAuth, token.Token)
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, token.Token, consumedToken.Token)
|
||||
assert.Equal(t, token.Type, consumedToken.Type)
|
||||
assert.Equal(t, token.Extra, consumedToken.Extra)
|
||||
|
||||
tokens, err := ss.Token().GetAllTokensByType(model.TokenTypeOAuth)
|
||||
require.NoError(t, err)
|
||||
assert.Len(t, tokens, 0)
|
||||
})
|
||||
|
||||
t.Run("second consumption of same token fails", func(t *testing.T) {
|
||||
token := &model.Token{
|
||||
Token: model.NewRandomString(model.TokenSize),
|
||||
CreateAt: model.GetMillis(),
|
||||
Type: model.TokenTypeOAuth,
|
||||
Extra: "test-extra",
|
||||
}
|
||||
err := ss.Token().Save(token)
|
||||
require.NoError(t, err)
|
||||
|
||||
_, err = ss.Token().ConsumeOnce(model.TokenTypeOAuth, token.Token)
|
||||
require.NoError(t, err)
|
||||
|
||||
_, err = ss.Token().ConsumeOnce(model.TokenTypeOAuth, token.Token)
|
||||
require.Error(t, err)
|
||||
var nfErr *store.ErrNotFound
|
||||
assert.ErrorAs(t, err, &nfErr)
|
||||
})
|
||||
|
||||
t.Run("consume with wrong type fails", func(t *testing.T) {
|
||||
token := &model.Token{
|
||||
Token: model.NewRandomString(model.TokenSize),
|
||||
CreateAt: model.GetMillis(),
|
||||
Type: model.TokenTypeOAuth,
|
||||
Extra: "test-extra",
|
||||
}
|
||||
err := ss.Token().Save(token)
|
||||
require.NoError(t, err)
|
||||
|
||||
_, err = ss.Token().ConsumeOnce(model.TokenTypeSSOCodeExchange, token.Token)
|
||||
require.Error(t, err)
|
||||
var nfErr *store.ErrNotFound
|
||||
assert.ErrorAs(t, err, &nfErr)
|
||||
|
||||
tokens, err := ss.Token().GetAllTokensByType(model.TokenTypeOAuth)
|
||||
require.NoError(t, err)
|
||||
assert.Len(t, tokens, 1)
|
||||
|
||||
err = ss.Token().Delete(token.Token)
|
||||
require.NoError(t, err)
|
||||
})
|
||||
|
||||
t.Run("consume non-existent token fails", func(t *testing.T) {
|
||||
nonExistentToken := model.NewRandomString(model.TokenSize)
|
||||
_, err := ss.Token().ConsumeOnce(model.TokenTypeOAuth, nonExistentToken)
|
||||
require.Error(t, err)
|
||||
var nfErr *store.ErrNotFound
|
||||
assert.ErrorAs(t, err, &nfErr)
|
||||
})
|
||||
|
||||
t.Run("multiple tokens with same type can each be consumed once", func(t *testing.T) {
|
||||
tokens := make([]*model.Token, 3)
|
||||
for i := range tokens {
|
||||
tokens[i] = &model.Token{
|
||||
Token: model.NewRandomString(model.TokenSize),
|
||||
CreateAt: model.GetMillis(),
|
||||
Type: model.TokenTypeOAuth,
|
||||
Extra: "test-extra",
|
||||
}
|
||||
err := ss.Token().Save(tokens[i])
|
||||
require.NoError(t, err)
|
||||
}
|
||||
|
||||
for _, token := range tokens {
|
||||
consumedToken, err := ss.Token().ConsumeOnce(model.TokenTypeOAuth, token.Token)
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, token.Token, consumedToken.Token)
|
||||
}
|
||||
|
||||
allTokens, err := ss.Token().GetAllTokensByType(model.TokenTypeOAuth)
|
||||
require.NoError(t, err)
|
||||
assert.Len(t, allTokens, 0)
|
||||
})
|
||||
|
||||
t.Run("consuming token of different type leaves others intact", func(t *testing.T) {
|
||||
oauthToken := &model.Token{
|
||||
Token: model.NewRandomString(model.TokenSize),
|
||||
CreateAt: model.GetMillis(),
|
||||
Type: model.TokenTypeOAuth,
|
||||
Extra: "oauth-extra",
|
||||
}
|
||||
codeExchangeToken := &model.Token{
|
||||
Token: model.NewRandomString(model.TokenSize),
|
||||
CreateAt: model.GetMillis(),
|
||||
Type: model.TokenTypeSSOCodeExchange,
|
||||
Extra: "password-extra",
|
||||
}
|
||||
err := ss.Token().Save(oauthToken)
|
||||
require.NoError(t, err)
|
||||
err = ss.Token().Save(codeExchangeToken)
|
||||
require.NoError(t, err)
|
||||
|
||||
consumedToken, err := ss.Token().ConsumeOnce(model.TokenTypeOAuth, oauthToken.Token)
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, oauthToken.Token, consumedToken.Token)
|
||||
|
||||
codeExchangeTokens, err := ss.Token().GetAllTokensByType(model.TokenTypeSSOCodeExchange)
|
||||
require.NoError(t, err)
|
||||
assert.Len(t, codeExchangeTokens, 1)
|
||||
|
||||
err = ss.Token().Delete(codeExchangeToken.Token)
|
||||
require.NoError(t, err)
|
||||
})
|
||||
}
|
||||
|
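ConsumeOnce pairs the read with an atomic delete, which is what makes it suitable for one-time artifacts such as the SAML login codes introduced later in this diff. A hedged sketch of a redemption path (the function name, error text, and calling context are invented for illustration):

package example

import (
	"fmt"
	"strings"

	"github.com/mattermost/mattermost/server/public/model"
	"github.com/mattermost/mattermost/server/v8/channels/store"
)

// redeemLoginCode is illustrative only: consume the code exactly once so a
// replayed request cannot reuse it, then read back the data stored in Extra.
func redeemLoginCode(ss store.Store, loginCode string) (map[string]string, error) {
	token, err := ss.Token().ConsumeOnce(model.TokenTypeSSOCodeExchange, loginCode)
	if err != nil {
		// A not-found error means the code is unknown, already used, or of the
		// wrong type; all are treated as invalid.
		return nil, fmt.Errorf("invalid or already used login code: %w", err)
	}
	return model.MapFromJSON(strings.NewReader(token.Extra)), nil
}
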
|
|
|||
|
|
@ -2474,22 +2474,6 @@ func (s *TimerLayerChannelStore) SearchAllChannels(term string, opts store.Chann
|
|||
return result, resultVar1, err
|
||||
}
|
||||
|
||||
func (s *TimerLayerChannelStore) SearchArchivedInTeam(teamID string, term string, userID string) (model.ChannelList, error) {
|
||||
start := time.Now()
|
||||
|
||||
result, err := s.ChannelStore.SearchArchivedInTeam(teamID, term, userID)
|
||||
|
||||
elapsed := float64(time.Since(start)) / float64(time.Second)
|
||||
if s.Root.Metrics != nil {
|
||||
success := "false"
|
||||
if err == nil {
|
||||
success = "true"
|
||||
}
|
||||
s.Root.Metrics.ObserveStoreMethodDuration("ChannelStore.SearchArchivedInTeam", success, elapsed)
|
||||
}
|
||||
return result, err
|
||||
}
|
||||
|
||||
func (s *TimerLayerChannelStore) SearchForUserInTeam(userID string, teamID string, term string, includeDeleted bool) (model.ChannelList, error) {
|
||||
start := time.Now()
|
||||
|
||||
|
|
@ -4519,22 +4503,6 @@ func (s *TimerLayerGroupStore) GetByName(name string, opts model.GroupSearchOpts
|
|||
return result, err
|
||||
}
|
||||
|
||||
func (s *TimerLayerGroupStore) GetByNames(names []string, viewRestrictions *model.ViewUsersRestrictions) ([]*model.Group, error) {
|
||||
start := time.Now()
|
||||
|
||||
result, err := s.GroupStore.GetByNames(names, viewRestrictions)
|
||||
|
||||
elapsed := float64(time.Since(start)) / float64(time.Second)
|
||||
if s.Root.Metrics != nil {
|
||||
success := "false"
|
||||
if err == nil {
|
||||
success = "true"
|
||||
}
|
||||
s.Root.Metrics.ObserveStoreMethodDuration("GroupStore.GetByNames", success, elapsed)
|
||||
}
|
||||
return result, err
|
||||
}
|
||||
|
||||
func (s *TimerLayerGroupStore) GetByRemoteID(remoteID string, groupSource model.GroupSource) (*model.Group, error) {
|
||||
start := time.Now()
|
||||
|
||||
|
|
@ -5191,6 +5159,22 @@ func (s *TimerLayerJobStore) GetAllByTypesPage(rctx request.CTX, jobTypes []stri
|
|||
return result, err
|
||||
}
|
||||
|
||||
func (s *TimerLayerJobStore) GetByTypeAndData(rctx request.CTX, jobType string, data map[string]string, useMaster bool, statuses ...string) ([]*model.Job, error) {
|
||||
start := time.Now()
|
||||
|
||||
result, err := s.JobStore.GetByTypeAndData(rctx, jobType, data, useMaster, statuses...)
|
||||
|
||||
elapsed := float64(time.Since(start)) / float64(time.Second)
|
||||
if s.Root.Metrics != nil {
|
||||
success := "false"
|
||||
if err == nil {
|
||||
success = "true"
|
||||
}
|
||||
s.Root.Metrics.ObserveStoreMethodDuration("JobStore.GetByTypeAndData", success, elapsed)
|
||||
}
|
||||
return result, err
|
||||
}
|
||||
|
||||
func (s *TimerLayerJobStore) GetCountByStatusAndType(status string, jobType string) (int64, error) {
|
||||
start := time.Now()
|
||||
|
||||
|
|
@ -7429,6 +7413,22 @@ func (s *TimerLayerPropertyFieldStore) CountForGroup(groupID string, includeDele
|
|||
return result, err
|
||||
}
|
||||
|
||||
func (s *TimerLayerPropertyFieldStore) CountForTarget(groupID string, targetType string, targetID string, includeDeleted bool) (int64, error) {
|
||||
start := time.Now()
|
||||
|
||||
result, err := s.PropertyFieldStore.CountForTarget(groupID, targetType, targetID, includeDeleted)
|
||||
|
||||
elapsed := float64(time.Since(start)) / float64(time.Second)
|
||||
if s.Root.Metrics != nil {
|
||||
success := "false"
|
||||
if err == nil {
|
||||
success = "true"
|
||||
}
|
||||
s.Root.Metrics.ObserveStoreMethodDuration("PropertyFieldStore.CountForTarget", success, elapsed)
|
||||
}
|
||||
return result, err
|
||||
}
|
||||
|
||||
func (s *TimerLayerPropertyFieldStore) Create(field *model.PropertyField) (*model.PropertyField, error) {
|
||||
start := time.Now()
|
||||
|
||||
|
|
@ -11122,6 +11122,22 @@ func (s *TimerLayerTokenStore) Cleanup(expiryTime int64) {
|
|||
}
|
||||
}
|
||||
|
||||
func (s *TimerLayerTokenStore) ConsumeOnce(tokenType string, tokenStr string) (*model.Token, error) {
|
||||
start := time.Now()
|
||||
|
||||
result, err := s.TokenStore.ConsumeOnce(tokenType, tokenStr)
|
||||
|
||||
elapsed := float64(time.Since(start)) / float64(time.Second)
|
||||
if s.Root.Metrics != nil {
|
||||
success := "false"
|
||||
if err == nil {
|
||||
success = "true"
|
||||
}
|
||||
s.Root.Metrics.ObserveStoreMethodDuration("TokenStore.ConsumeOnce", success, elapsed)
|
||||
}
|
||||
return result, err
|
||||
}
|
||||
|
||||
func (s *TimerLayerTokenStore) Delete(token string) error {
|
||||
start := time.Now()
|
||||
|
||||
|
|
|
|||
|
|
@ -15,10 +15,23 @@ import (
	"time"

	"github.com/mattermost/mattermost/server/v8/channels/utils"
	"github.com/stretchr/testify/assert"

	"github.com/mattermost/mattermost/server/v8/channels/utils/fileutils"
)

// CollectTWithLogf adds Logf to assert.CollectT to make this pattern possible:
//
//	assert.EventuallyWithT(t, func(c *assert.CollectT) {
//		mockAPI.AssertExpectations(&testutils.CollectTWithLogf{CollectT: c})
//	}, 5*time.Second, 100*time.Millisecond)
type CollectTWithLogf struct {
	*assert.CollectT
}

func (*CollectTWithLogf) Logf(string, ...any) {
}

func ReadTestFile(name string) ([]byte, error) {
	path, _ := fileutils.FindDir("tests")
	file, err := os.Open(filepath.Join(path, name))
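To make the doc comment's pattern concrete, here is a small self-contained test that waits for an asynchronous expectation; the testutils import path is assumed and the notifier type is invented for the example:

package example_test

import (
	"testing"
	"time"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/mock"

	"github.com/mattermost/mattermost/server/v8/channels/utils/testutils"
)

type notifier struct{ mock.Mock }

func (n *notifier) Notify(msg string) { n.Called(msg) }

func TestNotifyEventuallyCalled(t *testing.T) {
	n := &notifier{}
	n.On("Notify", "done").Return()

	go func() {
		time.Sleep(200 * time.Millisecond)
		n.Notify("done")
	}()

	assert.EventuallyWithT(t, func(c *assert.CollectT) {
		// AssertExpectations needs a TestingT with Logf; assert.CollectT lacks
		// it, so wrap it in CollectTWithLogf.
		n.AssertExpectations(&testutils.CollectTWithLogf{CollectT: c})
	}, 5*time.Second, 100*time.Millisecond)
}
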
|||
|
|
@ -494,7 +494,7 @@ func (h *Handler) checkCSRFToken(c *Context, r *http.Request, tokenLocation app.
|
|||
mlog.String("user_id", session.UserId),
|
||||
}
|
||||
|
||||
if *c.App.Config().ServiceSettings.StrictCSRFEnforcement {
|
||||
if *c.App.Config().ServiceSettings.ExperimentalStrictCSRFEnforcement {
|
||||
c.Logger.Warn(csrfErrorMessage, fields...)
|
||||
} else {
|
||||
c.Logger.Debug(csrfErrorMessage, fields...)
|
||||
|
|
|
|||
|
|
@ -227,7 +227,7 @@ func TestHandlerServeCSRFToken(t *testing.T) {
|
|||
// Fallback Behavior Used - Success expected
|
||||
// ToDo (DSchalla) 2019/01/04: Remove once legacy CSRF Handling is removed
|
||||
th.App.UpdateConfig(func(config *model.Config) {
|
||||
*config.ServiceSettings.StrictCSRFEnforcement = false
|
||||
*config.ServiceSettings.ExperimentalStrictCSRFEnforcement = false
|
||||
})
|
||||
request = httptest.NewRequest("POST", "/api/v4/test", nil)
|
||||
request.AddCookie(cookie)
|
||||
|
|
@ -244,7 +244,7 @@ func TestHandlerServeCSRFToken(t *testing.T) {
|
|||
// Fallback Behavior Used with Strict Enforcement - Failure Expected
|
||||
// ToDo (DSchalla) 2019/01/04: Remove once legacy CSRF Handling is removed
|
||||
th.App.UpdateConfig(func(config *model.Config) {
|
||||
*config.ServiceSettings.StrictCSRFEnforcement = true
|
||||
*config.ServiceSettings.ExperimentalStrictCSRFEnforcement = true
|
||||
})
|
||||
response = httptest.NewRecorder()
|
||||
handler.ServeHTTP(response, request)
|
||||
|
|
@ -587,12 +587,15 @@ func TestHandlerServeInvalidToken(t *testing.T) {
|
|||
|
||||
func TestHandlerServeCSRFFailureClearsAuthCookie(t *testing.T) {
|
||||
testCases := []struct {
|
||||
Description string
|
||||
SiteURL string
|
||||
ExpectedSetCookieHeaderRegexp string
|
||||
Description string
|
||||
SiteURL string
|
||||
ExpectedSetCookieHeaderRegexp string
|
||||
ExperimentalStrictCSRFEnforcement bool
|
||||
}{
|
||||
{"no subpath", "http://localhost:8065", "^MMAUTHTOKEN=; Path=/"},
|
||||
{"subpath", "http://localhost:8065/subpath", "^MMAUTHTOKEN=; Path=/subpath"},
|
||||
{"no subpath", "http://localhost:8065", "^MMAUTHTOKEN=; Path=/", false},
|
||||
{"subpath", "http://localhost:8065/subpath", "^MMAUTHTOKEN=; Path=/subpath", false},
|
||||
{"no subpath", "http://localhost:8065", "^MMAUTHTOKEN=; Path=/", true},
|
||||
{"subpath", "http://localhost:8065/subpath", "^MMAUTHTOKEN=; Path=/subpath", true},
|
||||
}
|
||||
|
||||
for _, tc := range testCases {
|
||||
|
|
@ -601,6 +604,7 @@ func TestHandlerServeCSRFFailureClearsAuthCookie(t *testing.T) {
|
|||
|
||||
th.App.UpdateConfig(func(cfg *model.Config) {
|
||||
*cfg.ServiceSettings.SiteURL = tc.SiteURL
|
||||
*cfg.ServiceSettings.ExperimentalStrictCSRFEnforcement = tc.ExperimentalStrictCSRFEnforcement
|
||||
})
|
||||
|
||||
session := &model.Session{
|
||||
|
|
@ -635,10 +639,14 @@ func TestHandlerServeCSRFFailureClearsAuthCookie(t *testing.T) {
|
|||
request.Header.Add(model.HeaderRequestedWith, model.HeaderRequestedWithXML)
|
||||
response := httptest.NewRecorder()
|
||||
handler.ServeHTTP(response, request)
|
||||
require.Equal(t, http.StatusUnauthorized, response.Code)
|
||||
|
||||
cookies := response.Header().Get("Set-Cookie")
|
||||
assert.Regexp(t, tc.ExpectedSetCookieHeaderRegexp, cookies)
|
||||
if tc.ExperimentalStrictCSRFEnforcement {
|
||||
require.Equal(t, http.StatusUnauthorized, response.Code)
|
||||
cookies := response.Header().Get("Set-Cookie")
|
||||
assert.Regexp(t, tc.ExpectedSetCookieHeaderRegexp, cookies)
|
||||
} else {
|
||||
require.Equal(t, http.StatusOK, response.Code)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
|
@ -674,7 +682,7 @@ func TestCheckCSRFToken(t *testing.T) {
|
|||
assert.Nil(t, c.Err)
|
||||
})
|
||||
|
||||
t.Run("should not allow a POST request with an X-Requested-With header", func(t *testing.T) {
|
||||
t.Run("should allow a POST request with an X-Requested-With header", func(t *testing.T) {
|
||||
th := SetupWithStoreMock(t)
|
||||
|
||||
h := &Handler{
|
||||
|
|
@ -700,6 +708,56 @@ func TestCheckCSRFToken(t *testing.T) {
|
|||
|
||||
checked, passed := h.checkCSRFToken(c, r, tokenLocation, session)
|
||||
|
||||
assert.True(t, checked)
|
||||
assert.True(t, passed)
|
||||
assert.Nil(t, c.Err)
|
||||
})
|
||||
|
||||
t.Run("should not allow a POST request with an X-Requested-With header with strict CSRF enforcement enabled", func(t *testing.T) {
|
||||
th := SetupWithStoreMock(t)
|
||||
|
||||
mockStore := th.App.Srv().Store().(*mocks.Store)
|
||||
mockUserStore := mocks.UserStore{}
|
||||
mockUserStore.On("Count", mock.Anything).Return(int64(10), nil)
|
||||
mockPostStore := mocks.PostStore{}
|
||||
mockPostStore.On("GetMaxPostSize").Return(65535, nil)
|
||||
mockSystemStore := mocks.SystemStore{}
|
||||
mockSystemStore.On("GetByName", "UpgradedFromTE").Return(&model.System{Name: "UpgradedFromTE", Value: "false"}, nil)
|
||||
mockSystemStore.On("GetByName", "InstallationDate").Return(&model.System{Name: "InstallationDate", Value: "10"}, nil)
|
||||
mockSystemStore.On("GetByName", "FirstServerRunTimestamp").Return(&model.System{Name: "FirstServerRunTimestamp", Value: "10"}, nil)
|
||||
|
||||
mockStore.On("User").Return(&mockUserStore)
|
||||
mockStore.On("Post").Return(&mockPostStore)
|
||||
mockStore.On("System").Return(&mockSystemStore)
|
||||
mockStore.On("GetDBSchemaVersion").Return(1, nil)
|
||||
|
||||
th.App.UpdateConfig(func(cfg *model.Config) {
|
||||
*cfg.ServiceSettings.ExperimentalStrictCSRFEnforcement = true
|
||||
})
|
||||
|
||||
h := &Handler{
|
||||
RequireSession: true,
|
||||
TrustRequester: false,
|
||||
}
|
||||
|
||||
token := "token"
|
||||
tokenLocation := app.TokenLocationCookie
|
||||
|
||||
c := &Context{
|
||||
App: th.App,
|
||||
Logger: th.App.Log(),
|
||||
AppContext: th.Context,
|
||||
}
|
||||
r, _ := http.NewRequest(http.MethodPost, "", nil)
|
||||
r.Header.Set(model.HeaderRequestedWith, model.HeaderRequestedWithXML)
|
||||
session := &model.Session{
|
||||
Props: map[string]string{
|
||||
"csrf": token,
|
||||
},
|
||||
}
|
||||
|
||||
checked, passed := h.checkCSRFToken(c, r, tokenLocation, session)
|
||||
|
||||
assert.True(t, checked)
|
||||
assert.False(t, passed)
|
||||
assert.Nil(t, c.Err)
|
||||
|
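Taken together, these handler and test changes mean the X-Requested-With fallback now passes the CSRF check unless the new ExperimentalStrictCSRFEnforcement setting is enabled. A simplified restatement of that decision (not the actual handler, which also logs a warning or debug message and records session fields):

package example

import (
	"net/http"

	"github.com/mattermost/mattermost/server/public/model"
)

// csrfFallbackPasses summarizes the behavior covered by the tests above.
func csrfFallbackPasses(r *http.Request, cfg *model.Config) bool {
	if r.Header.Get(model.HeaderRequestedWith) != model.HeaderRequestedWithXML {
		return false
	}
	// With experimental strict enforcement the fallback is rejected;
	// otherwise it is still accepted.
	return !*cfg.ServiceSettings.ExperimentalStrictCSRFEnforcement
}
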
|
|
|||
|
|
@ -37,6 +37,10 @@ func loginWithSaml(c *Context, w http.ResponseWriter, r *http.Request) {
|
|||
action := r.URL.Query().Get("action")
|
||||
isMobile := action == model.OAuthActionMobile
|
||||
redirectURL := html.EscapeString(r.URL.Query().Get("redirect_to"))
|
||||
// Optional SAML challenge parameters for mobile code-exchange
|
||||
state := r.URL.Query().Get("state")
|
||||
codeChallenge := r.URL.Query().Get("code_challenge")
|
||||
codeChallengeMethod := r.URL.Query().Get("code_challenge_method")
|
||||
relayProps := map[string]string{}
|
||||
relayState := ""
|
||||
|
||||
|
|
@ -61,6 +65,19 @@ func loginWithSaml(c *Context, w http.ResponseWriter, r *http.Request) {
|
|||
relayProps["redirect_to"] = redirectURL
|
||||
}
|
||||
|
||||
// Forward SAML challenge values via RelayState so the complete step can prefer code-exchange
|
||||
if isMobile {
|
||||
if state != "" {
|
||||
relayProps["state"] = state
|
||||
}
|
||||
if codeChallenge != "" {
|
||||
relayProps["code_challenge"] = codeChallenge
|
||||
}
|
||||
if codeChallengeMethod != "" {
|
||||
relayProps["code_challenge_method"] = codeChallengeMethod
|
||||
}
|
||||
}
|
||||
|
||||
desktopToken := r.URL.Query().Get("desktop_token")
|
||||
if desktopToken != "" {
|
||||
relayProps["desktop_token"] = desktopToken
|
||||
|
|
@ -89,7 +106,7 @@ func completeSaml(c *Context, w http.ResponseWriter, r *http.Request) {
|
|||
return
|
||||
}
|
||||
|
||||
//Validate that the user is with SAML and all that
|
||||
// Validate that the user is with SAML and all that
|
||||
encodedXML := r.FormValue("SAMLResponse")
|
||||
relayState := r.FormValue("RelayState")
|
||||
|
||||
|
|
@ -144,7 +161,8 @@ func completeSaml(c *Context, w http.ResponseWriter, r *http.Request) {
|
|||
return
|
||||
}
|
||||
|
||||
if err = c.App.CheckUserAllAuthenticationCriteria(c.AppContext, user, ""); err != nil {
|
||||
err = c.App.CheckUserAllAuthenticationCriteria(c.AppContext, user, "")
|
||||
if err != nil {
|
||||
handleError(err)
|
||||
return
|
||||
}
|
||||
|
|
@ -220,7 +238,35 @@ func completeSaml(c *Context, w http.ResponseWriter, r *http.Request) {
|
|||
return
|
||||
}
|
||||
|
||||
// If it's not a desktop login we create a session for this SAML User that will be used in their browser or mobile app
|
||||
// Decide between legacy token-in-URL vs SAML code-exchange for mobile
|
||||
samlState := relayProps["state"]
|
||||
samlChallenge := relayProps["code_challenge"]
|
||||
samlMethod := relayProps["code_challenge_method"]
|
||||
|
||||
if isMobile && hasRedirectURL && samlChallenge != "" && c.App.Config().FeatureFlags.MobileSSOCodeExchange {
|
||||
// Issue one-time login_code bound to user and SAML challenge values; do not create a session here
|
||||
extra := model.MapToJSON(map[string]string{
|
||||
"user_id": user.Id,
|
||||
"state": samlState,
|
||||
"code_challenge": samlChallenge,
|
||||
"code_challenge_method": samlMethod,
|
||||
})
|
||||
|
||||
var code *model.Token
|
||||
code, err = c.App.CreateSamlRelayToken(model.TokenTypeSSOCodeExchange, extra)
|
||||
if err != nil {
|
||||
handleError(model.NewAppError("completeSaml", "app.recover.save.app_error", nil, "", http.StatusInternalServerError).Wrap(err))
|
||||
return
|
||||
}
|
||||
|
||||
redirectURL = utils.AppendQueryParamsToURL(redirectURL, map[string]string{
|
||||
"login_code": code.Token,
|
||||
})
|
||||
utils.RenderMobileAuthComplete(w, redirectURL)
|
||||
return
|
||||
}
|
||||
|
||||
// Legacy: create a session and attach tokens (web/mobile without SAML code exchange)
|
||||
session, err := c.App.DoLogin(c.AppContext, w, r, user, "", isMobile, false, true)
|
||||
if err != nil {
|
||||
handleError(err)
|
||||
|
|
@ -235,10 +281,13 @@ func completeSaml(c *Context, w http.ResponseWriter, r *http.Request) {
|
|||
if hasRedirectURL {
|
||||
if isMobile {
|
||||
// Mobile clients with redirect url support
|
||||
redirectURL = utils.AppendQueryParamsToURL(redirectURL, map[string]string{
|
||||
model.SessionCookieToken: c.AppContext.Session().Token,
|
||||
model.SessionCookieCsrf: c.AppContext.Session().GetCSRF(),
|
||||
})
|
||||
// Legacy mobile path: return tokens only when SAML code exchange was not requested
|
||||
if samlChallenge == "" {
|
||||
redirectURL = utils.AppendQueryParamsToURL(redirectURL, map[string]string{
|
||||
model.SessionCookieToken: c.AppContext.Session().Token,
|
||||
model.SessionCookieCsrf: c.AppContext.Session().GetCSRF(),
|
||||
})
|
||||
}
|
||||
utils.RenderMobileAuthComplete(w, redirectURL)
|
||||
} else {
|
||||
http.Redirect(w, r, redirectURL, http.StatusFound)
|
||||
|
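The completeSaml change only issues the one-time login_code and stores the challenge values; the later exchange step is not part of this hunk. Assuming the standard S256 PKCE scheme that the code_challenge_method parameter suggests, the check the exchange endpoint would perform looks roughly like this (entirely a sketch, not code from this changeset):

package example

import (
	"crypto/sha256"
	"crypto/subtle"
	"encoding/base64"
)

// challengeMatches compares a client-supplied code_verifier against the stored
// code_challenge. S256 is assumed; other methods are rejected in this sketch.
func challengeMatches(codeVerifier, storedChallenge, method string) bool {
	if method != "S256" {
		return false
	}
	sum := sha256.Sum256([]byte(codeVerifier))
	computed := base64.RawURLEncoding.EncodeToString(sum[:])
	return subtle.ConstantTimeCompare([]byte(computed), []byte(storedChallenge)) == 1
}
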
|
|
|||
|
|
@ -166,14 +166,11 @@ type Client interface {
|
|||
DeletePreferences(ctx context.Context, userId string, preferences model.Preferences) (*model.Response, error)
|
||||
PermanentDeletePost(ctx context.Context, postID string) (*model.Response, error)
|
||||
DeletePost(ctx context.Context, postId string) (*model.Response, error)
|
||||
|
||||
// CPA Field Management
|
||||
ListCPAFields(ctx context.Context) ([]*model.PropertyField, *model.Response, error)
|
||||
CreateCPAField(ctx context.Context, field *model.PropertyField) (*model.PropertyField, *model.Response, error)
|
||||
PatchCPAField(ctx context.Context, fieldID string, patch *model.PropertyFieldPatch) (*model.PropertyField, *model.Response, error)
|
||||
DeleteCPAField(ctx context.Context, fieldID string) (*model.Response, error)
|
||||
|
||||
// CPA Value Management
|
||||
ListCPAValues(ctx context.Context, userID string) (map[string]json.RawMessage, *model.Response, error)
|
||||
PatchCPAValues(ctx context.Context, values map[string]json.RawMessage) (map[string]json.RawMessage, *model.Response, error)
|
||||
PatchCPAValuesForUser(ctx context.Context, userID string, values map[string]json.RawMessage) (map[string]json.RawMessage, *model.Response, error)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,78 +0,0 @@
|
|||
// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
|
||||
// See LICENSE.txt for license information.
|
||||
|
||||
package commands
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"maps"
|
||||
|
||||
"github.com/mattermost/mattermost/server/public/model"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
var CPACmd = &cobra.Command{
|
||||
Use: "cpa",
|
||||
Short: "Management of Custom Profile Attributes",
|
||||
Long: "Management of Custom Profile Attributes (CPA) fields.",
|
||||
}
|
||||
|
||||
var CPAFieldCmd = &cobra.Command{
|
||||
Use: "field",
|
||||
Short: "Management of CPA fields",
|
||||
Long: "Create, list, edit, and delete Custom Profile Attribute fields.",
|
||||
}
|
||||
|
||||
func init() {
|
||||
CPACmd.AddCommand(
|
||||
CPAFieldCmd,
|
||||
)
|
||||
|
||||
RootCmd.AddCommand(CPACmd)
|
||||
}
|
||||
|
||||
// Helper function to build field attributes from command flags
|
||||
func buildFieldAttrs(cmd *cobra.Command) (model.StringInterface, error) {
|
||||
attrs := make(model.StringInterface)
|
||||
|
||||
// First parse --attrs if provided
|
||||
if attrsStr, err := cmd.Flags().GetString("attrs"); err == nil && attrsStr != "" && cmd.Flags().Changed("attrs") {
|
||||
var attrsMap map[string]any
|
||||
if err := json.Unmarshal([]byte(attrsStr), &attrsMap); err != nil {
|
||||
return nil, fmt.Errorf("failed to parse attrs JSON: %w", err)
|
||||
}
|
||||
// Copy to our attrs map
|
||||
maps.Copy(attrs, attrsMap)
|
||||
}
|
||||
|
||||
// Individual flags override --attrs (applied on top)
|
||||
if cmd.Flags().Changed("managed") {
|
||||
managed, _ := cmd.Flags().GetBool("managed")
|
||||
if managed {
|
||||
attrs["managed"] = "admin"
|
||||
} else {
|
||||
attrs["managed"] = ""
|
||||
}
|
||||
}
|
||||
|
||||
// Handle --option flags for select/multiselect fields
|
||||
if options, err := cmd.Flags().GetStringSlice("option"); err == nil && len(options) > 0 && cmd.Flags().Changed("option") {
|
||||
var selectOptions []*model.CustomProfileAttributesSelectOption
|
||||
for _, optionName := range options {
|
||||
selectOptions = append(selectOptions, &model.CustomProfileAttributesSelectOption{
|
||||
ID: model.NewId(),
|
||||
Name: optionName,
|
||||
})
|
||||
}
|
||||
attrs["options"] = selectOptions
|
||||
}
|
||||
|
||||
return attrs, nil
|
||||
}
|
||||
|
||||
func hasAttrsChanges(cmd *cobra.Command) bool {
|
||||
return cmd.Flags().Changed("managed") ||
|
||||
cmd.Flags().Changed("attrs") ||
|
||||
cmd.Flags().Changed("option")
|
||||
}
|
||||
|
|
@ -1,243 +0,0 @@
|
|||
// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
|
||||
// See LICENSE.txt for license information.
|
||||
|
||||
package commands
|
||||
|
||||
import (
|
||||
"github.com/mattermost/mattermost/server/public/model"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
func (s *MmctlUnitTestSuite) TestHasAttrsChanges() {
|
||||
testCases := []struct {
|
||||
Name string
|
||||
FlagChanges map[string]string // map of flag name -> value to set
|
||||
Expected bool
|
||||
}{
|
||||
{
|
||||
Name: "Should return true when managed flag is changed",
|
||||
FlagChanges: map[string]string{"managed": "true"},
|
||||
Expected: true,
|
||||
},
|
||||
{
|
||||
Name: "Should return true when attrs flag is changed",
|
||||
FlagChanges: map[string]string{"attrs": `{"visibility":"always"}`},
|
||||
Expected: true,
|
||||
},
|
||||
{
|
||||
Name: "Should return true when option flag is changed",
|
||||
FlagChanges: map[string]string{"option": "Go"},
|
||||
Expected: true,
|
||||
},
|
||||
{
|
||||
Name: "Should return true when multiple relevant flags are changed",
|
||||
FlagChanges: map[string]string{
|
||||
"managed": "true",
|
||||
"attrs": `{"visibility":"always"}`,
|
||||
"option": "Go",
|
||||
},
|
||||
Expected: true,
|
||||
},
|
||||
{
|
||||
Name: "Should return false when no relevant flags are changed",
|
||||
FlagChanges: map[string]string{}, // No flags set
|
||||
Expected: false,
|
||||
},
|
||||
{
|
||||
Name: "Should return false for other unrelated flag changes like name",
|
||||
FlagChanges: map[string]string{"name": "New Name"},
|
||||
Expected: false,
|
||||
},
|
||||
{
|
||||
Name: "Should return true when managed flag is changed along with unrelated flags",
|
||||
FlagChanges: map[string]string{
|
||||
"managed": "true",
|
||||
"name": "New Name",
|
||||
},
|
||||
Expected: true,
|
||||
},
|
||||
{
|
||||
Name: "Should return true when attrs flag is changed along with unrelated flags",
|
||||
FlagChanges: map[string]string{
|
||||
"attrs": `{"visibility":"always"}`,
|
||||
"name": "New Name",
|
||||
},
|
||||
Expected: true,
|
||||
},
|
||||
{
|
||||
Name: "Should return true when option flag is changed along with unrelated flags",
|
||||
FlagChanges: map[string]string{
|
||||
"option": "Go",
|
||||
"name": "New Name",
|
||||
},
|
||||
Expected: true,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tc := range testCases {
|
||||
s.Run(tc.Name, func() {
|
||||
cmd := &cobra.Command{}
|
||||
|
||||
// Set up all the flags that might be used
|
||||
cmd.Flags().Bool("managed", false, "")
|
||||
cmd.Flags().String("attrs", "", "")
|
||||
cmd.Flags().StringSlice("option", []string{}, "")
|
||||
cmd.Flags().String("name", "", "")
|
||||
|
||||
// Apply the flag changes for this test case
|
||||
for flagName, flagValue := range tc.FlagChanges {
|
||||
err := cmd.Flags().Set(flagName, flagValue)
|
||||
s.Require().NoError(err)
|
||||
}
|
||||
|
||||
result := hasAttrsChanges(cmd)
|
||||
s.Require().Equal(tc.Expected, result)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func (s *MmctlUnitTestSuite) TestBuildFieldAttrs() {
|
||||
testCases := []struct {
|
||||
Name string
|
||||
FlagChanges map[string]any // map of flag name -> value or []string for options
|
||||
Expected model.StringInterface
|
||||
ShouldError bool
|
||||
ErrorText string
|
||||
}{
|
||||
{
|
||||
Name: "Should return empty attrs when no flags are set",
|
||||
FlagChanges: map[string]any{},
|
||||
Expected: model.StringInterface{},
|
||||
ShouldError: false,
|
||||
},
|
||||
{
|
||||
Name: "Should create attrs with managed=admin when managed=true",
|
||||
FlagChanges: map[string]any{"managed": "true"},
|
||||
Expected: model.StringInterface{"managed": "admin"},
|
||||
ShouldError: false,
|
||||
},
|
||||
{
|
||||
Name: "Should create attrs with managed='' when managed=false",
|
||||
FlagChanges: map[string]any{"managed": "false"},
|
||||
Expected: model.StringInterface{"managed": ""},
|
||||
ShouldError: false,
|
||||
},
|
||||
{
|
||||
Name: "Should parse attrs JSON string and apply to StringInterface",
|
||||
FlagChanges: map[string]any{"attrs": `{"visibility":"always","required":true}`},
|
||||
Expected: model.StringInterface{"visibility": "always", "required": true},
|
||||
ShouldError: false,
|
||||
},
|
||||
{
|
||||
Name: "Should create CustomProfileAttributesSelectOption array with generated IDs for option flags",
|
||||
FlagChanges: map[string]any{"option": []string{"Go"}},
|
||||
Expected: model.StringInterface{},
|
||||
ShouldError: false,
|
||||
},
|
||||
{
|
||||
Name: "Should have individual flags override attrs JSON values",
|
||||
FlagChanges: map[string]any{
|
||||
"attrs": `{"visibility":"always","managed":""}`,
|
||||
"managed": "true", // Should override the managed="" from attrs
|
||||
},
|
||||
Expected: model.StringInterface{
|
||||
"visibility": "always",
|
||||
"managed": "admin", // Individual flag should override
|
||||
},
|
||||
ShouldError: false,
|
||||
},
|
||||
{
|
||||
Name: "Should handle error for invalid attrs JSON syntax",
|
||||
FlagChanges: map[string]any{"attrs": `{"invalid": json}`},
|
||||
Expected: nil,
|
||||
ShouldError: true,
|
||||
ErrorText: "failed to parse attrs JSON",
|
||||
},
|
||||
{
|
||||
Name: "Should combine managed and option flags correctly",
|
||||
FlagChanges: map[string]any{
|
||||
"managed": "true",
|
||||
"option": []string{"Go"},
|
||||
},
|
||||
Expected: model.StringInterface{"managed": "admin"},
|
||||
ShouldError: false,
|
||||
},
|
||||
{
|
||||
Name: "Should handle multiple option flags",
|
||||
FlagChanges: map[string]any{
|
||||
"option": []string{"Go", "React", "Python"},
|
||||
},
|
||||
Expected: model.StringInterface{},
|
||||
ShouldError: false,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tc := range testCases {
|
||||
s.Run(tc.Name, func() {
|
||||
cmd := &cobra.Command{}
|
||||
|
||||
// Set up all the flags that might be used
|
||||
cmd.Flags().Bool("managed", false, "")
|
||||
cmd.Flags().String("attrs", "", "")
|
||||
cmd.Flags().StringSlice("option", []string{}, "")
|
||||
|
||||
// Apply the flag changes for this test case
|
||||
for flagName, flagValue := range tc.FlagChanges {
|
||||
if flagName == "option" {
|
||||
// Handle option flag with list of values
|
||||
if options, ok := flagValue.([]string); ok {
|
||||
for _, optionName := range options {
|
||||
err := cmd.Flags().Set("option", optionName)
|
||||
s.Require().NoError(err)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// Handle other flags as strings
|
||||
if stringValue, ok := flagValue.(string); ok {
|
||||
err := cmd.Flags().Set(flagName, stringValue)
|
||||
s.Require().NoError(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
result, err := buildFieldAttrs(cmd)
|
||||
|
||||
if tc.ShouldError {
|
||||
s.Require().Error(err)
|
||||
s.Require().Contains(err.Error(), tc.ErrorText)
|
||||
s.Require().Nil(result)
|
||||
} else {
|
||||
s.Require().NoError(err)
|
||||
s.Require().NotNil(result)
|
||||
|
||||
// Check if we expect options based on FlagChanges
|
||||
var expectedOptions []string
|
||||
if optionValue, exists := tc.FlagChanges["option"]; exists {
|
||||
if options, ok := optionValue.([]string); ok {
|
||||
expectedOptions = options
|
||||
}
|
||||
}
|
||||
|
||||
// Validate options if specified
|
||||
if len(expectedOptions) > 0 {
|
||||
s.Require().Contains(result, "options")
|
||||
options, ok := result["options"].([]*model.CustomProfileAttributesSelectOption)
|
||||
s.Require().True(ok, "Options should be []*model.CustomProfileAttributesSelectOption")
|
||||
|
||||
optionNames := make([]string, len(options))
|
||||
for i, opt := range options {
|
||||
optionNames[i] = opt.Name
|
||||
s.Require().NotEmpty(opt.ID)
|
||||
}
|
||||
s.Require().ElementsMatch(expectedOptions, optionNames)
|
||||
}
|
||||
|
||||
// Standard validation for expected fields
|
||||
for key, expectedValue := range tc.Expected {
|
||||
s.Require().Contains(result, key)
|
||||
s.Require().Equal(expectedValue, result[key])
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
server/cmd/mmctl/commands/user_attributes.go (new file, 238 lines)
|
|
@ -0,0 +1,238 @@
|
|||
// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
|
||||
// See LICENSE.txt for license information.
|
||||
|
||||
package commands
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"maps"
|
||||
"strings"
|
||||
|
||||
"github.com/mattermost/mattermost/server/public/model"
|
||||
"github.com/mattermost/mattermost/server/v8/cmd/mmctl/client"
|
||||
"github.com/mattermost/mattermost/server/v8/cmd/mmctl/printer"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
var UserAttributesCmd = &cobra.Command{
|
||||
Use: "attributes",
|
||||
Aliases: []string{"attrs", "cpa"},
|
||||
Short: "Management of User Attributes",
|
||||
Long: "Management of User Attributes fields and values.",
|
||||
}
|
||||
|
||||
var UserAttributesFieldCmd = &cobra.Command{
|
||||
Use: "field",
|
||||
Short: "Management of User Attributes fields",
|
||||
Long: "Create, list, edit, and delete User Attribute fields.",
|
||||
}
|
||||
|
||||
var UserAttributesValueCmd = &cobra.Command{
|
||||
Use: "value",
|
||||
Short: "Management of User Attributes values",
|
||||
Long: "List, set, and delete User Attribute values for users.",
|
||||
}
|
||||
|
||||
func init() {
|
||||
UserAttributesCmd.AddCommand(
|
||||
UserAttributesFieldCmd,
|
||||
UserAttributesValueCmd,
|
||||
)
|
||||
|
||||
UserCmd.AddCommand(UserAttributesCmd)
|
||||
}
|
||||
|
||||
// Helper function to build field attributes from command flags. If existingAttrs is
|
||||
// provided, it will be used as the base and merged with flag changes
|
||||
func buildFieldAttrs(cmd *cobra.Command, existingAttrs model.StringInterface) (model.StringInterface, error) {
|
||||
var attrs = make(model.StringInterface)
|
||||
if existingAttrs != nil {
|
||||
maps.Copy(attrs, existingAttrs)
|
||||
}
|
||||
|
||||
// First parse --attrs if provided
|
||||
if attrsStr, err := cmd.Flags().GetString("attrs"); err == nil && attrsStr != "" && cmd.Flags().Changed("attrs") {
|
||||
var attrsMap map[string]any
|
||||
if err := json.Unmarshal([]byte(attrsStr), &attrsMap); err != nil {
|
||||
return nil, fmt.Errorf("failed to parse attrs JSON: %w", err)
|
||||
}
|
||||
// Copy to our attrs map
|
||||
maps.Copy(attrs, attrsMap)
|
||||
}
|
||||
|
||||
// Individual flags override --attrs (applied on top)
|
||||
if cmd.Flags().Changed("managed") {
|
||||
managed, _ := cmd.Flags().GetBool("managed")
|
||||
if managed {
|
||||
attrs["managed"] = "admin"
|
||||
} else {
|
||||
attrs["managed"] = ""
|
||||
}
|
||||
}
|
||||
|
||||
// Handle --option flags for select/multiselect fields
|
||||
if options, err := cmd.Flags().GetStringSlice("option"); err == nil && len(options) > 0 && cmd.Flags().Changed("option") {
|
||||
var selectOptions []*model.CustomProfileAttributesSelectOption
|
||||
|
||||
existingOptionsMap := make(map[string]*model.CustomProfileAttributesSelectOption)
|
||||
if existingOptions, ok := attrs["options"]; ok {
|
||||
existingOptionsJSON, err := json.Marshal(existingOptions)
|
||||
if err == nil {
|
||||
var existingSelectOptions []*model.CustomProfileAttributesSelectOption
|
||||
if err := json.Unmarshal(existingOptionsJSON, &existingSelectOptions); err == nil {
|
||||
for _, option := range existingSelectOptions {
|
||||
existingOptionsMap[option.Name] = option
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for _, optionName := range options {
|
||||
if existingOption, exists := existingOptionsMap[optionName]; exists {
|
||||
selectOptions = append(selectOptions, existingOption)
|
||||
} else {
|
||||
selectOptions = append(selectOptions, &model.CustomProfileAttributesSelectOption{
|
||||
ID: model.NewId(),
|
||||
Name: optionName,
|
||||
})
|
||||
}
|
||||
}
|
||||
attrs["options"] = selectOptions
|
||||
}
|
||||
|
||||
return attrs, nil
|
||||
}
|
||||
|
||||
func hasAttrsChanges(cmd *cobra.Command) bool {
|
||||
return cmd.Flags().Changed("managed") ||
|
||||
cmd.Flags().Changed("attrs") ||
|
||||
cmd.Flags().Changed("option")
|
||||
}
|
||||
|
||||
func getFieldFromArg(c client.Client, fieldArg string) (*model.PropertyField, error) {
|
||||
fields, _, err := c.ListCPAFields(context.TODO())
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to get CPA fields: %w", err)
|
||||
}
|
||||
|
||||
if model.IsValidId(fieldArg) {
|
||||
for _, field := range fields {
|
||||
if field.ID == fieldArg {
|
||||
return field, nil
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for _, field := range fields {
|
||||
if field.Name == fieldArg {
|
||||
return field, nil
|
||||
}
|
||||
}
|
||||
|
||||
return nil, fmt.Errorf("failed to get field for %q", fieldArg)
|
||||
}
|
||||
|
||||
// setupCPATemplateContext sets up template functions for field and value resolution
|
||||
func setupCPATemplateContext(c client.Client) error {
|
||||
// Get all fields once for the entire command
|
||||
fields, _, err := c.ListCPAFields(context.TODO())
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to get CPA fields for template context: %w", err)
|
||||
}
|
||||
|
||||
fieldMap := make(map[string]*model.PropertyField)
|
||||
for _, field := range fields {
|
||||
fieldMap[field.ID] = field
|
||||
}
|
||||
|
||||
// Set template function to resolve field ID to field name
|
||||
printer.SetTemplateFunc("fieldName", func(fieldID string) string {
|
||||
if field, exists := fieldMap[fieldID]; exists {
|
||||
return field.Name
|
||||
}
|
||||
return fieldID // fallback to field ID if not found
|
||||
})
|
||||
|
||||
// Set template function to get field type
|
||||
printer.SetTemplateFunc("fieldType", func(fieldID string) string {
|
||||
if field, exists := fieldMap[fieldID]; exists {
|
||||
return string(field.Type)
|
||||
}
|
||||
return "unknown"
|
||||
})
|
||||
|
||||
// Set template function to resolve field value to human-readable format
|
||||
printer.SetTemplateFunc("resolveValue", func(fieldID string, rawValue json.RawMessage) string {
|
||||
field, exists := fieldMap[fieldID]
|
||||
if !exists {
|
||||
return string(rawValue)
|
||||
}
|
||||
|
||||
return resolveDisplayValue(field, rawValue)
|
||||
})
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// resolveDisplayValue converts raw field values to human-readable display format
|
||||
func resolveDisplayValue(field *model.PropertyField, rawValue json.RawMessage) string {
|
||||
switch field.Type {
|
||||
case model.PropertyFieldTypeSelect, model.PropertyFieldTypeMultiselect:
|
||||
return resolveOptionDisplayValue(field, rawValue)
|
||||
default:
|
||||
var value any
|
||||
if err := json.Unmarshal(rawValue, &value); err != nil {
|
||||
return string(rawValue)
|
||||
}
|
||||
return fmt.Sprintf("%v", value)
|
||||
}
|
||||
}
|
||||
|
||||
// resolveOptionDisplayValue converts option IDs to option names for select/multiselect fields
|
||||
func resolveOptionDisplayValue(field *model.PropertyField, rawValue json.RawMessage) string {
|
||||
// Convert PropertyField to CPAField to access options
|
||||
cpaField, err := model.NewCPAFieldFromPropertyField(field)
|
||||
if err != nil {
|
||||
return string(rawValue)
|
||||
}
|
||||
|
||||
if len(cpaField.Attrs.Options) == 0 {
|
||||
return string(rawValue)
|
||||
}
|
||||
|
||||
// Create option lookup map
|
||||
optionMap := make(map[string]string)
|
||||
for _, option := range cpaField.Attrs.Options {
|
||||
optionMap[option.ID] = option.Name
|
||||
}
|
||||
|
||||
if field.Type == model.PropertyFieldTypeSelect {
|
||||
// Single select - expect a string
|
||||
var optionID string
|
||||
if err := json.Unmarshal(rawValue, &optionID); err != nil {
|
||||
return string(rawValue)
|
||||
}
|
||||
if optionName, exists := optionMap[optionID]; exists {
|
||||
return optionName
|
||||
}
|
||||
return optionID
|
||||
}
|
||||
|
||||
// Multiselect - expect an array
|
||||
var optionIDs []string
|
||||
if err := json.Unmarshal(rawValue, &optionIDs); err != nil {
|
||||
return string(rawValue)
|
||||
}
|
||||
|
||||
optionNames := make([]string, 0, len(optionIDs))
|
||||
for _, optionID := range optionIDs {
|
||||
if optionName, exists := optionMap[optionID]; exists {
|
||||
optionNames = append(optionNames, optionName)
|
||||
} else {
|
||||
optionNames = append(optionNames, optionID)
|
||||
}
|
||||
}
|
||||
return fmt.Sprintf("[%s]", strings.Join(optionNames, ", "))
|
||||
}
|
||||
|
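The attribute-merging rules in buildFieldAttrs are easiest to see in isolation: existing attrs form the base, the --attrs JSON is merged over them, and explicit flags such as --managed win last. A standalone sketch of that precedence (the values are invented):

package main

import (
	"fmt"
	"maps"

	"github.com/mattermost/mattermost/server/public/model"
)

func main() {
	existing := model.StringInterface{"visibility": "when_set", "managed": ""}
	fromAttrsFlag := map[string]any{"visibility": "always"}

	attrs := make(model.StringInterface)
	maps.Copy(attrs, existing)      // base: what the field already has
	maps.Copy(attrs, fromAttrsFlag) // --attrs overrides the base
	attrs["managed"] = "admin"      // --managed=true overrides both

	fmt.Println(attrs) // map[managed:admin visibility:always]
}
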
|
@ -19,41 +19,41 @@ import (
|
|||
|
||||
var CPAFieldListCmd = &cobra.Command{
|
||||
Use: "list",
|
||||
Short: "List CPA fields",
|
||||
Long: "List all Custom Profile Attribute fields with their properties.",
|
||||
Example: ` cpa field list`,
|
||||
Short: "List User Attributes fields",
|
||||
Long: "List all User Attributes fields with their properties.",
|
||||
Example: ` user attributes field list`,
|
||||
Args: cobra.NoArgs,
|
||||
RunE: withClient(cpaFieldListCmdF),
|
||||
}
|
||||
|
||||
var CPAFieldCreateCmd = &cobra.Command{
|
||||
Use: "create [name] [type]",
|
||||
Short: "Create a CPA field",
|
||||
Long: `Create a new Custom Profile Attribute field with the specified name and type.`,
|
||||
Example: ` cpa field create "Department" text --managed
|
||||
cpa field create "Skills" multiselect --option Go --option React --option Python
|
||||
cpa field create "Level" select --attrs '{"visibility":"always"}'`,
|
||||
Short: "Create a User Attributes field",
|
||||
Long: `Create a new User Attributes field with the specified name and type.`,
|
||||
Example: ` user attributes field create "Department" text --managed
|
||||
user attributes field create "Skills" multiselect --option Go --option React --option Python
|
||||
user attributes field create "Level" select --attrs '{"visibility":"always"}'`,
|
||||
Args: cobra.ExactArgs(2),
|
||||
RunE: withClient(cpaFieldCreateCmdF),
|
||||
}
|
||||
|
||||
var CPAFieldEditCmd = &cobra.Command{
|
||||
Use: "edit [field-id]",
|
||||
Short: "Edit a CPA field",
|
||||
Long: "Edit an existing Custom Profile Attribute field.",
|
||||
Example: ` cpa field edit n4qdbtro4j8x3n8z81p48ww9gr --name "Department Name" --managed
|
||||
cpa field edit 8kj9xm4p6f3y7n2z9q5w8r1t4v --option Go --option React --option Python --option Java
|
||||
cpa field edit 3h7k9m2x5b8v4n6p1q9w7r3t2y --managed=false`,
|
||||
Use: "edit [field]",
|
||||
Short: "Edit a User Attributes field",
|
||||
Long: "Edit an existing User Attributes field.",
|
||||
Example: ` user attributes field edit n4qdbtro4j8x3n8z81p48ww9gr --name "Department Name" --managed
|
||||
user attributes field edit Department --option Go --option React --option Python --option Java
|
||||
user attributes field edit Skills --managed=false`,
|
||||
Args: cobra.ExactArgs(1),
|
||||
RunE: withClient(cpaFieldEditCmdF),
|
||||
}
|
||||
|
||||
var CPAFieldDeleteCmd = &cobra.Command{
|
||||
Use: "delete [field-id]",
|
||||
Short: "Delete a CPA field",
|
||||
Long: "Delete a Custom Profile Attribute field. This will automatically delete all user values for this field.",
|
||||
Example: ` cpa field delete n4qdbtro4j8x3n8z81p48ww9gr --confirm
|
||||
cpa field delete 8kj9xm4p6f3y7n2z9q5w8r1t4v --confirm`,
|
||||
Use: "delete [field]",
|
||||
Short: "Delete a User Attributes field",
|
||||
Long: "Delete a User Attributes field. This will automatically delete all user values for this field.",
|
||||
Example: ` user attributes field delete n4qdbtro4j8x3n8z81p48ww9gr --confirm
|
||||
user attributes field delete Department --confirm`,
|
||||
Args: cobra.ExactArgs(1),
|
||||
RunE: withClient(cpaFieldDeleteCmdF),
|
||||
}
|
||||
|
|
@ -73,8 +73,8 @@ func init() {
|
|||
// Delete flags
|
||||
CPAFieldDeleteCmd.Flags().Bool("confirm", false, "Bypass confirmation prompt")
|
||||
|
||||
// Add subcommands to CPAFieldCmd
|
||||
CPAFieldCmd.AddCommand(
|
||||
// Add subcommands to UserAttributesFieldCmd
|
||||
UserAttributesFieldCmd.AddCommand(
|
||||
CPAFieldListCmd,
|
||||
CPAFieldCreateCmd,
|
||||
CPAFieldEditCmd,
|
||||
|
|
@ -164,7 +164,7 @@ func cpaFieldCreateCmdF(c client.Client, cmd *cobra.Command, args []string) erro
|
|||
}
|
||||
|
||||
// Build attrs from flags
|
||||
attrs, err := buildFieldAttrs(cmd)
|
||||
attrs, err := buildFieldAttrs(cmd, nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
|
@ -199,7 +199,10 @@ func cpaFieldCreateCmdF(c client.Client, cmd *cobra.Command, args []string) erro
|
|||
}
|
||||
|
||||
func cpaFieldEditCmdF(c client.Client, cmd *cobra.Command, args []string) error {
|
||||
fieldID := args[0]
|
||||
field, fErr := getFieldFromArg(c, args[0])
|
||||
if fErr != nil {
|
||||
return fErr
|
||||
}
|
||||
|
||||
// Build patch object
|
||||
patch := &model.PropertyFieldPatch{}
|
||||
|
|
@ -211,7 +214,7 @@ func cpaFieldEditCmdF(c client.Client, cmd *cobra.Command, args []string) error
|
|||
|
||||
// Build attrs from flags if any changes
|
||||
if hasAttrsChanges(cmd) {
|
||||
attrs, err := buildFieldAttrs(cmd)
|
||||
attrs, err := buildFieldAttrs(cmd, field.Attrs)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
|
@ -221,7 +224,7 @@ func cpaFieldEditCmdF(c client.Client, cmd *cobra.Command, args []string) error
|
|||
}
|
||||
|
||||
// Update the field
|
||||
updatedField, _, err := c.PatchCPAField(context.TODO(), fieldID, patch)
|
||||
updatedField, _, err := c.PatchCPAField(context.TODO(), field.ID, patch)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to update CPA field: %w", err)
|
||||
}
|
||||
|
|
@ -247,8 +250,6 @@ func cpaFieldEditCmdF(c client.Client, cmd *cobra.Command, args []string) error
|
|||
}
|
||||
|
||||
func cpaFieldDeleteCmdF(c client.Client, cmd *cobra.Command, args []string) error {
|
||||
fieldID := args[0]
|
||||
|
||||
confirmFlag, _ := cmd.Flags().GetBool("confirm")
|
||||
if !confirmFlag {
|
||||
if err := getConfirmation("Are you sure you want to delete this CPA field?", true); err != nil {
|
||||
|
|
@ -256,13 +257,18 @@ func cpaFieldDeleteCmdF(c client.Client, cmd *cobra.Command, args []string) erro
|
|||
}
|
||||
}
|
||||
|
||||
field, fErr := getFieldFromArg(c, args[0])
|
||||
if fErr != nil {
|
||||
return fErr
|
||||
}
|
||||
|
||||
// Delete the field
|
||||
_, err := c.DeleteCPAField(context.TODO(), fieldID)
|
||||
_, err := c.DeleteCPAField(context.TODO(), field.ID)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to delete CPA field: %w", err)
|
||||
}
|
||||
|
||||
printer.SetSingle(true)
|
||||
printer.Print(fmt.Sprintf("Successfully deleted CPA field: %s", fieldID))
|
||||
printer.Print(fmt.Sprintf("Successfully deleted CPA field: %s", args[0]))
|
||||
return nil
|
||||
}
|
||||