Mirror of https://github.com/mattermost/focalboard.git, synced 2025-04-20 12:07:24 -04:00
Compare commits
36 commits
Commits (SHA1):

886f50e342, 93a57ecfe0, 89a24960cc, 369ebd5fd6, d43ee90fb2, 510e0931bf,
7d26b768bf, 59e10fb860, 33c452783a, 70893582a2, 43f3f4af66, 6556a8dd10,
c9e8dd3d8d, ec292f661f, 8de83a1de1, 98672be3e7, fec3f7a5be, 07cd0e585d,
610cab5dea, 4054294f5b, 5310193fa6, 4bb974c3f4, 917ca6b918, 136fae748d,
c2efb74d0d, 9e1dd374f2, 1f09a94161, 6b36a6fdc9, c9bd3b8e20, 4ca234ec7f,
b979baab37, 1c0393716c, fee3ef7963, f68ed3d7fe, c8167483f5, a6936552cb
44 changed files with 793 additions and 125 deletions
.github/workflows/ci.yml (vendored): 12 changed lines

@@ -15,7 +15,7 @@ env:
 jobs:
   ci-ubuntu-server:
-    runs-on: ubuntu-18.04
+    runs-on: ubuntu-20.04
     strategy:
       matrix:
@@ -44,7 +44,7 @@ jobs:
         repository: "mattermost/mattermost-server"
         fetch-depth: "20"
         path: "mattermost-server"
-        ref : "master"
+        ref : "b61c096497ac1f22f64b77afe58d0dd5a72b38f1"
     - name: Set up Go
       uses: actions/setup-go@v3
       with:
@@ -54,7 +54,7 @@ jobs:
       run: cd focalboard; make server-test-${{matrix['db']}}
   ci-ubuntu-webapp:
-    runs-on: ubuntu-18.04
+    runs-on: ubuntu-20.04
     steps:
     - name: Checkout
       uses: actions/checkout@v3
@@ -74,7 +74,7 @@ jobs:
         repository: "mattermost/mattermost-server"
         fetch-depth: "20"
         path: "mattermost-server"
-        ref : "master"
+        ref : "b61c096497ac1f22f64b77afe58d0dd5a72b38f1"
     - name: npm ci
       run: |
        cd focalboard/webapp && npm ci && cd -
@@ -132,7 +132,7 @@ jobs:
         repository: "mattermost/mattermost-server"
         fetch-depth: "20"
         path: "mattermost-server"
-        ref : "master"
+        ref : "b61c096497ac1f22f64b77afe58d0dd5a72b38f1"

     - name: Set up Go
       uses: actions/setup-go@v3
@@ -169,7 +169,7 @@ jobs:
         repository: "mattermost/mattermost-server"
         fetch-depth: "20"
         path: "mattermost-server"
-        ref : "master"
+        ref : "b61c096497ac1f22f64b77afe58d0dd5a72b38f1"

     - name: Set up Go
       uses: actions/setup-go@v3
.github/workflows/dev-release.yml (vendored): 32 changed lines

@@ -8,14 +8,14 @@ on:
   workflow_dispatch:

 env:
-  BRANCH_NAME: ${{ github.head_ref || github.ref_name }}
+  BRANCH_NAME: ${{ github.head_ref || github.ref_name }}
   EXCLUDE_ENTERPRISE: true

 jobs:
   ubuntu:
-    runs-on: ubuntu-18.04
+    runs-on: ubuntu-20.04
     steps:
     - uses: actions/checkout@v3
       with:
@@ -25,16 +25,16 @@ jobs:
       continue-on-error: true
       with:
         repository: "mattermost/mattermost-server"
-        fetch-depth: "20"
+        fetch-depth: "20"
         path: "mattermost-server"
         ref: ${{ env.BRANCH_NAME }}
     - uses: actions/checkout@v3
       if: steps.mattermostServer.outcome == 'failure'
       with:
         repository: "mattermost/mattermost-server"
-        fetch-depth: "20"
+        fetch-depth: "20"
         path: "mattermost-server"
-        ref : "master"
+        ref : "b61c096497ac1f22f64b77afe58d0dd5a72b38f1"

     - name: Replace token 1 server
       run: sed -i -e "s,placeholder_rudder_dataplane_url,${{ secrets.RUDDER_DATAPLANE_URL }},g" ${{ github.workspace }}/focalboard/server/services/telemetry/telemetry.go
@@ -101,16 +101,16 @@ jobs:
  (same change as the -25,16 hunk above: fetch-depth re-indented and ref "master" pinned to b61c096497ac1f22f64b77afe58d0dd5a72b38f1 for this job's two mattermost-server checkout steps, followed by the "Replace token 1 server" step)
@@ -159,16 +159,16 @@ jobs:
  (same change again for the next job's mattermost-server checkout steps)
@@ -218,7 +218,7 @@ jobs:
       path: ${{ github.workspace }}/focalboard/win-wpf/dist/focalboard-win.zip

   plugin:
-    runs-on: ubuntu-18.04
+    runs-on: ubuntu-20.04
     steps:
     - uses: actions/checkout@v3
@@ -229,16 +229,16 @@ jobs:
  (same checkout pin change as the -25,16 hunk, for the plugin job)
.github/workflows/lint-server.yml (vendored): 6 changed lines

@@ -13,7 +13,7 @@ env:
 jobs:
   down-migrations:
-    runs-on: ubuntu-18.04
+    runs-on: ubuntu-20.04
     steps:
     - uses: actions/checkout@v3
       with:
@@ -26,7 +26,7 @@ jobs:
   golangci:
     name: plugin
-    runs-on: ubuntu-18.04
+    runs-on: ubuntu-20.04
     steps:
     - uses: actions/setup-go@v3
       with:
@@ -48,7 +48,7 @@ jobs:
         repository: "mattermost/mattermost-server"
         fetch-depth: "20"
         path: "mattermost-server"
-        ref : "master"
+        ref : "b61c096497ac1f22f64b77afe58d0dd5a72b38f1"
     - name: set up golangci-lint
       run: curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b $(go env GOPATH)/bin v1.50.1
     - name: lint
.github/workflows/prod-release.yml (vendored): 28 changed lines

@@ -9,7 +9,7 @@ env:
 jobs:
   ubuntu:
-    runs-on: ubuntu-18.04
+    runs-on: ubuntu-20.04
     steps:
     - name: Checkout
@@ -21,16 +21,16 @@ jobs:
       continue-on-error: true
       with:
         repository: "mattermost/mattermost-server"
-        fetch-depth: "20"
+        fetch-depth: "20"
         path: "mattermost-server"
         ref: ${{ env.BRANCH_NAME }}
     - uses: actions/checkout@v3
       if: steps.mattermostServer.outcome == 'failure'
       with:
        repository: "mattermost/mattermost-server"
-        fetch-depth: "20"
+        fetch-depth: "20"
        path: "mattermost-server"
-        ref : "master"
+        ref : "b61c096497ac1f22f64b77afe58d0dd5a72b38f1"

     - name: Replace token 1 server
       run: sed -i -e "s,placeholder_rudder_dataplane_url,${{ secrets.RUDDER_DATAPLANE_URL }},g" ${{ github.workspace }}/focalboard/server/services/telemetry/telemetry.go
@@ -97,16 +97,16 @@ jobs:
  (same change as the -21,16 hunk above: fetch-depth re-indented and ref "master" pinned to b61c096497ac1f22f64b77afe58d0dd5a72b38f1 for this job's two mattermost-server checkout steps)
@@ -156,16 +156,16 @@ jobs:
  (same change again for the next job)
@@ -216,7 +216,7 @@ jobs:
       path: ${{ github.workspace }}/focalboard/win-wpf/dist/focalboard-win.zip

   plugin-release:
-    runs-on: ubuntu-18.04
+    runs-on: ubuntu-20.04
     steps:
     - name: Checkout
@@ -228,16 +228,16 @@ jobs:
  (same checkout pin change as the -21,16 hunk, for the plugin-release job)
@@ -6,7 +6,7 @@
     "support_url": "https://github.com/mattermost/focalboard/issues",
     "release_notes_url": "https://github.com/mattermost/focalboard/releases",
     "icon_path": "assets/starter-template-icon.svg",
-    "version": "7.9.0",
+    "version": "7.9.7",
     "min_server_version": "7.2.0",
     "server": {
         "executables": {
mattermost-plugin/server/manifest.go (generated): 2 changed lines

@@ -20,7 +20,7 @@ const manifestStr = `
     "support_url": "https://github.com/mattermost/focalboard/issues",
     "release_notes_url": "https://github.com/mattermost/focalboard/releases",
     "icon_path": "assets/starter-template-icon.svg",
-    "version": "7.9.0",
+    "version": "7.9.7",
     "min_server_version": "7.2.0",
     "server": {
         "executables": {
@@ -96,3 +96,9 @@ exports[`components/boardsUnfurl/BoardsUnfurl renders when limited 1`] = `
     </a>
 </div>
 `;
+
+exports[`components/boardsUnfurl/BoardsUnfurl test invalid card, invalid block 1`] = `<div />`;
+
+exports[`components/boardsUnfurl/BoardsUnfurl test invalid card, valid block 1`] = `<div />`;
+
+exports[`components/boardsUnfurl/BoardsUnfurl test no card 1`] = `<div />`;
@@ -10,6 +10,8 @@ import {Provider as ReduxProvider} from 'react-redux'
 import {mocked} from 'jest-mock'

+import {createBoardView} from '../../../../../webapp/src/blocks/boardView'
+
 import {Utils} from '../../../../../webapp/src/utils'
 import {createCard} from '../../../../../webapp/src/blocks/card'
 import {createBoard} from '../../../../../webapp/src/blocks/board'
@@ -116,5 +118,118 @@ describe('components/boardsUnfurl/BoardsUnfurl', () => {

         expect(container).toMatchSnapshot()
     })
+
+    it('test no card', async () => {
+        const mockStore = configureStore([])
+        const store = mockStore({
+            language: {
+                value: 'en',
+            },
+            teams: {
+                allTeams: [team],
+                current: team,
+            },
+        })
+
+        const board = {...createBoard(), title: 'test board'}
+        // mockedOctoClient.getBoard.mockResolvedValueOnce(board)
+
+        const component = (
+            <ReduxProvider store={store}>
+                {wrapIntl(
+                    <BoardsUnfurl
+                        embed={{data: JSON.stringify({workspaceID: 'foo', cardID: '', boardID: board.id, readToken: 'abc', originalPath: '/test'})}}
+                    />,
+                )}
+            </ReduxProvider>
+        )
+
+        let container: Element | DocumentFragment | null = null
+
+        await act(async () => {
+            const result = render(component)
+            container = result.container
+        })
+        expect(container).toMatchSnapshot()
+    })
+
+    it('test invalid card, valid block', async () => {
+        const mockStore = configureStore([])
+        const store = mockStore({
+            language: {
+                value: 'en',
+            },
+            teams: {
+                allTeams: [team],
+                current: team,
+            },
+        })
+
+        const cards = [{...createBoardView(), title: 'test view', updateAt: 12345}]
+        const board = {...createBoard(), title: 'test board'}
+
+        mockedOctoClient.getBlocksWithBlockID.mockResolvedValueOnce(cards)
+        mockedOctoClient.getBoard.mockResolvedValueOnce(board)
+
+        const component = (
+            <ReduxProvider store={store}>
+                {wrapIntl(
+                    <BoardsUnfurl
+                        embed={{data: JSON.stringify({workspaceID: 'foo', cardID: cards[0].id, boardID: board.id, readToken: 'abc', originalPath: '/test'})}}
+                    />,
+                )}
+            </ReduxProvider>
+        )
+
+        let container: Element | DocumentFragment | null = null
+
+        await act(async () => {
+            const result = render(component)
+            container = result.container
+        })
+        expect(mockedOctoClient.getBoard).toBeCalledWith(board.id)
+        expect(mockedOctoClient.getBlocksWithBlockID).toBeCalledWith(cards[0].id, board.id, 'abc')
+
+        expect(container).toMatchSnapshot()
+    })
+
+    it('test invalid card, invalid block', async () => {
+        const mockStore = configureStore([])
+        const store = mockStore({
+            language: {
+                value: 'en',
+            },
+            teams: {
+                allTeams: [team],
+                current: team,
+            },
+        })
+
+        const board = {...createBoard(), title: 'test board'}
+
+        mockedOctoClient.getBlocksWithBlockID.mockResolvedValueOnce([])
+        mockedOctoClient.getBoard.mockResolvedValueOnce(board)
+
+        const component = (
+            <ReduxProvider store={store}>
+                {wrapIntl(
+                    <BoardsUnfurl
+                        embed={{data: JSON.stringify({workspaceID: 'foo', cardID: 'invalidCard', boardID: board.id, readToken: 'abc', originalPath: '/test'})}}
+                    />,
+                )}
+            </ReduxProvider>
+        )
+
+        let container: Element | DocumentFragment | null = null
+
+        await act(async () => {
+            const result = render(component)
+            container = result.container
+        })
+        expect(mockedOctoClient.getBoard).toBeCalledWith(board.id)
+        expect(mockedOctoClient.getBlocksWithBlockID).toBeCalledWith('invalidCard', board.id, 'abc')
+
+        expect(container).toMatchSnapshot()
+    })
 })
@@ -84,7 +84,7 @@ export const BoardsUnfurl = (props: Props): JSX.Element => {
             ],
         )
         const [firstCard] = cards as Card[]
-        if (!firstCard || !fetchedBoard) {
+        if (!firstCard || !fetchedBoard || firstCard.type !== 'card') {
             setLoading(false)
             return null
         }
@@ -116,7 +116,7 @@ export const BoardsUnfurl = (props: Props): JSX.Element => {
     useWebsockets(currentTeamId, (wsClient: WSClient) => {
         const onChangeHandler = (_: WSClient, blocks: Block[]): void => {
             const cardBlock: Block|undefined = blocks.find(b => b.id === cardID)
-            if (cardBlock && !cardBlock.deleteAt) {
+            if (cardBlock && !cardBlock.deleteAt && cardBlock.type === 'card') {
                 setCard(cardBlock as Card)
             }
@@ -8,6 +8,8 @@ import (
     "errors"
+    "io"
     "net/http"
+    "net/url"
     "strconv"
     "strings"
     "time"
@@ -20,9 +22,30 @@
     mmModel "github.com/mattermost/mattermost-server/v6/model"

     "github.com/mattermost/mattermost-server/v6/shared/mlog"
-    "github.com/mattermost/mattermost-server/v6/shared/web"
 )

+var UnsafeContentTypes = [...]string{
+    "application/javascript",
+    "application/ecmascript",
+    "text/javascript",
+    "text/ecmascript",
+    "application/x-javascript",
+    "text/html",
+}
+
+var MediaContentTypes = [...]string{
+    "image/jpeg",
+    "image/png",
+    "image/bmp",
+    "image/gif",
+    "image/tiff",
+    "video/avi",
+    "video/mpeg",
+    "video/mp4",
+    "audio/mpeg",
+    "audio/wav",
+}
+
 // FileUploadResponse is the response to a file upload
 // swagger:model
 type FileUploadResponse struct {
@@ -170,10 +193,74 @@ func (a *API) handleServeFile(w http.ResponseWriter, r *http.Request) {
     }

     defer fileReader.Close()
-    web.WriteFileResponse(filename, fileInfo.MimeType, fileInfo.Size, time.Now(), "", fileReader, false, w, r)
+    mimeType := ""
+    var fileSize int64
+    if fileInfo != nil {
+        mimeType = fileInfo.MimeType
+        fileSize = fileInfo.Size
+    }
+    writeFileResponse(filename, mimeType, fileSize, time.Now(), "", fileReader, false, w, r)
     auditRec.Success()
 }

+func writeFileResponse(filename string, contentType string, contentSize int64,
+    lastModification time.Time, webserverMode string, fileReader io.ReadSeeker, forceDownload bool, w http.ResponseWriter, r *http.Request) {
+    w.Header().Set("Cache-Control", "private, no-cache")
+    w.Header().Set("X-Content-Type-Options", "nosniff")
+
+    if contentSize > 0 {
+        contentSizeStr := strconv.Itoa(int(contentSize))
+        if webserverMode == "gzip" {
+            w.Header().Set("X-Uncompressed-Content-Length", contentSizeStr)
+        } else {
+            w.Header().Set("Content-Length", contentSizeStr)
+        }
+    }
+
+    if contentType == "" {
+        contentType = "application/octet-stream"
+    } else {
+        for _, unsafeContentType := range UnsafeContentTypes {
+            if strings.HasPrefix(contentType, unsafeContentType) {
+                contentType = "text/plain"
+                break
+            }
+        }
+    }
+
+    w.Header().Set("Content-Type", contentType)
+
+    var toDownload bool
+    if forceDownload {
+        toDownload = true
+    } else {
+        isMediaType := false
+
+        for _, mediaContentType := range MediaContentTypes {
+            if strings.HasPrefix(contentType, mediaContentType) {
+                isMediaType = true
+                break
+            }
+        }
+
+        toDownload = !isMediaType
+    }
+
+    filename = url.PathEscape(filename)
+
+    if toDownload {
+        w.Header().Set("Content-Disposition", "attachment;filename=\""+filename+"\"; filename*=UTF-8''"+filename)
+    } else {
+        w.Header().Set("Content-Disposition", "inline;filename=\""+filename+"\"; filename*=UTF-8''"+filename)
+    }
+
+    // prevent file links from being embedded in iframes
+    w.Header().Set("X-Frame-Options", "DENY")
+    w.Header().Set("Content-Security-Policy", "Frame-ancestors 'none'")
+
+    http.ServeContent(w, r, filename, lastModification, fileReader)
+}
+
 func (a *API) getFileInfo(w http.ResponseWriter, r *http.Request) {
     // swagger:operation GET /files/teams/{teamID}/{boardID}/{filename}/info getFile
     //
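The new writeFileResponse helper makes two decisions for every served file: which Content-Type to send back (script-like and HTML types are downgraded to text/plain) and whether to force a download (anything that is not one of the whitelisted image, audio, or video types gets an attachment Content-Disposition). The sketch below is a standalone illustration of those two decisions only; the classify function and its output are hypothetical and are not part of the Focalboard API.

```go
package main

import (
	"fmt"
	"strings"
)

// Mirrors the UnsafeContentTypes / MediaContentTypes lists from the diff above.
var unsafe = []string{"application/javascript", "application/ecmascript", "text/javascript",
	"text/ecmascript", "application/x-javascript", "text/html"}
var media = []string{"image/jpeg", "image/png", "image/bmp", "image/gif", "image/tiff",
	"video/avi", "video/mpeg", "video/mp4", "audio/mpeg", "audio/wav"}

// classify reproduces the two decisions made by the new helper:
// what Content-Type to send, and whether to force a download.
func classify(contentType string) (sendAs string, download bool) {
	sendAs = contentType
	if sendAs == "" {
		sendAs = "application/octet-stream"
	} else {
		for _, u := range unsafe {
			if strings.HasPrefix(sendAs, u) {
				sendAs = "text/plain" // scripts and HTML are neutralized
				break
			}
		}
	}
	download = true
	for _, m := range media {
		if strings.HasPrefix(sendAs, m) {
			download = false // images, audio and video are served inline
			break
		}
	}
	return sendAs, download
}

func main() {
	for _, ct := range []string{"text/html", "image/png", "", "application/pdf"} {
		sendAs, download := classify(ct)
		fmt.Printf("%-20q -> %-28q download=%v\n", ct, sendAs, download)
	}
	// "text/html"        -> "text/plain"                 download=true
	// "image/png"        -> "image/png"                  download=false
	// ""                 -> "application/octet-stream"   download=true
	// "application/pdf"  -> "application/pdf"            download=true
}
```

In practice this means an uploaded HTML or script attachment is rendered as plain text and offered as a download rather than being interpreted by the browser.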
@@ -410,20 +410,6 @@ func (a *App) DeleteBlockAndNotify(blockID string, modifiedBy string, disableNot
         return err
     }

-    if block.Type == model.TypeImage {
-        fileName, fileIDExists := block.Fields["fileId"]
-        if fileName, fileIDIsString := fileName.(string); fileIDExists && fileIDIsString {
-            filePath := filepath.Join(block.BoardID, fileName)
-            err = a.filesBackend.RemoveFile(filePath)
-
-            if err != nil {
-                a.logger.Error("Error deleting image file",
-                    mlog.String("FilePath", filePath),
-                    mlog.Err(err))
-            }
-        }
-    }
-
     a.blockChangeNotifier.Enqueue(func() error {
         a.wsAdapter.BroadcastBlockDelete(board.TeamID, blockID, block.BoardID)
         a.metrics.IncrementBlocksDeleted(1)
@@ -355,12 +355,15 @@ func (a *App) PatchBoard(patch *model.BoardPatch, boardID, userID string) (*mode
     var oldMembers []*model.BoardMember

     if patch.Type != nil || patch.ChannelID != nil {
+        testChannel := ""
         if patch.ChannelID != nil && *patch.ChannelID == "" {
             var err error
             oldMembers, err = a.GetMembersForBoard(boardID)
             if err != nil {
                 a.logger.Error("Unable to get the board members", mlog.Err(err))
             }
+        } else if patch.ChannelID != nil && *patch.ChannelID != "" {
+            testChannel = *patch.ChannelID
         }

         board, err := a.store.GetBoard(boardID)
@@ -372,7 +375,17 @@
         }
         oldChannelID = board.ChannelID
         isTemplate = board.IsTemplate
+        if testChannel == "" {
+            testChannel = oldChannelID
+        }
+
+        if testChannel != "" {
+            if !a.permissions.HasPermissionToChannel(userID, testChannel, model.PermissionCreatePost) {
+                return nil, model.NewErrPermission("access denied to channel")
+            }
+        }
     }

     updatedBoard, err := a.store.PatchBoard(boardID, patch, userID)
     if err != nil {
         return nil, err
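The PatchBoard change above boils down to choosing which channel, if any, must pass a create-post permission check before the patch is stored. A minimal standalone sketch of that choice is below; BoardPatch and pickChannelToCheck are simplified stand-ins invented for illustration, not the real model types.

```go
package main

import "fmt"

// BoardPatch is a simplified stand-in for model.BoardPatch (assumption: only the
// two fields the permission decision needs).
type BoardPatch struct {
	Type      *string
	ChannelID *string
}

// pickChannelToCheck mirrors the logic added to PatchBoard: when a patch links the
// board to a new channel, that channel is checked; when it unlinks (empty ChannelID)
// or only changes the board type, the board's current channel is checked instead.
// An empty return value means no channel check applies.
func pickChannelToCheck(patch BoardPatch, currentChannelID string) string {
	if patch.Type == nil && patch.ChannelID == nil {
		return "" // nothing that affects channel linkage is being patched
	}
	testChannel := ""
	if patch.ChannelID != nil && *patch.ChannelID != "" {
		testChannel = *patch.ChannelID // linking to a new channel
	}
	if testChannel == "" {
		testChannel = currentChannelID // unlinking or type change: check the old channel
	}
	return testChannel
}

func main() {
	boardType := "open"
	newCh, empty := "channel-new", ""
	fmt.Println(pickChannelToCheck(BoardPatch{ChannelID: &newCh}, "channel-old"))  // channel-new
	fmt.Println(pickChannelToCheck(BoardPatch{ChannelID: &empty}, "channel-old")) // channel-old
	fmt.Println(pickChannelToCheck(BoardPatch{Type: &boardType}, "channel-old"))  // channel-old
	fmt.Println(pickChannelToCheck(BoardPatch{}, "channel-old") == "")            // true
}
```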
@@ -185,6 +185,7 @@ func TestPatchBoard(t *testing.T) {
         // Type not null will retrieve team members
         th.Store.EXPECT().GetUsersByTeam(teamID, "", false, false).Return([]*model.User{}, nil)
+        th.Store.EXPECT().GetUserByID(userID).Return(&model.User{ID: userID, Username: "UserName"}, nil)

         th.Store.EXPECT().PatchBoard(boardID, patch, userID).Return(
             &model.Board{
@@ -399,6 +400,104 @@ func TestPatchBoard(t *testing.T) {
         require.NoError(t, err)
         require.Equal(t, boardID, patchedBoard.ID)
     })
+
+    t.Run("patch type channel, user without post permissions", func(t *testing.T) {
+        const boardID = "board_id_1"
+        const userID = "user_id_2"
+        const teamID = "team_id_1"
+
+        channelID := "myChannel"
+        patchType := model.BoardTypeOpen
+        patch := &model.BoardPatch{
+            Type:      &patchType,
+            ChannelID: &channelID,
+        }
+
+        // Type not nil, will cause board to be reteived
+        // to check isTemplate
+        th.Store.EXPECT().GetBoard(boardID).Return(&model.Board{
+            ID:         boardID,
+            TeamID:     teamID,
+            IsTemplate: true,
+        }, nil).Times(1)
+
+        th.API.EXPECT().HasPermissionToChannel(userID, channelID, model.PermissionCreatePost).Return(false).Times(1)
+        _, err := th.App.PatchBoard(patch, boardID, userID)
+        require.Error(t, err)
+    })
+
+    t.Run("patch type channel, user with post permissions", func(t *testing.T) {
+        const boardID = "board_id_1"
+        const userID = "user_id_2"
+        const teamID = "team_id_1"
+
+        channelID := "myChannel"
+        patch := &model.BoardPatch{
+            ChannelID: &channelID,
+        }
+
+        // Type not nil, will cause board to be reteived
+        // to check isTemplate
+        th.Store.EXPECT().GetBoard(boardID).Return(&model.Board{
+            ID:     boardID,
+            TeamID: teamID,
+        }, nil).Times(2)
+
+        th.API.EXPECT().HasPermissionToChannel(userID, channelID, model.PermissionCreatePost).Return(true).Times(1)
+
+        th.Store.EXPECT().PatchBoard(boardID, patch, userID).Return(
+            &model.Board{
+                ID:     boardID,
+                TeamID: teamID,
+            },
+            nil)
+
+        // Should call GetMembersForBoard 2 times
+        // - for WS BroadcastBoardChange
+        // - for AddTeamMembers check
+        th.Store.EXPECT().GetMembersForBoard(boardID).Return([]*model.BoardMember{}, nil).Times(2)
+
+        th.Store.EXPECT().PostMessage(utils.Anything, "", "").Times(1)
+
+        patchedBoard, err := th.App.PatchBoard(patch, boardID, userID)
+        require.NoError(t, err)
+        require.Equal(t, boardID, patchedBoard.ID)
+    })
+
+    t.Run("patch type remove channel, user without post permissions", func(t *testing.T) {
+        const boardID = "board_id_1"
+        const userID = "user_id_2"
+        const teamID = "team_id_1"
+
+        const channelID = "myChannel"
+        clearChannel := ""
+        patchType := model.BoardTypeOpen
+        patch := &model.BoardPatch{
+            Type:      &patchType,
+            ChannelID: &clearChannel,
+        }
+
+        // Type not nil, will cause board to be reteived
+        // to check isTemplate
+        th.Store.EXPECT().GetBoard(boardID).Return(&model.Board{
+            ID:         boardID,
+            TeamID:     teamID,
+            IsTemplate: true,
+            ChannelID:  channelID,
+        }, nil).Times(2)
+
+        th.API.EXPECT().HasPermissionToChannel(userID, channelID, model.PermissionCreatePost).Return(false).Times(1)
+
+        th.API.EXPECT().HasPermissionToTeam(userID, teamID, model.PermissionManageTeam).Return(false).Times(1)
+        // Should call GetMembersForBoard 2 times
+        // for WS BroadcastBoardChange
+        // for AddTeamMembers check
+        // We are returning the user as a direct Board Member, so BroadcastMemberDelete won't be called
+        th.Store.EXPECT().GetMembersForBoard(boardID).Return([]*model.BoardMember{{BoardID: boardID, UserID: userID, SchemeEditor: true}}, nil).Times(1)
+
+        _, err := th.App.PatchBoard(patch, boardID, userID)
+        require.Error(t, err)
+    })
 }

 func TestGetBoardCount(t *testing.T) {
@@ -54,6 +54,10 @@ func (a *Auth) IsValidReadToken(boardID string, readToken string) (bool, error)
         return false, err
     }

+    if !a.config.EnablePublicSharedBoards {
+        return false, errors.New("public shared boards disabled")
+    }
+
     if sharing != nil && (sharing.ID == boardID && sharing.Enabled && sharing.Token == readToken) {
         return true, nil
     }
@@ -581,6 +581,35 @@ func TestPermissionsGetBoard(t *testing.T) {
     })
 }

+func TestPermissionsGetBoardPublic(t *testing.T) {
+    ttCases := []TestCase{
+        {"/boards/{PRIVATE_BOARD_ID}?read_token=invalid", methodGet, "", userAnon, http.StatusUnauthorized, 0},
+        {"/boards/{PRIVATE_BOARD_ID}?read_token=valid", methodGet, "", userAnon, http.StatusUnauthorized, 1},
+        {"/boards/{PRIVATE_BOARD_ID}?read_token=invalid", methodGet, "", userNoTeamMember, http.StatusForbidden, 0},
+        {"/boards/{PRIVATE_BOARD_ID}?read_token=valid", methodGet, "", userTeamMember, http.StatusForbidden, 1},
+    }
+    t.Run("plugin", func(t *testing.T) {
+        th := SetupTestHelperPluginMode(t)
+        defer th.TearDown()
+        cfg := th.Server.Config()
+        cfg.EnablePublicSharedBoards = false
+        th.Server.UpdateAppConfig()
+        clients := setupClients(th)
+        testData := setupData(t, th)
+        runTestCases(t, ttCases, testData, clients)
+    })
+    t.Run("local", func(t *testing.T) {
+        th := SetupTestHelperLocalMode(t)
+        defer th.TearDown()
+        cfg := th.Server.Config()
+        cfg.EnablePublicSharedBoards = false
+        th.Server.UpdateAppConfig()
+        clients := setupLocalClients(th)
+        testData := setupData(t, th)
+        runTestCases(t, ttCases, testData, clients)
+    })
+}
+
 func TestPermissionsPatchBoard(t *testing.T) {
     ttCases := []TestCase{
         {"/boards/{PRIVATE_BOARD_ID}", methodPatch, "{\"title\": \"test\"}", userAnon, http.StatusUnauthorized, 0},
@@ -8,6 +8,13 @@ import (
 // It should be maintained in chronological order with most current
 // release at the front of the list.
 var versions = []string{
+    "7.9.7",
+    "7.9.6",
+    "7.9.5",
+    "7.9.4",
+    "7.9.3",
+    "7.9.2",
+    "7.9.1",
     "7.9.0",
     "7.8.0",
     "7.7.0",
@@ -365,6 +365,14 @@ func (s *SQLStore) deleteBlock(db sq.BaseRunner, blockID string, modifiedBy stri
     return s.deleteBlockAndChildren(db, blockID, modifiedBy, false)
 }

+func retrieveFileIDFromBlockFieldStorage(id string) string {
+    parts := strings.Split(id, ".")
+    if len(parts) < 1 {
+        return ""
+    }
+    return parts[0][1:]
+}
+
 func (s *SQLStore) deleteBlockAndChildren(db sq.BaseRunner, blockID string, modifiedBy string, keepChildren bool) error {
     block, err := s.getBlock(db, blockID)
     if model.IsErrNotFound(err) {
@@ -415,6 +423,30 @@ func (s *SQLStore) deleteBlockAndChildren(db sq.BaseRunner, blockID string, modi
         return err
     }

+    // fileId and attachmentId shoudn't exist at the same time
+    fileID := ""
+    fileIDWithExtention, fileIDExists := block.Fields["fileId"]
+    if fileIDExists {
+        fileID = retrieveFileIDFromBlockFieldStorage(fileIDWithExtention.(string))
+    }
+
+    if fileID == "" {
+        attachmentIDWithExtention, attachmentIDExists := block.Fields["attachmentId"]
+        if attachmentIDExists {
+            fileID = retrieveFileIDFromBlockFieldStorage(attachmentIDWithExtention.(string))
+        }
+    }
+
+    if fileID != "" {
+        deleteFileInfoQuery := s.getQueryBuilder(db).
+            Update("FileInfo").
+            Set("DeleteAt", model.GetMillis()).
+            Where(sq.Eq{"id": fileID})
+        if _, err := deleteFileInfoQuery.Exec(); err != nil {
+            return err
+        }
+    }
+
     deleteQuery := s.getQueryBuilder(db).
         Delete(s.tablePrefix + "blocks").
         Where(sq.Eq{"id": blockID})
@@ -931,6 +963,48 @@ func (s *SQLStore) deleteBlockChildren(db sq.BaseRunner, boardID string, parentI
         return err
     }

+    fileDeleteQuery := s.getQueryBuilder(db).
+        Select(s.blockFields("")...).
+        From(s.tablePrefix + "blocks").
+        Where(sq.Eq{"board_id": boardID})
+
+    if parentID != "" {
+        fileDeleteQuery = fileDeleteQuery.Where(sq.Eq{"parent_id": parentID})
+    }
+
+    rows, err := fileDeleteQuery.Query()
+    if err != nil {
+        return err
+    }
+    defer s.CloseRows(rows)
+    blocks, err := s.blocksFromRows(rows)
+    if err != nil {
+        return err
+    }
+
+    fileIDs := make([]string, 0, len(blocks))
+    for _, block := range blocks {
+        fileIDWithExtention, fileIDExists := block.Fields["fileId"]
+        if fileIDExists {
+            fileIDs = append(fileIDs, retrieveFileIDFromBlockFieldStorage(fileIDWithExtention.(string)))
+        }
+        attachmentIDWithExtention, attachmentIDExists := block.Fields["attachmentId"]
+        if attachmentIDExists {
+            fileIDs = append(fileIDs, retrieveFileIDFromBlockFieldStorage(attachmentIDWithExtention.(string)))
+        }
+    }
+
+    if len(fileIDs) > 0 {
+        deleteFileInfoQuery := s.getQueryBuilder(db).
+            Update("FileInfo").
+            Set("DeleteAt", model.GetMillis()).
+            Where(sq.Eq{"id": fileIDs})
+
+        if _, err := deleteFileInfoQuery.Exec(); err != nil {
+            return err
+        }
+    }
+
     deleteQuery := s.getQueryBuilder(db).
         Delete(s.tablePrefix + "blocks").
         Where(sq.Eq{"board_id": boardID})
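A note on retrieveFileIDFromBlockFieldStorage: the value stored in a block's fileId or attachmentId field is assumed here to be a one-character prefix, the file ID, and a file extension (for example 7abc123.png), so the helper splits on the first dot and drops the leading prefix character. The snippet below is a hypothetical, self-contained illustration of that string handling (the empty-input guard is added only for this sketch; the committed helper relies on callers passing a non-empty value).

```go
package main

import (
	"fmt"
	"strings"
)

// fileIDFromStoredValue applies the same string handling as
// retrieveFileIDFromBlockFieldStorage in the diff above:
// strip the extension, then drop the leading prefix character.
// Assumption: stored values look like "<prefix><file id>.<ext>", e.g. "7abc123.png".
func fileIDFromStoredValue(stored string) string {
	parts := strings.Split(stored, ".")
	if len(parts) < 1 || parts[0] == "" {
		return ""
	}
	return parts[0][1:]
}

func main() {
	fmt.Println(fileIDFromStoredValue("7abc123def456.png")) // abc123def456
	fmt.Println(fileIDFromStoredValue("7abc123def456"))     // abc123def456 (no extension)
}
```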
@@ -21,11 +21,12 @@ const (
     // query, so we want to stay safely below.
     CategoryInsertBatch = 1000

-    TemplatesToTeamsMigrationKey        = "TemplatesToTeamsMigrationComplete"
-    UniqueIDsMigrationKey               = "UniqueIDsMigrationComplete"
-    CategoryUUIDIDMigrationKey          = "CategoryUuidIdMigrationComplete"
-    TeamLessBoardsMigrationKey          = "TeamLessBoardsMigrationComplete"
-    DeletedMembershipBoardsMigrationKey = "DeletedMembershipBoardsMigrationComplete"
+    TemplatesToTeamsMigrationKey              = "TemplatesToTeamsMigrationComplete"
+    UniqueIDsMigrationKey                     = "UniqueIDsMigrationComplete"
+    CategoryUUIDIDMigrationKey                = "CategoryUuidIdMigrationComplete"
+    TeamLessBoardsMigrationKey                = "TeamLessBoardsMigrationComplete"
+    DeletedMembershipBoardsMigrationKey       = "DeletedMembershipBoardsMigrationComplete"
+    DeDuplicateCategoryBoardTableMigrationKey = "DeDuplicateCategoryBoardTableComplete"
 )

 func (s *SQLStore) getBlocksWithSameID(db sq.BaseRunner) ([]*model.Block, error) {
@@ -790,3 +791,100 @@ func (s *SQLStore) getCollationAndCharset(tableName string) (string, string, err

     return collation, charSet, nil
 }
+
+func (s *SQLStore) RunDeDuplicateCategoryBoardsMigration(currentMigration int) error {
+    // not supported for SQLite
+    if s.dbType == model.SqliteDBType {
+        if mErr := s.setSystemSetting(s.db, DeDuplicateCategoryBoardTableMigrationKey, strconv.FormatBool(true)); mErr != nil {
+            return fmt.Errorf("cannot mark migration %s as completed: %w", "RunDeDuplicateCategoryBoardsMigration", mErr)
+        }
+        return nil
+    }
+
+    setting, err := s.GetSystemSetting(DeDuplicateCategoryBoardTableMigrationKey)
+    if err != nil {
+        return fmt.Errorf("cannot get DeDuplicateCategoryBoardTableMigration state: %w", err)
+    }
+
+    // If the migration is already completed, do not run it again.
+    if hasAlreadyRun, _ := strconv.ParseBool(setting); hasAlreadyRun {
+        return nil
+    }
+
+    if currentMigration >= (deDuplicateCategoryBoards + 1) {
+        // if the migration for which we're fixing the data is already applied,
+        // no need to check fix anything
+
+        if mErr := s.setSystemSetting(s.db, DeDuplicateCategoryBoardTableMigrationKey, strconv.FormatBool(true)); mErr != nil {
+            return fmt.Errorf("cannot mark migration %s as completed: %w", "RunDeDuplicateCategoryBoardsMigration", mErr)
+        }
+        return nil
+    }
+
+    needed, err := s.doesDuplicateCategoryBoardsExist()
+    if err != nil {
+        return err
+    }
+
+    if !needed {
+        if mErr := s.setSystemSetting(s.db, DeDuplicateCategoryBoardTableMigrationKey, strconv.FormatBool(true)); mErr != nil {
+            return fmt.Errorf("cannot mark migration %s as completed: %w", "RunDeDuplicateCategoryBoardsMigration", mErr)
+        }
+    }
+
+    if s.dbType == model.MysqlDBType {
+        return s.runMySQLDeDuplicateCategoryBoardsMigration()
+    } else if s.dbType == model.PostgresDBType {
+        return s.runPostgresDeDuplicateCategoryBoardsMigration()
+    }
+
+    if mErr := s.setSystemSetting(s.db, DeDuplicateCategoryBoardTableMigrationKey, strconv.FormatBool(true)); mErr != nil {
+        return fmt.Errorf("cannot mark migration %s as completed: %w", "RunDeDuplicateCategoryBoardsMigration", mErr)
+    }
+
+    return nil
+}
+
+func (s *SQLStore) doesDuplicateCategoryBoardsExist() (bool, error) {
+    subQuery := s.getQueryBuilder(s.db).
+        Select("user_id", "board_id", "count(*) AS count").
+        From(s.tablePrefix+"category_boards").
+        GroupBy("user_id", "board_id").
+        Having("count(*) > 1")
+
+    query := s.getQueryBuilder(s.db).
+        Select("COUNT(user_id)").
+        FromSelect(subQuery, "duplicate_dataset")
+
+    row := query.QueryRow()
+
+    count := 0
+    if err := row.Scan(&count); err != nil {
+        s.logger.Error("Error occurred reading number of duplicate records in category_boards table", mlog.Err(err))
+        return false, err
+    }
+
+    return count > 0, nil
+}
+
+func (s *SQLStore) runMySQLDeDuplicateCategoryBoardsMigration() error {
+    query := "DELETE FROM " + s.tablePrefix + "category_boards WHERE id NOT IN " +
+        "(SELECT * FROM ( SELECT min(id) FROM " + s.tablePrefix + "category_boards GROUP BY user_id, board_id ) as data)"
+    if _, err := s.db.Exec(query); err != nil {
+        s.logger.Error("Failed to de-duplicate data in category_boards table", mlog.Err(err))
+    }
+
+    return nil
+}
+
+func (s *SQLStore) runPostgresDeDuplicateCategoryBoardsMigration() error {
+    query := "WITH duplicates AS (SELECT id, ROW_NUMBER() OVER(PARTITION BY user_id, board_id) AS rownum " +
+        "FROM " + s.tablePrefix + "category_boards) " +
+        "DELETE FROM " + s.tablePrefix + "category_boards USING duplicates " +
+        "WHERE " + s.tablePrefix + "category_boards.id = duplicates.id AND duplicates.rownum > 1;"
+    if _, err := s.db.Exec(query); err != nil {
+        s.logger.Error("Failed to de-duplicate data in category_boards table", mlog.Err(err))
+    }
+
+    return nil
+}
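Both database-specific de-duplication queries implement the same rule: for every (user_id, board_id) pair in category_boards, keep one row and delete the rest (the MySQL variant keeps the row with the smallest id). The standalone Go sketch below reproduces that rule in memory purely to show which rows survive; categoryBoardRow and keepRows are hypothetical names for this illustration, not store code.

```go
package main

import (
	"fmt"
	"sort"
)

// categoryBoardRow is a hypothetical in-memory stand-in for a row of the
// category_boards table, used only to illustrate the de-duplication rule.
type categoryBoardRow struct {
	ID      string
	UserID  string
	BoardID string
}

// keepRows reproduces the effect of the MySQL variant above: for every
// (user_id, board_id) pair, keep only the row with the smallest id.
func keepRows(rows []categoryBoardRow) []categoryBoardRow {
	best := map[[2]string]categoryBoardRow{}
	for _, r := range rows {
		key := [2]string{r.UserID, r.BoardID}
		if cur, ok := best[key]; !ok || r.ID < cur.ID {
			best[key] = r
		}
	}
	kept := make([]categoryBoardRow, 0, len(best))
	for _, r := range best {
		kept = append(kept, r)
	}
	sort.Slice(kept, func(i, j int) bool { return kept[i].ID < kept[j].ID })
	return kept
}

func main() {
	rows := []categoryBoardRow{
		{"id-1", "user-1", "board-1"},
		{"id-2", "user-1", "board-1"}, // duplicate of (user-1, board-1): dropped
		{"id-3", "user-2", "board-2"},
	}
	for _, r := range keepRows(rows) {
		fmt.Println(r.ID) // id-1, id-3
	}
}
```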
@@ -36,6 +36,7 @@ const (
     uniqueIDsMigrationRequiredVersion        = 14
     teamLessBoardsMigrationRequiredVersion   = 18
     categoriesUUIDIDMigrationRequiredVersion = 20
+    deDuplicateCategoryBoards                = 35

     tempSchemaMigrationTableName = "temp_schema_migration"
 )
@@ -248,6 +249,15 @@ func (s *SQLStore) runMigrationSequence(engine *morph.Morph, driver drivers.Driv
         return err
     }

+    if mErr := s.ensureMigrationsAppliedUpToVersion(engine, driver, deDuplicateCategoryBoards); mErr != nil {
+        return mErr
+    }
+
+    currentMigrationVersion := len(appliedMigrations)
+    if mErr := s.RunDeDuplicateCategoryBoardsMigration(currentMigrationVersion); mErr != nil {
+        return mErr
+    }
+
     s.logger.Debug("== Applying all remaining migrations ====================",
         mlog.Int("current_version", len(appliedMigrations)),
     )
@@ -309,7 +319,7 @@ func (s *SQLStore) GetTemplateHelperFuncs() template.FuncMap {

 func (s *SQLStore) genAddColumnIfNeeded(tableName, columnName, datatype, constraint string) (string, error) {
     tableName = addPrefixIfNeeded(tableName, s.tablePrefix)
-    normTableName := normalizeTablename(s.schemaName, tableName)
+    normTableName := s.normalizeTablename(tableName)

     switch s.dbType {
     case model.SqliteDBType:
@@ -348,7 +358,7 @@ func (s *SQLStore) genAddColumnIfNeeded(tableName, columnName, datatype, constra

 func (s *SQLStore) genDropColumnIfNeeded(tableName, columnName string) (string, error) {
     tableName = addPrefixIfNeeded(tableName, s.tablePrefix)
-    normTableName := normalizeTablename(s.schemaName, tableName)
+    normTableName := s.normalizeTablename(tableName)

     switch s.dbType {
     case model.SqliteDBType:
@@ -385,7 +395,7 @@ func (s *SQLStore) genDropColumnIfNeeded(tableName, columnName string) (string,
 func (s *SQLStore) genCreateIndexIfNeeded(tableName, columns string) (string, error) {
     indexName := getIndexName(tableName, columns)
     tableName = addPrefixIfNeeded(tableName, s.tablePrefix)
-    normTableName := normalizeTablename(s.schemaName, tableName)
+    normTableName := s.normalizeTablename(tableName)

     switch s.dbType {
     case model.SqliteDBType:
@@ -425,7 +435,7 @@ func (s *SQLStore) genRenameTableIfNeeded(oldTableName, newTableName string) (st
     oldTableName = addPrefixIfNeeded(oldTableName, s.tablePrefix)
     newTableName = addPrefixIfNeeded(newTableName, s.tablePrefix)

-    normOldTableName := normalizeTablename(s.schemaName, oldTableName)
+    normOldTableName := s.normalizeTablename(oldTableName)

     vars := map[string]string{
         "schema": s.schemaName,
@@ -456,14 +466,14 @@ func (s *SQLStore) genRenameTableIfNeeded(oldTableName, newTableName string) (st
     case model.PostgresDBType:
         return replaceVars(`
     do $$
-    begin
+    begin
         if (SELECT COUNT(table_name) FROM INFORMATION_SCHEMA.TABLES
         WHERE table_name = '[[new_table_name]]'
         AND table_schema = '[[schema]]'
-        ) = 0 then
+        ) = 0 then
             ALTER TABLE [[norm_old_table_name]] RENAME TO [[new_table_name]];
         end if;
-    end$$;
+    end$$;
     `, vars), nil
     default:
         return "", ErrUnsupportedDatabaseType
@@ -472,7 +482,7 @@ func (s *SQLStore) genRenameTableIfNeeded(oldTableName, newTableName string) (st

 func (s *SQLStore) genRenameColumnIfNeeded(tableName, oldColumnName, newColumnName, dataType string) (string, error) {
     tableName = addPrefixIfNeeded(tableName, s.tablePrefix)
-    normTableName := normalizeTablename(s.schemaName, tableName)
+    normTableName := s.normalizeTablename(tableName)

     vars := map[string]string{
         "schema": s.schemaName,
@@ -506,15 +516,15 @@ func (s *SQLStore) genRenameColumnIfNeeded(tableName, oldColumnName, newColumnNa
     case model.PostgresDBType:
         return replaceVars(`
     do $$
-    begin
+    begin
         if (SELECT COUNT(table_name) FROM INFORMATION_SCHEMA.COLUMNS
         WHERE table_name = '[[table_name]]'
         AND table_schema = '[[schema]]'
         AND column_name = '[[new_column_name]]'
-        ) = 0 then
+        ) = 0 then
             ALTER TABLE [[norm_table_name]] RENAME COLUMN [[old_column_name]] TO [[new_column_name]];
         end if;
-    end$$;
+    end$$;
     `, vars), nil
     default:
         return "", ErrUnsupportedDatabaseType
@@ -610,7 +620,7 @@ func (s *SQLStore) doesColumnExist(tableName, columnName string) (bool, error) {

 func (s *SQLStore) genAddConstraintIfNeeded(tableName, constraintName, constraintType, constraintDefinition string) (string, error) {
     tableName = addPrefixIfNeeded(tableName, s.tablePrefix)
-    normTableName := normalizeTablename(s.schemaName, tableName)
+    normTableName := s.normalizeTablename(tableName)

     var query string

@@ -676,8 +686,12 @@ func addPrefixIfNeeded(s, prefix string) string {
     return s
 }

-func normalizeTablename(schemaName, tableName string) string {
-    if schemaName != "" && !strings.HasPrefix(tableName, schemaName+".") {
+func (s *SQLStore) normalizeTablename(tableName string) string {
+    if s.schemaName != "" && !strings.HasPrefix(tableName, s.schemaName+".") {
+        schemaName := s.schemaName
+        if s.dbType == model.MysqlDBType {
+            schemaName = "`" + schemaName + "`"
+        }
         tableName = schemaName + "." + tableName
     }
     return tableName
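The effect of the normalizeTablename change is that MySQL schema names get wrapped in backticks before being prefixed onto the table name, while other databases keep the plain schema.table form. The sketch below flattens the SQLStore receiver into plain parameters (an assumption made only for illustration) to show the resulting strings.

```go
package main

import (
	"fmt"
	"strings"
)

// normalizeTablename mirrors the method added in the diff above, with the
// SQLStore receiver replaced by explicit schemaName/dbType parameters.
func normalizeTablename(schemaName, dbType, tableName string) string {
	if schemaName != "" && !strings.HasPrefix(tableName, schemaName+".") {
		if dbType == "mysql" {
			schemaName = "`" + schemaName + "`" // quote MySQL schema names such as "my-db"
		}
		tableName = schemaName + "." + tableName
	}
	return tableName
}

func main() {
	fmt.Println(normalizeTablename("my-db", "mysql", "focalboard_blocks"))     // `my-db`.focalboard_blocks
	fmt.Println(normalizeTablename("public", "postgres", "focalboard_blocks")) // public.focalboard_blocks
}
```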
@@ -23,4 +23,4 @@
     SELECT id, user_id, category_id, board_id, create_at, update_at, sort_order, hidden FROM {{.prefix}}category_boards_old;
     DROP TABLE {{.prefix}}category_boards_old;

-{{end}}
+{{end}}
@@ -0,0 +1 @@
+SELECT 1;
@@ -0,0 +1,8 @@
+{{if .plugin}}
+UPDATE FileInfo
+SET DeleteAt = 0
+WHERE CreatorId = 'boards'
+AND DeleteAt != 0;
+{{else}}
+SELECT 1;
+{{end}}
@@ -246,6 +246,9 @@ func (bm *BoardsMigrator) MigrateToStep(step int) error {
 func (bm *BoardsMigrator) Interceptors() map[int]foundation.Interceptor {
     return map[int]foundation.Interceptor{
         18: bm.store.RunDeletedMembershipBoardsMigration,
+        35: func() error {
+            return bm.store.RunDeDuplicateCategoryBoardsMigration(35)
+        },
     }
 }
@@ -0,0 +1,28 @@
+package migrationstests
+
+import (
+    "github.com/stretchr/testify/assert"
+    "testing"
+)
+
+func TestRunDeDuplicateCategoryBoardsMigration(t *testing.T) {
+    th, tearDown := SetupTestHelper(t)
+    defer tearDown()
+
+    if th.IsSQLite() {
+        t.Skip("SQLite is not supported for this")
+    }
+
+    th.f.MigrateToStepSkippingLastInterceptor(35).
+        ExecFile("./fixtures/testDeDuplicateCategoryBoardsMigration.sql")
+
+    th.f.RunInterceptor(35)
+
+    // verifying count of rows
+    var count int
+    countQuery := "SELECT COUNT(*) FROM focalboard_category_boards"
+    row := th.f.DB().QueryRow(countQuery)
+    err := row.Scan(&count)
+    assert.NoError(t, err)
+    assert.Equal(t, 4, count)
+}
@@ -1,6 +1,8 @@
-INSERT INTO focalboard_category_boards values
+INSERT INTO focalboard_category_boards
+(id, user_id, category_id, board_id, create_at, update_at, delete_at, sort_order)
+values
 ('id-1', 'user_id-1', 'category-id-1', 'board-id-1', 1672988834402, 1672988834402, 0, 0),
 ('id-2', 'user_id-1', 'category-id-2', 'board-id-1', 1672988834402, 1672988834402, 0, 0),
 ('id-3', 'user_id-2', 'category-id-3', 'board-id-2', 1672988834402, 1672988834402, 1672988834402, 0),
 ('id-4', 'user_id-2', 'category-id-3', 'board-id-4', 1672988834402, 1672988834402, 0, 0),
-('id-5', 'user_id-3', 'category-id-4', 'board-id-3', 1672988834402, 1672988834402, 1672988834402, 0);
+('id-5', 'user_id-3', 'category-id-4', 'board-id-3', 1672988834402, 1672988834402, 1672988834402, 0);

@@ -1,6 +1,8 @@
-INSERT INTO focalboard_category_boards values
+INSERT INTO focalboard_category_boards
+(id, user_id, category_id, board_id, create_at, update_at, delete_at, sort_order)
+values
 ('id-1', 'user_id-1', 'category-id-1', 'board-id-1', 1672988834402, 1672988834402, 0, 0),
 ('id-2', 'user_id-1', 'category-id-2', 'board-id-1', 1672988834402, 1672988834402, 0, 0),
 ('id-3', 'user_id-2', 'category-id-3', 'board-id-2', 1672988834402, 1672988834402, 0, 0),
 ('id-4', 'user_id-2', 'category-id-3', 'board-id-4', 1672988834402, 1672988834402, 0, 0),
-('id-5', 'user_id-3', 'category-id-4', 'board-id-3', 1672988834402, 1672988834402, 0, 0);
+('id-5', 'user_id-3', 'category-id-4', 'board-id-3', 1672988834402, 1672988834402, 0, 0);

@@ -1,4 +1,6 @@
-INSERT INTO focalboard_category_boards VALUES
+INSERT INTO focalboard_category_boards
+(id, user_id, category_id, board_id, create_at, update_at, sort_order, hidden)
+VALUES
 ('id-1', 'user-id-1', 'category-id-1', 'board-id-1', 1672889246832, 1672889246832, 0, false),
 ('id-2', 'user-id-1', 'category-id-2', 'board-id-2', 1672889246832, 1672889246832, 0, false),
 ('id-3', 'user-id-2', 'category-id-3', 'board-id-3', 1672889246832, 1672889246832, 0, false),
@@ -7,4 +9,4 @@ INSERT INTO focalboard_category_boards VALUES

 INSERT INTO Preferences VALUES
 ('user-id-1', 'focalboard', 'hiddenBoardIDs', '["board-id-1"]'),
-('user-id-2', 'focalboard', 'hiddenBoardIDs', '["board-id-3", "board-id-4"]');
+('user-id-2', 'focalboard', 'hiddenBoardIDs', '["board-id-3", "board-id-4"]');

@@ -1,6 +1,8 @@
-INSERT INTO focalboard_category_boards VALUES
+INSERT INTO focalboard_category_boards
+(id, user_id, category_id, board_id, create_at, update_at, sort_order, hidden)
+VALUES
 ('id-1', 'user-id-1', 'category-id-1', 'board-id-1', 1672889246832, 1672889246832, 0, false),
 ('id-2', 'user-id-1', 'category-id-2', 'board-id-2', 1672889246832, 1672889246832, 0, false),
 ('id-3', 'user-id-2', 'category-id-3', 'board-id-3', 1672889246832, 1672889246832, 0, false),
 ('id-4', 'user-id-2', 'category-id-3', 'board-id-4', 1672889246832, 1672889246832, 0, false),
-('id-5', 'user-id-3', 'category-id-4', 'board-id-5', 1672889246832, 1672889246832, 0, false);
+('id-5', 'user-id-3', 'category-id-4', 'board-id-5', 1672889246832, 1672889246832, 0, false);

@@ -1,4 +1,6 @@
-INSERT INTO focalboard_category_boards VALUES
+INSERT INTO focalboard_category_boards
+(id, user_id, category_id, board_id, create_at, update_at, sort_order, hidden)
+VALUES
 ('id-1', 'user-id-1', 'category-id-1', 'board-id-1', 1672889246832, 1672889246832, 0, false),
 ('id-2', 'user-id-1', 'category-id-2', 'board-id-2', 1672889246832, 1672889246832, 0, false),
 ('id-3', 'user-id-2', 'category-id-3', 'board-id-3', 1672889246832, 1672889246832, 0, false),
@@ -7,4 +9,4 @@ INSERT INTO focalboard_category_boards VALUES

 INSERT INTO Preferences VALUES
 ('user-id-1', 'focalboard', 'hiddenBoardIDs', ''),
-('user-id-2', 'focalboard', 'hiddenBoardIDs', '');
+('user-id-2', 'focalboard', 'hiddenBoardIDs', '');

@@ -1,4 +1,6 @@
-INSERT INTO focalboard_category_boards VALUES
+INSERT INTO focalboard_category_boards
+(id, user_id, category_id, board_id, create_at, update_at, sort_order, hidden)
+VALUES
 ('id-1', 'user-id-1', 'category-id-1', 'board-id-1', 1672889246832, 1672889246832, 0, false),
 ('id-2', 'user-id-1', 'category-id-2', 'board-id-2', 1672889246832, 1672889246832, 0, false),
 ('id-3', 'user-id-2', 'category-id-3', 'board-id-3', 1672889246832, 1672889246832, 0, false),
@@ -7,4 +9,4 @@ INSERT INTO focalboard_category_boards VALUES

 INSERT INTO focalboard_preferences VALUES
 ('user-id-1', 'focalboard', 'hiddenBoardIDs', '["board-id-1"]'),
-('user-id-2', 'focalboard', 'hiddenBoardIDs', '["board-id-3", "board-id-4"]');
+('user-id-2', 'focalboard', 'hiddenBoardIDs', '["board-id-3", "board-id-4"]');

@@ -1,4 +1,6 @@
-INSERT INTO focalboard_category_boards VALUES
+INSERT INTO focalboard_category_boards
+(id, user_id, category_id, board_id, create_at, update_at, sort_order, hidden)
+VALUES
 ('id-1', 'user-id-1', 'category-id-1', 'board-id-1', 1672889246832, 1672889246832, 0, false),
 ('id-2', 'user-id-1', 'category-id-2', 'board-id-2', 1672889246832, 1672889246832, 0, false),
 ('id-3', 'user-id-2', 'category-id-3', 'board-id-3', 1672889246832, 1672889246832, 0, false),
@@ -7,4 +9,4 @@ INSERT INTO focalboard_category_boards VALUES

 INSERT INTO focalboard_preferences VALUES
 ('user-id-1', 'focalboard', 'hiddenBoardIDs', ''),
-('user-id-2', 'focalboard', 'hiddenBoardIDs', '');
+('user-id-2', 'focalboard', 'hiddenBoardIDs', '');
@@ -0,0 +1,9 @@
+INSERT INTO FileInfo
+(Id, CreatorId, CreateAt, UpdateAt, DeleteAt)
+VALUES
+('fileinfo-1', 'user-id', 1, 1, 1000),
+('fileinfo-2', 'user-id', 1, 1, 1000),
+('fileinfo-3', 'user-id', 1, 1, 0),
+('fileinfo-4', 'boards', 1, 1, 2000),
+('fileinfo-5', 'boards', 1, 1, 2000),
+('fileinfo-6', 'boards', 1, 1, 0);
@@ -0,0 +1,9 @@
+INSERT INTO focalboard_category_boards(id, user_id, category_id, board_id, create_at, update_at, sort_order)
+VALUES
+('id_1', 'user_id_1', 'category_id_1', 'board_id_1', 0, 0, 0),
+('id_2', 'user_id_1', 'category_id_2', 'board_id_1', 0, 0, 0),
+('id_3', 'user_id_1', 'category_id_3', 'board_id_1', 0, 0, 0),
+('id_4', 'user_id_2', 'category_id_4', 'board_id_2', 0, 0, 0),
+('id_5', 'user_id_2', 'category_id_5', 'board_id_2', 0, 0, 0),
+('id_6', 'user_id_3', 'category_id_6', 'board_id_3', 0, 0, 0),
+('id_7', 'user_id_4', 'category_id_6', 'board_id_4', 0, 0, 0);
@@ -0,0 +1,48 @@
+package migrationstests
+
+import (
+    "testing"
+
+    "github.com/stretchr/testify/require"
+)
+
+func Test40FixFileinfoSoftDeletes(t *testing.T) {
+    th, tearDown := SetupPluginTestHelper(t)
+    defer tearDown()
+
+    th.f.MigrateToStep(39).
+        ExecFile("./fixtures/test40FixFileinfoSoftDeletes.sql").
+        MigrateToStep(40)
+
+    type FileInfo struct {
+        Id       string
+        DeleteAt int
+    }
+
+    getFileInfo := func(t *testing.T, id string) FileInfo {
+        t.Helper()
+        fileInfo := FileInfo{}
+
+        query := "SELECT id, deleteat FROM FileInfo WHERE id = $1"
+        if th.IsMySQL() {
+            query = "SELECT Id as id, DeleteAt as deleteat FROM FileInfo WHERE Id = ?"
+        }
+
+        err := th.f.DB().Get(&fileInfo, query, id)
+        require.NoError(t, err)
+
+        return fileInfo
+    }
+
+    t.Run("the file infos that don't belong to boards will not be restored", func(t *testing.T) {
+        require.Equal(t, 1000, getFileInfo(t, "fileinfo-1").DeleteAt)
+        require.Equal(t, 1000, getFileInfo(t, "fileinfo-2").DeleteAt)
+        require.Empty(t, getFileInfo(t, "fileinfo-3").DeleteAt)
+    })
+
+    t.Run("the file infos that belong to boards should correctly be restored", func(t *testing.T) {
+        require.Empty(t, getFileInfo(t, "fileinfo-3").DeleteAt)
+        require.Empty(t, getFileInfo(t, "fileinfo-4").DeleteAt)
+        require.Empty(t, getFileInfo(t, "fileinfo-5").DeleteAt)
+    })
+}
@@ -123,6 +123,13 @@ func (s *SQLStore) isSchemaMigrationNeeded() (bool, error) {
         "TABLE_NAME": s.tablePrefix + "schema_migrations",
     })

+    switch s.dbType {
+    case model.MysqlDBType:
+        query = query.Where(sq.Eq{"TABLE_SCHEMA": s.schemaName})
+    case model.PostgresDBType:
+        query = query.Where("table_schema = current_schema()")
+    }
+
     rows, err := query.Query()
     if err != nil {
         s.logger.Error("failed to fetch columns in schema_migrations table", mlog.Err(err))
@@ -10,8 +10,9 @@ import (
 // these system settings are created when running the data migrations,
 // so they will be present after the tests setup.
 var dataMigrationSystemSettings = map[string]string{
-    "UniqueIDsMigrationComplete":      "true",
-    "CategoryUuidIdMigrationComplete": "true",
+    "UniqueIDsMigrationComplete":            "true",
+    "CategoryUuidIdMigrationComplete":       "true",
+    "DeDuplicateCategoryBoardTableComplete": "true",
 }

 func addBaseSettings(m map[string]string) map[string]string {
webapp/package-lock.json (generated): 4 changed lines

@@ -1,12 +1,12 @@
 {
     "name": "focalboard",
-    "version": "7.9.0",
+    "version": "7.9.7",
     "lockfileVersion": 2,
     "requires": true,
     "packages": {
         "": {
             "name": "focalboard",
-            "version": "7.9.0",
+            "version": "7.9.7",
             "dependencies": {
                 "@draft-js-plugins/editor": "^4.1.2",
                 "@draft-js-plugins/emoji": "^4.6.0",
@@ -1,6 +1,6 @@
 {
     "name": "focalboard",
-    "version": "7.9.0",
+    "version": "7.9.7",
     "private": true,
     "description": "",
     "scripts": {
@@ -10,7 +10,7 @@ exports[`components/sidebar/GlobalHeader header menu should match snapshot 1`] =
 />
 <a
   class="GlobalHeaderComponent__button help-button"
-  href="https://www.focalboard.com/fwlink/doc-boards.html?v=7.9.0"
+  href="https://www.focalboard.com/fwlink/doc-boards.html?v=7.9.7"
   rel="noreferrer"
   target="_blank"
 >
@@ -47,19 +47,22 @@ const TeamPermissionsRow = (): JSX.Element => {
        }
    }

    let currentRoleName = intl.formatMessage({id: 'BoardMember.schemeNone', defaultMessage: 'None'})
    if (board.type === BoardTypeOpen && board.minimumRole === MemberRole.Admin) {
        currentRoleName = intl.formatMessage({id: 'BoardMember.schemeAdmin', defaultMessage: 'Admin'})
    } else if (board.type === BoardTypeOpen && board.minimumRole === MemberRole.Editor) {
    if (board.isTemplate) {
    let currentRoleName = intl.formatMessage({id: 'BoardMember.schemeAdmin', defaultMessage: 'Admin'})
    if (board.type === BoardTypeOpen) {
        currentRoleName = intl.formatMessage({id: 'BoardMember.schemeEditor', defaultMessage: 'Editor'})
        if (board.minimumRole === MemberRole.Editor) {
            if (board.isTemplate) {
                currentRoleName = intl.formatMessage({id: 'BoardMember.schemeViewer', defaultMessage: 'Viewer'})
            } else {
                currentRoleName = intl.formatMessage({id: 'BoardMember.schemeEditor', defaultMessage: 'Editor'})
            }
        } else if (board.minimumRole === MemberRole.Commenter) {
            currentRoleName = intl.formatMessage({id: 'BoardMember.schemeCommenter', defaultMessage: 'Commenter'})
        } else if (board.minimumRole === MemberRole.Viewer) {
            currentRoleName = intl.formatMessage({id: 'BoardMember.schemeViewer', defaultMessage: 'Viewer'})
        } else {
            currentRoleName = intl.formatMessage({id: 'BoardMember.schemeEditor', defaultMessage: 'Editor'})
        }
    } else if (board.type === BoardTypeOpen && board.minimumRole === MemberRole.Commenter) {
        currentRoleName = intl.formatMessage({id: 'BoardMember.schemeCommenter', defaultMessage: 'Commenter'})
    } else if (board.type === BoardTypeOpen && board.minimumRole === MemberRole.Viewer) {
        currentRoleName = intl.formatMessage({id: 'BoardMember.schemeViewer', defaultMessage: 'Viewer'})
    } else {
        currentRoleName = intl.formatMessage({id: 'BoardMember.schemeNone', defaultMessage: 'None'})
    }

    const confirmationDialog = (
@@ -51,9 +51,9 @@ exports[`components/sidebarSidebar dont show hidden boards 1`] = `
 >
 <div
   class="version"
-  title="v7.9.0"
+  title="v7.9.7"
 >
-  v7.9.0
+  v7.9.7
 </div>
 </div>
 </div>
@@ -252,9 +252,9 @@ exports[`components/sidebarSidebar should assign default category if current boa
  (same v7.9.0 to v7.9.7 bump in this snapshot)
@@ -508,9 +508,9 @@ exports[`components/sidebarSidebar shouldnt do any category assignment is board
  (same version bump)
@@ -919,9 +919,9 @@ exports[`components/sidebarSidebar sidebar hidden 1`] = `
  (same version bump)
@@ -1213,9 +1213,9 @@ exports[`components/sidebarSidebar some categories hidden 1`] = `
  (same version bump)
@@ -37,7 +37,7 @@ class Constants {
     static readonly titleColumnId = '__title'
     static readonly badgesColumnId = '__badges'

-    static readonly versionString = '7.9.0'
+    static readonly versionString = '7.9.7'
     static readonly versionDisplayString = 'Mar 2023'

     static readonly archiveHelpPage = 'https://docs.mattermost.com/boards/migrate-to-boards.html'
@@ -35,11 +35,14 @@ const URLProperty = (props: PropertyProps): JSX.Element => {
         if (value !== (props.card.fields.properties[props.propertyTemplate?.id || ''] || '')) {
             mutator.changePropertyValue(props.board.id, props.card, props.propertyTemplate?.id || '', value)
         }
-    }, [props.card, props.propertyTemplate, value])
+    }, [props.board.id, props.card, props.propertyTemplate?.id, value])

     const saveTextPropertyRef = useRef<() => void>(saveTextProperty)
-    saveTextPropertyRef.current = saveTextProperty
+    if (props.readOnly) {
+        saveTextPropertyRef.current = () => null
+    } else {
+        saveTextPropertyRef.current = saveTextProperty
+    }

     useEffect(() => {
         return () => {
             saveTextPropertyRef.current && saveTextPropertyRef.current()
@@ -5,6 +5,8 @@ import React, {FC} from 'react'
 import 'emoji-mart/css/emoji-mart.css'
 import {Picker, BaseEmoji} from 'emoji-mart'

+import {Utils} from '../utils'
+
 import './emojiPicker.scss'

 import emojiSpirit from '../../static/emoji_spirit.png'
@@ -20,7 +22,7 @@ const EmojiPicker: FC<Props> = (props: Props): JSX.Element => (
     >
         <Picker
             onSelect={(emoji: BaseEmoji) => props.onSelect(emoji.native)}
-            backgroundImageFn={() => emojiSpirit}
+            backgroundImageFn={() => Utils.buildURL(emojiSpirit, true)}
         />
     </div>
 )