Merge branch 'develop' into 'multitenancy'

# Conflicts:
#   config/config.exs

Commit 5a3713c4a8: 361 changed files with 3007 additions and 1648 deletions.
.gitignore (vendored, 5 lines changed)

@@ -6,7 +6,7 @@
 /test/instance
 /test/uploads
 /.elixir_ls
-/test/fixtures/DSCN0010_tmp.jpg
+/test/fixtures/DSCN0010_tmp*
 /test/fixtures/test_tmp.txt
 /test/fixtures/image_tmp.jpg
 /test/tmp/

@@ -60,3 +60,6 @@ pleroma.iml
 *~
 *#
 *.swp
+
+archive-*
+.gitlab-ci-local
@@ -18,9 +18,7 @@ workflow:
     - if: $CI_COMMIT_BRANCH

 cache: &global_cache_policy
-  key:
-    files:
-      - mix.lock
+  key: $CI_JOB_IMAGE-$CI_COMMIT_SHORT_SHA
   paths:
     - deps
     - _build

@@ -80,13 +78,12 @@ build-1.13.4-otp-25:
   script:
     - mix compile --force

-build-1.15.8-otp-26:
+build-1.17.1-otp-26:
   extends:
     - .build_changes_policy
     - .using-ci-base
   stage: build
-  image: git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.15.8-otp-26
-  allow_failure: true
+  image: git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.17.1-otp-26
   script:
     - mix compile --force

@@ -136,7 +133,7 @@ unit-testing-1.13.4-otp-25:
   script: &testing_script
     - mix ecto.create
    - mix ecto.migrate
-    - mix test --cover --preload-modules
+    - mix pleroma.test_runner --cover --preload-modules
   coverage: '/^Line total: ([^ ]*%)$/'
   artifacts:
     reports:

@@ -144,34 +141,19 @@ unit-testing-1.13.4-otp-25:
       coverage_format: cobertura
       path: coverage.xml

-unit-testing-1.15.8-otp-26:
+unit-testing-1.17.1-otp-26:
   extends:
     - .build_changes_policy
     - .using-ci-base
   stage: test
-  image: git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.15.8-otp-26
-  allow_failure: true
+  image: git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.17.1-otp-26
   cache: *testing_cache_policy
   services: *testing_services
   script: *testing_script

-unit-testing-1.13.4-otp-25-erratic:
-  extends:
-    - .build_changes_policy
-    - .using-ci-base
-  stage: test
-  retry: 2
-  allow_failure: true
-  cache: *testing_cache_policy
-  services: *testing_services
-  script:
-    - mix ecto.create
-    - mix ecto.migrate
-    - mix test --only=erratic
-
-formatting-1.13:
+formatting-1.15:
   extends: .build_changes_policy
-  image: &formatting_elixir elixir:1.13-alpine
+  image: &formatting_elixir elixir:1.15-alpine
   stage: lint
   cache: *testing_cache_policy
   before_script: &current_bfr_script

@@ -183,7 +165,7 @@ formatting-1.13:
   script:
     - mix format --check-formatted

-cycles-1.13:
+cycles-1.15:
   extends: .build_changes_policy
   image: *formatting_elixir
   stage: lint
New changelog entries under changelog.d/ (entries marked ".skip" are intentionally empty marker files):

- 4167-strip-gps-info-in-png.fix: Ensure that StripLocation actually removes everything resembling GPS data from PNGs
- adminfe-logger.change: Elixir Logger configuration is no longer permitted through AdminFE and ConfigDB
- ci-cache.skip (empty)
- ci-elixir-1.16.skip (empty)
- ci-elixir-1.17.skip (empty)
- ci-erratic.skip (empty)
- commonapi-reordering.skip (empty)
- debug-logs.skip (empty)
- deps-poison-test-only.skip (empty)
- dialyzer5.skip (empty)
- docs-netbsd-update.change: Update and extend NetBSD installation docs
- elixir-1.15.fix: Elixir 1.15 compatibility
- fix-mrfs.add: Added a Mix task "pleroma.config fix_mrf_policies" which will remove erroneous MRF policies from ConfigDB.
- group-repeats.fix: Deactivated groups would still try to repeat a post.
- gun-logs-debug.skip (empty)
- gun_pool4.fix: Gun Connection Pool was not retrying to acquire a connection if the pool was full and stale connections were reclaimed
- handle-non-validate-delete-errors.change: Transmogrifier: handle non-validate errors on incoming Delete activities
- ingestion-queue.skip (empty)
- ldap-error-logging.change: Improve error logging when LDAP authentication fails.
- ldap.fix: Fix LDAP support
- metadata-provider-empty-post.fix: Fix OpenGraph and Twitter metadata providers when parsing objects with no content or summary fields.
- oban-cancel-badreq.change: Publisher jobs will not retry if the error received is a 400
- oban-cancel-federation.add: Deleting, Unfavoriting, Unrepeating, or Unreacting will cancel undelivered publishing jobs for the original activity.
- oban-cancel-poll-result.change: PollWorker jobs will not retry if the activity no longer exists.
- oban-cancel-receiverworker.change: Improved detection of unrecoverable errors for incoming federation jobs
- oban-cancel.change: Changed some jobs to return :cancel on unrecoverable errors that should not be retried
- oban-deprecated-discards.skip (empty)
- oban-fetcher-rejected.change: Discard Remote Fetcher jobs which errored due to an MRF rejection.
- oban-live_dashboard.add: Oban jobs can now be viewed in the Live Dashboard
- oban-rich-media-errors.fix: Prevent Rich Media backfill jobs from retrying in cases where it is likely they will fail again.
- oban-timeouts.change: Ensure all Oban jobs have timeouts defined
- oban-timeouts.skip (empty)
- oban-user-refresh-unique.fix: Oban jobs for refreshing users were not respecting the uniqueness setting
- rich-media-hardening.fix: Harden Rich Media parsing against very slow or malicious URLs
- rich_media_backfill.change: Rich Media backfilling is now an Oban job
- rich_media_oban.skip (empty)
- rich_media_stream_test.skip (empty)
- user-refresh-rework.skip (empty)
- user-refresh.change: User profile refreshes are now asynchronous
ci/elixir-1.16.3-otp-26/Dockerfile (new file, 8 lines)

@@ -0,0 +1,8 @@
+FROM elixir:1.16.3-otp-26
+
+# Single RUN statement, otherwise intermediate images are created
+# https://docs.docker.com/develop/develop-images/dockerfile_best-practices/#run
+RUN apt-get update &&\
+    apt-get install -y libmagic-dev cmake libimage-exiftool-perl ffmpeg &&\
+    mix local.hex --force &&\
+    mix local.rebar --force

ci/elixir-1.16.3-otp-26/build_and_push.sh (new executable file, 1 line)

@@ -0,0 +1 @@
+docker buildx build --platform linux/amd64,linux/arm64 -t git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.16.3-otp-26 --push .

ci/elixir-1.17.1-otp-26/Dockerfile (new file, 8 lines)

@@ -0,0 +1,8 @@
+FROM elixir:1.17.1-otp-26
+
+# Single RUN statement, otherwise intermediate images are created
+# https://docs.docker.com/develop/develop-images/dockerfile_best-practices/#run
+RUN apt-get update &&\
+    apt-get install -y libmagic-dev cmake libimage-exiftool-perl ffmpeg &&\
+    mix local.hex --force &&\
+    mix local.rebar --force

ci/elixir-1.17.1-otp-26/build_and_push.sh (new executable file, 1 line)

@@ -0,0 +1 @@
+docker buildx build --platform linux/amd64,linux/arm64 -t git.pleroma.social:5050/pleroma/pleroma/ci-base:elixir-1.17.1-otp-26 --push .
@@ -132,6 +132,8 @@ config :pleroma, Pleroma.Web.Endpoint,
   ]

 # Configures Elixir's Logger
+config :logger, backends: [:console]
+
 config :logger, :console,
   level: :debug,
   format: "\n$time $metadata[$level] $message\n",

@@ -449,7 +451,7 @@ config :pleroma, :rich_media,
     Pleroma.Web.RichMedia.Parsers.TwitterCard,
     Pleroma.Web.RichMedia.Parsers.OEmbed
   ],
-  failure_backoff: 60_000,
+  timeout: 5_000,
   ttl_setters: [
     Pleroma.Web.RichMedia.Parser.TTL.AwsSignedUrl,
     Pleroma.Web.RichMedia.Parser.TTL.Opengraph

@@ -581,6 +583,8 @@ config :pleroma, Pleroma.User,
   ],
   email_blacklist: []

+# The Pruner :max_age must be longer than Worker :unique
+# value or it cannot enforce uniqueness.
 config :pleroma, Oban,
   repo: Pleroma.Repo,
   log: false,

@@ -588,15 +592,14 @@ config :pleroma, Oban,
     activity_expiration: 10,
     federator_incoming: 5,
     federator_outgoing: 5,
-    ingestion_queue: 50,
     web_push: 50,
     transmogrifier: 20,
-    background: 5,
+    background: 20,
     search_indexing: [limit: 10, paused: true],
-    slow: 1,
+    slow: 5,
     check_domain_resolve: 1
   ],
-  plugins: [Oban.Plugins.Pruner],
+  plugins: [{Oban.Plugins.Pruner, max_age: 900}],
   crontab: [
     {"0 0 * * 0", Pleroma.Workers.Cron.CheckDomainsResolveWorker},
     {"0 0 * * 0", Pleroma.Workers.Cron.DigestEmailsWorker},
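The new comment in the Oban block ties the Pruner retention to job uniqueness: deduplication only works while a completed job is still in the table, so any worker's `unique` window has to stay below the Pruner's `max_age`. A minimal sketch of that relationship, with a hypothetical worker name and an illustrative `unique` period (neither is taken from this diff):

```elixir
defmodule MyApp.ExampleUniqueWorker do
  # Hypothetical worker, for illustration only. With
  #   config :pleroma, Oban, plugins: [{Oban.Plugins.Pruner, max_age: 900}]
  # completed jobs live for 900 seconds, so a 600-second uniqueness window
  # can still be enforced; a window longer than 900 seconds could not be.
  use Oban.Worker, queue: :background, unique: [period: 600]

  @impl Oban.Worker
  def perform(%Oban.Job{args: args}) do
    IO.inspect(args, label: "processing")
    :ok
  end
end
```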
@@ -1245,79 +1245,6 @@ config :pleroma, :config_description, [
       }
     ]
   },
-  %{
-    group: :logger,
-    type: :group,
-    description: "Logger-related settings",
-    children: [
-      %{
-        key: :backends,
-        type: [:atom, :tuple, :module],
-        description:
-          "Where logs will be sent, :console - send logs to stdout, { ExSyslogger, :ex_syslogger } - to syslog, Quack.Logger - to Slack.",
-        suggestions: [:console, {ExSyslogger, :ex_syslogger}]
-      }
-    ]
-  },
-  %{
-    group: :logger,
-    type: :group,
-    key: :ex_syslogger,
-    label: "ExSyslogger",
-    description: "ExSyslogger-related settings",
-    children: [
-      %{
-        key: :level,
-        type: {:dropdown, :atom},
-        description: "Log level",
-        suggestions: [:debug, :info, :warning, :error]
-      },
-      %{
-        key: :ident,
-        type: :string,
-        description:
-          "A string that's prepended to every message, and is typically set to the app name",
-        suggestions: ["pleroma"]
-      },
-      %{
-        key: :format,
-        type: :string,
-        description: "Default: \"$date $time [$level] $levelpad$node $metadata $message\"",
-        suggestions: ["$metadata[$level] $message"]
-      },
-      %{
-        key: :metadata,
-        type: {:list, :atom},
-        suggestions: [:request_id]
-      }
-    ]
-  },
-  %{
-    group: :logger,
-    type: :group,
-    key: :console,
-    label: "Console Logger",
-    description: "Console logger settings",
-    children: [
-      %{
-        key: :level,
-        type: {:dropdown, :atom},
-        description: "Log level",
-        suggestions: [:debug, :info, :warning, :error]
-      },
-      %{
-        key: :format,
-        type: :string,
-        description: "Default: \"$date $time [$level] $levelpad$node $metadata $message\"",
-        suggestions: ["$metadata[$level] $message"]
-      },
-      %{
-        key: :metadata,
-        type: {:list, :atom},
-        suggestions: [:request_id]
-      }
-    ]
-  },
   %{
     group: :pleroma,
     key: :frontend_configurations,
@@ -2191,11 +2118,11 @@ config :pleroma, :config_description, [
         ]
       },
       %{
-        key: :failure_backoff,
+        key: :timeout,
         type: :integer,
         description:
-          "Amount of milliseconds after request failure, during which the request will not be retried.",
-        suggestions: [60_000]
+          "Amount of milliseconds after which the HTTP request is forcibly terminated.",
+        suggestions: [5_000]
       }
     ]
   },
@@ -36,7 +36,7 @@ config :pleroma, Pleroma.Emails.Mailer, adapter: Swoosh.Adapters.Local
 # different ports.

 # Do not include timestamps in development logs
-config :logger, :console, format: "$metadata[$level] $message\n"
+config :logger, Logger.Backends.Console, format: "$metadata[$level] $message\n"

 # Set a higher stacktrace during development. Avoid configuring such
 # in production as building large stacktraces may be expensive.
@@ -20,6 +20,7 @@ config :pleroma, Pleroma.Web.Endpoint,
 config :phoenix, serve_endpoints: true

 # Do not print debug messages in production
+config :logger, Logger.Backends.Console, level: :info
 config :logger, :console, level: :info
 config :logger, :ex_syslogger, level: :info

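For context on the dev and prod changes above: with the `logger_backends` package (added to the Mix application list later in this diff), console options live under the `Logger.Backends.Console` key instead of the legacy `:console` key. A sketch of the equivalent configuration, assuming that package is present:

```elixir
import Config

# Route logs through the console backend provided by :logger_backends ...
config :logger, backends: [Logger.Backends.Console]

# ... and configure it where the legacy :console key used to be configured.
config :logger, Logger.Backends.Console,
  level: :info,
  format: "$metadata[$level] $message\n"
```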
@@ -49,7 +49,8 @@ config :pleroma, Pleroma.Repo,
   hostname: System.get_env("DB_HOST") || "localhost",
   port: System.get_env("DB_PORT") || "5432",
   pool: Ecto.Adapters.SQL.Sandbox,
-  pool_size: System.schedulers_online() * 2
+  pool_size: System.schedulers_online() * 2,
+  log: false

 config :pleroma, :dangerzone, override_repo_pool_size: true

@@ -177,11 +178,16 @@ config :pleroma, Pleroma.Application,
   streamer_registry: false,
   test_http_pools: true

+config :pleroma, Pleroma.Web.Streaming, sync_streaming: true
+
 config :pleroma, Pleroma.Uploaders.Uploader, timeout: 1_000

 config :pleroma, Pleroma.Emoji.Loader, test_emoji: true

-config :pleroma, Pleroma.Web.RichMedia.Backfill, provider: Pleroma.Web.RichMedia.Backfill
+config :pleroma, Pleroma.Web.RichMedia.Backfill,
+  stream_out: Pleroma.Web.ActivityPub.ActivityPubMock
+
+config :pleroma, Pleroma.Web.Plugs.HTTPSecurityPlug, enable: false

 if File.exists?("./config/test.secret.exs") do
   import_config "test.secret.exs"
@@ -154,4 +154,19 @@ This forcibly removes all saved values in the database.

 ```sh
 mix pleroma.config [--force] reset
 ```
+
+## Remove invalid MRF modules from the database
+
+This forcibly removes any enabled MRF that does not exist and will fix the ability of the instance to start.
+
+=== "OTP"
+
+    ```sh
+    ./bin/pleroma_ctl config fix_mrf_policies
+    ```
+
+=== "From Source"
+
+    ```sh
+    mix pleroma.config fix_mrf_policies
+    ```
@@ -436,7 +436,7 @@ config :pleroma, Pleroma.Web.MediaProxy.Invalidation.Http,
 * `ignore_hosts`: list of hosts which will be ignored by the metadata parser. For example `["accounts.google.com", "xss.website"]`, defaults to `[]`.
 * `ignore_tld`: list of TLDs (top-level domains) which will be ignored when parsing metadata. The default is `["local", "localdomain", "lan"]`.
 * `parsers`: list of Rich Media parsers.
-* `failure_backoff`: Amount of milliseconds after request failure, during which the request will not be retried.
+* `timeout`: Amount of milliseconds after which the HTTP request is forcibly terminated.

 ## HTTP server
@@ -853,7 +853,7 @@ config :logger,
   backends: [{ExSyslogger, :ex_syslogger}]

 config :logger, :ex_syslogger,
-  level: :warn
+  level: :warning
 ```

 Another example, keeping console output and adding the pid to syslog output:

@@ -862,7 +862,7 @@ config :logger,
   backends: [:console, {ExSyslogger, :ex_syslogger}]

 config :logger, :ex_syslogger,
-  level: :warn,
+  level: :warning,
   option: [:pid, :ndelay]
 ```
@@ -1,7 +1,7 @@
 ## Required dependencies

 * PostgreSQL >=11.0
-* Elixir >=1.13.0 <1.15
+* Elixir >=1.13.0 <1.17
 * Erlang OTP >=22.2.0 (supported: <27)
 * git
 * file / libmagic
@@ -2,14 +2,41 @@

 {! backend/installation/generic_dependencies.include !}

-## Installing software used in this guide
+# Installation options
+
+Currently there are two options available for NetBSD: manual installation (from source) or using the experimental package from [pkgsrc-wip](https://github.com/NetBSD/pkgsrc-wip/tree/master/pleroma).
+
+The WIP package can be installed via pkgsrc and can be cross-compiled for easier binary distribution. A source installation will most probably be restricted to a single machine.
+
+## pkgsrc installation
+
+The WIP package creates a Mix.Release (similar to how Docker images are built) but doesn't bundle the Erlang runtime, listing it as a dependency instead. This allows for easier and more modular installations, especially on weaker machines. Currently this method also does not support all features of the `pleroma_ctl` command (like changing the installation type or managing frontends), as NetBSD is not yet a supported binary flavour of Pleroma's CI.
+
+In any case, you can install it the same way as any other `pkgsrc-wip` package:
+
+```
+cd /usr/pkgsrc
+git clone --depth 1 git://wip.pkgsrc.org/pkgsrc-wip.git wip
+cp -rf wip/pleroma www
+cp -rf wip/libvips graphics
+cd /usr/pkgsrc/www/pleroma
+bmake && bmake install
+```
+
+Use `bmake package` to create a binary package. This can come in especially handy if you're targeting embedded or low-power systems and are cross-compiling on a more powerful machine.
+
+> Note: Elixir has an [endianness bug](https://github.com/elixir-lang/elixir/issues/2785) which requires it to be compiled on a machine with the same endianness. In other words, a package cross-compiled on amd64 (little endian) won't work on powerpc or sparc machines (big endian). While _in theory™_ nothing catastrophic should happen, one can see that, for example, regexes won't work properly. Some distributions just strip this warning away, so it doesn't bother the users... anyway, you've been warned.
+
+## Source installation

 pkgin should have been installed by the NetBSD installer if you selected
-the right options. If it isn't installed, install it using pkg_add.
+the right options. If it isn't installed, install it using `pkg_add`.

 Note that `postgresql11-contrib` is needed for the Postgres extensions
 Pleroma uses.

+> Note: you can use modern versions of PostgreSQL. In this case, just use `postgresql16-contrib` and so on.
+
 The `mksh` shell is needed to run the Elixir `mix` script.

 `# pkgin install acmesh elixir git-base git-docs mksh nginx postgresql11-server postgresql11-client postgresql11-contrib sudo ffmpeg4 ImageMagick`

@@ -29,29 +56,6 @@ shells/mksh
 www/nginx
 ```

-Copy the rc.d scripts to the right directory:
-
-```
-# cp /usr/pkg/share/examples/rc.d/nginx /usr/pkg/share/examples/rc.d/pgsql /etc/rc.d
-```
-
-Add nginx and Postgres to `/etc/rc.conf`:
-
-```
-nginx=YES
-pgsql=YES
-```
-
-## Configuring postgres
-
-First, run `# /etc/rc.d/pgsql start`. Then, `$ sudo -Hu pgsql -g pgsql createdb`.
-
-### Install media / graphics packages (optional, see [`docs/installation/optional/media_graphics_packages.md`](../installation/optional/media_graphics_packages.md))
-
-`# pkgin install ImageMagick ffmpeg4 p5-Image-ExifTool`
-
 ## Configuring Pleroma

 Create a user for Pleroma:

@@ -68,41 +72,98 @@ $ cd /home/pleroma
 $ git clone -b stable https://git.pleroma.social/pleroma/pleroma.git
 ```

-Configure Pleroma. Note that you need a domain name at this point:
+Get deps and compile:

 ```
 $ cd /home/pleroma/pleroma
+$ export MIX_ENV=prod
 $ mix deps.get
-$ MIX_ENV=prod mix pleroma.instance gen # You will be asked a few questions here.
+$ mix compile
 ```

-Since Postgres is configured, we can now initialize the database. There should
-now be a file in `config/setup_db.psql` that makes this easier. Edit it, and
-*change the password* to a password of your choice. Make sure it is secure, since
-it'll be protecting your database. Now initialize the database:
+## Install media / graphics packages (optional, see [`docs/installation/optional/media_graphics_packages.md`](../installation/optional/media_graphics_packages.md))
+
+`# pkgin install ImageMagick ffmpeg4 p5-Image-ExifTool`
+
+or via pkgsrc:
+
+```
+graphics/p5-Image-ExifTool
+graphics/ImageMagick
+multimedia/ffmpeg4
+```
+
+# Configuration
+
+## Understanding $PREFIX
+
+From now on, you may encounter the `$PREFIX` variable in the paths. This variable indicates your current local pkgsrc prefix. Usually it's `/usr/pkg` unless you configured it otherwise. Translated into pkgsrc's lingo, it's called `LOCALBASE`, which essentially means the same thing. You may want to set it up for your local shell session (this uses `mksh`, which should already be installed as one of the required dependencies):
+
+```
+$ export PREFIX=$(pkg_info -Q LOCALBASE mksh)
+$ echo $PREFIX
+/usr/pkg
+```
+
+## Setting up your instance
+
+Now, you need to configure your instance. During this initial configuration, you will be asked some questions about your server. You will need a domain name at this point; it doesn't have to be deployed, but changing it later will be very cumbersome.
+
+If you've installed via pkgsrc, `pleroma_ctl` should already be in your `PATH`; if you've installed from source, it's located at `/home/pleroma/pleroma/release/bin/pleroma_ctl`.
+
+```
+$ su -l pleroma
+$ pleroma_ctl instance gen --output $PREFIX/etc/pleroma/config.exs --output-psql /tmp/setup_db.psql
+```
+
+During installation, you will be asked about static and upload directories. Don't forget to create them and update permissions:
+
+```
+mkdir -p /var/lib/pleroma/uploads
+chown -R pleroma:pleroma /var/lib/pleroma
+```
+
+## Setting up the database
+
+First, run `# /etc/rc.d/pgsql start`. Then, `$ sudo -Hu pgsql -g pgsql createdb`.
+
+We can now initialize the database. You'll need to edit the generated SQL file from the previous step. It's located at `/tmp/setup_db.psql`.
+
+Edit this file, and *change the password* to a password of your choice. Make sure it is secure, since
+it'll be protecting your database. Now initialize the database:

 ```
-$ sudo -Hu pgsql -g pgsql psql -f config/setup_db.psql
+$ sudo -Hu pgsql -g pgsql psql -f /tmp/setup_db.psql
 ```

 Postgres allows connections from all users without a password by default. To
-fix this, edit `/usr/pkg/pgsql/data/pg_hba.conf`. Change every `trust` to
+fix this, edit `$PREFIX/pgsql/data/pg_hba.conf`. Change every `trust` to
 `password`.

 Once this is done, restart Postgres with `# /etc/rc.d/pgsql restart`.

 Run the database migrations.

+### pkgsrc installation
+
+```
+pleroma_ctl migrate
+```
+
+### Source installation
+
 You will need to do this whenever you update with `git pull`:

 ```
 $ cd /home/pleroma/pleroma
 $ MIX_ENV=prod mix ecto.migrate
 ```

 ## Configuring nginx

 Install the example configuration file
-`/home/pleroma/pleroma/installation/pleroma.nginx` to
-`/usr/pkg/etc/nginx.conf`.
+(`$PREFIX/share/examples/pleroma/pleroma.nginx` or `/home/pleroma/pleroma/installation/pleroma.nginx`) to
+`$PREFIX/etc/nginx.conf`.

 Note that it will need to be wrapped in a `http {}` block. You should add
 settings for the nginx daemon outside of the http block, for example:

@@ -176,27 +237,45 @@ Let's add auto-renewal to `/etc/daily.local`
 --stateless
 ```

-## Creating a startup script for Pleroma
+## Autostart

-Copy the startup script to the correct location and make sure it's executable:
+For a properly functioning instance, you will need pleroma (backend service), nginx (reverse proxy) and postgresql (database) services running. There's no requirement for them to reside on the same machine, but you have to provide autostart for each of them.
+
+### nginx
+
+```
+# cp $PREFIX/share/examples/rc.d/nginx /etc/rc.d
+# echo "nginx=YES" >> /etc/rc.conf
+```
+
+### postgresql
+
+```
+# cp $PREFIX/share/examples/rc.d/pgsql /etc/rc.d
+# echo "pgsql=YES" >> /etc/rc.conf
+```
+
+### pleroma
+
+First, copy the script (pkgsrc variant):
+
+```
+# cp $PREFIX/share/examples/pleroma/pleroma.rc /etc/rc.d/pleroma
+```
+
+or the source variant:

 ```
 # cp /home/pleroma/pleroma/installation/netbsd/rc.d/pleroma /etc/rc.d/pleroma
 # chmod +x /etc/rc.d/pleroma
 ```

-Add the following to `/etc/rc.conf`:
+Then, add the following to `/etc/rc.conf`:

 ```
 pleroma=YES
 pleroma_home="/home/pleroma"
 pleroma_user="pleroma"
 ```

-Run `# /etc/rc.d/pleroma start` to start Pleroma.
-
 ## Conclusion

+Run `# /etc/rc.d/pleroma start` to start Pleroma.
 Restart nginx with `# /etc/rc.d/nginx restart` and you should be up and running.

 Make sure your time is in sync, or other instances will receive your posts with
@@ -1,11 +1,14 @@
 #!/bin/sh
 # PROVIDE: pleroma
-# REQUIRE: DAEMON pgsql
+# REQUIRE: DAEMON pgsql nginx
+
+if [ -f /etc/rc.subr ]; then
+    . /etc/rc.subr
+fi

 pleroma_home="/home/pleroma"
 pleroma_user="pleroma"

 name="pleroma"
 rcvar=${name}
 command="/usr/pkg/bin/elixir"

@@ -19,10 +22,10 @@ pleroma_env="HOME=${pleroma_home} MIX_ENV=prod"
 check_pidfile()
 {
     pid=$(pgrep -U "${pleroma_user}" /bin/beam.smp$)
-    echo -n "${pid}"
+    printf '%s' "${pid}"
 }

-if [ -f /etc/rc.subr -a -d /etc/rc.d -a -f /etc/rc.d/DAEMON ]; then
+if [ -f /etc/rc.subr ] && [ -d /etc/rc.d ] && [ -f /etc/rc.d/DAEMON ]; then
     # newer NetBSD
     load_rc_config ${name}
     run_rc_command "$1"

@@ -39,7 +42,7 @@ else
     stop)
         echo "Stopping ${name}."
         check_pidfile
-        ! [ -n ${pid} ] && kill ${pid}
+        ! [ -n "${pid}" ] && kill "${pid}"
         ;;

     restart)
@@ -14,7 +14,8 @@ defmodule Mix.Pleroma do
     :swoosh,
     :timex,
     :fast_html,
-    :oban
+    :oban,
+    :logger_backends
   ]
   @cachex_children ["object", "user", "scrubber", "web_resp"]
   @doc "Common functions to be reused in mix tasks"
@@ -205,6 +205,35 @@ defmodule Mix.Tasks.Pleroma.Config do
     end
   end

+  # Removes any policies that are not a real module
+  # as they will prevent the server from starting
+  def run(["fix_mrf_policies"]) do
+    check_configdb(fn ->
+      start_pleroma()
+
+      group = :pleroma
+      key = :mrf
+
+      %{value: value} =
+        group
+        |> ConfigDB.get_by_group_and_key(key)
+
+      policies =
+        Keyword.get(value, :policies, [])
+        |> Enum.filter(&is_atom(&1))
+        |> Enum.filter(fn mrf ->
+          case Code.ensure_compiled(mrf) do
+            {:module, _} -> true
+            {:error, _} -> false
+          end
+        end)
+
+      value = Keyword.put(value, :policies, policies)
+
+      ConfigDB.update_or_create(%{group: group, key: key, value: value})
+    end)
+  end
+
   @spec migrate_to_db(Path.t() | nil) :: any()
   def migrate_to_db(file_path \\ nil) do
     with :ok <- Pleroma.Config.DeprecationWarnings.warn() do
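The task keeps only the policies for which `Code.ensure_compiled/1` can find a module, so stale entries left behind by removed or renamed MRF policies are dropped. An illustration of that check (the second module name is a made-up stand-in for a policy that no longer exists):

```elixir
iex> Code.ensure_compiled(Pleroma.Web.ActivityPub.MRF.SimplePolicy)
{:module, Pleroma.Web.ActivityPub.MRF.SimplePolicy}

iex> Code.ensure_compiled(:"Elixir.Pleroma.Web.ActivityPub.MRF.RemovedPolicy")
{:error, :nofile}
```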
@@ -351,7 +351,7 @@ defmodule Mix.Tasks.Pleroma.Database do
       )
     end

-    shell_info('Done.')
+    shell_info(~c"Done.")
   end
 end
lib/mix/tasks/pleroma/test_runner.ex (new file, 25 lines)

@@ -0,0 +1,25 @@
+defmodule Mix.Tasks.Pleroma.TestRunner do
+  @shortdoc "Retries tests once if they fail"
+
+  use Mix.Task
+
+  def run(args \\ []) do
+    case System.cmd("mix", ["test"] ++ args, into: IO.stream(:stdio, :line)) do
+      {_, 0} ->
+        :ok
+
+      _ ->
+        retry(args)
+    end
+  end
+
+  def retry(args) do
+    case System.cmd("mix", ["test", "--failed"] ++ args, into: IO.stream(:stdio, :line)) do
+      {_, 0} ->
+        :ok
+
+      _ ->
+        exit(1)
+    end
+  end
+end
@@ -14,7 +14,6 @@ defmodule Pleroma.Application do
   @name Mix.Project.config()[:name]
   @version Mix.Project.config()[:version]
   @repository Mix.Project.config()[:source_url]
   @compile_env Mix.env()

   def name, do: @name
   def version, do: @version

@@ -53,7 +52,7 @@ defmodule Pleroma.Application do
     Pleroma.Config.Oban.warn()
     Config.DeprecationWarnings.warn()

-    if @compile_env != :test do
+    if Config.get([Pleroma.Web.Plugs.HTTPSecurityPlug, :enable], true) do
       Pleroma.Web.Plugs.HTTPSecurityPlug.warn_if_disabled()
     end

@@ -241,10 +241,9 @@ defmodule Pleroma.ApplicationRequirements do

     missing_mrfs =
       Enum.reduce(mrfs, [], fn x, acc ->
-        if Code.ensure_compiled(x) do
-          acc
-        else
-          acc ++ [x]
+        case Code.ensure_compiled(x) do
+          {:module, _} -> acc
+          {:error, _} -> acc ++ [x]
         end
       end)

@@ -1,5 +1,5 @@
 # Pleroma: A lightweight social networking server
-# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
+# Copyright © 2017-2023 Pleroma Authors <https://pleroma.social/>
 # SPDX-License-Identifier: AGPL-3.0-only

 defmodule Pleroma.Config.TransferTask do

@@ -44,14 +44,9 @@ defmodule Pleroma.Config.TransferTask do
     with {_, true} <- {:configurable, Config.get(:configurable_from_database)} do
       # We need to restart applications for loaded settings take effect

-      {logger, other} =
+      settings =
         (Repo.all(ConfigDB) ++ deleted_settings)
         |> Enum.map(&merge_with_default/1)
-        |> Enum.split_with(fn {group, _, _, _} -> group in [:logger] end)
-
-      logger
-      |> Enum.sort()
-      |> Enum.each(&configure/1)

       started_applications = Application.started_applications()

@@ -64,7 +59,7 @@ defmodule Pleroma.Config.TransferTask do
         [:pleroma | reject]
       end

-    other
+    settings
     |> Enum.map(&update/1)
     |> Enum.uniq()
     |> Enum.reject(&(&1 in reject))

@@ -102,38 +97,6 @@ defmodule Pleroma.Config.TransferTask do
     {group, key, value, merged}
   end

-  # change logger configuration in runtime, without restart
-  defp configure({_, :backends, _, merged}) do
-    # removing current backends
-    Enum.each(Application.get_env(:logger, :backends), &Logger.remove_backend/1)
-
-    Enum.each(merged, &Logger.add_backend/1)
-
-    :ok = update_env(:logger, :backends, merged)
-  end
-
-  defp configure({_, key, _, merged}) when key in [:console, :ex_syslogger] do
-    merged =
-      if key == :console do
-        put_in(merged[:format], merged[:format] <> "\n")
-      else
-        merged
-      end
-
-    backend =
-      if key == :ex_syslogger,
-        do: {ExSyslogger, :ex_syslogger},
-        else: key
-
-    Logger.configure_backend(backend, merged)
-    :ok = update_env(:logger, key, merged)
-  end
-
-  defp configure({_, key, _, merged}) do
-    Logger.configure([{key, merged}])
-    :ok = update_env(:logger, key, merged)
-  end
-
   defp update({group, key, value, merged}) do
     try do
       :ok = update_env(group, key, merged)
@@ -165,8 +165,7 @@ defmodule Pleroma.ConfigDB do
       {:pleroma, :ecto_repos},
       {:mime, :types},
       {:cors_plug, [:max_age, :methods, :expose, :headers]},
-      {:swarm, :node_blacklist},
-      {:logger, :backends}
+      {:swarm, :node_blacklist}
     ]

     Enum.any?(full_key_update, fn

@@ -385,7 +384,12 @@ defmodule Pleroma.ConfigDB do

   @spec module_name?(String.t()) :: boolean()
   def module_name?(string) do
-    Regex.match?(~r/^(Pleroma|Phoenix|Tesla|Ueberauth|Swoosh)\./, string) or
-      string in ["Oban", "Ueberauth", "ExSyslogger", "ConcurrentLimiter"]
+    if String.contains?(string, ".") do
+      [name | _] = String.split(string, ".", parts: 2)
+
+      name in ~w[Pleroma Phoenix Tesla Ueberauth Swoosh Logger LoggerBackends]
+    else
+      string in ~w[Oban Ueberauth ExSyslogger ConcurrentLimiter]
+    end
   end
 end
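With the new implementation above, a dotted name is accepted when its first segment is one of the allowed prefixes (now including `Logger` and `LoggerBackends`), and a bare name only when it is in the short allow-list. Expected behaviour, shown for illustration:

```elixir
iex> Pleroma.ConfigDB.module_name?("Pleroma.Upload")
true

iex> Pleroma.ConfigDB.module_name?("LoggerBackends.Console")
true

iex> Pleroma.ConfigDB.module_name?("Oban")
true

iex> Pleroma.ConfigDB.module_name?("Quack.Logger")
false
```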
@@ -12,6 +12,8 @@ defmodule Pleroma.Conversation.Participation do
   import Ecto.Changeset
   import Ecto.Query

+  @type t :: %__MODULE__{}
+
   schema "conversation_participations" do
     belongs_to(:user, User, type: FlakeId.Ecto.CompatType)
     belongs_to(:conversation, Conversation)
@@ -416,10 +416,10 @@ defmodule Pleroma.Emoji.Pack do
   end

   defp create_archive_and_cache(pack, hash) do
-    files = ['pack.json' | Enum.map(pack.files, fn {_, file} -> to_charlist(file) end)]
+    files = [~c"pack.json" | Enum.map(pack.files, fn {_, file} -> to_charlist(file) end)]

     {:ok, {_, result}} =
-      :zip.zip('#{pack.name}.zip', files, [:memory, cwd: to_charlist(pack.path)])
+      :zip.zip(~c"#{pack.name}.zip", files, [:memory, cwd: to_charlist(pack.path)])

     ttl_per_file = Pleroma.Config.get!([:emoji, :shared_pack_cache_seconds_per_file])
     overall_ttl = :timer.seconds(ttl_per_file * Enum.count(files))

@@ -586,7 +586,7 @@ defmodule Pleroma.Emoji.Pack do
     with :ok <- File.mkdir_p!(local_pack.path) do
       files = Enum.map(remote_pack["files"], fn {_, path} -> to_charlist(path) end)
       # Fallback cannot contain a pack.json file
-      files = if pack_info[:fallback], do: files, else: ['pack.json' | files]
+      files = if pack_info[:fallback], do: files, else: [~c"pack.json" | files]

       :zip.unzip(archive, cwd: to_charlist(local_pack.path), file_list: files)
     end
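The quoting change here (and in the backup module later in this diff) is purely syntactic: the `~c` sigil builds the same Erlang charlist as the old single-quote literal, it is just the spelling newer Elixir versions prefer. For example:

```elixir
iex> ~c"pack.json" == 'pack.json'
true

iex> is_list(~c"pack.json")
true
```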
@@ -199,8 +199,8 @@ defmodule Pleroma.FollowingRelationship do
     |> preload([:follower])
     |> Repo.all()
     |> Enum.map(fn following_relationship ->
-      Pleroma.Web.CommonAPI.follow(following_relationship.follower, target)
-      Pleroma.Web.CommonAPI.unfollow(following_relationship.follower, origin)
+      Pleroma.Web.CommonAPI.follow(target, following_relationship.follower)
+      Pleroma.Web.CommonAPI.unfollow(origin, following_relationship.follower)
     end)
     |> case do
       [] ->
@@ -43,10 +43,6 @@ defmodule Pleroma.Frontend do
       {:download_or_unzip, _} ->
         Logger.info("Could not download or unzip the frontend")
         {:error, "Could not download or unzip the frontend"}
-
-      _e ->
-        Logger.info("Could not install the frontend")
-        {:error, "Could not install the frontend"}
     end
   end

@@ -9,7 +9,7 @@ defmodule Pleroma.Gun.ConnectionPool.Reclaimer do

   def start_monitor do
     pid =
-      case GenServer.start_link(__MODULE__, [], name: {:via, Registry, {registry(), "reclaimer"}}) do
+      case GenServer.start(__MODULE__, [], name: {:via, Registry, {registry(), "reclaimer"}}) do
        {:ok, pid} ->
          pid

@@ -5,6 +5,9 @@
 defmodule Pleroma.Gun.ConnectionPool.WorkerSupervisor do
   @moduledoc "Supervisor for pool workers. Does not do anything except enforce max connection limit"

+  alias Pleroma.Config
+  alias Pleroma.Gun.ConnectionPool.Worker
+
   use DynamicSupervisor

   def start_link(opts) do

@@ -14,21 +17,28 @@ defmodule Pleroma.Gun.ConnectionPool.WorkerSupervisor do
   def init(_opts) do
     DynamicSupervisor.init(
       strategy: :one_for_one,
-      max_children: Pleroma.Config.get([:connections_pool, :max_connections])
+      max_children: Config.get([:connections_pool, :max_connections])
     )
   end

-  def start_worker(opts, last_attempt \\ false) do
-    case DynamicSupervisor.start_child(__MODULE__, {Pleroma.Gun.ConnectionPool.Worker, opts}) do
-      {:error, :max_children} ->
-        funs = [fn -> last_attempt end, fn -> match?(:error, free_pool()) end]
+  def start_worker(opts, last_attempt \\ false)

-        if Enum.any?(funs, fn fun -> fun.() end) do
-          :telemetry.execute([:pleroma, :connection_pool, :provision_failure], %{opts: opts})
-          {:error, :pool_full}
-        else
-          start_worker(opts, true)
-        end
+  def start_worker(opts, true) do
+    case DynamicSupervisor.start_child(__MODULE__, {Worker, opts}) do
+      {:error, :max_children} ->
+        :telemetry.execute([:pleroma, :connection_pool, :provision_failure], %{opts: opts})
+        {:error, :pool_full}
+
+      res ->
+        res
+    end
+  end
+
+  def start_worker(opts, false) do
+    case DynamicSupervisor.start_child(__MODULE__, {Worker, opts}) do
+      {:error, :max_children} ->
+        free_pool()
+        start_worker(opts, true)

       res ->
         res
@@ -68,7 +68,9 @@ defmodule Pleroma.HTTP do

     adapter = Application.get_env(:tesla, :adapter)

-    client = Tesla.client(adapter_middlewares(adapter), adapter)
+    extra_middleware = options[:tesla_middleware] || []
+
+    client = Tesla.client(adapter_middlewares(adapter, extra_middleware), adapter)

     maybe_limit(
       fn ->

@@ -102,20 +104,21 @@ defmodule Pleroma.HTTP do
     fun.()
   end

-  defp adapter_middlewares(Tesla.Adapter.Gun) do
-    [Tesla.Middleware.FollowRedirects, Pleroma.Tesla.Middleware.ConnectionPool]
+  defp adapter_middlewares(Tesla.Adapter.Gun, extra_middleware) do
+    [Tesla.Middleware.FollowRedirects, Pleroma.Tesla.Middleware.ConnectionPool] ++
+      extra_middleware
   end

-  defp adapter_middlewares({Tesla.Adapter.Finch, _}) do
-    [Tesla.Middleware.FollowRedirects]
+  defp adapter_middlewares({Tesla.Adapter.Finch, _}, extra_middleware) do
+    [Tesla.Middleware.FollowRedirects] ++ extra_middleware
   end

-  defp adapter_middlewares(_) do
+  defp adapter_middlewares(_, extra_middleware) do
     if Pleroma.Config.get(:env) == :test do
       # Emulate redirects in test env, which are handled by adapters in other environments
       [Tesla.Middleware.FollowRedirects]
     else
-      []
+      extra_middleware
     end
   end
 end
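With the `tesla_middleware` option threaded through, callers can attach extra Tesla middleware to a single request. A hedged usage sketch; the URL and the choice of `Tesla.Middleware.Timeout` are illustrative, not taken from this diff:

```elixir
# Per-request middleware is appended to the adapter's default stack.
Pleroma.HTTP.get(
  "https://example.com/.well-known/nodeinfo",
  [],
  tesla_middleware: [{Tesla.Middleware.Timeout, timeout: 5_000}]
)
```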
@@ -10,7 +10,7 @@ defmodule Pleroma.Instances.Instance do
   alias Pleroma.Maps
   alias Pleroma.Repo
   alias Pleroma.User
-  alias Pleroma.Workers.BackgroundWorker
+  alias Pleroma.Workers.DeleteWorker

   use Ecto.Schema

@@ -297,7 +297,7 @@ defmodule Pleroma.Instances.Instance do
   all of those users' activities and notifications.
   """
   def delete_users_and_activities(host) when is_binary(host) do
-    BackgroundWorker.enqueue("delete_instance", %{"host" => host})
+    DeleteWorker.enqueue("delete_instance", %{"host" => host})
   end

   def perform(:delete_instance, host) when is_binary(host) do
@@ -734,7 +734,7 @@ defmodule Pleroma.Notification do

   def mark_as_read?(activity, target_user) do
     user = Activity.user_actor(activity)
-    User.mutes_user?(target_user, user) || CommonAPI.thread_muted?(target_user, activity)
+    User.mutes_user?(target_user, user) || CommonAPI.thread_muted?(activity, target_user)
   end

   def for_user_and_activity(user, activity) do
@@ -99,21 +99,24 @@ defmodule Pleroma.Object do
   def get_by_id(nil), do: nil
   def get_by_id(id), do: Repo.get(Object, id)

   @spec get_by_id_and_maybe_refetch(integer(), list()) :: Object.t() | nil
   def get_by_id_and_maybe_refetch(id, opts \\ []) do
-    %{updated_at: updated_at} = object = get_by_id(id)
-
-    if opts[:interval] &&
-         NaiveDateTime.diff(NaiveDateTime.utc_now(), updated_at) > opts[:interval] do
-      case Fetcher.refetch_object(object) do
-        {:ok, %Object{} = object} ->
-          object
-
-        e ->
-          Logger.error("Couldn't refresh #{object.data["id"]}:\n#{inspect(e)}")
-          object
-      end
-    else
-      object
-    end
+    with %Object{updated_at: updated_at} = object <- get_by_id(id) do
+      if opts[:interval] &&
+           NaiveDateTime.diff(NaiveDateTime.utc_now(), updated_at) > opts[:interval] do
+        case Fetcher.refetch_object(object) do
+          {:ok, %Object{} = object} ->
+            object
+
+          e ->
+            Logger.error("Couldn't refresh #{object.data["id"]}:\n#{inspect(e)}")
+            object
+        end
+      else
+        object
+      end
+    else
+      nil -> nil
+    end
   end
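The rewritten function now tolerates a missing object instead of crashing on the pattern match. Usage stays the same; for illustration (the id is a made-up value), an `:interval` given in seconds triggers a refetch when the object is older than that:

```elixir
# Refetch the object if it was last updated more than a day ago;
# returns nil when no object with that id exists.
Pleroma.Object.get_by_id_and_maybe_refetch(123, interval: 86_400)
```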
@@ -59,6 +59,7 @@ defmodule Pleroma.Object.Fetcher do
   end

   # Note: will create a Create activity, which we need internally at the moment.
+  @spec fetch_object_from_id(String.t(), list()) :: {:ok, Object.t()} | {:error | :reject, any()}
   def fetch_object_from_id(id, options \\ []) do
     with {_, nil} <- {:fetch_object, Object.get_cached_by_ap_id(id)},
          {_, true} <- {:allowed_depth, Federator.allowed_thread_distance?(options[:depth])},
@@ -134,7 +134,10 @@ defmodule Pleroma.Object.Updater do
     else
       %{updated_object: updated_data} =
         updated_data
-        |> maybe_update_history(original_data, updated: updated, use_history_in_new_object?: false)
+        |> maybe_update_history(original_data,
+          updated: updated,
+          use_history_in_new_object?: false
+        )

       updated_data
       |> Map.put("updated", date)
@@ -16,6 +16,6 @@ defmodule Pleroma.Search do

   def healthcheck_endpoints do
     search_module = Pleroma.Config.get([Pleroma.Search, :module])
-    search_module.healthcheck_endpoints
+    search_module.healthcheck_endpoints()
   end
 end
@@ -39,7 +39,7 @@ defmodule Pleroma.Telemetry.Logger do
         _,
         _
       ) do
-    Logger.error(fn ->
+    Logger.debug(fn ->
       "Connection pool failed to reclaim any connections due to all of them being in use. It will have to drop requests for opening connections to new hosts"
     end)
   end

@@ -70,7 +70,7 @@ defmodule Pleroma.Telemetry.Logger do
         %{key: key},
         _
       ) do
-    Logger.warning(fn ->
+    Logger.debug(fn ->
       "Pool worker for #{key}: Client #{inspect(client_pid)} died before releasing the connection with #{inspect(reason)}"
     end)
   end
|
|||
|
||||
defp url_from_spec(_upload, _base_url, {:url, url}), do: url
|
||||
|
||||
@spec base_url() :: binary
|
||||
def base_url do
|
||||
uploader = @config_impl.get([Pleroma.Upload, :uploader])
|
||||
upload_base_url = @config_impl.get([Pleroma.Upload, :base_url])
|
||||
upload_fallback_url = Pleroma.Web.Endpoint.url() <> "/media/"
|
||||
upload_base_url = @config_impl.get([Pleroma.Upload, :base_url]) || upload_fallback_url
|
||||
public_endpoint = @config_impl.get([uploader, :public_endpoint])
|
||||
|
||||
case uploader do
|
||||
Pleroma.Uploaders.Local ->
|
||||
upload_base_url || Pleroma.Web.Endpoint.url() <> "/media/"
|
||||
upload_base_url
|
||||
|
||||
Pleroma.Uploaders.S3 ->
|
||||
bucket = @config_impl.get([Pleroma.Uploaders.S3, :bucket])
|
||||
|
@ -268,11 +270,14 @@ defmodule Pleroma.Upload do
|
|||
!is_nil(truncated_namespace) ->
|
||||
truncated_namespace
|
||||
|
||||
!is_nil(namespace) ->
|
||||
!is_nil(namespace) and !is_nil(bucket) ->
|
||||
namespace <> ":" <> bucket
|
||||
|
||||
true ->
|
||||
!is_nil(bucket) ->
|
||||
bucket
|
||||
|
||||
true ->
|
||||
""
|
||||
end
|
||||
|
||||
if public_endpoint do
|
||||
|
@ -285,7 +290,7 @@ defmodule Pleroma.Upload do
|
|||
@config_impl.get([Pleroma.Uploaders.IPFS, :get_gateway_url])
|
||||
|
||||
_ ->
|
||||
public_endpoint || upload_base_url || Pleroma.Web.Endpoint.url() <> "/media/"
|
||||
public_endpoint || upload_base_url
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
@ -16,7 +16,9 @@ defmodule Pleroma.Upload.Filter.Exiftool.StripLocation do
|
|||
|
||||
def filter(%Pleroma.Upload{tempfile: file, content_type: "image" <> _}) do
|
||||
try do
|
||||
case System.cmd("exiftool", ["-overwrite_original", "-gps:all=", file], parallelism: true) do
|
||||
case System.cmd("exiftool", ["-overwrite_original", "-gps:all=", "-png:all=", file],
|
||||
parallelism: true
|
||||
) do
|
||||
{_response, 0} -> {:ok, :filtered}
|
||||
{error, 1} -> {:error, error}
|
||||
end
|
||||
|
|
|
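The filter now also clears the PNG metadata group (`-png:all=`) in addition to GPS tags, per the changelog entry about stripping GPS data from PNGs. As a reminder of how such a filter is enabled, here is the standard upload-filter configuration; the exact filter list below is an example, not taken from this diff:

```elixir
import Config

# Enable the EXIF/PNG location-stripping filter for uploads.
config :pleroma, Pleroma.Upload,
  filters: [Pleroma.Upload.Filter.Exiftool.StripLocation]
```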
@@ -39,6 +39,8 @@ defmodule Pleroma.User do
   alias Pleroma.Web.OAuth
   alias Pleroma.Web.RelMe
   alias Pleroma.Workers.BackgroundWorker
+  alias Pleroma.Workers.DeleteWorker
+  alias Pleroma.Workers.UserRefreshWorker

   require Logger
   require Pleroma.Constants

@@ -2015,7 +2017,7 @@ defmodule Pleroma.User do
   def delete(%User{} = user) do
     # Purge the user immediately
     purge(user)
-    BackgroundWorker.enqueue("delete_user", %{"user_id" => user.id})
+    DeleteWorker.enqueue("delete_user", %{"user_id" => user.id})
   end

   # *Actually* delete the user from the DB

@@ -2188,20 +2190,20 @@ defmodule Pleroma.User do

   def fetch_by_ap_id(ap_id), do: ActivityPub.make_user_from_ap_id(ap_id)

+  @spec get_or_fetch_by_ap_id(String.t()) :: {:ok, User.t()} | {:error, any()}
   def get_or_fetch_by_ap_id(ap_id) do
-    cached_user = get_cached_by_ap_id(ap_id)
-
-    maybe_fetched_user = needs_update?(cached_user) && fetch_by_ap_id(ap_id)
-
-    case {cached_user, maybe_fetched_user} do
-      {_, {:ok, %User{} = user}} ->
-        {:ok, user}
-
-      {%User{} = user, _} ->
-        {:ok, user}
-
-      _ ->
-        {:error, :not_found}
+    with cached_user = %User{} <- get_cached_by_ap_id(ap_id),
+         _ <- maybe_refresh(cached_user) do
+      {:ok, cached_user}
+    else
+      _ -> fetch_by_ap_id(ap_id)
     end
   end
+
+  defp maybe_refresh(user) do
+    if needs_update?(user) do
+      UserRefreshWorker.new(%{"ap_id" => user.ap_id})
+      |> Oban.insert()
+    end
+  end
@@ -197,12 +197,12 @@ defmodule Pleroma.User.Backup do
   end

   @files [
-    'actor.json',
-    'outbox.json',
-    'likes.json',
-    'bookmarks.json',
-    'followers.json',
-    'following.json'
+    ~c"actor.json",
+    ~c"outbox.json",
+    ~c"likes.json",
+    ~c"bookmarks.json",
+    ~c"followers.json",
+    ~c"following.json"
   ]
   @spec export(Pleroma.User.Backup.t(), pid()) :: {:ok, String.t()} | :error
   def export(%__MODULE__{} = backup, caller_pid) do
@@ -31,7 +31,7 @@ defmodule Pleroma.User.Import do
       identifiers,
       fn identifier ->
         with {:ok, %User{} = blocked} <- User.get_or_fetch(identifier),
-             {:ok, _block} <- CommonAPI.block(blocker, blocked) do
+             {:ok, _block} <- CommonAPI.block(blocked, blocker) do
           blocked
         else
           error -> handle_error(:blocks_import, identifier, error)

@@ -46,7 +46,7 @@ defmodule Pleroma.User.Import do
       fn identifier ->
         with {:ok, %User{} = followed} <- User.get_or_fetch(identifier),
              {:ok, follower, followed} <- User.maybe_direct_follow(follower, followed),
-             {:ok, _, _, _} <- CommonAPI.follow(follower, followed) do
+             {:ok, _, _, _} <- CommonAPI.follow(followed, follower) do
           followed
         else
           error -> handle_error(:follow_import, identifier, error)
@@ -163,7 +163,7 @@ defmodule Pleroma.Web do
       """
       def safe_render_many(collection, view, template, assigns \\ %{}) do
         Enum.map(collection, fn resource ->
-          as = Map.get(assigns, :as) || view.__resource__
+          as = Map.get(assigns, :as) || view.__resource__()
           assigns = Map.put(assigns, as, resource)
           safe_render(view, template, assigns)
         end)
@@ -1680,7 +1680,6 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
         }}
     else
       {:error, _} = e -> e
-      e -> {:error, e}
     end
   end

@@ -204,7 +204,7 @@ defmodule Pleroma.Web.ActivityPub.MRF do
     if function_exported?(policy, :config_description, 0) do
       description =
         @default_description
-        |> Map.merge(policy.config_description)
+        |> Map.merge(policy.config_description())
         |> Map.put(:group, :pleroma)
         |> Map.put(:tab, :mrf)
         |> Map.put(:type, :group)
@@ -49,7 +49,7 @@ defmodule Pleroma.Web.ActivityPub.MRF.FollowBotPolicy do
           "#{__MODULE__}: Follow request from #{follower.nickname} to #{user.nickname}"
         )

-        CommonAPI.follow(follower, user)
+        CommonAPI.follow(user, follower)
       end
     end)

@@ -137,7 +137,6 @@ defmodule Pleroma.Web.ActivityPub.MRF.NsfwApiPolicy do
       {:ok, object}
     else
       {:nsfw, _data} -> handle_nsfw(object)
-      _ -> {:reject, "NSFW: Attachment rejected"}
     end
   end

@@ -220,9 +220,7 @@ defmodule Pleroma.Web.ActivityPub.MRF.SimplePolicy do
          {:ok, object} <- check_object(object) do
       {:ok, object}
     else
-      {:reject, nil} -> {:reject, "[SimplePolicy]"}
       {:reject, _} = e -> e
-      _ -> {:reject, "[SimplePolicy]"}
     end
   end

@@ -236,9 +234,7 @@ defmodule Pleroma.Web.ActivityPub.MRF.SimplePolicy do
          {:ok, object} <- check_banner_removal(actor_info, object) do
       {:ok, object}
     else
-      {:reject, nil} -> {:reject, "[SimplePolicy]"}
       {:reject, _} = e -> e
-      _ -> {:reject, "[SimplePolicy]"}
     end
   end

@@ -249,9 +245,7 @@ defmodule Pleroma.Web.ActivityPub.MRF.SimplePolicy do
          {:ok, object} <- check_reject(uri, object) do
       {:ok, object}
     else
-      {:reject, nil} -> {:reject, "[SimplePolicy]"}
       {:reject, _} = e -> e
-      _ -> {:reject, "[SimplePolicy]"}
     end
   end

@@ -31,7 +31,6 @@ defmodule Pleroma.Web.ActivityPub.MRF.VocabularyPolicy do
       {:reject, _} = e -> e
       {:accepted, _} -> {:reject, "[VocabularyPolicy] #{message_type} not in accept list"}
       {:rejected, _} -> {:reject, "[VocabularyPolicy] #{message_type} in reject list"}
-      _ -> {:reject, "[VocabularyPolicy]"}
     end
   end

@@ -23,7 +23,7 @@ defmodule Pleroma.Web.ActivityPub.Pipeline do
   defp config, do: Config.get([:pipeline, :config], Config)

   @spec common_pipeline(map(), keyword()) ::
-          {:ok, Activity.t() | Object.t(), keyword()} | {:error, any()}
+          {:ok, Activity.t() | Object.t(), keyword()} | {:error | :reject, any()}
   def common_pipeline(object, meta) do
     case Repo.transaction(fn -> do_common_pipeline(object, meta) end, Utils.query_timeout()) do
       {:ok, {:ok, activity, meta}} ->
@@ -123,9 +123,10 @@ defmodule Pleroma.Web.ActivityPub.Publisher do
       Logger.error("Publisher failed to inbox #{inbox} with status #{code}")

       case response do
-        %{status: 403} -> {:discard, :forbidden}
-        %{status: 404} -> {:discard, :not_found}
-        %{status: 410} -> {:discard, :not_found}
+        %{status: 400} -> {:cancel, :bad_request}
+        %{status: 403} -> {:cancel, :forbidden}
+        %{status: 404} -> {:cancel, :not_found}
+        %{status: 410} -> {:cancel, :not_found}
         _ -> {:error, e}
       end

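Returning `{:cancel, reason}` tells Oban to stop retrying a job without treating it as a crash, which is what the publisher now does for 400/403/404/410 responses. A minimal sketch of the pattern in a generic worker; the module name and the `deliver/1` helper are hypothetical:

```elixir
defmodule MyApp.ExampleDeliveryWorker do
  use Oban.Worker, queue: :federator_outgoing

  @impl Oban.Worker
  def perform(%Oban.Job{args: %{"inbox" => inbox}}) do
    case deliver(inbox) do
      {:ok, _} ->
        :ok

      # Permanent failures: cancel the job so it is never retried.
      {:error, %{status: status}} when status in [400, 403, 404, 410] ->
        {:cancel, :unrecoverable}

      # Anything else is retried with Oban's normal backoff.
      {:error, reason} ->
        {:error, reason}
    end
  end

  # Hypothetical delivery helper standing in for the real publisher call.
  defp deliver(_inbox), do: {:ok, :delivered}
end
```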
@@ -22,7 +22,7 @@ defmodule Pleroma.Web.ActivityPub.Relay do
   def follow(target_instance) do
     with %User{} = local_user <- get_actor(),
          {:ok, %User{} = target_user} <- User.get_or_fetch_by_ap_id(target_instance),
-         {:ok, _, _, activity} <- CommonAPI.follow(local_user, target_user) do
+         {:ok, _, _, activity} <- CommonAPI.follow(target_user, local_user) do
       Logger.info("relay: followed instance: #{target_instance}; id=#{activity.data["id"]}")
       {:ok, activity}
     else
@@ -453,7 +453,7 @@ defmodule Pleroma.Web.ActivityPub.SideEffects do
       ) do
     orig_object_ap_id = updated_object["id"]
     orig_object = Object.get_by_ap_id(orig_object_ap_id)
-    orig_object_data = orig_object.data
+    orig_object_data = Map.get(orig_object, :data)

     updated_object =
       if meta[:local] do
@@ -530,6 +530,9 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do
       else
         _ -> e
       end
+
+      e ->
+        {:error, e}
     end
   end

@@ -939,26 +939,14 @@ defmodule Pleroma.Web.ActivityPub.Utils do
     |> Repo.all()
   end

   @spec maybe_handle_group_posts(Activity.t()) :: :ok
   @doc "Automatically repeats posts for local group actor recipients"
   def maybe_handle_group_posts(activity) do
     poster = User.get_cached_by_ap_id(activity.actor)

-    mentions =
-      activity.data["to"]
-      |> Enum.filter(&(&1 != activity.actor))
-
-    mentioned_local_groups =
-      User.get_all_by_ap_id(mentions)
-      |> Enum.filter(fn user ->
-        user.actor_type == "Group" and
-          user.local and
-          not User.blocks?(user, poster)
-      end)
-
-    mentioned_local_groups
-    |> Enum.each(fn group ->
-      Pleroma.Web.CommonAPI.repeat(activity.id, group)
-    end)
-
-    :ok
+    User.get_recipients_from_activity(activity)
+    |> Enum.filter(&match?("Group", &1.actor_type))
+    |> Enum.reject(&User.blocks?(&1, poster))
+    |> Enum.each(&Pleroma.Web.CommonAPI.repeat(activity.id, &1))
   end
 end
@@ -46,7 +46,6 @@ defmodule Pleroma.Web.AdminAPI.InviteController do
       render(conn, "show.json", invite: updated_invite)
     else
       nil -> {:error, :not_found}
       error -> error
     end
   end

@@ -24,7 +24,7 @@ defmodule Pleroma.Web.AdminAPI.RuleController do

   plug(OAuthScopesPlug, %{scopes: ["admin:read"]} when action == :index)

-  action_fallback(AdminAPI.FallbackController)
+  action_fallback(Pleroma.Web.AdminAPI.FallbackController)

   defdelegate open_api_operation(action), to: Pleroma.Web.ApiSpec.Admin.RuleOperation

@@ -79,7 +79,9 @@ defmodule Pleroma.Web.ApiSpec.SearchOperation do
         %Schema{type: :string, enum: ["accounts", "hashtags", "statuses"]},
         "Search type"
       ),
-      Operation.parameter(:q, :query, %Schema{type: :string}, "The search query", required: true),
+      Operation.parameter(:q, :query, %Schema{type: :string}, "The search query",
+        required: true
+      ),
       Operation.parameter(
         :resolve,
         :query,
@@ -139,7 +139,7 @@ defmodule Pleroma.Web.ApiSpec.StreamingOperation do
   end

   defp get_schema(%Schema{} = schema), do: schema
-  defp get_schema(schema), do: schema.schema
+  defp get_schema(schema), do: schema.schema()

   defp server_sent_event_helper(name, description, type, payload, opts \\ []) do
     payload_type = Keyword.get(opts, :payload_type, :json)
@@ -68,7 +68,7 @@ defmodule Pleroma.Web.ApiSpec.Schemas.Chat do
       },
       "id" => "1",
       "unread" => 2,
-      "last_message" => ChatMessage.schema().example(),
+      "last_message" => ChatMessage.schema().example,
       "updated_at" => "2020-04-21T15:06:45.000Z"
     }
   })
Some files were not shown because too many files have changed in this diff.