Skip to content

Commit 30a1f8a

Browse files
committed
Merge branch 'v4' into develop
# Conflicts: # .github/workflows/ci.yml # composer.json # composer.lock # ecs.php # phpstan.neon # src/Plugin.php # src/base/Element.php # src/base/Field.php # src/elements/Entry.php # src/fields/Assets.php # src/fields/Checkboxes.php # src/fields/Linkit.php # src/fields/TypedLink.php # src/helpers/DataHelper.php # src/models/FeedModel.php # src/services/Process.php
2 parents 9c272ab + 289ef9f commit 30a1f8a

28 files changed

+763
-101
lines changed

.ddev/commands/.gitattributes

+5
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
# #ddev-generated
2+
# Everything in the commands directory needs LF line-endings
3+
# Not CRLF as from Windows.
4+
# bash especially just can't cope if it finds CRLF in a script.
5+
* -text eol=lf

.ddev/commands/db/README.txt

+5
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
#ddev-generated
2+
Scripts in this directory will be executed inside the db
3+
container. A number of environment variables are supplied to the scripts.
4+
5+
See https://ddev.readthedocs.io/en/stable/users/extend/custom-commands/#environment-variables-provided for a list of environment variables.

.ddev/commands/host/README.txt

+6
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,6 @@
1+
#ddev-generated
2+
Scripts in this directory will be executed on the host
3+
but they can easily take action on containers by using
4+
`ddev exec`.
5+
6+
See https://ddev.readthedocs.io/en/stable/users/extend/custom-commands/#environment-variables-provided for a list of environment variables that can be used in the scripts.

.ddev/commands/host/solrtail.example

+11
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,11 @@
1+
#!/bin/bash
2+
3+
## #ddev-generated
4+
## Description: Tail the main solr log
5+
## Usage: solrtail
6+
## Example: ddev solrtail
7+
8+
# This can't work unless you have a solr service,
9+
# See https://ddev.readthedocs.io/en/stable/users/extend/additional-services/
10+
11+
ddev exec -s solr tail -40lf /opt/solr/server/logs/solr.log

.ddev/commands/solr/README.txt

+15
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,15 @@
1+
#ddev-generated
2+
Scripts in this directory will be executed inside the solr
3+
container (if it exists, of course). This is just an example,
4+
but any named service can have a directory with commands.
5+
6+
Note that /mnt/ddev_config must be mounted into the 3rd-party service
7+
with a stanza like this in the docker-compose.solr.yaml:
8+
9+
volumes:
10+
- type: "bind"
11+
source: "."
12+
target: "/mnt/ddev_config"
13+
14+
15+
See https://ddev.readthedocs.io/en/stable/users/extend/custom-commands/#environment-variables-provided for a list of environment variables that can be used in the scripts.

.ddev/commands/solr/solrtail.example

+13
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,13 @@
1+
#!/bin/bash
2+
3+
## #ddev-generated
4+
## Description: Tail the main solr log
5+
## Usage: solrtail
6+
## Example: ddev solrtail
7+
8+
# This example runs inside the solr container.
9+
# Note that this requires that /mnt/ddev_config be mounted
10+
# into the solr container and of course that you have a container
11+
# named solr.
12+
13+
tail -f /opt/solr/server/logs/solr.log

.ddev/commands/web/README.txt

+4
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,4 @@
1+
#ddev-generated
2+
Scripts in this directory will be executed inside the web container.
3+
4+
See https://ddev.readthedocs.io/en/stable/users/extend/custom-commands/#environment-variables-provided for a list of environment variables that can be used in the scripts.

.ddev/homeadditions/README.txt

+7
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,7 @@
1+
#ddev-generated
2+
Files in .ddev/homeadditions will be copied into the web container's home directory.
3+
4+
An example bash_aliases.example is provided here. To make this file active you can either
5+
6+
cp bash_aliases.example .bash_aliases
7+
or ln -s bash_aliases.example .bash_aliases
+6
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,6 @@
1+
# #ddev-generated
2+
# To make this file active you can either
3+
# cp bash_aliases.example .bash_aliases
4+
# or ln -s bash_aliases.example .bash_aliases
5+
6+
alias ll="ls -lhA"

.ddev/providers/README.txt

+34
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,34 @@
1+
Providers README
2+
================
3+
4+
#ddev-generated
5+
6+
## Introduction to Hosting Provider Integration
7+
8+
DDEV's hosting provider integration lets you integrate with any upstream source of database dumps and files (such as your production or staging server) and provides examples of configuration for Acquia, Platform.sh, Pantheon, rsync, etc.
9+
10+
The best part of this is you can change them and adapt them in any way you need to, they're all short scripted recipes. There are several example recipes created in the .ddev/providers directory of every project or see them in the code at https://github.com/ddev/ddev/tree/master/cmd/ddev/cmd/dotddev_assets/providers.
11+
12+
ddev provides the `pull` command with whatever recipes you have configured. For example, `ddev pull acquia` if you have created `.ddev/providers/acquia.yaml`.
13+
14+
ddev also provides the `push` command to push database and files to upstream. This is very dangerous to your upstream site and should only be used with extreme caution. It's recommended not even to implement the push stanzas in your yaml file, but if it fits your workflow, use it well.
15+
16+
Each provider recipe is a yaml file that can be named any way you want to name it. The examples are mostly named after the hosting providers, but they could be named "upstream.yaml" or "live.yaml", so you could `ddev pull upstream` or `ddev pull live`. If you wanted different upstream environments to pull from, you could name one "prod" and one "dev" and `ddev pull prod` and `ddev pull dev`.
17+
18+
Several example recipes are at https://github.com/ddev/ddev/tree/master/cmd/ddev/cmd/dotddev_assets/providers and in this directory.
19+
20+
Each provider recipe is a file named `<provider>.yaml` and consists of several mostly-optional stanzas:
21+
22+
* `environment_variables`: Environment variables will be created in the web container for each of these during pull or push operations. They're used to provide context (project id, environment name, etc.) for each of the other stanzas.
23+
* `db_pull_command`: A script that determines how ddev should pull a database. Its job is to create a gzipped database dump in /var/www/html/.ddev/.downloads/db.sql.gz.
24+
* `files_pull_command`: A script that determines how ddev can get user-generated files from upstream. Its job is to copy the files from upstream to /var/www/html/.ddev/.downloads/files.
25+
* `db_push_command`: A script that determines how ddev should push a database. Its job is to take a gzipped database dump from /var/www/html/.ddev/.downloads/db.sql.gz and load it on the hosting provider.
26+
* `files_push_command`: A script that determines how ddev pushes user-generated files to upstream. Its job is to copy the files from the project's user-files directory ($DDEV_FILES_DIR) to the correct place on the upstream provider.
27+
28+
The environment variables provided to custom commands (see https://ddev.readthedocs.io/en/stable/users/extend/custom-commands/#environment-variables-provided) are also available for use in these recipes.
29+
30+
### Provider Debugging
31+
32+
You can uncomment the `set -x` in each stanza to see more of what's going on. It really helps.
33+
34+
Although the various commands could be executed on the host or in other containers if configured that way, most commands are executed in the web container. So the best thing to do is to `ddev ssh` and manually execute each command you want to use. When you have it right, use it in the yaml file.

.ddev/providers/acquia.yaml.example

+82
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,82 @@
1+
#ddev-generated
2+
# Example Acquia provider configuration.
3+
4+
# To use this configuration,
5+
6+
# 1. Get your Acquia API token from your Account Settings->API Tokens.
7+
# 2. Make sure your ssh key is authorized on your Acquia account at Account Settings->SSH Keys
8+
# 3. `ddev auth ssh` (this typically needs only be done once per ddev session, not every pull.)
9+
# 4. Add / update the web_environment section in ~/.ddev/global_config.yaml with the API keys:
10+
# ```yaml
11+
# web_environment:
12+
# - ACQUIA_API_KEY=xxxxxxxx
13+
# - ACQUIA_API_SECRET=xxxxx
14+
# ```
15+
# 5. Copy .ddev/providers/acquia.yaml.example to .ddev/providers/acquia.yaml.
16+
# 6. Update the project_id and database corresponding to the environment you want to work with.
17+
# - If you have acli installed, you can use the following command: `acli remote:aliases:list`
18+
# - Or, on the Acquia Cloud Platform navigate to the environments page, click on the header and look for the "SSH URL" line. Eg. `[email protected]` would have a project ID of `project1.dev`
19+
# 7. Your project must include drush; `ddev composer require drush/drush` if it isn't there already.
20+
# 8. `ddev restart`
21+
# 9. Use `ddev pull acquia` to pull the project database and files.
22+
# 10. Optionally use `ddev push acquia` to push local files and database to Acquia. Note that `ddev push` is a command that can potentially damage your production site, so this is not recommended.
23+
24+
# Debugging: Use `ddev exec acli command` and `ddev exec acli auth:login`
25+
# Make sure you remembered to `ddev auth ssh`
26+
27+
environment_variables:
28+
project_id: yourproject.dev
29+
database_name: yourproject
30+
31+
auth_command:
32+
command: |
33+
set -eu -o pipefail
34+
if [ -z "${ACQUIA_API_KEY:-}" ] || [ -z "${ACQUIA_API_SECRET:-}" ]; then echo "Please make sure you have set ACQUIA_API_KEY and ACQUIA_API_SECRET in ~/.ddev/global_config.yaml" && exit 1; fi
35+
if ! command -v drush >/dev/null ; then echo "Please make sure your project contains drush, ddev composer require drush/drush" && exit 1; fi
36+
ssh-add -l >/dev/null || ( echo "Please 'ddev auth ssh' before running this command." && exit 1 )
37+
38+
acli -n auth:login -n --key="${ACQUIA_API_KEY}" --secret="${ACQUIA_API_SECRET}"
39+
acli -n remote:aliases:download --all --destination-dir $HOME/.drush -n >/dev/null
40+
41+
db_pull_command:
42+
command: |
43+
#set -x # You can enable bash debugging output by uncommenting
44+
set -eu -o pipefail
45+
# If no database_name is configured, infer it from project_id
46+
if [ -z "${database_name:-}" ]; then database_name=${project_id%%.*}; fi
47+
backup_time=$(acli -n api:environments:database-backup-list ${project_id} ${database_name} --limit=1 | jq -r .[].completed_at)
48+
backup_id="$(acli -n api:environments:database-backup-list ${project_id} ${database_name} --limit=1 | jq -r .[].id)"
49+
backup_url=$(acli -n api:environments:database-backup-download ${project_id} ${database_name} ${backup_id} | jq -r .url)
50+
ls /var/www/html/.ddev >/dev/null # This just refreshes stale NFS if possible
51+
echo "Downloading backup $backup_id from $backup_time"
52+
curl -o /var/www/html/.ddev/.downloads/db.sql.gz ${backup_url}
53+
54+
files_pull_command:
55+
command: |
56+
# set -x # You can enable bash debugging output by uncommenting
57+
set -eu -o pipefail
58+
ls /var/www/html/.ddev >/dev/null # This just refreshes stale NFS if possible
59+
pushd /var/www/html/.ddev/.downloads >/dev/null;
60+
drush -r docroot rsync --exclude-paths='styles:css:js' --alias-path=~/.drush -q -y @${project_id}:%files ./files
61+
62+
# push is a dangerous command. If not absolutely needed it's better to delete these lines.
63+
db_push_command:
64+
command: |
65+
# set -x # You can enable bash debugging output by uncommenting
66+
set -eu -o pipefail
67+
TIMESTAMP=$(date +%y%m%d%H%M%S)
68+
ls /var/www/html/.ddev >/dev/null # This just refreshes stale NFS if possible
69+
cd /var/www/html/.ddev/.downloads
70+
drush rsync -y --alias-path=~/.drush ./db.sql.gz @${project_id}:/tmp/db.${TIMESTAMP}.sql.gz
71+
acli -n remote:ssh -n ${project_id} -- "cd /tmp && gunzip db.${TIMESTAMP}.sql.gz"
72+
acli -n remote:drush -n ${project_id} -- "sql-cli </tmp/db.${TIMESTAMP}.sql"
73+
acli -n remote:drush -n ${project_id} -- cr
74+
acli -n remote:ssh -n ${project_id} -- "rm /tmp/db.${TIMESTAMP}.*"
75+
76+
# push is a dangerous command. If not absolutely needed it's better to delete these lines.
77+
files_push_command:
78+
command: |
79+
# set -x # You can enable bash debugging output by uncommenting
80+
set -eu -o pipefail
81+
ls ${DDEV_FILES_DIR} >/dev/null # This just refreshes stale NFS if possible
82+
drush rsync -y --alias-path=~/.drush @self:%files @${project_id}:%files

.ddev/providers/git.yaml.example

+40
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,40 @@
1+
#ddev-generated
2+
# Example git provider configuration.
3+
4+
# To use this configuration,
5+
6+
# 1. Create a git repository that contains a database dump (db.sql.gz) and a files tarball. It can be private or public, but for most people they will be private.
7+
# 2. Configure access to the repository so that it can be accessed from where you need it. For example, on gitpod, you'll need to enable access to GitHub or Gitlab. On a regular local dev environment, you'll need to be able to access github via https or ssh.
8+
# 3. Update the environment_variables below to point to the git repository that contains your database dump and files.
9+
10+
environment_variables:
11+
project_url: https://github.com/ddev/ddev-pull-git-test-repo
12+
branch: main
13+
checkout_dir: ~/tmp/ddev-pull-git-test-repo
14+
15+
16+
auth_command:
17+
service: host
18+
# This actually doesn't auth, but rather just checks out the repository
19+
command: |
20+
set -eu -o pipefail
21+
if [ ! -d ${checkout_dir}/.git ] ; then
22+
git clone -q ${project_url} --branch=${branch} ${checkout_dir}
23+
else
24+
cd ${checkout_dir}
25+
git reset --hard -q && git fetch && git checkout -q origin/${branch}
26+
fi
27+
28+
db_import_command:
29+
service: host
30+
command: |
31+
set -eu -o pipefail
32+
# set -x
33+
ddev import-db --src="${checkout_dir}/db.sql.gz"
34+
35+
files_import_command:
36+
service: host
37+
command: |
38+
set -eu -o pipefail
39+
# set -x
40+
ddev import-files --src="${checkout_dir}/files"
+38
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,38 @@
1+
#ddev-generated
2+
# Example local file provider configuration.
3+
4+
# This will pull a database and files from an existing location, for example,
5+
# from a Dropbox location on disk
6+
7+
# To use this configuration,
8+
# 1. You need a database dump and/or user-generated files tarball.
9+
# 2. Copy localfile.yaml.example to localfile.yaml.
10+
# 3. Change the copy commands as needed.
11+
# 4. Use `ddev pull localfile` to pull the project database and files.
12+
13+
# In this example, db_pull_command is not used
14+
15+
# Here db_import_command imports directly from the source location
16+
# instead of looking in .ddev/.downloads/files
17+
db_import_command:
18+
command: |
19+
set -eu -o pipefail
20+
echo $PATH
21+
ddev --version
22+
set -x
23+
gzip -dc ~/Dropbox/db.sql.gz | ddev mysql db
24+
service: host
25+
26+
# In this example, files_pull_command is not used
27+
28+
# files_import_command is an example of a custom importer
29+
# that directly untars the files into their appropriate destination
30+
files_import_command:
31+
command: |
32+
set -eu -o pipefail
33+
echo $PATH
34+
ddev --version
35+
set -x
36+
mkdir -p web/sites/default/files
37+
tar -zxf ~/Dropbox/files.tar.gz -C web/sites/default/files
38+
service: host

.ddev/providers/pantheon.yaml.example

+88
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,88 @@
1+
#ddev-generated
2+
# Example Pantheon.io provider configuration.
3+
# This example is Drupal/drush oriented,
4+
# but can be adapted for other CMSs supported by Pantheon
5+
6+
# To use this configuration:
7+
#
8+
# 1. Get your Pantheon.io machine token:
9+
# a. Login to your Pantheon Dashboard, and [Generate a Machine Token](https://pantheon.io/docs/machine-tokens/) for ddev to use.
10+
# b. Add the API token to the `web_environment` section in your global ddev configuration at ~/.ddev/global_config.yaml
11+
#
12+
# ```
13+
# web_environment:
14+
# - TERMINUS_MACHINE_TOKEN=abcdeyourtoken
15+
# ```
16+
#
17+
# 2. Choose a Pantheon site and environment you want to use with ddev. You can usually use the site name, but in some environments you may need the site uuid, which is the long 3rd component of your site dashboard URL. So if the site dashboard is at <https://dashboard.pantheon.io/sites/009a2cda-2c22-4eee-8f9d-96f017321555#dev/>, the site ID is 009a2cda-2c22-4eee-8f9d-96f017321555.
18+
#
19+
# 3. On the pantheon dashboard, make sure that at least one backup has been created. (When you need to refresh what you pull, do a new backup.)
20+
#
21+
# 4. Make sure your public ssh key is configured in Pantheon (Account->SSH Keys)
22+
#
23+
# 5. Check out project codebase from Pantheon. Enable the "Git Connection Mode" and use `git clone` to check out the code locally.
24+
#
25+
# 6. Configure the local checkout for ddev using `ddev config`
26+
#
27+
# 7. Verify that drush is installed in your project, `ddev composer require drush/drush`
28+
#
29+
# 8. In your project's .ddev/providers directory, copy pantheon.yaml.example to pantheon.yaml and edit the "project" under `environment_variables` (change it from `yourproject.dev`). If you want to use a different environment than "dev", change `dev` to the name of the environment.
30+
#
31+
# 9. If using Colima, may need to set an explicit nameserver in `~/.colima/default/colima.yaml` like `1.1.1.1`. If this configuration is changed, may also need to restart Colima.
32+
#
33+
# 10. `ddev restart`
34+
#
35+
# 11. Run `ddev pull pantheon`. The ddev environment will download the Pantheon database and files using terminus and will import the database and files into the ddev environment. You should now be able to access the project locally.
36+
#
37+
# 12. Optionally use `ddev push pantheon` to push local files and database to Pantheon. Note that `ddev push` is a command that can potentially damage your production site, so this is not recommended.
38+
#
39+
40+
# Debugging: Use `ddev exec terminus auth:whoami` to see what terminus knows about
41+
# `ddev exec terminus site:list` will show available sites
42+
43+
environment_variables:
44+
project: yourproject.dev
45+
46+
auth_command:
47+
command: |
48+
set -eu -o pipefail
49+
ssh-add -l >/dev/null || ( echo "Please 'ddev auth ssh' before running this command." && exit 1 )
50+
if ! command -v drush >/dev/null ; then echo "Please make sure your project contains drush, ddev composer require drush/drush" && exit 1; fi
51+
if [ -z "${TERMINUS_MACHINE_TOKEN:-}" ]; then echo "Please make sure you have set TERMINUS_MACHINE_TOKEN in ~/.ddev/global_config.yaml" && exit 1; fi
52+
terminus auth:login --machine-token="${TERMINUS_MACHINE_TOKEN}" || ( echo "terminus auth login failed, check your TERMINUS_MACHINE_TOKEN" && exit 1 )
53+
terminus aliases 2>/dev/null
54+
55+
db_pull_command:
56+
command: |
57+
set -x # You can enable bash debugging output by uncommenting
58+
set -eu -o pipefail
59+
ls /var/www/html/.ddev >/dev/null # This just refreshes stale NFS if possible
60+
pushd /var/www/html/.ddev/.downloads >/dev/null
61+
terminus backup:get ${project} --element=db --to=db.sql.gz
62+
63+
files_pull_command:
64+
command: |
65+
set -x # You can enable bash debugging output by uncommenting
66+
set -eu -o pipefail
67+
ls /var/www/html/.ddev >/dev/null # This just refreshes stale NFS if possible
68+
pushd /var/www/html/.ddev/.downloads >/dev/null;
69+
terminus backup:get ${project} --element=files --to=files.tgz
70+
mkdir -p files && tar --strip-components=1 -C files -zxf files.tgz
71+
72+
# push is a dangerous command. If not absolutely needed it's better to delete these lines.
73+
db_push_command:
74+
command: |
75+
set -x # You can enable bash debugging output by uncommenting
76+
set -eu -o pipefail
77+
ls /var/www/html/.ddev >/dev/null # This just refreshes stale NFS if possible
78+
pushd /var/www/html/.ddev/.downloads >/dev/null;
79+
terminus remote:drush ${project} -- sql-drop -y
80+
gzip -dc db.sql.gz | terminus remote:drush ${project} -- sql-cli
81+
82+
# push is a dangerous command. If not absolutely needed it's better to delete these lines.
83+
files_push_command:
84+
command: |
85+
set -x # You can enable bash debugging output by uncommenting
86+
set -eu -o pipefail
87+
ls ${DDEV_FILES_DIR} >/dev/null # This just refreshes stale NFS if possible
88+
drush rsync -y @self:%files @${project}:%files

0 commit comments

Comments
 (0)