forked from ec-europa/joinup-dev
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathbuild.properties
More file actions
324 lines (231 loc) · 9.71 KB
/
build.properties
File metadata and controls
324 lines (231 loc) · 9.71 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
# Drupal configuration
# --------------------
# The project name.
project.name = joinup
# The install profile to use.
website.profile.name = ${project.name}
# The site name.
website.site.name = Joinup
# The infrastructure stack.
infra.type = joinup
# SQL database settings.
drupal.db.name = db_name
drupal.db.user = db_user
drupal.db.password = db_pass
drupal.db.dsn = ${drupal.db.type}:host=${drupal.db.host};dbname=${drupal.db.name}
# SPARQL database settings.
sparql.host = localhost
sparql.port = 8890
sparql.dsn = localhost
sparql.user = dba
sparql.password = dba
sparql.namespace = Drupal\\Driver\\Database\\joinup_sparql
# Comma-separated list of demo users to create. The username and password will
# be taken from the role. A normal authenticated user with username and password
# 'user' will also be created.
drupal.demo.users = administrator, moderator
# Virtuoso
# --------
# The location of the Virtuoso binary. Preferably overwrite this locally with
# an absolute path for increased security.
virtuoso.binary = virtuoso-t
# The location of the database dump to use when restoring Virtuoso.
virtuoso.backup = ${exports.virtuoso.destination.folder}
# Paths
# -----
# Paths to executables.
git.bin = /usr/bin/git
isql.bin = /usr/bin/isql-vt
solr.bin = ${project.basedir}/vendor/bin/solr
# Files and directories inside the Drupal installation.
documentroot.path = web
website.drupal.dir = ${project.basedir}/${documentroot.path}
website.services.yml = ${website.settings.dir}/services.yml
# Composer related paths.
composer.json = ${project.basedir}/composer.json
# Temporary files.
tmp.dir = ${project.basedir}/tmp
# Exported configuration.
drupal.config.dir = ${project.basedir}/config
# Drupal private files directory.
drupal.private_files.dir = ${project.basedir}/private
# Behat configuration
# -------------------
# Enable strict mode in Behat tests. Will only pass if all tests are explicitly
# passing.
behat.options.strict = false
# Set verbosity for Behat tests. 0 is completely silent, 1 is normal output, 2
# shows exception backtraces, 3 shows debugging information.
behat.options.verbosity = 2
# The location where to save screenshots that are taken whenever a test fails.
# If left empty no screenshots will be taken.
# behat.screenshots.local.dir = ${tmp.dir}/behat-screenshots
behat.screenshots.local.dir =
# The location on AWS S3 where to upload screenshots that are taken whenever a
# test fails. This is especially helpful for storing screenshots taken during
# test runs on continuous integration services. The S3 bucket, key and secret
# to use are configured in the 'AWS S3' section below.
# If either this or any of the 'AWS S3' properties are left empty no screenshots
# will be uploaded to S3.
# behat.screenshots.s3.dir = behat-screenshots
behat.screenshots.s3.dir =
# PHP CodeSniffer configuration
# -----------------------------
# Whether or not to run a coding standards check before doing a git push. Note
# that this will abort the push if the coding standards check fails.
phpcs.prepush.enable = 1
# Paths to check, delimited by semicolons.
phpcs.files = ${website.modules.dir};${website.profiles.dir};${website.themes.dir};${behat.dir}/src
# Paths to ignore, delimited by semicolons.
phpcs.ignore = ${website.modules.dir}/contrib;${website.themes.dir}/joinup/prototype;${website.modules.dir}/custom/joinup_communities/tallinn/lib
# PHPUnit configuration
# ---------------------
# Comma-separated list of directories containing tests to execute. These are
# relative to the Drupal root.
phpunit.directories = ./modules/custom,./profiles,./themes/custom,./modules/contrib/file_url
# Comma-separated list of test files to execute. These are relative to the
# Drupal root.
phpunit.files = ./modules/contrib/og/tests/src/Kernel/Entity/OgMembershipRoleReferenceTest.php
# The database URL for testing SPARQL queries.
phpunit.sparql_url = sparql://${sparql.host}:${sparql.port}/
# The name of the legacy database used for migration testing.
phpunit.migrate.legacy_db.name = d6_joinup_test
# Apache Solr configuration
# -------------------------
# Apache Solr version to use.
solr.version = 6.6.5
# Filename of the download.
solr.tarball.filename = solr-${solr.version}.tgz
# Download URL.
solr.download.url = http://www-eu.apache.org/dist/lucene/solr/${solr.version}/${solr.tarball.filename}
# Installation path.
solr.vendor.dir = ${project.basedir}/vendor/apache
solr.dir = ${solr.vendor.dir}/solr-${solr.version}
# Search API configuration path.
solr.config.dir = ${website.modules.dir}/contrib/search_api_solr/solr-conf/6.x
solr.published_core.scheme = http
solr.published_core.host = localhost
solr.published_core.port = 8983
solr.published_core.path = /solr
solr.published_core.url = ${solr.published_core.scheme}://${solr.published_core.host}:${solr.published_core.port}${solr.published_core.path}
solr.published_core.core_name = drupal_published
solr.unpublished_core.scheme = http
solr.unpublished_core.host = localhost
solr.unpublished_core.port = 8983
solr.unpublished_core.path = /solr
solr.unpublished_core.url = ${solr.unpublished_core.scheme}://${solr.unpublished_core.host}:${solr.unpublished_core.port}${solr.unpublished_core.path}
solr.unpublished_core.core_name = drupal_unpublished
# Development options
# -------------------
# Development modules to enable.
drupal.modules.dev = admin_toolbar config_devel config_update field_ui menu_ui views_ui
# Modules to enable for doing user acceptance testing.
drupal.modules.uat = demo_users
# Redirect outgoing e-mail to disk. This prevents e-mail from being
# accidentally sent out on development machines. The mails can be found in the
# folder temporary://devel-mails. This requires the devel module to be enabled.
drupal.redirect.email = yes
# AWS SDK
# -------
aws.profile = default
aws.region = eu-west-1
# AWS CodeDeploy
# --------------
deploy.serviceRole = arn:aws:iam::918396081007:role/CodeDeploy
# AWS S3
# ------
# The S3 bucket that contains the database dumps.
exports.s3.bucket = joinup2
# The key and secret to access the bucket. If left empty the credentials will
# be retrieved from the environment. For documentation on how to set the
# credentials on the environment, see \Aws\Credentials\CredentialProvider.
exports.s3.key =
exports.s3.secret =
# The paths to the database dumps. Note that the Virtuoso dump consists of
# multiple files so this is pointing to the folder containing the files.
# Folder names should end in slashes.
# The following two variables also need to be set. Each is the direct link to
# the corresponding dump.
# exports.virtuoso.source = http://example.com/virtuoso.tar.gz
# exports.sql.source = http://example.com/sql.sql.gz
exports.virtuoso.destination.folder = ${tmp.dir}/dump-virtuoso/
exports.sql.destination = ${tmp.dir}/dump.sql.gz
asda.username =
asda.password =
# Packer configuration
# --------------------
packer.path = /usr/bin/env packer
packer.args =
application.name = ${project.name}
# SASS compiler configuration
# ---------------------------
# The ruby sass executable.
sass.bin = sass
# The code style. Can be 'nested', 'compact', 'compressed' or 'expanded'.
sass.style = compressed
# How to link generated output to the source files. Can be 'auto', 'file', 'inline' or 'none'.
sass.sourcemap = none
# The path to the SASS file to compile.
sass.input = ${website.themes.dir}/joinup/prototype/sass/app.sass
# The directory where the compiled CSS file should be placed.
sass.output.dir = ${website.themes.dir}/joinup/css
# The filename of the compiled CSS file.
sass.output.file = style.min.css
# Migration configuration
# -----------------------
# Main database settings.
migration.db.type = ${drupal.db.type}
migration.db.host = ${drupal.db.host}
migration.db.port = ${drupal.db.port}
migration.db.user = ${drupal.db.user}
migration.db.password = ${drupal.db.password}
# Migration specific database settings.
migration.db.name = db_name
migration.db.prefix =
migration.db.import_path = ./tmp/d6-joinup.sql
migration.db.views_import_path = ./tmp/d6.sql
# Modules to enable.
migration.modules = joinup_migrate
# The URL of the public files directory of the D6 site.
migration.source.files = https://joinup.ec.europa.eu/sites/default/files
# How the migration will run: 'production' or 'test' mode.
migration.mode = production
migration.mock_filesystem = TRUE
# Uncomment the next line to run the migration during an acceptance build.
# migration.acceptance = 1
# Matomo configuration
# --------------------
# Installation path.
matomo.dir = ${project.basedir}/vendor/matomo-org/matomo
# Git repository.
matomo.repo = https://github.com/libresh/compose-matomo.git
matomo.branch = master
# Configuration file.
matomo.config = docker-compose.yml
# Credentials.
matomo.db.password = ${drupal.db.password}
# Port to use. Matomo will be available on http://localhost:{port}
matomo.port = 8000
# Website ID. This can be seen in the Matomo UI, at Administration > Websites >
# Manage.
matomo.website_id = 1
# Optional HTTP URL to the Matomo server. Only intended for development
# environments.
matomo.url.http = http://localhost:${matomo.port}/
# Optional HTTPS URL to the Matomo server. Intended for production environments.
matomo.url.https =
# Authentication token. This can be retrieved from the Matomo web interface at
# Administration > Platform > API > User authentication.
matomo.token = 0123456789abcdef0123456789abcdef
# Redis configuration
# -------------------
# The IP address of the Redis host.
# Uncomment this line to enable the Redis cache.
# redis.host = 127.0.0.1
# Stage file proxy settings
# -------------------------
# The origin of the files without the trailing '/'. Leave empty to disable the functionality.
stage_file_proxy.origin = http://example.com
# If true, then the request will get a 301 redirection to the remote server
# rather than downloading the file.
stage_file_proxy.hotlink = TRUE