Merge "Merge branch 'stable-3.2'"
@@ -1 +1 @@
3.4.1
3.5.0
@@ -2828,7 +2828,8 @@ By default, `gerrit`.
+
Enable (or disable) the `'$site_path'/logs/httpd_log` request log.
If enabled, an NCSA combined log format request log file is written
out by the internal HTTP daemon.
out by the internal HTTP daemon. The httpd log format is documented
link:logs.html#_httpd_log[here].
+
`log4j.appender` with the name `httpd_log` can be configured to overwrite
programmatic configuration.
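
A minimal `gerrit.config` sketch that enables the request log explicitly
(illustrative only; see the option description above for the defaults):
----
[httpd]
        requestLog = true
----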
@@ -4881,6 +4882,21 @@ Values should use common unit suffixes to express their setting:
+
By default, 30s.

[[sshd.gracefulStopTimeout]]sshd.gracefulStopTimeout::
+
Set a graceful stop time. If set, Gerrit ensures that all open SSH
sessions are preserved for a maximum period of time before forcing the
shutdown of the SSH daemon. During this period, no new requests
will be accepted. This option is meant to be used in setups performing
rolling restarts.
+
Values should use common unit suffixes to express their setting:
+
* s, sec, second, seconds
* m, min, minute, minutes
+
By default, 0 seconds (immediate shutdown).
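
For example, to give open sessions up to 30 seconds to finish during a
rolling restart, a minimal `gerrit.config` sketch could look like this:
----
[sshd]
        gracefulStopTimeout = 30 s
----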

[[sshd.maxConnectionsPerUser]]sshd.maxConnectionsPerUser::
+
Maximum number of concurrent SSH sessions that a user account
@@ -5006,6 +5022,7 @@ By default, `host/canonical.host.name`
+
Enable (or disable) the `'$site_path'/logs/sshd_log` request log.
If enabled, a request log file is written out by the SSH daemon.
The sshd log format is documented link:logs.html#_sshd_log[here].
+
`log4j.appender` with the name `sshd_log` can be configured to overwrite
programmatic configuration.
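
Likewise, a minimal `gerrit.config` sketch that enables the SSH request log
explicitly (illustrative only):
----
[sshd]
        requestLog = true
----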
@@ -21,6 +21,13 @@ during 'init'.
listenUrl = proxy-http://127.0.0.1:8081/r/
----

== Reverse proxy and client IPs

When behind a reverse proxy, the http_log will log the IP of the reverse proxy
as client.ip. To log the correct client IP, you must provide the
'X-Forwarded-For' header from the reverse proxy.
See the nginx configuration example below.
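
The directive involved is shown here as a minimal sketch only (hostname, port
and paths are placeholders; the complete example referenced above appears
further below):
----
location / {
    proxy_pass http://127.0.0.1:8081;
    proxy_set_header X-Forwarded-For $remote_addr;
}
----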


== Apache 2 Configuration
@@ -90,12 +90,12 @@ they could be added to ~/.bazelrc resource file:

```
$ cat << EOF > ~/.bazelrc
> build --define=ABSOLUTE_JAVABASE=<path-to-java-13>
> build --javabase=@bazel_tools//tools/jdk:absolute_javabase
> build --host_javabase=@bazel_tools//tools/jdk:absolute_javabase
> build --host_java_toolchain=@bazel_tools//tools/jdk:toolchain_vanilla
> build --java_toolchain=@bazel_tools//tools/jdk:toolchain_vanilla
> EOF
build --define=ABSOLUTE_JAVABASE=<path-to-java-13>
build --javabase=@bazel_tools//tools/jdk:absolute_javabase
build --host_javabase=@bazel_tools//tools/jdk:absolute_javabase
build --host_java_toolchain=@bazel_tools//tools/jdk:toolchain_vanilla
build --java_toolchain=@bazel_tools//tools/jdk:toolchain_vanilla
EOF
```

Now, invoking Bazel with just `bazel build :release` would include
@@ -27,14 +27,33 @@ link:https://github.com/GerritForge/gatling-git[Gatling Git extension,role=exter
leveraged to run tests at the Git protocol level.

Gatling is written in Scala, but the abstraction provided by the Gatling DSL makes the scenarios
implementation easy even without any Scala knowledge. The
link:https://gitenterprise.me/2019/12/20/stress-your-gerrit-with-gatling/[Stress your Gerrit with Gatling,role=external,window=_blank]
blog post has more introductory information.
implementation easy even without any Scala knowledge. The online `End-to-end tests`
link:https://www.gerritcodereview.com/presentations.html#list-of-presentations[presentation,role=external,window=_blank]
links posted on the homepage have more introductory information.

== IDE: IntelliJ

Examples of scenarios can be found in the `e2e-tests` directory. The files in that directory should
be formatted using the mainstream
link:https://plugins.jetbrains.com/plugin/1347-scala[Scala plugin for IntelliJ,role=external,window=_blank].
The latter is not mandatory but preferred for `sbt` and Scala IDE purposes in this project.
So, Eclipse can also be used alongside as a development IDE; this is described below.

=== Eclipse

1. Install the link:http://scala-ide.org/docs/user/gettingstarted.html[Scala plugin for Eclipse,role=external,window=_blank].
1. Run `sbt eclipse` from the `e2e-tests` root directory.
1. Import the resulting `e2e-tests` eclipse file inside the Gerrit project, in Eclipse.
1. You should see errors in Eclipse telling you there are missing packages.
1. This is due to the sbt-eclipse plugin not properly linking the Gerrit Gatling e2e tests with
Gatling Git plugin.
1. You then have to right-click on the root directory and choose the build path->link source option.
1. Then you have to browse to `.sbt/1.0/staging`, find the folder where gatling-git is contained,
and choose that.
1. That last step should link the gatling-git plugin to the project; e2e tests should not show
errors anymore.
1. You may get errors in the gatling-git directory; these should not affect Gerrit Gatling
development and can be ignored.

== How to build the tests
@@ -163,10 +182,11 @@ time.
Plugin or otherwise non-core scenarios may do so just as well. The core java package
`com.google.gerrit.scenarios` from the example above has to be replaced with the one under which
those scenario classes are. Such extending scenarios can also add extension-specific properties.
Early examples of this can be found in the Gerrit
`link:https://gerrit.googlesource.com/plugins/high-availability[high-availability,role=external,window=_blank]`
and `link:https://gerrit.googlesource.com/plugins/multi-site[multi-site,role=external,window=_blank]`
plugins test code.
Examples of this can be found in these Gerrit plugins test code:

* `link:https://gerrit.googlesource.com/plugins/high-availability[high-availability,role=external,window=_blank]`
* `link:https://gerrit.googlesource.com/plugins/multi-site[multi-site,role=external,window=_blank]`
* `link:https://gerrit.googlesource.com/plugins/rename-project[rename-project,role=external,window=_blank]`

Further above, the `_PROJECT` keyword is prefixed with an underscore, which means that its value
gets automatically generated by the scenario. Any property setting for it is therefore not
@@ -41,6 +41,37 @@ that get saved to the `.project` file, for example adding Resource
Filters on a folder, they will be overwritten the next time you run
`tools/eclipse/project.py`.

=== Eclipse project on MacOS

By default, bazel uses `/private/var/tmp` as the
link:https://docs.bazel.build/versions/master/output_directories.html[outputRoot on MacOS].
This means that the eclipse project will reference libraries stored under that directory.
However, MacOS runs a periodic cleanup task which deletes the content under `/private/var/tmp`
which wasn't accessed or modified for some days, by default 3 days. This can lead to a broken
Eclipse project as referenced libraries get deleted.

There are two possibilities to mitigate this issue.

==== Change the location of the bazel output directory
On Linux, the output directory defaults to `$HOME/.cache/bazel` and the same can be configured
on Mac too. Edit, or create, the `$HOME/.bazelrc` file and add the following line:
----
startup --output_user_root=/Users/johndoe/.cache/bazel
----

==== Increase the threshold for the cleanup of temporary files
The default threshold for the cleanup can be overridden by creating a configuration file under
`/etc/periodic.conf` and setting a larger value for `daily_clean_tmps_days`.

An example `/etc/periodic.conf` file:

----
# This file overrides the settings from /etc/defaults/periodic.conf
daily_clean_tmps_days="45" # If not accessed for
----

For more details about the proposed workaround, see link:https://superuser.com/a/187105[this post].

=== Eclipse project with custom plugins ===

To add custom plugins to the eclipse project add them to `tools/bzl/plugins.bzl`.
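
For orientation, a sketch of what such an entry could look like, assuming the
file keeps a `CUSTOM_PLUGINS` list (check your checkout for the exact variable
names; `my-plugin` is a placeholder):
----
CUSTOM_PLUGINS = [
    "my-plugin",
]
----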

(Five binary image files changed in this commit; contents not shown.)
@@ -69,6 +69,7 @@
. link:cmd-index.html[Command Line Tools]
. link:config-plugins.html#replication[Replication]
. link:config-plugins.html[Plugins]
. link:logs.html[Log Files]
. link:metrics.html[Metrics]
. link:config-reverseproxy.html[Reverse Proxy]
. link:config-auto-site-initialization.html[Automatic Site Initialization on Startup]
Documentation/logs.txt (new file, 174 lines)
@@ -0,0 +1,174 @@
= Gerrit Code Review - Logs

Gerrit writes log files in the `$site_path/logs/` folder tracking requests,
background and plugin activity and errors. By default logs are written in
link:config-gerrit.html#log.textLogging[text format], optionally in
link:config-gerrit.html#log.jsonLogging[JSON format].
By default log files are link:config-gerrit.html#log.compress[compressed]
at server startup and then daily at 11pm and
link:config-gerrit.html#log.rotate[rotated] every midnight.
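
A minimal `gerrit.config` sketch combining the options linked above (an
example combination, not necessarily the defaults):
```
[log]
        textLogging = true
        jsonLogging = true
        compress = true
        rotate = true
```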

== Logs

The following logs can be written.

=== HTTPD Log

The httpd log tracks HTTP requests processed by Gerrit's http daemon
and is written to `$site_path/logs/httpd_log`. Enabled or disabled via the
link:config-gerrit.html#httpd.requestLog[httpd.requestLog] option.

The format is an enhanced
link:https://httpd.apache.org/docs/2.4/logs.html#combined[NCSA combined log];
if a log field is not present, a "-" is substituted:

* `host`: The IP address of the HTTP client that made the HTTP resource request.
If you are using a reverse proxy, whether the proxy IP address or the client IP
address is logged depends on the proxy configuration.
* `[thread name]`: name of the Java thread executing the request.
* `remote logname`: the identifier used to
link:https://tools.ietf.org/html/rfc1413[identify the client making the HTTP request];
Gerrit always logs a dash `-`.
* `username`: the username used by the client for authentication. "-" for
anonymous requests.
* `[date:time]`: The date and time stamp of the HTTP request, i.e. the time
that the request was received. Until Gerrit 3.1 the format is
`[dd/MMM/yyyy:HH:mm:ss.SSS ZZZZ]`. For Gerrit 3.2 or newer the
link:https://www.w3.org/TR/NOTE-datetime[ISO 8601 format] `[yyyy-MM-dd'T'HH:mm:ss,SSSZ]`
is used for all timestamps.
* `request`: The request line from the client is given in double quotes.
** the HTTP method used by the client.
** the resource the client requested.
** the protocol/version used by the client.
* `statuscode`: the link:https://tools.ietf.org/html/rfc2616#section-10[HTTP status code]
that the server sent back to the client.
* `response size`: the number of bytes of data transferred as part of the HTTP
response, not including the HTTP header.
* `latency`: response time in milliseconds.
* `referer`: the `Referer` HTTP request header. This gives the site that
the client reports having been referred from.
* `client agent`: the client agent which sent the request.

Example:
```
12.34.56.78 [HTTP-4136374] - johndoe [28/Aug/2020:10:02:20 +0200] "GET /a/plugins/metrics-reporter-prometheus/metrics HTTP/1.1" 200 1247498 1900 - "Prometheus/2.13.1"
```

=== SSHD Log

The sshd log tracks ssh requests processed by Gerrit's ssh daemon
and is written to `$site_path/logs/sshd_log`. Enabled or disabled
via option link:config-gerrit.html#sshd.requestLog[sshd.requestLog].

Log format:

* `[date time]`: The time that the request was received.
Until Gerrit 3.1 the format is `[yyyy-mm-dd HH:mm:ss.SSS ZZZZ]`.
For Gerrit 3.2 or newer the
link:https://www.w3.org/TR/NOTE-datetime[ISO 8601 format] `[yyyy-MM-dd'T'HH:mm:ss,SSSZ]`
is used for all timestamps.
* `sessionid`: hexadecimal session identifier; all requests of the
same connection share the same sessionid. Gerrit does not support multiplexing multiple
sessions on the same connection. Grep the log file using the sessionid as a filter to
get all requests from that session.
* `[thread name]`: name of the Java thread executing the request.
* `username`: the username used by the client for authentication.
* `a/accountid`: identifier of the Gerrit account which is logged on.
* `operation`: the operation being executed via ssh.
** `LOGIN FROM <host>`: login and start new SSH session from the given host.
** `AUTH FAILURE FROM <host> <message>`: failed authentication from given host and cause of failure.
** `LOGOUT`: logout and terminate SSH session.
** `git-upload-pack.<projectname>`: git fetch or clone command for given project.
** `git-receive-pack.<projectname>`: git push command for given project.
** Gerrit ssh commands which may be logged in this field are documented
link:cmd-index.html#_server[here].
* `wait`: command wait time, time in milliseconds the command waited for an execution thread.
* `exec`: command execution time, time in milliseconds to execute the command.
* `status`: status code. 0 means success, any other value is an error.

The `git-upload-pack` command provides the following additional fields after the `exec`
and before the `status` field. All times are in milliseconds. Fields are -1 if not available
when the upload-pack request returns an empty result since the client's repository was up to date:

* `time negotiating`: time for negotiating which objects need to be transferred.
* `time searching for reuse`: time jgit searched for deltas which can be reused.
That is the time spent matching existing representations against objects that
will be transmitted, or that the client can be assumed to already have.
* `time searching for sizes`: time jgit was searching for sizes of all objects that
will enter the delta compression search window. The sizes need to
be known to better match similar objects together and improve
delta compression ratios.
* `time counting`: time jgit spent enumerating the objects that need to
be included in the output. This time includes any restarts that
occur when a cached pack is selected for reuse.
* `time compressing`: time jgit was compressing objects. This is observed
wall-clock time and does not accurately track CPU time used when
multiple threads were used to perform the delta compression.
* `time writing`: time jgit needed to write the packfile, from start of
header until end of trailer. The transfer speed can be
approximated by dividing `total bytes` by this value.
* `total time in UploadPack`: total time jgit spent in upload-pack.
* `bitmap index misses`: number of misses when trying to use the bitmap index;
-1 means no bitmap index is available. This is the count of objects that
needed to be discovered through an object walk because they were not found
in bitmap indices.
* `total deltas`: total number of deltas transferred. This may be lower than the actual
number of deltas if a cached pack was reused.
* `total objects`: total number of objects transferred. This total includes
the value of `total deltas`.
* `total bytes`: total number of bytes transferred. This size includes the pack
header, trailer, thin pack, and reused cached packs.
* `client agent`: the client agent and version which sent the request.

Example: a CI system established an SSH connection, sent an upload-pack command (git fetch) and closed the connection:
```
[2020-08-28 11:00:01,391 +0200] 8a154cae [sshd-SshServer[570fc452]-nio2-thread-299] voter a/1000023 LOGIN FROM 12.34.56.78
[2020-08-28 11:00:01,556 +0200] 8a154cae [SSH git-upload-pack /AP/ajs/jpaas-msg-svc.git (voter)] voter a/1000056 git-upload-pack./demo/project.git 0ms 115ms 92ms 1ms 0ms 6ms 0ms 0ms 7ms 3 10 26 2615 0 git/2.26.2
[2020-08-28 11:00:01,583 +0200] 8a154cae [sshd-SshServer[570fc452]-nio2-thread-168] voter a/1000023 LOGOUT
```

=== Error Log

The error log tracks errors and stack traces and is written to
`$site_path/logs/error_log`.

Log format:

* `[date time]`: The time that the request was received.
Until Gerrit 3.1 the format is `[yyyy-mm-dd HH:mm:ss.SSS ZZZZ]`.
For Gerrit 3.2 or newer the
link:https://www.w3.org/TR/NOTE-datetime[ISO 8601 format] `[yyyy-MM-dd'T'HH:mm:ss,SSSZ]`
is used for all timestamps.
* `[thread name]`: name of the Java thread executing the request.
* `level`: log level (ERROR, WARN, INFO, DEBUG).
* `logger`: name of the logger.
* `message`: log message.
* `stacktrace`: Java stacktrace when an exception was caught, usually spans multiple lines.
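
For illustration only, a hypothetical `error_log` line in the Gerrit 3.2+
timestamp format following the fields above (thread name, logger and message
are made up; the stacktrace, if any, would follow on subsequent lines):
```
[2020-08-28T10:02:21,337+0200] [HTTP-75] ERROR com.google.gerrit.httpd.restapi.RestApiServlet : Error in GET /a/changes/
```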

=== GC Log

The gc log tracks git garbage collection running in a background thread
if enabled and is written to `$site_path/logs/gc_log`.

Log format:

* `[date time]`: The time that the request was received.
Until Gerrit 3.1 the format is `[yyyy-mm-dd HH:mm:ss.SSS ZZZZ]`.
For Gerrit 3.2 or newer the
link:https://www.w3.org/TR/NOTE-datetime[ISO 8601 format] `[yyyy-MM-dd'T'HH:mm:ss,SSSZ]`
is used for all timestamps.
* `level`: log level (ERROR, WARN, INFO, DEBUG).
* `message`: log message.

=== Plugin Logs

Some plugins write their own log file.
E.g. the replication plugin writes its log to `$site_path/logs/replication_log`.
Refer to each plugin's documentation for more details on their logs.

GERRIT
------
Part of link:index.html[Gerrit Code Review]

SEARCHBOX
---------
WORKSPACE (10 lines changed)
@@ -219,15 +219,15 @@ maven_jar(
    sha1 = GUAVA_BIN_SHA1,
)

CAFFEINE_VERS = "2.8.0"
CAFFEINE_VERS = "2.8.5"

maven_jar(
    name = "caffeine",
    artifact = "com.github.ben-manes.caffeine:caffeine:" + CAFFEINE_VERS,
    sha1 = "6000774d7f8412ced005a704188ced78beeed2bb",
    sha1 = "f0eafef6e1529a44e36549cd9d1fc06d3a57f384",
)

CAFFEINE_GUAVA_SHA256 = "3a66ee3ec70971dee0bae6e56bda7b8742bc4bedd7489161bfbbaaf7137d89e1"
CAFFEINE_GUAVA_SHA256 = "a7ce6d29c40bccd688815a6734070c55b20cd326351a06886a6144005aa32299"

# TODO(davido): Rename guava.jar to caffeine-guava.jar on fetch to prevent potential
# naming collision between caffeine guava adapter and guava library itself.
@@ -758,8 +758,8 @@ maven_jar(
# Keep this version of Soy synchronized with the version used in Gitiles.
maven_jar(
    name = "soy",
    artifact = "com.google.template:soy:2019-10-08",
    sha1 = "4518bf8bac2dbbed684849bc209c39c4cb546237",
    artifact = "com.google.template:soy:2020-08-24",
    sha1 = "e774bf5cc95923d2685292883fe219e231346e50",
)

maven_jar(
@@ -2,7 +2,6 @@ import Dependencies._

enablePlugins(GatlingPlugin)

lazy val gatlingGitExtension = RootProject(uri("git://github.com/GerritForge/gatling-git.git"))
lazy val root = (project in file("."))
  .settings(
    inThisBuild(List(
@@ -12,8 +11,8 @@ lazy val root = (project in file("."))
    )),
    name := "gerrit",
    libraryDependencies ++=
      gatling ++
      gatling ++ gatlingGit ++
        Seq("io.gatling" % "gatling-core" % GatlingVersion) ++
        Seq("io.gatling" % "gatling-app" % GatlingVersion),
    scalacOptions += "-language:postfixOps"
  ) dependsOn gatlingGitExtension
  )
@@ -2,9 +2,17 @@ import sbt._

object Dependencies {
  val GatlingVersion = "3.2.0"
  val GatlingGitVersion = "1.0.12"

  lazy val gatling = Seq(
    "io.gatling.highcharts" % "gatling-charts-highcharts",
    "io.gatling" % "gatling-test-framework",
  ).map(_ % GatlingVersion % Test)

  lazy val gatlingGit = Seq(
    "com.gerritforge" %% "gatling-git" % GatlingGitVersion excludeAll(
      ExclusionRule(organization = "io.gatling"),
      ExclusionRule(organization = "io.gatling.highcharts")
    )
  )
}
@@ -1 +1,2 @@
addSbtPlugin("io.gatling" % "gatling-sbt" % "3.0.0")
addSbtPlugin("com.typesafe.sbteclipse" % "sbteclipse-plugin" % "5.2.4")
@@ -1,3 +1,4 @@
{
  "create_empty_commit": "true"
  "create_empty_commit": "true",
  "parent": "${parent}"
}
@@ -1,5 +1,6 @@
[
  {
    "url": "HTTP_SCHEME://HOSTNAME:HTTP_PORT/a/projects/PROJECT"
    "url": "HTTP_SCHEME://HOSTNAME:HTTP_PORT/a/projects/PROJECT",
    "parent": "PARENT"
  }
]
@@ -28,7 +28,7 @@ class CreateProject extends ProjectSimulation {

  val test: ScenarioBuilder = scenario(unique)
    .feed(data)
    .exec(httpRequest.body(RawFileBody(body)).asJson)
    .exec(httpRequest.body(ElFileBody(body)).asJson)

  setUp(
    test.inject(
@@ -67,6 +67,8 @@ class GerritSimulation extends Simulation {
      case ("number", number) =>
        val precedes = replaceKeyWith("_number", 0, number.toString)
        replaceProperty("number", 1, precedes)
      case ("parent", parent) =>
        replaceProperty("parent", "All-Projects", parent.toString)
      case ("project", project) =>
        var precedes = replaceKeyWith("_project", name, project.toString)
        precedes = replaceOverride(precedes)
@@ -62,7 +62,9 @@ import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.mina.transport.socket.SocketSessionConfig;
import org.apache.sshd.common.BaseBuilder;
@@ -72,6 +74,8 @@ import org.apache.sshd.common.cipher.Cipher;
import org.apache.sshd.common.compression.BuiltinCompressions;
import org.apache.sshd.common.compression.Compression;
import org.apache.sshd.common.forward.DefaultForwarderFactory;
import org.apache.sshd.common.future.CloseFuture;
import org.apache.sshd.common.future.SshFutureListener;
import org.apache.sshd.common.io.AbstractIoServiceFactory;
import org.apache.sshd.common.io.IoAcceptor;
import org.apache.sshd.common.io.IoServiceFactory;
@@ -142,6 +146,7 @@ public class SshDaemon extends SshServer implements SshInfo, LifecycleListener {
  private final List<HostKey> hostKeys;
  private volatile IoAcceptor daemonAcceptor;
  private final Config cfg;
  private final long gracefulStopTimeout;

  @Inject
  SshDaemon(
@@ -212,6 +217,8 @@ public class SshDaemon extends SshServer implements SshInfo, LifecycleListener {
    SshSessionBackend backend = cfg.getEnum("sshd", null, "backend", SshSessionBackend.NIO2);
    boolean channelIdTracking = cfg.getBoolean("sshd", "enableChannelIdTracking", true);

    gracefulStopTimeout = cfg.getTimeUnit("sshd", null, "gracefulStopTimeout", 0, TimeUnit.SECONDS);

    System.setProperty(
        IoServiceFactoryFactory.class.getName(),
        backend == SshSessionBackend.MINA
@@ -341,6 +348,12 @@ public class SshDaemon extends SshServer implements SshInfo, LifecycleListener {
  public synchronized void stop() {
    if (daemonAcceptor != null) {
      try {
        if (gracefulStopTimeout > 0) {
          logger.atInfo().log(
              "Stopping SSHD sessions gracefully with %d seconds timeout.", gracefulStopTimeout);
          daemonAcceptor.unbind(daemonAcceptor.getBoundAddresses());
          waitForSessionClose();
        }
        daemonAcceptor.close(true).await();
        shutdownExecutors();
        logger.atInfo().log("Stopped Gerrit SSHD");
@@ -352,6 +365,30 @@ public class SshDaemon extends SshServer implements SshInfo, LifecycleListener {
    }
  }

  private void waitForSessionClose() {
    Collection<IoSession> ioSessions = daemonAcceptor.getManagedSessions().values();
    CountDownLatch allSessionsClosed = new CountDownLatch(ioSessions.size());
    for (IoSession io : ioSessions) {
      logger.atFine().log("Waiting for session %s to stop.", io.getId());
      io.addCloseFutureListener(
          new SshFutureListener<CloseFuture>() {
            @Override
            public void operationComplete(CloseFuture future) {
              allSessionsClosed.countDown();
            }
          });
    }
    try {
      if (!allSessionsClosed.await(gracefulStopTimeout, TimeUnit.SECONDS)) {
        logger.atWarning().log(
            "Timeout waiting for SSH session to close. SSHD will be shut down immediately.");
      }
    } catch (InterruptedException e) {
      logger.atWarning().withCause(e).log(
          "Interrupted waiting for SSH-sessions to close. SSHD will be shut down immediately.");
    }
  }

  private void shutdownExecutors() {
    if (executor != null) {
      executor.shutdownNow();
@@ -41,7 +41,7 @@ public class ElasticContainer extends ElasticsearchContainer {
      case V6_7:
        return "blacktop/elasticsearch:6.7.2";
      case V6_8:
        return "blacktop/elasticsearch:6.8.11";
        return "blacktop/elasticsearch:6.8.12";
      case V7_0:
        return "blacktop/elasticsearch:7.0.1";
      case V7_1:
@@ -72,15 +72,15 @@ public class ElasticVersionTest {
  public void atLeastMinorVersion() throws Exception {
    assertThat(ElasticVersion.V6_7.isAtLeastMinorVersion(ElasticVersion.V6_7)).isTrue();
    assertThat(ElasticVersion.V6_8.isAtLeastMinorVersion(ElasticVersion.V6_8)).isTrue();
    assertThat(ElasticVersion.V7_0.isAtLeastMinorVersion(ElasticVersion.V6_7)).isFalse();
    assertThat(ElasticVersion.V7_1.isAtLeastMinorVersion(ElasticVersion.V6_7)).isFalse();
    assertThat(ElasticVersion.V7_2.isAtLeastMinorVersion(ElasticVersion.V6_7)).isFalse();
    assertThat(ElasticVersion.V7_3.isAtLeastMinorVersion(ElasticVersion.V6_7)).isFalse();
    assertThat(ElasticVersion.V7_4.isAtLeastMinorVersion(ElasticVersion.V6_7)).isFalse();
    assertThat(ElasticVersion.V7_5.isAtLeastMinorVersion(ElasticVersion.V6_7)).isFalse();
    assertThat(ElasticVersion.V7_6.isAtLeastMinorVersion(ElasticVersion.V6_7)).isFalse();
    assertThat(ElasticVersion.V7_7.isAtLeastMinorVersion(ElasticVersion.V6_7)).isFalse();
    assertThat(ElasticVersion.V7_8.isAtLeastMinorVersion(ElasticVersion.V6_7)).isFalse();
    assertThat(ElasticVersion.V7_0.isAtLeastMinorVersion(ElasticVersion.V6_8)).isFalse();
    assertThat(ElasticVersion.V7_1.isAtLeastMinorVersion(ElasticVersion.V6_8)).isFalse();
    assertThat(ElasticVersion.V7_2.isAtLeastMinorVersion(ElasticVersion.V6_8)).isFalse();
    assertThat(ElasticVersion.V7_3.isAtLeastMinorVersion(ElasticVersion.V6_8)).isFalse();
    assertThat(ElasticVersion.V7_4.isAtLeastMinorVersion(ElasticVersion.V6_8)).isFalse();
    assertThat(ElasticVersion.V7_5.isAtLeastMinorVersion(ElasticVersion.V6_8)).isFalse();
    assertThat(ElasticVersion.V7_6.isAtLeastMinorVersion(ElasticVersion.V6_8)).isFalse();
    assertThat(ElasticVersion.V7_7.isAtLeastMinorVersion(ElasticVersion.V6_8)).isFalse();
    assertThat(ElasticVersion.V7_8.isAtLeastMinorVersion(ElasticVersion.V6_8)).isFalse();
  }

  @Test
@@ -540,7 +540,7 @@ class GrReplyDialog extends KeyboardShortcutMixin(GestureEventListeners(
    if (reviewer.account) {
      reviewerId = reviewer.account._account_id || reviewer.account.email;
    } else if (reviewer.group) {
      reviewerId = reviewer.group.id;
      reviewerId = decodeURIComponent(reviewer.group.id);
      confirmed = reviewer.group.confirmed;
    }
    return {reviewer: reviewerId, confirmed};
@@ -72,6 +72,7 @@ const RoutePattern = {
  DASHBOARD: /^\/dashboard\/(.+)$/,
  CUSTOM_DASHBOARD: /^\/dashboard\/?$/,
  PROJECT_DASHBOARD: /^\/p\/(.+)\/\+\/dashboard\/(.+)/,
  LEGACY_PROJECT_DASHBOARD: /^\/projects\/(.+),dashboards\/(.+)/,

  AGREEMENTS: /^\/settings\/agreements\/?/,
  NEW_AGREEMENTS: /^\/settings\/new-agreement\/?/,
@@ -880,6 +881,11 @@ export class GrRouter extends GestureEventListeners(
      '_handleProjectDashboardRoute'
    );

    this._mapRoute(
      RoutePattern.LEGACY_PROJECT_DASHBOARD,
      '_handleLegacyProjectDashboardRoute'
    );

    this._mapRoute(RoutePattern.GROUP_INFO, '_handleGroupInfoRoute', true);

    this._mapRoute(
@@ -1255,6 +1261,10 @@ export class GrRouter extends GestureEventListeners(
    this.reporting.setRepoName(project);
  }

  _handleLegacyProjectDashboardRoute(data: PageContextWithQueryMap) {
    this._redirect('/p/' + data.params[0] + '/+/dashboard/' + data.params[1]);
  }

  _handleGroupInfoRoute(data: PageContextWithQueryMap) {
    this._redirect('/admin/groups/' + encodeURIComponent(data.params[0]));
  }
@@ -197,6 +197,7 @@ suite('gr-router tests', () => {
      '_handleImproperlyEncodedPlusRoute',
      '_handlePassThroughRoute',
      '_handleProjectDashboardRoute',
      '_handleLegacyProjectDashboardRoute',
      '_handleProjectsOldRoute',
      '_handleRepoAccessRoute',
      '_handleRepoDashboardsRoute',
@@ -617,6 +618,14 @@ suite('gr-router tests', () => {
      handlePassThroughRoute = sinon.stub(element, '_handlePassThroughRoute');
    });

    test('_handleLegacyProjectDashboardRoute', () => {
      const params = {0: 'gerrit/project', 1: 'dashboard:main'};
      element._handleLegacyProjectDashboardRoute({params});
      assert.isTrue(redirectStub.calledOnce);
      assert.equal(redirectStub.lastCall.args[0],
          '/p/gerrit/project/+/dashboard/dashboard:main');
    });

    test('_handleAgreementsRoute', () => {
      const data = {params: {}};
      element._handleAgreementsRoute(data);