Commit 6c577c9a authored by GitLab Bot

Add latest changes from gitlab-org/gitlab@master

Parent fe965271
......@@ -15,6 +15,17 @@ module Gitlab
# this is better than a project being stuck in the "import" state
# forever.
sidekiq_options dead: false, retry: 5
sidekiq_retries_exhausted do |msg, e|
Gitlab::Import::Logger.error(
event: :github_importer_exhausted,
message: msg['error_message'],
class: msg['class'],
args: msg['args'],
exception_message: e.message,
exception_backtrace: e.backtrace
)
end
end
end
end
......
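For context on the hunk above: Sidekiq invokes a `sidekiq_retries_exhausted` block once, after the last configured retry has failed, passing the job payload hash and the exception that caused the final failure. The sketch below is a hypothetical standalone worker (not the GitLab importer worker) that shows the shape of that hook; it assumes the `sidekiq` gem is installed and logs to Sidekiq's own logger rather than `Gitlab::Import::Logger`.

```ruby
# Hypothetical worker illustrating the sidekiq_retries_exhausted hook.
# Not the GitLab importer worker; for demonstration only.
require 'sidekiq'

class ExampleImportWorker
  include Sidekiq::Worker

  # Mirror the policy above: never send the job to the dead set, retry 5 times.
  sidekiq_options dead: false, retry: 5

  # Called once, after the final retry fails. `msg` is the job hash
  # ('class', 'args', 'error_message', ...); `exception` is the last error.
  sidekiq_retries_exhausted do |msg, exception|
    Sidekiq.logger.error(
      "retries exhausted for #{msg['class']}: #{msg['error_message']} " \
      "(args: #{msg['args'].inspect}, last error: #{exception.message})"
    )
  end

  def perform(project_id)
    raise "import failed for project #{project_id}"
  end
end
```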
---
title: Improve performance of commit search by limiting the number of results requested
merge_request: 32260
author:
type: performance
---
title: Prevent multiple Auto DevOps deployment jobs running concurrently when using
manual rollout
merge_request: 32824
author:
type: fixed
......@@ -86,6 +86,8 @@
- 1
- - elastic_indexer
- 1
- - elastic_indexing_control
- 1
- - elastic_namespace_indexer
- 1
- - elastic_namespace_rollout
......
# frozen_string_literal: true
class AddElasticsearchPauseIndexingToApplicationSettings < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
with_lock_retries do
add_column :application_settings, :elasticsearch_pause_indexing, :boolean, default: false, null: false
end
end
def down
remove_column :application_settings, :elasticsearch_pause_indexing
end
end
......@@ -443,6 +443,7 @@ CREATE TABLE public.application_settings (
container_registry_features text[] DEFAULT '{}'::text[] NOT NULL,
spam_check_endpoint_url text,
spam_check_endpoint_enabled boolean DEFAULT false NOT NULL,
elasticsearch_pause_indexing boolean DEFAULT false NOT NULL,
CONSTRAINT check_d03919528d CHECK ((char_length(container_registry_vendor) <= 255)),
CONSTRAINT check_d820146492 CHECK ((char_length(spam_check_endpoint_url) <= 255)),
CONSTRAINT check_e5aba18f02 CHECK ((char_length(container_registry_version) <= 255))
......@@ -13886,6 +13887,7 @@ COPY "schema_migrations" (version) FROM STDIN;
20200508021128
20200508050301
20200508091106
20200508140959
20200511080113
20200511083541
20200511092246
......
......@@ -431,7 +431,7 @@ documentation](index.md#3-gitaly-server-configuration).
gitlab_workhorse['enable'] = false
prometheus_monitoring['enable'] = false
# Enable only the Praefect service
# Enable only the Gitaly service
gitaly['enable'] = true
# Prevent database connections during 'gitlab-ctl reconfigure'
......
......@@ -912,7 +912,7 @@ fetch = +refs/environments/*:refs/remotes/origin/environments/*
### Scoping environments with specs
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/2112) in [GitLab Premium](https://about.gitlab.com/pricing/) 9.4.
> - [Scoping for environment variables was moved to Core](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/30779) to Core in GitLab 12.2.
> - [Scoping for environment variables was moved to Core](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/30779) in GitLab 12.2.
You can limit the environment scope of a variable by
defining which environments it can be available for.
......
......@@ -142,6 +142,7 @@ The following Elasticsearch settings are available:
| Parameter | Description |
| ----------------------------------------------------- | ----------- |
| `Elasticsearch indexing` | Enables/disables Elasticsearch indexing. You may want to enable indexing but disable search, for example, to give the index time to be fully completed. Note that this option has no effect on existing data; it only enables/disables the background indexer that tracks data changes. Enabling it will not index your existing data; use the dedicated Rake task for that, as explained in [Adding GitLab's data to the Elasticsearch index](#adding-gitlabs-data-to-the-elasticsearch-index). |
| `Elasticsearch pause indexing` | Enables/disables temporary indexing pause. This is useful for cluster migration/reindexing. All changes are still tracked, but they are not committed to the Elasticsearch index until unpaused. |
| `Search with Elasticsearch enabled` | Enables/disables using Elasticsearch in search. |
| `URL` | The URL to use for connecting to Elasticsearch. Use a comma-separated list to support clustering (e.g., `http://host1, https://host2:9200`). If your Elasticsearch instance is password protected, pass the `username:password` in the URL (e.g., `http://<username>:<password>@<elastic_host>:9200/`). |
| `Number of Elasticsearch shards` | Elasticsearch indexes are split into multiple shards for performance reasons. In general, larger indexes need to have more shards. Changes to this value do not take effect until the index is recreated. You can read more about tradeoffs in the [Elasticsearch documentation](https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-create-index.html#create-index-settings) |
......
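The new `Elasticsearch pause indexing` checkbox is backed by the `elasticsearch_pause_indexing` column added to `application_settings` by the migration above. As a hedged illustration only (the admin UI is the supported path), toggling it from a Rails console on the GitLab instance would look roughly like this, assuming the usual `ApplicationSetting.current` accessor:

```ruby
# Sketch only: pause indexing before an Elasticsearch cluster migration or
# reindex. Changes keep being tracked while paused, but are not committed
# to the index until indexing is resumed.
ApplicationSetting.current.update!(elasticsearch_pause_indexing: true)

# ... perform the cluster migration / reindexing ...

# Resume indexing so the tracked changes are flushed to Elasticsearch.
ApplicationSetting.current.update!(elasticsearch_pause_indexing: false)
```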
......@@ -176,6 +176,7 @@ production_manual:
.manual_rollout_template: &manual_rollout_template
<<: *rollout_template
stage: production
resource_group: production
rules:
- if: '$CI_KUBERNETES_ACTIVE == null || $CI_KUBERNETES_ACTIVE == ""'
when: never
......
......@@ -20,7 +20,7 @@ module Gitlab
when 'wiki_blobs'
paginated_wiki_blobs(wiki_blobs(limit: limit_up_to_page(page, per_page)), page, per_page)
when 'commits'
Kaminari.paginate_array(commits).page(page).per(per_page)
paginated_commits(page, per_page)
when 'users'
users.page(page).per(per_page)
else
......@@ -37,7 +37,7 @@ module Gitlab
when 'wiki_blobs'
wiki_blobs_count.to_s
when 'commits'
commits_count.to_s
formatted_limited_count(commits_count)
else
super
end
......@@ -72,7 +72,7 @@ module Gitlab
end
def commits_count
@commits_count ||= commits.count
@commits_count ||= commits(limit: count_limit).count
end
def single_commit_result?
......@@ -86,6 +86,12 @@ module Gitlab
private
def paginated_commits(page, per_page)
results = commits(limit: limit_up_to_page(page, per_page))
Kaminari.paginate_array(results).page(page).per(per_page)
end
def paginated_blobs(blobs, page, per_page)
results = Kaminari.paginate_array(blobs).page(page).per(per_page)
......@@ -139,21 +145,21 @@ module Gitlab
end
# rubocop: enable CodeReuse/ActiveRecord
def commits
@commits ||= find_commits(query)
def commits(limit:)
@commits ||= find_commits(query, limit: limit)
end
def find_commits(query)
def find_commits(query, limit:)
return [] unless Ability.allowed?(@current_user, :download_code, @project)
commits = find_commits_by_message(query)
commits = find_commits_by_message(query, limit: limit)
commit_by_sha = find_commit_by_sha(query)
commits |= [commit_by_sha] if commit_by_sha
commits
end
def find_commits_by_message(query)
project.repository.find_commits_by_message(query)
def find_commits_by_message(query, limit:)
project.repository.find_commits_by_message(query, nil, nil, limit)
end
def find_commit_by_sha(query)
......
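The `limit_up_to_page` helper called above is not part of this hunk. Judging from the specs further down (page 2 with `per_page: 1` is expected to request `COUNT_LIMIT + 1` commits), it presumably adds the offset of the requested page to the count limit. A hypothetical reconstruction of that arithmetic, with an assumed `COUNT_LIMIT` value:

```ruby
# Hypothetical reconstruction of the limit arithmetic implied by the specs;
# the real helper and constant live in Gitlab::ProjectSearchResults.
COUNT_LIMIT = 100 # assumed value, for illustration only

def limit_up_to_page(page, per_page)
  current_page = [page.to_i, 1].max      # treat nil/0 as the first page
  offset = per_page * (current_page - 1) # results consumed by earlier pages

  # Request enough commits to fill every page up to the requested one,
  # while still capping the extra work at roughly COUNT_LIMIT.
  COUNT_LIMIT + offset
end

limit_up_to_page(1, 20) # => 100
limit_up_to_page(2, 1)  # => 101, matching the "COUNT_LIMIT plus page offset" spec
```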
......@@ -3849,6 +3849,9 @@ msgstr ""
msgid "Changes are shown as if the <b>source</b> revision was being merged into the <b>target</b> revision."
msgstr ""
msgid "Changes are still tracked. Useful for cluster/index migrations."
msgstr ""
msgid "Changes are unknown"
msgstr ""
......
......@@ -34,7 +34,7 @@ describe Gitlab::ProjectSearchResults do
'blobs' | :limited_blobs_count | max_limited_count
'notes' | :limited_notes_count | max_limited_count
'wiki_blobs' | :wiki_blobs_count | '1234'
'commits' | :commits_count | '1234'
'commits' | :commits_count | max_limited_count
'projects' | :limited_projects_count | max_limited_count
'unknown' | nil | nil
end
......@@ -386,6 +386,19 @@ describe Gitlab::ProjectSearchResults do
end
end
describe '#commits_count' do
let(:project) { create(:project, :public, :repository) }
it 'limits the number of commits requested' do
expect(project.repository)
.to receive(:find_commits_by_message)
.with(anything, anything, anything, described_class::COUNT_LIMIT)
.and_call_original
described_class.new(user, project, '.').commits_count
end
end
# Examples for commit access level test
#
# params:
......@@ -452,6 +465,54 @@ describe Gitlab::ProjectSearchResults do
end
describe 'commit search' do
context 'pagination' do
let(:project) { create(:project, :public, :repository) }
it 'returns the correct results for each page' do
expect(results_page(1)).to contain_exactly(commit('b83d6e391c22777fca1ed3012fce84f633d7fed0'))
expect(results_page(2)).to contain_exactly(commit('498214de67004b1da3d820901307bed2a68a8ef6'))
expect(results_page(3)).to contain_exactly(commit('1b12f15a11fc6e62177bef08f47bc7b5ce50b141'))
end
it 'returns the correct number of pages' do
expect(results_page(1).total_pages).to eq(project.repository.commit_count)
end
context 'limiting requested commits' do
context 'on page 1' do
it "limits to #{described_class::COUNT_LIMIT}" do
expect(project.repository)
.to receive(:find_commits_by_message)
.with(anything, anything, anything, described_class::COUNT_LIMIT)
.and_call_original
results_page(1)
end
end
context 'on subsequent pages' do
it "limits to #{described_class::COUNT_LIMIT} plus page offset" do
expect(project.repository)
.to receive(:find_commits_by_message)
.with(anything, anything, anything, described_class::COUNT_LIMIT + 1)
.and_call_original
results_page(2)
end
end
end
def results_page(page)
described_class.new(user, project, '.').objects('commits', per_page: 1, page: page)
end
def commit(hash)
project.repository.commit(hash)
end
end
context 'by commit message' do
let(:project) { create(:project, :public, :repository) }
let(:commit) { project.repository.commit('59e29889be61e6e0e5e223bfa9ac2721d31605b8') }
......