Commit 6f0f893b authored by GitLab Bot

Add latest changes from gitlab-org/gitlab@master

Parent 8b1228b0
......@@ -10,5 +10,8 @@ lint-ci-gitlab:
- "**/*.yml"
image: sdesbure/yamllint:latest
dependencies: []
variables:
LINT_PATHS: .gitlab-ci.yml .gitlab/ci lib/gitlab/ci/templates changelogs
script:
- yamllint .gitlab-ci.yml .gitlab/ci lib/gitlab/ci/templates changelogs
- '[[ ! -d "ee/" ]] || export LINT_PATHS="$LINT_PATHS ee/changelogs"'
- yamllint $LINT_PATHS
......@@ -238,7 +238,7 @@ entry.
- Skip updating LFS objects in mirror updates if repository has not changed. !21744
- Add indexes on deployments to improve environments search. !21789
### Added (117 changes, 16 of them are from the community)
### Added (119 changes, 18 of them are from the community)
- Add upvote/downvotes attributes to GraphQL Epic query. !14311
- Delete kubernetes cluster association and resources. !16954
......@@ -357,6 +357,8 @@ entry.
- Added migration which adds service desk username column. !21733
- Add SentryIssue table to store a link between issue and sentry issue. !37026
- Add path based targeting to broadcast messages.
- Add allow failure in pipeline webhook event. !20978 (Gaetan Semet)
- Add runner information in build web hook event. !20709 (Gaetan Semet)
### Other (51 changes, 28 of them are from the community)
......@@ -483,7 +485,7 @@ entry.
- Do not display project labels that are not visible for user accessing group labels.
- Standardize error response when route is missing.
### Fixed (99 changes, 14 of them are from the community)
### Fixed (100 changes, 15 of them are from the community)
- Fix incorrect selection of custom templates. !17205
- Smaller width for design comments layout, truncate image title. !17547
......@@ -584,6 +586,7 @@ entry.
- Only allow confirmed users to run pipelines.
- Fix scroll to bottom with new job log.
- Fixed protected branches flash styling.
- Show tag link whenever it's a tag in chat message integration for push events and pipeline events. !18126 (Mats Estensen)
### Deprecated (2 changes)
......
import axios from '~/lib/utils/axios_utils';
import Poll from './poll';
import httpStatusCodes from './http_status';
/**
* Polls an endpoint until it returns either a 200 OK or an error status.
* The Poll-Interval header in the responses is used to determine how
* frequently to poll.
*
* Once a 200 OK is received, the promise resolves with that response. If an
* error status is received, the promise rejects with the error.
*
* @param {string} url - The URL to poll.
* @param {Object} [config] - The config to provide to axios.get().
* @returns {Promise}
*/
export default (url, config = {}) =>
new Promise((resolve, reject) => {
const eTagPoll = new Poll({
resource: {
axiosGet(data) {
return axios.get(data.url, {
headers: {
'Content-Type': 'application/json',
},
...data.config,
});
},
},
data: { url, config },
method: 'axiosGet',
successCallback: response => {
if (response.status === httpStatusCodes.OK) {
resolve(response);
eTagPoll.stop();
}
},
errorCallback: reject,
});
eTagPoll.makeRequest();
});
......@@ -66,7 +66,7 @@ export default {
:svg-path="illustrationPath"
:description="
__(
'Releases mark specific points in a project\'s development history, communicate information about the type of change, and deliver on prepared, often compiled, versions of the software to be reused elsewhere. Currently, releases can only be created through the API.',
'Releases are based on Git tags and mark specific points in a project\'s development history. They can contain information about the type of changes and can also deliver binaries, like compiled versions of your software.',
)
"
:primary-button-link="documentationLink"
......
......@@ -76,6 +76,9 @@ module Types
field :last_release_short_version, GraphQL::STRING_TYPE,
null: true,
description: "Release version the error was last seen"
field :gitlab_commit, GraphQL::STRING_TYPE,
null: true,
description: "GitLab commit SHA attributed to the Error based on the release version"
def first_seen
DateTime.parse(object.first_seen)
......
# frozen_string_literal: true
module Ci
class Bridge < CommitStatus
include Ci::Processable
class Bridge < Ci::Processable
include Ci::Contextable
include Ci::PipelineDelegator
include Importable
......
# frozen_string_literal: true
module Ci
class Build < CommitStatus
include Ci::Processable
class Build < Ci::Processable
include Ci::Metadatable
include Ci::Contextable
include Ci::PipelineDelegator
......
......@@ -33,8 +33,7 @@ module Ci
has_many :stages, -> { order(position: :asc) }, inverse_of: :pipeline
has_many :statuses, class_name: 'CommitStatus', foreign_key: :commit_id, inverse_of: :pipeline
has_many :latest_statuses_ordered_by_stage, -> { latest.order(:stage_idx, :stage) }, class_name: 'CommitStatus', foreign_key: :commit_id, inverse_of: :pipeline
has_many :processables, -> { processables },
class_name: 'CommitStatus', foreign_key: :commit_id, inverse_of: :pipeline
has_many :processables, class_name: 'Ci::Processable', foreign_key: :commit_id, inverse_of: :pipeline
has_many :builds, foreign_key: :commit_id, inverse_of: :pipeline
has_many :trigger_requests, dependent: :destroy, foreign_key: :commit_id # rubocop:disable Cop/ActiveRecordDependent
has_many :variables, class_name: 'Ci::PipelineVariable'
......
# frozen_string_literal: true
module Ci
##
# This module implements methods that need to be implemented by CI/CD
# entities that are supposed to go through pipeline processing
# services.
#
#
module Processable
extend ActiveSupport::Concern
class Processable < ::CommitStatus
has_many :needs, class_name: 'Ci::BuildNeed', foreign_key: :build_id, inverse_of: :build
included do
has_many :needs, class_name: 'Ci::BuildNeed', foreign_key: :build_id, inverse_of: :build
accepts_nested_attributes_for :needs
accepts_nested_attributes_for :needs
scope :preload_needs, -> { preload(:needs) }
scope :preload_needs, -> { preload(:needs) }
end
validates :type, presence: true
def schedulable?
raise NotImplementedError
......
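To make the refactor above easier to follow, here is a minimal sketch of the resulting class hierarchy. The class relationships come directly from the hunks in this commit; the console-style calls are illustrative only.

# Sketch only: after this change, builds and bridges are STI subclasses of
# Ci::Processable, which in turn is a CommitStatus.
Ci::Build < Ci::Processable      # => true
Ci::Bridge < Ci::Processable     # => true
Ci::Processable < ::CommitStatus # => true

# The has_many :processables association on Ci::Pipeline (shown in the hunk above)
# now targets the common parent class instead of filtering CommitStatus by type,
# which is why the `processables` scope is removed from CommitStatus below.
pipeline.processables # returns Ci::Build and Ci::Bridge records via STI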
......@@ -45,7 +45,6 @@ class CommitStatus < ApplicationRecord
scope :before_stage, -> (index) { where('stage_idx < ?', index) }
scope :for_stage, -> (index) { where(stage_idx: index) }
scope :after_stage, -> (index) { where('stage_idx > ?', index) }
scope :processables, -> { where(type: %w[Ci::Build Ci::Bridge]) }
scope :for_ids, -> (ids) { where(id: ids) }
scope :for_ref, -> (ref) { where(ref: ref) }
scope :by_name, -> (name) { where(name: name) }
......
......@@ -113,9 +113,8 @@ module ErrorTracking
when 'list_issues'
sentry_client.list_issues(**opts.symbolize_keys)
when 'issue_details'
{
issue: sentry_client.issue_details(**opts.symbolize_keys)
}
issue = sentry_client.issue_details(**opts.symbolize_keys)
{ issue: add_gitlab_issue_details(issue) }
when 'issue_latest_event'
{
latest_event: sentry_client.issue_latest_event(**opts.symbolize_keys)
......@@ -140,6 +139,20 @@ module ErrorTracking
private
def add_gitlab_issue_details(issue)
issue.gitlab_commit = match_gitlab_commit(issue.first_release_version)
issue
end
def match_gitlab_commit(release_version)
return unless release_version
commit = project.repository.commit(release_version)
commit&.id
end
def handle_exceptions
yield
rescue Sentry::Client::Error => e
......
......@@ -81,6 +81,10 @@ class Release < ApplicationRecord
evidence&.summary || {}
end
def milestone_titles
self.milestones.map {|m| m.title }.sort.join(", ")
end
private
def actual_sha
......
# frozen_string_literal: true
module Ci
class BridgePresenter < CommitStatusPresenter
class BridgePresenter < ProcessablePresenter
def detailed_status
@detailed_status ||= subject.detailed_status(user)
end
......
# frozen_string_literal: true
module Ci
class BuildPresenter < CommitStatusPresenter
class BuildPresenter < ProcessablePresenter
def erased_by_user?
# Build can be erased through API, therefore it does not have
# `erased_by` user assigned in that case.
......
# frozen_string_literal: true
module Ci
class ProcessablePresenter < CommitStatusPresenter
end
end
......@@ -8,6 +8,7 @@ module ErrorTracking
:external_url,
:first_release_last_commit,
:first_release_short_version,
:gitlab_commit,
:first_seen,
:frequency,
:gitlab_issue,
......
......@@ -11,10 +11,13 @@ module Releases
return error('params is empty', 400) if empty_params?
return error("Milestone(s) not found: #{inexistent_milestones.join(', ')}", 400) if inexistent_milestones.any?
params[:milestones] = milestones if param_for_milestone_titles_provided?
if param_for_milestone_titles_provided?
previous_milestones = release.milestones.map(&:title)
params[:milestones] = milestones
end
if release.update(params)
success(tag: existing_tag, release: release)
success(tag: existing_tag, release: release, milestones_updated: milestones_updated?(previous_milestones))
else
error(release.errors.messages || '400 Bad request', 400)
end
......@@ -29,5 +32,11 @@ module Releases
def empty_params?
params.except(:tag).empty?
end
def milestones_updated?(previous_milestones)
return false unless param_for_milestone_titles_provided?
previous_milestones.to_set != release.milestones.map(&:title)
end
end
end
......@@ -188,3 +188,4 @@
- create_evidence
- group_export
- self_monitoring_project_create
- self_monitoring_project_delete
# frozen_string_literal: true
module SelfMonitoringProjectWorker
extend ActiveSupport::Concern
included do
# This worker falls under Self-monitoring with Monitor::APM group. However,
# self-monitoring is not classified as a feature category but rather as
# Other Functionality. Metrics seems to be the closest feature_category for
# this worker.
feature_category :metrics
end
LEASE_TIMEOUT = 15.minutes.to_i
EXCLUSIVE_LEASE_KEY = 'self_monitoring_service_creation_deletion'
class_methods do
# @param job_id [String]
# Job ID that is used to construct the cache keys.
# @return [Boolean]
# Returns true if the job is enqueued or in progress and false otherwise.
def in_progress?(job_id)
Gitlab::SidekiqStatus.job_status(Array.wrap(job_id)).first
end
end
private
def lease_key
EXCLUSIVE_LEASE_KEY
end
def lease_timeout
self.class::LEASE_TIMEOUT
end
end
......@@ -3,38 +3,11 @@
class SelfMonitoringProjectCreateWorker
include ApplicationWorker
include ExclusiveLeaseGuard
# This worker falls under Self-monitoring with Monitor::APM group. However,
# self-monitoring is not classified as a feature category but rather as
# Other Functionality. Metrics seems to be the closest feature_category for
# this worker.
feature_category :metrics
LEASE_TIMEOUT = 15.minutes.to_i
EXCLUSIVE_LEASE_KEY = 'self_monitoring_service_creation_deletion'
include SelfMonitoringProjectWorker
def perform
try_obtain_lease do
Gitlab::DatabaseImporters::SelfMonitoring::Project::CreateService.new.execute
end
end
# @param job_id [String]
# Job ID that is used to construct the cache keys.
# @return [Hash]
# Returns true if the job is enqueued or in progress and false otherwise.
def self.in_progress?(job_id)
Gitlab::SidekiqStatus.job_status(Array.wrap(job_id)).first
end
private
def lease_key
EXCLUSIVE_LEASE_KEY
end
def lease_timeout
LEASE_TIMEOUT
end
end
# frozen_string_literal: true
class SelfMonitoringProjectDeleteWorker
include ApplicationWorker
include ExclusiveLeaseGuard
include SelfMonitoringProjectWorker
def perform
try_obtain_lease do
Gitlab::DatabaseImporters::SelfMonitoring::Project::DeleteService.new.execute
end
end
end
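A brief usage sketch of the shared SelfMonitoringProjectWorker concern: perform_async comes from Sidekiq via ApplicationWorker, while the status check and lease constants are defined in the concern above.

# Enqueue the delete worker and check its status by job ID.
job_id = SelfMonitoringProjectDeleteWorker.perform_async

# True while the job is enqueued or running, false once Sidekiq reports it finished.
SelfMonitoringProjectDeleteWorker.in_progress?(job_id)

# The create and delete workers share EXCLUSIVE_LEASE_KEY through the concern,
# so only one of them can hold the exclusive lease at any given time.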
---
title: Migrate the database to activate projects prometheus service integration for projects with prometheus installed on shared k8s cluster.
merge_request: 19956
author:
type: fixed
---
title: "Show tag link whenever it's a tag in chat message integration for push events and pipeline events"
merge_request: 18126
author: Mats Estensen
type: fixed
---
title: Add runner information in build web hook event
merge_request: 20709
author: Gaetan Semet
type: added
---
title: |
Add allow failure in pipeline webhook event
merge_request: 20978
author: Gaetan Semet
type: added
---
title: Add GitLab commit to error detail endpoint
merge_request: 22174
author:
type: added
---
title: Add support to export and import award emojis for issues, issue notes, MR, MR notes and snippet notes
merge_request: 22493
author:
type: fixed
---
title: Remove unused index on project_mirror_data
merge_request: 22647
author:
type: performance
......@@ -100,6 +100,7 @@
- [create_evidence, 2]
- [group_export, 1]
- [self_monitoring_project_create, 2]
- [self_monitoring_project_delete, 2]
# EE-specific queues
- [analytics, 1]
......
# frozen_string_literal: true
class AddTimestampSoftwarelicensespolicy < ActiveRecord::Migration[5.2]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def up
add_timestamps_with_timezone(:software_license_policies, null: true)
end
def down
remove_timestamps(:software_license_policies)
end
end
# frozen_string_literal: true
class RemoveIndexProjectMirrorDataOnJid < ActiveRecord::Migration[5.2]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
disable_ddl_transaction!
def up
remove_concurrent_index :project_mirror_data, :jid
end
def down
add_concurrent_index :project_mirror_data, :jid
end
end
# frozen_string_literal: true
class PatchPrometheusServicesForSharedClusterApplications < ActiveRecord::Migration[5.2]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
MIGRATION = 'ActivatePrometheusServicesForSharedClusterApplications'.freeze
BATCH_SIZE = 500
DELAY = 2.minutes
disable_ddl_transaction!
module Migratable
module Applications
class Prometheus < ActiveRecord::Base
self.table_name = 'clusters_applications_prometheus'
enum status: {
errored: -1,
installed: 3,
updated: 5
}
end
end
class Project < ActiveRecord::Base
self.table_name = 'projects'
include ::EachBatch
scope :with_application_on_group_clusters, -> {
joins("INNER JOIN namespaces ON namespaces.id = projects.namespace_id")
.joins("INNER JOIN cluster_groups ON cluster_groups.group_id = namespaces.id")
.joins("INNER JOIN clusters ON clusters.id = cluster_groups.cluster_id AND clusters.cluster_type = #{Cluster.cluster_types['group_type']}")
.joins("INNER JOIN clusters_applications_prometheus ON clusters_applications_prometheus.cluster_id = clusters.id
AND clusters_applications_prometheus.status IN (#{Applications::Prometheus.statuses[:installed]}, #{Applications::Prometheus.statuses[:updated]})")
}
scope :without_active_prometheus_services, -> {
joins("LEFT JOIN services ON services.project_id = projects.id AND services.type = 'PrometheusService'")
.where("services.id IS NULL OR (services.active = FALSE AND services.properties = '{}')")
}
end
class Cluster < ActiveRecord::Base
self.table_name = 'clusters'
enum cluster_type: {
instance_type: 1,
group_type: 2
}
def self.has_prometheus_application?
joins("INNER JOIN clusters_applications_prometheus ON clusters_applications_prometheus.cluster_id = clusters.id
AND clusters_applications_prometheus.status IN (#{Applications::Prometheus.statuses[:installed]}, #{Applications::Prometheus.statuses[:updated]})").exists?
end
end
end
def up
projects_without_active_prometheus_service.group('projects.id').each_batch(of: BATCH_SIZE) do |batch, index|
bg_migrations_batch = batch.select('projects.id').map { |project| [MIGRATION, project.id] }
delay = index * DELAY
BackgroundMigrationWorker.bulk_perform_in(delay.seconds, bg_migrations_batch)
end
end
def down
# no-op
end
private
def projects_without_active_prometheus_service
scope = Migratable::Project.without_active_prometheus_services
return scope if migrate_instance_cluster?
scope.with_application_on_group_clusters
end
def migrate_instance_cluster?
if instance_variable_defined?('@migrate_instance_cluster')
@migrate_instance_cluster
else
@migrate_instance_cluster = Migratable::Cluster.instance_type.has_prometheus_application?
end
end
end
......@@ -10,7 +10,7 @@
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 2020_01_08_155731) do
ActiveRecord::Schema.define(version: 2020_01_08_233040) do
# These are extensions that must be enabled in order to support this database
enable_extension "pg_trgm"
......@@ -3199,7 +3199,6 @@ ActiveRecord::Schema.define(version: 2020_01_08_155731) do
t.text "last_error"
t.datetime_with_timezone "last_update_at"
t.datetime_with_timezone "last_successful_update_at"
t.index ["jid"], name: "index_project_mirror_data_on_jid"
t.index ["last_successful_update_at"], name: "index_project_mirror_data_on_last_successful_update_at"
t.index ["last_update_at", "retry_count"], name: "index_project_mirror_data_on_last_update_at_and_retry_count"
t.index ["next_execution_timestamp", "retry_count"], name: "index_mirror_data_on_next_execution_and_retry_count"
......@@ -3824,6 +3823,8 @@ ActiveRecord::Schema.define(version: 2020_01_08_155731) do
t.integer "project_id", null: false
t.integer "software_license_id", null: false
t.integer "classification", default: 0, null: false
t.datetime_with_timezone "created_at"
t.datetime_with_timezone "updated_at"
t.index ["project_id", "software_license_id"], name: "index_software_license_policies_unique_per_project", unique: true
t.index ["software_license_id"], name: "index_software_license_policies_on_software_license_id"
end
......
......@@ -364,6 +364,7 @@ The following documentation relates to the DevOps **Secure** stage:
| [Dependency Scanning](user/application_security/dependency_scanning/index.md) **(ULTIMATE)** | Analyze your dependencies for known vulnerabilities. |
| [Dynamic Application Security Testing (DAST)](user/application_security/dast/index.md) **(ULTIMATE)** | Analyze running web applications for known vulnerabilities. |
| [Group Security Dashboard](user/application_security/security_dashboard/index.md#group-security-dashboard) **(ULTIMATE)** | View vulnerabilities in all the projects in a group and its subgroups. |
| [Instance Security Dashboard](user/application_security/security_dashboard/index.md#instance-security-dashboard) **(ULTIMATE)** | View vulnerabilities in all the projects you're interested in. |
| [License Compliance](user/application_security/license_compliance/index.md) **(ULTIMATE)** | Search your project's dependencies for their licenses. |
| [Pipeline Security Dashboard](user/application_security/security_dashboard/index.md#pipeline-security-dashboard) **(ULTIMATE)** | View the security reports for your project's pipelines. |
| [Project Security Dashboard](user/application_security/security_dashboard/index.md#project-security-dashboard) **(ULTIMATE)** | View the latest security reports for your project. |
......
......@@ -80,6 +80,9 @@ From there, you can see the following actions:
- Project was archived
- Project was unarchived
- Added/removed/updated protected branches
- Release was added to a project
- Release was updated
- Release milestone associations changed
### Instance events **(PREMIUM ONLY)**
......
......@@ -154,7 +154,7 @@ Some basic Ruby runtime metrics are available:
| `ruby_sampler_duration_seconds` | Counter | 11.1 | Time spent collecting stats |
| `ruby_process_cpu_seconds_total` | Gauge | 12.0 | Total amount of CPU time per process |
| `ruby_process_max_fds` | Gauge | 12.0 | Maximum number of open file descriptors per process |
| `ruby_process_resident_memory_bytes` | Gauge | 12.0 | Memory usage by process, measured in bytes |
| `ruby_process_resident_memory_bytes` | Gauge | 12.0 | Memory usage by process |
| `ruby_process_start_time_seconds` | Gauge | 12.0 | UNIX timestamp of process start time |
[GC.stat]: https://ruby-doc.org/core-2.6.5/GC.html#method-c-stat
......
......@@ -5711,6 +5711,11 @@ type SentryDetailedError {
"""
frequency: [SentryErrorFrequency!]!
"""
GitLab commit SHA attributed to the Error based on the release version
"""
gitlabCommit: String
"""
ID (global ID) of the error
"""
......
......@@ -15548,6 +15548,20 @@
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "gitlabCommit",
"description": "GitLab commit SHA attributed to the Error based on the release version",
"args": [
],
"type": {
"kind": "SCALAR",
"name": "String",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "id",
"description": "ID (global ID) of the error",
......
......@@ -893,6 +893,7 @@ Autogenerated return type of RemoveAwardEmoji
| `lastReleaseLastCommit` | String | Commit the error was last seen |
| `firstReleaseShortVersion` | String | Release version the error was first seen |
| `lastReleaseShortVersion` | String | Release version the error was last seen |
| `gitlabCommit` | String | GitLab commit SHA attributed to the Error based on the release version |
## SentryErrorFrequency
......
......@@ -210,7 +210,7 @@ For reference, GitLab.com's [auto-scaling shared runner](../user/gitlab_com/inde
## Supported web browsers
We support the current and the previous major release of:
GitLab supports the following web browsers:
- Firefox
- Chrome/Chromium
......@@ -218,10 +218,11 @@ We support the current and the previous major release of:
- Microsoft Edge
- Internet Explorer 11
The browser vendors release regular minor version updates with important bug fixes and security updates.
Support is only provided for the current minor version of the major version you are running.
For the listed web browsers, GitLab supports:
Each time a new browser version is released, we begin supporting that version and stop supporting the third most recent version.
- The current and previous major versions of browsers except Internet Explorer.
- Only version 11 of Internet Explorer.
- The current minor version of a supported major version.
NOTE: **Note:** We do not support running GitLab with JavaScript disabled in the browser and have no plans of supporting that
in the future because we have features such as Issue Boards which require JavaScript extensively.
......
......@@ -26,7 +26,7 @@ The Security Dashboard supports the following reports:
## Requirements
To use the group, project or pipeline security dashboard:
To use the instance, group, project or pipeline security dashboard:
1. At least one project inside a group must be configured with at least one of
the [supported reports](#supported-reports).
......@@ -110,6 +110,31 @@ vulnerabilities are not included either.
Read more on how to [interact with the vulnerabilities](../index.md#interacting-with-the-vulnerabilities).
## Instance Security Dashboard
> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/6953) in [GitLab Ultimate](https://about.gitlab.com/pricing/) 12.7.
At the instance level, the Security Dashboard displays the vulnerabilities
present in all of the projects that you have added to it.
You can access the Instance Security Dashboard from the menu
bar at the top of the page. Under **More**, select **Security**.
![Instance Security Dashboard navigation link](img/instance_security_dashboard_link_v12_4.png)
### Adding projects to the dashboard
To add projects to the dashboard:
1. Click the **Edit dashboard** button on the Instance Security Dashboard page.
1. Search for and add one or more projects using the **Search your projects** field.
1. Click the **Add projects** button.
Once added, the dashboard will display the vulnerabilities found in your chosen
projects.
![Instance Security Dashboard with projects](img/instance_security_dashboard_with_projects_v12_7.png)
## Keeping the dashboards up to date
The Security Dashboard displays information from the results of the most recent
......
......@@ -4,12 +4,12 @@ The ability to contribute conversationally is offered throughout GitLab.
You can leave a comment in the following places:
- issues
- epics **(ULTIMATE)**
- merge requests
- snippets
- commits
- commit diffs
- Issues
- Epics **(ULTIMATE)**
- Merge requests
- Snippets
- Commits
- Commit diffs
There are standard comments, and you also have the option to create a comment
in the form of a thread. A comment can also be [turned into a thread](#start-a-thread-by-replying-to-a-standard-comment)
......@@ -29,9 +29,7 @@ There is a limit of 5,000 comments for every object, for example: issue, epic, a
## Resolvable comments and threads
> **Notes:**
>
> - The main feature was [introduced][ce-5022] in GitLab 8.11.
> - [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/merge_requests/5022) in GitLab 8.11.
> - Resolvable threads can be added only to merge request diffs.
Thread resolution helps keep track of progress during planning or code review.
......@@ -398,18 +396,21 @@ the Merge Request authored by the user that applied them.
1. Choose a line of code to be changed, add a new comment, then click
on the **Insert suggestion** icon in the toolbar:
![Add a new comment](img/insert_suggestion.png)
![Add a new comment](img/suggestion_button_v12_7.png)
1. In the comment, add your suggestion to the pre-populated code block:
![Add a suggestion into a code block tagged properly](img/make_suggestion.png)
![Add a suggestion into a code block tagged properly](img/make_suggestion_v12_7.png)
1. Click **Comment**.
NOTE: **Note:**
If you're using GitLab Premium, GitLab.com Silver, and higher tiers, the thread will display [Review](#merge-request-reviews-premium) options. Click either **Start a review**, **Add comment now**, or **Add to review** to obtain the same result.
The suggestions in the comment can be applied by the merge request author
directly from the merge request:
![Apply suggestions](img/suggestion.png)
![Apply suggestions](img/apply_suggestion_v12_7.png)
Once the author applies a suggestion, it will be marked with the **Applied** label,
the thread will be automatically resolved, and GitLab will create a new commit
......@@ -464,7 +465,6 @@ to the original comment, so a note about when it was last edited will appear und
This feature only exists for Issues, Merge requests, and Epics. Commits, Snippets and Merge request diff threads are
not supported yet.
[ce-5022]: https://gitlab.com/gitlab-org/gitlab-foss/merge_requests/5022
[ce-7125]: https://gitlab.com/gitlab-org/gitlab-foss/merge_requests/7125
[ce-7527]: https://gitlab.com/gitlab-org/gitlab-foss/merge_requests/7527
[ce-7180]: https://gitlab.com/gitlab-org/gitlab-foss/merge_requests/7180
......
......@@ -16,8 +16,12 @@ GitLab's **Releases** are a way to track deliverables in your project. Consider
a snapshot in time of the source, build output, and other metadata or artifacts
associated with a released version of your code.
At the moment, you can create Release entries via the [Releases API](../../../api/releases/index.md);
we recommend doing this as one of the last steps in your CI/CD release pipeline.
There are several ways to create a Release:
- In the interface, when you create a new Git tag.
- In the interface, by adding a release note to an existing Git tag.
- Using the [Releases API](../../../api/releases/index.md): we recommend doing this as one of the last
steps in your CI/CD release pipeline.
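For illustration, a minimal Ruby sketch of the API route mentioned above; the project ID, tag name, and token are placeholders, and the Releases API documentation lists the full set of parameters.

# Create a release for an existing tag via the Releases API.
require 'net/http'
require 'json'

uri = URI('https://gitlab.example.com/api/v4/projects/24/releases')
request = Net::HTTP::Post.new(uri, 'Content-Type' => 'application/json',
                                   'PRIVATE-TOKEN' => ENV['GITLAB_TOKEN'])
request.body = { tag_name: 'v1.0', name: 'v1.0', description: 'First stable release' }.to_json

response = Net::HTTP.start(uri.hostname, uri.port, use_ssl: true) { |http| http.request(request) }
puts response.code # 201 when the release is created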
## Getting started with Releases
......@@ -135,8 +139,9 @@ drag and drop files to it. Release notes are stored in GitLab's database.
There are several ways to add release notes:
- In the interface, when you create a new Git tag.
- In the interface, by adding a note to an existing Git tag.
- Using the GitLab API.
- In the interface, by adding a release note to an existing Git tag.
- Using the [Releases API](../../../api/releases/index.md): we recommend doing this as one of the last
steps in your CI/CD release pipeline.
To create a new tag, navigate to your project's **Repository > Tags** and
click **New tag**. From there, you can fill the form with all the information
......
......@@ -66,6 +66,8 @@ module API
.execute
if result[:status] == :success
log_release_created_audit_event(result[:release])
present result[:release], with: Entities::Release, current_user: current_user
else
render_api_error!(result[:message], result[:http_status])
......@@ -91,6 +93,9 @@ module API
.execute
if result[:status] == :success
log_release_updated_audit_event
log_release_milestones_updated_audit_event if result[:milestones_updated]
present result[:release], with: Entities::Release, current_user: current_user
else
render_api_error!(result[:message], result[:http_status])
......@@ -147,6 +152,20 @@ module API
def release
@release ||= user_project.releases.find_by_tag(params[:tag])
end
def log_release_created_audit_event(release)
# This is a separate method so that EE can extend its behaviour
end
def log_release_updated_audit_event
# This is a separate method so that EE can extend its behaviour
end
def log_release_milestones_updated_audit_event
# This is a separate method so that EE can extend its behaviour
end
end
end
end
API::Releases.prepend_if_ee('EE::API::Releases')
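The empty log_release_*_audit_event methods above are extension points: the prepend_if_ee call lets the EE codebase prepend a module that redefines them. Below is a minimal, self-contained sketch of that prepend pattern; the class and module names are generic placeholders, not the actual EE implementation.

class ReleasesApi
  def log_release_created_audit_event(release)
    # no-op in CE; EE prepends an implementation
  end
end

module EE
  module ReleasesApi
    def log_release_created_audit_event(release)
      puts "audit: release #{release} created"
      super # falls through to the CE no-op
    end
  end
end

ReleasesApi.prepend(EE::ReleasesApi)
ReleasesApi.new.log_release_created_audit_event('v1.0') # prints the audit line, then no-op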
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# Creates missing PrometheusService records, or sets the active attribute to true,
# for all projects that belong to a cluster with the Prometheus application installed.
class ActivatePrometheusServicesForSharedClusterApplications
module Migratable
# Migration model namespace isolated from application code.
class PrometheusService < ActiveRecord::Base
self.inheritance_column = :_type_disabled
self.table_name = 'services'
default_scope { where("services.type = 'PrometheusService'") }
def self.for_project(project_id)
new(
project_id: project_id,
active: true,
properties: '{}',
type: 'PrometheusService',
template: false,
push_events: true,
issues_events: true,
merge_requests_events: true,
tag_push_events: true,
note_events: true,
category: 'monitoring',
default: false,
wiki_page_events: true,
pipeline_events: true,
confidential_issues_events: true,
commit_events: true,
job_events: true,
confidential_note_events: true,
deployment_events: false
)
end
def managed?
properties == '{}'
end
end
end
def perform(project_id)
service = Migratable::PrometheusService.find_by(project_id: project_id) || Migratable::PrometheusService.for_project(project_id)
service.update!(active: true) if service.managed?
end
end
end
end
......@@ -12,10 +12,12 @@ module Gitlab
:external_url,
:first_release_last_commit,
:first_release_short_version,
:first_release_version,
:first_seen,
:frequency,
:gitlab_project,
:gitlab_commit,
:gitlab_issue,
:gitlab_project,
:id,
:last_release_last_commit,
:last_release_short_version,
......
......@@ -16,6 +16,7 @@ tree:
- :timelogs
- notes:
- :author
- :award_emoji
- events:
- :push_event_payload
- label_links:
......@@ -32,18 +33,22 @@ tree:
- :issue_assignees
- :zoom_meetings
- :sentry_issue
- :award_emoji
- snippets:
- :award_emoji
- notes:
- :author
- :award_emoji
- releases:
- :links
- project_members:
- :user
- merge_requests:
- :metrics
- :award_emoji
- notes:
- :author
- :award_emoji
- events:
- :push_event_payload
- :suggestions
......
......@@ -143,7 +143,7 @@ module Sentry
end
def map_to_detailed_error(issue)
Gitlab::ErrorTracking::DetailedError.new(
Gitlab::ErrorTracking::DetailedError.new({
id: issue.fetch('id'),
first_seen: issue.fetch('firstSeen', nil),
last_seen: issue.fetch('lastSeen', nil),
......@@ -159,15 +159,16 @@ module Sentry
short_id: issue.fetch('shortId', nil),
status: issue.fetch('status', nil),
frequency: issue.dig('stats', '24h'),
gitlab_issue: parse_gitlab_issue(issue.fetch('pluginIssues', nil)),
project_id: issue.dig('project', 'id'),
project_name: issue.dig('project', 'name'),
project_slug: issue.dig('project', 'slug'),
gitlab_issue: parse_gitlab_issue(issue.fetch('pluginIssues', nil)),
first_release_last_commit: issue.dig('firstRelease', 'lastCommit'),
last_release_last_commit: issue.dig('lastRelease', 'lastCommit'),
first_release_short_version: issue.dig('firstRelease', 'shortVersion'),
first_release_version: issue.dig('firstRelease', 'version'),
last_release_last_commit: issue.dig('lastRelease', 'lastCommit'),
last_release_short_version: issue.dig('lastRelease', 'shortVersion')
)
})
end
def extract_tags(issue)
......
......@@ -6156,9 +6156,15 @@ msgstr ""
msgid "DesignManagement|Adding a design with the same filename replaces the file in a new version."
msgstr ""
msgid "DesignManagement|Are you sure you want to cancel creating this comment?"
msgstr ""
msgid "DesignManagement|Are you sure you want to delete the selected designs?"
msgstr ""
msgid "DesignManagement|Cancel comment confirmation"
msgstr ""
msgid "DesignManagement|Could not add a new comment. Please try again."
msgstr ""
......@@ -6177,6 +6183,9 @@ msgstr ""
msgid "DesignManagement|Deselect all"
msgstr ""
msgid "DesignManagement|Discard comment"
msgstr ""
msgid "DesignManagement|Error uploading a new design. Please try again."
msgstr ""
......@@ -6189,6 +6198,9 @@ msgstr ""
msgid "DesignManagement|Go to previous design"
msgstr ""
msgid "DesignManagement|Keep comment"
msgstr ""
msgid "DesignManagement|Requested design version does not exist. Showing latest version instead"
msgstr ""
......@@ -15016,10 +15028,10 @@ msgstr ""
msgid "Releases"
msgstr ""
msgid "Releases are based on Git tags. We recommend naming tags that fit within semantic versioning, for example %{codeStart}v1.0%{codeEnd}, %{codeStart}v2.0-pre%{codeEnd}."
msgid "Releases are based on Git tags and mark specific points in a project's development history. They can contain information about the type of changes and can also deliver binaries, like compiled versions of your software."
msgstr ""
msgid "Releases mark specific points in a project's development history, communicate information about the type of change, and deliver on prepared, often compiled, versions of the software to be reused elsewhere. Currently, releases can only be created through the API."
msgid "Releases are based on Git tags. We recommend naming tags that fit within semantic versioning, for example %{codeStart}v1.0%{codeEnd}, %{codeStart}v2.0-pre%{codeEnd}."
msgstr ""
msgid "Release|Something went wrong while getting the release details"
......@@ -18809,6 +18821,9 @@ msgstr ""
msgid "ThreatMonitoring|Environment"
msgstr ""
msgid "ThreatMonitoring|No traffic to display"
msgstr ""
msgid "ThreatMonitoring|Requests"
msgstr ""
......@@ -18839,6 +18854,9 @@ msgstr ""
msgid "ThreatMonitoring|Web Application Firewall not enabled"
msgstr ""
msgid "ThreatMonitoring|While it's rare to have no traffic coming to your application, it can happen. In any event, we ask that you double check your settings to make sure you've set up the WAF correctly."
msgstr ""
msgid "Thursday"
msgstr ""
......
#!/bin/sh
lint_paths="changelogs/unreleased"
[ -d "ee/" ] && lint_paths="$lint_paths ee/changelogs/unreleased"
invalid_files=$(find $lint_paths -type f -not -name "*.yml" -not -name ".gitkeep")
if [ -n "$invalid_files" ]; then
echo "Changelog files must end in .yml, but these did not:"
echo "$invalid_files" | sed -e "s/^/* /"
exit 1
fi
......@@ -48,7 +48,8 @@ def jobs_to_run(node_index, node_total)
%w[bundle exec rubocop --parallel],
%w[scripts/lint-conflicts.sh],
%w[scripts/lint-rugged],
%w[scripts/frontend/check_no_partial_karma_jest.sh]
%w[scripts/frontend/check_no_partial_karma_jest.sh],
%w[scripts/lint-changelog-filenames]
]
case node_total
......
......@@ -224,9 +224,11 @@ describe Projects::ErrorTrackingController do
let(:error) { build(:detailed_error_tracking_error) }
it 'returns an error' do
expected_error = error.as_json.except('first_release_version').merge({ 'gitlab_commit' => nil })
expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('error_tracking/issue_detailed')
expect(json_response['error']).to eq(error.as_json)
expect(json_response['error']).to eq(expected_error)
end
it_behaves_like 'sets the polling header'
......
......@@ -133,6 +133,7 @@ describe 'Database schema' do
'Ci::BuildTraceChunk' => %w[data_store],
'Ci::JobArtifact' => %w[file_type],
'Ci::Pipeline' => %w[source config_source failure_reason],
'Ci::Processable' => %w[failure_reason],
'Ci::Runner' => %w[access_level],
'Ci::Stage' => %w[status],
'Clusters::Applications::Ingress' => %w[ingress_type],
......
......@@ -34,6 +34,7 @@ FactoryBot.define do
last_release_last_commit { '9ad419c86' }
first_release_short_version { 'abc123' }
last_release_short_version { 'abc123' }
first_release_version { '123456' }
skip_create
end
......
......@@ -20,5 +20,14 @@ FactoryBot.define do
create(:evidence, release: release)
end
end
trait :with_milestones do
transient do
milestones_count { 2 }
end
after(:create) do |release, evaluator|
create_list(:milestone, evaluator.milestones_count, project: evaluator.project, releases: [release])
end
end
end
end
{
"type": "object",
"required" : [
"required": [
"external_url",
"external_base_url",
"last_seen",
......@@ -18,9 +18,10 @@
"first_release_last_commit",
"last_release_last_commit",
"first_release_short_version",
"last_release_short_version"
"last_release_short_version",
"gitlab_commit"
],
"properties" : {
"properties": {
"id": { "type": "string" },
"first_seen": { "type": "string", "format": "date-time" },
"last_seen": { "type": "string", "format": "date-time" },
......@@ -30,13 +31,10 @@
"count": { "type": "integer" },
"external_url": { "type": "string" },
"external_base_url": { "type": "string" },
"user_count": { "type": "integer"},
"user_count": { "type": "integer" },
"tags": {
"type": "object",
"required" : [
"level",
"logger"
],
"required": ["level", "logger"],
"properties": {
"level": {
"type": "string"
......@@ -57,7 +55,8 @@
"first_release_last_commit": { "type": ["string", "null"] },
"last_release_last_commit": { "type": ["string", "null"] },
"first_release_short_version": { "type": ["string", "null"] },
"last_release_short_version": { "type": ["string", "null"] }
"last_release_short_version": { "type": ["string", "null"] },
"gitlab_commit": { "type": ["string", "null"] }
},
"additionalProperties": false
}
......@@ -80,6 +80,17 @@
"issue_id": 40
}
],
"award_emoji": [
{
"id": 1,
"name": "musical_keyboard",
"user_id": 1,
"awardable_type": "Issue",
"awardable_id": 40,
"created_at": "2020-01-07T11:55:22.234Z",
"updated_at": "2020-01-07T11:55:22.234Z"
}
],
"zoom_meetings": [
{
"id": 1,
......@@ -188,7 +199,18 @@
"author": {
"name": "User 4"
},
"events": []
"events": [],
"award_emoji": [
{
"id": 1,
"name": "clapper",
"user_id": 1,
"awardable_type": "Note",
"awardable_id": 351,
"created_at": "2020-01-07T11:55:22.234Z",
"updated_at": "2020-01-07T11:55:22.234Z"
}
]
},
{
"id": 352,
......@@ -2297,7 +2319,32 @@
"updated_at": "2019-11-05T15:37:24.645Z"
}
],
"notes": []
"notes": [
{
"id": 872,
"note": "This is a test note",
"noteable_type": "Snippet",
"author_id": 1,
"created_at": "2019-11-05T15:37:24.645Z",
"updated_at": "2019-11-05T15:37:24.645Z",
"noteable_id": 1,
"author": {
"name": "Random name"
},
"events": [],
"award_emoji": [
{
"id": 12,
"name": "thumbsup",
"user_id": 1,
"awardable_type": "Note",
"awardable_id": 872,
"created_at": "2019-11-05T15:37:21.287Z",
"updated_at": "2019-11-05T15:37:21.287Z"
}
]
}
]
}
],
"releases": [],
......@@ -2434,7 +2481,18 @@
"author": {
"name": "User 4"
},
"events": []
"events": [],
"award_emoji": [
{
"id": 1,
"name": "tada",
"user_id": 1,
"awardable_type": "Note",
"awardable_id": 1,
"created_at": "2019-11-05T15:37:21.287Z",
"updated_at": "2019-11-05T15:37:21.287Z"
}
]
},
{
"id": 672,
......@@ -2840,7 +2898,27 @@
"author_id": 1
}
],
"approvals_before_merge": 1
"approvals_before_merge": 1,
"award_emoji": [
{
"id": 1,
"name": "thumbsup",
"user_id": 1,
"awardable_type": "MergeRequest",
"awardable_id": 27,
"created_at": "2020-01-07T11:21:21.235Z",
"updated_at": "2020-01-07T11:21:21.235Z"
},
{
"id": 2,
"name": "drum",
"user_id": 1,
"awardable_type": "MergeRequest",
"awardable_id": 27,
"created_at": "2020-01-07T11:21:21.235Z",
"updated_at": "2020-01-07T11:21:21.235Z"
}
]
},
{
"id": 26,
......
......@@ -6,7 +6,13 @@ import axios from '~/lib/utils/axios_utils';
import Issue from '~/issue';
import '~/lib/utils/text_utility';
describe('Issue', function() {
describe('Issue', () => {
let testContext;
beforeEach(() => {
testContext = {};
});
let $boxClosed, $boxOpen, $btn;
preloadFixtures('issues/closed-issue.html');
......@@ -80,10 +86,18 @@ describe('Issue', function() {
}
[true, false].forEach(isIssueInitiallyOpen => {
describe(`with ${isIssueInitiallyOpen ? 'open' : 'closed'} issue`, function() {
describe(`with ${isIssueInitiallyOpen ? 'open' : 'closed'} issue`, () => {
const action = isIssueInitiallyOpen ? 'close' : 'reopen';
let mock;
function setup() {
testContext.issue = new Issue();
expectIssueState(isIssueInitiallyOpen);
testContext.$projectIssuesCounter = $('.issue_counter').first();
testContext.$projectIssuesCounter.text('1,001');
}
function mockCloseButtonResponseSuccess(url, response) {
mock.onPut(url).reply(() => {
expectNewBranchButtonState(true, false);
......@@ -103,7 +117,7 @@ describe('Issue', function() {
});
}
beforeEach(function() {
beforeEach(() => {
if (isIssueInitiallyOpen) {
loadFixtures('issues/open-issue.html');
} else {
......@@ -111,19 +125,11 @@ describe('Issue', function() {
}
mock = new MockAdapter(axios);
mock.onGet(/(.*)\/related_branches$/).reply(200, {});
jest.spyOn(axios, 'get');
findElements(isIssueInitiallyOpen);
this.issue = new Issue();
expectIssueState(isIssueInitiallyOpen);
this.$triggeredButton = $btn;
this.$projectIssuesCounter = $('.issue_counter').first();
this.$projectIssuesCounter.text('1,001');
spyOn(axios, 'get').and.callThrough();
testContext.$triggeredButton = $btn;
});
afterEach(() => {
......@@ -131,82 +137,90 @@ describe('Issue', function() {
$('div.flash-alert').remove();
});
it(`${action}s the issue`, function(done) {
mockCloseButtonResponseSuccess(this.$triggeredButton.attr('href'), {
it(`${action}s the issue`, done => {
mockCloseButtonResponseSuccess(testContext.$triggeredButton.attr('href'), {
id: 34,
});
mockCanCreateBranch(!isIssueInitiallyOpen);
this.$triggeredButton.trigger('click');
setup();
testContext.$triggeredButton.trigger('click');
setTimeout(() => {
setImmediate(() => {
expectIssueState(!isIssueInitiallyOpen);
expect(this.$triggeredButton.get(0).getAttribute('disabled')).toBeNull();
expect(this.$projectIssuesCounter.text()).toBe(isIssueInitiallyOpen ? '1,000' : '1,002');
expect(testContext.$triggeredButton.get(0).getAttribute('disabled')).toBeNull();
expect(testContext.$projectIssuesCounter.text()).toBe(
isIssueInitiallyOpen ? '1,000' : '1,002',
);
expectNewBranchButtonState(false, !isIssueInitiallyOpen);
done();
});
});
it(`fails to ${action} the issue if saved:false`, function(done) {
mockCloseButtonResponseSuccess(this.$triggeredButton.attr('href'), {
it(`fails to ${action} the issue if saved:false`, done => {
mockCloseButtonResponseSuccess(testContext.$triggeredButton.attr('href'), {
saved: false,
});
mockCanCreateBranch(isIssueInitiallyOpen);
this.$triggeredButton.trigger('click');
setup();
testContext.$triggeredButton.trigger('click');
setTimeout(() => {
setImmediate(() => {
expectIssueState(isIssueInitiallyOpen);
expect(this.$triggeredButton.get(0).getAttribute('disabled')).toBeNull();
expect(testContext.$triggeredButton.get(0).getAttribute('disabled')).toBeNull();
expectErrorMessage();
expect(this.$projectIssuesCounter.text()).toBe('1,001');
expect(testContext.$projectIssuesCounter.text()).toBe('1,001');
expectNewBranchButtonState(false, isIssueInitiallyOpen);
done();
});
});
it(`fails to ${action} the issue if HTTP error occurs`, function(done) {
mockCloseButtonResponseError(this.$triggeredButton.attr('href'));
it(`fails to ${action} the issue if HTTP error occurs`, done => {
mockCloseButtonResponseError(testContext.$triggeredButton.attr('href'));
mockCanCreateBranch(isIssueInitiallyOpen);
this.$triggeredButton.trigger('click');
setup();
testContext.$triggeredButton.trigger('click');
setTimeout(() => {
setImmediate(() => {
expectIssueState(isIssueInitiallyOpen);
expect(this.$triggeredButton.get(0).getAttribute('disabled')).toBeNull();
expect(testContext.$triggeredButton.get(0).getAttribute('disabled')).toBeNull();
expectErrorMessage();
expect(this.$projectIssuesCounter.text()).toBe('1,001');
expect(testContext.$projectIssuesCounter.text()).toBe('1,001');
expectNewBranchButtonState(false, isIssueInitiallyOpen);
done();
});
});
it('disables the new branch button if Ajax call fails', function() {
mockCloseButtonResponseError(this.$triggeredButton.attr('href'));
it('disables the new branch button if Ajax call fails', () => {
mockCloseButtonResponseError(testContext.$triggeredButton.attr('href'));
mock.onGet(/(.*)\/can_create_branch$/).networkError();
this.$triggeredButton.trigger('click');
setup();
testContext.$triggeredButton.trigger('click');
expectNewBranchButtonState(false, false);
});
it('does not trigger Ajax call if new branch button is missing', function(done) {
mockCloseButtonResponseError(this.$triggeredButton.attr('href'));
Issue.$btnNewBranch = $();
this.canCreateBranchDeferred = null;
it('does not trigger Ajax call if new branch button is missing', done => {
mockCloseButtonResponseError(testContext.$triggeredButton.attr('href'));
document.querySelector('#related-branches').remove();
document.querySelector('.create-mr-dropdown-wrap').remove();
this.$triggeredButton.trigger('click');
setup();
testContext.$triggeredButton.trigger('click');
setTimeout(() => {
setImmediate(() => {
expect(axios.get).not.toHaveBeenCalled();
done();
......
import AxiosMockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';
import pollUntilComplete from '~/lib/utils/poll_until_complete';
import httpStatusCodes from '~/lib/utils/http_status';
import { TEST_HOST } from 'helpers/test_constants';
const endpoint = `${TEST_HOST}/foo`;
const mockData = 'mockData';
const pollInterval = 1234;
const pollIntervalHeader = {
'Poll-Interval': pollInterval,
};
describe('pollUntilComplete', () => {
let mock;
beforeEach(() => {
mock = new AxiosMockAdapter(axios);
});
afterEach(() => {
mock.restore();
});
describe('given an immediate success response', () => {
beforeEach(() => {
mock.onGet(endpoint).replyOnce(httpStatusCodes.OK, mockData);
});
it('resolves with the response', () =>
pollUntilComplete(endpoint).then(({ data }) => {
expect(data).toBe(mockData);
}));
});
describe(`given the endpoint returns NO_CONTENT with a Poll-Interval before succeeding`, () => {
beforeEach(() => {
mock
.onGet(endpoint)
.replyOnce(httpStatusCodes.NO_CONTENT, undefined, pollIntervalHeader)
.onGet(endpoint)
.replyOnce(httpStatusCodes.OK, mockData);
});
it('calls the endpoint until it succeeds, and resolves with the response', () =>
Promise.all([
pollUntilComplete(endpoint).then(({ data }) => {
expect(data).toBe(mockData);
expect(mock.history.get).toHaveLength(2);
}),
// To ensure the above pollUntilComplete() promise is actually
// fulfilled, we must explicitly run the timers forward by the time
// indicated in the headers *after* each previous request has been
// fulfilled.
axios
// wait for initial NO_CONTENT response to be fulfilled
.waitForAll()
.then(() => {
jest.advanceTimersByTime(pollInterval);
}),
]));
});
describe('given the endpoint returns an error status', () => {
const errorMessage = 'error message';
beforeEach(() => {
mock.onGet(endpoint).replyOnce(httpStatusCodes.NOT_FOUND, errorMessage);
});
it('rejects with the error response', () =>
pollUntilComplete(endpoint).catch(error => {
expect(error.response.data).toBe(errorMessage);
}));
});
describe('given params', () => {
const params = { foo: 'bar' };
beforeEach(() => {
mock.onGet(endpoint, { params }).replyOnce(httpStatusCodes.OK, mockData);
});
it('requests the expected URL', () =>
pollUntilComplete(endpoint, { params }).then(({ data }) => {
expect(data).toBe(mockData);
}));
});
});
......@@ -30,6 +30,7 @@ describe GitlabSchema.types['SentryDetailedError'] do
lastReleaseLastCommit
firstReleaseShortVersion
lastReleaseShortVersion
gitlabCommit
]
is_expected.to have_graphql_fields(*expected_fields)
......
# frozen_string_literal: true
require 'spec_helper'
describe Gitlab::BackgroundMigration::ActivatePrometheusServicesForSharedClusterApplications, :migration, schema: 2019_12_20_102807 do
include MigrationHelpers::PrometheusServiceHelpers
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:services) { table(:services) }
let(:namespace) { namespaces.create(name: 'user', path: 'user') }
let(:project) { projects.create(namespace_id: namespace.id) }
let(:columns) do
%w(project_id active properties type template push_events
issues_events merge_requests_events tag_push_events
note_events category default wiki_page_events pipeline_events
confidential_issues_events commit_events job_events
confidential_note_events deployment_events)
end
describe '#perform' do
it 'is idempotent' do
expect { subject.perform(project.id) }.to change { services.order(:id).map { |row| row.attributes } }
expect { subject.perform(project.id) }.not_to change { services.order(:id).map { |row| row.attributes } }
end
context 'non prometheus services' do
it 'does not change them' do
other_type = 'SomeOtherService'
services.create(service_params_for(project.id, active: true, type: other_type))
expect { subject.perform(project.id) }.not_to change { services.where(type: other_type).order(:id).map { |row| row.attributes } }
end
end
context 'prometheus services are configured manually ' do
it 'does not change them' do
properties = '{"api_url":"http://test.dev","manual_configuration":"1"}'
services.create(service_params_for(project.id, properties: properties, active: false))
expect { subject.perform(project.id) }.not_to change { services.order(:id).map { |row| row.attributes } }
end
end
context 'prometheus integration services do not exist' do
it 'creates missing services entries' do
subject.perform(project.id)
rows = services.order(:id).map { |row| row.attributes.slice(*columns).symbolize_keys }
expect([service_params_for(project.id, active: true)]).to eq rows
end
end
context 'prometheus integration services exist' do
context 'in active state' do
it 'does not change them' do
services.create(service_params_for(project.id, active: true))
expect { subject.perform(project.id) }.not_to change { services.order(:id).map { |row| row.attributes } }
end
end
context 'not in active state' do
it 'sets active attribute to true' do
service = services.create(service_params_for(project.id))
expect { subject.perform(project.id) }.to change { service.reload.active? }.from(false).to(true)
end
end
end
end
end
......@@ -219,6 +219,16 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
expect(award_emoji.map(&:name)).to contain_exactly('thumbsup', 'coffee')
end
it 'snippet has notes' do
expect(@project.snippets.first.notes.count).to eq(1)
end
it 'snippet has award emojis on notes' do
award_emoji = @project.snippets.first.notes.first.award_emoji.first
expect(award_emoji.name).to eq('thumbsup')
end
it 'restores `ci_cd_settings` : `group_runners_enabled` setting' do
expect(@project.ci_cd_settings.group_runners_enabled?).to eq(false)
end
......@@ -240,6 +250,18 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
expect(sentry_issue.sentry_issue_identifier).to eq(1234567891)
end
it 'has award emoji for an issue' do
award_emoji = @project.issues.first.award_emoji.first
expect(award_emoji.name).to eq('musical_keyboard')
end
it 'has award emoji for a note in an issue' do
award_emoji = @project.issues.first.notes.first.award_emoji.first
expect(award_emoji.name).to eq('clapper')
end
it 'restores container_expiration_policy' do
policy = Project.find_by_path('project').container_expiration_policy
......@@ -266,6 +288,20 @@ describe Gitlab::ImportExport::ProjectTreeRestorer do
it 'has no source if source/target differ' do
expect(MergeRequest.find_by_title('MR2').source_project_id).to be_nil
end
it 'has award emoji' do
award_emoji = MergeRequest.find_by_title('MR1').award_emoji
expect(award_emoji.map(&:name)).to contain_exactly('thumbsup', 'drum')
end
context 'notes' do
it 'has award emoji' do
award_emoji = MergeRequest.find_by_title('MR1').notes.first.award_emoji.first
expect(award_emoji.name).to eq('tada')
end
end
end
context 'tokens are regenerated' do
......
......@@ -263,6 +263,7 @@ describe Sentry::Client::Issue do
:last_release_last_commit | [:lastRelease, :lastCommit]
:first_release_short_version | [:firstRelease, :shortVersion]
:last_release_short_version | [:lastRelease, :shortVersion]
:first_release_version | [:firstRelease, :version]
end
with_them do
......
# frozen_string_literal: true
require 'spec_helper'
require Rails.root.join('db', 'migrate', '20200107172020_add_timestamp_softwarelicensespolicy.rb')
describe AddTimestampSoftwarelicensespolicy, :migration do
let(:software_licenses_policy) { table(:software_license_policies) }
let(:projects) { table(:projects) }
let(:licenses) { table(:software_licenses) }
before do
projects.create!(name: 'gitlab', path: 'gitlab-org/gitlab-ce', namespace_id: 1)
licenses.create!(name: 'MIT')
software_licenses_policy.create!(project_id: projects.first.id, software_license_id: licenses.first.id)
end
it 'creates timestamps' do
migrate!
expect(software_licenses_policy.first.created_at).to be_nil
expect(software_licenses_policy.first.updated_at).to be_nil
end
end
# frozen_string_literal: true
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20191220102807_patch_prometheus_services_for_shared_cluster_applications.rb')
describe PatchPrometheusServicesForSharedClusterApplications, :migration, :sidekiq do
include MigrationHelpers::PrometheusServiceHelpers
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:services) { table(:services) }
let(:clusters) { table(:clusters) }
let(:cluster_groups) { table(:cluster_groups) }
let(:clusters_applications_prometheus) { table(:clusters_applications_prometheus) }
let(:namespace) { namespaces.create!(name: 'gitlab', path: 'gitlab-org') }
let(:application_statuses) do
{
errored: -1,
installed: 3,
updated: 5
}
end
let(:cluster_types) do
{
instance_type: 1,
group_type: 2
}
end
describe '#up' do
let!(:project_with_missing_service) { projects.create!(name: 'gitlab', path: 'gitlab-ce', namespace_id: namespace.id) }
let(:project_with_inactive_service) { projects.create!(name: 'gitlab', path: 'gitlab-ee', namespace_id: namespace.id) }
let(:project_with_active_service) { projects.create!(name: 'gitlab', path: 'gitlab-ee', namespace_id: namespace.id) }
let(:project_with_manual_active_service) { projects.create!(name: 'gitlab', path: 'gitlab-ee', namespace_id: namespace.id) }
let(:project_with_manual_inactive_service) { projects.create!(name: 'gitlab', path: 'gitlab-ee', namespace_id: namespace.id) }
let(:project_with_active_not_prometheus_service) { projects.create!(name: 'gitlab', path: 'gitlab-ee', namespace_id: namespace.id) }
let(:project_with_inactive_not_prometheus_service) { projects.create!(name: 'gitlab', path: 'gitlab-ee', namespace_id: namespace.id) }
before do
services.create(service_params_for(project_with_inactive_service.id, active: false))
services.create(service_params_for(project_with_active_service.id, active: true))
services.create(service_params_for(project_with_active_not_prometheus_service.id, active: true, type: 'other'))
services.create(service_params_for(project_with_inactive_not_prometheus_service.id, active: false, type: 'other'))
services.create(service_params_for(project_with_manual_inactive_service.id, active: false, properties: { some: 'data' }.to_json))
services.create(service_params_for(project_with_manual_active_service.id, active: true, properties: { some: 'data' }.to_json))
end
shared_examples 'patch prometheus services post migration' do
context 'prometheus application is installed on the cluster' do
it 'schedules a background migration' do
clusters_applications_prometheus.create(cluster_id: cluster.id, status: application_statuses[:installed], version: '123')
Sidekiq::Testing.fake! do
Timecop.freeze do
background_migrations = [["ActivatePrometheusServicesForSharedClusterApplications", project_with_missing_service.id],
["ActivatePrometheusServicesForSharedClusterApplications", project_with_inactive_service.id],
["ActivatePrometheusServicesForSharedClusterApplications", project_with_active_not_prometheus_service.id],
["ActivatePrometheusServicesForSharedClusterApplications", project_with_inactive_not_prometheus_service.id]]
migrate!
enqueued_migrations = BackgroundMigrationWorker.jobs.map { |job| job['args'] }
expect(enqueued_migrations).to match_array(background_migrations)
end
end
end
end
context 'prometheus application was recently updated on the cluster' do
it 'schedules a background migration' do
clusters_applications_prometheus.create(cluster_id: cluster.id, status: application_statuses[:updated], version: '123')
Sidekiq::Testing.fake! do
Timecop.freeze do
background_migrations = [["ActivatePrometheusServicesForSharedClusterApplications", project_with_missing_service.id],
["ActivatePrometheusServicesForSharedClusterApplications", project_with_inactive_service.id],
["ActivatePrometheusServicesForSharedClusterApplications", project_with_active_not_prometheus_service.id],
["ActivatePrometheusServicesForSharedClusterApplications", project_with_inactive_not_prometheus_service.id]]
migrate!
enqueued_migrations = BackgroundMigrationWorker.jobs.map { |job| job['args'] }
expect(enqueued_migrations).to match_array(background_migrations)
end
end
end
end
context 'prometheus application failed to install on the cluster' do
it 'does not schedule a background migration' do
clusters_applications_prometheus.create(cluster_id: cluster.id, status: application_statuses[:errored], version: '123')
Sidekiq::Testing.fake! do
Timecop.freeze do
migrate!
expect(BackgroundMigrationWorker.jobs.size).to eq 0
end
end
end
end
context 'prometheus application is NOT installed on the cluster' do
it 'does not schedule a background migration' do
Sidekiq::Testing.fake! do
Timecop.freeze do
migrate!
expect(BackgroundMigrationWorker.jobs.size).to eq 0
end
end
end
end
end
context 'Cluster is group_type' do
let(:cluster) { clusters.create(name: 'cluster', cluster_type: cluster_types[:group_type]) }
before do
cluster_groups.create(group_id: namespace.id, cluster_id: cluster.id)
end
it_behaves_like 'patch prometheus services post migration'
end
context 'Cluster is instance_type' do
let(:cluster) { clusters.create(name: 'cluster', cluster_type: cluster_types[:instance_type]) }
it_behaves_like 'patch prometheus services post migration'
end
end
end
......@@ -210,6 +210,53 @@ describe ErrorTracking::ProjectErrorTrackingSetting do
end
end
describe '#issue_details' do
let(:issue) { build(:detailed_error_tracking_error) }
let(:sentry_client) { double('sentry_client', issue_details: issue) }
let(:commit_id) { '123456' }
let(:result) do
subject.issue_details
end
context 'when cached' do
before do
stub_reactive_cache(subject, issue, {})
synchronous_reactive_cache(subject)
expect(subject).to receive(:sentry_client).and_return(sentry_client)
end
it { expect(result).to eq(issue: issue) }
it { expect(result[:issue].first_release_version).to eq(commit_id) }
it { expect(result[:issue].gitlab_commit).to eq(nil) }
context 'when release version is nil' do
before do
issue.first_release_version = nil
end
it { expect(result[:issue].gitlab_commit).to eq(nil) }
end
context 'when repo commit matches first release version' do
let(:commit) { double('commit', id: commit_id) }
let(:repository) { double('repository', commit: commit) }
before do
expect(project).to receive(:repository).and_return(repository)
end
it { expect(result[:issue].gitlab_commit).to eq(commit_id) }
end
end
context 'when not cached' do
it { expect(subject).not_to receive(:sentry_client) }
it { expect(result).to be_nil }
end
end
describe '#update_issue' do
let(:opts) do
{ status: 'resolved' }
......
......@@ -181,4 +181,10 @@ RSpec.describe Release do
it { is_expected.to eq(release.evidence.summary) }
end
end
describe '#milestone_titles' do
let(:release) { create(:release, :with_milestones) }
it { expect(release.milestone_titles).to eq(release.milestones.map(&:title).sort.join(", ")) }
end
end
......@@ -56,6 +56,7 @@ describe 'getting a detailed sentry error' do
expect(error_data['status']).to eql sentry_detailed_error.status.upcase
expect(error_data['firstSeen']).to eql sentry_detailed_error.first_seen
expect(error_data['lastSeen']).to eql sentry_detailed_error.last_seen
expect(error_data['gitlabCommit']).to be nil
end
it 'is expected to return the frequency correctly' do
......
......@@ -21,6 +21,7 @@ describe Releases::UpdateService do
it 'raises an error' do
result = service.execute
expect(result[:status]).to eq(:error)
expect(result[:milestones_updated]).to be_falsy
end
end
......@@ -50,21 +51,33 @@ describe Releases::UpdateService do
end
context 'when a milestone is passed in' do
let(:new_title) { 'v2.0' }
let(:milestone) { create(:milestone, project: project, title: 'v1.0') }
let(:new_milestone) { create(:milestone, project: project, title: new_title) }
let(:params_with_milestone) { params.merge!({ milestones: [new_title] }) }
let(:service) { described_class.new(new_milestone.project, user, params_with_milestone) }
before do
release.milestones << milestone
end
context 'a different milestone' do
let(:new_title) { 'v2.0' }
it 'updates the related milestone accordingly' do
result = service.execute
release.reload
expect(release.milestones.first.title).to eq(new_title)
expect(result[:milestones_updated]).to be_truthy
end
end
context 'an identical milestone' do
let(:new_title) { 'v1.0' }
it "raises an error" do
expect { service.execute }.to raise_error(ActiveRecord::RecordInvalid)
end
end
end
......@@ -76,12 +89,14 @@ describe Releases::UpdateService do
release.milestones << milestone
service.params = params_with_empty_milestone
end
it 'removes the old milestone and does not associate any new milestone' do
result = service.execute
release.reload
expect(release.milestones).not_to be_present
expect(result[:milestones_updated]).to be_truthy
end
end
......@@ -96,14 +111,15 @@ describe Releases::UpdateService do
create(:milestone, project: project, title: new_title_1)
create(:milestone, project: project, title: new_title_2)
release.milestones << milestone
end
it 'removes the old milestone and updates the release with the new ones' do
result = service.execute
release.reload
milestone_titles = release.milestones.map(&:title)
expect(milestone_titles).to match_array([new_title_1, new_title_2])
expect(result[:milestones_updated]).to be_truthy
end
end
end
......
# frozen_string_literal: true
module MigrationHelpers
module PrometheusServiceHelpers
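# Builds the attribute hash for a PrometheusService row inserted through the
# plain `services` table in migration specs; any `params` passed in override
# these defaults.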
def service_params_for(project_id, params = {})
{
project_id: project_id,
active: false,
properties: '{}',
type: 'PrometheusService',
template: false,
push_events: true,
issues_events: true,
merge_requests_events: true,
tag_push_events: true,
note_events: true,
category: 'monitoring',
default: false,
wiki_page_events: true,
pipeline_events: true,
confidential_issues_events: true,
commit_events: true,
job_events: true,
confidential_note_events: true,
deployment_events: false
}.merge(params)
end
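# Returns the record's attributes with `created_at`/`updated_at` rendered as
# database-formatted strings, so rows can be compared before and after the
# migration runs.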
def row_attributes(entity)
entity.attributes.with_indifferent_access.tap do |hash|
hash.merge!(hash.slice(:created_at, :updated_at).transform_values { |v| v.to_s(:db) })
end
end
end
end
# frozen_string_literal: true
# This shared_example requires the following variables:
# let(:service_class) { Gitlab::DatabaseImporters::SelfMonitoring::Project::DeleteService }
# let(:service) { instance_double(service_class) }
RSpec.shared_examples 'executes service' do
before do
allow(service_class).to receive(:new) { service }
end
it 'runs the service' do
expect(service).to receive(:execute)
subject.perform
end
end
RSpec.shared_examples 'returns in_progress based on Sidekiq::Status' do
it 'returns true when job is enqueued' do
jid = described_class.perform_async
expect(described_class.in_progress?(jid)).to eq(true)
end
it 'returns false when job does not exist' do
expect(described_class.in_progress?('fake_jid')).to eq(false)
end
end
......@@ -7,22 +7,10 @@ describe SelfMonitoringProjectCreateWorker do
let(:service_class) { Gitlab::DatabaseImporters::SelfMonitoring::Project::CreateService }
let(:service) { instance_double(service_class) }
it_behaves_like 'executes service'
end
describe '.in_progress?', :clean_gitlab_redis_shared_state do
it_behaves_like 'returns in_progress based on Sidekiq::Status'
end
end
# frozen_string_literal: true
require 'spec_helper'
describe SelfMonitoringProjectDeleteWorker do
let_it_be(:jid) { 'b5b28910d97563e58c2fe55f' }
let_it_be(:data_key) { "self_monitoring_delete_result:#{jid}" }
describe '#perform' do
let(:service_class) { Gitlab::DatabaseImporters::SelfMonitoring::Project::DeleteService }
let(:service) { instance_double(service_class) }
it_behaves_like 'executes service'
end
describe '.status', :clean_gitlab_redis_shared_state do
it_behaves_like 'returns in_progress based on Sidekiq::Status'
end
end