提交 647de7e6 编写于 作者: G GitLab Bot

Add latest changes from gitlab-org/gitlab@master

上级 4233d3aa
......@@ -143,6 +143,7 @@ db:migrate:reset:
db:check-schema:
extends: .db-job-base
script:
- scripts/regenerate-schema
- source scripts/schema_changed.sh
db:migrate-from-v11.11.0:
......
......@@ -393,9 +393,9 @@ export default {
<template #description>
<div>
<span>{{ __('Monitor your errors by integrating with Sentry.') }}</span>
<a href="/help/user/project/operations/error_tracking.html">
{{ __('More information') }}
</a>
<gl-link target="_blank" href="/help/user/project/operations/error_tracking.html">{{
__('More information')
}}</gl-link>
</div>
</template>
</gl-empty-state>
......
......@@ -3,6 +3,8 @@ import { debounce, pickBy } from 'lodash';
import { mapActions, mapState, mapGetters } from 'vuex';
import VueDraggable from 'vuedraggable';
import {
GlIcon,
GlButton,
GlDeprecatedButton,
GlDropdown,
GlDropdownItem,
......@@ -17,7 +19,6 @@ import {
import DashboardPanel from './dashboard_panel.vue';
import { s__ } from '~/locale';
import createFlash from '~/flash';
import glFeatureFlagsMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
import CustomMetricsFormFields from '~/custom_metrics/components/custom_metrics_form_fields.vue';
import { mergeUrlParams, redirectTo, updateHistory } from '~/lib/utils/url_utility';
import invalidUrl from '~/lib/utils/invalid_url';
......@@ -39,6 +40,8 @@ export default {
VueDraggable,
DashboardPanel,
Icon,
GlIcon,
GlButton,
GlDeprecatedButton,
GlDropdown,
GlLoadingIcon,
......@@ -60,7 +63,6 @@ export default {
GlTooltip: GlTooltipDirective,
TrackEvent: TrackEventDirective,
},
mixins: [glFeatureFlagsMixin()],
props: {
externalDashboardUrl: {
type: String,
......@@ -197,7 +199,6 @@ export default {
},
data() {
return {
state: 'gettingStarted',
formIsValid: null,
selectedTimeRange: timeRangeFromUrl() || defaultTimeRange,
hasValidDates: true,
......@@ -212,8 +213,8 @@ export default {
'showEmptyState',
'useDashboardEndpoint',
'allDashboards',
'additionalPanelTypesEnabled',
'environmentsLoading',
'expandedPanel',
]),
...mapGetters('monitoringDashboard', ['getMetricStates', 'filteredEnvironments']),
firstDashboard() {
......@@ -232,14 +233,6 @@ export default {
this.firstDashboard === this.selectedDashboard
);
},
hasHeaderButtons() {
return (
this.addingMetricsAvailable ||
this.showRearrangePanelsBtn ||
this.selectedDashboard.can_edit ||
this.externalDashboardUrl.length
);
},
shouldShowEnvironmentsDropdownNoMatchedMsg() {
return !this.environmentsLoading && this.filteredEnvironments.length === 0;
},
......@@ -273,6 +266,8 @@ export default {
'setInitialState',
'setPanelGroupMetrics',
'filterEnvironments',
'setExpandedPanel',
'clearExpandedPanel',
]),
updatePanels(key, panels) {
this.setPanelGroupMetrics({
......@@ -300,9 +295,13 @@ export default {
this.selectedTimeRange = defaultTimeRange;
},
generateLink(group, title, yLabel) {
generatePanelLink(group, graphData) {
if (!group || !graphData) {
return null;
}
const dashboard = this.currentDashboard || this.firstDashboard.path;
const params = pickBy({ dashboard, group, title, y_label: yLabel }, value => value != null);
const { y_label, title } = graphData;
const params = pickBy({ dashboard, group, title, y_label }, value => value != null);
return mergeUrlParams(params, window.location.href);
},
hideAddMetricModal() {
......@@ -366,11 +365,20 @@ export default {
});
this.selectedTimeRange = { start, end };
},
// Expands a single panel into "full screen" view by recording the
// panel (and its group name) in the Vuex store.
onExpandPanel(group, panel) {
  this.setExpandedPanel({ group, panel });
},
// Collapses the expanded panel and returns to the full dashboard view.
onGoBack() {
  this.clearExpandedPanel();
},
},
addMetric: {
title: s__('Metrics|Add metric'),
modalId: 'add-metric',
},
i18n: {
goBackLabel: s__('Metrics|Go back'),
},
};
</script>
......@@ -541,59 +549,88 @@ export default {
</div>
<div v-if="!showEmptyState">
<graph-group
v-for="(groupData, index) in dashboard.panelGroups"
:key="`${groupData.group}.${groupData.priority}`"
:name="groupData.group"
:show-panels="showPanels"
:collapse-group="collapseGroup(groupData.key)"
<dashboard-panel
v-show="expandedPanel.panel"
ref="expandedPanel"
:clipboard-text="generatePanelLink(expandedPanel.group, expandedPanel.panel)"
:graph-data="expandedPanel.panel"
:alerts-endpoint="alertsEndpoint"
:height="600"
:prometheus-alerts-available="prometheusAlertsAvailable"
@timerangezoom="onTimeRangeZoom"
>
<vue-draggable
v-if="!groupSingleEmptyState(groupData.key)"
:value="groupData.panels"
group="metrics-dashboard"
:component-data="{ attrs: { class: 'row mx-0 w-100' } }"
:disabled="!isRearrangingPanels"
@input="updatePanels(groupData.key, $event)"
<template #topLeft>
<gl-button
ref="goBackBtn"
v-gl-tooltip
class="mr-3 my-3"
:title="$options.i18n.goBackLabel"
@click="onGoBack"
>
<gl-icon
name="arrow-left"
:aria-label="$options.i18n.goBackLabel"
class="text-secondary"
/>
</gl-button>
</template>
</dashboard-panel>
<div v-show="!expandedPanel.panel">
<graph-group
v-for="groupData in dashboard.panelGroups"
:key="`${groupData.group}.${groupData.priority}`"
:name="groupData.group"
:show-panels="showPanels"
:collapse-group="collapseGroup(groupData.key)"
>
<div
v-for="(graphData, graphIndex) in groupData.panels"
:key="`dashboard-panel-${graphIndex}`"
class="col-12 col-lg-6 px-2 mb-2 draggable"
:class="{ 'draggable-enabled': isRearrangingPanels }"
<vue-draggable
v-if="!groupSingleEmptyState(groupData.key)"
:value="groupData.panels"
group="metrics-dashboard"
:component-data="{ attrs: { class: 'row mx-0 w-100' } }"
:disabled="!isRearrangingPanels"
@input="updatePanels(groupData.key, $event)"
>
<div class="position-relative draggable-panel js-draggable-panel">
<div
v-if="isRearrangingPanels"
class="draggable-remove js-draggable-remove p-2 w-100 position-absolute d-flex justify-content-end"
@click="removePanel(groupData.key, groupData.panels, graphIndex)"
>
<a class="mx-2 p-2 draggable-remove-link" :aria-label="__('Remove')">
<icon name="close" />
</a>
</div>
<div
v-for="(graphData, graphIndex) in groupData.panels"
:key="`dashboard-panel-${graphIndex}`"
class="col-12 col-lg-6 px-2 mb-2 draggable"
:class="{ 'draggable-enabled': isRearrangingPanels }"
>
<div class="position-relative draggable-panel js-draggable-panel">
<div
v-if="isRearrangingPanels"
class="draggable-remove js-draggable-remove p-2 w-100 position-absolute d-flex justify-content-end"
@click="removePanel(groupData.key, groupData.panels, graphIndex)"
>
<a class="mx-2 p-2 draggable-remove-link" :aria-label="__('Remove')">
<icon name="close" />
</a>
</div>
<dashboard-panel
:clipboard-text="generateLink(groupData.group, graphData.title, graphData.y_label)"
:graph-data="graphData"
:alerts-endpoint="alertsEndpoint"
:prometheus-alerts-available="prometheusAlertsAvailable"
:index="`${index}-${graphIndex}`"
@timerangezoom="onTimeRangeZoom"
/>
<dashboard-panel
:clipboard-text="generatePanelLink(groupData.group, graphData)"
:graph-data="graphData"
:alerts-endpoint="alertsEndpoint"
:prometheus-alerts-available="prometheusAlertsAvailable"
@timerangezoom="onTimeRangeZoom"
@expand="onExpandPanel(groupData.group, graphData)"
/>
</div>
</div>
</vue-draggable>
<div v-else class="py-5 col col-sm-10 col-md-8 col-lg-7 col-xl-6">
<group-empty-state
ref="empty-group"
:documentation-path="documentationPath"
:settings-path="settingsPath"
:selected-state="groupSingleEmptyState(groupData.key)"
:svg-path="emptyNoDataSmallSvgPath"
/>
</div>
</vue-draggable>
<div v-else class="py-5 col col-sm-10 col-md-8 col-lg-7 col-xl-6">
<group-empty-state
ref="empty-group"
:documentation-path="documentationPath"
:settings-path="settingsPath"
:selected-state="groupSingleEmptyState(groupData.key)"
:svg-path="emptyNoDataSmallSvgPath"
/>
</div>
</graph-group>
</graph-group>
</div>
</div>
<empty-state
v-else
......
......@@ -59,7 +59,8 @@ export default {
},
graphData: {
type: Object,
required: true,
required: false,
default: null,
},
groupId: {
type: String,
......@@ -114,17 +115,13 @@ export default {
},
}),
title() {
return this.graphData.title || '';
return this.graphData?.title || '';
},
graphDataHasResult() {
return (
this.graphData.metrics &&
this.graphData.metrics[0].result &&
this.graphData.metrics[0].result.length > 0
);
return this.graphData?.metrics?.[0]?.result?.length > 0;
},
graphDataIsLoading() {
const { metrics = [] } = this.graphData;
const metrics = this.graphData?.metrics || [];
return metrics.some(({ loading }) => loading);
},
logsPathWithTimeRange() {
......@@ -136,7 +133,7 @@ export default {
return null;
},
csvText() {
const chartData = this.graphData.metrics[0].result[0].values;
const chartData = this.graphData?.metrics[0].result[0].values || [];
const yLabel = this.graphData.y_label;
const header = `timestamp,${yLabel}\r\n`; // eslint-disable-line @gitlab/require-i18n-strings
return chartData.reduce((csv, data) => {
......@@ -230,7 +227,7 @@ export default {
return Object.values(this.getGraphAlerts(queries));
},
isPanelType(type) {
return this.graphData.type && this.graphData.type === type;
return this.graphData?.type === type;
},
showToast() {
this.$toast.show(__('Link copied'));
......
......@@ -89,6 +89,17 @@ export const setShowErrorBanner = ({ commit }, enabled) => {
commit(types.SET_SHOW_ERROR_BANNER, enabled);
};
/**
 * Marks a single panel as "expanded" (zoomed into a single-panel view).
 *
 * @param {Object} context - Vuex action context.
 * @param {Object} payload
 * @param {String} payload.group - Name of the group the panel belongs to.
 * @param {Object} payload.panel - Panel definition from the dashboard.
 */
export const setExpandedPanel = ({ commit }, payload) => {
  const { group, panel } = payload;
  commit(types.SET_EXPANDED_PANEL, { group, panel });
};

/**
 * Clears the expanded panel so the dashboard shows all panels again.
 *
 * @param {Object} context - Vuex action context.
 */
export const clearExpandedPanel = ({ commit }) => {
  commit(types.SET_EXPANDED_PANEL, { group: null, panel: null });
};
// All Data
export const fetchData = ({ dispatch }) => {
......
......@@ -31,5 +31,5 @@ export const SET_GETTING_STARTED_EMPTY_STATE = 'SET_GETTING_STARTED_EMPTY_STATE'
export const SET_NO_DATA_EMPTY_STATE = 'SET_NO_DATA_EMPTY_STATE';
export const SET_SHOW_ERROR_BANNER = 'SET_SHOW_ERROR_BANNER';
export const SET_PANEL_GROUP_METRICS = 'SET_PANEL_GROUP_METRICS';
export const SET_ENVIRONMENTS_FILTER = 'SET_ENVIRONMENTS_FILTER';
export const SET_EXPANDED_PANEL = 'SET_EXPANDED_PANEL';
......@@ -134,6 +134,8 @@ export default {
metric.loading = false;
metric.result = null;
},
// Parameters and other information
[types.SET_INITIAL_STATE](state, initialState = {}) {
Object.assign(state, pick(initialState, initialStateKeys));
},
......@@ -163,4 +165,8 @@ export default {
[types.SET_ENVIRONMENTS_FILTER](state, searchTerm) {
state.environmentsSearchTerm = searchTerm;
},
[types.SET_EXPANDED_PANEL](state, { group, panel }) {
state.expandedPanel.group = group;
state.expandedPanel.panel = panel;
},
};
......@@ -17,6 +17,21 @@ export default () => ({
dashboard: {
panelGroups: [],
},
/**
* Panel that is currently "zoomed" in as
* a single panel in view.
*/
expandedPanel: {
/**
* {?String} Panel's group name.
*/
group: null,
/**
* {?Object} Panel content from `dashboard`
* null when no panel is expanded.
*/
panel: null,
},
allDashboards: [],
// Other project data
......
......@@ -20,9 +20,6 @@ module Boards
skip_before_action :authenticate_user!, only: [:index]
before_action :validate_id_list, only: [:bulk_move]
before_action :can_move_issues?, only: [:bulk_move]
before_action do
push_frontend_feature_flag(:board_search_optimization, board.group, default_enabled: true)
end
def index
list_service = Boards::Issues::ListService.new(board_parent, current_user, filter_params)
......
# frozen_string_literal: true
module Ci
  # Instance-level CI/CD variable, available to pipelines across the
  # whole installation (as opposed to project- or group-level variables).
  class InstanceVariable < ApplicationRecord
    extend Gitlab::Ci::Model
    include Ci::NewHasVariable
    include Ci::Maskable
    # The UI and API expose the variable's value as `secret_value`.
    alias_attribute :secret_value, :value
    # Keys must be unique across the instance; the custom message embeds
    # the conflicting key for a clearer validation error.
    validates :key, uniqueness: {
      message: "(%{value}) has already been taken"
    }
    # Variables that are exposed to unprotected refs as well.
    scope :unprotected, -> { where(protected: false) }
  end
end
......@@ -12,7 +12,7 @@ module Boards
def execute
return fetch_issues.order_closed_date_desc if list&.closed?
fetch_issues.order_by_position_and_priority(with_cte: can_attempt_search_optimization?)
fetch_issues.order_by_position_and_priority(with_cte: params[:search].present?)
end
# rubocop: disable CodeReuse/ActiveRecord
......@@ -91,7 +91,7 @@ module Boards
end
def set_attempt_search_optimizations
return unless can_attempt_search_optimization?
return unless params[:search].present?
if board.group_board?
params[:attempt_group_search_optimizations] = true
......@@ -130,11 +130,6 @@ module Boards
def board_group
board.group_board? ? parent : parent.group
end
def can_attempt_search_optimization?
params[:search].present? &&
Feature.enabled?(:board_search_optimization, board_group, default_enabled: true)
end
end
end
end
......
......@@ -28,19 +28,25 @@ module Gitlab
private
def create_issue(issue_attributes, project_id)
label_ids = issue_attributes.delete('label_ids')
issue_id = insert_and_return_id(issue_attributes, Issue)
label_issue(project_id, issue_id)
label_issue(project_id, issue_id, label_ids)
issue_id
end
def label_issue(project_id, issue_id)
label_id = JiraImport.get_import_label_id(project_id)
return unless label_id
def label_issue(project_id, issue_id, label_ids)
label_link_attrs = label_ids.to_a.map do |label_id|
build_label_attrs(issue_id, label_id.to_i)
end
label_link_attrs = build_label_attrs(issue_id, label_id.to_i)
insert_and_return_id(label_link_attrs, LabelLink)
import_label_id = JiraImport.get_import_label_id(project_id)
return unless import_label_id
label_link_attrs << build_label_attrs(issue_id, import_label_id.to_i)
Gitlab::Database.bulk_insert(LabelLink.table_name, label_link_attrs)
end
def build_label_attrs(issue_id, label_id)
......
---
title: Add migrations for global CI variables
merge_request: 30156
author:
type: added
---
title: Collect object store config in usage data
merge_request: 29149
author:
type: added
---
title: View a details of a panel in 'full screen mode'
merge_request: 29902
author:
type: added
---
title: Remove deprecated /admin/application_settings redirect
merge_request: 30532
author:
type: removed
---
title: Error tracking target blank empty state
merge_request: 30525
author:
type: other
---
title: Map Jira issue assignee and author
merge_request: 30498
author:
type: added
......@@ -45,7 +45,7 @@ module GettextI18nRailsJs
private
def gettext_messages_by_file
@gettext_messages_by_file ||= JSON.parse(load_messages)
@gettext_messages_by_file ||= Gitlab::Json.parse(load_messages)
end
def load_messages
......
......@@ -75,7 +75,7 @@ Sidekiq.configure_server do |config|
# Sidekiq-cron: load recurring jobs from gitlab.yml
# UGLY Hack to get nested hash from settingslogic
cron_jobs = JSON.parse(Gitlab.config.cron_jobs.to_json)
cron_jobs = Gitlab::Json.parse(Gitlab.config.cron_jobs.to_json)
# UGLY hack: Settingslogic doesn't allow 'class' key
cron_jobs_required_keys = %w(job_class cron)
cron_jobs.each do |k, v|
......
......@@ -116,10 +116,6 @@ namespace :admin do
end
resource :application_settings, only: :update do
# This redirect should be removed with 13.0 release.
# https://gitlab.com/gitlab-org/gitlab/issues/199427
get '/', to: redirect('admin/application_settings/general'), as: nil
resources :services, only: [:index, :edit, :update]
resources :integrations, only: [:edit, :update] do
member do
......
# frozen_string_literal: true

# Creates the ci_instance_variables table backing Ci::InstanceVariable.
class CreateCiInstanceVariables < ActiveRecord::Migration[6.0]
  include Gitlab::Database::MigrationHelpers

  DOWNTIME = false

  # Required because add_text_limit adds the constraints as NOT VALID and
  # validates them in separate statements.
  disable_ddl_transaction!

  def up
    unless table_exists?(:ci_instance_variables)
      create_table :ci_instance_variables do |t|
        t.integer :variable_type, null: false, limit: 2, default: 1
        # NOTE(review): the original used `allow_null: false`, which is not
        # a Rails column option and was silently ignored, leaving these
        # boolean columns nullable. `null: false` is the option that
        # actually enforces NOT NULL.
        t.boolean :masked, default: false, null: false
        t.boolean :protected, default: false, null: false
        t.text :key, null: false
        t.text :encrypted_value
        t.text :encrypted_value_iv
        t.index [:key], name: 'index_ci_instance_variables_on_key', unique: true, using: :btree
      end
    end

    # Text limits are added outside the CREATE TABLE so they can be
    # created NOT VALID and validated without long locks.
    add_text_limit(:ci_instance_variables, :key, 255)
    add_text_limit(:ci_instance_variables, :encrypted_value, 1024)
    add_text_limit(:ci_instance_variables, :encrypted_value_iv, 255)
  end

  def down
    drop_table :ci_instance_variables
  end
end
......@@ -1069,6 +1069,28 @@ CREATE SEQUENCE public.ci_group_variables_id_seq
ALTER SEQUENCE public.ci_group_variables_id_seq OWNED BY public.ci_group_variables.id;
CREATE TABLE public.ci_instance_variables (
id bigint NOT NULL,
variable_type smallint DEFAULT 1 NOT NULL,
masked boolean DEFAULT false,
protected boolean DEFAULT false,
key text NOT NULL,
encrypted_value text,
encrypted_value_iv text,
CONSTRAINT check_07a45a5bcb CHECK ((char_length(encrypted_value_iv) <= 255)),
CONSTRAINT check_5aede12208 CHECK ((char_length(key) <= 255)),
CONSTRAINT check_5ebd0515a0 CHECK ((char_length(encrypted_value) <= 1024))
);
CREATE SEQUENCE public.ci_instance_variables_id_seq
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
ALTER SEQUENCE public.ci_instance_variables_id_seq OWNED BY public.ci_instance_variables.id;
CREATE TABLE public.ci_job_artifacts (
id integer NOT NULL,
project_id integer NOT NULL,
......@@ -7227,6 +7249,8 @@ ALTER TABLE ONLY public.ci_daily_report_results ALTER COLUMN id SET DEFAULT next
ALTER TABLE ONLY public.ci_group_variables ALTER COLUMN id SET DEFAULT nextval('public.ci_group_variables_id_seq'::regclass);
ALTER TABLE ONLY public.ci_instance_variables ALTER COLUMN id SET DEFAULT nextval('public.ci_instance_variables_id_seq'::regclass);
ALTER TABLE ONLY public.ci_job_artifacts ALTER COLUMN id SET DEFAULT nextval('public.ci_job_artifacts_id_seq'::regclass);
ALTER TABLE ONLY public.ci_job_variables ALTER COLUMN id SET DEFAULT nextval('public.ci_job_variables_id_seq'::regclass);
......@@ -7892,6 +7916,9 @@ ALTER TABLE ONLY public.ci_daily_report_results
ALTER TABLE ONLY public.ci_group_variables
ADD CONSTRAINT ci_group_variables_pkey PRIMARY KEY (id);
ALTER TABLE ONLY public.ci_instance_variables
ADD CONSTRAINT ci_instance_variables_pkey PRIMARY KEY (id);
ALTER TABLE ONLY public.ci_job_artifacts
ADD CONSTRAINT ci_job_artifacts_pkey PRIMARY KEY (id);
......@@ -9073,6 +9100,8 @@ CREATE INDEX index_ci_daily_report_results_on_last_pipeline_id ON public.ci_dail
CREATE UNIQUE INDEX index_ci_group_variables_on_group_id_and_key ON public.ci_group_variables USING btree (group_id, key);
CREATE UNIQUE INDEX index_ci_instance_variables_on_key ON public.ci_instance_variables USING btree (key);
CREATE INDEX index_ci_job_artifacts_file_store_is_null ON public.ci_job_artifacts USING btree (id) WHERE (file_store IS NULL);
CREATE INDEX index_ci_job_artifacts_on_expire_at_and_job_id ON public.ci_job_artifacts USING btree (expire_at, job_id);
......@@ -13556,6 +13585,7 @@ COPY "schema_migrations" (version) FROM STDIN;
20200420201933
20200421092907
20200421233150
20200422091541
20200422213749
20200423075720
20200423080334
......
......@@ -74,12 +74,12 @@ the following preparations into account.
#### Preparation when adding migrations
- Ensure `db/structure.sql` is updated.
- Ensure `db/structure.sql` is updated as [documented](migration_style_guide.md#schema-changes).
- Make migrations reversible by using the `change` method or include a `down` method when using `up`.
- Include either a rollback procedure or describe how to roll back the changes.
- Add the output of both migrating and rolling back for all migrations into the MR description
- Ensure the down method reverts the changes in `db/structure.sql`
- Update the migration output whenever you modify the migrations during the review process
- Add the output of both migrating and rolling back for all migrations into the MR description.
- Ensure the down method reverts the changes in `db/structure.sql`.
- Update the migration output whenever you modify the migrations during the review process.
- Add tests for the migration in `spec/migrations` if necessary. See [Testing Rails migrations at GitLab](testing_guide/testing_migrations_guide.md) for more details.
- When [high-traffic](https://gitlab.com/gitlab-org/gitlab/-/blob/master/rubocop/migration_helpers.rb#L12) tables are involved in the migration, use the [`with_lock_retries`](migration_style_guide.md#retry-mechanism-when-acquiring-database-locks) helper method. Review the relevant [examples in our documentation](migration_style_guide.md#examples) for use cases and solutions.
- Ensure RuboCop checks are not disabled unless there's a valid reason to.
......
......@@ -35,9 +35,29 @@ and post-deployment migrations (`db/post_migrate`) are run after the deployment
## Schema Changes
Migrations that make changes to the database schema (e.g. adding a column) can
only be added in the monthly release, patch releases may only contain data
migrations _unless_ schema changes are absolutely required to solve a problem.
Changes to the schema should be committed to `db/structure.sql`. This
file is automatically generated by Rails, so you normally should not
edit this file by hand. If your migration is adding a column to a
table, that column will be added at the bottom. Please do not reorder
columns manually for existing tables as this will cause confusion for
other people using `db/structure.sql` generated by Rails.
When your local database in your GDK diverges from the schema on
`master` it might be hard to cleanly commit the schema changes to
Git. In that case you can use the `scripts/regenerate-schema` script to
regenerate a clean `db/structure.sql` for the migrations you're
adding. This script will apply all migrations found in `db/migrate`
or `db/post_migrate`, so if there are any migrations you don't want to
commit to the schema, rename or remove them. If your branch is not
targeting `master` you can set the `TARGET` environment variable.
```sh
# Regenerate schema against `master`
scripts/regenerate-schema
# Regenerate schema against `12-9-stable-ee`
TARGET=12-9-stable-ee scripts/regenerate-schema
```
## What Requires Downtime?
......
......@@ -21,7 +21,8 @@ module Gitlab
state_id: map_status(jira_issue.status.statusCategory),
updated_at: jira_issue.updated,
created_at: jira_issue.created,
author_id: project.creator_id, # TODO: map actual author: https://gitlab.com/gitlab-org/gitlab/-/issues/210580
author_id: reporter,
assignee_ids: assignees,
label_ids: label_ids
}
end
......@@ -34,8 +35,6 @@ module Gitlab
def description
body = []
body << formatter.author_line(jira_issue.reporter.displayName)
body << formatter.assignee_line(jira_issue.assignee.displayName) if jira_issue.assignee
body << jira_issue.description
body << MetadataCollector.new(jira_issue).execute
......@@ -51,6 +50,38 @@ module Gitlab
end
end
# Maps a Jira user's email address to a GitLab user id.
#
# Returns the id of the first user matching the email who is also a
# member of the importing project; returns nil when the email is blank,
# no user matches, or the matched user is not a project member.
def map_user_id(email)
  return unless email
  # We also include emails that are not yet confirmed
  users = User.by_any_email(email).to_a
  # this event should never happen but we should log it in case we have invalid data
  log_user_mapping_message('Multiple users found for an email address', email) if users.count > 1
  user = users.first
  # Only map users that belong to the project; otherwise log the miss and
  # return nil so callers can fall back to a default (e.g. project creator).
  unless project.project_member(user)
    log_user_mapping_message('Jira user not found', email)
    return
  end
  user.id
end
# GitLab user id for the issue author: the mapped Jira reporter, or the
# project creator when the reporter cannot be mapped.
def reporter
  map_user_id(jira_issue&.reporter&.emailAddress) || project.creator_id
end
# Assignee ids for the issue as a single-element array (GitLab issues
# take a list of assignees), or nil when the Jira assignee is missing
# or cannot be mapped.
def assignees
  found_user_id = map_user_id(jira_issue&.assignee&.emailAddress)
  return unless found_user_id
  [found_user_id]
end
# We already create labels in Gitlab::JiraImport::LabelsImporter stage but
# there is a possibility it may fail or
# new labels were created on the Jira in the meantime
......@@ -59,6 +90,19 @@ module Gitlab
Gitlab::JiraImport::HandleLabelsService.new(project, jira_issue.fields['labels']).execute
end
# Memoized structured logger for import events.
def logger
  @logger ||= Gitlab::Import::Logger.build
end
# Logs a user-mapping event with enough context (project and email) to
# investigate unmapped or ambiguous Jira users after an import.
def log_user_mapping_message(message, email)
  logger.info(
    project_id: project.id,
    project_path: project.full_path,
    user_email: email,
    message: message
  )
end
end
end
end
......@@ -24,6 +24,7 @@ module Gitlab
.merge(features_usage_data)
.merge(components_usage_data)
.merge(cycle_analytics_usage_data)
.merge(object_store_usage_data)
end
def to_json(force_refresh: false)
......@@ -237,6 +238,40 @@ module Gitlab
'unknown_app_server_type'
end
# Collects object-storage settings for a single component (e.g.
# 'artifacts', 'lfs'). Every Settings read is wrapped in alt_usage_data
# so a misconfigured node reports the fallback value instead of raising.
# Returns a hash with at least :enabled, and a nested :object_store hash
# when object storage is configured for the component.
def object_store_config(component)
  config = alt_usage_data(fallback: nil) do
    Settings[component]['object_store']
  end
  if config
    {
      enabled: alt_usage_data { Settings[component]['enabled'] },
      object_store: {
        enabled: alt_usage_data { config['enabled'] },
        direct_upload: alt_usage_data { config['direct_upload'] },
        background_upload: alt_usage_data { config['background_upload'] },
        provider: alt_usage_data { config['connection']['provider'] }
      }
    }
  else
    # No object storage configured for this component; report only
    # whether the feature itself is enabled.
    {
      enabled: alt_usage_data { Settings[component]['enabled'] }
    }
  end
end
# Usage-ping payload describing object-storage configuration for each
# component that supports it.
def object_store_usage_data
  components = %w[artifacts external_diffs lfs uploads packages]

  {
    object_store: components.map { |component| [component.to_sym, object_store_config(component)] }.to_h
  }
end
def ingress_modsecurity_usage
::Clusters::Applications::IngressModsecurityUsageService.new.execute
end
......
......@@ -13129,6 +13129,9 @@ msgstr ""
msgid "Metrics|For grouping similar metrics"
msgstr ""
msgid "Metrics|Go back"
msgstr ""
msgid "Metrics|Invalid time range, please verify."
msgstr ""
......
......@@ -75,7 +75,6 @@ module QA
autoload :CiVariable, 'qa/resource/ci_variable'
autoload :Runner, 'qa/resource/runner'
autoload :PersonalAccessToken, 'qa/resource/personal_access_token'
autoload :KubernetesCluster, 'qa/resource/kubernetes_cluster'
autoload :User, 'qa/resource/user'
autoload :ProjectMilestone, 'qa/resource/project_milestone'
autoload :Members, 'qa/resource/members'
......@@ -89,6 +88,11 @@ module QA
autoload :UserGPG, 'qa/resource/user_gpg'
autoload :Visibility, 'qa/resource/visibility'
module KubernetesCluster
autoload :Base, 'qa/resource/kubernetes_cluster/base'
autoload :ProjectCluster, 'qa/resource/kubernetes_cluster/project_cluster'
end
module Events
autoload :Base, 'qa/resource/events/base'
autoload :Project, 'qa/resource/events/project'
......
# frozen_string_literal: true
require 'securerandom'
module QA
  module Resource
    # Fabricates a Kubernetes cluster for a project through the UI and
    # optionally installs cluster applications (Helm, Ingress, ...).
    class KubernetesCluster < Base
      attr_writer :project, :cluster,
        :install_helm_tiller, :install_ingress, :install_prometheus, :install_runner, :domain
      # Lazily reads the ingress IP from the cluster details page.
      attribute :ingress_ip do
        Page::Project::Operations::Kubernetes::Show.perform(&:ingress_ip)
      end
      def fabricate!
        @project.visit!
        Page::Project::Menu.perform(
          &:go_to_operations_kubernetes)
        Page::Project::Operations::Kubernetes::Index.perform(
          &:add_kubernetes_cluster)
        Page::Project::Operations::Kubernetes::Add.perform(
          &:add_existing_cluster)
        # Fill in the connection details of the existing cluster.
        Page::Project::Operations::Kubernetes::AddExisting.perform do |cluster_page|
          cluster_page.set_cluster_name(@cluster.cluster_name)
          cluster_page.set_api_url(@cluster.api_url)
          cluster_page.set_ca_certificate(@cluster.ca_certificate)
          cluster_page.set_token(@cluster.token)
          cluster_page.uncheck_rbac! unless @cluster.rbac
          cluster_page.add_cluster!
        end
        if @install_helm_tiller
          Page::Project::Operations::Kubernetes::Show.perform do |show|
            # We must wait a few seconds for permissions to be set up correctly for new cluster
            sleep 10
            # Open applications tab
            show.open_applications
            # Helm must be installed before everything else
            show.install!(:helm)
            show.await_installed(:helm)
            # Kick off the remaining installs before awaiting any of them
            # so the applications install in parallel.
            show.install!(:ingress) if @install_ingress
            show.install!(:prometheus) if @install_prometheus
            show.install!(:runner) if @install_runner
            show.await_installed(:ingress) if @install_ingress
            show.await_installed(:prometheus) if @install_prometheus
            show.await_installed(:runner) if @install_runner
            if @install_ingress
              # The ingress IP only exists once the install completes; use it
              # to derive a nip.io wildcard domain for the cluster.
              populate(:ingress_ip)
              show.open_details
              show.set_domain("#{ingress_ip}.nip.io")
              show.save_domain
            end
          end
        end
      end
    end
  end
end
# frozen_string_literal: true

require 'securerandom'

module QA
  module Resource
    module KubernetesCluster
      # Shared attributes and defaults for Kubernetes cluster resources
      # (e.g. project clusters).
      class Base < Resource::Base
        attr_writer :add_name_uuid

        attribute :id
        attribute :name
        attribute :domain
        attribute :enabled
        attribute :managed
        attribute :management_project_id
        attribute :api_url
        attribute :token
        attribute :ca_cert
        attribute :namespace
        attribute :authorization_type
        attribute :environment_scope

        def initialize
          @add_name_uuid = true
          @enabled = true
          @managed = true
          @authorization_type = :rbac
          @environment_scope = :*
        end

        # Appends a random suffix to the supplied name (unless disabled via
        # add_name_uuid=) so parallel test runs do not collide on names.
        def name=(new_name)
          @name =
            if @add_name_uuid
              "#{new_name}-#{SecureRandom.hex(5)}"
            else
              new_name
            end
        end
      end
    end
  end
end
# frozen_string_literal: true
module QA
  module Resource
    module KubernetesCluster
      # Fabricates a project-level Kubernetes cluster through the UI and
      # optionally installs cluster applications (Helm, Ingress, ...).
      class ProjectCluster < Base
        attr_writer :cluster,
          :install_helm_tiller, :install_ingress, :install_prometheus, :install_runner, :domain
        # Project the cluster is attached to; fabricated on demand.
        attribute :project do
          Resource::Project.fabricate!
        end
        # Lazily reads the ingress IP from the cluster details page.
        attribute :ingress_ip do
          Page::Project::Operations::Kubernetes::Show.perform(&:ingress_ip)
        end
        def fabricate!
          project.visit!
          Page::Project::Menu.perform(
            &:go_to_operations_kubernetes)
          Page::Project::Operations::Kubernetes::Index.perform(
            &:add_kubernetes_cluster)
          Page::Project::Operations::Kubernetes::Add.perform(
            &:add_existing_cluster)
          # Fill in the connection details of the existing cluster.
          Page::Project::Operations::Kubernetes::AddExisting.perform do |cluster_page|
            cluster_page.set_cluster_name(@cluster.cluster_name)
            cluster_page.set_api_url(@cluster.api_url)
            cluster_page.set_ca_certificate(@cluster.ca_certificate)
            cluster_page.set_token(@cluster.token)
            cluster_page.uncheck_rbac! unless @cluster.rbac
            cluster_page.add_cluster!
          end
          if @install_helm_tiller
            Page::Project::Operations::Kubernetes::Show.perform do |show|
              # We must wait a few seconds for permissions to be set up correctly for new cluster
              sleep 10
              # Open applications tab
              show.open_applications
              # Helm must be installed before everything else
              show.install!(:helm)
              show.await_installed(:helm)
              # Kick off the remaining installs before awaiting any of them
              # so the applications install in parallel.
              show.install!(:ingress) if @install_ingress
              show.install!(:prometheus) if @install_prometheus
              show.install!(:runner) if @install_runner
              show.await_installed(:ingress) if @install_ingress
              show.await_installed(:prometheus) if @install_prometheus
              show.await_installed(:runner) if @install_runner
              if @install_ingress
                # The ingress IP only exists once the install completes; use it
                # to derive a nip.io wildcard domain for the cluster.
                populate(:ingress_ip)
                show.open_details
                show.set_domain("#{ingress_ip}.nip.io")
                show.save_domain
              end
            end
          end
        end
      end
    end
  end
end
......@@ -35,7 +35,7 @@ module QA
end
# Connect K8s cluster
Resource::KubernetesCluster.fabricate! do |k8s_cluster|
Resource::KubernetesCluster::ProjectCluster.fabricate! do |k8s_cluster|
k8s_cluster.project = project
k8s_cluster.cluster = cluster
k8s_cluster.install_helm_tiller = true
......
......@@ -21,12 +21,10 @@ module QA
end
it 'can create and associate a project cluster', :smoke do
Resource::KubernetesCluster.fabricate_via_browser_ui! do |k8s_cluster|
Resource::KubernetesCluster::ProjectCluster.fabricate_via_browser_ui! do |k8s_cluster|
k8s_cluster.project = project
k8s_cluster.cluster = cluster
end
project.visit!
end.project.visit!
Page::Project::Menu.perform(&:go_to_operations_kubernetes)
......
......@@ -69,7 +69,7 @@ module QA
project.description = 'Cluster with Prometheus'
end
@cluster_props = Resource::KubernetesCluster.fabricate_via_browser_ui! do |cluster_settings|
@cluster_props = Resource::KubernetesCluster::ProjectCluster.fabricate_via_browser_ui! do |cluster_settings|
cluster_settings.project = @project
cluster_settings.cluster = @cluster
cluster_settings.install_helm_tiller = true
......
#!/usr/bin/env ruby
# frozen_string_literal: true

# Regenerates db/structure.sql from a clean baseline.
#
# Strategy: fetch the schema file as it exists on the target branch
# (merge-base), reset the test database to that clean schema with the
# migrations hidden from Rails, then re-apply this branch's migrations on
# top. Designed to run both locally and inside GitLab CI.
require 'net/http'
require 'uri'

class SchemaRegenerator
  ##
  # Filename of the schema
  #
  # This file is being regenerated by this script.
  FILENAME = 'db/structure.sql'

  ##
  # Directories where migrations are stored
  #
  # The methods +hide_migrations+ and +unhide_migrations+ will rename
  # these to disable/enable migrations.
  MIGRATION_DIRS = %w[db/migrate db/post_migrate].freeze

  # Entry point. Runs from the repository root (one level above this
  # script's directory) so that all relative paths resolve correctly.
  def execute
    Dir.chdir(File.expand_path('..', __dir__)) do
      checkout_ref
      checkout_clean_schema
      hide_migrations
      reset_db
      # Migrations must be visible again *before* running them.
      unhide_migrations
      migrate
    ensure
      # Restore migration dirs even when a step above raised, so the
      # working tree is never left in a broken state. Calling this twice
      # on the happy path is safe: the second rename is a no-op (ENOENT
      # is rescued inside). Requires Ruby >= 2.6 for ensure in a block.
      unhide_migrations
    end
  end

  private

  ##
  # Git checkout +CI_COMMIT_SHA+.
  #
  # When running from CI, checkout the clean commit,
  # not the merged result.
  def checkout_ref
    return unless ci?

    run %Q[git checkout #{source_ref}]
    # Drop any untracked leftovers under db/ so they cannot interfere.
    run %q[git clean -f -- db]
  end

  ##
  # Checkout the clean schema from the target branch
  def checkout_clean_schema
    # Prefer the remote download (works in shallow CI clones); fall back
    # to a local git checkout when no project URL is configured.
    remote_checkout_clean_schema || local_checkout_clean_schema
  end

  ##
  # Get clean schema from remote servers
  #
  # This script might run in CI, using a shallow clone, so to checkout
  # the file, download it from the server.
  #
  # Returns false when +CI_PROJECT_URL+ is not set, true on success.
  def remote_checkout_clean_schema
    return false unless project_url

    # Trailing slashes matter: URI.join treats segments without them as
    # files and would drop them from the resulting path.
    uri = URI.join("#{project_url}/", 'raw/', "#{merge_base}/", FILENAME)

    download_schema(uri)
  end

  ##
  # Download the schema from the given +uri+.
  #
  # Streams the response body straight into +FILENAME+ and raises on any
  # non-200 response. Always returns true on success.
  def download_schema(uri)
    puts "Downloading #{uri}..."

    # NOTE(review): use_ssl is hard-coded to true — assumes the project
    # URL is always https; confirm for local/non-TLS setups.
    Net::HTTP.start(uri.host, uri.port, use_ssl: true) do |http|
      request = Net::HTTP::Get.new(uri.request_uri)
      # Generous read timeout: structure.sql is large and CI links can be slow.
      http.read_timeout = 500

      http.request(request) do |response|
        raise("Failed to download file: #{response.code} #{response.message}") if response.code.to_i != 200

        File.open(FILENAME, 'w') do |io|
          response.read_body do |chunk|
            io.write(chunk)
          end
        end
      end
    end

    true
  end

  ##
  # Git checkout the schema from target branch.
  #
  # Ask git to checkout the schema from the target branch and reset
  # the file to unstage the changes.
  def local_checkout_clean_schema
    run %Q[git checkout #{merge_base} -- #{FILENAME}]
    # `git checkout <ref> -- <path>` stages the file; unstage it so the
    # later `git diff` comparison sees it as a working-tree change only.
    run %Q[git reset -- #{FILENAME}]
  end

  ##
  # Move migrations to where Rails will not find them.
  #
  # To reset the database to clean schema defined in +FILENAME+, move
  # the migrations to a path where Rails will not find them, otherwise
  # +db:reset+ would abort. Later when the migrations should be
  # applied, use +unhide_migrations+ to bring them back.
  def hide_migrations
    MIGRATION_DIRS.each do |dir|
      # "db/migrate" -> "db/migrate__" etc.
      File.rename(dir, "#{dir}__")
    end
  end

  ##
  # Undo the effect of +hide_migrations+.
  #
  # Place back the migrations which might be moved by
  # +hide_migrations+.
  def unhide_migrations
    error = nil

    MIGRATION_DIRS.each do |dir|
      File.rename("#{dir}__", dir)
    rescue Errno::ENOENT
      # Already restored (or never hidden) — nothing to do for this dir.
      nil
    rescue StandardError => e
      # Save error for later, but continue with other dirs first
      error = e
    end

    # Re-raise only after every directory had a chance to be restored.
    raise error if error
  end

  ##
  # Run rake task to reset the database.
  def reset_db
    run %q[bin/rails db:reset RAILS_ENV=test]
  end

  ##
  # Run rake task to run migrations.
  def migrate
    run %q[bin/rails db:migrate RAILS_ENV=test]
  end

  ##
  # Run the given +cmd+.
  #
  # The command is colored green, and the output of the command is
  # colored gray.
  # When the command failed an exception is raised.
  def run(cmd)
    puts "\e[32m$ #{cmd}\e[37m"
    ret = system(cmd)
    puts "\e[0m"
    raise("Command failed") unless ret
  end

  ##
  # Return the base commit between source and target branch.
  #
  # Memoized; shells out to git once.
  def merge_base
    @merge_base ||= `git merge-base #{target_branch} #{source_ref}`.chomp
  end

  ##
  # Return the name of the target branch
  #
  # Get source ref from CI environment variable, or read the +TARGET+
  # environment+ variable, or default to +HEAD+.
  def target_branch
    ENV['CI_MERGE_REQUEST_TARGET_BRANCH_NAME'] || ENV['TARGET'] || 'master'
  end

  ##
  # Return the source ref
  #
  # Get source ref from CI environment variable, or default to +HEAD+.
  def source_ref
    ENV['CI_COMMIT_SHA'] || 'HEAD'
  end

  ##
  # Return the project URL from CI environment variable.
  #
  # Nil outside CI, which makes +remote_checkout_clean_schema+ fall back
  # to the local checkout.
  def project_url
    ENV['CI_PROJECT_URL']
  end

  ##
  # Return whether the script is running from CI
  #
  # Truthy when the CI environment variable is set (any non-nil string).
  def ci?
    ENV['CI']
  end
end

SchemaRegenerator.new.execute
......@@ -2,13 +2,13 @@
# Fails the build when db/structure.sql differs from the committed version
# after migrations have been run; prints the offending diff for debugging.
# Exits 1 on drift, falls through (exit 0) when the schema is clean.
schema_changed() {
  # Non-empty `git diff --name-only` output means the file was modified.
  if [ ! -z "$(git diff --name-only -- db/structure.sql)" ]; then
    # NOTE(review): the two consecutive printf messages below (and in the
    # else branch) look like leftover old/new variants of the same message
    # from a merge — confirm whether only one of each pair is intended.
    printf "db/structure.sql after rake db:migrate:reset is different from one in the repository"
    printf "Schema changes are not cleanly committed to db/structure.sql\n"
    printf "The diff is as follows:\n"
    diff=$(git diff -p --binary -- db/structure.sql)
    printf "%s" "$diff"
    exit 1
  else
    printf "db/structure.sql after rake db:migrate:reset matches one in the repository"
    printf "Schema changes are correctly applied to db/structure.sql\n"
  fi
}
......
# frozen_string_literal: true

# FactoryBot factory for Ci::InstanceVariable (instance-level CI/CD
# variables). Keys are sequenced to satisfy the model's key-uniqueness
# validation when multiple records are created in one example.
FactoryBot.define do
  factory :ci_instance_variable, class: 'Ci::InstanceVariable' do
    # Unique key per built record: VARIABLE_1, VARIABLE_2, ...
    sequence(:key) { |n| "VARIABLE_#{n}" }
    value { 'VARIABLE_VALUE' }
    masked { false }

    trait(:protected) do
      # `protected` is a Ruby keyword-like DSL method in FactoryBot, so the
      # attribute must be declared via add_attribute instead of a bare call.
      add_attribute(:protected) { true }
    end
  end
end
......@@ -56,7 +56,7 @@ describe('Dashboard Panel', () => {
const findTitle = () => wrapper.find({ ref: 'graphTitle' });
const findContextualMenu = () => wrapper.find({ ref: 'contextualMenu' });
const createWrapper = (props, options = {}) => {
const createWrapper = (props, options) => {
wrapper = shallowMount(DashboardPanel, {
propsData: {
graphData,
......@@ -108,24 +108,51 @@ describe('Dashboard Panel', () => {
wrapper.destroy();
});
describe('Empty Chart component', () => {
it('renders the chart title', () => {
expect(findTitle().text()).toBe(graphDataEmpty.title);
});
it('renders the chart title', () => {
expect(findTitle().text()).toBe(graphDataEmpty.title);
});
it('renders the no download csv link', () => {
expect(wrapper.find({ ref: 'downloadCsvLink' }).exists()).toBe(false);
});
it('renders no download csv link', () => {
expect(wrapper.find({ ref: 'downloadCsvLink' }).exists()).toBe(false);
});
it('does not contain graph widgets', () => {
expect(findContextualMenu().exists()).toBe(false);
});
it('does not contain graph widgets', () => {
expect(findContextualMenu().exists()).toBe(false);
});
it('is a Vue instance', () => {
expect(wrapper.find(MonitorEmptyChart).exists()).toBe(true);
expect(wrapper.find(MonitorEmptyChart).isVueInstance()).toBe(true);
it('The Empty Chart component is rendered and is a Vue instance', () => {
expect(wrapper.find(MonitorEmptyChart).exists()).toBe(true);
expect(wrapper.find(MonitorEmptyChart).isVueInstance()).toBe(true);
});
});
describe('When graphData is null', () => {
beforeEach(() => {
createWrapper({
graphData: null,
});
});
afterEach(() => {
wrapper.destroy();
});
it('renders no chart title', () => {
expect(findTitle().text()).toBe('');
});
it('renders no download csv link', () => {
expect(wrapper.find({ ref: 'downloadCsvLink' }).exists()).toBe(false);
});
it('does not contain graph widgets', () => {
expect(findContextualMenu().exists()).toBe(false);
});
it('The Empty Chart component is rendered and is a Vue instance', () => {
expect(wrapper.find(MonitorEmptyChart).exists()).toBe(true);
expect(wrapper.find(MonitorEmptyChart).isVueInstance()).toBe(true);
});
});
describe('When graphData is available', () => {
......
......@@ -212,6 +212,97 @@ describe('Dashboard', () => {
});
});
describe('single panel expands to "full screen" mode', () => {
const findExpandedPanel = () => wrapper.find({ ref: 'expandedPanel' });
describe('when the panel is not expanded', () => {
beforeEach(() => {
createShallowWrapper({ hasMetrics: true });
setupStoreWithData(wrapper.vm.$store);
return wrapper.vm.$nextTick();
});
it('expanded panel is not visible', () => {
expect(findExpandedPanel().isVisible()).toBe(false);
});
it('can set a panel as expanded', () => {
const panel = wrapper.findAll(DashboardPanel).at(1);
jest.spyOn(store, 'dispatch');
panel.vm.$emit('expand');
const groupData = metricsDashboardViewModel.panelGroups[0];
expect(store.dispatch).toHaveBeenCalledWith('monitoringDashboard/setExpandedPanel', {
group: groupData.group,
panel: expect.objectContaining({
id: groupData.panels[0].id,
}),
});
});
});
describe('when the panel is expanded', () => {
let group;
let panel;
const MockPanel = {
template: `<div><slot name="topLeft"/></div>`,
};
beforeEach(() => {
createShallowWrapper({ hasMetrics: true }, { stubs: { DashboardPanel: MockPanel } });
setupStoreWithData(wrapper.vm.$store);
const { panelGroups } = wrapper.vm.$store.state.monitoringDashboard.dashboard;
group = panelGroups[0].group;
[panel] = panelGroups[0].panels;
wrapper.vm.$store.commit(`monitoringDashboard/${types.SET_EXPANDED_PANEL}`, {
group,
panel,
});
return wrapper.vm.$nextTick();
});
it('displays a single panel and others are hidden', () => {
const panels = wrapper.findAll(MockPanel);
const visiblePanels = panels.filter(w => w.isVisible());
expect(findExpandedPanel().isVisible()).toBe(true);
// v-show for hiding panels is more performant than v-if
// check for panels to be hidden.
expect(panels.length).toBe(metricsDashboardPanelCount + 1);
expect(visiblePanels.length).toBe(1);
});
it('sets a link to the expanded panel', () => {
const searchQuery =
'?group=System%20metrics%20(Kubernetes)&title=Memory%20Usage%20(Total)&y_label=Total%20Memory%20Used%20(GB)';
expect(findExpandedPanel().attributes('clipboard-text')).toEqual(
expect.stringContaining(searchQuery),
);
});
it('restores full dashboard by clicking `back`', () => {
const backBtn = wrapper.find({ ref: 'goBackBtn' });
expect(backBtn.exists()).toBe(true);
jest.spyOn(store, 'dispatch');
backBtn.vm.$emit('click');
expect(store.dispatch).toHaveBeenCalledWith(
'monitoringDashboard/clearExpandedPanel',
undefined,
);
});
});
});
describe('when one of the metrics is missing', () => {
beforeEach(() => {
createShallowWrapper({ hasMetrics: true });
......@@ -499,11 +590,12 @@ describe('Dashboard', () => {
describe('Clipboard text in panels', () => {
const currentDashboard = 'TEST_DASHBOARD';
const panelIndex = 1; // skip expanded panel
const getClipboardTextAt = i =>
const getClipboardTextFirstPanel = () =>
wrapper
.findAll(DashboardPanel)
.at(i)
.at(panelIndex)
.props('clipboardText');
beforeEach(() => {
......@@ -515,18 +607,18 @@ describe('Dashboard', () => {
});
it('contains a link to the dashboard', () => {
expect(getClipboardTextAt(0)).toContain(`dashboard=${currentDashboard}`);
expect(getClipboardTextAt(0)).toContain(`group=`);
expect(getClipboardTextAt(0)).toContain(`title=`);
expect(getClipboardTextAt(0)).toContain(`y_label=`);
expect(getClipboardTextFirstPanel()).toContain(`dashboard=${currentDashboard}`);
expect(getClipboardTextFirstPanel()).toContain(`group=`);
expect(getClipboardTextFirstPanel()).toContain(`title=`);
expect(getClipboardTextFirstPanel()).toContain(`y_label=`);
});
it('strips the undefined parameter', () => {
wrapper.setProps({ currentDashboard: undefined });
return wrapper.vm.$nextTick(() => {
expect(getClipboardTextAt(0)).not.toContain(`dashboard=`);
expect(getClipboardTextAt(0)).toContain(`y_label=`);
expect(getClipboardTextFirstPanel()).not.toContain(`dashboard=`);
expect(getClipboardTextFirstPanel()).toContain(`y_label=`);
});
});
......@@ -534,8 +626,8 @@ describe('Dashboard', () => {
wrapper.setProps({ currentDashboard: null });
return wrapper.vm.$nextTick(() => {
expect(getClipboardTextAt(0)).not.toContain(`dashboard=`);
expect(getClipboardTextAt(0)).toContain(`y_label=`);
expect(getClipboardTextFirstPanel()).not.toContain(`dashboard=`);
expect(getClipboardTextFirstPanel()).toContain(`y_label=`);
});
});
});
......
......@@ -20,6 +20,8 @@ import {
fetchPrometheusMetric,
setInitialState,
filterEnvironments,
setExpandedPanel,
clearExpandedPanel,
setGettingStartedEmptyState,
duplicateSystemDashboard,
} from '~/monitoring/stores/actions';
......@@ -870,4 +872,43 @@ describe('Monitoring store actions', () => {
});
});
});
describe('setExpandedPanel', () => {
let state;
beforeEach(() => {
state = storeState();
});
it('Sets a panel as expanded', () => {
const group = 'group_1';
const panel = { title: 'A Panel' };
return testAction(
setExpandedPanel,
{ group, panel },
state,
[{ type: types.SET_EXPANDED_PANEL, payload: { group, panel } }],
[],
);
});
});
describe('clearExpandedPanel', () => {
let state;
beforeEach(() => {
state = storeState();
});
it('Clears a panel as expanded', () => {
return testAction(
clearExpandedPanel,
undefined,
state,
[{ type: types.SET_EXPANDED_PANEL, payload: { group: null, panel: null } }],
[],
);
});
});
});
......@@ -342,4 +342,26 @@ describe('Monitoring mutations', () => {
expect(stateCopy.allDashboards).toEqual(dashboardGitResponse);
});
});
describe('SET_EXPANDED_PANEL', () => {
it('no expanded panel is set initally', () => {
expect(stateCopy.expandedPanel.panel).toEqual(null);
expect(stateCopy.expandedPanel.group).toEqual(null);
});
it('sets a panel id as the expanded panel', () => {
const group = 'group_1';
const panel = { title: 'A Panel' };
mutations[types.SET_EXPANDED_PANEL](stateCopy, { group, panel });
expect(stateCopy.expandedPanel).toEqual({ group, panel });
});
it('clears panel as the expanded panel', () => {
mutations[types.SET_EXPANDED_PANEL](stateCopy, { group: null, panel: null });
expect(stateCopy.expandedPanel.group).toEqual(null);
expect(stateCopy.expandedPanel.panel).toEqual(null);
});
});
});
......@@ -16,7 +16,8 @@ describe Gitlab::JiraImport::IssueSerializer do
let(:description) { 'basic description' }
let(:created_at) { '2020-01-01 20:00:00' }
let(:updated_at) { '2020-01-10 20:00:00' }
let(:assignee) { double(displayName: 'Solver') }
let(:assignee) { double(displayName: 'Solver', emailAddress: 'assignee@example.com') }
let(:reporter) { double(displayName: 'Reporter', emailAddress: 'reporter@example.com') }
let(:jira_status) { 'new' }
let(:parent_field) do
......@@ -42,7 +43,7 @@ describe Gitlab::JiraImport::IssueSerializer do
created: created_at,
updated: updated_at,
assignee: assignee,
reporter: double(displayName: 'Reporter'),
reporter: reporter,
status: double(statusCategory: { 'key' => jira_status }),
fields: fields
)
......@@ -54,10 +55,6 @@ describe Gitlab::JiraImport::IssueSerializer do
let(:expected_description) do
<<~MD
*Created by: Reporter*
*Assigned to: Solver*
basic description
---
......@@ -80,6 +77,7 @@ describe Gitlab::JiraImport::IssueSerializer do
updated_at: updated_at,
created_at: created_at,
author_id: project.creator_id,
assignee_ids: nil,
label_ids: [project_label.id, group_label.id] + Label.reorder(id: :asc).last(2).pluck(:id)
)
end
......@@ -88,22 +86,108 @@ describe Gitlab::JiraImport::IssueSerializer do
expect(Issue.new(subject)).to be_valid
end
it 'creates all missing labels (on project level)' do
expect { subject }.to change { Label.count }.from(3).to(5)
context 'labels' do
it 'creates all missing labels (on project level)' do
expect { subject }.to change { Label.count }.from(3).to(5)
expect(Label.find_by(title: 'frontend').project).to eq(project)
expect(Label.find_by(title: 'backend').project).to eq(project)
end
context 'when there are no new labels' do
let(:labels_field) { %w(bug dev) }
expect(Label.find_by(title: 'frontend').project).to eq(project)
expect(Label.find_by(title: 'backend').project).to eq(project)
it 'assigns the labels to the Issue hash' do
expect(subject[:label_ids]).to match_array([project_label.id, group_label.id])
end
it 'does not create new labels' do
expect { subject }.not_to change { Label.count }.from(3)
end
end
end
context 'when there are no new labels' do
let(:labels_field) { %w(bug dev) }
context 'author' do
context 'when reporter maps to a GitLab user who is a project member' do
let!(:user) { create(:user, email: 'reporter@example.com') }
it 'assigns the labels to the Issue hash' do
expect(subject[:label_ids]).to match_array([project_label.id, group_label.id])
it 'sets the issue author to the mapped user' do
project.add_developer(user)
expect(subject[:author_id]).to eq(user.id)
end
end
it 'does not create new labels' do
expect { subject }.not_to change { Label.count }.from(3)
context 'when reporter maps to a GitLab user who is not a project member' do
let!(:user) { create(:user, email: 'reporter@example.com') }
it 'defaults the issue author to project creator' do
expect(subject[:author_id]).to eq(project.creator.id)
end
end
context 'when reporter does not map to a GitLab user' do
it 'defaults the issue author to project creator' do
expect(subject[:author_id]).to eq(project.creator.id)
end
end
context 'when reporter field is empty' do
let(:reporter) { nil }
it 'defaults the issue author to project creator' do
expect(subject[:author_id]).to eq(project.creator.id)
end
end
context 'when reporter field is missing email address' do
let(:reporter) { double(name: 'Reporter', emailAddress: nil) }
it 'defaults the issue author to project creator' do
expect(subject[:author_id]).to eq(project.creator.id)
end
end
end
context 'assignee' do
context 'when assignee maps to a GitLab user who is a project member' do
let!(:user) { create(:user, email: 'assignee@example.com') }
it 'sets the issue assignees to the mapped user' do
project.add_developer(user)
expect(subject[:assignee_ids]).to eq([user.id])
end
end
context 'when assignee maps to a GitLab user who is not a project member' do
let!(:user) { create(:user, email: 'assignee@example.com') }
it 'leaves the assignee empty' do
expect(subject[:assignee_ids]).to be_nil
end
end
context 'when assignee does not map to a GitLab user' do
it 'leaves the assignee empty' do
expect(subject[:assignee_ids]).to be_nil
end
end
context 'when assginee field is empty' do
let(:assignee) { nil }
it 'leaves the assignee empty' do
expect(subject[:assignee_ids]).to be_nil
end
end
context 'when assginee field is missing email address' do
let(:assignee) { double(name: 'Assignee', emailAddress: nil) }
it 'leaves the assignee empty' do
expect(subject[:assignee_ids]).to be_nil
end
end
end
end
......
......@@ -7,6 +7,8 @@ describe Gitlab::UsageData, :aggregate_failures do
before do
allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(false)
stub_object_store_settings
end
shared_examples "usage data execution" do
......@@ -82,6 +84,16 @@ describe Gitlab::UsageData, :aggregate_failures do
expect(count_data[:clusters_management_project]).to eq(1)
end
it 'gathers object store usage correctly' do
expect(subject[:object_store]).to eq(
{ artifacts: { enabled: true, object_store: { enabled: true, direct_upload: true, background_upload: false, provider: "AWS" } },
external_diffs: { enabled: false },
lfs: { enabled: true, object_store: { enabled: false, direct_upload: true, background_upload: false, provider: "AWS" } },
uploads: { enabled: nil, object_store: { enabled: false, direct_upload: true, background_upload: false, provider: "AWS" } },
packages: { enabled: true, object_store: { enabled: false, direct_upload: false, background_upload: true, provider: "AWS" } } }
)
end
it 'works when queries time out' do
allow_any_instance_of(ActiveRecord::Relation)
.to receive(:count).and_raise(ActiveRecord::StatementInvalid.new(''))
......@@ -223,6 +235,66 @@ describe Gitlab::UsageData, :aggregate_failures do
end
end
describe '#object_store_config' do
let(:component) { 'lfs' }
subject { described_class.object_store_config(component) }
context 'when object_store is not configured' do
it 'returns component enable status only' do
allow(Settings).to receive(:[]).with(component).and_return({ 'enabled' => false })
expect(subject).to eq({ enabled: false })
end
end
context 'when object_store is configured' do
it 'returns filtered object store config' do
allow(Settings).to receive(:[]).with(component)
.and_return(
{ 'enabled' => true,
'object_store' =>
{ 'enabled' => true,
'remote_directory' => component,
'direct_upload' => true,
'connection' =>
{ 'provider' => 'AWS', 'aws_access_key_id' => 'minio', 'aws_secret_access_key' => 'gdk-minio', 'region' => 'gdk', 'endpoint' => 'http://127.0.0.1:9000', 'path_style' => true },
'background_upload' => false,
'proxy_download' => false } })
expect(subject).to eq(
{ enabled: true, object_store: { enabled: true, direct_upload: true, background_upload: false, provider: "AWS" } })
end
end
context 'when retrieve component setting meets exception' do
it 'returns -1 for component enable status' do
allow(Settings).to receive(:[]).with(component).and_raise(StandardError)
expect(subject).to eq({ enabled: -1 })
end
end
end
describe '#object_store_usage_data' do
subject { described_class.object_store_usage_data }
it 'fetches object store config of five components' do
%w(artifacts external_diffs lfs uploads packages).each do |component|
expect(described_class).to receive(:object_store_config).with(component).and_return("#{component}_object_store_config")
end
expect(subject).to eq(
object_store: {
artifacts: 'artifacts_object_store_config',
external_diffs: 'external_diffs_object_store_config',
lfs: 'lfs_object_store_config',
uploads: 'uploads_object_store_config',
packages: 'packages_object_store_config'
})
end
end
describe '#cycle_analytics_usage_data' do
subject { described_class.cycle_analytics_usage_data }
......
# frozen_string_literal: true

require 'spec_helper'

# Model spec for instance-level CI/CD variables.
describe Ci::InstanceVariable do
  subject { build(:ci_instance_variable) }

  # Shared examples covering behaviour common to all CI variable models.
  it_behaves_like "CI variable"

  it { is_expected.to include_module(Ci::Maskable) }
  # The uniqueness error message embeds the duplicated key name, e.g.
  # "(SOME_KEY) has already been taken".
  it { is_expected.to validate_uniqueness_of(:key).with_message(/\(\w+\) has already been taken/) }

  describe '.unprotected' do
    # Re-point subject at the scope under test; records are created per
    # context below and the scope is evaluated lazily by `is_expected`.
    subject { described_class.unprotected }

    context 'when variable is protected' do
      before do
        create(:ci_instance_variable, :protected)
      end

      it 'returns nothing' do
        # Protected variables must be excluded from the scope.
        is_expected.to be_empty
      end
    end

    context 'when variable is not protected' do
      let(:variable) { create(:ci_instance_variable, protected: false) }

      it 'returns the variable' do
        # `variable` is created here (lazy let) before the scope runs.
        is_expected.to contain_exactly(variable)
      end
    end
  end
end
......@@ -153,5 +153,59 @@ module UsageDataHelpers
projects_with_expiration_policy_enabled_with_older_than_set_to_14d
projects_with_expiration_policy_enabled_with_older_than_set_to_30d
projects_with_expiration_policy_enabled_with_older_than_set_to_90d
object_store
).freeze
def stub_object_store_settings
allow(Settings).to receive(:[]).with('artifacts')
.and_return(
{ 'enabled' => true,
'object_store' =>
{ 'enabled' => true,
'remote_directory' => 'artifacts',
'direct_upload' => true,
'connection' =>
{ 'provider' => 'AWS', 'aws_access_key_id' => 'minio', 'aws_secret_access_key' => 'gdk-minio', 'region' => 'gdk', 'endpoint' => 'http://127.0.0.1:9000', 'path_style' => true },
'background_upload' => false,
'proxy_download' => false } }
)
allow(Settings).to receive(:[]).with('external_diffs').and_return({ 'enabled' => false })
allow(Settings).to receive(:[]).with('lfs')
.and_return(
{ 'enabled' => true,
'object_store' =>
{ 'enabled' => false,
'remote_directory' => 'lfs-objects',
'direct_upload' => true,
'connection' =>
{ 'provider' => 'AWS', 'aws_access_key_id' => 'minio', 'aws_secret_access_key' => 'gdk-minio', 'region' => 'gdk', 'endpoint' => 'http://127.0.0.1:9000', 'path_style' => true },
'background_upload' => false,
'proxy_download' => false } }
)
allow(Settings).to receive(:[]).with('uploads')
.and_return(
{ 'object_store' =>
{ 'enabled' => false,
'remote_directory' => 'uploads',
'direct_upload' => true,
'connection' =>
{ 'provider' => 'AWS', 'aws_access_key_id' => 'minio', 'aws_secret_access_key' => 'gdk-minio', 'region' => 'gdk', 'endpoint' => 'http://127.0.0.1:9000', 'path_style' => true },
'background_upload' => false,
'proxy_download' => false } }
)
allow(Settings).to receive(:[]).with('packages')
.and_return(
{ 'enabled' => true,
'object_store' =>
{ 'enabled' => false,
'remote_directory' => 'packages',
'direct_upload' => false,
'connection' =>
{ 'provider' => 'AWS', 'aws_access_key_id' => 'minio', 'aws_secret_access_key' => 'gdk-minio', 'region' => 'gdk', 'endpoint' => 'http://127.0.0.1:9000', 'path_style' => true },
'background_upload' => true,
'proxy_download' => false } }
)
end
end
......@@ -5,6 +5,8 @@ require 'spec_helper'
describe Gitlab::JiraImport::ImportIssueWorker do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project) }
let_it_be(:jira_issue_label_1) { create(:label, project: project) }
let_it_be(:jira_issue_label_2) { create(:label, project: project) }
let(:some_key) { 'some-key' }
describe 'modules' do
......@@ -17,7 +19,10 @@ describe Gitlab::JiraImport::ImportIssueWorker do
subject { described_class.new }
describe '#perform', :clean_gitlab_redis_cache do
let(:issue_attrs) { build(:issue, project_id: project.id).as_json.compact }
let(:issue_attrs) do
build(:issue, project_id: project.id, title: 'jira issue')
.as_json.merge('label_ids' => [jira_issue_label_1.id, jira_issue_label_2.id]).compact
end
context 'when any exception raised while inserting to DB' do
before do
......@@ -47,14 +52,22 @@ describe Gitlab::JiraImport::ImportIssueWorker do
context 'when import label exists' do
before do
Gitlab::JiraImport.cache_import_label_id(project.id, label.id)
end
it 'does not record import failure' do
subject.perform(project.id, 123, issue_attrs, some_key)
end
it 'does not record import failure' do
expect(label.issues.count).to eq(1)
expect(Gitlab::Cache::Import::Caching.read(Gitlab::JiraImport.failed_issues_counter_cache_key(project.id)).to_i).to eq(0)
end
it 'creates an issue with the correct attributes' do
issue = Issue.last
expect(issue.title).to eq('jira issue')
expect(issue.project).to eq(project)
expect(issue.labels).to match_array([label, jira_issue_label_1, jira_issue_label_2])
end
end
end
end
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册