Top Level Namespace
- Includes:
- GoodData::Model
Defined Under Namespace
Modules: Enumerable, FalseExtensions, GoodData, IntegerExtensions, NilExtensions, StringExtensions, TrueExtensions
Classes: Class, Object, PlaceholderExample, TestLogger
Constant Summary
- DEFAULT_BRICK =
'hello_world_brick'
- BRICK_PARAM_PREFIX =
'BRICK_PARAM_'
- HIDDEN_BRICK_PARAMS_PREFIX =
'HIDDEN_BRICK_PARAM_'
- RAW_DATA =
  {
    'metric' => {
      'content' => {
        'format' => '#,##0',
        'expression' => 'SELECT SUM([/gdc/md/ksjy0nr3goz6k8yrpklz97l0mych7nez/obj/700])'
      },
      'meta' => {
        'author' => '/gdc/account/profile/4e1e8cacc4989228e0ae531b30853248',
        'uri' => '/gdc/md/ksjy0nr3goz6k8yrpklz97l0mych7nez/obj/70',
        'tags' => 'a a b cg r t',
        'created' => '2014-04-30 22:47:57',
        'identifier' => 'afo7bx1VakCz',
        'deprecated' => '0',
        'summary' => '',
        'title' => 'sum of Lines changed',
        'category' => 'metric',
        'updated' => '2014-05-05 20:00:42',
        'contributor' => '/gdc/account/profile/4e1e8cacc4989228e0ae531b30853248'
      }
    }
  }
- ROW_BASED_DATA =
  [
    ['[email protected]', 'US', 'CZ', 'KZ'],
    ['[email protected]', 'US'],
    ['[email protected]', 'KZ']
  ]
- COLUMN_BASED_DATA_WITH_HEADERS =
  [
    { :login => '[email protected]', :country => 'US', :age => 14 },
    { :login => '[email protected]', :country => 'US', :age => 19 },
    { :login => '[email protected]', :country => 'KZ', :age => 30 }
  ]
- COLUMN_BASED_DATA_WITH_HEADERS_AND_NIL_VAL =
  [
    { :login => '[email protected]', :country => 'US', :age => 14 },
    { :login => '[email protected]', :country => 'US', :age => 19 },
    { :login => '[email protected]', :country => 'KZ', :age => nil }
  ]
- COLUMN_BASED_DATA_WITH_HEADERS_AND_EMPTY_VAL =
  [
    { :login => '[email protected]', :country => 'US', :age => 14 },
    { :login => '[email protected]', :country => 'US', :age => 19 },
    { :login => '[email protected]', :country => 'KZ', :age => '' }
  ]
- ANALYTICAL_DASHBOARD_RAW_DATA =
  {
    'analyticalDashboard' => {
      'content' => {
        'filterContext' => '/gdc/md/w2bbq79qeuqzjhwm9xln0865v7yb/obj/68',
        'layout' => {},
        'widgets' => ['/gdc/md/w2bbq79qeuqzjhwm9xln0865v7yb/obj/69']
      },
      'meta' => {
        'author' => '/gdc/account/profile/4e1e8cac228e0ae531b30853248',
        'uri' => '/gdc/md/w2bbq79qeuqzjhwm9xln0865v7yb/obj/70',
        'tags' => '',
        'created' => '2021-04-22 10:23:24',
        'identifier' => 'aadsy5xXXFZd',
        'deprecated' => '0',
        'summary' => 'Test summary',
        'title' => 'KPI Testing',
        'category' => 'analyticalDashboard',
        'updated' => '2021-04-23 13:03:48',
        'contributor' => '/gdc/account/profile/4e1e8cacc4989228e0ae531b30853248'
      }
    }
  }
- VISUALIZATION_OBJECT_RAW_DATA =
  {
    'visualizationObject' => {
      'content' => {
        'buckets' => [],
        'properties' => '',
        'visualizationClass' => {}
      },
      'meta' => {
        'author' => '/gdc/account/profile/4e1e8cac228e0ae531b30853248',
        'uri' => '/gdc/md/w2bbq79qeuqzjhwm9xln0865v7yb/obj/70',
        'tags' => '',
        'created' => '2021-04-22 10:23:24',
        'identifier' => 'aabszNAtXFsJ',
        'deprecated' => '0',
        'summary' => 'Summary Testing',
        'title' => 'Dashboard Testing',
        'category' => 'visualizationObject',
        'updated' => '2021-04-23 13:03:48',
        'contributor' => '/gdc/account/profile/4e1e8cacc4989228e0ae531b30853248'
      }
    }
  }
Constants included from GoodData::Model
GoodData::Model::ATTRIBUTE_FOLDER_PREFIX, GoodData::Model::ATTRIBUTE_PREFIX, GoodData::Model::DATE_ATTRIBUTE, GoodData::Model::DATE_ATTRIBUTE_DEFAULT_DISPLAY_FORM, GoodData::Model::DATE_COLUMN_PREFIX, GoodData::Model::DATE_FACT_PREFIX, GoodData::Model::DEFAULT_ATTRIBUTE_DATATYPE, GoodData::Model::DEFAULT_DATE_FORMAT, GoodData::Model::DEFAULT_FACT_DATATYPE, GoodData::Model::DEFAULT_TYPE, GoodData::Model::FACT_COLUMN_PREFIX, GoodData::Model::FACT_FOLDER_PREFIX, GoodData::Model::FACT_PREFIX, GoodData::Model::FIELD_PK, GoodData::Model::FK_SUFFIX, GoodData::Model::GD_DATA_TYPES, GoodData::Model::GD_TYPES, GoodData::Model::LABEL_COLUMN_PREFIX, GoodData::Model::LABEL_PREFIX, GoodData::Model::LDM_CTG, GoodData::Model::LDM_MANAGE_CTG, GoodData::Model::TIME_ATTRIBUTE_PREFIX, GoodData::Model::TIME_COLUMN_PREFIX, GoodData::Model::TIME_FACT_PREFIX
Instance Method Summary
- #check_filters(filters) ⇒ Object
- #clean_up!(client, force, days, opts = {}) ⇒ Object
- #create_process ⇒ Object
- #delete_ads_by_title(title, client, days = 14, force = false) ⇒ Object
- #delete_project_by_title(title, projects, days = 14, force = false) ⇒ Object
- #delete_segment_by_title(title, segments, days = 14, force = false) ⇒ Object
- #destroy_process(process) ⇒ Object
- #get_brick_params(prefix) ⇒ Object
- #get_upgrade_message(is_all, upgrade_datasets) ⇒ Object
- #handle_error(params, log, brick_type, error, error_message) ⇒ Object
- #handle_warning(params, log, brick_type, error, summary_error) ⇒ Object
- #init_client(username, password, server) ⇒ Object
- #m_to_ns(project) ⇒ Object
- #prepare_visualisation_object(rest_client, project) ⇒ Object
- #test_all ⇒ Object
- #test_error ⇒ Object
- #test_info ⇒ Object
- #test_request_id_logging ⇒ Object
- #test_warn ⇒ Object
- #test_webdav_upload(params) ⇒ Object
- #upload_user_filters_csv(user_filters) ⇒ Object
- #user_in_domain(user_name) ⇒ Object
Methods included from GoodData::Model
check_gd_data_type, check_gd_type, column_name, description, merge_dataset_columns, normalize_gd_data_type, title, upload_data, upload_multiple_data
Instance Method Details
#check_filters(filters) ⇒ Object
# File 'spec/unit/models/user_filters_spec.rb', line 9

def check_filters(filters)
  expect(filters.count).to eq(2)

  filter = filters.first
  expect(filter[:login]).to eq("[email protected]")
  expect(filter[:filters].count).to eq(1)
  expect(filter[:filters].first[:values].count).to eq(4)
  expect(filter[:filters].first[:values]).to eq(["USA", "Czech Republic", "Uganda", "Slovakia"])

  filter = filters.last
  expect(filter[:login]).to eq("[email protected]")
  expect(filter[:filters].count).to eq(1)
  expect(filter[:filters].first[:values].count).to eq(1)
end
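The helper asserts on the normalized user-filter structure built elsewhere in the spec. A minimal sketch of that shape follows; the logins are placeholders (the asserted values are redacted above) and the filter values mirror the expectations in the code:

# Structure sketch only; the real spec asserts specific logins.
filters = [
  { login: 'first.user@example.com',
    filters: [{ values: ['USA', 'Czech Republic', 'Uganda', 'Slovakia'] }] },
  { login: 'second.user@example.com',
    filters: [{ values: ['USA'] }] }
]
check_filters(filters)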
#clean_up!(client, force, days, opts = {}) ⇒ Object
# File 'bin/test_projects_cleanup.rb', line 109

def clean_up!(client, force, days, opts = {})
  projects = client.projects

  if opts[:domain_id]
    domain = client.domain(opts[:domain_id])
    # segments id format: {title}_{hostname}_{datetime}
    delete_segment_by_title(/CAR_DEMO_PREMIUM/, domain.segments, days, force)
  end

  delete_project_by_title(/Insurance Demo Master/, projects, days, force)
  delete_project_by_title(/Car Demo Master/, projects, days, force)
  delete_project_by_title(/Insurance Demo Workspace/, projects, days, force)
  delete_project_by_title(/Client With Conflicting LDM/, projects, days, force)
  delete_project_by_title(/Development Project/, projects, days, force)
  delete_project_by_title(/lcm-test-fixture/, projects, days, force)
  delete_project_by_title(/Test MASTER project/, projects, days, force)
  delete_project_by_title(/Test MINOR project/, projects, days, force)
  delete_project_by_title(/^Test project$/, projects, days, force)
  delete_project_by_title(/userprov-e2e-testing/, projects, days, force)
  delete_project_by_title(/load test service project/, projects, days, force)
  delete_project_by_title(/LCM SPEC PROJECT/, projects, days, force)
  delete_project_by_title(/LCM spec Client With Conflicting LDM Changes/, projects, days, force)
  delete_project_by_title(/LCM spec master project/, projects, days, force)
  delete_project_by_title(/users brick load test/, projects, days, force)
  delete_project_by_title(/transfer_processes and #transfer_schedules test/, projects, days, force)
  delete_project_by_title(/DailyUse Project for gooddata-ruby integration tests/, projects, days, force)
  delete_project_by_title(/^New project$/, projects, days, force)
  delete_project_by_title(/RubyGem Dev Week test/, projects, days, force)
  delete_project_by_title(/My project from blueprint/, projects, days, force)

  delete_ads_by_title(/Development ADS/, client, days, force)
  delete_ads_by_title(/Production ADS/, client, days, force)
  delete_ads_by_title(/TEST ADS/, client, days, force)
end
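A sketch of how this cleanup entry point is typically driven together with #init_client below; the environment variable names and the domain id here are illustrative, not taken from the script:

# Illustrative driver; credential sources and domain id are assumptions.
client = init_client(ENV['GD_USERNAME'], ENV['GD_PASSWORD'], 'https://secure.gooddata.com')
clean_up!(client, false, 14, domain_id: 'my-domain') # dry run: only prints what would be deleted
clean_up!(client, true, 14, domain_id: 'my-domain')  # force: deletes stale projects, segments and ADS instances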
#create_process ⇒ Object
# File 'spec/integration/models/process_spec.rb', line 29

def create_process
  # NOTE: the receiver of `.merge` was dropped in the rendered docs; `deploy_options` is an assumed name
  GoodData::Process.deploy(@archive_location, deploy_options.merge(name: 'Test process GRAPH'))
end
#delete_ads_by_title(title, client, days = 14, force = false) ⇒ Object
# File 'bin/test_projects_cleanup.rb', line 81

def delete_ads_by_title(title, client, days = 14, force = false)
  warehouses = client.warehouses
  return if warehouses.empty?

  deleted = 0
  warehouses.each do |warehouse|
    warehouse_title = warehouse.title
    next unless warehouse_title.match(title)

    dead_line = Time.now - days * 60 * 60 * 24
    created = Time.parse(warehouse.data["created"])
    next if created > dead_line

    begin
      if force
        puts "Deleting: #{warehouse_title} - #{created}"
        warehouse.delete
      else
        puts "Would delete: #{warehouse_title} - #{created}"
      end
      deleted += 1
    rescue StandardError => e
      puts "Failed to delete #{warehouse_title}: #{e}"
    end
  end
  puts "#{deleted} ADS instances with title \"#{title}\" #{'would be ' unless force}deleted."
end
#delete_project_by_title(title, projects, days = 14, force = false) ⇒ Object
# File 'bin/test_projects_cleanup.rb', line 59

def delete_project_by_title(title, projects, days = 14, force = false)
  dead_line = Time.now - days * 60 * 60 * 24
  filtered_projects = projects.select do |p|
    p.title.match(title) && p.created < dead_line
  end

  filtered_projects.each do |project|
    begin
      if force
        puts "Deleting: #{project.pid} - #{project.title} - #{project.created}"
        project_add = project.add
        project_add && project_add.output_stage && project_add.output_stage.delete
        project.delete
      else
        puts "Would delete: #{project.pid} - #{project.title} - #{project.created}"
      end
    rescue StandardError => ex
      puts "Failed to delete project #{project.pid}, reason: #{ex}"
    end
  end
  puts "#{filtered_projects.length} projects matching \"#{title}\" #{'would be ' unless force}deleted."
end
#delete_segment_by_title(title, segments, days = 14, force = false) ⇒ Object
# File 'bin/test_projects_cleanup.rb', line 27

def delete_segment_by_title(title, segments, days = 14, force = false)
  return if segments.empty?

  dead_line = Time.now - days * 60 * 60 * 24
  filtered_segments = segments.select do |s|
    if s.id.match(title) && s.id.length > 14 && s.id[s.id.length - 14..-1]
      segment_created = s.id[s.id.length - 14..-1]
      created = Time.parse(segment_created) if segment_created
      s if created.year >= Time.new.year && created < dead_line
    end
  end

  filtered_segments.each do |segment|
    begin
      if force
        segment.clients.each do |segment_client|
          GoodData.logger.info("Deleting segment: #{segment.id} - client: #{segment_client.client_id}")
          segment_client.dissociate
        end
        puts "Deleting segment: #{segment.id}"
        segment && segment.delete(force: true)
      else
        puts "Would delete segment: #{segment.id}"
      end
    rescue RuntimeError, StandardError => ex
      puts "Failed to delete segment #{segment.id}, reason: #{ex}"
    end
  end
end
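The age check above relies on the segment id ending in a 14-character timestamp (the `{title}_{hostname}_{datetime}` format noted in #clean_up!). A worked example with an illustrative id:

# Illustrative id; only the trailing 14 characters matter for the age check.
id = 'CAR_DEMO_PREMIUM_example-host_20210422102324'
segment_created = id[id.length - 14..-1] # => "20210422102324"
Time.parse(segment_created)              # => 2021-04-22 10:23:24, compared with dead_line above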
#destroy_process(process) ⇒ Object
# File 'spec/integration/models/process_spec.rb', line 33

def destroy_process(process)
  process.delete if process
end
#get_brick_params(prefix) ⇒ Object
# File 'bin/run_brick.rb', line 16

def get_brick_params(prefix)
  ENV.select { |k,| k.to_s.match(/^#{prefix}.*/) }.map { |k, v| [k.slice(prefix.length..-1), v] }.to_h
end
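Combined with the BRICK_PARAM_ and HIDDEN_BRICK_PARAM_ prefixes defined above, this turns prefixed environment variables into plain parameter hashes. An illustrative run (the variable names are made up):

# With e.g. BRICK_PARAM_gdc_project_id=p1 and BRICK_PARAM_organization=acme in ENV:
get_brick_params(BRICK_PARAM_PREFIX)
# => { "gdc_project_id" => "p1", "organization" => "acme" }
get_brick_params(HIDDEN_BRICK_PARAMS_PREFIX)
# => {}  (no HIDDEN_BRICK_PARAM_* variables set in this example)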
#get_upgrade_message(is_all, upgrade_datasets) ⇒ Object
# File 'spec/integration/blueprint_upgrade_spec.rb', line 93

def get_upgrade_message(is_all, upgrade_datasets)
  if is_all
    {
      upgrade: {
        dateDatasets: {
          upgrade: "all"
        }
      }
    }
  else
    {
      upgrade: {
        dateDatasets: {
          upgrade: "exact",
          datasets: upgrade_datasets
        }
      }
    }
  end
end
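The two branches produce the payloads below; the dataset identifiers in the second call are illustrative:

get_upgrade_message(true, [])
# => { upgrade: { dateDatasets: { upgrade: "all" } } }
get_upgrade_message(false, %w[closed.dataset.dt created.dataset.dt])
# => { upgrade: { dateDatasets: { upgrade: "exact", datasets: ["closed.dataset.dt", "created.dataset.dt"] } } }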
#handle_error(params, log, brick_type, error, error_message) ⇒ Object
# File 'bin/run_brick.rb', line 25

def handle_error(params, log, brick_type, error, error_message)
  execution_log = GoodData.logger
  execution_log.error "Execution failed. Error: #{error}" unless execution_log.nil?
  GoodData::Bricks::ExecutionResultMiddleware.update_execution_result(GoodData::Bricks::ExecutionStatus::ERROR, error_message)
  log.error "action=#{brick_type}_execution status=failed commit_hash=#{params['GOODDATA_RUBY_COMMIT']} execution_id=#{params['GDC_EXECUTION_ID']} exception=#{error}"
  raise
end
#handle_warning(params, log, brick_type, error, summary_error) ⇒ Object
# File 'bin/run_brick.rb', line 20

def handle_warning(params, log, brick_type, error, summary_error)
  log.error "action=#{brick_type}_execution status=warning commit_hash=#{params['GOODDATA_RUBY_COMMIT']} execution_id=#{params['GDC_EXECUTION_ID']} exception=#{error}"
  GoodData::Bricks::ExecutionResultMiddleware.update_execution_result(GoodData::Bricks::ExecutionStatus::WARNING, summary_error)
end
#init_client(username, password, server) ⇒ Object
# File 'bin/test_projects_cleanup.rb', line 144

def init_client(username, password, server)
  GoodData.connect(
    username,
    password,
    server: server,
    verify_ssl: false,
    timeout: nil
  )
end
#m_to_ns(project) ⇒ Object
# File 'spec/lcm/slow/m_to_n_spec.rb', line 97

def m_to_ns(project)
  # NOTE: the local variable name was dropped in the rendered docs; `options` is an assumed name
  options = { include_ca: true }
  result = project.client.get("/gdc/projects/#{project.pid}/model/view",
                              params: { includeDeprecated: true, includeGrain: true, includeCA: options[:include_ca] })
  polling_url = result['asyncTask']['link']['poll']
  model = project.client.poll_on_code(polling_url, options)['projectModelView']['model']['projectModel']
  %w[datasets dateDimensions].map do |a|
    next [] if model[a].nil?

    model[a].map do |d|
      d[a.chomp('s')]['bridges'].nil? ? [] : [d[a.chomp('s')]['identifier'], d[a.chomp('s')]['bridges'].join]
    end.flatten
  end
end
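The helper returns a pair of flat arrays, one for datasets and one for dateDimensions, listing the identifier and joined bridge identifiers of every object that defines bridges. An illustrative result (identifiers are made up):

m_to_ns(project)
# => [["dataset.opportunities", "dataset.accounts"], []]
#    datasets with bridges come first, dateDimensions with bridges second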
#prepare_visualisation_object(rest_client, project) ⇒ Object
# File 'spec/lcm/slow/visualisation_object_transfer_spec.rb', line 8

def prepare_visualisation_object(rest_client, project)
  visualization_classes = GoodData::MdObject.query('visualizationClass', GoodData::MdObject, client: rest_client, project: project)
  visualization_class = visualization_classes.first
  attribute = project.attributes.first
  visualization_data = {
    visualizationObject: {
      content: {
        visualizationClass: {
          uri: visualization_class.uri
        },
        buckets: [{
          localIdentifier: "measure",
          items: [{
            measure: {
              localIdentifier: "M1",
              title: "Count of Account",
              format: "#,##0.00",
              definition: {
                measureDefinition: {
                  item: {
                    uri: attribute.uri
                  },
                  aggregation: "count"
                }
              }
            }
          }]
        }]
      },
      meta: {}
    }
  }

  v = GoodData::MdObject.new(visualization_data.deep_stringify_keys)
  v.title = 'Foo'
  v.project = project
  v.client = rest_client
  v.save
end
#test_all ⇒ Object
# File 'spec/integration/core/logging_spec.rb', line 62

def test_all
  test_error
  test_info
  test_warn
  test_request_id_logging
end
#test_error ⇒ Object
# File 'spec/integration/core/logging_spec.rb', line 42

def test_error
  GoodData.logger.error TEST_MESSAGE
end
#test_info ⇒ Object
# File 'spec/integration/core/logging_spec.rb', line 46

def test_info
  GoodData.logger.info TEST_MESSAGE
end
#test_request_id_logging ⇒ Object
# File 'spec/integration/core/logging_spec.rb', line 54

def test_request_id_logging
  @client = ConnectionHelper.create_default_connection
  id = @client.generate_request_id
  GoodData.logger.info "Request id: #{id} Doing something very useful"
  @client.get('/gdc/md', :request_id => id)
  id
end
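The point of the request id is correlation: reusing one id across related calls lets client-side log lines be matched with the corresponding platform requests. A small sketch:

id = @client.generate_request_id
GoodData.logger.info "Request id: #{id} syncing metadata"
@client.get('/gdc/md', :request_id => id)
@client.get('/gdc/md', :request_id => id) # same id ties both requests together in the logs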
#test_warn ⇒ Object
# File 'spec/integration/core/logging_spec.rb', line 50

def test_warn
  GoodData.logger.warn TEST_MESSAGE
end
#test_webdav_upload(params) ⇒ Object
# File 'spec/integration/rest_spec.rb', line 27

def test_webdav_upload(params)
  GoodData.with_project(@project, :client => @client) do
    # use current timestamp as a directory name on webdav
    dir = params[:no_dir] ? nil : 'testdir'
    dir = "#{dir}/#{dir}" if params[:nested_dir]
    dir = "#{dir}/" if params[:slash_in_dir]

    # local file path
    path = 'spec/data/test-ci-data.csv'
    if params[:special_chars]
      path = 'spec/data/abc-16:55:29+ha#he.csv'
    end
    path = File.expand_path(path) if params[:absolute_path]

    # upload it there
    upload_method = GoodData.method(params[:upload_method])
    upload_method.call(path, directory: dir)

    # download it from there
    temp_file = Tempfile.new('foo.csv')
    expect(temp_file.size).to be == 0

    download_method = GoodData.method(params[:download_method])
    file_basename = File.basename(path)
    file_basename = "NOTTHERE_#{file_basename}" if params[:unknown_file]

    download_block = proc do
      if params[:path_in_file]
        # pass the dir directly in the first param
        # e.g. GoodData.download_from_project_webdav('1234/test-ci-data.csv', '/tmp/myfile.csv')
        download_method.call(File.join(dir, file_basename).to_s, temp_file)
      else
        # pass the dir in the :directory option
        # e.g. GoodData.download_from_project_webdav('test-ci-data.csv', '/tmp/myfile.csv', :directory => '1234')
        download_method.call(file_basename, temp_file, directory: dir)
      end
    end

    # if it's unknown it should raise an error, otherwise it should download the right stuff
    if params[:unknown_file]
      expect do
        download_block.call
      end.to raise_error(ArgumentError)
    else
      download_block.call
      expect(temp_file.size).to be > 0
      # expect the contents of the original file and the downloaded file are the same
      expect(IO.read(temp_file)).to be == IO.read(path)
    end
  end
end
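A sketch of how the spec drives this helper. The download symbol follows the GoodData.download_from_project_webdav naming used in the comments above; the matching upload name is an assumption:

test_webdav_upload(
  upload_method: :upload_to_project_webdav,       # assumed counterpart of the download method
  download_method: :download_from_project_webdav,
  nested_dir: true,                               # exercise the "testdir/testdir" case
  path_in_file: false                             # pass the directory via the :directory option
)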
#upload_user_filters_csv(user_filters) ⇒ Object
# File 'spec/lcm/userprov/user_filters_brick_spec.rb', line 7

def upload_user_filters_csv(user_filters)
  filters_csv = ConfigurationHelper.csv_from_hashes(user_filters)
  Support::S3Helper.upload_file(filters_csv, @test_context[:s3_key])
end
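An illustrative call; the hash keys simply become CSV columns, so the names below are placeholders rather than the columns the user filters brick actually expects:

upload_user_filters_csv(
  [
    { login: 'first.user@example.com', country: 'US' },
    { login: 'second.user@example.com', country: 'CZ' }
  ]
)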
#user_in_domain(user_name) ⇒ Object
# File 'spec/lcm/userprov/users_brick_spec.rb', line 7

def user_in_domain(user_name)
  domain = @rest_client.domain(LcmConnectionHelper.environment[:prod_organization])
  domain.find_user_by_login(user_name)
end
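An illustrative lookup; the helper returns the domain user for the given login, or nil when the user is not provisioned:

user = user_in_domain('first.user@example.com')
puts user ? "#{user.login} is provisioned" : 'not in domain'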