Moving puppet-midonet to OpenStack gerrit

Modify the .gitreview file, since this project now belongs to OpenStack.

Modify the Rakefile to adapt it to OpenStack Gerrit's tests.

Change-Id: I91e6a433dbbef61db59f9dfe1609a7368bf55c11
Jaume Devesa 2016-01-12 12:40:46 +01:00
parent a4bb534be3
commit 20b854ef51
19 changed files with 196 additions and 507 deletions

.gitignore
View File

@@ -1,59 +1,16 @@
*.gem
*.rbc
/.config
/coverage/
/InstalledFiles
**/pkg/
/spec/reports/
/spec/fixtures/modules/*
/spec/fixtures/manifests/*
/test/tmp/
/test/version_tmp/
/tmp/
## Specific to RubyMotion:
.dat*
.repl_history
build/
## Documentation cache and generated files:
/.yardoc/
/_yardoc/
/doc/
/rdoc/
## Environment normalisation:
/.bundle/
/lib/bundler/man/
# for a library or gem, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
pkg/
Gemfile.lock
Puppetfile.lock
# .ruby-version
# .ruby-gemset
# unless supporting rvm < 1.11.0 or doing something fancy, ignore this:
.rvmrc
# DS_Store
*/.DS_Store
# Configuration files for cloud9.io
*/.c9/
# Kitchen files
.kitchen/
# Librarian files
.librarian/
.tmp/
# Vim files
**.swp
# Ignore vagrant-generated files
.vagrant
# Beaker
log/
vendor/
spec/fixtures/
.vagrant/
.bundle/
coverage/
.idea/
*.swp
*.iml
openstack/
<% if ! @configs['paths'].nil? -%>
<% @configs['paths'].each do |path| -%>
<%= path %>
<% end -%>
<% end -%>

View File

@@ -1,4 +1,4 @@
[gerrit]
host=review.gerrithub.io
host=review.openstack.org
port=29418
project=midonet/puppet-midonet.git
project=openstack/puppet-midonet.git
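
With this change applied, the resulting .gitreview (assembled from the hunk
above) reads:

    [gerrit]
    host=review.openstack.org
    port=29418
    project=openstack/puppet-midonet.git

so running git review submits changes to the OpenStack Gerrit rather than
GerritHub.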

View File

@@ -11,7 +11,7 @@ def location_for(place, fake_version = nil)
end
group :development, :unit_tests do
gem 'rspec-puppet', '~> 2.1', :require => false
gem 'rspec-puppet', '~> 2.2', :require => false
gem 'rspec-core', '3.1.7', :require => false
gem 'puppetlabs_spec_helper', '>= 0.1.0', :require => false
gem 'puppet-lint', '>= 0.3.2', :require => false
@@ -32,6 +32,7 @@ group :system_tests do
gem 'serverspec', :require => false
gem 'beaker-puppet_install_helper', :require => false
gem 'r10k', :require => false
end
if facterversion = ENV['FACTER_GEM_VERSION']

View File

@@ -3,13 +3,8 @@
forge "https://forgeapi.puppetlabs.com"
metadata
mod 'midonet-zookeeper',
:git => 'http://github.com/midonet/puppet-zookeeper',
:ref => 'master'
mod 'midonet-cassandra',
:git => 'http://github.com/midonet/puppet-cassandra',
:ref => 'master'
mod 'puppetlabs/inifile'
mod 'puppetlabs/apt'
mod 'puppetlabs/java'
mod 'puppetlabs/tomcat'
mod 'puppetlabs/stdlib'

View File

@@ -1,8 +1,86 @@
require 'puppetlabs_spec_helper/rake_tasks'
require 'puppet-lint/tasks/puppet-lint'
require 'puppet-syntax/tasks/puppet-syntax'
require 'json'
PuppetLint.configuration.fail_on_warnings = true
PuppetLint.configuration.send('relative')
PuppetLint.configuration.send('disable_80chars')
PuppetLint.configuration.send('disable_autoloader_layout')
PuppetLint.configuration.ignore_paths = ["spec/**/*.pp", "pkg/**/*.pp"]
modname = JSON.parse(open('metadata.json').read)['name'].split('-')[1]
PuppetSyntax.exclude_paths ||= []
PuppetSyntax.exclude_paths << "spec/fixtures/**/*"
PuppetSyntax.exclude_paths << "pkg/**/*"
PuppetSyntax.exclude_paths << "vendor/**/*"
Rake::Task[:lint].clear
PuppetLint::RakeTask.new :lint do |config|
config.ignore_paths = ["spec/**/*.pp", "vendor/**/*.pp"]
config.fail_on_warnings = true
config.log_format = '%{path}:%{linenumber}:%{KIND}: %{message}'
config.disable_checks = ["80chars", "class_inherits_from_params_class", "only_variable_string"]
end
desc "Run acceptance tests"
RSpec::Core::RakeTask.new(:acceptance) do |t|
t.pattern = 'spec/acceptance'
end
Rake::Task[:spec_prep].clear
desc 'Create the fixtures directory'
task :spec_prep do
# Allow to test the module with custom dependencies
# like you could do with .fixtures file
ENV['PUPPETFILE']="#{Dir.pwd}/Puppetfile"
if ENV['PUPPETFILE']
puppetfile = ENV['PUPPETFILE']
if ENV['GEM_HOME']
gem_home = ENV['GEM_HOME']
gem_bin_dir = "#{gem_home}" + '/bin/'
else
gem_bin_dir = ''
end
r10k = ['env']
r10k += ["PUPPETFILE=#{puppetfile}"]
r10k += ["PUPPETFILE_DIR=#{Dir.pwd}/spec/fixtures/modules"]
r10k += ["#{gem_bin_dir}r10k"]
r10k += ['puppetfile', 'install', '-v']
sh(*r10k)
else
# otherwise, use official OpenStack Puppetfile
zuul_ref = ENV['ZUUL_REF']
zuul_branch = ENV['ZUUL_BRANCH']
zuul_url = ENV['ZUUL_URL']
repo = 'openstack/puppet-openstack-integration'
rm_rf(repo)
if File.exists?('/usr/zuul-env/bin/zuul-cloner')
zuul_clone_cmd = ['/usr/zuul-env/bin/zuul-cloner']
zuul_clone_cmd += ['--cache-dir', '/opt/git']
zuul_clone_cmd += ['--zuul-ref', "#{zuul_ref}"]
zuul_clone_cmd += ['--zuul-branch', "#{zuul_branch}"]
zuul_clone_cmd += ['--zuul-url', "#{zuul_url}"]
zuul_clone_cmd += ['git://git.openstack.org', "#{repo}"]
sh(*zuul_clone_cmd)
else
sh("git clone https://git.openstack.org/#{repo} #{repo}")
end
script = ['env']
script += ["PUPPETFILE_DIR=#{Dir.pwd}/spec/fixtures/modules"]
script += ["ZUUL_REF=#{zuul_ref}"]
script += ["ZUUL_BRANCH=#{zuul_branch}"]
script += ["ZUUL_URL=#{zuul_url}"]
script += ['bash', "#{repo}/install_modules_unit.sh"]
sh(*script)
end
rm_rf("spec/fixtures/modules/#{modname}")
ln_s(Dir.pwd, "spec/fixtures/modules/#{modname}")
mkdir_p('spec/fixtures/manifests')
touch('spec/fixtures/manifests/site.pp')
end
Rake::Task[:spec_clean].clear
desc 'Clean up the fixtures directory'
task :spec_clean do
rm_rf('spec/fixtures/modules')
rm_rf('openstack')
if File.zero?('spec/fixtures/manifests/site.pp')
rm_f('spec/fixtures/manifests/site.pp')
end
end
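
Note: with a local Puppetfile present, the r10k argument array composed in
spec_prep above boils down to a single shell-out roughly like the sketch
below (illustrative only; the r10k binary gets a GEM_HOME/bin/ prefix when
GEM_HOME is set):

    # Rough equivalent of sh(*r10k) as assembled in spec_prep
    sh('env',
       "PUPPETFILE=#{Dir.pwd}/Puppetfile",
       "PUPPETFILE_DIR=#{Dir.pwd}/spec/fixtures/modules",
       'r10k', 'puppetfile', 'install', '-v')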

View File

@@ -1,130 +0,0 @@
# == Class: ::midonet::cassandra
#
# Install and run the cassandra component
#
# === Parameters
#
# [*seeds*]
# Full list of cassandra seeds that belong to a cluster.
# [*seed_address*]
# IP address to bind for this instance. (Must belong to the
# list of seeds.)
# [*storage_port*]
# Inter-node cluster communication port (defaulted to 7000).
# [*ssl_storage_port*]
# SSL Inter-node cluster communication port (defaulted to 7001).
# [*client_port*]
# Cassandra client port (defaulted to 9042).
# [*client_port_thrift*]
# Cassandra client port thrift (defaulted to 9160).
#
#
# === Examples
#
# * The easiest way to run the class is:
#
# include ::cassandra
#
# And a cassandra single-machine cluster will be installed, binding the
# 'localhost' address.
#
# * Run a single-machine cluster but binding a hostname or another address
# would be:
#
# class {'::midonet::cassandra':
# seeds => ['192.168.2.2'],
# seed_address => '192.168.2.2',
# storage_port => 7000,
# ssl_storage_port => 7001,
# client_port => 9042,
# client_port_thrift => 9042,
# }
#
# * All the ports must be configured the same in all the nodes in the cluster.
#
# * For a cluster of nodes, use the same 'seeds' value, but change the
# seed_address of each node:
#
# - On node1
# class {'::midonet::cassandra':
# seeds => ['node_1', 'node_2', 'node_3'],
# seed_address => 'node_1',
# storage_port => 7000,
# ssl_storage_port => 7001,
# client_port => 9042,
# client_port_thrift => 9042,
# }
# - On node2
# class {'::midonet::cassandra':
# seeds => ['node_1', 'node_2', 'node_3'],
# seed_address => 'node_2',
# storage_port => 7000,
# ssl_storage_port => 7001,
# client_port => 9042,
# client_port_thrift => 9042,
# }
# - On node3
# class {'::midonet::cassandra':
# seeds => ['node_1', 'node_2', 'node_3'],
# seed_address => 'node_3',
# storage_port => 7000,
# ssl_storage_port => 7001,
# client_port => 9042,
# client_port_thrift => 9042,
# }
#
# NOTE: node_X can be either hostnames or ip addresses
# You can alternatively use the Hiera.yaml style:
#
# ::midonet::cassandra::seeds:
# - node_1
# - node_2
# - node_3
# ::midonet::cassandra::seed_address: 'node_1'
#
# === Authors
#
# Midonet (http://midonet.org)
#
# === Copyright
#
# Copyright (c) 2015 Midokura SARL, All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
class midonet::cassandra (
$seeds,
$seed_address,
$storage_port = '7000',
$ssl_storage_port = '7001',
$client_port = '9042',
$client_port_thrift = '9160',
$conf_dir,
$pid_dir,
$service_path)
{
class {'::cassandra':
seeds => $seeds,
seed_address => $seed_address,
storage_port => $storage_port,
ssl_storage_port => $ssl_storage_port,
client_port => $client_port,
client_port_thrift => $client_port_thrift,
conf_dir => $conf_dir,
pid_dir => $pid_dir,
service_path => $service_path
}
}

View File

@@ -1,103 +0,0 @@
# == Class: ::midonet::zookeeper
#
# Install and run the zookeeper module.
#
# === Parameters
#
# [*servers*]
# Full list of ZooKeeper servers that run in the same cluster.
# [*server_id*]
# Identifier of this ZooKeeper server in the cluster.
# [*client_ip*]
# IP address on which ZooKeeper listens for client requests
#
# === Examples
#
# The easiest way to run the class is:
#
# include ::midonet::zookeeper
#
# And puppet will install a local zookeeper without clustering. For a
# clustered zookeeper, define your puppet site as follows:
#
#
# ... on Node1
#
# class {'::midonet::zookeeper':
# servers => [{'id' => 1,
# 'host' => 'node_1'},
# {'id' => 2,
# 'host' => 'node_2'},
# {'id' => 3,
# 'host' => 'node_3'}],
# server_id => 1}
#
# ... on Node2
#
# class {'::midonet::zookeeper':
# servers => [{'id' => 1,
# 'host' => 'node_1'},
# {'id' => 2,
# 'host' => 'node_2'},
# {'id' => 3,
# 'host' => 'node_3'}],
# server_id => 2}
#
# ... on Node3
#
# class {'::midonet::zookeeper':
# servers => [{'id' => 1,
# 'host' => 'node_1'},
# {'id' => 2,
# 'host' => 'node_2'},
# {'id' => 3,
# 'host' => 'node_3'}],
# server_id => 3}
#
# defining the same servers for each puppet node, but using a different
# server_id for each one. NOTE: node_X can be hostnames or IP addresses.
#
# you can alternatively use the Hiera.yaml style
#
# ::midonet::zookeeper::servers:
# - id: 1
# host: 'node_1'
# - id: 2
# host: 'node_2'
# - id: 3
# host: 'node_3'
# ::midonet::zookeeper::server_id: '1'
#
# This module is a passthrough to the existing 'deric-zookeeper' module:
# https://forge.puppetlabs.com/deric/zookeeper
#
# === Authors
#
# Midonet (http://midonet.org)
#
# === Copyright
#
# Copyright (c) 2015 Midokura SARL, All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
class midonet::zookeeper($servers, $server_id, $client_ip=$::ipaddress_eth0) {
class {'::zookeeper':
servers => zookeeper_servers($servers),
id => $server_id,
cfg_dir => '/etc/zookeeper',
client_ip => $client_ip
}
}

View File

@@ -8,10 +8,7 @@
"project_page": "https://github.com/midonet/puppet-midonet",
"issues_url": "https://midonet.atlassian.net/projects/PUP",
"dependencies": [
{ "name":"ripienaar-module_data","version_requirement":">=0.0.3" },
{ "name":"puppetlabs-inifile", "version_requirement": ">=1.0.0 <2.0.0" },
{ "name":"deric-zookeeper","version_requirement":">=0.3.9" },
{ "name":"midonet-cassandra","version_requirement":">=1.0.0" },
{ "name":"puppetlabs-apt","version_requirement":">=1.7.0 <2.0.0" },
{ "name":"puppetlabs-java","version_requirement":">=1.3.0" },
{ "name":"puppetlabs-tomcat","version_requirement":">=1.2.0" },

View File

@@ -1,3 +1,4 @@
#TODO(karume) Enable this test ASAP! Only disabled because of
require 'spec_helper_acceptance'
describe 'midonet all-in-one' do

View File

@@ -1,8 +1,11 @@
HOSTS:
centos-70-x64:
platform: centos-70-x64
image: midonet/centos:centos7
hypervisor: docker
docker_cmd: '["/sbin/init"]'
CONFIG:
HOSTS:
centos-server-70-x64:
roles:
- master
platform: el-7-x86_64
box: puppetlabs/centos-7.0-64-nocm
box_url: https://vagrantcloud.com/puppetlabs/centos-7.0-64-nocm
hypervisor: vagrant
CONFIG:
log_level: debug
type: foss

View File

@@ -1,8 +1,10 @@
HOSTS:
ubuntu-14-04:
platform: ubuntu-14.04-x64
image: midonet/ubuntu:14.04
hypervisor: docker
docker_cmd: '["/sbin/init"]'
CONFIG:
ubuntu-server-14.04-amd64:
roles:
- master
platform: ubuntu-14.04-amd64
box: puppetlabs/ubuntu-14.04-64-nocm
box_url: https://vagrantcloud.com/puppetlabs/ubuntu-14.04-64-nocm
hypervisor: vagrant
CONFIG:
type: foss

View File

@@ -0,0 +1,10 @@
HOSTS:
centos-70-x64:
roles:
- master
platform: el-7-x86_64
hypervisor: none
ip: 127.0.0.1
CONFIG:
type: foss
set_env: false

View File

@@ -0,0 +1,10 @@
HOSTS:
ubuntu-14.04-amd64:
roles:
- master
platform: ubuntu-14.04-amd64
hypervisor: none
ip: 127.0.0.1
CONFIG:
type: foss
set_env: false

View File

@@ -1,8 +1,11 @@
HOSTS:
ubuntu-14-04:
platform: ubuntu-14.04-x64
image: midonet/ubuntu:14.04
hypervisor: docker
docker_cmd: '["/sbin/init"]'
CONFIG:
HOSTS:
ubuntu-server-14.04-amd64:
roles:
- master
platform: ubuntu-14.04-amd64
box: puppetlabs/ubuntu-14.04-64-nocm
box_url: https://vagrantcloud.com/puppetlabs/ubuntu-14.04-64-nocm
hypervisor: vagrant
CONFIG:
log_level: debug
type: foss

View File

@@ -1,76 +0,0 @@
require 'spec_helper'
describe 'midonet::cassandra' do
let :params do
{
:seeds => ['192.168.7.2', '192.168.7.3', '192.168.7.4'],
:seed_address => '192.168.7.2'
}
end
shared_examples_for 'cluster cassandra' do
before do
params.merge!(os_params)
end
it 'should call cassandra module properly' do
is_expected.to contain_class('cassandra').with({
'seeds' => params[:seeds],
'seed_address' => params[:seed_address],
'storage_port' => '7000',
'ssl_storage_port' => '7001',
'client_port' => '9042',
'client_port_thrift' => '9160'
})
end
end
context 'on Debian' do
let :facts do
{
:osfamily => 'Debian',
:operatingsystem => 'Ubuntu',
:lsbdistrelease => '14.04',
:lsbdistid => 'Ubuntu',
:lsbdistcodename => 'trusty',
:ipaddress => '127.0.0.1',
:hostname => 'test.puppet'
}
end
let :os_params do
{
:pid_dir => '/var/run/cassandra',
:conf_dir => '/etc/cassandra',
:service_path => '/usr/sbin'
}
end
it_configures 'cluster cassandra'
end
context 'on RedHat' do
let :facts do
{
:osfamily => 'RedHat',
:operatingsystem => 'CentOS',
:operatingsystemmajrelease => 7,
:ipaddress => '127.0.0.1',
:hostname => 'test.puppet'
}
end
let :os_params do
{
:pid_dir => '/var/run/cassandra',
:conf_dir => '/etc/cassandra/default.conf',
:service_path => '/sbin'
}
end
it_configures 'cluster cassandra'
end
end

View File

@@ -1,58 +0,0 @@
require 'spec_helper'
describe 'midonet::zookeeper' do
shared_examples_for 'cluster zookeeper' do
let :params do
{
:servers => [{"id" => 1, "host" => 'node_1'},
{"id" => 3, "host" => 'node_2'},
{"id" => 2, "host" => 'node_3'}],
:server_id => 3
}
end
let :zookeeper_params do
{
# Please note the output is sorted by input 'ids'
:servers => ['node_1', 'node_3', 'node_2'],
:id => 3
}
end
it 'should call deric/zookeeper properly' do
is_expected.to contain_class('zookeeper').with({
'servers' => zookeeper_params[:servers],
'id' => zookeeper_params[:id],
'client_ip' => '127.0.0.1'
})
end
end
context 'on Debian' do
let :facts do
{
:osfamily => 'Debian',
:operatingsystem => 'Ubuntu',
:lsbdistrelease => '14.04',
:lsbdistid => 'Ubuntu',
:ipaddress => '127.0.0.1',
:hostname => 'test.puppet'
}
end
it_configures 'cluster zookeeper'
end
context 'on RedHat' do
let :facts do
{
:osfamily => 'RedHat',
:operatingsystemmajrelease => 7,
:ipaddress => '127.0.0.1',
:hostname => 'test.puppet'
}
end
it_configures 'cluster zookeeper'
end
end

View File

@@ -1,13 +0,0 @@
require 'spec_helper'
input = [{'id' => '1', 'host' => 'host1'},
{'id' => '2', 'host' => 'host2'}]
output = ['host1', 'host2']
input2 = {'id' => '1', 'host' => 'host1'}
output2 = ['host1']
describe 'zookeeper_servers' do
it { is_expected.to run.with_params(input).and_return(output) }
it { is_expected.to run.with_params(input2).and_return(output2) }
end
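
For context, the behaviour exercised by the expectations above can be
sketched as follows (an illustrative reconstruction, not the module's actual
parser function): the helper accepts a hash or an array of
{'id' => ..., 'host' => ...} entries and returns the host names ordered by id.

    # Illustrative sketch only; the real zookeeper_servers function may differ.
    def zookeeper_servers(servers)
      list = servers.is_a?(Array) ? servers : [servers]
      list.sort_by { |server| server['id'].to_i }.map { |server| server['host'] }
    end

    zookeeper_servers([{ 'id' => '1', 'host' => 'host1' },
                       { 'id' => '2', 'host' => 'host2' }])
    # => ['host1', 'host2']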

View File

@@ -1,23 +1,14 @@
require 'rspec-puppet'
require 'puppetlabs_spec_helper/module_spec_helper'
fixture_path = File.expand_path(File.join(__FILE__, '..', 'fixtures'))
RSpec.configure do |c|
c.alias_it_should_behave_like_to :it_configures, 'configures'
c.alias_it_should_behave_like_to :it_raises, 'raises'
c.module_path = File.join(fixture_path, 'modules')
c.manifest_dir = File.join(fixture_path, 'manifests')
c.before :each do
@default_facts = { :os_service_default => '<SERVICE DEFAULT>' }
end
c.mock_with :rspec do |mock_c|
mock_c = :expect
end
end
def make_site_pp(pp, path = File.join(master['puppetpath'], 'manifests'))
on master, "mkdir -p #{path}"
create_remote_file(master, File.join(path, "site.pp"), pp)
on master, "chown -R #{master['user']}:#{master['group']} #{path}"
on master, "chmod -R 0755 #{path}"
on master, "service #{master['puppetservice']} restart"
end
at_exit { RSpec::Puppet::Coverage.report! }

View File

@@ -3,11 +3,10 @@ require 'beaker/puppet_install_helper'
run_puppet_install_helper
UNSUPPORTED_PLATFORMS = ['Suse','windows','AIX','Solaris']
RSpec.configure do |c|
# Project root
proj_root = File.expand_path(File.join(File.dirname(__FILE__), '..'))
modname = JSON.parse(open('metadata.json').read)['name'].split('-')[1]
# Readable test descriptions
c.formatter = :documentation
@@ -16,20 +15,42 @@ RSpec.configure do |c|
c.before :suite do
# Install module and dependencies
hosts.each do |host|
copy_module_to(host, :source => proj_root, :module_name => 'midonet')
scp_to(host, proj_root + '/data/hiera.yaml', "#{default['puppetpath']}/hiera.yaml")
on host, 'mkdir -p /var/lib/hiera'
scp_to(host, proj_root + '/data/common.yaml', "/var/lib/hiera")
scp_to(host, proj_root + '/data/osfamily', "/var/lib/hiera")
on host, puppet('module install ripienaar-module_data'), {:acceptable_exit_codes => [0,1] }
on host, puppet('module install puppetlabs-stdlib --version 4.5.0'), { :acceptable_exit_codes => [0,1] }
on host, puppet('module install deric-zookeeper'), {:acceptable_exit_codes => [0,1] }
on host, puppet('module install midonet-cassandra'), {:acceptable_exit_codes => [0,1] }
on host, puppet('module install puppetlabs-inifile'), {:acceptable_exit_codes => [0,1] }
on host, puppet('module install puppetlabs-apt'), {:acceptable_exit_codes => [0,1] }
on host, puppet('module install puppetlabs-java'), {:acceptable_exit_codes => [0,1] }
on host, puppet('module install puppetlabs-tomcat'), {:acceptable_exit_codes => [0,1] }
# install git
install_package host, 'git'
zuul_ref = ENV['ZUUL_REF']
zuul_branch = ENV['ZUUL_BRANCH']
zuul_url = ENV['ZUUL_URL']
repo = 'openstack/puppet-openstack-integration'
# Start out with clean moduledir, don't trust r10k to purge it
on host, "rm -rf /etc/puppet/modules/*"
# Install dependent modules via git or zuul
r = on host, "test -e /usr/zuul-env/bin/zuul-cloner", { :acceptable_exit_codes => [0,1] }
if r.exit_code == 0
zuul_clone_cmd = '/usr/zuul-env/bin/zuul-cloner '
zuul_clone_cmd += '--cache-dir /opt/git '
zuul_clone_cmd += "--zuul-ref #{zuul_ref} "
zuul_clone_cmd += "--zuul-branch #{zuul_branch} "
zuul_clone_cmd += "--zuul-url #{zuul_url} "
zuul_clone_cmd += "git://git.openstack.org #{repo}"
on host, zuul_clone_cmd
else
on host, "git clone https://git.openstack.org/#{repo} #{repo}"
end
on host, "ZUUL_REF=#{zuul_ref} ZUUL_BRANCH=#{zuul_branch} ZUUL_URL=#{zuul_url} bash #{repo}/install_modules.sh"
# Install the module being tested
on host, "rm -fr /etc/puppet/modules/#{modname}"
puppet_module_install(:source => proj_root, :module_name => modname)
on host, "rm -fr #{repo}"
# List modules installed to help with debugging
on host, puppet('module','list'), { :acceptable_exit_codes => 0 }
end
end
end
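
Pieced together, the zuul-cloner branch above has each host run a command
along these lines (values are taken from the ZUUL_* environment variables):

    # Illustrative expansion of zuul_clone_cmd as concatenated above:
    #
    #   /usr/zuul-env/bin/zuul-cloner --cache-dir /opt/git \
    #     --zuul-ref <ZUUL_REF> --zuul-branch <ZUUL_BRANCH> --zuul-url <ZUUL_URL> \
    #     git://git.openstack.org openstack/puppet-openstack-integration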