Add multithreading to url_available check
* Checks are performed in parallel. * Puppet exception is raised after the first failure. * Add unit test. Change-Id: Ie09930d63198ff2bcd3f1b0ed4a94ec4b8a59fd6 Closes-Bug: #1457190
This commit is contained in:
parent
ce2cd1edae
commit
8697c43752
|
@ -1,6 +1,7 @@
|
||||||
require 'pp'
|
require 'pp'
|
||||||
require 'timeout'
|
require 'timeout'
|
||||||
require 'net/http'
|
require 'net/http'
|
||||||
|
require 'open-uri'
|
||||||
require 'uri'
|
require 'uri'
|
||||||
|
|
||||||
Puppet::Parser::Functions::newfunction(:url_available, :doc => <<-EOS
|
Puppet::Parser::Functions::newfunction(:url_available, :doc => <<-EOS
|
||||||
|
@ -40,6 +41,8 @@ EOS
|
||||||
) do |argv|
|
) do |argv|
|
||||||
url = argv[0]
|
url = argv[0]
|
||||||
http_proxy = argv[1]
|
http_proxy = argv[1]
|
||||||
|
threads_count = 16
|
||||||
|
Thread.abort_on_exception=true
|
||||||
|
|
||||||
def fetch(url, http_proxy = nil)
|
def fetch(url, http_proxy = nil)
|
||||||
# proxy variables, set later if http_proxy is provided or there is a proxy
|
# proxy variables, set later if http_proxy is provided or there is a proxy
|
||||||
|
@ -76,7 +79,7 @@ function. Must be of type String or Hash."
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
puts "Checking #{uri}"
|
puts "Checking #{uri}\n"
|
||||||
begin
|
begin
|
||||||
out = Timeout::timeout(180) do
|
out = Timeout::timeout(180) do
|
||||||
u = URI.parse(uri)
|
u = URI.parse(uri)
|
||||||
|
@ -94,9 +97,16 @@ function. Must be of type String or Hash."
|
||||||
end
|
end
|
||||||
|
|
||||||
# if passed an array, iterate through the array and check each element
|
# if passed an array, iterate through the array and check each element
|
||||||
|
# within a thread pool equal to threads_count
|
||||||
if url.instance_of? Array
|
if url.instance_of? Array
|
||||||
url.each do |u|
|
url.each_slice(threads_count) do |group|
|
||||||
fetch(u, http_proxy)
|
threads = []
|
||||||
|
group.each do |u|
|
||||||
|
threads << Thread.new do
|
||||||
|
fetch(u, http_proxy)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
threads.each(&:join)
|
||||||
end
|
end
|
||||||
else
|
else
|
||||||
fetch(url, http_proxy)
|
fetch(url, http_proxy)
|
||||||
|
|
|
@ -0,0 +1,44 @@
|
require 'spec_helper'

# Unit tests for the url_available Puppet parser function.
#
# NOTE(review): these examples contact real external mirrors over HTTP, so
# they require network access and may be slow or flaky in CI — consider
# stubbing Net::HTTP (e.g. with WebMock) if that becomes a problem.
describe 'the url_available function' do
  # A fresh Puppet scope against which the function is invoked.
  let(:scope) { PuppetlabsSpec::PuppetInternals.scope }

  # URLs expected to be reachable at test time.
  let(:valid_urls) do
    [
      "http://archive.ubuntu.com/ubuntu/",
      "http://mirror.fuel-infra.org/mos/ubuntu/",
      "http://apt.postgresql.org/pub/repos/apt/"
    ]
  end

  # URLs expected to fail (unresolvable host or missing path).
  let(:invalid_urls) do
    [
      "http://invalid-url.ubuntu.com/ubuntu/",
      "http://mirror.fuel-infra.org/invalid-url"
    ]
  end

  it 'should exist' do
    expect(Puppet::Parser::Functions.function('url_available')).to eq 'function_url_available'
  end

  context 'with single values' do
    it 'should be able to process a single value' do
      expect(scope.function_url_available([valid_urls[0]])).to be true
    end

    it 'should throw exception on invalid url' do
      expect { scope.function_url_available([invalid_urls[0]]) }.to raise_error(Puppet::Error)
    end
  end

  context 'with multiple values' do
    it 'should be able to process an array of values' do
      expect(scope.function_url_available([valid_urls])).to be true
    end

    it 'should throw exception on invalid urls' do
      expect { scope.function_url_available([invalid_urls]) }.to raise_error(Puppet::Error)
    end
  end
end
Loading…
Reference in New Issue