diff --git a/mcollective/README b/mcollective/README new file mode 100644 index 000000000..911e6b3cf --- /dev/null +++ b/mcollective/README @@ -0,0 +1,2 @@ +* rubygems +* amqp >= 0.9.7 \ No newline at end of file diff --git a/mcollective/agent/fake.ddl b/mcollective/agent/fake.ddl new file mode 100644 index 000000000..6a2b7c888 --- /dev/null +++ b/mcollective/agent/fake.ddl @@ -0,0 +1,14 @@ +metadata :name => "Fake Agent", + :description => "Fake Agent", + :author => "Mirantis Inc.", + :license => "Apache License 2.0", + :version => "0.0.1", + :url => "http://mirantis.com", + :timeout => 20 + +action "echo", :description => "Echo request message" do + output :output, + :description => "Just request message", + :display_as => "Echo message" +end + diff --git a/mcollective/agent/fake.rb b/mcollective/agent/fake.rb new file mode 100644 index 000000000..92c5555c2 --- /dev/null +++ b/mcollective/agent/fake.rb @@ -0,0 +1,12 @@ +module MCollective + module Agent + class Fake < RPC::Agent + + action "echo" do + validate :msg, String + reply[:msg] = "Hello, it is my reply: #{request[:msg]}" + end + + end + end +end diff --git a/mcollective/agent/naily.ddl b/mcollective/agent/naily.ddl new file mode 100644 index 000000000..2b5fdf971 --- /dev/null +++ b/mcollective/agent/naily.ddl @@ -0,0 +1,23 @@ +metadata \ +:name => "Naily Agent", +:description => "Naily Agent", +:author => "Mirantis Inc.", +:license => "Apache License 2.0", +:version => "0.0.1", +:url => "http://mirantis.com", +:timeout => 300 + +action "runonce", :description => "Runs puppet apply" do + output \ + :output, + :description => "Response message", + :display_as => "Response message" +end + +action "echo", :description => "Echo request message" do + output \ + :output, + :description => "Just echo request message", + :display_as => "Echo message" +end + diff --git a/mcollective/agent/naily.rb b/mcollective/agent/naily.rb new file mode 100644 index 000000000..3398051ce --- /dev/null +++ b/mcollective/agent/naily.rb @@ -0,0 +1,79 @@ +module MCollective + module Agent + class Naily < RPC::Agent + metadata \ + :name => "Naily Agent", + :description => "Naily Agent", + :author => "Mirantis Inc.", + :license => "Apache License 2.0", + :version => "0.0.1", + :url => "http://mirantis.com", + :timeout => 300 + + def startup_hook + @lockfile = @config.pluginconf["naily.lockfile"] || + "/var/lock/naily.lock" + @puppet = @config.pluginconf["naily.puppet"] || + "/usr/bin/puppet" + @puppetlog = @config.pluginconf["naily.puppetlog"] || + "/var/log/puppet.log" + @puppetmodules = @config.pluginconf["naily.puppetmodules"] || + "/etc/puppet/modules" + @sitepp = @config.pluginconf["naily.sitepp"] || + "/etc/puppet/manifests/site.pp" + end + + action "runonce" do + runonce + end + + action "echo" do + validate :msg, String + reply[:msg] = "Hello, it is my reply: #{request[:msg]}" + end + + private + + def running? + status = run("flock -w 0 -o #{@lockfile} -c ''", :cwd => "/") + return true if status != 0 + return false + end + + def runonce + if running? 
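+        # Another run already holds the lock file, so fail fast instead of queueing a second puppet apply.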
+ reply.fail "Agent is running at the moment" + else + runonce_background + end + end + + def flock_command command + return "flock -w 0 -o #{@lockfile} -c \"#{command}\"" + end + + def runonce_background + + cmd = [@puppet, "apply"] + cmd << ["-l", @puppetlog] + cmd << "--verbose" + cmd << "--debug" + cmd << ["--modulepath", @puppetmodules] + cmd << @sitepp + cmd = cmd.join(" ") + + cmd = flock_command cmd + + reply[:command] = cmd + reply[:status] = run( + cmd, + :stdout => :output, + :stderr => :err, + :chomp => true + ) + + end + + end + end +end diff --git a/mcollective/bin/amqp_client.rb b/mcollective/bin/amqp_client.rb new file mode 100644 index 000000000..0e7d2f250 --- /dev/null +++ b/mcollective/bin/amqp_client.rb @@ -0,0 +1,92 @@ +require 'rubygems' +require 'eventmachine' +require 'amqp' +require 'json' +require 'lib/helpers' + + +class MyClient + + include Helpers + + def initialize + test_message_id = random_string + test_message_payload = JSON.dump({"msg_id" => test_message_id, + "action" => "status"}) + + test_message_metadata = { + :routing_key => "mcollective" + } + + response_exname = test_message_id + response_qname = test_message_id + response_routing_key = test_message_id + + request_exname = "nailgun.topic" + + logger.debug("Connecting to rabbitmq") + AMQP.connect(:host => "localhost", + :port => 5672, + :username => "guest", + :password => "guest") do |connection| + @connection = connection + logger.debug("Initializing channel") + AMQP::Channel.new(connection) do |channel| + + + logger.debug("Initializing response exchange: #{response_exname}") + response_exchange = AMQP::Exchange.new(channel, :direct, response_exname, + :auto_delete => true) + logger.debug("Initializing response queue: #{response_qname}") + response_queue = AMQP::Queue.new(channel, response_qname, + :exclusive => true, :auto_delete => true) + logger.debug("Binding response queue to response exchange") + response_queue.bind(response_exchange, :routing_key => response_routing_key) + + logger.debug("Subscribing to response queue") + response_queue.subscribe(:ack => true) do |metadata, payload| + logger.debug("Response:") + logger.debug("Response: metadata: #{metadata}") + logger.debug("Response: payload: #{payload}") + metadata.ack + response_queue.purge + response_queue.delete + response_exchange.delete + EM.stop + end + + logger.debug("Initializing request exchange: #{request_exname}") + request_exchange = AMQP::Exchange.new(channel, :topic, "nailgun.topic") + + logger.debug("Sending request: #{test_message_payload}") + request_exchange.publish(test_message_payload, test_message_metadata) + end + end + end + + def disconnect &blk + @connection.close + yield if blk + end + +end + + +EM.run do + myclient = MyClient.new + + Signal.trap("TERM") do + puts "TERM signal has been caught" + myclient.disconnect do + EventMachine.stop + end + end + + Signal.trap("INT") do + puts "INT signal has been caught" + myclient.disconnect do + EventMachine.stop + end + end + +end diff --git a/mcollective/bin/amqp_server.rb b/mcollective/bin/amqp_server.rb new file mode 100644 index 000000000..0e129578b --- /dev/null +++ b/mcollective/bin/amqp_server.rb @@ -0,0 +1,66 @@ +require 'rubygems' +require 'eventmachine' +require 'amqp' +require 'json' +require 'lib/helpers' + + +class MyServer + + include Helpers + + def initialize + + logger.debug("Connecting to rabbitmq") + AMQP.connect(:host => "localhost", + :port => 5672, + :username => "guest", + :password => "guest") do |connection| + @connection = connection + 
logger.debug("Initializing channel") + AMQP::Channel.new(connection) do |channel| + server_exchange = AMQP::Exchange.new(channel, :topic, "nailgun.topic") + + server_queue = AMQP::Queue.new(channel, "mcollective", + :exclusive => true, :auto_delete => true) + server_queue.bind(server_exchange, :routing_key => "mcollective") + + server_queue.subscribe() do |metadata, payload| + logger.debug("Received message: #{payload}") + + payload_parsed = JSON.parse(payload) + msg_id = payload_parsed["msg_id"] + exchange = AMQP::Exchange.new(channel, :direct, msg_id, + :auto_delete => true) + exchange.publish("Response", :routing_key => msg_id) + end + end + end + end + + def disconnect &blk + @connection.close + yield if blk + end + +end + + +EM.run do + myserver = MyServer.new + + Signal.trap("TERM") do + puts "TERM signal has been caught" + myserver.disconnect do + EventMachine.stop + end + end + + Signal.trap("INT") do + puts "INT signal has been caught" + myserver.disconnect do + EventMachine.stop + end + end + +end diff --git a/mcollective/bin/naily.rb b/mcollective/bin/naily.rb new file mode 100755 index 000000000..025f22b8e --- /dev/null +++ b/mcollective/bin/naily.rb @@ -0,0 +1,23 @@ +#!/usr/bin/env ruby + +$LOAD_PATH.unshift(File.expand_path(File.join(File.dirname(__FILE__), "..", "lib"))) + +require 'rubygems' +require 'naily/server/config' +require 'naily/server/daemon' + +Naily::Server::Config.define do |config| + config.amqp_host = "127.0.0.1" + config.amqp_port = 5672 + config.amqp_username = "guest" + config.amqp_password = "guest" + config.topic_exchange_name = "nailgun.topic" + config.topic_queue_name = "mcollective" + config.topic_queue_routing_key = "mcollective" +end + + +daemon = Naily::Server::Daemon.new +daemon.run + + diff --git a/mcollective/bin/run_blocking_client.rb b/mcollective/bin/run_blocking_client.rb new file mode 100644 index 000000000..197b316f3 --- /dev/null +++ b/mcollective/bin/run_blocking_client.rb @@ -0,0 +1,4 @@ +require 'client/blocking_client' + +client = BlockingClient.new +client.run diff --git a/mcollective/bin/run_catalog.rb b/mcollective/bin/run_catalog.rb new file mode 100755 index 000000000..f4a1e7db6 --- /dev/null +++ b/mcollective/bin/run_catalog.rb @@ -0,0 +1,21 @@ +#!/usr/bin/env ruby + +$LOAD_PATH.unshift(File.expand_path(File.join(File.dirname(__FILE__), "..", "lib"))) + +require 'naily/framework/catalog' + +def usage + puts "Usage: $0 " + exit 1 +end + + +if not ARGV[0] or not ARGV[1] + usage +else + basedir = ARGV[0] + nodename = ARGV[1] + puts Naily::Framework::Catalog.get_catalog basedir, nodename +end + + diff --git a/mcollective/bin/run_client.rb b/mcollective/bin/run_client.rb new file mode 100644 index 000000000..4f6d499df --- /dev/null +++ b/mcollective/bin/run_client.rb @@ -0,0 +1,4 @@ +require 'client/client' + +client = Client.new +client.run diff --git a/mcollective/bin/run_fake.rb b/mcollective/bin/run_fake.rb new file mode 100644 index 000000000..acaff50b6 --- /dev/null +++ b/mcollective/bin/run_fake.rb @@ -0,0 +1,10 @@ +$LOAD_PATH << '../' + +require 'client/client' + +tc = TestClient.new + +tc.discover +tc.run +tc.report +tc.disconnect diff --git a/mcollective/lib/naily/amqp/direct_publisher.rb b/mcollective/lib/naily/amqp/direct_publisher.rb new file mode 100644 index 000000000..2db36dd33 --- /dev/null +++ b/mcollective/lib/naily/amqp/direct_publisher.rb @@ -0,0 +1,20 @@ +require 'amqp' +require 'naily/amqp/helpers' + +module Naily + module Amqp + class DirectPublisher + include Helpers + + def initialize channel, message + 
logger.debug("Publish message: payload: #{message}") + AMQP::Exchange.new(channel, :direct, message.exchange_name, + :auto_delete => true) do |exchange| + exchange.publish(message, :routing_key => message.routing_key) do + logger.debug("Publish message: complete") + end + end + end + end + end +end diff --git a/mcollective/lib/naily/amqp/driver.rb b/mcollective/lib/naily/amqp/driver.rb new file mode 100644 index 000000000..f3d18cc2f --- /dev/null +++ b/mcollective/lib/naily/amqp/driver.rb @@ -0,0 +1,95 @@ +require 'amqp' +require 'json' +require 'naily/amqp/helpers' +require 'naily/amqp/message' +require 'naily/amqp/topic_consumer' +require 'naily/amqp/direct_publisher' +require 'naily/amqp/handler' + +# WE FOLLOW OPENSTACK RPC MODEL DESCRIBED HERE +# http://docs.openstack.org/developer/nova/devref/rpc.html + +# RUBY AMQP RPC MODEL DESCRIBED HERE +# http://rubyamqp.info/articles/patterns_and_use_cases/ + +module Naily + module Amqp + class Driver + + include Helpers + + def initialize options={} + default_options = { + :host => "localhost", + :port => 5672, + :username => "guest", + :password => "guest", + + :topic_exchange_name => "nailgun.topic", + :topic_queue_name => "mcollective", + :topic_queue_routing_key => "mcollective", + } + opts = default_options.merge(options) + + logger.debug("Connecting to rabbitmq") + AMQP.connect(:host => opts[:host], + :port => opts[:port], + :username => opts[:username], + :password => opts[:password]) do |connection| + @connection = connection + logger.debug("Initializing channel") + AMQP::Channel.new(connection) do |channel| + @channel = channel + + TopicConsumer.new(self, + channel, + opts[:topic_exchange_name], + opts[:topic_queue_name], + opts[:topic_queue_routing_key]) + + + end + end + end + + def handle message + raise "Message is not valid" if not message.valid? + handler = Naily::Amqp::Handler.new message + response = handler.handle + DirectPublisher.new(@channel, response) if response + end + + def disconnect &blk + @connection.close + yield if blk + end + + # def ready? options={} &blk + # default_options = { + # :timeout => 10, + # :on_timeout => nil + # } + # options = default_options.merge(options) + # tick = 0.5 + # n = 0 + + # timer = EM::PeriodicTimer.new(tick) do + # if @status == :ready + # timer.cancel + # yield true + # end + # if (n+=1) > options[:timeout] / tick + # @logger.error("Ready status timed out") + # timer.cancel + # if options[:on_timeout] + # options[:on_timeout].call if options[:on_timeout] + # else + # yield false + # end + # end + # end + # end + + end + end +end diff --git a/mcollective/lib/naily/amqp/handler.rb b/mcollective/lib/naily/amqp/handler.rb new file mode 100644 index 000000000..1ce17f9c3 --- /dev/null +++ b/mcollective/lib/naily/amqp/handler.rb @@ -0,0 +1,48 @@ +require 'naily/handler/echo' +require 'naily/handler/null' +require 'naily/handler/mco' +require 'naily/amqp/message' + +module Naily + module Amqp + class Handler + include Helpers + + def initialize message + @message = message + @real_handler = get_real_handler + + end + + def get_real_handler + case message.rpc_method.to_sym + when :echo + return Naily::Handler::Echo.new @message.rpc_method_args.to_hash + when :mco + return Naily::Handler::Mco.new @message.rpc_method_args.to_hash + else + return Naily::Handler::Null.new @message.rpc_method_args.to_hash + end + end + + def handle + @real_handler.handle do |response| + response ||= {} + if @message.call? 
+ body = { + :result => handler_response, + :failure => nil, + :ending => false + } + options = { + :exchange_name => @message.msg_id, + :routing_key => @message.msg_id + } + return Response.new body, options + end + return nil + end + end + end + end +end diff --git a/mcollective/lib/naily/amqp/helpers.rb b/mcollective/lib/naily/amqp/helpers.rb new file mode 100644 index 000000000..78ad7fed1 --- /dev/null +++ b/mcollective/lib/naily/amqp/helpers.rb @@ -0,0 +1,25 @@ +require 'logger' + +module Naily + module Amqp + module Helpers + + def logger + logger = ::Logger.new(STDOUT) + logger.level = ::Logger::DEBUG + logger + end + + def random_string(length=16, downcase=true) + chars = ('a'..'z').to_a + ('A'..'Z').to_a + ('0'..'9').to_a + rnd = "" + length.times do |i| + rnd << chars[rand(chars.length)] + end + rnd.downcase! if downcase + rnd + end + + end + end +end diff --git a/mcollective/lib/naily/amqp/message.rb b/mcollective/lib/naily/amqp/message.rb new file mode 100644 index 000000000..24383853c --- /dev/null +++ b/mcollective/lib/naily/amqp/message.rb @@ -0,0 +1,76 @@ +require 'json' +require 'naily/amqp/helpers' + +module Naily + module Amqp + class Message + include Helpers + + attr_reader :metadata + + def payload= p + @payload = JSON.parse(p) + end + + def payload + JSON.dump(@payload) + end + + def valid? + return false if not @payload + end + + def to_s + self.payload + end + + end + + + class Request < Message + + def initialize m=nil, p=nil + @metadata = m + self.payload = p + end + + def valid? + call_valid_actions = ["status"] + cast_valid_actions = ["deploy"] + return false if not @payload + return false if not @payload["action"] + return false if self.call? and not call_valid_actions.include?(self.action) + return false if not self.call? and not cast_valid_actions.include?(self.action) + return false if self.call? and not @payload["msg_id"] + true + end + + def call? + return true if @payload["msg_id"] + false + end + + def msg_id + @payload["msg_id"] + end + + def action + @payload["action"] + end + + end + + class Response < Message + + attr_accessor :routing_key + attr_accessor :exchange_name + + def initialize p=nil, options={} + self.payload = p + self.routing_key = options[:routing_key] if options[:routing_key] + self.exchange_name = options[:exchange_name] if options[:exchange_name] + end + + end + end +end diff --git a/mcollective/lib/naily/amqp/topic_consumer.rb b/mcollective/lib/naily/amqp/topic_consumer.rb new file mode 100644 index 000000000..6bb3ea14c --- /dev/null +++ b/mcollective/lib/naily/amqp/topic_consumer.rb @@ -0,0 +1,37 @@ +require 'amqp' +require 'naily/amqp/helpers' + +module Naily + module Amqp + class TopicConsumer + + include Helpers + + def initialize parent, channel, exchange_name, queue_name, routing_key, &blk + logger.debug("Initializing topic consumer: exchange: #{exchange_name} "\ + "queue: #{queue_name} routing_key: #{routing_key}") + @parent = parent + AMQP::Exchange.new(channel, :topic, exchange_name) do |exchange| + AMQP::Queue.new(channel, queue_name, :exclusive => true, + :auto_delete => true) do |queue| + queue.bind(exchange, :routing_key => routing_key) do + queue.subscribe(:ack => true) do |metadata, payload| + message = Request.new(metadata, payload) + logger.debug("Received message: #{message}") + if message.valid? 
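+              # Well-formed requests go back to the driver, which dispatches them to a handler and publishes any reply.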
+ @parent.handle(message) + else + logger.error("Received message is not valid") + end + metadata.ack + end + if blk + yield self + end + end + end + end + end + end + end +end diff --git a/mcollective/lib/naily/framework/async.rb b/mcollective/lib/naily/framework/async.rb new file mode 100644 index 000000000..48ce33686 --- /dev/null +++ b/mcollective/lib/naily/framework/async.rb @@ -0,0 +1,21 @@ +require 'eventmachine' + +module Naily + module Framework + class Async + + def initialize instance + @instance = instance + end + + def call method_name, *args, &blk + EM.defer(Proc.new { + method = @instance.method(method_name) + method.call(*args) + }, blk ? blk : nil) + end + end + end +end + + diff --git a/mcollective/lib/naily/framework/catalog.rb b/mcollective/lib/naily/framework/catalog.rb new file mode 100644 index 000000000..e783ea88f --- /dev/null +++ b/mcollective/lib/naily/framework/catalog.rb @@ -0,0 +1,79 @@ +require 'puppet' +require 'puppet/node' +require 'yaml' +require 'json' +require 'pp' +require 'logger' +require 'puppet/parser/compiler' +require 'puppet/indirector/yaml' +require 'puppet/indirector/request' +require 'puppet/indirector/node/exec' +require 'puppet/indirector/catalog/yaml' +require 'puppet/application' +require 'puppet/external/pson/common' + +module Naily + module Framework + module Catalog + class Yaml < Puppet::Node::Exec + + def initialize basepath + @basepath = basepath + end + + def find(request) + if File.exist? path(request.key) + output = open(path(request.key)) do |file| + file.read + end + else + raise "File #{path(request.key)} does not exist" + end + + # Translate the output to ruby. + result = translate(request.key, output) + + create_node(request.key, result) + end + + # This method is the same as that one in super class excluding + # that the facts are not merged into node + def create_node(name, result) + node = Puppet::Node.new(name) + set = false + [:parameters, :classes, :environment].each do |param| + if value = result[param] + node.send(param.to_s + "=", value) + set = true + end + end + + node + end + + def path name, ext=".yaml" + File.join(@basepath, name + ext) + end + + end + + + def self.get_catalog basepath, nodename + + request = Puppet::Indirector::Request.new('node', :find, nodename) + + node_terminus = Yaml.new basepath + node = node_terminus.find(request) + + compiler = Puppet::Parser::Compiler.new(node) + + catalog = compiler.compile + catalog_json = PSON::generate(catalog.to_resource, + :allow_nan => true, + :max_nesting => false) + + # jj JSON.load(catalog_json) + end + end + end +end diff --git a/mcollective/lib/naily/framework/client.rb b/mcollective/lib/naily/framework/client.rb new file mode 100644 index 000000000..f90eda334 --- /dev/null +++ b/mcollective/lib/naily/framework/client.rb @@ -0,0 +1,15 @@ +module Naily + module Framework + module Client + + def available_roles + + end + + def runonce params={} + + end + + end + end +end diff --git a/mcollective/lib/naily/handler/echo.rb b/mcollective/lib/naily/handler/echo.rb new file mode 100644 index 000000000..a85520260 --- /dev/null +++ b/mcollective/lib/naily/handler/echo.rb @@ -0,0 +1,15 @@ +module Naily + module Handler + class Echo + + def initialize args={} + @args = args + end + + def handle &blk + yield @args + end + + end + end +end diff --git a/mcollective/lib/naily/handler/mco.rb b/mcollective/lib/naily/handler/mco.rb new file mode 100644 index 000000000..616454348 --- /dev/null +++ b/mcollective/lib/naily/handler/mco.rb @@ -0,0 +1,28 @@ +require 
'naily/framework/async' + +module Naily + module Handler + class Mco + + def initialize args + @args = args + end + + def handle &blk + case @args["client"] + when "simple" + client = Naily::MCClient::Simple.new + when "blocking" + client = Naily::MCClient::Blocking.new + else + raise "Unknown mcollective client" + end + + async = Naily::Framework::Async.new client + async.call @args["action"], @args["action_args"] do |result| + yield({'result' => 'Action ended: #{result}'}) + end + end + end + end +end diff --git a/mcollective/lib/naily/handler/null.rb b/mcollective/lib/naily/handler/null.rb new file mode 100644 index 000000000..42110c335 --- /dev/null +++ b/mcollective/lib/naily/handler/null.rb @@ -0,0 +1,16 @@ +require 'naily/framework/async' + +module Naily + module Handler + class Null + + def initialize args + end + + def handle &blk + yield + end + + end + end +end diff --git a/mcollective/lib/naily/mcclient/blocking.rb b/mcollective/lib/naily/mcclient/blocking.rb new file mode 100644 index 000000000..a3ee6123c --- /dev/null +++ b/mcollective/lib/naily/mcclient/blocking.rb @@ -0,0 +1,31 @@ +require 'mcollective' +require 'naily/framework/client' + +module Naily + module MCClient + class Blocking + include MCollective::RPC + include Naily::Framework::Client + + def initialize + @mc = rpcclient('naily') + @mc.verbose = true + end + + def run + responses = [] + @mc.echo(:msg => "Testing fake agent plugin: before sleep").each do |response| + responses << "Response: from: #{response[:sender]} message: #{response[:data][:msg]}" + end + sleep 10 + @mc.echo(:msg => "Testing fake agent plugin: after sleep").each do |response| + responses << "Response: from: #{response[:sender]} message: #{response[:data][:msg]}" + end + end + + def disconnect + @mc.disconnect + end + end + end +end diff --git a/mcollective/lib/naily/mcclient/simple.rb b/mcollective/lib/naily/mcclient/simple.rb new file mode 100644 index 000000000..f456be107 --- /dev/null +++ b/mcollective/lib/naily/mcclient/simple.rb @@ -0,0 +1,27 @@ +require 'mcollective' +require 'naily/framework/client' + +module Naily + module MCClient + class Simple + include MCollective::RPC + include Naily::Framework::Client + + def initialize + @mc = rpcclient('naily') + @mc.verbose = true + end + + def run + responses = [] + @mc.runonce().each do |response| + responses << response + end + end + + def disconnect + @mc.disconnect + end + end + end +end diff --git a/mcollective/lib/naily/server/config.rb b/mcollective/lib/naily/server/config.rb new file mode 100644 index 000000000..c7eda960a --- /dev/null +++ b/mcollective/lib/naily/server/config.rb @@ -0,0 +1,19 @@ +module Naily + module Server + module Config + extend self + + attr_accessor :amqp_host + attr_accessor :amqp_port + attr_accessor :amqp_username + attr_accessor :amqp_password + attr_accessor :topic_exchange_name + attr_accessor :topic_queue_name + attr_accessor :topic_queue_routing_key + + def define + yield self + end + end + end +end diff --git a/mcollective/lib/naily/server/daemon.rb b/mcollective/lib/naily/server/daemon.rb new file mode 100644 index 000000000..7b3a4b259 --- /dev/null +++ b/mcollective/lib/naily/server/daemon.rb @@ -0,0 +1,38 @@ +require 'logger' +require 'eventmachine' +require 'naily/amqp/driver' + +module Naily + module Server + class Daemon + def initialize + @logger = Logger.new(STDOUT) + @logger.level = Logger::DEBUG + + @options = { + :host => Config.amqp_host, + :port => Config.amqp_port, + :username => Config.amqp_username, + :password => 
Config.amqp_password, + + :topic_exchange_name => Config.topic_exchange_name, + :topic_queue_name => Config.topic_queue_name, + :topic_queue_routing_key => Config.topic_queue_routing_key + } + end + + def run + EM.run do + driver = Naily::Amqp::Driver.new @options + + Signal.trap("INT") do + @logger.debug("INT signal has been caught") + driver.disconnect do + EventMachine.stop + end + end + end + end + end + end +end diff --git a/mcollective/lib/puppet.rb b/mcollective/lib/puppet.rb new file mode 100644 index 000000000..fde9fd14f --- /dev/null +++ b/mcollective/lib/puppet.rb @@ -0,0 +1,164 @@ +# Try to load rubygems. Hey rubygems, I hate you. +begin + require 'rubygems' +rescue LoadError +end + +# see the bottom of the file for further inclusions +require 'singleton' +require 'facter' +require 'puppet/error' +require 'puppet/util' +require 'puppet/util/autoload' +require 'puppet/util/settings' +require 'puppet/util/feature' +require 'puppet/util/suidmanager' +require 'puppet/util/run_mode' + +#------------------------------------------------------------ +# the top-level module +# +# all this really does is dictate how the whole system behaves, through +# preferences for things like debugging +# +# it's also a place to find top-level commands like 'debug' + +module Puppet + PUPPETVERSION = '2.6.17' + + def Puppet.version + PUPPETVERSION + end + + class << self + include Puppet::Util + attr_reader :features + attr_writer :name + end + + # the hash that determines how our system behaves + @@settings = Puppet::Util::Settings.new + + # The services running in this process. + @services ||= [] + + require 'puppet/util/logging' + + extend Puppet::Util::Logging + + # The feature collection + @features = Puppet::Util::Feature.new('puppet/feature') + + # Load the base features. + require 'puppet/feature/base' + + # Store a new default value. + def self.setdefaults(section, hash) + @@settings.setdefaults(section, hash) + end + + # configuration parameter access and stuff + def self.[](param) + if param == :debug + return Puppet::Util::Log.level == :debug + else + return @@settings[param] + end + end + + # configuration parameter access and stuff + def self.[]=(param,value) + @@settings[param] = value + end + + def self.clear + @@settings.clear + end + + def self.debug=(value) + if value + Puppet::Util::Log.level=(:debug) + else + Puppet::Util::Log.level=(:notice) + end + end + + def self.settings + @@settings + end + + def self.run_mode + $puppet_application_mode || Puppet::Util::RunMode[:user] + end + + def self.application_name + $puppet_application_name ||= "apply" + end + + # Load all of the configuration parameters. + require 'puppet/defaults' + + def self.genmanifest + if Puppet[:genmanifest] + puts Puppet.settings.to_manifest + exit(0) + end + end + + # Parse the config file for this process. + def self.parse_config + Puppet.settings.parse + end + + # XXX this should all be done using puppet objects, not using + # normal mkdir + def self.recmkdir(dir,mode = 0755) + if FileTest.exist?(dir) + return false + else + tmp = dir.sub(/^\//,'') + path = [File::SEPARATOR] + tmp.split(File::SEPARATOR).each { |dir| + path.push dir + if ! 
FileTest.exist?(File.join(path)) + begin + Dir.mkdir(File.join(path), mode) + rescue Errno::EACCES => detail + Puppet.err detail.to_s + return false + rescue => detail + Puppet.err "Could not create #{path}: #{detail}" + return false + end + elsif FileTest.directory?(File.join(path)) + next + else FileTest.exist?(File.join(path)) + raise Puppet::Error, "Cannot create #{dir}: basedir #{File.join(path)} is a file" + end + } + return true + end + end + + # Create a new type. Just proxy to the Type class. + def self.newtype(name, options = {}, &block) + Puppet::Type.newtype(name, options, &block) + end + + # Retrieve a type by name. Just proxy to the Type class. + def self.type(name) + # LAK:DEP Deprecation notice added 12/17/2008 + Puppet.warning "Puppet.type is deprecated; use Puppet::Type.type" + Puppet::Type.type(name) + end +end + +require 'puppet/type' +require 'puppet/parser' +require 'puppet/resource' +require 'puppet/network' +require 'puppet/ssl' +require 'puppet/module' +require 'puppet/util/storage' +require 'puppet/status' +require 'puppet/file_bucket/file' diff --git a/mcollective/lib/puppet/agent.rb b/mcollective/lib/puppet/agent.rb new file mode 100644 index 000000000..47dd44a0e --- /dev/null +++ b/mcollective/lib/puppet/agent.rb @@ -0,0 +1,107 @@ +require 'sync' +require 'puppet/external/event-loop' +require 'puppet/application' + +# A general class for triggering a run of another +# class. +class Puppet::Agent + require 'puppet/agent/locker' + include Puppet::Agent::Locker + + attr_reader :client_class, :client, :splayed + + # Just so we can specify that we are "the" instance. + def initialize(client_class) + @splayed = false + + @client_class = client_class + end + + def lockfile_path + client_class.lockfile_path + end + + def needing_restart? + Puppet::Application.restart_requested? + end + + # Perform a run with our client. + def run(*args) + if running? + Puppet.notice "Run of #{client_class} already in progress; skipping" + return + end + result = nil + block_run = Puppet::Application.controlled_run do + splay + with_client do |client| + begin + sync.synchronize { lock { result = client.run(*args) } } + rescue SystemExit,NoMemoryError + raise + rescue Exception => detail + puts detail.backtrace if Puppet[:trace] + Puppet.err "Could not run #{client_class}: #{detail}" + end + end + true + end + Puppet.notice "Shutdown/restart in progress; skipping run" unless block_run + result + end + + def stopping? + Puppet::Application.stop_requested? + end + + # Have we splayed already? + def splayed? + splayed + end + + # Sleep when splay is enabled; else just return. + def splay + return unless Puppet[:splay] + return if splayed? + + time = rand(Integer(Puppet[:splaylimit]) + 1) + Puppet.info "Sleeping for #{time} seconds (splay is enabled)" + sleep(time) + @splayed = true + end + + # Start listening for events. We're pretty much just listening for + # timer events here. + def start + # Create our timer. Puppet will handle observing it and such. + timer = EventLoop::Timer.new(:interval => Puppet[:runinterval], :tolerance => 1, :start? => true) do + run + end + + # Run once before we start following the timer + timer.sound_alarm + end + + def sync + @sync ||= Sync.new + end + + private + + # Create and yield a client instance, keeping a reference + # to it during the yield. 
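+  # The reference is cleared in the ensure clause so a failed run never leaves a stale client behind.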
+ def with_client + begin + @client = client_class.new + rescue SystemExit,NoMemoryError + raise + rescue Exception => detail + puts detail.backtrace if Puppet[:trace] + Puppet.err "Could not create instance of #{client_class}: #{detail}" + return + end + yield @client + ensure + @client = nil + end +end diff --git a/mcollective/lib/puppet/agent/locker.rb b/mcollective/lib/puppet/agent/locker.rb new file mode 100644 index 000000000..98f5b38d9 --- /dev/null +++ b/mcollective/lib/puppet/agent/locker.rb @@ -0,0 +1,40 @@ +require 'puppet/util/pidlock' + +# Break out the code related to locking the agent. This module is just +# included into the agent, but having it here makes it easier to test. +module Puppet::Agent::Locker + # Let the daemon run again, freely in the filesystem. + def enable + lockfile.unlock(:anonymous => true) + end + + # Stop the daemon from making any catalog runs. + def disable + lockfile.lock(:anonymous => true) + end + + # Yield if we get a lock, else do nothing. Return + # true/false depending on whether we get the lock. + def lock + if lockfile.lock + begin + yield + ensure + lockfile.unlock + end + return true + else + return false + end + end + + def lockfile + @lockfile ||= Puppet::Util::Pidlock.new(lockfile_path) + + @lockfile + end + + def running? + lockfile.locked? + end +end diff --git a/mcollective/lib/puppet/application.rb b/mcollective/lib/puppet/application.rb new file mode 100644 index 000000000..a028a158f --- /dev/null +++ b/mcollective/lib/puppet/application.rb @@ -0,0 +1,425 @@ +require 'optparse' +require 'puppet/util/plugins' + +# This class handles all the aspects of a Puppet application/executable +# * setting up options +# * setting up logs +# * choosing what to run +# * representing execution status +# +# === Usage +# An application is a subclass of Puppet::Application. +# +# For legacy compatibility, +# Puppet::Application[:example].run +# is equivalent to +# Puppet::Application::Example.new.run +# +# +# class Puppet::Application::Example << Puppet::Application +# +# def preinit +# # perform some pre initialization +# @all = false +# end +# +# # run_command is called to actually run the specified command +# def run_command +# send Puppet::Util::CommandLine.new.args.shift +# end +# +# # option uses metaprogramming to create a method +# # and also tells the option parser how to invoke that method +# option("--arg ARGUMENT") do |v| +# @args << v +# end +# +# option("--debug", "-d") do |v| +# @debug = v +# end +# +# option("--all", "-a:) do |v| +# @all = v +# end +# +# def handle_unknown(opt,arg) +# # last chance to manage an option +# ... +# # let's say to the framework we finally handle this option +# true +# end +# +# def read +# # read action +# end +# +# def write +# # writeaction +# end +# +# end +# +# === Preinit +# The preinit block is the first code to be called in your application, before option parsing, +# setup or command execution. +# +# === Options +# Puppet::Application uses +OptionParser+ to manage the application options. +# Options are defined with the +option+ method to which are passed various +# arguments, including the long option, the short option, a description... +# Refer to +OptionParser+ documentation for the exact format. +# * If the option method is given a block, this one will be called whenever +# the option is encountered in the command-line argument. 
+# * If the option method has no block, a default functionnality will be used, that +# stores the argument (or true/false if the option doesn't require an argument) in +# the global (to the application) options array. +# * If a given option was not defined by a the +option+ method, but it exists as a Puppet settings: +# * if +unknown+ was used with a block, it will be called with the option name and argument +# * if +unknown+ wasn't used, then the option/argument is handed to Puppet.settings.handlearg for +# a default behavior +# +# --help is managed directly by the Puppet::Application class, but can be overriden. +# +# === Setup +# Applications can use the setup block to perform any initialization. +# The defaul +setup+ behaviour is to: read Puppet configuration and manage log level and destination +# +# === What and how to run +# If the +dispatch+ block is defined it is called. This block should return the name of the registered command +# to be run. +# If it doesn't exist, it defaults to execute the +main+ command if defined. +# +# === Execution state +# The class attributes/methods of Puppet::Application serve as a global place to set and query the execution +# status of the application: stopping, restarting, etc. The setting of the application status does not directly +# aftect its running status; it's assumed that the various components within the application will consult these +# settings appropriately and affect their own processing accordingly. Control operations (signal handlers and +# the like) should set the status appropriately to indicate to the overall system that it's the process of +# stopping or restarting (or just running as usual). +# +# So, if something in your application needs to stop the process, for some reason, you might consider: +# +# def stop_me! +# # indicate that we're stopping +# Puppet::Application.stop! +# # ...do stuff... +# end +# +# And, if you have some component that involves a long-running process, you might want to consider: +# +# def my_long_process(giant_list_to_munge) +# giant_list_to_munge.collect do |member| +# # bail if we're stopping +# return if Puppet::Application.stop_requested? +# process_member(member) +# end +# end +module Puppet +class Application + require 'puppet/util' + include Puppet::Util + + DOCPATTERN = File.expand_path(File.dirname(__FILE__) + "/util/command_line/*" ) + + class << self + include Puppet::Util + + attr_accessor :run_status + + def clear! + self.run_status = nil + end + + def stop! + self.run_status = :stop_requested + end + + def restart! + self.run_status = :restart_requested + end + + # Indicates that Puppet::Application.restart! has been invoked and components should + # do what is necessary to facilitate a restart. + def restart_requested? + :restart_requested == run_status + end + + # Indicates that Puppet::Application.stop! has been invoked and components should do what is necessary + # for a clean stop. + def stop_requested? + :stop_requested == run_status + end + + # Indicates that one of stop! or start! was invoked on Puppet::Application, and some kind of process + # shutdown/short-circuit may be necessary. + def interrupted? + [:restart_requested, :stop_requested].include? run_status + end + + # Indicates that Puppet::Application believes that it's in usual running run_mode (no stop/restart request + # currently active). + def clear? + run_status.nil? + end + + # Only executes the given block if the run status of Puppet::Application is clear (no restarts, stops, + # etc. requested). 
+ # Upon block execution, checks the run status again; if a restart has been requested during the block's + # execution, then controlled_run will send a new HUP signal to the current process. + # Thus, long-running background processes can potentially finish their work before a restart. + def controlled_run(&block) + return unless clear? + result = block.call + Process.kill(:HUP, $PID) if restart_requested? + result + end + + def should_parse_config + @parse_config = true + end + + def should_not_parse_config + @parse_config = false + end + + def should_parse_config? + @parse_config = true if ! defined?(@parse_config) + @parse_config + end + + # used to declare code that handle an option + def option(*options, &block) + long = options.find { |opt| opt =~ /^--/ }.gsub(/^--(?:\[no-\])?([^ =]+).*$/, '\1' ).gsub('-','_') + fname = symbolize("handle_#{long}") + if (block_given?) + define_method(fname, &block) + else + define_method(fname) do |value| + self.options["#{long}".to_sym] = value + end + end + self.option_parser_commands << [options, fname] + end + + def banner(banner = nil) + @banner ||= banner + end + + def option_parser_commands + @option_parser_commands ||= ( + superclass.respond_to?(:option_parser_commands) ? superclass.option_parser_commands.dup : [] + ) + @option_parser_commands + end + + def find(name) + klass = name.to_s.capitalize + + # const_defined? is used before const_get since const_defined? will only + # check within our namespace, whereas const_get will check ancestor + # trees as well, resulting in unexpected behaviour. + if !self.const_defined?(klass) + puts "Unable to find application '#{name.to_s}'." + Kernel::exit(1) + end + + self.const_get(klass) + end + + def [](name) + find(name).new + end + + # Sets or gets the run_mode name. Sets the run_mode name if a mode_name is + # passed. Otherwise, gets the run_mode or a default run_mode + # + def run_mode( mode_name = nil) + return @run_mode if @run_mode and not mode_name + + require 'puppet/util/run_mode' + @run_mode = Puppet::Util::RunMode[ mode_name || :user ] + end + end + + attr_reader :options, :command_line + + # Every app responds to --version + option("--version", "-V") do |arg| + puts "#{Puppet.version}" + exit + end + + # Every app responds to --help + option("--help", "-h") do |v| + help + end + + def should_parse_config? + self.class.should_parse_config? + end + + # override to execute code before running anything else + def preinit + end + + def initialize(command_line = nil) + require 'puppet/util/command_line' + @command_line = command_line || Puppet::Util::CommandLine.new + set_run_mode self.class.run_mode + @options = {} + + require 'puppet' + end + + # WARNING: This is a totally scary, frightening, and nasty internal API. We + # strongly advise that you do not use this, and if you insist, we will + # politely allow you to keep both pieces of your broken code. + # + # We plan to provide a supported, long-term API to deliver this in a way + # that you can use. Please make sure that you let us know if you do require + # this, and this message is still present in the code. --daniel 2011-02-03 + def set_run_mode(mode) + @run_mode = mode + $puppet_application_mode = @run_mode + $puppet_application_name = name + + if Puppet.respond_to? 
:settings + # This is to reduce the amount of confusion in rspec + # because it might have loaded defaults.rb before the globals were set + # and thus have the wrong defaults for the current application + Puppet.settings.set_value(:confdir, Puppet.run_mode.conf_dir, :mutable_defaults) + Puppet.settings.set_value(:vardir, Puppet.run_mode.var_dir, :mutable_defaults) + Puppet.settings.set_value(:name, Puppet.application_name.to_s, :mutable_defaults) + Puppet.settings.set_value(:logdir, Puppet.run_mode.logopts, :mutable_defaults) + Puppet.settings.set_value(:rundir, Puppet.run_mode.run_dir, :mutable_defaults) + Puppet.settings.set_value(:run_mode, Puppet.run_mode.name.to_s, :mutable_defaults) + end + end + + # This is the main application entry point + def run + exit_on_fail("initialize") { hook('preinit') { preinit } } + exit_on_fail("parse options") { hook('parse_options') { parse_options } } + exit_on_fail("parse configuration file") { Puppet.settings.parse } if should_parse_config? + exit_on_fail("prepare for execution") { hook('setup') { setup } } + exit_on_fail("run") { hook('run_command') { run_command } } + end + + def main + raise NotImplementedError, "No valid command or main" + end + + def run_command + main + end + + def setup + # Handle the logging settings + if options[:debug] or options[:verbose] + Puppet::Util::Log.newdestination(:console) + if options[:debug] + Puppet::Util::Log.level = :debug + else + Puppet::Util::Log.level = :info + end + end + + Puppet::Util::Log.newdestination(:syslog) unless options[:setdest] + end + + def parse_options + # Create an option parser + option_parser = OptionParser.new(self.class.banner) + + # Add all global options to it. + Puppet.settings.optparse_addargs([]).each do |option| + option_parser.on(*option) do |arg| + handlearg(option[0], arg) + end + end + + # Add options that are local to this application, which were + # created using the "option()" metaprogramming method. If there + # are any conflicts, this application's options will be favored. + self.class.option_parser_commands.each do |options, fname| + option_parser.on(*options) do |value| + # Call the method that "option()" created. + self.send(fname, value) + end + end + + # scan command line. + begin + option_parser.parse!(self.command_line.args) + rescue OptionParser::ParseError => detail + $stderr.puts detail + $stderr.puts "Try 'puppet #{command_line.subcommand_name} --help'" + exit(1) + end + end + + def handlearg(opt, arg) + # rewrite --[no-]option to --no-option if that's what was given + if opt =~ /\[no-\]/ and !arg + opt = opt.gsub(/\[no-\]/,'no-') + end + # otherwise remove the [no-] prefix to not confuse everybody + opt = opt.gsub(/\[no-\]/, '') + unless respond_to?(:handle_unknown) and send(:handle_unknown, opt, arg) + # Puppet.settings.handlearg doesn't handle direct true/false :-) + if arg.is_a?(FalseClass) + arg = "false" + elsif arg.is_a?(TrueClass) + arg = "true" + end + Puppet.settings.handlearg(opt, arg) + end + end + + # this is used for testing + def self.exit(code) + exit(code) + end + + def name + self.class.to_s.sub(/.*::/,"").downcase.to_sym + end + + def help + if Puppet.features.usage? + # RH:FIXME: My goodness, this is ugly. 
+ ::RDoc.const_set("PuppetSourceFile", name) + #:stopdoc: # Issue #4161 + def (::RDoc).caller + docfile = `grep -l 'Puppet::Application\\[:#{::RDoc::PuppetSourceFile}\\]' #{DOCPATTERN}`.chomp + super << "#{docfile}:0" + end + #:startdoc: + ::RDoc::usage && exit + else + puts "No help available unless you have RDoc::usage installed" + exit + end + rescue Errno::ENOENT + puts "No help available for puppet #{name}" + exit + end + + private + + def exit_on_fail(message, code = 1) + yield + rescue RuntimeError, NotImplementedError => detail + puts detail.backtrace if Puppet[:trace] + $stderr.puts "Could not #{message}: #{detail}" + exit(code) + end + + def hook(step,&block) + Puppet::Plugins.send("before_application_#{step}",:application_object => self) + x = yield + Puppet::Plugins.send("after_application_#{step}",:application_object => self, :return_value => x) + x + end +end +end diff --git a/mcollective/lib/puppet/application/agent.rb b/mcollective/lib/puppet/application/agent.rb new file mode 100644 index 000000000..cf51f0825 --- /dev/null +++ b/mcollective/lib/puppet/application/agent.rb @@ -0,0 +1,272 @@ +require 'puppet/application' + +class Puppet::Application::Agent < Puppet::Application + + should_parse_config + run_mode :agent + + attr_accessor :args, :agent, :daemon, :host + + def preinit + # Do an initial trap, so that cancels don't get a stack trace. + Signal.trap(:INT) do + $stderr.puts "Cancelling startup" + exit(0) + end + + { + :waitforcert => nil, + :detailed_exitcodes => false, + :verbose => false, + :debug => false, + :centrallogs => false, + :setdest => false, + :enable => false, + :disable => false, + :client => true, + :fqdn => nil, + :serve => [], + :digest => :MD5, + :fingerprint => false, + }.each do |opt,val| + options[opt] = val + end + + @args = {} + require 'puppet/daemon' + @daemon = Puppet::Daemon.new + @daemon.argv = ARGV.dup + end + + option("--centrallogging") + option("--disable") + option("--enable") + option("--debug","-d") + option("--fqdn FQDN","-f") + option("--test","-t") + option("--verbose","-v") + + option("--fingerprint") + option("--digest DIGEST") + + option("--serve HANDLER", "-s") do |arg| + if Puppet::Network::Handler.handler(arg) + options[:serve] << arg.to_sym + else + raise "Could not find handler for #{arg}" + end + end + + option("--no-client") do |arg| + options[:client] = false + end + + option("--detailed-exitcodes") do |arg| + options[:detailed_exitcodes] = true + end + + option("--logdest DEST", "-l DEST") do |arg| + begin + Puppet::Util::Log.newdestination(arg) + options[:setdest] = true + rescue => detail + puts detail.backtrace if Puppet[:debug] + $stderr.puts detail.to_s + end + end + + option("--waitforcert WAITFORCERT", "-w") do |arg| + options[:waitforcert] = arg.to_i + end + + option("--port PORT","-p") do |arg| + @args[:Port] = arg + end + + def run_command + return fingerprint if options[:fingerprint] + return onetime if Puppet[:onetime] + main + end + + def fingerprint + unless cert = host.certificate || host.certificate_request + $stderr.puts "Fingerprint asked but no certificate nor certificate request have yet been issued" + exit(1) + return + end + unless fingerprint = cert.fingerprint(options[:digest]) + raise ArgumentError, "Could not get fingerprint for digest '#{options[:digest]}'" + end + puts fingerprint + end + + def onetime + unless options[:client] + $stderr.puts "onetime is specified but there is no client" + exit(43) + return + end + + @daemon.set_signal_traps + + begin + report = @agent.run + rescue => 
detail + puts detail.backtrace if Puppet[:trace] + Puppet.err detail.to_s + end + + if not report + exit(1) + elsif options[:detailed_exitcodes] then + exit(report.exit_status) + else + exit(0) + end + end + + def main + Puppet.notice "Starting Puppet client version #{Puppet.version}" + + @daemon.start + end + + # Enable all of the most common test options. + def setup_test + Puppet.settings.handlearg("--ignorecache") + Puppet.settings.handlearg("--no-usecacheonfailure") + Puppet.settings.handlearg("--no-splay") + Puppet.settings.handlearg("--show_diff") + Puppet.settings.handlearg("--no-daemonize") + options[:verbose] = true + Puppet[:onetime] = true + options[:detailed_exitcodes] = true + end + + # Handle the logging settings. + def setup_logs + if options[:debug] or options[:verbose] + Puppet::Util::Log.newdestination(:console) + if options[:debug] + Puppet::Util::Log.level = :debug + else + Puppet::Util::Log.level = :info + end + end + + Puppet::Util::Log.newdestination(:syslog) unless options[:setdest] + end + + def enable_disable_client(agent) + if options[:enable] + agent.enable + elsif options[:disable] + agent.disable + end + exit(0) + end + + def setup_listen + unless FileTest.exists?(Puppet[:rest_authconfig]) + Puppet.err "Will not start without authorization file #{Puppet[:rest_authconfig]}" + exit(14) + end + + handlers = nil + + if options[:serve].empty? + handlers = [:Runner] + else + handlers = options[:serve] + end + + require 'puppet/network/server' + # No REST handlers yet. + server = Puppet::Network::Server.new(:xmlrpc_handlers => handlers, :port => Puppet[:puppetport]) + + @daemon.server = server + end + + def setup_host + @host = Puppet::SSL::Host.new + waitforcert = options[:waitforcert] || (Puppet[:onetime] ? 0 : 120) + cert = @host.wait_for_cert(waitforcert) unless options[:fingerprint] + end + + def setup + setup_test if options[:test] + + setup_logs + + exit(Puppet.settings.print_configs ? 0 : 1) if Puppet.settings.print_configs? + + # If noop is set, then also enable diffs + Puppet[:show_diff] = true if Puppet[:noop] + + args[:Server] = Puppet[:server] + if options[:fqdn] + args[:FQDN] = options[:fqdn] + Puppet[:certname] = options[:fqdn] + end + + if options[:centrallogs] + logdest = args[:Server] + + logdest += ":" + args[:Port] if args.include?(:Port) + Puppet::Util::Log.newdestination(logdest) + end + + Puppet.settings.use :main, :agent, :ssl + + # Always ignoreimport for agent. It really shouldn't even try to import, + # but this is just a temporary band-aid. + Puppet[:ignoreimport] = true + + # We need to specify a ca location for all of the SSL-related i + # indirected classes to work; in fingerprint mode we just need + # access to the local files and we don't need a ca. + Puppet::SSL::Host.ca_location = options[:fingerprint] ? :none : :remote + + Puppet::Transaction::Report.terminus_class = :rest + # we want the last report to be persisted locally + Puppet::Transaction::Report.cache_class = :yaml + + # Override the default; puppetd needs this, usually. + # You can still override this on the command-line with, e.g., :compiler. + Puppet[:catalog_terminus] = :rest + + # Override the default. + Puppet[:facts_terminus] = :facter + + Puppet::Resource::Catalog.cache_class = :yaml + + + # We need tomake the client either way, we just don't start it + # if --no-client is set. 
+ require 'puppet/agent' + require 'puppet/configurer' + @agent = Puppet::Agent.new(Puppet::Configurer) + + enable_disable_client(@agent) if options[:enable] or options[:disable] + + @daemon.agent = agent if options[:client] + + # It'd be nice to daemonize later, but we have to daemonize before the + # waitforcert happens. + @daemon.daemonize if Puppet[:daemonize] + + setup_host + + @objects = [] + + # This has to go after the certs are dealt with. + if Puppet[:listen] + unless Puppet[:onetime] + setup_listen + else + Puppet.notice "Ignoring --listen on onetime run" + end + end + end +end diff --git a/mcollective/lib/puppet/application/apply.rb b/mcollective/lib/puppet/application/apply.rb new file mode 100644 index 000000000..f2bbcb99b --- /dev/null +++ b/mcollective/lib/puppet/application/apply.rb @@ -0,0 +1,171 @@ +require 'puppet/application' + +class Puppet::Application::Apply < Puppet::Application + + should_parse_config + + option("--debug","-d") + option("--execute EXECUTE","-e") do |arg| + options[:code] = arg + end + option("--loadclasses","-L") + option("--verbose","-v") + option("--use-nodes") + option("--detailed-exitcodes") + + option("--apply catalog", "-a catalog") do |arg| + options[:catalog] = arg + end + + option("--logdest LOGDEST", "-l") do |arg| + begin + Puppet::Util::Log.newdestination(arg) + options[:logset] = true + rescue => detail + $stderr.puts detail.to_s + end + end + + def run_command + if options[:catalog] + apply + elsif Puppet[:parseonly] + parseonly + else + main + end + end + + def apply + if options[:catalog] == "-" + text = $stdin.read + else + text = File.read(options[:catalog]) + end + + begin + catalog = Puppet::Resource::Catalog.convert_from(Puppet::Resource::Catalog.default_format,text) + catalog = Puppet::Resource::Catalog.pson_create(catalog) unless catalog.is_a?(Puppet::Resource::Catalog) + rescue => detail + raise Puppet::Error, "Could not deserialize catalog from pson: #{detail}" + end + + catalog = catalog.to_ral + + require 'puppet/configurer' + configurer = Puppet::Configurer.new + configurer.run :catalog => catalog + end + + def parseonly + # Set our code or file to use. + if options[:code] or command_line.args.length == 0 + Puppet[:code] = options[:code] || STDIN.read + else + Puppet[:manifest] = command_line.args.shift + end + begin + Puppet::Node::Environment.new(Puppet[:environment]).known_resource_types + rescue => detail + Puppet.err detail + exit 1 + end + exit 0 + end + + def main + # Set our code or file to use. + if options[:code] or command_line.args.length == 0 + Puppet[:code] = options[:code] || STDIN.read + else + manifest = command_line.args.shift + raise "Could not find file #{manifest}" unless File.exist?(manifest) + Puppet.warning("Only one file can be applied per run. Skipping #{command_line.args.join(', ')}") if command_line.args.size > 0 + Puppet[:manifest] = manifest + end + + # Collect our facts. + unless facts = Puppet::Node::Facts.find(Puppet[:node_name_value]) + raise "Could not find facts for #{Puppet[:node_name_value]}" + end + + unless Puppet[:node_name_fact].empty? + Puppet[:node_name_value] = facts.values[Puppet[:node_name_fact]] + facts.name = Puppet[:node_name_value] + end + + # Find our Node + unless node = Puppet::Node.find(Puppet[:node_name_value]) + raise "Could not find node #{Puppet[:node_name_value]}" + end + + # Merge in the facts. + node.merge(facts.values) + + # Allow users to load the classes that puppet agent creates. 
+ if options[:loadclasses] + file = Puppet[:classfile] + if FileTest.exists?(file) + unless FileTest.readable?(file) + $stderr.puts "#{file} is not readable" + exit(63) + end + node.classes = File.read(file).split(/[\s\n]+/) + end + end + + begin + # Compile our catalog + starttime = Time.now + catalog = Puppet::Resource::Catalog.find(node.name, :use_node => node) + + # Translate it to a RAL catalog + catalog = catalog.to_ral + + catalog.finalize + + catalog.retrieval_duration = Time.now - starttime + + require 'puppet/configurer' + configurer = Puppet::Configurer.new + report = configurer.run(:skip_plugin_download => true, :catalog => catalog) + + if not report + exit(1) + elsif options[:detailed_exitcodes] then + exit(report.exit_status) + else + exit(0) + end + rescue => detail + puts detail.backtrace if Puppet[:trace] + $stderr.puts detail.message + exit(1) + end + end + + def setup + exit(Puppet.settings.print_configs ? 0 : 1) if Puppet.settings.print_configs? + + # If noop is set, then also enable diffs + Puppet[:show_diff] = true if Puppet[:noop] + + Puppet::Util::Log.newdestination(:console) unless options[:logset] + client = nil + server = nil + + Signal.trap(:INT) do + $stderr.puts "Exiting" + exit(1) + end + + # we want the last report to be persisted locally + Puppet::Transaction::Report.cache_class = :yaml + + if options[:debug] + Puppet::Util::Log.level = :debug + elsif options[:verbose] + Puppet::Util::Log.level = :info + end + end +end diff --git a/mcollective/lib/puppet/application/cert.rb b/mcollective/lib/puppet/application/cert.rb new file mode 100644 index 000000000..04093609b --- /dev/null +++ b/mcollective/lib/puppet/application/cert.rb @@ -0,0 +1,115 @@ +require 'puppet/application' + +class Puppet::Application::Cert < Puppet::Application + + should_parse_config + run_mode :master + + attr_accessor :all, :ca, :digest, :signed + + def subcommand + @subcommand + end + + def subcommand=(name) + # Handle the nasty, legacy mapping of "clean" to "destroy". + sub = name.to_sym + @subcommand = (sub == :clean ? :destroy : sub) + end + + option("--clean", "-c") do + self.subcommand = "destroy" + end + + option("--all", "-a") do + @all = true + end + + option("--digest DIGEST") do |arg| + @digest = arg + end + + option("--signed", "-s") do + @signed = true + end + + option("--debug", "-d") do |arg| + Puppet::Util::Log.level = :debug + end + + require 'puppet/ssl/certificate_authority/interface' + Puppet::SSL::CertificateAuthority::Interface::INTERFACE_METHODS.reject {|m| m == :destroy }.each do |method| + option("--#{method.to_s.gsub('_','-')}", "-#{method.to_s[0,1]}") do + self.subcommand = method + end + end + + option("--[no-]allow-dns-alt-names") do |value| + options[:allow_dns_alt_names] = value + end + + option("--verbose", "-v") do + Puppet::Util::Log.level = :info + end + + def main + if @all + hosts = :all + elsif @signed + hosts = :signed + else + hosts = command_line.args.collect { |h| h.downcase } + end + begin + @ca.apply(:revoke, options.merge(:to => hosts)) if subcommand == :destroy + @ca.apply(subcommand, options.merge(:to => hosts, :digest => @digest)) + rescue => detail + puts detail.backtrace if Puppet[:trace] + puts detail.to_s + exit(24) + end + end + + def setup + require 'puppet/ssl/certificate_authority' + exit(Puppet.settings.print_configs ? 0 : 1) if Puppet.settings.print_configs? + + Puppet::Util::Log.newdestination :console + + if [:generate, :destroy].include? 
subcommand + Puppet::SSL::Host.ca_location = :local + else + Puppet::SSL::Host.ca_location = :only + end + + # If we are generating, and the option came from the CLI, it gets added to + # the data. This will do the right thing for non-local certificates, in + # that the command line but *NOT* the config file option will apply. + if subcommand == :generate + if Puppet.settings.setting(:dns_alt_names).setbycli + options[:dns_alt_names] = Puppet[:dns_alt_names] + end + end + + begin + @ca = Puppet::SSL::CertificateAuthority.new + rescue => detail + puts detail.backtrace if Puppet[:trace] + puts detail.to_s + exit(23) + end + end + + def parse_options + # handle the bareword subcommand pattern. + result = super + unless self.subcommand then + if sub = self.command_line.args.shift then + self.subcommand = sub + else + help + end + end + result + end +end diff --git a/mcollective/lib/puppet/application/describe.rb b/mcollective/lib/puppet/application/describe.rb new file mode 100644 index 000000000..e76b347f6 --- /dev/null +++ b/mcollective/lib/puppet/application/describe.rb @@ -0,0 +1,203 @@ +require 'puppet/application' + +class Formatter + + def initialize(width) + @width = width + end + + def wrap(txt, opts) + return "" unless txt && !txt.empty? + work = (opts[:scrub] ? scrub(txt) : txt) + indent = (opts[:indent] ? opts[:indent] : 0) + textLen = @width - indent + patt = Regexp.new("^(.{0,#{textLen}})[ \n]") + prefix = " " * indent + + res = [] + + while work.length > textLen + if work =~ patt + res << $1 + work.slice!(0, $MATCH.length) + else + res << work.slice!(0, textLen) + end + end + res << work if work.length.nonzero? + prefix + res.join("\n#{prefix}") + end + + def header(txt, sep = "-") + "\n#{txt}\n" + sep * txt.size + end + + private + + def scrub(text) + # For text with no carriage returns, there's nothing to do. + return text if text !~ /\n/ + indent = nil + + # If we can match an indentation, then just remove that same level of + # indent from every line. + if text =~ /^(\s+)/ + indent = $1 + return text.gsub(/^#{indent}/,'') + else + return text + end + end + +end + +class TypeDoc + + def initialize + @format = Formatter.new(76) + @types = {} + Puppet::Type.loadall + Puppet::Type.eachtype { |type| + next if type.name == :component + @types[type.name] = type + } + end + + def list_types + puts "These are the types known to puppet:\n" + @types.keys.sort { |a, b| + a.to_s <=> b.to_s + }.each do |name| + type = @types[name] + s = type.doc.gsub(/\s+/, " ") + n = s.index(".") + if n.nil? + s = ".. no documentation .." + elsif n > 45 + s = s[0, 45] + " ..." 
+ else + s = s[0, n] + end + printf "%-15s - %s\n", name, s + end + end + + def format_type(name, opts) + name = name.to_sym + unless @types.has_key?(name) + puts "Unknown type #{name}" + return + end + type = @types[name] + puts @format.header(name.to_s, "=") + puts @format.wrap(type.doc, :indent => 0, :scrub => true) + "\n\n" + + puts @format.header("Parameters") + if opts[:parameters] + format_attrs(type, [:property, :param]) + else + list_attrs(type, [:property, :param]) + end + + if opts[:meta] + puts @format.header("Meta Parameters") + if opts[:parameters] + format_attrs(type, [:meta]) + else + list_attrs(type, [:meta]) + end + end + + if type.providers.size > 0 + puts @format.header("Providers") + if opts[:providers] + format_providers(type) + else + list_providers(type) + end + end + end + + # List details about attributes + def format_attrs(type, attrs) + docs = {} + type.allattrs.each do |name| + kind = type.attrtype(name) + docs[name] = type.attrclass(name).doc if attrs.include?(kind) && name != :provider + end + + docs.sort { |a,b| + a[0].to_s <=> b[0].to_s + }.each { |name, doc| + print "\n- **#{name}**" + if type.key_attributes.include?(name) and name != :name + puts " (*namevar*)" + else + puts "" + end + puts @format.wrap(doc, :indent => 4, :scrub => true) + } + end + + # List the names of attributes + def list_attrs(type, attrs) + params = [] + type.allattrs.each do |name| + kind = type.attrtype(name) + params << name.to_s if attrs.include?(kind) && name != :provider + end + puts @format.wrap(params.sort.join(", "), :indent => 4) + end + + def format_providers(type) + type.providers.sort { |a,b| + a.to_s <=> b.to_s + }.each { |prov| + puts "\n- **#{prov}**" + puts @format.wrap(type.provider(prov).doc, :indent => 4, :scrub => true) + } + end + + def list_providers(type) + list = type.providers.sort { |a,b| + a.to_s <=> b.to_s + }.join(", ") + puts @format.wrap(list, :indent => 4) + end + +end + +class Puppet::Application::Describe < Puppet::Application + banner "puppet describe [options] [type]" + + should_not_parse_config + + option("--short", "-s", "Only list parameters without detail") do |arg| + options[:parameters] = false + end + + option("--providers","-p") + option("--list", "-l") + option("--meta","-m") + + def preinit + options[:parameters] = true + end + + def main + doc = TypeDoc.new + + if options[:list] + doc.list_types + else + options[:types].each { |name| doc.format_type(name, options) } + end + end + + def setup + options[:types] = command_line.args.dup + handle_help(nil) unless options[:list] || options[:types].size > 0 + $stderr.puts "Warning: ignoring types when listing all types" if options[:list] && options[:types].size > 0 + end + +end diff --git a/mcollective/lib/puppet/application/doc.rb b/mcollective/lib/puppet/application/doc.rb new file mode 100644 index 000000000..aaefd6e75 --- /dev/null +++ b/mcollective/lib/puppet/application/doc.rb @@ -0,0 +1,178 @@ +require 'puppet/application' + +class Puppet::Application::Doc < Puppet::Application + + should_not_parse_config + run_mode :master + + attr_accessor :unknown_args, :manifest + + def preinit + {:references => [], :mode => :text, :format => :to_markdown }.each do |name,value| + options[name] = value + end + @unknown_args = [] + @manifest = false + end + + option("--all","-a") + option("--outputdir OUTPUTDIR","-o") + option("--verbose","-v") + option("--debug","-d") + option("--charset CHARSET") + + option("--format FORMAT", "-f") do |arg| + method = "to_#{arg}" + require 'puppet/util/reference' + 
if Puppet::Util::Reference.method_defined?(method) + options[:format] = method + else + raise "Invalid output format #{arg}" + end + end + + option("--mode MODE", "-m") do |arg| + require 'puppet/util/reference' + if Puppet::Util::Reference.modes.include?(arg) or arg.intern==:rdoc + options[:mode] = arg.intern + else + raise "Invalid output mode #{arg}" + end + end + + option("--list", "-l") do |arg| + require 'puppet/util/reference' + puts Puppet::Util::Reference.references.collect { |r| Puppet::Util::Reference.reference(r).doc }.join("\n") + exit(0) + end + + option("--reference REFERENCE", "-r") do |arg| + options[:references] << arg.intern + end + + def handle_unknown( opt, arg ) + @unknown_args << {:opt => opt, :arg => arg } + true + end + + def run_command + return[:rdoc].include?(options[:mode]) ? send(options[:mode]) : other + end + + def rdoc + exit_code = 0 + files = [] + unless @manifest + env = Puppet::Node::Environment.new + files += env.modulepath + files << File.dirname(env[:manifest]) + end + files += command_line.args + Puppet.info "scanning: #{files.inspect}" + + Puppet.settings.setdefaults( + "puppetdoc", + + "document_all" => [false, "Document all resources"] + ) + Puppet.settings[:document_all] = options[:all] || false + begin + require 'puppet/util/rdoc' + if @manifest + Puppet::Util::RDoc.manifestdoc(files) + else + options[:outputdir] = "doc" unless options[:outputdir] + Puppet::Util::RDoc.rdoc(options[:outputdir], files, options[:charset]) + end + rescue => detail + puts detail.backtrace if Puppet[:trace] + $stderr.puts "Could not generate documentation: #{detail}" + exit_code = 1 + end + exit exit_code + end + + def other + text = "" + with_contents = options[:references].length <= 1 + exit_code = 0 + require 'puppet/util/reference' + options[:references].sort { |a,b| a.to_s <=> b.to_s }.each do |name| + raise "Could not find reference #{name}" unless section = Puppet::Util::Reference.reference(name) + + begin + # Add the per-section text, but with no ToC + text += section.send(options[:format], with_contents) + rescue => detail + puts detail.backtrace + $stderr.puts "Could not generate reference #{name}: #{detail}" + exit_code = 1 + next + end + end + + text += Puppet::Util::Reference.footer unless with_contents # We've only got one reference + + if options[:mode] == :pdf + Puppet::Util::Reference.pdf(text) + else + puts text + end + + exit exit_code + end + + def setup + # sole manifest documentation + if command_line.args.size > 0 + options[:mode] = :rdoc + @manifest = true + end + + if options[:mode] == :rdoc + setup_rdoc + else + setup_reference + end + end + + def setup_reference + if options[:all] + # Don't add dynamic references to the "all" list. + require 'puppet/util/reference' + options[:references] = Puppet::Util::Reference.references.reject do |ref| + Puppet::Util::Reference.reference(ref).dynamic? + end + end + + options[:references] << :type if options[:references].empty? + end + + def setup_rdoc(dummy_argument=:work_arround_for_ruby_GC_bug) + # consume the unknown options + # and feed them as settings + if @unknown_args.size > 0 + @unknown_args.each do |option| + # force absolute path for modulepath when passed on commandline + if option[:opt]=="--modulepath" or option[:opt] == "--manifestdir" + option[:arg] = option[:arg].split(':').collect { |p| File.expand_path(p) }.join(':') + end + Puppet.settings.handlearg(option[:opt], option[:arg]) + end + end + + # Now parse the config + Puppet.parse_config + + # Handle the logging settings. 
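+    # --debug takes precedence over --verbose; either flag also routes log
+    # output to the console.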
+ if options[:debug] or options[:verbose] + if options[:debug] + Puppet::Util::Log.level = :debug + else + Puppet::Util::Log.level = :info + end + + Puppet::Util::Log.newdestination(:console) + end + end +end diff --git a/mcollective/lib/puppet/application/filebucket.rb b/mcollective/lib/puppet/application/filebucket.rb new file mode 100644 index 000000000..5c91c4f64 --- /dev/null +++ b/mcollective/lib/puppet/application/filebucket.rb @@ -0,0 +1,87 @@ +require 'puppet/application' + +class Puppet::Application::Filebucket < Puppet::Application + + should_not_parse_config + + option("--bucket BUCKET","-b") + option("--debug","-d") + option("--local","-l") + option("--remote","-r") + option("--verbose","-v") + + attr :args + + def run_command + @args = command_line.args + command = args.shift + return send(command) if %w{get backup restore}.include? command + help + end + + def get + md5 = args.shift + out = @client.getfile(md5) + print out + end + + def backup + args.each do |file| + unless FileTest.exists?(file) + $stderr.puts "#{file}: no such file" + next + end + unless FileTest.readable?(file) + $stderr.puts "#{file}: cannot read file" + next + end + md5 = @client.backup(file) + puts "#{file}: #{md5}" + end + end + + def restore + file = args.shift + md5 = args.shift + @client.restore(file, md5) + end + + def setup + Puppet::Log.newdestination(:console) + + @client = nil + @server = nil + + Signal.trap(:INT) do + $stderr.puts "Cancelling" + exit(1) + end + + if options[:debug] + Puppet::Log.level = :debug + elsif options[:verbose] + Puppet::Log.level = :info + end + + # Now parse the config + Puppet.parse_config + + exit(Puppet.settings.print_configs ? 0 : 1) if Puppet.settings.print_configs? + + require 'puppet/file_bucket/dipper' + begin + if options[:local] or options[:bucket] + path = options[:bucket] || Puppet[:bucketdir] + @client = Puppet::FileBucket::Dipper.new(:Path => path) + else + @client = Puppet::FileBucket::Dipper.new(:Server => Puppet[:server]) + end + rescue => detail + $stderr.puts detail + puts detail.backtrace if Puppet[:trace] + exit(1) + end + end + +end + diff --git a/mcollective/lib/puppet/application/inspect.rb b/mcollective/lib/puppet/application/inspect.rb new file mode 100644 index 000000000..2260feed7 --- /dev/null +++ b/mcollective/lib/puppet/application/inspect.rb @@ -0,0 +1,181 @@ +require 'puppet/application' + +class Puppet::Application::Inspect < Puppet::Application + + should_parse_config + run_mode :agent + + option("--debug","-d") + option("--verbose","-v") + + option("--logdest LOGDEST", "-l") do |arg| + begin + Puppet::Util::Log.newdestination(arg) + options[:logset] = true + rescue => detail + $stderr.puts detail.to_s + end + end + + def help + puts <<-HELP ; exit # XXX + +SYNOPSIS +======== + +Prepare and submit an inspection report to the puppet master. + + +USAGE +===== + + puppet inspect + + +DESCRIPTION +=========== + +This command uses the cached catalog from the previous run of 'puppet +agent' to determine which attributes of which resources have been +marked as auditable with the 'audit' metaparameter. It then examines +the current state of the system, writes the state of the specified +resource attributes to a report, and submits the report to the puppet +master. + +Puppet inspect does not run as a daemon, and must be run manually or from cron. + + +OPTIONS +======= + +Any configuration setting which is valid in the configuration file is +also a valid long argument, e.g. '--server=master.domain.com'. 
See the +configuration file documentation at +http://docs.puppetlabs.com/references/latest/configuration.html for +the full list of acceptable settings. + + +AUTHOR +====== + +Puppet Labs + + +COPYRIGHT +========= + +Copyright (c) 2011 Puppet Labs, LLC +Licensed under the GNU General Public License version 2 + + HELP + end + + def setup + exit(Puppet.settings.print_configs ? 0 : 1) if Puppet.settings.print_configs? + + raise "Inspect requires reporting to be enabled. Set report=true in puppet.conf to enable reporting." unless Puppet[:report] + + @report = Puppet::Transaction::Report.new("inspect") + + Puppet::Util::Log.newdestination(@report) + Puppet::Util::Log.newdestination(:console) unless options[:logset] + + Signal.trap(:INT) do + $stderr.puts "Exiting" + exit(1) + end + + if options[:debug] + Puppet::Util::Log.level = :debug + elsif options[:verbose] + Puppet::Util::Log.level = :info + end + + Puppet::Transaction::Report.terminus_class = :rest + Puppet::Resource::Catalog.terminus_class = :yaml + end + + def preinit + require 'puppet' + require 'puppet/file_bucket/dipper' + end + + def run_command + benchmark(:notice, "Finished inspection") do + retrieval_starttime = Time.now + + unless catalog = Puppet::Resource::Catalog.find(Puppet[:certname]) + raise "Could not find catalog for #{Puppet[:certname]}" + end + + @report.configuration_version = catalog.version + + inspect_starttime = Time.now + @report.add_times("config_retrieval", inspect_starttime - retrieval_starttime) + + if Puppet[:archive_files] + dipper = Puppet::FileBucket::Dipper.new(:Server => Puppet[:archive_file_server]) + end + + catalog.to_ral.resources.each do |ral_resource| + audited_attributes = ral_resource[:audit] + next unless audited_attributes + + status = Puppet::Resource::Status.new(ral_resource) + + begin + audited_resource = ral_resource.to_resource + rescue StandardError => detail + puts detail.backtrace if Puppet[:trace] + ral_resource.err "Could not inspect #{ral_resource}; skipping: #{detail}" + audited_attributes.each do |name| + event = ral_resource.event( + :property => name, + :status => "failure", + :audited => true, + :message => "failed to inspect #{name}" + ) + status.add_event(event) + end + else + audited_attributes.each do |name| + next if audited_resource[name].nil? + # Skip :absent properties of :absent resources. Really, it would be nicer if the RAL returned nil for those, but it doesn't. 
~JW + if name == :ensure or audited_resource[:ensure] != :absent or audited_resource[name] != :absent + event = ral_resource.event( + :previous_value => audited_resource[name], + :property => name, + :status => "audit", + :audited => true, + :message => "inspected value is #{audited_resource[name].inspect}" + ) + status.add_event(event) + end + end + end + if Puppet[:archive_files] and ral_resource.type == :file and audited_attributes.include?(:content) + path = ral_resource[:path] + if File.readable?(path) + begin + dipper.backup(path) + rescue StandardError => detail + Puppet.warning detail + end + end + end + @report.add_resource_status(status) + end + + finishtime = Time.now + @report.add_times("inspect", finishtime - inspect_starttime) + @report.finalize_report + + begin + @report.save + rescue => detail + puts detail.backtrace if Puppet[:trace] + Puppet.err "Could not send report: #{detail}" + end + end + end +end diff --git a/mcollective/lib/puppet/application/kick.rb b/mcollective/lib/puppet/application/kick.rb new file mode 100644 index 000000000..c055274b6 --- /dev/null +++ b/mcollective/lib/puppet/application/kick.rb @@ -0,0 +1,210 @@ +require 'puppet/application' + +class Puppet::Application::Kick < Puppet::Application + + should_not_parse_config + + attr_accessor :hosts, :tags, :classes + + option("--all","-a") + option("--foreground","-f") + option("--debug","-d") + option("--ping","-P") + option("--test") + + option("--host HOST") do |arg| + @hosts << arg + end + + option("--tag TAG", "-t") do |arg| + @tags << arg + end + + option("--class CLASS", "-c") do |arg| + @classes << arg + end + + option("--no-fqdn", "-n") do |arg| + options[:fqdn] = false + end + + option("--parallel PARALLEL", "-p") do |arg| + begin + options[:parallel] = Integer(arg) + rescue + $stderr.puts "Could not convert #{arg.inspect} to an integer" + exit(23) + end + end + + def run_command + @hosts += command_line.args + options[:test] ? test : main + end + + def test + puts "Skipping execution in test mode" + exit(0) + end + + def main + Puppet.warning "Failed to load ruby LDAP library. LDAP functionality will not be available" unless Puppet.features.ldap? + require 'puppet/util/ldap/connection' + + todo = @hosts.dup + + failures = [] + + # Now do the actual work + go = true + while go + # If we don't have enough children in process and we still have hosts left to + # do, then do the next host. + if @children.length < options[:parallel] and ! todo.empty? + host = todo.shift + pid = fork do + run_for_host(host) + end + @children[pid] = host + else + # Else, see if we can reap a process. + begin + pid = Process.wait + + if host = @children[pid] + # Remove our host from the list of children, so the parallelization + # continues working. + @children.delete(pid) + failures << host if $CHILD_STATUS.exitstatus != 0 + print "#{host} finished with exit code #{$CHILD_STATUS.exitstatus}\n" + else + $stderr.puts "Could not find host for PID #{pid} with status #{$CHILD_STATUS.exitstatus}" + end + rescue Errno::ECHILD + # There are no children left, so just exit unless there are still + # children left to do. + next unless todo.empty? + + if failures.empty? 
+ puts "Finished" + exit(0) + else + puts "Failed: #{failures.join(", ")}" + exit(3) + end + end + end + end + end + + def run_for_host(host) + if options[:ping] + out = %x{ping -c 1 #{host}} + unless $CHILD_STATUS == 0 + $stderr.print "Could not contact #{host}\n" + next + end + end + + require 'puppet/run' + Puppet::Run.indirection.terminus_class = :rest + port = Puppet[:puppetport] + url = ["https://#{host}:#{port}", "production", "run", host].join('/') + + print "Triggering #{host}\n" + begin + run_options = { + :tags => @tags, + :background => ! options[:foreground], + :ignoreschedules => options[:ignoreschedules] + } + run = Puppet::Run.new( run_options ).save( url ) + puts "Getting status" + result = run.status + puts "status is #{result}" + rescue => detail + puts detail.backtrace if Puppet[:trace] + $stderr.puts "Host #{host} failed: #{detail}\n" + exit(2) + end + + case result + when "success"; + exit(0) + when "running" + $stderr.puts "Host #{host} is already running" + exit(3) + else + $stderr.puts "Host #{host} returned unknown answer '#{result}'" + exit(12) + end + end + + def initialize(*args) + super + @hosts = [] + @classes = [] + @tags = [] + end + + def preinit + [:INT, :TERM].each do |signal| + Signal.trap(signal) do + $stderr.puts "Cancelling" + exit(1) + end + end + options[:parallel] = 1 + options[:verbose] = true + options[:fqdn] = true + options[:ignoreschedules] = false + options[:foreground] = false + end + + def setup + if options[:debug] + Puppet::Util::Log.level = :debug + else + Puppet::Util::Log.level = :info + end + + # Now parse the config + Puppet.parse_config + + if Puppet[:node_terminus] == "ldap" and (options[:all] or @classes) + if options[:all] + @hosts = Puppet::Node.search("whatever", :fqdn => options[:fqdn]).collect { |node| node.name } + puts "all: #{@hosts.join(", ")}" + else + @hosts = [] + @classes.each do |klass| + list = Puppet::Node.search("whatever", :fqdn => options[:fqdn], :class => klass).collect { |node| node.name } + puts "#{klass}: #{list.join(", ")}" + + @hosts += list + end + end + elsif ! @classes.empty? + $stderr.puts "You must be using LDAP to specify host classes" + exit(24) + end + + @children = {} + + # If we get a signal, then kill all of our children and get out. 
+ [:INT, :TERM].each do |signal| + Signal.trap(signal) do + Puppet.notice "Caught #{signal}; shutting down" + @children.each do |pid, host| + Process.kill("INT", pid) + end + + waitall + + exit(1) + end + end + + end + +end diff --git a/mcollective/lib/puppet/application/master.rb b/mcollective/lib/puppet/application/master.rb new file mode 100644 index 000000000..467548760 --- /dev/null +++ b/mcollective/lib/puppet/application/master.rb @@ -0,0 +1,173 @@ +require 'puppet/application' + +class Puppet::Application::Master < Puppet::Application + + should_parse_config + run_mode :master + + option("--debug", "-d") + option("--verbose", "-v") + + # internal option, only to be used by ext/rack/config.ru + option("--rack") + + option("--compile host", "-c host") do |arg| + options[:node] = arg + end + + option("--logdest DEST", "-l DEST") do |arg| + begin + Puppet::Util::Log.newdestination(arg) + options[:setdest] = true + rescue => detail + puts detail.backtrace if Puppet[:debug] + $stderr.puts detail.to_s + end + end + + def preinit + Signal.trap(:INT) do + $stderr.puts "Cancelling startup" + exit(0) + end + + # Create this first-off, so we have ARGV + require 'puppet/daemon' + @daemon = Puppet::Daemon.new + @daemon.argv = ARGV.dup + end + + def run_command + if options[:node] + compile + elsif Puppet[:parseonly] + parseonly + else + main + end + end + + def compile + Puppet::Util::Log.newdestination :console + raise ArgumentError, "Cannot render compiled catalogs without pson support" unless Puppet.features.pson? + begin + unless catalog = Puppet::Resource::Catalog.find(options[:node]) + raise "Could not compile catalog for #{options[:node]}" + end + + jj catalog.to_resource + rescue => detail + $stderr.puts detail + exit(30) + end + exit(0) + end + + def parseonly + begin + Puppet::Node::Environment.new(Puppet[:environment]).known_resource_types + rescue => detail + Puppet.err detail + exit 1 + end + exit(0) + end + + def main + require 'etc' + + xmlrpc_handlers = [:Status, :FileServer, :Master, :Report, :Filebucket] + + xmlrpc_handlers << :CA if Puppet[:ca] + + # Make sure we've got a localhost ssl cert + Puppet::SSL::Host.localhost + + # And now configure our server to *only* hit the CA for data, because that's + # all it will have write access to. + Puppet::SSL::Host.ca_location = :only if Puppet::SSL::CertificateAuthority.ca? + + if Puppet.features.root? + begin + Puppet::Util.chuser + rescue => detail + puts detail.backtrace if Puppet[:trace] + $stderr.puts "Could not change user to #{Puppet[:user]}: #{detail}" + exit(39) + end + end + + unless options[:rack] + require 'puppet/network/server' + @daemon.server = Puppet::Network::Server.new(:xmlrpc_handlers => xmlrpc_handlers) + @daemon.daemonize if Puppet[:daemonize] + else + require 'puppet/network/http/rack' + @app = Puppet::Network::HTTP::Rack.new(:xmlrpc_handlers => xmlrpc_handlers, :protocols => [:rest, :xmlrpc]) + end + + Puppet.notice "Starting Puppet master version #{Puppet.version}" + + unless options[:rack] + @daemon.start + else + return @app + end + end + + def setup_logs + # Handle the logging settings. 
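+    # Even with --debug or --verbose, console output is skipped when the master
+    # is daemonized or running under Rack; if no destination was set explicitly,
+    # logging falls back to syslog.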
+ if options[:debug] or options[:verbose] + if options[:debug] + Puppet::Util::Log.level = :debug + else + Puppet::Util::Log.level = :info + end + + unless Puppet[:daemonize] or options[:rack] + Puppet::Util::Log.newdestination(:console) + options[:setdest] = true + end + end + + Puppet::Util::Log.newdestination(:syslog) unless options[:setdest] + end + + def setup_terminuses + require 'puppet/file_serving/content' + require 'puppet/file_serving/metadata' + + # Cache our nodes in yaml. Currently not configurable. + Puppet::Node.cache_class = :yaml + + Puppet::FileServing::Content.indirection.terminus_class = :file_server + Puppet::FileServing::Metadata.indirection.terminus_class = :file_server + + Puppet::FileBucket::File.indirection.terminus_class = :file + end + + def setup_ssl + # Configure all of the SSL stuff. + if Puppet::SSL::CertificateAuthority.ca? + Puppet::SSL::Host.ca_location = :local + Puppet.settings.use :ca + Puppet::SSL::CertificateAuthority.instance + else + Puppet::SSL::Host.ca_location = :none + end + end + + def setup + raise Puppet::Error.new("Puppet master is not supported on Microsoft Windows") if Puppet.features.microsoft_windows? + + setup_logs + + exit(Puppet.settings.print_configs ? 0 : 1) if Puppet.settings.print_configs? + + Puppet.settings.use :main, :master, :ssl, :metrics + + setup_terminuses + + setup_ssl + end +end diff --git a/mcollective/lib/puppet/application/queue.rb b/mcollective/lib/puppet/application/queue.rb new file mode 100644 index 000000000..9039a6e62 --- /dev/null +++ b/mcollective/lib/puppet/application/queue.rb @@ -0,0 +1,102 @@ +require 'puppet/application' +require 'puppet/util' + +class Puppet::Application::Queue < Puppet::Application + should_parse_config + + attr_accessor :daemon + + def preinit + require 'puppet/daemon' + @daemon = Puppet::Daemon.new + @daemon.argv = ARGV.dup + + # Do an initial trap, so that cancels don't get a stack trace. + + # This exits with exit code 1 + Signal.trap(:INT) do + $stderr.puts "Caught SIGINT; shutting down" + exit(1) + end + + # This is a normal shutdown, so code 0 + Signal.trap(:TERM) do + $stderr.puts "Caught SIGTERM; shutting down" + exit(0) + end + + { + :verbose => false, + :debug => false + }.each do |opt,val| + options[opt] = val + end + end + + option("--debug","-d") + option("--verbose","-v") + + option("--logdest DEST", "-l DEST") do |arg| + begin + Puppet::Util::Log.newdestination(arg) + options[:setdest] = true + rescue => detail + puts detail.backtrace if Puppet[:debug] + $stderr.puts detail.to_s + end + end + + def main + require 'puppet/indirector/catalog/queue' # provides Puppet::Indirector::Queue.subscribe + Puppet.notice "Starting puppetqd #{Puppet.version}" + Puppet::Resource::Catalog::Queue.subscribe do |catalog| + # Once you have a Puppet::Resource::Catalog instance, calling save on it should suffice + # to put it through to the database via its active_record indirector (which is determined + # by the terminus_class = :active_record setting above) + Puppet::Util.benchmark(:notice, "Processing queued catalog for #{catalog.name}") do + begin + catalog.save + rescue => detail + puts detail.backtrace if Puppet[:trace] + Puppet.err "Could not save queued catalog for #{catalog.name}: #{detail}" + end + end + end + + Thread.list.each { |thread| thread.join } + end + + # Handle the logging settings. 
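+  # Example (hypothetical) invocation: 'puppet queue --verbose --logdest /var/log/puppet/queue.log'
+  # logs to the console and the named file; with neither --debug/--verbose nor
+  # --logdest, messages go to syslog.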
+ def setup_logs + if options[:debug] or options[:verbose] + Puppet::Util::Log.newdestination(:console) + if options[:debug] + Puppet::Util::Log.level = :debug + else + Puppet::Util::Log.level = :info + end + end + Puppet::Util::Log.newdestination(:syslog) unless options[:setdest] + end + + def setup + unless Puppet.features.stomp? + raise ArgumentError, "Could not load the 'stomp' library, which must be present for queueing to work. You must install the required library." + end + + setup_logs + + exit(Puppet.settings.print_configs ? 0 : 1) if Puppet.settings.print_configs? + + require 'puppet/resource/catalog' + Puppet::Resource::Catalog.terminus_class = :active_record + + daemon.daemonize if Puppet[:daemonize] + + # We want to make sure that we don't have a cache + # class set up, because if storeconfigs is enabled, + # we'll get a loop of continually caching the catalog + # for storage again. + Puppet::Resource::Catalog.cache_class = nil + end +end diff --git a/mcollective/lib/puppet/application/resource.rb b/mcollective/lib/puppet/application/resource.rb new file mode 100644 index 000000000..3e4147e97 --- /dev/null +++ b/mcollective/lib/puppet/application/resource.rb @@ -0,0 +1,128 @@ +require 'puppet/application' + +class Puppet::Application::Resource < Puppet::Application + + should_not_parse_config + + attr_accessor :host, :extra_params + + def preinit + @extra_params = [] + @host = nil + Facter.loadfacts + end + + option("--debug","-d") + option("--verbose","-v") + option("--edit","-e") + + option("--host HOST","-H") do |arg| + @host = arg + end + + option("--types", "-t") do |arg| + types = [] + Puppet::Type.loadall + Puppet::Type.eachtype do |t| + next if t.name == :component + types << t.name.to_s + end + puts types.sort + exit + end + + option("--param PARAM", "-p") do |arg| + @extra_params << arg.to_sym + end + + def main + args = command_line.args + type = args.shift or raise "You must specify the type to display" + typeobj = Puppet::Type.type(type) or raise "Could not find type #{type}" + name = args.shift + params = {} + args.each do |setting| + if setting =~ /^(\w+)=(.+)$/ + params[$1] = $2 + else + raise "Invalid parameter setting #{setting}" + end + end + + raise "You cannot edit a remote host" if options[:edit] and @host + + properties = typeobj.properties.collect { |s| s.name } + + format = proc {|trans| + trans.dup.collect do |param, value| + if value.nil? or value.to_s.empty? + trans.delete(param) + elsif value.to_s == "absent" and param.to_s != "ensure" + trans.delete(param) + end + + trans.delete(param) unless properties.include?(param) or @extra_params.include?(param) + end + trans.to_manifest + } + + if @host + Puppet::Resource.indirection.terminus_class = :rest + port = Puppet[:puppetport] + key = ["https://#{host}:#{port}", "production", "resources", type, name].join('/') + else + key = [type, name].join('/') + end + + text = if name + if params.empty? + [ Puppet::Resource.find( key ) ] + else + [ Puppet::Resource.new( type, name, :parameters => params ).save( key ) ] + end + else + if type == "file" + raise "Listing all file instances is not supported. Please specify a file or directory, e.g. puppet resource file /etc" + end + Puppet::Resource.search( key, {} ) + end.map(&format).join("\n") + + if options[:edit] + require 'tempfile' + # Prefer the current directory, which is more likely to be secure + # and, in the case of interactive use, accessible to the user. 
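+      # Tempfile.new's second argument is the directory to create the file in,
+      # so passing Dir.pwd keeps the editable manifest in the working directory.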
+ tmpfile = Tempfile.new('x2puppet', Dir.pwd) + begin + # sync write, so nothing buffers before we invoke the editor. + tmpfile.sync = true + tmpfile.puts text + + # edit the content + system(ENV["EDITOR"] || 'vi', tmpfile.path) + + # ...and, now, pass that file to puppet to apply. Because + # many editors rename or replace the original file we need to + # feed the pathname, not the file content itself, to puppet. + system('puppet -v ' + tmpfile.path) + ensure + # The temporary file will be safely removed. + tmpfile.close(true) + end + else + puts text + end + end + + def setup + Puppet::Util::Log.newdestination(:console) + + # Now parse the config + Puppet.parse_config + + if options[:debug] + Puppet::Util::Log.level = :debug + elsif options[:verbose] + Puppet::Util::Log.level = :info + end + end +end diff --git a/mcollective/lib/puppet/configurer.rb b/mcollective/lib/puppet/configurer.rb new file mode 100644 index 000000000..7f39a3853 --- /dev/null +++ b/mcollective/lib/puppet/configurer.rb @@ -0,0 +1,250 @@ +# The client for interacting with the puppetmaster config server. +require 'sync' +require 'timeout' +require 'puppet/network/http_pool' +require 'puppet/util' + +class Puppet::Configurer + require 'puppet/configurer/fact_handler' + require 'puppet/configurer/plugin_handler' + + include Puppet::Configurer::FactHandler + include Puppet::Configurer::PluginHandler + + # For benchmarking + include Puppet::Util + + attr_reader :compile_time + + # Provide more helpful strings to the logging that the Agent does + def self.to_s + "Puppet configuration client" + end + + class << self + # Puppetd should only have one instance running, and we need a way + # to retrieve it. + attr_accessor :instance + include Puppet::Util + end + + # How to lock instances of this class. + def self.lockfile_path + Puppet[:puppetdlockfile] + end + + def clear + @catalog.clear(true) if @catalog + @catalog = nil + end + + def execute_postrun_command + execute_from_setting(:postrun_command) + end + + def execute_prerun_command + execute_from_setting(:prerun_command) + end + + # Initialize and load storage + def dostorage + Puppet::Util::Storage.load + @compile_time ||= Puppet::Util::Storage.cache(:configuration)[:compile_time] + rescue => detail + puts detail.backtrace if Puppet[:trace] + Puppet.err "Corrupt state file #{Puppet[:statefile]}: #{detail}" + begin + ::File.unlink(Puppet[:statefile]) + retry + rescue => detail + raise Puppet::Error.new("Cannot remove #{Puppet[:statefile]}: #{detail}") + end + end + + # Just so we can specify that we are "the" instance. + def initialize + Puppet.settings.use(:main, :ssl, :agent) + + self.class.instance = self + @running = false + @splayed = false + end + + # Prepare for catalog retrieval. Downloads everything necessary, etc. + def prepare(options) + dostorage + + download_plugins unless options[:skip_plugin_download] + + download_fact_plugins unless options[:skip_plugin_download] + end + + # Get the remote catalog, yo. Returns nil if no catalog can be found. + def retrieve_catalog(fact_options) + fact_options ||= {} + # First try it with no cache, then with the cache. + unless (Puppet[:use_cached_catalog] and result = retrieve_catalog_from_cache(fact_options)) or result = retrieve_new_catalog(fact_options) + if ! 
Puppet[:usecacheonfailure] + Puppet.warning "Not using cache on failed catalog" + return nil + end + result = retrieve_catalog_from_cache(fact_options) + end + + return nil unless result + + convert_catalog(result, @duration) + end + + # Convert a plain resource catalog into our full host catalog. + def convert_catalog(result, duration) + catalog = result.to_ral + catalog.finalize + catalog.retrieval_duration = duration + catalog.write_class_file + catalog + end + + # Retrieve (optionally) and apply a catalog. If a catalog is passed in + # the options, then apply that one, otherwise retrieve it. + def retrieve_and_apply_catalog(options, fact_options) + unless catalog = (options.delete(:catalog) || retrieve_catalog(fact_options)) + Puppet.err "Could not retrieve catalog; skipping run" + return + end + + report = options[:report] + report.configuration_version = catalog.version + + benchmark(:notice, "Finished catalog run") do + catalog.apply(options) + end + + report.finalize_report + report + end + + # The code that actually runs the catalog. + # This just passes any options on to the catalog, + # which accepts :tags and :ignoreschedules. + def run(options = {}) + options[:report] ||= Puppet::Transaction::Report.new("apply") + report = options[:report] + + Puppet::Util::Log.newdestination(report) + begin + prepare(options) + + if Puppet::Resource::Catalog.indirection.terminus_class == :rest + # This is a bit complicated. We need the serialized and escaped facts, + # and we need to know which format they're encoded in. Thus, we + # get a hash with both of these pieces of information. + fact_options = facts_for_uploading + end + + # set report host name now that we have the fact + report.host = Puppet[:node_name_value] + + begin + execute_prerun_command or return nil + retrieve_and_apply_catalog(options, fact_options) + rescue SystemExit,NoMemoryError + raise + rescue => detail + puts detail.backtrace if Puppet[:trace] + Puppet.err "Failed to apply catalog: #{detail}" + return nil + ensure + execute_postrun_command or return nil + end + ensure + # Make sure we forget the retained module_directories of any autoload + # we might have used. + Thread.current[:env_module_directories] = nil + + # Now close all of our existing http connections, since there's no + # reason to leave them lying open. 
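+        # The pool recreates connections lazily, so the next run simply opens
+        # fresh ones on demand.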
+ Puppet::Network::HttpPool.clear_http_instances + end + ensure + Puppet::Util::Log.close(report) + send_report(report) + end + + def send_report(report) + puts report.summary if Puppet[:summarize] + save_last_run_summary(report) + report.save if Puppet[:report] + rescue => detail + puts detail.backtrace if Puppet[:trace] + Puppet.err "Could not send report: #{detail}" + end + + def save_last_run_summary(report) + Puppet::Util::FileLocking.writelock(Puppet[:lastrunfile], 0660) do |file| + file.print YAML.dump(report.raw_summary) + end + rescue => detail + puts detail.backtrace if Puppet[:trace] + Puppet.err "Could not save last run local report: #{detail}" + end + + private + + def self.timeout + timeout = Puppet[:configtimeout] + case timeout + when String + if timeout =~ /^\d+$/ + timeout = Integer(timeout) + else + raise ArgumentError, "Configuration timeout must be an integer" + end + when Integer # nothing + else + raise ArgumentError, "Configuration timeout must be an integer" + end + + timeout + end + + def execute_from_setting(setting) + return true if (command = Puppet[setting]) == "" + + begin + Puppet::Util.execute([command]) + true + rescue => detail + puts detail.backtrace if Puppet[:trace] + Puppet.err "Could not run command from #{setting}: #{detail}" + false + end + end + + def retrieve_catalog_from_cache(fact_options) + result = nil + @duration = thinmark do + result = Puppet::Resource::Catalog.find(Puppet[:node_name_value], fact_options.merge(:ignore_terminus => true)) + end + Puppet.notice "Using cached catalog" + result + rescue => detail + puts detail.backtrace if Puppet[:trace] + Puppet.err "Could not retrieve catalog from cache: #{detail}" + return nil + end + + def retrieve_new_catalog(fact_options) + result = nil + @duration = thinmark do + result = Puppet::Resource::Catalog.find(Puppet[:node_name_value], fact_options.merge(:ignore_cache => true)) + end + result + rescue SystemExit,NoMemoryError + raise + rescue Exception => detail + puts detail.backtrace if Puppet[:trace] + Puppet.err "Could not retrieve catalog from remote server: #{detail}" + return nil + end +end diff --git a/mcollective/lib/puppet/configurer/downloader.rb b/mcollective/lib/puppet/configurer/downloader.rb new file mode 100644 index 000000000..b3696201a --- /dev/null +++ b/mcollective/lib/puppet/configurer/downloader.rb @@ -0,0 +1,80 @@ +require 'puppet/configurer' +require 'puppet/resource/catalog' + +class Puppet::Configurer::Downloader + attr_reader :name, :path, :source, :ignore + + # Determine the timeout value to use. + def self.timeout + timeout = Puppet[:configtimeout] + case timeout + when String + if timeout =~ /^\d+$/ + timeout = Integer(timeout) + else + raise ArgumentError, "Configuration timeout must be an integer" + end + when Integer # nothing + else + raise ArgumentError, "Configuration timeout must be an integer" + end + + timeout + end + + # Evaluate our download, returning the list of changed values. + def evaluate + Puppet.info "Retrieving #{name}" + + files = [] + begin + Timeout.timeout(self.class.timeout) do + catalog.apply do |trans| + trans.changed?.find_all do |resource| + yield resource if block_given? 
+ files << resource[:path] + end + end + end + rescue Puppet::Error, Timeout::Error => detail + puts detail.backtrace if Puppet[:debug] + Puppet.err "Could not retrieve #{name}: #{detail}" + end + + files + end + + def initialize(name, path, source, ignore = nil) + @name, @path, @source, @ignore = name, path, source, ignore + end + + def catalog + catalog = Puppet::Resource::Catalog.new + catalog.host_config = false + catalog.add_resource(file) + catalog + end + + def file + args = default_arguments.merge(:path => path, :source => source) + args[:ignore] = ignore.split if ignore + Puppet::Type.type(:file).new(args) + end + + private + + def default_arguments + { + :path => path, + :recurse => true, + :source => source, + :tag => name, + :owner => Process.uid, + :group => Process.gid, + :purge => true, + :force => true, + :backup => false, + :noop => false + } + end +end diff --git a/mcollective/lib/puppet/configurer/fact_handler.rb b/mcollective/lib/puppet/configurer/fact_handler.rb new file mode 100644 index 000000000..77bd1e5f1 --- /dev/null +++ b/mcollective/lib/puppet/configurer/fact_handler.rb @@ -0,0 +1,77 @@ +require 'puppet/indirector/facts/facter' + +require 'puppet/configurer/downloader' + +# Break out the code related to facts. This module is +# just included into the agent, but having it here makes it +# easier to test. +module Puppet::Configurer::FactHandler + def download_fact_plugins? + Puppet[:factsync] + end + + def find_facts + # This works because puppet agent configures Facts to use 'facter' for + # finding facts and the 'rest' terminus for caching them. Thus, we'll + # compile them and then "cache" them on the server. + begin + reload_facter + facts = Puppet::Node::Facts.find(Puppet[:node_name_value]) + unless Puppet[:node_name_fact].empty? + Puppet[:node_name_value] = facts.values[Puppet[:node_name_fact]] + facts.name = Puppet[:node_name_value] + end + facts + rescue SystemExit,NoMemoryError + raise + rescue Exception => detail + puts detail.backtrace if Puppet[:trace] + raise Puppet::Error, "Could not retrieve local facts: #{detail}" + end + end + + def facts_for_uploading + facts = find_facts + #format = facts.class.default_format + + if facts.support_format?(:b64_zlib_yaml) + format = :b64_zlib_yaml + else + format = :yaml + end + + text = facts.render(format) + + {:facts_format => format, :facts => CGI.escape(text)} + end + + # Retrieve facts from the central server. + def download_fact_plugins + return unless download_fact_plugins? + + # Deprecated prior to 0.25, as of 5/19/2008 + Puppet.warning "Fact syncing is deprecated as of 0.25 -- use 'pluginsync' instead" + + Puppet::Configurer::Downloader.new("fact", Puppet[:factdest], Puppet[:factsource], Puppet[:factsignore]).evaluate + end + + # Clear out all of the loaded facts and reload them from disk. + # NOTE: This is clumsy and shouldn't be required for later (1.5.x) versions + # of Facter. + def reload_facter + Facter.clear + + # Reload everything. + if Facter.respond_to? :loadfacts + Facter.loadfacts + elsif Facter.respond_to? :load + Facter.load + else + Puppet.warning "You should upgrade your version of Facter to at least 1.3.8" + end + + # This loads all existing facts and any new ones. We have to remove and + # reload because there's no way to unload specific facts. 
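+    # (load_fact_plugins re-requires every custom fact found on the configured
+    # fact search path.)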
+ Puppet::Node::Facts::Facter.load_fact_plugins + end +end diff --git a/mcollective/lib/puppet/configurer/plugin_handler.rb b/mcollective/lib/puppet/configurer/plugin_handler.rb new file mode 100644 index 000000000..ae088f26f --- /dev/null +++ b/mcollective/lib/puppet/configurer/plugin_handler.rb @@ -0,0 +1,33 @@ +# Break out the code related to plugins. This module is +# just included into the agent, but having it here makes it +# easier to test. +module Puppet::Configurer::PluginHandler + def download_plugins? + Puppet[:pluginsync] + end + + # Retrieve facts from the central server. + def download_plugins + return nil unless download_plugins? + plugin_downloader = Puppet::Configurer::Downloader.new( + "plugin", + Puppet[:plugindest], + Puppet[:pluginsource], + Puppet[:pluginsignore] + ) + + plugin_downloader.evaluate.each { |file| load_plugin(file) } + end + + def load_plugin(file) + return unless FileTest.exist?(file) + return if FileTest.directory?(file) + + begin + Puppet.info "Loading downloaded plugin #{file}" + load file + rescue Exception => detail + Puppet.err "Could not load downloaded file #{file}: #{detail}" + end + end +end diff --git a/mcollective/lib/puppet/daemon.rb b/mcollective/lib/puppet/daemon.rb new file mode 100755 index 000000000..2632bc2a8 --- /dev/null +++ b/mcollective/lib/puppet/daemon.rb @@ -0,0 +1,130 @@ +require 'puppet' +require 'puppet/util/pidlock' +require 'puppet/external/event-loop' +require 'puppet/application' + +# A module that handles operations common to all daemons. This is included +# into the Server and Client base classes. +class Puppet::Daemon + attr_accessor :agent, :server, :argv + + def daemonname + Puppet[:name] + end + + # Put the daemon into the background. + def daemonize + if pid = fork + Process.detach(pid) + exit(0) + end + + create_pidfile + + # Get rid of console logging + Puppet::Util::Log.close(:console) + + Process.setsid + Dir.chdir("/") + begin + $stdin.reopen "/dev/null" + $stdout.reopen "/dev/null", "a" + $stderr.reopen $stdout + Puppet::Util::Log.reopen + rescue => detail + Puppet.err "Could not start #{Puppet[:name]}: #{detail}" + Puppet::Util::replace_file("/tmp/daemonout", 0644) do |f| + f.puts "Could not start #{Puppet[:name]}: #{detail}" + end + exit(12) + end + end + + # Create a pidfile for our daemon, so we can be stopped and others + # don't try to start. + def create_pidfile + Puppet::Util.synchronize_on(Puppet[:name],Sync::EX) do + raise "Could not create PID file: #{pidfile}" unless Puppet::Util::Pidlock.new(pidfile).lock + end + end + + # Provide the path to our pidfile. + def pidfile + Puppet[:pidfile] + end + + def reexec + raise Puppet::DevError, "Cannot reexec unless ARGV arguments are set" unless argv + command = $0 + " " + argv.join(" ") + Puppet.notice "Restarting with '#{command}'" + stop(:exit => false) + exec(command) + end + + def reload + return unless agent + if agent.running? + Puppet.notice "Not triggering already-running agent" + return + end + + agent.run + end + + # Remove the pid file for our daemon. + def remove_pidfile + Puppet::Util.synchronize_on(Puppet[:name],Sync::EX) do + locker = Puppet::Util::Pidlock.new(pidfile) + locker.unlock or Puppet.err "Could not remove PID file #{pidfile}" if locker.locked? + end + end + + def restart + Puppet::Application.restart! + reexec unless agent and agent.running? + end + + def reopen_logs + Puppet::Util::Log.reopen + end + + # Trap a couple of the main signals. 
This should probably be handled + # in a way that anyone else can register callbacks for traps, but, eh. + def set_signal_traps + signals = {:INT => :stop, :TERM => :stop } + # extended signals not supported under windows + signals.update({:HUP => :restart, :USR1 => :reload, :USR2 => :reopen_logs }) unless Puppet.features.microsoft_windows? + signals.each do |signal, method| + Signal.trap(signal) do + Puppet.notice "Caught #{signal}; calling #{method}" + send(method) + end + end + end + + # Stop everything + def stop(args = {:exit => true}) + Puppet::Application.stop! + + server.stop if server + + remove_pidfile + + Puppet::Util::Log.close_all + + exit if args[:exit] + end + + def start + set_signal_traps + + create_pidfile + + raise Puppet::DevError, "Daemons must have an agent, server, or both" unless agent or server + server.start if server + agent.start if agent + + EventLoop.current.run + end +end + diff --git a/mcollective/lib/puppet/defaults.rb b/mcollective/lib/puppet/defaults.rb new file mode 100644 index 000000000..f49dd211e --- /dev/null +++ b/mcollective/lib/puppet/defaults.rb @@ -0,0 +1,895 @@ +# The majority of the system configuration parameters are set in this file. +module Puppet + setdefaults(:main, + :confdir => [Puppet.run_mode.conf_dir, "The main Puppet configuration directory. The default for this parameter is calculated based on the user. If the process + is running as root or the user that Puppet is supposed to run as, it defaults to a system directory, but if it's running as any other user, + it defaults to being in the user's home directory."], + :vardir => [Puppet.run_mode.var_dir, "Where Puppet stores dynamic and growing data. The default for this parameter is calculated specially, like `confdir`_."], + :name => [Puppet.application_name.to_s, "The name of the application, if we are running as one. The + default is essentially $0 without the path or `.rb`."], + :run_mode => [Puppet.run_mode.name.to_s, "The effective 'run mode' of the application: master, agent, or user."] + ) + + setdefaults(:main, :logdir => Puppet.run_mode.logopts) + + setdefaults(:main, + :trace => [false, "Whether to print stack traces on some errors"], + :autoflush => { + :default => false, + :desc => "Whether log files should always flush to disk.", + :hook => proc { |value| Log.autoflush = value } + }, + :syslogfacility => ["daemon", "What syslog facility to use when logging to + syslog. Syslog has a fixed list of valid facilities, and you must + choose one of those; you cannot just make one up."], + :statedir => { :default => "$vardir/state", + :mode => 01755, + :desc => "The directory where Puppet state is stored. Generally, + this directory can be removed without causing harm (although it + might result in spurious service restarts)." + }, + :rundir => { + :default => Puppet.run_mode.run_dir, + :mode => 01777, + :desc => "Where Puppet PID files are kept." + }, + :genconfig => [false, + "Whether to just print a configuration to stdout and exit. Only makes + sense when used interactively. Takes into account arguments specified + on the CLI."], + :genmanifest => [false, + "Whether to just print a manifest to stdout and exit. Only makes + sense when used interactively. Takes into account arguments specified + on the CLI."], + :configprint => ["", + "Print the value of a specific configuration parameter. If a + parameter is provided for this, then the value is printed and puppet + exits. Comma-separate multiple values. For a list of all values, + specify 'all'. 
This feature is only available in Puppet versions + higher than 0.18.4."], + :color => ["ansi", "Whether to use colors when logging to the console. + Valid values are `ansi` (equivalent to `true`), `html` (mostly + used during testing with TextMate), and `false`, which produces + no color."], + :mkusers => [false, + "Whether to create the necessary user and group that puppet agent will + run as."], + :manage_internal_file_permissions => [true, + "Whether Puppet should manage the owner, group, and mode of files + it uses internally" + ], + :onetime => {:default => false, + :desc => "Run the configuration once, rather than as a long-running + daemon. This is useful for interactively running puppetd.", + :short => 'o' + }, + :path => {:default => "none", + :desc => "The shell search path. Defaults to whatever is inherited + from the parent process.", + :call_on_define => true, # Call our hook with the default value, so we always get the libdir set. + :hook => proc do |value| + ENV["PATH"] = "" if ENV["PATH"].nil? + ENV["PATH"] = value unless value == "none" + paths = ENV["PATH"].split(File::PATH_SEPARATOR) + %w{/usr/sbin /sbin}.each do |path| + ENV["PATH"] += File::PATH_SEPARATOR + path unless paths.include?(path) + end + value + end + }, + :libdir => {:default => "$vardir/lib", + :desc => "An extra search path for Puppet. This is only useful + for those files that Puppet will load on demand, and is only + guaranteed to work for those cases. In fact, the autoload + mechanism is responsible for making sure this directory + is in Ruby's search path", + :call_on_define => true, # Call our hook with the default value, so we always get the libdir set. + :hook => proc do |value| + $LOAD_PATH.delete(@oldlibdir) if defined?(@oldlibdir) and $LOAD_PATH.include?(@oldlibdir) + @oldlibdir = value + $LOAD_PATH << value + end + }, + :ignoreimport => [false, "A parameter that can be used in commit + hooks, since it enables you to parse-check a single file rather + than requiring that all files exist."], + :authconfig => [ "$confdir/namespaceauth.conf", + "The configuration file that defines the rights to the different + namespaces and methods. This can be used as a coarse-grained + authorization system for both `puppet agent` and `puppet master`." + ], + :environment => {:default => "production", :desc => "The environment Puppet is running in. For clients + (e.g., `puppet agent`) this determines the environment itself, which + is used to find modules and much more. For servers (i.e., `puppet master`) this provides the default environment for nodes + we know nothing about." + }, + :diff_args => ["-u", "Which arguments to pass to the diff command when printing differences between files."], + :diff => ["diff", "Which diff command to use when printing differences between files."], + :show_diff => [false, "Whether to print a contextual diff when files are being replaced. The diff + is printed on stdout, so this option is meaningless unless you are running Puppet interactively. + This feature currently requires the `diff/lcs` Ruby library."], + :daemonize => { :default => true, + :desc => "Send the process into the background. This is the default.", + :short => "D" + }, + :maximum_uid => [4294967290, "The maximum allowed UID. Some platforms use negative UIDs + but then ship with tools that do not know how to handle signed ints, so the UIDs show up as + huge numbers that can then not be fed back into the system. 
This is a hackish way to fail in a + slightly more useful way when that happens."], + :node_terminus => ["plain", "Where to find information about nodes."], + :catalog_terminus => ["compiler", "Where to get node catalogs. This is useful to change if, for instance, + you'd like to pre-compile catalogs and store them in memcached or some other easily-accessed store."], + :facts_terminus => { + :default => Puppet.application_name.to_s == "master" ? 'yaml' : 'facter', + :desc => "The node facts terminus.", + :hook => proc do |value| + require 'puppet/node/facts' + # Cache to YAML if we're uploading facts away + if %w[rest inventory_service].include? value.to_s + Puppet::Node::Facts.cache_class = :yaml + end + end + }, + :inventory_terminus => [ "$facts_terminus", "Should usually be the same as the facts terminus" ], + :httplog => { :default => "$logdir/http.log", + :owner => "root", + :mode => 0640, + :desc => "Where the puppet agent web server logs." + }, + :http_proxy_host => ["none", + "The HTTP proxy host to use for outgoing connections. Note: You + may need to use a FQDN for the server hostname when using a proxy."], + :http_proxy_port => [3128, "The HTTP proxy port to use for outgoing connections"], + :filetimeout => [ 15, + "The minimum time to wait (in seconds) between checking for updates in + configuration files. This timeout determines how quickly Puppet checks whether + a file (such as manifests or templates) has changed on disk." + ], + :queue_type => ["stomp", "Which type of queue to use for asynchronous processing."], + :queue_type => ["stomp", "Which type of queue to use for asynchronous processing."], + :queue_source => ["stomp://localhost:61613/", "Which type of queue to use for asynchronous processing. If your stomp server requires + authentication, you can include it in the URI as long as your stomp client library is at least 1.1.1"], + :async_storeconfigs => {:default => false, :desc => "Whether to use a queueing system to provide asynchronous database integration. + Requires that `puppetqd` be running and that 'PSON' support for ruby be installed.", + :hook => proc do |value| + if value + # This reconfigures the terminii for Node, Facts, and Catalog + Puppet.settings[:storeconfigs] = true + + # But then we modify the configuration + Puppet::Resource::Catalog.cache_class = :queue + else + raise "Cannot disable asynchronous storeconfigs in a running process" + end + end + }, + :thin_storeconfigs => {:default => false, :desc => + "Boolean; wether storeconfigs store in the database only the facts and exported resources. + If true, then storeconfigs performance will be higher and still allow exported/collected + resources, but other usage external to Puppet might not work", + :hook => proc do |value| + Puppet.settings[:storeconfigs] = true if value + end + }, + :config_version => ["", "How to determine the configuration version. By default, it will be the + time that the configuration is parsed, but you can provide a shell script to override how the + version is determined. The output of this script will be added to every log message in the + reports, allowing you to correlate changes on your hosts to the source version on the server."], + :zlib => [true, + "Boolean; whether to use the zlib library", + ], + :prerun_command => ["", "A command to run before every agent run. If this command returns a non-zero + return code, the entire Puppet run will fail."], + :postrun_command => ["", "A command to run after every agent run. 
If this command returns a non-zero + return code, the entire Puppet run will be considered to have failed, even though it might have + performed work during the normal run."], + :freeze_main => [false, "Freezes the 'main' class, disallowing any code to be added to it. This + essentially means that you can't have any code outside of a node, class, or definition other + than in the site manifest."] + ) + + hostname = Facter["hostname"].value + domain = Facter["domain"].value + if domain and domain != "" + fqdn = [hostname, domain].join(".") + else + fqdn = hostname + end + + + Puppet.setdefaults( + :main, + + # We have to downcase the fqdn, because the current ssl stuff (as oppsed to in master) doesn't have good facilities for + # manipulating naming. + :certname => {:default => fqdn.downcase, :desc => "The name to use when handling certificates. Defaults + to the fully qualified domain name.", + :call_on_define => true, # Call our hook with the default value, so we're always downcased + :hook => proc { |value| raise(ArgumentError, "Certificate names must be lower case; see #1168") unless value == value.downcase }}, + :certdnsnames => { + :default => '', + :hook => proc do |value| + unless value.nil? or value == '' then + Puppet.warning < < { + :default => '', + :desc => < { + :default => "$ssldir/certs", + :owner => "service", + :desc => "The certificate directory." + }, + :ssldir => { + :default => "$confdir/ssl", + :mode => 0771, + :owner => "service", + :desc => "Where SSL certificates are kept." + }, + :publickeydir => { + :default => "$ssldir/public_keys", + :owner => "service", + :desc => "The public key directory." + }, + :requestdir => { + :default => "$ssldir/certificate_requests", + :owner => "service", + :desc => "Where host certificate requests are stored." + }, + :privatekeydir => { :default => "$ssldir/private_keys", + :mode => 0750, + :owner => "service", + :desc => "The private key directory." + }, + :privatedir => { :default => "$ssldir/private", + :mode => 0750, + :owner => "service", + :desc => "Where the client stores private certificate information." + }, + :passfile => { :default => "$privatedir/password", + :mode => 0640, + :owner => "service", + :desc => "Where puppet agent stores the password for its private key. + Generally unused." + }, + :hostcsr => { :default => "$ssldir/csr_$certname.pem", + :mode => 0644, + :owner => "service", + :desc => "Where individual hosts store and look for their certificate requests." + }, + :hostcert => { :default => "$certdir/$certname.pem", + :mode => 0644, + :owner => "service", + :desc => "Where individual hosts store and look for their certificates." + }, + :hostprivkey => { :default => "$privatekeydir/$certname.pem", + :mode => 0600, + :owner => "service", + :desc => "Where individual hosts store and look for their private key." + }, + :hostpubkey => { :default => "$publickeydir/$certname.pem", + :mode => 0644, + :owner => "service", + :desc => "Where individual hosts store and look for their public key." + }, + :localcacert => { :default => "$certdir/ca.pem", + :mode => 0644, + :owner => "service", + :desc => "Where each client stores the CA certificate." + }, + :hostcrl => { :default => "$ssldir/crl.pem", + :mode => 0644, + :owner => "service", + :desc => "Where the host's certificate revocation list can be found. + This is distinct from the certificate authority's CRL." 
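# Editorial illustration (not part of the original patch): the $-prefixed defaults
# above are interpolated against other settings, so with the values shown and a
# hypothetical certname of "agent01.example.com" the SSL file layout works out roughly as:
#
#   ssldir      -> $confdir/ssl
#   certdir     -> $confdir/ssl/certs
#   hostcert    -> $confdir/ssl/certs/agent01.example.com.pem
#   hostprivkey -> $confdir/ssl/private_keys/agent01.example.com.pem
#   localcacert -> $confdir/ssl/certs/ca.pem
#   hostcrl     -> $confdir/ssl/crl.pem
#
# "agent01.example.com" is made up for the example; the real value defaults to the
# node's downcased fully qualified domain name (see :certname above).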
+ }, + :certificate_revocation => [true, "Whether certificate revocation should be supported by downloading a Certificate Revocation List (CRL) + to all clients. If enabled, CA chaining will almost definitely not work."] + ) + + setdefaults( + :ca, + :ca_name => ["Puppet CA: $certname", "The name to use the Certificate Authority certificate."], + :cadir => { :default => "$ssldir/ca", + :owner => "service", + :group => "service", + :mode => 0770, + :desc => "The root directory for the certificate authority." + }, + :cacert => { :default => "$cadir/ca_crt.pem", + :owner => "service", + :group => "service", + :mode => 0660, + :desc => "The CA certificate." + }, + :cakey => { :default => "$cadir/ca_key.pem", + :owner => "service", + :group => "service", + :mode => 0660, + :desc => "The CA private key." + }, + :capub => { :default => "$cadir/ca_pub.pem", + :owner => "service", + :group => "service", + :desc => "The CA public key." + }, + :cacrl => { :default => "$cadir/ca_crl.pem", + :owner => "service", + :group => "service", + :mode => 0664, + + :desc => "The certificate revocation list (CRL) for the CA. Will be used if present but otherwise ignored.", + :hook => proc do |value| + if value == 'false' + Puppet.warning "Setting the :cacrl to 'false' is deprecated; Puppet will just ignore the crl if yours is missing" + end + end + }, + :caprivatedir => { :default => "$cadir/private", + :owner => "service", + :group => "service", + :mode => 0770, + :desc => "Where the CA stores private certificate information." + }, + :csrdir => { :default => "$cadir/requests", + :owner => "service", + :group => "service", + :desc => "Where the CA stores certificate requests" + }, + :signeddir => { :default => "$cadir/signed", + :owner => "service", + :group => "service", + :mode => 0770, + :desc => "Where the CA stores signed certificates." + }, + :capass => { :default => "$caprivatedir/ca.pass", + :owner => "service", + :group => "service", + :mode => 0660, + :desc => "Where the CA stores the password for the private key" + }, + :serial => { :default => "$cadir/serial", + :owner => "service", + :group => "service", + :mode => 0644, + :desc => "Where the serial number for certificates is stored." + }, + :autosign => { :default => "$confdir/autosign.conf", + :mode => 0644, + :desc => "Whether to enable autosign. Valid values are true (which + autosigns any key request, and is a very bad idea), false (which + never autosigns any key request), and the path to a file, which + uses that configuration file to determine which keys to sign."}, + :ca_days => ["", "How long a certificate should be valid. + This parameter is deprecated, use ca_ttl instead"], + :ca_ttl => ["5y", "The default TTL for new certificates; valid values + must be an integer, optionally followed by one of the units + 'y' (years of 365 days), 'd' (days), 'h' (hours), or + 's' (seconds). The unit defaults to seconds. If this parameter + is set, ca_days is ignored. Examples are '3600' (one hour) + and '1825d', which is the same as '5y' (5 years) "], + :ca_md => ["md5", "The type of hash used in certificates."], + :req_bits => [2048, "The bit length of the certificates."], + :keylength => [1024, "The bit length of keys."], + :cert_inventory => { + :default => "$cadir/inventory.txt", + :mode => 0644, + :owner => "service", + :group => "service", + :desc => "A Complete listing of all certificates" + } + ) + + # Define the config default. 
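# Editorial sketch (not part of the original patch): the :ca_ttl setting above accepts
# an integer with an optional unit suffix ('y', 'd', 'h', 's'), defaulting to seconds.
# A minimal parser for that format, written only to illustrate the rule; the method
# name is made up and this is not Puppet's own implementation.
def self.illustrative_parse_ttl(ttl)
  units = { 'y' => 365 * 24 * 3600, 'd' => 24 * 3600, 'h' => 3600, 's' => 1 }
  unless ttl.to_s =~ /^(\d+)([ydhs]?)$/
    raise ArgumentError, "Invalid ca_ttl value '#{ttl}'"
  end
  $1.to_i * (units[$2] || 1)
end
# e.g. illustrative_parse_ttl("5y") == illustrative_parse_ttl("1825d") == 157_680_000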
+ + setdefaults( + Puppet.settings[:name], + :config => ["$confdir/puppet.conf", + "The configuration file for #{Puppet[:name]}."], + :pidfile => ["$rundir/$name.pid", "The pid file"], + :bindaddress => ["", "The address a listening server should bind to. Mongrel servers + default to 127.0.0.1 and WEBrick defaults to 0.0.0.0."], + :servertype => {:default => "webrick", :desc => "The type of server to use. Currently supported + options are webrick and mongrel. If you use mongrel, you will need + a proxy in front of the process or processes, since Mongrel cannot + speak SSL.", + + :call_on_define => true, # Call our hook with the default value, so we always get the correct bind address set. + :hook => proc { |value| value == "webrick" ? Puppet.settings[:bindaddress] = "0.0.0.0" : Puppet.settings[:bindaddress] = "127.0.0.1" if Puppet.settings[:bindaddress] == "" } + } + ) + + setdefaults(:master, + :user => ["puppet", "The user puppet master should run as."], + :group => ["puppet", "The group puppet master should run as."], + :manifestdir => ["$confdir/manifests", "Where puppet master looks for its manifests."], + :manifest => ["$manifestdir/site.pp", "The entry-point manifest for puppet master."], + :code => ["", "Code to parse directly. This is essentially only used + by `puppet`, and should only be set if you're writing your own Puppet + executable"], + :masterlog => { :default => "$logdir/puppetmaster.log", + :owner => "service", + :group => "service", + :mode => 0660, + :desc => "Where puppet master logs. This is generally not used, + since syslog is the default log destination." + }, + :masterhttplog => { :default => "$logdir/masterhttp.log", + :owner => "service", + :group => "service", + :mode => 0660, + :create => true, + :desc => "Where the puppet master web server logs." + }, + :masterport => [8140, "Which port puppet master listens on."], + :parseonly => [false, "Just check the syntax of the manifests."], + :node_name => ["cert", "How the puppetmaster determines the client's identity + and sets the 'hostname', 'fqdn' and 'domain' facts for use in the manifest, + in particular for determining which 'node' statement applies to the client. + Possible values are 'cert' (use the subject's CN in the client's + certificate) and 'facter' (use the hostname that the client + reported in its facts)"], + :bucketdir => { + :default => "$vardir/bucket", + :mode => 0750, + :owner => "service", + :group => "service", + :desc => "Where FileBucket files are stored." + }, + :rest_authconfig => [ "$confdir/auth.conf", + "The configuration file that defines the rights to the different + rest indirections. This can be used as a fine-grained + authorization system for `puppet master`." + ], + :ca => [true, "Wether the master should function as a certificate authority."], + :modulepath => {:default => "$confdir/modules:/usr/share/puppet/modules", + :desc => "The search path for modules as a colon-separated list of + directories.", :type => :setting }, # We don't want this to be considered a file, since it's multiple files. + :ssl_client_header => ["HTTP_X_CLIENT_DN", "The header containing an authenticated + client's SSL DN. Only used with Mongrel. This header must be set by the proxy + to the authenticated client's SSL DN (e.g., `/CN=puppet.puppetlabs.com`). + See http://projects.puppetlabs.com/projects/puppet/wiki/Using_Mongrel for more information."], + :ssl_client_verify_header => ["HTTP_X_CLIENT_VERIFY", "The header containing the status + message of the client verification. Only used with Mongrel. 
This header must be set by the proxy + to 'SUCCESS' if the client successfully authenticated, and anything else otherwise. + See http://projects.puppetlabs.com/projects/puppet/wiki/Using_Mongrel for more information."], + # To make sure this directory is created before we try to use it on the server, we need + # it to be in the server section (#1138). + :yamldir => {:default => "$vardir/yaml", :owner => "service", :group => "service", :mode => "750", + :desc => "The directory in which YAML data is stored, usually in a subdirectory."}, + :server_datadir => {:default => "$vardir/server_data", :owner => "service", :group => "service", :mode => "750", + :desc => "The directory in which serialized data is stored, usually in a subdirectory."}, + :reports => ["store", + "The list of reports to generate. All reports are looked for + in `puppet/reports/name.rb`, and multiple report names should be + comma-separated (whitespace is okay)." + ], + :reportdir => {:default => "$vardir/reports", + :mode => 0750, + :owner => "service", + :group => "service", + :desc => "The directory in which to store reports + received from the client. Each client gets a separate + subdirectory."}, + :reporturl => ["http://localhost:3000/reports", + "The URL used by the http reports processor to send reports"], + :fileserverconfig => ["$confdir/fileserver.conf", "Where the fileserver configuration is stored."], + :strict_hostname_checking => [false, "Whether to only search for the complete + hostname as it is in the certificate when searching for node information + in the catalogs."] + ) + + setdefaults(:metrics, + :rrddir => {:default => "$vardir/rrd", + :mode => 0750, + :owner => "service", + :group => "service", + :desc => "The directory where RRD database files are stored. + Directories for each reporting host will be created under + this directory." + }, + :rrdinterval => ["$runinterval", "How often RRD should expect data. + This should match how often the hosts report back to the server."] + ) + + setdefaults(:agent, + :node_name_value => { :default => "$certname", + :desc => "The explicit value used for the node name for all requests the agent + makes to the master. WARNING: This setting is mutually exclusive with + node_name_fact. Changing this setting also requires changes to the default + auth.conf configuration on the Puppet Master. Please see + http://links.puppetlabs.com/node_name_value for more information." + }, + :node_name_fact => { :default => "", + :desc => "The fact name used to determine the node name used for all requests the agent + makes to the master. WARNING: This setting is mutually exclusive with + node_name_value. Changing this setting also requires changes to the default + auth.conf configuration on the Puppet Master. Please see + http://links.puppetlabs.com/node_name_fact for more information.", + :hook => proc do |value| + if !value.empty? and Puppet[:node_name_value] != Puppet[:certname] + raise "Cannot specify both the node_name_value and node_name_fact settings" + end + end + }, + :localconfig => { :default => "$statedir/localconfig", + :owner => "root", + :mode => 0660, + :desc => "Where puppet agent caches the local configuration. An + extension indicating the cache format is added automatically."}, + :statefile => { :default => "$statedir/state.yaml", + :mode => 0660, + :desc => "Where puppet agent and puppet master store state associated + with the running configuration. In the case of puppet master, + this file reflects the state discovered through interacting + with clients." 
+ }, + :clientyamldir => {:default => "$vardir/client_yaml", :mode => "750", :desc => "The directory in which client-side YAML data is stored."}, + :client_datadir => {:default => "$vardir/client_data", :mode => "750", :desc => "The directory in which serialized data is stored on the client."}, + :classfile => { :default => "$statedir/classes.txt", + :owner => "root", + :mode => 0644, + :desc => "The file in which puppet agent stores a list of the classes + associated with the retrieved configuration. Can be loaded in + the separate `puppet` executable using the `--loadclasses` + option."}, + :puppetdlog => { :default => "$logdir/puppetd.log", + :owner => "root", + :mode => 0640, + :desc => "The log file for puppet agent. This is generally not used." + }, + :server => ["puppet", "The server to which server puppet agent should connect"], + :ignoreschedules => [false, + "Boolean; whether puppet agent should ignore schedules. This is useful + for initial puppet agent runs."], + :puppetport => [8139, "Which port puppet agent listens on."], + :noop => [false, "Whether puppet agent should be run in noop mode."], + :runinterval => [1800, # 30 minutes + "How often puppet agent applies the client configuration; in seconds."], + :listen => [false, "Whether puppet agent should listen for + connections. If this is true, then by default only the + `runner` server is started, which allows remote authorized + and authenticated nodes to connect and trigger `puppet agent` + runs."], + :ca_server => ["$server", "The server to use for certificate + authority requests. It's a separate server because it cannot + and does not need to horizontally scale."], + :ca_port => ["$masterport", "The port to use for the certificate authority."], + :catalog_format => { + :default => "", + :desc => "(Deprecated for 'preferred_serialization_format') What format to + use to dump the catalog. Only supports 'marshal' and 'yaml'. Only + matters on the client, since it asks the server for a specific format.", + :hook => proc { |value| + if value + Puppet.warning "Setting 'catalog_format' is deprecated; use 'preferred_serialization_format' instead." + Puppet.settings[:preferred_serialization_format] = value + end + } + }, + :preferred_serialization_format => ["pson", "The preferred means of serializing + ruby instances for passing over the wire. This won't guarantee that all + instances will be serialized using this method, since not all classes + can be guaranteed to support this format, but it will be used for all + classes that support it."], + :puppetdlockfile => [ "$statedir/puppetdlock", "A lock file to temporarily stop puppet agent from doing anything."], + :usecacheonfailure => [true, + "Whether to use the cached configuration when the remote + configuration will not compile. This option is useful for testing + new configurations, where you want to fix the broken configuration + rather than reverting to a known-good one." + ], + :use_cached_catalog => [false, + "Whether to only use the cached catalog rather than compiling a new catalog + on every run. Puppet can be run with this enabled by default and then selectively + disabled when a recompile is desired."], + :ignorecache => [false, + "Ignore cache and always recompile the configuration. This is + useful for testing new configurations, where the local cache may in + fact be stale even if the timestamps are up to date - if the facts + change or if the server changes." 
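# Editorial illustration (not part of the original patch): one plausible reading of how
# the three cache-related settings above (:usecacheonfailure, :use_cached_catalog and
# :ignorecache) interact when an agent picks a catalog. `cached_catalog` and
# `compile_catalog` are hypothetical helpers used only for this sketch:
#
#   def illustrative_pick_catalog
#     if Puppet[:use_cached_catalog] and not Puppet[:ignorecache]
#       return cached_catalog
#     end
#     fresh = begin; compile_catalog; rescue StandardError; nil; end
#     fresh || (Puppet[:usecacheonfailure] ? cached_catalog : nil)
#   end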
+ ], + :downcasefacts => [false, "Whether facts should be made all lowercase when sent to the server."], + :dynamicfacts => ["memorysize,memoryfree,swapsize,swapfree", + "Facts that are dynamic; these facts will be ignored when deciding whether + changed facts should result in a recompile. Multiple facts should be + comma-separated."], + :splaylimit => ["$runinterval", + "The maximum time to delay before runs. Defaults to being the same as the + run interval."], + :splay => [false, + "Whether to sleep for a pseudo-random (but consistent) amount of time before + a run."], + :clientbucketdir => { + :default => "$vardir/clientbucket", + :mode => 0750, + :desc => "Where FileBucket files are stored locally." + }, + :configtimeout => [120, + "How long the client should wait for the configuration to be retrieved + before considering it a failure. This can help reduce flapping if too + many clients contact the server at one time." + ], + :reportserver => { + :default => "$server", + :call_on_define => false, + :desc => "(Deprecated for 'report_server') The server to which to send transaction reports.", + :hook => proc do |value| + Puppet.settings[:report_server] = value if value + end + }, + :report_server => ["$server", + "The server to send transaction reports to." + ], + :report_port => ["$masterport", + "The port to communicate with the report_server." + ], + :inventory_server => ["$server", + "The server to send facts to." + ], + :inventory_port => ["$masterport", + "The port to communicate with the inventory_server." + ], + :report => [false, + "Whether to send reports after every transaction." + ], + :lastrunfile => { :default => "$statedir/last_run_summary.yaml", + :mode => 0660, + :desc => "Where puppet agent stores the last run report summary in yaml format." + }, + :lastrunreport => { :default => "$statedir/last_run_report.yaml", + :mode => 0660, + :desc => "Where puppet agent stores the last run report in yaml format." + }, + :graph => [false, "Whether to create dot graph files for the different + configuration graphs. These dot files can be interpreted by tools + like OmniGraffle or dot (which is part of ImageMagick)."], + :graphdir => ["$statedir/graphs", "Where to store dot-outputted graphs."], + :http_compression => [false, "Allow http compression in REST communication with the master. + This setting might improve performance for agent -> master communications over slow WANs. + Your puppetmaster needs to support compression (usually by activating some settings in a reverse-proxy + in front of the puppetmaster, which rules out webrick). + It is harmless to activate this settings if your master doesn't support + compression, but if it supports it, this setting might reduce performance on high-speed LANs."] + ) + + setdefaults(:inspect, + :archive_files => [false, "During an inspect run, whether to archive files whose contents are audited to a file bucket."], + :archive_file_server => ["$server", "During an inspect run, the file bucket server to archive files to if archive_files is set."] + ) + + # Plugin information. + + setdefaults( + :main, + :plugindest => ["$libdir", + "Where Puppet should store plugins that it pulls down from the central + server."], + :pluginsource => ["puppet://$server/plugins", + "From where to retrieve plugins. 
The standard Puppet `file` type + is used for retrieval, so anything that is a valid file source can + be used here."], + :pluginsync => [false, "Whether plugins should be synced with the central server."], + + :pluginsignore => [".svn CVS .git", "What files to ignore when pulling down plugins."] + ) + + # Central fact information. + + setdefaults( + :main, + :factpath => {:default => "$vardir/lib/facter:$vardir/facts", + :desc => "Where Puppet should look for facts. Multiple directories should + be colon-separated, like normal PATH variables.", + + :call_on_define => true, # Call our hook with the default value, so we always get the value added to facter. + :type => :setting, # Don't consider it a file, because it could be multiple colon-separated files + :hook => proc { |value| Facter.search(value) if Facter.respond_to?(:search) }}, + :factdest => ["$vardir/facts/", + "Where Puppet should store facts that it pulls down from the central + server."], + :factsource => ["puppet://$server/facts/", + "From where to retrieve facts. The standard Puppet `file` type + is used for retrieval, so anything that is a valid file source can + be used here."], + :factsync => [false, "Whether facts should be synced with the central server."], + :factsignore => [".svn CVS", "What files to ignore when pulling down facts."] + ) + + + setdefaults( + :tagmail, + :tagmap => ["$confdir/tagmail.conf", "The mapping between reporting tags and email addresses."], + :sendmail => [which('sendmail') || '', "Where to find the sendmail binary with which to send email."], + + :reportfrom => ["report@" + [Facter["hostname"].value, Facter["domain"].value].join("."), "The 'from' email address for the reports."], + :smtpserver => ["none", "The server through which to send email reports."] + ) + + setdefaults( + :rails, + :dblocation => { :default => "$statedir/clientconfigs.sqlite3", + :mode => 0660, + :owner => "service", + :group => "service", + :desc => "The database cache for client configurations. Used for + querying within the language." + }, + :dbadapter => [ "sqlite3", "The type of database to use." ], + :dbmigrate => [ false, "Whether to automatically migrate the database." ], + :dbname => [ "puppet", "The name of the database to use." ], + :dbserver => [ "localhost", "The database server for caching. Only + used when networked databases are used."], + :dbport => [ "", "The database password for caching. Only + used when networked databases are used."], + :dbuser => [ "puppet", "The database user for caching. Only + used when networked databases are used."], + :dbpassword => [ "puppet", "The database password for caching. Only + used when networked databases are used."], + :dbconnections => [ '', "The number of database connections for networked + databases. Will be ignored unless the value is a positive integer."], + :dbsocket => [ "", "The database socket location. Only used when networked + databases are used. Will be ignored if the value is an empty string."], + :railslog => {:default => "$logdir/rails.log", + :mode => 0600, + :owner => "service", + :group => "service", + :desc => "Where Rails-specific logs are sent" + }, + + :rails_loglevel => ["info", "The log level for Rails connections. The value must be + a valid log level within Rails. 
Production environments normally use `info` + and other environments normally use `debug`."] + ) + + setdefaults( + :couchdb, + + :couchdb_url => ["http://127.0.0.1:5984/puppet", "The url where the puppet couchdb database will be created"] + ) + + setdefaults( + :transaction, + :tags => ["", "Tags to use to find resources. If this is set, then + only resources tagged with the specified tags will be applied. + Values must be comma-separated."], + :evaltrace => [false, "Whether each resource should log when it is + being evaluated. This allows you to interactively see exactly + what is being done."], + :summarize => [false, + + "Whether to print a transaction summary." + ] + ) + + setdefaults( + :main, + :external_nodes => ["none", + + "An external command that can produce node information. The output + must be a YAML dump of a hash, and that hash must have one or both of + `classes` and `parameters`, where `classes` is an array and + `parameters` is a hash. For unknown nodes, the commands should + exit with a non-zero exit code. + + This command makes it straightforward to store your node mapping + information in other data sources like databases."]) + + setdefaults( + :ldap, + :ldapnodes => [false, + "Whether to search for node configurations in LDAP. See + http://projects.puppetlabs.com/projects/puppet/wiki/LDAP_Nodes for more information."], + :ldapssl => [false, + "Whether SSL should be used when searching for nodes. + Defaults to false because SSL usually requires certificates + to be set up on the client side."], + :ldaptls => [false, + "Whether TLS should be used when searching for nodes. + Defaults to false because TLS usually requires certificates + to be set up on the client side."], + :ldapserver => ["ldap", + "The LDAP server. Only used if `ldapnodes` is enabled."], + :ldapport => [389, + "The LDAP port. Only used if `ldapnodes` is enabled."], + + :ldapstring => ["(&(objectclass=puppetClient)(cn=%s))", + "The search string used to find an LDAP node."], + :ldapclassattrs => ["puppetclass", + "The LDAP attributes to use to define Puppet classes. Values + should be comma-separated."], + :ldapstackedattrs => ["puppetvar", + "The LDAP attributes that should be stacked to arrays by adding + the values in all hierarchy elements of the tree. Values + should be comma-separated."], + :ldapattrs => ["all", + "The LDAP attributes to include when querying LDAP for nodes. All + returned attributes are set as variables in the top-level scope. + Multiple values should be comma-separated. The value 'all' returns + all attributes."], + :ldapparentattr => ["parentnode", + "The attribute to use to define the parent node."], + :ldapuser => ["", + "The user to use to connect to LDAP. Must be specified as a + full DN."], + :ldappassword => ["", "The password to use to connect to LDAP."], + :ldapbase => ["", + "The search base for LDAP searches. It's impossible to provide + a meaningful default here, although the LDAP libraries might + have one already set. Generally, it should be the 'ou=Hosts' + branch under your main directory."] + ) + + setdefaults(:master, + :storeconfigs => {:default => false, :desc => "Whether to store each client's configuration. This + requires ActiveRecord from Ruby on Rails.", + :call_on_define => true, # Call our hook with the default value, so we always get the libdir set. 
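# Editorial illustration for the :external_nodes setting above (not part of the original
# patch): a minimal external node classifier. Per the description, it must print a YAML
# hash containing `classes` (an array) and/or `parameters` (a hash) for known nodes and
# exit non-zero for unknown ones. The node, class, and parameter names are made up.
#
#   #!/usr/bin/env ruby
#   require 'yaml'
#   nodes = {
#     "web01.example.com" => {
#       "classes"    => ["nginx"],
#       "parameters" => { "role" => "web" }
#     }
#   }
#   node = nodes[ARGV[0]]
#   exit 1 unless node
#   puts node.to_yaml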
+ :hook => proc do |value| + require 'puppet/node' + require 'puppet/node/facts' + if value + require 'puppet/rails' + raise "StoreConfigs not supported without ActiveRecord 2.1 or higher" unless Puppet.features.rails? + Puppet::Resource::Catalog.cache_class = :active_record unless Puppet.settings[:async_storeconfigs] + Puppet::Node::Facts.cache_class = :active_record + Puppet::Node.cache_class = :active_record + end + end + } + ) + + # This doesn't actually work right now. + + setdefaults( + :parser, + + :lexical => [false, "Whether to use lexical scoping (vs. dynamic)."], + :templatedir => ["$vardir/templates", + "Where Puppet looks for template files. Can be a list of colon-seperated + directories." + ] + ) +end diff --git a/mcollective/lib/puppet/dsl.rb b/mcollective/lib/puppet/dsl.rb new file mode 100644 index 000000000..97a310436 --- /dev/null +++ b/mcollective/lib/puppet/dsl.rb @@ -0,0 +1,7 @@ +require 'puppet' + +module Puppet::DSL +end + +require 'puppet/dsl/resource_type_api' +require 'puppet/dsl/resource_api' diff --git a/mcollective/lib/puppet/dsl/resource_api.rb b/mcollective/lib/puppet/dsl/resource_api.rb new file mode 100644 index 000000000..d82373b0e --- /dev/null +++ b/mcollective/lib/puppet/dsl/resource_api.rb @@ -0,0 +1,120 @@ +# This module adds functionality to a resource to make it +# capable of evaluating the DSL resource type block and also +# hooking into the scope system. +require 'puppet/resource/type_collection_helper' + +class Puppet::DSL::ResourceAPI + include Puppet::Resource::TypeCollectionHelper + + FUNCTION_MAP = {:acquire => :include} + + attr_reader :scope, :resource, :block + + def environment + scope.environment + end + + def evaluate + set_instance_variables + instance_eval(&block) + end + + def initialize(resource, scope, block) + @scope = scope + @resource = resource + @block = block + end + + # Try to convert a missing method into a resource type or a function. + def method_missing(name, *args) + raise "MethodMissing loop when searching for #{name} with #{args.inspect}" if searching_for_method? + @searching_for_method = true + return create_resource(name, args[0], args[1]) if valid_type?(name) + + name = map_function(name) + + return call_function(name, args) if Puppet::Parser::Functions.function(name) + + super + ensure + @searching_for_method = false + end + + def set_instance_variables + resource.eachparam do |param| + instance_variable_set("@#{param.name}", param.value) + end + @title = resource.title + @name ||= resource.title + end + + def create_resource(type, names, arguments = nil) + names = [names] unless names.is_a?(Array) + + arguments ||= {} + raise ArgumentError, "Resource arguments must be provided as a hash" unless arguments.is_a?(Hash) + + names.collect do |name| + resource = Puppet::Parser::Resource.new(type, name, :scope => scope) + arguments.each do |param, value| + resource[param] = value + end + + resource.exported = true if exporting? + resource.virtual = true if virtualizing? 
+ scope.compiler.add_resource(scope, resource) + resource + end + end + + def call_function(name, args) + return false unless method = Puppet::Parser::Functions.function(name) + scope.send(method, *args) + end + + def export(resources = nil, &block) + if resources + resources.each { |resource| resource.exported = true } + return resources + end + @exporting = true + instance_eval(&block) + ensure + @exporting = false + end + + def virtual(resources = nil, &block) + if resources + resources.each { |resource| resource.virtual = true } + return resources + end + @virtualizing = true + instance_eval(&block) + ensure + @virtualizing = false + end + + def valid_type?(name) + return true if [:class, :node].include?(name) + return true if Puppet::Type.type(name) + return(known_resource_types.definition(name) ? true : false) + end + + private + + def exporting? + @exporting + end + + def map_function(name) + FUNCTION_MAP[name] || name + end + + def searching_for_method? + @searching_for_method + end + + def virtualizing? + @virtualizing + end +end diff --git a/mcollective/lib/puppet/dsl/resource_type_api.rb b/mcollective/lib/puppet/dsl/resource_type_api.rb new file mode 100644 index 000000000..ecb914189 --- /dev/null +++ b/mcollective/lib/puppet/dsl/resource_type_api.rb @@ -0,0 +1,46 @@ +require 'puppet/resource/type' + +class Puppet::DSL::ResourceTypeAPI + def define(name, *args, &block) + result = __mk_resource_type__(:definition, name, Hash.new, block) + result.set_arguments(__munge_type_arguments__(args)) + nil + end + + def hostclass(name, options = {}, &block) + __mk_resource_type__(:hostclass, name, options, block) + nil + end + + def node(name, options = {}, &block) + __mk_resource_type__(:node, name, options, block) + nil + end + + # Note: we don't want the user to call the following methods + # directly. However, we can't stop them by making the methods + # private because the user's .rb code gets instance_eval'ed on an + # instance of this class. So instead we name the methods using + # double underscores to discourage customers from calling them. + + def __mk_resource_type__(type, name, options, code) + klass = Puppet::Resource::Type.new(type, name, options) + + klass.ruby_code = code if code + + Thread.current[:known_resource_types].add klass + + klass + end + + def __munge_type_arguments__(args) + args.inject([]) do |result, item| + if item.is_a?(Hash) + item.each { |p, v| result << [p, v] } + else + result << item + end + result + end + end +end diff --git a/mcollective/lib/puppet/error.rb b/mcollective/lib/puppet/error.rb new file mode 100644 index 000000000..d0a0c9cc5 --- /dev/null +++ b/mcollective/lib/puppet/error.rb @@ -0,0 +1,43 @@ +module Puppet # :nodoc: + # The base class for all Puppet errors. We want to make it easy to add + # line and file information. This probably isn't necessary for all + # errors, but... + class Error < RuntimeError + attr_accessor :line, :file + + def backtrace + if defined?(@backtrace) + return @backtrace + else + return super + end + end + + def initialize(message, line = nil, file = nil) + @message = message + + @line = line if line + @file = file if file + end + + def to_s + str = nil + if self.file and self.line + str = "#{@message} at #{@file}:#{@line}" + elsif self.line + str = "#{@message} at line #{@line}" + elsif self.file + str = "#{@message} in #{self.file}" + else + str = @message.to_s + end + + str + end + end + + # An error class for when I don't know what happened. Automatically + # prints a stack trace when in debug mode. 
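# Editorial illustration (not part of the original patch): how the error class above
# formats location information, following its to_s logic:
#
#   Puppet::Error.new("could not parse manifest", 3, "site.pp").to_s
#   # => "could not parse manifest at site.pp:3"
#   Puppet::Error.new("could not parse manifest", 3).to_s
#   # => "could not parse manifest at line 3"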
+ class DevError < Puppet::Error + end +end diff --git a/mcollective/lib/puppet/external/base64.rb b/mcollective/lib/puppet/external/base64.rb new file mode 100755 index 000000000..57359dc18 --- /dev/null +++ b/mcollective/lib/puppet/external/base64.rb @@ -0,0 +1,19 @@ +# a stupid hack class to get rid of all of the warnings but +# still make the encode/decode methods available + +# 1.8.2 has a Base64 class, but 1.8.1 just imports the methods directly +# into Object + +require 'base64' + +unless defined?(Base64) + class Base64 + def Base64.encode64(*args) + Object.method(:encode64).call(*args) + end + + def Base64.decode64(*args) + Object.method(:decode64).call(*args) + end + end +end diff --git a/mcollective/lib/puppet/external/dot.rb b/mcollective/lib/puppet/external/dot.rb new file mode 100644 index 000000000..c0e770ec5 --- /dev/null +++ b/mcollective/lib/puppet/external/dot.rb @@ -0,0 +1,326 @@ +# rdot.rb +# +# +# This is a modified version of dot.rb from Dave Thomas's rdoc project. I [Horst Duchene] +# renamed it to rdot.rb to avoid collision with an installed rdoc/dot. +# +# It also supports undirected edges. + +module DOT + + # These glogal vars are used to make nice graph source. + + $tab = ' ' + $tab2 = $tab * 2 + + # if we don't like 4 spaces, we can change it any time + + def change_tab (t) + $tab = t + $tab2 = t * 2 + end + + # options for node declaration + + NODE_OPTS = [ + # attributes due to + # http://www.graphviz.org/Documentation/dotguide.pdf + # March, 26, 2005 + 'bottomlabel', # auxiliary label for nodes of shape M* + 'color', # default: black; node shape color + 'comment', # any string (format-dependent) + 'distortion', # default: 0.0; node distortion for shape=polygon + 'fillcolor', # default: lightgrey/black; node fill color + 'fixedsize', # default: false; label text has no affect on node size + 'fontcolor', # default: black; type face color + 'fontname', # default: Times-Roman; font family + 'fontsize', # default: 14; point size of label + 'group', # name of node’s group + 'height', # default: .5; height in inches + 'label', # default: node name; any string + 'layer', # default: overlay range; all, id or id:id + 'orientation', # dafault: 0.0; node rotation angle + 'peripheries', # shape-dependent number of node boundaries + 'regular', # default: false; force polygon to be regular + 'shape', # default: ellipse; node shape; see Section 2.1 and Appendix E + 'shapefile', # external EPSF or SVG custom shape file + 'sides', # default: 4; number of sides for shape=polygon + 'skew' , # default: 0.0; skewing of node for shape=polygon + 'style', # graphics options, e.g. bold, dotted, filled; cf. 
Section 2.3 + 'toplabel', # auxiliary label for nodes of shape M* + 'URL', # URL associated with node (format-dependent) + 'width', # default: .75; width in inches + 'z', # default: 0.0; z coordinate for VRML output + + # maintained for backward compatibility or rdot internal + 'bgcolor', + 'rank' + ] + + # options for edge declaration + + EDGE_OPTS = [ + 'arrowhead', # default: normal; style of arrowhead at head end + 'arrowsize', # default: 1.0; scaling factor for arrowheads + 'arrowtail', # default: normal; style of arrowhead at tail end + 'color', # default: black; edge stroke color + 'comment', # any string (format-dependent) + 'constraint', # default: true use edge to affect node ranking + 'decorate', # if set, draws a line connecting labels with their edges + 'dir', # default: forward; forward, back, both, or none + 'fontcolor', # default: black type face color + 'fontname', # default: Times-Roman; font family + 'fontsize', # default: 14; point size of label + 'headlabel', # label placed near head of edge + 'headport', # n,ne,e,se,s,sw,w,nw + 'headURL', # URL attached to head label if output format is ismap + 'label', # edge label + 'labelangle', # default: -25.0; angle in degrees which head or tail label is rotated off edge + 'labeldistance', # default: 1.0; scaling factor for distance of head or tail label from node + 'labelfloat', # default: false; lessen constraints on edge label placement + 'labelfontcolor', # default: black; type face color for head and tail labels + 'labelfontname', # default: Times-Roman; font family for head and tail labels + 'labelfontsize', # default: 14 point size for head and tail labels + 'layer', # default: overlay range; all, id or id:id + 'lhead', # name of cluster to use as head of edge + 'ltail', # name of cluster to use as tail of edge + 'minlen', # default: 1 minimum rank distance between head and tail + 'samehead', # tag for head node; edge heads with the same tag are merged onto the same port + 'sametail', # tag for tail node; edge tails with the same tag are merged onto the same port + 'style', # graphics options, e.g. bold, dotted, filled; cf. Section 2.3 + 'taillabel', # label placed near tail of edge + 'tailport', # n,ne,e,se,s,sw,w,nw + 'tailURL', # URL attached to tail label if output format is ismap + 'weight', # default: 1; integer cost of stretching an edge + + # maintained for backward compatibility or rdot internal + 'id' + ] + + # options for graph declaration + + GRAPH_OPTS = [ + 'bgcolor', + 'center', 'clusterrank', 'color', 'concentrate', + 'fontcolor', 'fontname', 'fontsize', + 'label', 'layerseq', + 'margin', 'mclimit', + 'nodesep', 'nslimit', + 'ordering', 'orientation', + 'page', + 'rank', 'rankdir', 'ranksep', 'ratio', + 'size' + ] + + # a root class for any element in dot notation + + class DOTSimpleElement + + attr_accessor :name + + def initialize (params = {}) + @label = params['name'] ? params['name'] : '' + end + + def to_s + @name + end + end + + # an element that has options ( node, edge, or graph ) + + class DOTElement < DOTSimpleElement + + # attr_reader :parent + attr_accessor :name, :options + + def initialize (params = {}, option_list = []) + super(params) + @name = params['name'] ? params['name'] : nil + @parent = params['parent'] ? 
params['parent'] : nil + @options = {} + option_list.each{ |i| + @options[i] = params[i] if params[i] + } + @options['label'] ||= @name if @name != 'node' + end + + def each_option + @options.each{ |i| yield i } + end + + def each_option_pair + @options.each_pair{ |key, val| yield key, val } + end + + #def parent=( thing ) + # @parent.delete( self ) if defined?( @parent ) and @parent + # @parent = thing + #end + + end + + + # This is used when we build nodes that have shape=record + # ports don't have options :) + + class DOTPort < DOTSimpleElement + + attr_accessor :label + + def initialize (params = {}) + super(params) + @name = params['label'] ? params['label'] : '' + end + + def to_s + ( @name && @name != "" ? "<#{@name}>" : "" ) + "#{@label}" + end + end + + # node element + + class DOTNode < DOTElement + + @ports + + def initialize (params = {}, option_list = NODE_OPTS) + super(params, option_list) + @ports = params['ports'] ? params['ports'] : [] + end + + def each_port + @ports.each { |i| yield i } + end + + def << (thing) + @ports << thing + end + + def push (thing) + @ports.push(thing) + end + + def pop + @ports.pop + end + + def to_s (t = '') + + # This code is totally incomprehensible; it needs to be replaced! + + label = @options['shape'] != 'record' && @ports.length == 0 ? + @options['label'] ? + t + $tab + "label = \"#{@options['label']}\"\n" : + '' : + t + $tab + 'label = "' + " \\\n" + + t + $tab2 + "#{@options['label']}| \\\n" + + @ports.collect{ |i| + t + $tab2 + i.to_s + }.join( "| \\\n" ) + " \\\n" + + t + $tab + '"' + "\n" + + t + "#{@name} [\n" + + @options.to_a.collect{ |i| + i[1] && i[0] != 'label' ? + t + $tab + "#{i[0]} = #{i[1]}" : nil + }.compact.join( ",\n" ) + ( label != '' ? ",\n" : "\n" ) + + label + + t + "]\n" + end + + end + + # A subgraph element is the same to graph, but has another header in dot + # notation. + + class DOTSubgraph < DOTElement + + @nodes + @dot_string + + def initialize (params = {}, option_list = GRAPH_OPTS) + super(params, option_list) + @nodes = params['nodes'] ? params['nodes'] : [] + @dot_string = 'graph' + end + + def each_node + @nodes.each{ |i| yield i } + end + + def << (thing) + @nodes << thing + end + + def push (thing) + @nodes.push( thing ) + end + + def pop + @nodes.pop + end + + def to_s (t = '') + hdr = t + "#{@dot_string} #{@name} {\n" + + options = @options.to_a.collect{ |name, val| + val && name != 'label' ? + t + $tab + "#{name} = #{val}" : + name ? t + $tab + "#{name} = \"#{val}\"" : nil + }.compact.join( "\n" ) + "\n" + + nodes = @nodes.collect{ |i| + i.to_s( t + $tab ) + }.join( "\n" ) + "\n" + hdr + options + nodes + t + "}\n" + end + + end + + # This is a graph. + + class DOTDigraph < DOTSubgraph + + def initialize (params = {}, option_list = GRAPH_OPTS) + super(params, option_list) + @dot_string = 'digraph' + end + + end + + # This is an edge. + + class DOTEdge < DOTElement + + attr_accessor :from, :to + + def initialize (params = {}, option_list = EDGE_OPTS) + super(params, option_list) + @from = params['from'] ? params['from'] : nil + @to = params['to'] ? params['to'] : nil + end + + def edge_link + '--' + end + + def to_s (t = '') + t + "#{@from} #{edge_link} #{to} [\n" + + @options.to_a.collect{ |i| + i[1] && i[0] != 'label' ? + t + $tab + "#{i[0]} = #{i[1]}" : + i[1] ? 
t + $tab + "#{i[0]} = \"#{i[1]}\"" : nil + }.compact.join( "\n" ) + "\n#{t}]\n" + end + + end + + class DOTDirectedEdge < DOTEdge + + def edge_link + '->' + end + + end +end diff --git a/mcollective/lib/puppet/external/event-loop.rb b/mcollective/lib/puppet/external/event-loop.rb new file mode 100644 index 000000000..476fb0ba3 --- /dev/null +++ b/mcollective/lib/puppet/external/event-loop.rb @@ -0,0 +1 @@ +require "puppet/external/event-loop/event-loop" diff --git a/mcollective/lib/puppet/external/event-loop/better-definers.rb b/mcollective/lib/puppet/external/event-loop/better-definers.rb new file mode 100644 index 000000000..ef1d44c53 --- /dev/null +++ b/mcollective/lib/puppet/external/event-loop/better-definers.rb @@ -0,0 +1,367 @@ +## better-definers.rb --- better attribute and method definers +# Copyright (C) 2005 Daniel Brockman + +# This program is free software; you can redistribute it +# and/or modify it under the terms of the GNU General Public +# License as published by the Free Software Foundation; +# either version 2 of the License, or (at your option) any +# later version. + +# This file is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty +# of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. +# See the GNU General Public License for more details. + +# You should have received a copy of the GNU General Public +# License along with this program; if not, write to the Free +# Software Foundation, 51 Franklin Street, Fifth Floor, +# Boston, MA 02110-1301, USA. + +class Symbol + def predicate? + to_s.include? "?" end + def imperative? + to_s.include? "!" end + def writer? + to_s.include? "=" end + + def punctuated? + predicate? or imperative? or writer? end + def without_punctuation + to_s.delete("?!=").to_sym end + + def predicate + without_punctuation.to_s + "?" end + def imperative + without_punctuation.to_s + "!" end + def writer + without_punctuation.to_s + "=" end +end + +class Hash + def collect! (&block) + replace Hash[*collect(&block).flatten] + end + + def flatten + to_a.flatten + end +end + +module Kernel + def returning (value) + yield value ; value + end +end + +class Module + def define_hard_aliases (name_pairs) + for new_aliases, existing_name in name_pairs do + new_aliases.kind_of? Array or new_aliases = [new_aliases] + for new_alias in new_aliases do + alias_method(new_alias, existing_name) + end + end + end + + def define_soft_aliases (name_pairs) + for new_aliases, existing_name in name_pairs do + new_aliases.kind_of? Array or new_aliases = [new_aliases] + for new_alias in new_aliases do + class_eval %{def #{new_alias}(*args, &block) + #{existing_name}(*args, &block) end} + end + end + end + + define_soft_aliases \ + :define_hard_alias => :define_hard_aliases, + :define_soft_alias => :define_soft_aliases + + # This method lets you define predicates like :foo?, + # which will be defined to return the value of @foo. + def define_readers (*names) + for name in names.map { |x| x.to_sym } do + if name.punctuated? + # There's no way to define an efficient reader whose + # name is different from the instance variable. + class_eval %{def #{name} ; @#{name.without_punctuation} end} + else + # Use `attr_reader' to define an efficient method. + attr_reader(name) + end + end + end + + def writer_defined? (name) + method_defined?(name.to_sym.writer) + end + + # If you pass a predicate symbol :foo? to this method, it'll first + # define a regular writer method :foo, without a question mark. 
+ # Then it'll define an imperative writer method :foo! as a shorthand + # for setting the property to true. + def define_writers (*names, &body) + for name in names.map { |x| x.to_sym } do + if block_given? + define_method(name.writer, &body) + else + attr_writer(name.without_punctuation) + end + if name.predicate? + class_eval %{def #{name.imperative} + self.#{name.writer} true end} + end + end + end + + define_soft_aliases \ + :define_reader => :define_readers, + :define_writer => :define_writers + + # We don't need a singular alias for `define_accessors', + # because it always defines at least two methods. + + def define_accessors (*names) + define_readers(*names) + define_writers(*names) + end + + def define_opposite_readers (name_pairs) + name_pairs.collect! { |k, v| [k.to_sym, v.to_sym] } + for opposite_name, name in name_pairs do + define_reader(name) unless method_defined?(name) + class_eval %{def #{opposite_name} ; not #{name} end} + end + end + + def define_opposite_writers (name_pairs) + name_pairs.collect! { |k, v| [k.to_sym, v.to_sym] } + for opposite_name, name in name_pairs do + define_writer(name) unless writer_defined?(name) + class_eval %{def #{opposite_name.writer} x + self.#{name.writer} !x end} + class_eval %{def #{opposite_name.imperative} + self.#{name.writer} false end} + end + end + + define_soft_aliases \ + :define_opposite_reader => :define_opposite_readers, + :define_opposite_writer => :define_opposite_writers + + def define_opposite_accessors (name_pairs) + define_opposite_readers name_pairs + define_opposite_writers name_pairs + end + + def define_reader_with_opposite (name_pair, &body) + name, opposite_name = name_pair.flatten.collect { |x| x.to_sym } + define_method(name, &body) + define_opposite_reader(opposite_name => name) + end + + def define_writer_with_opposite (name_pair, &body) + name, opposite_name = name_pair.flatten.collect { |x| x.to_sym } + define_writer(name, &body) + define_opposite_writer(opposite_name => name) + end + + public :define_method + + def define_methods (*names, &body) + names.each { |name| define_method(name, &body) } + end + + def define_private_methods (*names, &body) + define_methods(*names, &body) + names.each { |name| private name } + end + + def define_protected_methods (*names, &body) + define_methods(*names, &body) + names.each { |name| protected name } + end + + def define_private_method (name, &body) + define_method(name, &body) + private name + end + + def define_protected_method (name, &body) + define_method(name, &body) + protected name + end +end + +class ImmutableAttributeError < StandardError + def initialize (attribute=nil, message=nil) + super message + @attribute = attribute + end + + define_accessors :attribute + + def to_s + if @attribute and @message + "cannot change the value of `#@attribute': #@message" + elsif @attribute + "cannot change the value of `#@attribute'" + elsif @message + "cannot change the value of attribute: #@message" + else + "cannot change the value of attribute" + end + end +end + +class Module + # Guard each of the specified attributes by replacing the writer + # method with a proxy that asks the supplied block before proceeding + # with the change. + # + # If it's okay to change the attribute, the block should return + # either nil or the symbol :mutable. If it isn't okay, the block + # should return a string saying why the attribute can't be changed. + # If you don't want to provide a reason, you can have the block + # return just the symbol :immutable. 
+ def guard_writers(*names, &predicate) + for name in names.map { |x| x.to_sym } do + define_hard_alias("__unguarded_#{name.writer}" => name.writer) + define_method(name.writer) do |new_value| + case result = predicate.call + when :mutable, nil + __send__("__unguarded_#{name.writer}", new_value) + when :immutable + raise ImmutableAttributeError.new(name) + else + raise ImmutableAttributeError.new(name, result) + end + end + end + end + + def define_guarded_writers (*names, &block) + define_writers(*names) + guard_writers(*names, &block) + end + + define_soft_alias :guard_writer => :guard_writers + define_soft_alias :define_guarded_writer => :define_guarded_writers +end + +if __FILE__ == $0 + require "test/unit" + + class DefineAccessorsTest < Test::Unit::TestCase + def setup + @X = Class.new + @Y = Class.new @X + @x = @X.new + @y = @Y.new + end + + def test_define_hard_aliases + @X.define_method(:foo) { 123 } + @X.define_method(:baz) { 321 } + @X.define_hard_aliases :bar => :foo, :quux => :baz + assert_equal @x.foo, 123 + assert_equal @x.bar, 123 + assert_equal @y.foo, 123 + assert_equal @y.bar, 123 + assert_equal @x.baz, 321 + assert_equal @x.quux, 321 + assert_equal @y.baz, 321 + assert_equal @y.quux, 321 + @Y.define_method(:foo) { 456 } + assert_equal @y.foo, 456 + assert_equal @y.bar, 123 + @Y.define_method(:quux) { 654 } + assert_equal @y.baz, 321 + assert_equal @y.quux, 654 + end + + def test_define_soft_aliases + @X.define_method(:foo) { 123 } + @X.define_method(:baz) { 321 } + @X.define_soft_aliases :bar => :foo, :quux => :baz + assert_equal @x.foo, 123 + assert_equal @x.bar, 123 + assert_equal @y.foo, 123 + assert_equal @y.bar, 123 + assert_equal @x.baz, 321 + assert_equal @x.quux, 321 + assert_equal @y.baz, 321 + assert_equal @y.quux, 321 + @Y.define_method(:foo) { 456 } + assert_equal @y.foo, @y.bar, 456 + @Y.define_method(:quux) { 654 } + assert_equal @y.baz, 321 + assert_equal @y.quux, 654 + end + + def test_define_readers + @X.define_readers :foo, :bar + assert !@x.respond_to?(:foo=) + assert !@x.respond_to?(:bar=) + @x.instance_eval { @foo = 123 ; @bar = 456 } + assert_equal @x.foo, 123 + assert_equal @x.bar, 456 + @X.define_readers :baz?, :quux? + assert !@x.respond_to?(:baz=) + assert !@x.respond_to?(:quux=) + @x.instance_eval { @baz = false ; @quux = true } + assert !@x.baz? + assert @x.quux? + end + + def test_define_writers + assert !@X.writer_defined?(:foo) + assert !@X.writer_defined?(:bar) + @X.define_writers :foo, :bar + assert @X.writer_defined?(:foo) + assert @X.writer_defined?(:bar) + assert @X.writer_defined?(:foo=) + assert @X.writer_defined?(:bar=) + assert @X.writer_defined?(:foo?) + assert @X.writer_defined?(:bar?) + assert !@x.respond_to?(:foo) + assert !@x.respond_to?(:bar) + @x.foo = 123 + @x.bar = 456 + assert_equal @x.instance_eval { @foo }, 123 + assert_equal @x.instance_eval { @bar }, 456 + @X.define_writers :baz?, :quux? + assert !@x.respond_to?(:baz?) + assert !@x.respond_to?(:quux?) + @x.baz = true + @x.quux = false + assert_equal @x.instance_eval { @baz }, true + assert_equal @x.instance_eval { @quux }, false + end + + def test_define_accessors + @X.define_accessors :foo, :bar + @x.foo = 123 ; @x.bar = 456 + assert_equal @x.foo, 123 + assert_equal @x.bar, 456 + end + + def test_define_opposite_readers + @X.define_opposite_readers :foo? => :bar?, :baz? => :quux? 
+ assert !@x.respond_to?(:foo=) + assert !@x.respond_to?(:bar=) + assert !@x.respond_to?(:baz=) + assert !@x.respond_to?(:quux=) + @x.instance_eval { @bar = true ; @quux = false } + assert !@x.foo? + assert @x.bar? + assert @x.baz? + assert !@x.quux? + end + + def test_define_opposite_writers + @X.define_opposite_writers :foo? => :bar?, :baz => :quux + end + end +end diff --git a/mcollective/lib/puppet/external/event-loop/event-loop.rb b/mcollective/lib/puppet/external/event-loop/event-loop.rb new file mode 100644 index 000000000..3b40f6e71 --- /dev/null +++ b/mcollective/lib/puppet/external/event-loop/event-loop.rb @@ -0,0 +1,355 @@ +## event-loop.rb --- high-level IO multiplexer +# Copyright (C) 2005 Daniel Brockman + +# This program is free software; you can redistribute it +# and/or modify it under the terms of the GNU General Public +# License as published by the Free Software Foundation; +# either version 2 of the License, or (at your option) any +# later version. + +# This file is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty +# of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. +# See the GNU General Public License for more details. + +# You should have received a copy of the GNU General Public +# License along with this program; if not, write to the Free +# Software Foundation, 51 Franklin Street, Fifth Floor, +# Boston, MA 02110-1301, USA. + +require "puppet/external/event-loop/better-definers" +require "puppet/external/event-loop/signal-system" + +require "fcntl" + +class EventLoop + include SignalEmitter + + IO_STATES = [:readable, :writable, :exceptional] + + class << self + def default ; @default ||= new end + def default= x ; @default = x end + + def current + Thread.current["event-loop::current"] || default end + def current= x + Thread.current["event-loop::current"] = x end + + def with_current (new) + if current == new + yield + else + begin + old = self.current + self.current = new + yield + ensure + self.current = old + end + end + end + + def method_missing (name, *args, &block) + if current.respond_to? name + current.__send__(name, *args, &block) + else + super + end + end + end + + define_signals :before_sleep, :after_sleep + + def initialize + @running = false + @awake = false + @wakeup_time = nil + @timers = [] + + @io_arrays = [[], [], []] + @ios = Hash.new do |h, k| raise ArgumentError, + "invalid IO event: #{k}", caller(2) end + IO_STATES.each_with_index { |x, i| @ios[x] = @io_arrays[i] } + + @notify_src, @notify_snk = IO.pipe + + # prevent file descriptor leaks + if @notify_src.respond_to?(:fcntl) and defined?(Fcntl) and defined?(Fcntl::F_SETFD) and defined?(Fcntl::FD_CLOEXEC) + @notify_src.fcntl(Fcntl::F_SETFD, Fcntl::FD_CLOEXEC) + @notify_snk.fcntl(Fcntl::F_SETFD, Fcntl::FD_CLOEXEC) + end + + @notify_src.will_block = false + @notify_snk.will_block = false + + # Each time a byte is sent through the notification pipe + # we need to read it, or IO.select will keep returning. + monitor_io(@notify_src, :readable) + @notify_src.extend(Watchable) + @notify_src.on_readable do + begin + @notify_src.sysread(256) + rescue Errno::EAGAIN + # The pipe wasn't readable after all. + end + end + end + + define_opposite_accessors \ + :stopped? => :running?, + :sleeping? => :awake? + + def run + if block_given? + thread = Thread.new { run } + yield ; quit ; thread.join + else + running! + iterate while running? 
+ end + ensure + quit + end + + def iterate (user_timeout=nil) + t1, t2 = user_timeout, max_timeout + timeout = t1 && t2 ? [t1, t2].min : t1 || t2 + select(timeout).zip(IO_STATES) do |ios, state| + ios.each { |x| x.signal(state) } if ios + end + end + + private + + def select (timeout) + @wakeup_time = timeout ? Time.now + timeout : nil + # puts "waiting: #{timeout} seconds" + signal :before_sleep ; sleeping! + IO.select(*@io_arrays + [timeout]) || [] + ensure + awake! ; signal :after_sleep + @timers.each { |x| x.sound_alarm if x.ready? } + end + + public + + def quit ; stopped! ; wake_up ; self end + + def monitoring_io? (io, event) + @ios[event].include? io end + def monitoring_timer? (timer) + @timers.include? timer end + + def monitor_io (io, *events) + for event in events do + @ios[event] << io ; wake_up unless monitoring_io?(io, event) + end + end + + def monitor_timer (timer) + @timers << timer unless monitoring_timer? timer + end + + def check_timer (timer) + wake_up if timer.end_time < @wakeup_time + end + + def ignore_io (io, *events) + events = IO_STATES if events.empty? + for event in events do + wake_up if @ios[event].delete(io) + end + end + + def ignore_timer (timer) + # Don't need to wake up for this. + @timers.delete(timer) + end + + def max_timeout + return nil if @timers.empty? + [@timers.collect { |x| x.time_left }.min, 0].max + end + + def wake_up + @notify_snk.write('.') if sleeping? + end +end + +class Symbol + def io_state? + EventLoop::IO_STATES.include? self + end +end + +module EventLoop::Watchable + include SignalEmitter + + define_signals :readable, :writable, :exceptional + + def monitor_events (*events) + EventLoop.monitor_io(self, *events) end + def ignore_events (*events) + EventLoop.ignore_io(self, *events) end + + define_soft_aliases \ + :monitor_event => :monitor_events, + :ignore_event => :ignore_events + + def close ; super + ignore_events end + def close_read ; super + ignore_event :readable end + def close_write ; super + ignore_event :writable end + + module Automatic + include EventLoop::Watchable + + def add_signal_handler (name, &handler) super + monitor_event(name) if name.io_state? + end + + def remove_signal_handler (name, handler) super + if @signal_handlers[name].empty? + ignore_event(name) if name.io_state? + end + end + end +end + +class IO + def on_readable &block + extend EventLoop::Watchable::Automatic + on_readable(&block) + end + + def on_writable &block + extend EventLoop::Watchable::Automatic + on_writable(&block) + end + + def on_exceptional &block + extend EventLoop::Watchable::Automatic + on_exceptional(&block) + end + + def will_block? + if respond_to?(:fcntl) and defined?(Fcntl) and defined?(Fcntl::F_GETFL) and defined?(Fcntl::O_NONBLOCK) + fcntl(Fcntl::F_GETFL, 0) & Fcntl::O_NONBLOCK == 0 + end + end + + def will_block= (wants_blocking) + if respond_to?(:fcntl) and defined?(Fcntl) and defined?(Fcntl::F_GETFL) and defined?(Fcntl::O_NONBLOCK) + flags = fcntl(Fcntl::F_GETFL, 0) + if wants_blocking + flags &= ~Fcntl::O_NONBLOCK + else + flags |= Fcntl::O_NONBLOCK + end + fcntl(Fcntl::F_SETFL, flags) + end + end +end + +class EventLoop::Timer + include SignalEmitter + + DEFAULT_INTERVAL = 0.0 + DEFAULT_TOLERANCE = 0.001 + + def initialize (options={}, &handler) + @running = false + @start_time = nil + + options = { :interval => options } if options.kind_of? 
Numeric + + if options[:interval] + @interval = options[:interval].to_f + else + @interval = DEFAULT_INTERVAL + end + + if options[:tolerance] + @tolerance = options[:tolerance].to_f + elsif DEFAULT_TOLERANCE < @interval + @tolerance = DEFAULT_TOLERANCE + else + @tolerance = 0.0 + end + + @event_loop = options[:event_loop] || EventLoop.current + + if block_given? + add_signal_handler(:alarm, &handler) + start unless options[:start?] == false + else + start if options[:start?] + end + end + + define_readers :interval, :tolerance + define_signal :alarm + + def stopped? ; @start_time == nil end + def running? ; @start_time != nil end + + def interval= (new_interval) + old_interval = @interval + @interval = new_interval + @event_loop.check_timer(self) if new_interval < old_interval + end + + def end_time + @start_time + @interval end + def time_left + end_time - Time.now end + def ready? + time_left <= @tolerance end + + def restart + @start_time = Time.now + end + + def sound_alarm + signal :alarm + restart if running? + end + + def start + @start_time = Time.now + @event_loop.monitor_timer(self) + end + + def stop + @start_time = nil + @event_loop.ignore_timer(self) + end +end + +if __FILE__ == $0 + require "test/unit" + + class TimerTest < Test::Unit::TestCase + def setup + @timer = EventLoop::Timer.new(:interval => 0.001) + end + + def test_timer + @timer.on_alarm do + puts "[#{@timer.time_left} seconds left after alarm]" + EventLoop.quit + end + 8.times do + t0 = Time.now + @timer.start ; EventLoop.run + t1 = Time.now + assert(t1 - t0 > @timer.interval - @timer.tolerance) + end + end + end +end + +## event-loop.rb ends here. diff --git a/mcollective/lib/puppet/external/event-loop/signal-system.rb b/mcollective/lib/puppet/external/event-loop/signal-system.rb new file mode 100644 index 000000000..d3c924bf8 --- /dev/null +++ b/mcollective/lib/puppet/external/event-loop/signal-system.rb @@ -0,0 +1,218 @@ +## signal-system.rb --- simple intra-process signal system +# Copyright (C) 2005 Daniel Brockman + +# This program is free software; you can redistribute it +# and/or modify it under the terms of the GNU General Public +# License as published by the Free Software Foundation; +# either version 2 of the License, or (at your option) any +# later version. + +# This file is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty +# of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. +# See the GNU General Public License for more details. + +# You should have received a copy of the GNU General Public +# License along with this program; if not, write to the Free +# Software Foundation, 51 Franklin Street, Fifth Floor, +# Boston, MA 02110-1301, USA. + +require "puppet/external/event-loop/better-definers" + +module SignalEmitterModule + def self.extended (object) + if object.kind_of? Module and not object < SignalEmitter + if object.respond_to? :fcall + # This is the way to call private methods + # in Ruby 1.9 as of November 16. + object.fcall :include, SignalEmitter + else + object.__send__ :include, SignalEmitter + end + end + end + + def define_signal (name, slot=:before, &body) + # Can't use `define_method' and take a block pre-1.9. + class_eval %{ def on_#{name} &block + add_signal_handler(:#{name}, &block) end } + define_signal_handler(name, :before, &lambda {|*a|}) + define_signal_handler(name, :after, &lambda {|*a|}) + define_signal_handler(name, slot, &body) if block_given? 
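+    # Net effect: an emitter that calls, say, `define_signal :ready` gains an
+    # `on_ready { ... }` registration method plus overridable no-op
+    # `handle_ready` / `after_handle_ready` hooks (see SignalEmitterTest below).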
+ end + + def define_signals (*names, &body) + names.each { |x| define_signal(x, &body) } + end + + def define_signal_handler (name, slot=:before, &body) + case slot + when :before + define_protected_method "handle_#{name}", &body + when :after + define_protected_method "after_handle_#{name}", &body + else + raise ArgumentError, "invalid slot `#{slot.inspect}'; " + + "should be `:before' or `:after'", caller(1) + end + end +end + +# This is an old name for the same thing. +SignalEmitterClass = SignalEmitterModule + +module SignalEmitter + def self.included (includer) + includer.extend SignalEmitterClass if not includer.kind_of? SignalEmitterClass + end + + def __maybe_initialize_signal_emitter + @signal_handlers ||= Hash.new { |h, k| h[k] = Array.new } + @allow_dynamic_signals ||= false + end + + define_accessors :allow_dynamic_signals? + + def add_signal_handler (name, &handler) + __maybe_initialize_signal_emitter + @signal_handlers[name] << handler + handler + end + + define_soft_aliases [:on, :on_signal] => :add_signal_handler + + def remove_signal_handler (name, handler) + __maybe_initialize_signal_emitter + @signal_handlers[name].delete(handler) + end + + def __signal__ (name, *args, &block) + __maybe_initialize_signal_emitter + respond_to? "on_#{name}" or allow_dynamic_signals? or + fail "undefined signal `#{name}' for #{self}:#{self.class}" + __send__("handle_#{name}", *args, &block) if + respond_to? "handle_#{name}" + @signal_handlers[name].each { |x| x.call(*args, &block) } + __send__("after_handle_#{name}", *args, &block) if + respond_to? "after_handle_#{name}" + end + + define_soft_alias :signal => :__signal__ +end + +# This module is indended to be a convenience mixin to be used by +# classes whose objects need to observe foreign signals. That is, +# if you want to observe some signals coming from an object, *you* +# should mix in this module. +# +# You cannot use this module at two different places of the same +# inheritance chain to observe signals coming from the same object. +# +# XXX: This has not seen much use, and I'd like to provide a +# better solution for the problem in the future. +module SignalObserver + def __maybe_initialize_signal_observer + @observed_signals ||= Hash.new do |signals, object| + signals[object] = Hash.new do |handlers, name| + handlers[name] = Array.new + end + end + end + + def observe_signal (subject, name, &handler) + __maybe_initialize_signal_observer + @observed_signals[subject][name] << handler + subject.add_signal_handler(name, &handler) + end + + def map_signals (source, pairs={}) + pairs.each do |src_name, dst_name| + observe_signal(source, src_name) do |*args| + __signal__(dst_name, *args) + end + end + end + + def absorb_signals (subject, *names) + names.each do |name| + observe_signal(subject, name) do |*args| + __signal__(name, *args) + end + end + end + + define_soft_aliases \ + :map_signal => :map_signals, + :absorb_signal => :absorb_signals + + def ignore_signal (subject, name) + __maybe_initialize_signal_observer + __ignore_signal_1(subject, name) + @observed_signals.delete(subject) if + @observed_signals[subject].empty? + end + + def ignore_signals (subject, *names) + __maybe_initialize_signal_observer + names = @observed_signals[subject] if names.empty? 
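+    # With no explicit names, fall back to every signal currently observed
+    # on this subject.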
+ names.each { |x| __ignore_signal_1(subject, x) } + end + + private + + def __ignore_signal_1(subject, name) + @observed_signals[subject][name].each do |handler| + subject.remove_signal_handler(name, handler) end + @observed_signals[subject].delete(name) + end +end + +if __FILE__ == $0 + require "test/unit" + class SignalEmitterTest < Test::Unit::TestCase + class X + include SignalEmitter + define_signal :foo + end + + def setup + @x = X.new + end + + def test_on_signal + moomin = 0 + @x.on_signal(:foo) { moomin = 1 } + @x.signal :foo + assert moomin == 1 + end + + def test_on_foo + moomin = 0 + @x.on_foo { moomin = 1 } + @x.signal :foo + assert moomin == 1 + end + + def test_multiple_on_signal + moomin = 0 + @x.on_signal(:foo) { moomin += 1 } + @x.on_signal(:foo) { moomin += 2 } + @x.on_signal(:foo) { moomin += 4 } + @x.on_signal(:foo) { moomin += 8 } + @x.signal :foo + assert moomin == 15 + end + + def test_multiple_on_foo + moomin = 0 + @x.on_foo { moomin += 1 } + @x.on_foo { moomin += 2 } + @x.on_foo { moomin += 4 } + @x.on_foo { moomin += 8 } + @x.signal :foo + assert moomin == 15 + end + end +end + +## application-signals.rb ends here. diff --git a/mcollective/lib/puppet/external/lock.rb b/mcollective/lib/puppet/external/lock.rb new file mode 100644 index 000000000..024fedf3d --- /dev/null +++ b/mcollective/lib/puppet/external/lock.rb @@ -0,0 +1,63 @@ +require 'thread' +require 'sync' + +# Gotten from: +# http://path.berkeley.edu/~vjoel/ruby/solaris-bug.rb + +# Extensions to the File class for exception-safe file locking in a +# environment with multiple user threads. + +# This is here because closing a file on solaris unlocks any locks that +# other threads might have. So we have to make sure that only the last +# reader thread closes the file. +# +# The hash maps inode number to a count of reader threads +$reader_count = Hash.new(0) + +class File + # Get an exclusive (i.e., write) lock on the file, and yield to the block. + # If the lock is not available, wait for it without blocking other ruby + # threads. + def lock_exclusive + if Thread.list.size == 1 + flock(LOCK_EX) + else + # ugly hack because waiting for a lock in a Ruby thread blocks the + # process + period = 0.001 + until flock(LOCK_EX|LOCK_NB) + sleep period + period *= 2 if period < 1 + end + end + + yield self + ensure + flush + flock(LOCK_UN) + end + + # Get a shared (i.e., read) lock on the file, and yield to the block. + # If the lock is not available, wait for it without blocking other ruby + # threads. 
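+  # Illustrative sketch only (the path is hypothetical):
+  #   File.open("/tmp/shared.dat", "r") do |f|
+  #     f.lock_shared { |locked| locked.read }
+  #   end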
+ def lock_shared + if Thread.list.size == 1 + flock(LOCK_SH) + else + # ugly hack because waiting for a lock in a Ruby thread blocks the + # process + period = 0.001 + until flock(LOCK_SH|LOCK_NB) + sleep period + period *= 2 if period < 1 + end + end + + yield self + ensure + Thread.exclusive {flock(LOCK_UN) if $reader_count[self.stat.ino] == 1} + ## for solaris, no need to unlock here--closing does it + ## but this has no effect on the bug + end +end + diff --git a/mcollective/lib/puppet/external/nagios.rb b/mcollective/lib/puppet/external/nagios.rb new file mode 100755 index 000000000..2ed829198 --- /dev/null +++ b/mcollective/lib/puppet/external/nagios.rb @@ -0,0 +1,48 @@ +#!/usr/bin/env ruby -w + +#-------------------- +# A script to retrieve hosts from ldap and create an importable +# cfservd file from them + +require 'digest/md5' +#require 'ldap' +require 'puppet/external/nagios/parser.rb' +require 'puppet/external/nagios/base.rb' + +module Nagios + NAGIOSVERSION = '1.1' + # yay colors + PINK = "" + GREEN = "" + YELLOW = "" + SLATE = "" + ORANGE = "" + BLUE = "" + NOCOLOR = "" + RESET = "" + + def self.version + NAGIOSVERSION + end + + class Config + def Config.import(config) + + text = String.new + + File.open(config) { |file| + file.each { |line| + text += line + } + } + parser = Nagios::Parser.new + parser.parse(text) + end + + def Config.each + Nagios::Object.objects.each { |object| + yield object + } + end + end +end diff --git a/mcollective/lib/puppet/external/nagios/base.rb b/mcollective/lib/puppet/external/nagios/base.rb new file mode 100755 index 000000000..e98760e01 --- /dev/null +++ b/mcollective/lib/puppet/external/nagios/base.rb @@ -0,0 +1,472 @@ +# The base class for all of our Nagios object types. Everything else +# is mostly just data. +class Nagios::Base + + class UnknownNagiosType < RuntimeError # When an unknown type is asked for by name. + end + + include Enumerable + + class << self + attr_accessor :parameters, :derivatives, :ocs, :name, :att + attr_accessor :ldapbase + + attr_writer :namevar + + attr_reader :superior + end + + # Attach one class to another. + def self.attach(hash) + @attach ||= {} + hash.each do |n, v| @attach[n] = v end + end + + # Convert a parameter to camelcase + def self.camelcase(param) + param.gsub(/_./) do |match| + match.sub(/_/,'').capitalize + end + end + + # Uncamelcase a parameter. + def self.decamelcase(param) + param.gsub(/[A-Z]/) do |match| + "_#{match.downcase}" + end + end + + # Create a new instance of a given class. + def self.create(name, args = {}) + name = name.intern if name.is_a? String + + if @types.include?(name) + @types[name].new(args) + else + raise UnknownNagiosType, "Unknown type #{name}" + end + end + + # Yield each type in turn. + def self.eachtype + @types.each do |name, type| + yield [name, type] + end + end + + # Create a mapping. + def self.map(hash) + @map ||= {} + hash.each do |n, v| @map[n] = v end + end + + # Return a mapping (or nil) for a param + def self.mapping(name) + name = name.intern if name.is_a? String + if defined?(@map) + @map[name] + else + nil + end + end + + # Return the namevar for the canonical name. + def self.namevar + if defined?(@namevar) + return @namevar + else + if parameter?(:name) + return :name + elsif tmp = (self.name.to_s + "_name").intern and parameter?(tmp) + @namevar = tmp + return @namevar + else + raise "Type #{self.name} has no name var" + end + end + end + + # Create a new type. + def self.newtype(name, &block) + name = name.intern if name.is_a? 
String + + @types ||= {} + + # Create the class, with the correct name. + t = Class.new(self) + t.name = name + + # Everyone gets this. There should probably be a better way, and I + # should probably hack the attribute system to look things up based on + # this "use" setting, but, eh. + t.parameters = [:use] + + const_set(name.to_s.capitalize,t) + + # Evaluate the passed block. This should usually define all of the work. + t.class_eval(&block) + + @types[name] = t + end + + # Define both the normal case and camelcase method for a parameter + def self.paramattr(name) + camel = camelcase(name) + param = name + + [name, camel].each do |method| + define_method(method) do + @parameters[param] + end + + define_method(method.to_s + "=") do |value| + @parameters[param] = value + end + end + + end + + # Is the specified name a valid parameter? + def self.parameter?(name) + name = name.intern if name.is_a? String + @parameters.include?(name) + end + + # Manually set the namevar + def self.setnamevar(name) + name = name.intern if name.is_a? String + @namevar = name + end + + # Set the valid parameters for this class + def self.setparameters(*array) + @parameters += array + end + + # Set the superior ldap object class. Seems silly to include this + # in this class, but, eh. + def self.setsuperior(name) + @superior = name + end + + # Parameters to suppress in output. + def self.suppress(name) + @suppress ||= [] + @suppress << name + end + + # Whether a given parameter is suppressed. + def self.suppress?(name) + defined?(@suppress) and @suppress.include?(name) + end + + # Return our name as the string. + def self.to_s + self.name.to_s + end + + # Return a type by name. + def self.type(name) + name = name.intern if name.is_a? String + + @types[name] + end + + # Convenience methods. + def [](param) + send(param) + end + + # Convenience methods. + def []=(param,value) + send(param.to_s + "=", value) + end + + # Iterate across all ofour set parameters. + def each + @parameters.each { |param,value| + yield(param,value) + } + end + + # Initialize our object, optionally with a list of parameters. + def initialize(args = {}) + @parameters = {} + + args.each { |param,value| + self[param] = value + } + if @namevar == :_naginator_name + self['_naginator_name'] = self['name'] + end + end + + # Handle parameters like attributes. + def method_missing(mname, *args) + pname = mname.to_s + pname.sub!(/=/, '') + + if self.class.parameter?(pname) + if pname =~ /A-Z/ + pname = self.class.decamelcase(pname) + end + self.class.paramattr(pname) + + # Now access the parameters directly, to make it at least less + # likely we'll end up in an infinite recursion. + if mname.to_s =~ /=$/ + @parameters[pname] = *args + else + return @parameters[mname] + end + else + super + end + end + + # Retrieve our name, through a bit of redirection. + def name + send(self.class.namevar) + end + + # This is probably a bad idea. 
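+  # Assigning name delegates to the type-specific namevar writer (for a host
+  # that would be host_name=); when the namevar already is :name this is a
+  # deliberate no-op.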
+ def name=(value) + unless self.class.namevar.to_s == "name" + send(self.class.namevar.to_s + "=", value) + end + end + + def namevar + (self.type + "_name").intern + end + + def parammap(param) + unless defined?(@map) + map = { + self.namevar => "cn" + } + map.update(self.class.map) if self.class.map + end + if map.include?(param) + return map[param] + else + return "nagios-" + param.id2name.gsub(/_/,'-') + end + end + + def parent + unless defined?(self.class.attached) + puts "Duh, you called parent on an unattached class" + return + end + + klass,param = self.class.attached + unless @parameters.include?(param) + puts "Huh, no attachment param" + return + end + klass[@parameters[param]] + end + + # okay, this sucks + # how do i get my list of ocs? + def to_ldif + base = self.class.ldapbase + str = self.dn + "\n" + ocs = Array.new + if self.class.ocs + # i'm storing an array, so i have to flatten it and stuff + kocs = self.class.ocs + ocs.push(*kocs) + end + ocs.push "top" + oc = self.class.to_s + oc.sub!(/Nagios/,'nagios') + oc.sub!(/::/,'') + ocs.push oc + ocs.each { |oc| + str += "objectclass: #{oc}\n" + } + @parameters.each { |name,value| + next if self.class.suppress.include?(name) + ldapname = self.parammap(name) + str += ldapname + ": #{value}\n" + } + str += "\n" + end + + def to_s + str = "define #{self.type} {\n" + + self.each { |param,value| + str += %{\t%-30s %s\n} % [ param, + if value.is_a? Array + value.join(",") + else + value + end + ] + } + + str += "}\n" + + str + end + + # The type of object we are. + def type + self.class.name + end + + # object types + newtype :host do + setparameters :host_name, :alias, :display_name, :address, :parents, + :hostgroups, :check_command, :initial_state, :max_check_attempts, + :check_interval, :retry_interval, :active_checks_enabled, + :passive_checks_enabled, :check_period, :obsess_over_host, + :check_freshness, :freshness_threshold, :event_handler, + :event_handler_enabled, :low_flap_threshold, :high_flap_threshold, + :flap_detection_enabled, :flap_detection_options, + :failure_prediction_enabled, :process_perf_data, + :retain_status_information, :retain_nonstatus_information, :contacts, + :contact_groups, :notification_interval, :first_notification_delay, + :notification_period, :notification_options, :notifications_enabled, + :stalking_options, :notes, :notes_url, :action_url, :icon_image, + :icon_image_alt, :vrml_image, :statusmap_image, "2d_coords".intern, + "3d_coords".intern, + :register, :use + + setsuperior "person" + map :address => "ipHostNumber" + end + + newtype :hostgroup do + setparameters :hostgroup_name, :alias, :members, :hostgroup_members, :notes, + :notes_url, :action_url, + :register, :use + end + + newtype :service do + attach :host => :host_name + setparameters :host_name, :hostgroup_name, :service_description, + :display_name, :servicegroups, :is_volatile, :check_command, + :initial_state, :max_check_attempts, :check_interval, :retry_interval, + :normal_check_interval, :retry_check_interval, :active_checks_enabled, + :passive_checks_enabled, :parallelize_check, :check_period, + :obsess_over_service, :check_freshness, :freshness_threshold, + :event_handler, :event_handler_enabled, :low_flap_threshold, + :high_flap_threshold, :flap_detection_enabled,:flap_detection_options, + :process_perf_data, :failure_prediction_enabled, :retain_status_information, + :retain_nonstatus_information, :notification_interval, + :first_notification_delay, :notification_period, :notification_options, + :notifications_enabled, 
:contacts, :contact_groups, :stalking_options, + :notes, :notes_url, :action_url, :icon_image, :icon_image_alt, + :register, :use, + :_naginator_name + + suppress :host_name + + setnamevar :_naginator_name + end + + newtype :servicegroup do + setparameters :servicegroup_name, :alias, :members, :servicegroup_members, + :notes, :notes_url, :action_url, + :register, :use + end + + newtype :contact do + setparameters :contact_name, :alias, :contactgroups, + :host_notifications_enabled, :service_notifications_enabled, + :host_notification_period, :service_notification_period, + :host_notification_options, :service_notification_options, + :host_notification_commands, :service_notification_commands, + :email, :pager, :address1, :address2, :address3, :address4, + :address5, :address6, :can_submit_commands, :retain_status_information, + :retain_nonstatus_information, + :register, :use + + setsuperior "person" + end + + newtype :contactgroup do + setparameters :contactgroup_name, :alias, :members, :contactgroup_members, + :register, :use + end + + # TODO - We should support generic time periods here eg "day 1 - 15" + newtype :timeperiod do + setparameters :timeperiod_name, :alias, :sunday, :monday, :tuesday, + :wednesday, :thursday, :friday, :saturday, :exclude, + :register, :use + end + + newtype :command do + setparameters :command_name, :command_line + end + + newtype :servicedependency do + auxiliary = true + setparameters :dependent_host_name, :dependent_hostgroup_name, + :dependent_service_description, :host_name, :hostgroup_name, + :service_description, :inherits_parent, :execution_failure_criteria, + :notification_failure_criteria, :dependency_period, + :register, :use, + :_naginator_name + + setnamevar :_naginator_name + end + + newtype :serviceescalation do + setparameters :host_name, :hostgroup_name, :servicegroup_name, + :service_description, :contacts, :contact_groups, + :first_notification, :last_notification, :notification_interval, + :escalation_period, :escalation_options, + :register, :use, + :_naginator_name + + setnamevar :_naginator_name + end + + newtype :hostdependency do + auxiliary = true + setparameters :dependent_host_name, :dependent_hostgroup_name, :host_name, + :hostgroup_name, :inherits_parent, :execution_failure_criteria, + :notification_failure_criteria, :dependency_period, + :register, :use, + :_naginator_name + + setnamevar :_naginator_name + end + + newtype :hostescalation do + setparameters :host_name, :hostgroup_name, :contacts, :contact_groups, + :first_notification, :last_notification, :notification_interval, + :escalation_period, :escalation_options, + :register, :use, + :_naginator_name + + setnamevar :_naginator_name + end + + newtype :hostextinfo do + auxiliary = true + setparameters :host_name, :notes, :notes_url, :icon_image, :icon_image_alt, + :vrml_image, :statusmap_image, "2d_coords".intern, "3d_coords".intern, + :register, :use + + setnamevar :host_name + end + + newtype :serviceextinfo do + auxiliary = true + + setparameters :host_name, :service_description, :notes, :notes_url, + :action_url, :icon_image, :icon_image_alt, + :register, :use, + :_naginator_name + + setnamevar :_naginator_name + end + +end diff --git a/mcollective/lib/puppet/external/nagios/grammar.ry b/mcollective/lib/puppet/external/nagios/grammar.ry new file mode 100644 index 000000000..dc203be5c --- /dev/null +++ b/mcollective/lib/puppet/external/nagios/grammar.ry @@ -0,0 +1,185 @@ +# vim: syntax=ruby +class Nagios::Parser + +token DEFINE NAME STRING PARAM LCURLY RCURLY VALUE 
RETURN COMMENT INLINECOMMENT + +rule +decls: decl { return val[0] if val[0] } + | decls decl { + if val[1].nil? + result = val[0] + else + if val[0].nil? + result = val[1] + else + result = [ val[0], val[1] ].flatten + end + end + } + ; + +decl: object { result = [val[0]] } + | RETURN { result = nil } + | comment + ; + +comment: COMMENT RETURN { result = nil } + ; + +object: DEFINE NAME LCURLY RETURN vars RCURLY { + result = Nagios::Base.create(val[1],val[4]) + } + ; + +vars: var + | vars var { + val[1].each {|p,v| + val[0][p] = v + } + result = val[0] + } + ; + +var: PARAM VALUE icomment returns { result = {val[0],val[1]} } + ; + +returns: RETURN + | returns RETURN + ; + +icomment: # nothing + | INLINECOMMENT + ; + +end + +----inner + +class ::Nagios::Parser::SyntaxError < RuntimeError; end + +def parse(src) + @src = src + + # state variables + @invar = false + @inobject = false + @done = false + + @line = 0 + @yydebug = true + + do_parse +end + +# The lexer. Very simple. +def token + @src.sub!(/\A\n/,'') + if $& + @line += 1 + return [ :RETURN, "\n" ] + end + + if @done + return nil + end + yytext = String.new + + + # remove comments from this line + @src.sub!(/\A[ \t]*;.*\n/,"\n") + if $& + return [:INLINECOMMENT, ""] + end + + @src.sub!(/\A#.*\n/,"\n") + if $& + return [:COMMENT, ""] + end + + @src.sub!(/#.*/,'') + + if @src.length == 0 + @done = true + return [false, '$'] + end + + if @invar + @src.sub!(/\A[ \t]+/,'') + @src.sub!(/\A([^;\n]+)(\n|;)/,'\2') + if $1 + yytext += $1 + end + @invar = false + return [:VALUE, yytext] + else + @src.sub!(/\A[\t ]*(\S+)([\t ]*|$)/,'') + if $1 + yytext = $1 + case yytext + when 'define' + #puts "got define" + return [:DEFINE, yytext] + when '{' + #puts "got {" + @inobject = true + return [:LCURLY, yytext] + else + unless @inobject + #puts "got type: #{yytext}" + if yytext =~ /\W/ + giveback = yytext.dup + giveback.sub!(/^\w+/,'') + #puts "giveback " + giveback + #puts "yytext " + yytext + yytext.sub!(/\W.*$/,'') + #puts "yytext " + yytext + #puts "all [#{giveback} #{yytext} #{orig}]" + @src = giveback + @src + end + return [:NAME, yytext] + else + if yytext == '}' + #puts "got closure: #{yytext}" + @inobject = false + return [:RCURLY, '}'] + end + + unless @invar + @invar = true + return [:PARAM, $1] + else + end + end + end + end + end +end + +def next_token + token +end + +def yydebug + 1 +end + +def yywrap + 0 +end + +def on_error(token, value, vstack ) + msg = "" + unless value.nil? + msg = "line #{@line}: syntax error at '#{value}'" + else + msg = "line #{@line}: syntax error at '#{token}'" + end + unless @src.size > 0 + msg = "line #{@line}: Unexpected end of file" + end + if token == '$end'.intern + puts "okay, this is silly" + else + raise ::Nagios::Parser::SyntaxError, msg + end +end diff --git a/mcollective/lib/puppet/external/nagios/makefile b/mcollective/lib/puppet/external/nagios/makefile new file mode 100644 index 000000000..fc14564b7 --- /dev/null +++ b/mcollective/lib/puppet/external/nagios/makefile @@ -0,0 +1,9 @@ +all: parser.rb + +debug: parser.rb setdebug + +parser.rb: grammar.ry + racc -E -oparser.rb grammar.ry + +setdebug: + perl -pi -e 's{\@yydebug =.*$$}{\@yydebug = true}' parser.rb diff --git a/mcollective/lib/puppet/external/nagios/parser.rb b/mcollective/lib/puppet/external/nagios/parser.rb new file mode 100644 index 000000000..5504f5818 --- /dev/null +++ b/mcollective/lib/puppet/external/nagios/parser.rb @@ -0,0 +1,775 @@ +# +# DO NOT MODIFY!!!! 
+# This file is automatically generated by racc 1.4.5 +# from racc grammer file "grammar.ry". +# +# +# parser.rb: generated by racc (runtime embedded) +# +###### racc/parser.rb begin +unless $LOADED_FEATURES.index 'racc/parser.rb' +$LOADED_FEATURES.push 'racc/parser.rb' + +self.class.module_eval <<'..end racc/parser.rb modeval..id5256434e8a', 'racc/parser.rb', 1 +# +# $Id: parser.rb,v 1.7 2005/11/20 17:31:32 aamine Exp $ +# +# Copyright (c) 1999-2005 Minero Aoki +# +# This program is free software. +# You can distribute/modify this program under the same terms of ruby. +# +# As a special exception, when this code is copied by Racc +# into a Racc output file, you may use that output file +# without restriction. +# + +NotImplementedError = NotImplementError unless defined?(NotImplementedError) + +module Racc + class ParseError < StandardError; end +end +ParseError = Racc::ParseError unless defined?(::ParseError) + +module Racc + + Racc_No_Extentions = false unless defined?(Racc_No_Extentions) + + class Parser + + Racc_Runtime_Version = '1.4.5' + Racc_Runtime_Revision = '$Revision: 1.7 $'.split[1] + + Racc_Runtime_Core_Version_R = '1.4.5' + Racc_Runtime_Core_Revision_R = '$Revision: 1.7 $'.split[1] + begin + require 'racc/cparse' + # Racc_Runtime_Core_Version_C = (defined in extention) + Racc_Runtime_Core_Revision_C = Racc_Runtime_Core_Id_C.split[2] + raise LoadError, 'old cparse.so' unless new.respond_to?(:_racc_do_parse_c, true) + raise LoadError, 'selecting ruby version of racc runtime core' if Racc_No_Extentions + + Racc_Main_Parsing_Routine = :_racc_do_parse_c + Racc_YY_Parse_Method = :_racc_yyparse_c + Racc_Runtime_Core_Version = Racc_Runtime_Core_Version_C + Racc_Runtime_Core_Revision = Racc_Runtime_Core_Revision_C + Racc_Runtime_Type = 'c' + rescue LoadError + Racc_Main_Parsing_Routine = :_racc_do_parse_rb + Racc_YY_Parse_Method = :_racc_yyparse_rb + Racc_Runtime_Core_Version = Racc_Runtime_Core_Version_R + Racc_Runtime_Core_Revision = Racc_Runtime_Core_Revision_R + Racc_Runtime_Type = 'ruby' + end + + def Parser.racc_runtime_type + Racc_Runtime_Type + end + + private + + def _racc_setup + @yydebug = false unless self.class::Racc_debug_parser + @yydebug ||= false + if @yydebug + @racc_debug_out ||= $stderr + @racc_debug_out ||= $stderr + end + arg = self.class::Racc_arg + arg[13] = true if arg.size < 14 + arg + end + + def _racc_init_sysvars + @racc_state = [0] + @racc_tstack = [] + @racc_vstack = [] + + @racc_t = nil + @racc_val = nil + + @racc_read_next = true + + @racc_user_yyerror = false + @racc_error_status = 0 + end + + ### + ### do_parse + ### + + def do_parse + __send__(Racc_Main_Parsing_Routine, _racc_setup, false) + end + + def next_token + raise NotImplementedError, "#{self.class}\#next_token is not defined" + end + + def _racc_do_parse_rb(arg, in_debug) + action_table, action_check, action_default, action_pointer, + goto_table, goto_check, goto_default, goto_pointer, + nt_base, reduce_table, token_table, shift_n, + reduce_n, use_result, * = arg + + _racc_init_sysvars + tok = act = i = nil + nerr = 0 + + catch(:racc_end_parse) { + while true + if i = action_pointer[@racc_state[-1]] + if @racc_read_next + if @racc_t != 0 # not EOF + tok, @racc_val = next_token + unless tok # EOF + @racc_t = 0 + else + @racc_t = (token_table[tok] or 1) # error token + end + racc_read_token(@racc_t, tok, @racc_val) if @yydebug + @racc_read_next = false + end + end + i += @racc_t + unless i >= 0 and + act = action_table[i] and + action_check[i] == @racc_state[-1] + act = 
action_default[@racc_state[-1]] + end + else + act = action_default[@racc_state[-1]] + end + while act = _racc_evalact(act, arg) + ; + end + end + } + end + + ### + ### yyparse + ### + + def yyparse(recv, mid) + __send__(Racc_YY_Parse_Method, recv, mid, _racc_setup, true) + end + + def _racc_yyparse_rb(recv, mid, arg, c_debug) + action_table, action_check, action_default, action_pointer, + goto_table, goto_check, goto_default, goto_pointer, + nt_base, reduce_table, token_table, shift_n, + reduce_n, use_result, * = arg + + _racc_init_sysvars + tok = nil + act = nil + i = nil + nerr = 0 + + catch(:racc_end_parse) { + until i = action_pointer[@racc_state[-1]] + while act = _racc_evalact(action_default[@racc_state[-1]], arg) + ; + end + end + recv.__send__(mid) do |tok, val| + unless tok + @racc_t = 0 + else + @racc_t = (token_table[tok] or 1) # error token + end + @racc_val = val + @racc_read_next = false + + i += @racc_t + unless i >= 0 and + act = action_table[i] and + action_check[i] == @racc_state[-1] + act = action_default[@racc_state[-1]] + end + while act = _racc_evalact(act, arg) + ; + end + + while not (i = action_pointer[@racc_state[-1]]) or + not @racc_read_next or + @racc_t == 0 # $ + unless i and i += @racc_t and + i >= 0 and + act = action_table[i] and + action_check[i] == @racc_state[-1] + act = action_default[@racc_state[-1]] + end + while act = _racc_evalact(act, arg) + ; + end + end + end + } + end + + ### + ### common + ### + + def _racc_evalact(act, arg) + action_table, action_check, action_default, action_pointer, + goto_table, goto_check, goto_default, goto_pointer, + nt_base, reduce_table, token_table, shift_n, + reduce_n, use_result, * = arg + nerr = 0 # tmp + + if act > 0 and act < shift_n + # + # shift + # + if @racc_error_status > 0 + @racc_error_status -= 1 unless @racc_t == 1 # error token + end + @racc_vstack.push @racc_val + @racc_state.push act + @racc_read_next = true + if @yydebug + @racc_tstack.push @racc_t + racc_shift @racc_t, @racc_tstack, @racc_vstack + end + + elsif act < 0 and act > -reduce_n + # + # reduce + # + code = catch(:racc_jump) { + @racc_state.push _racc_do_reduce(arg, act) + false + } + if code + case code + when 1 # yyerror + @racc_user_yyerror = true # user_yyerror + return -reduce_n + when 2 # yyaccept + return shift_n + else + raise '[Racc Bug] unknown jump code' + end + end + + elsif act == shift_n + # + # accept + # + racc_accept if @yydebug + throw :racc_end_parse, @racc_vstack[0] + + elsif act == -reduce_n + # + # error + # + case @racc_error_status + when 0 + unless arg[21] # user_yyerror + nerr += 1 + on_error @racc_t, @racc_val, @racc_vstack + end + when 3 + if @racc_t == 0 # is $ + throw :racc_end_parse, nil + end + @racc_read_next = true + end + @racc_user_yyerror = false + @racc_error_status = 3 + while true + if i = action_pointer[@racc_state[-1]] + i += 1 # error token + if i >= 0 and + (act = action_table[i]) and + action_check[i] == @racc_state[-1] + break + end + end + throw :racc_end_parse, nil if @racc_state.size <= 1 + @racc_state.pop + @racc_vstack.pop + if @yydebug + @racc_tstack.pop + racc_e_pop @racc_state, @racc_tstack, @racc_vstack + end + end + return act + + else + raise "[Racc Bug] unknown action #{act.inspect}" + end + + racc_next_state(@racc_state[-1], @racc_state) if @yydebug + + nil + end + + def _racc_do_reduce(arg, act) + action_table, action_check, action_default, action_pointer, + goto_table, goto_check, goto_default, goto_pointer, + nt_base, reduce_table, token_table, shift_n, + reduce_n, use_result, * 
= arg + state = @racc_state + vstack = @racc_vstack + tstack = @racc_tstack + + i = act * -3 + len = reduce_table[i] + reduce_to = reduce_table[i+1] + method_id = reduce_table[i+2] + void_array = [] + + tmp_t = tstack[-len, len] if @yydebug + tmp_v = vstack[-len, len] + tstack[-len, len] = void_array if @yydebug + vstack[-len, len] = void_array + state[-len, len] = void_array + + # tstack must be updated AFTER method call + if use_result + vstack.push __send__(method_id, tmp_v, vstack, tmp_v[0]) + else + vstack.push __send__(method_id, tmp_v, vstack) + end + tstack.push reduce_to + + racc_reduce(tmp_t, reduce_to, tstack, vstack) if @yydebug + + k1 = reduce_to - nt_base + if i = goto_pointer[k1] + i += state[-1] + if i >= 0 and (curstate = goto_table[i]) and goto_check[i] == k1 + return curstate + end + end + goto_default[k1] + end + + def on_error(t, val, vstack) + raise ParseError, sprintf("\nparse error on value %s (%s)", val.inspect, token_to_str(t) || '?') + end + + def yyerror + throw :racc_jump, 1 + end + + def yyaccept + throw :racc_jump, 2 + end + + def yyerrok + @racc_error_status = 0 + end + + # + # for debugging output + # + + def racc_read_token(t, tok, val) + @racc_debug_out.print 'read ' + @racc_debug_out.print tok.inspect, '(', racc_token2str(t), ') ' + @racc_debug_out.puts val.inspect + @racc_debug_out.puts + end + + def racc_shift(tok, tstack, vstack) + @racc_debug_out.puts "shift #{racc_token2str tok}" + racc_print_stacks tstack, vstack + @racc_debug_out.puts + end + + def racc_reduce(toks, sim, tstack, vstack) + out = @racc_debug_out + out.print 'reduce ' + if toks.empty? + out.print ' ' + else + toks.each {|t| out.print ' ', racc_token2str(t) } + end + out.puts " --> #{racc_token2str(sim)}" + + racc_print_stacks tstack, vstack + @racc_debug_out.puts + end + + def racc_accept + @racc_debug_out.puts 'accept' + @racc_debug_out.puts + end + + def racc_e_pop(state, tstack, vstack) + @racc_debug_out.puts 'error recovering mode: pop token' + racc_print_states state + racc_print_stacks tstack, vstack + @racc_debug_out.puts + end + + def racc_next_state(curstate, state) + @racc_debug_out.puts "goto #{curstate}" + racc_print_states state + @racc_debug_out.puts + end + + def racc_print_stacks(t, v) + out = @racc_debug_out + out.print ' [' + t.each_index do |i| + out.print ' (', racc_token2str(t[i]), ' ', v[i].inspect, ')' + end + out.puts ' ]' + end + + def racc_print_states(s) + out = @racc_debug_out + out.print ' [' + s.each {|st| out.print ' ', st } + out.puts ' ]' + end + + def racc_token2str(tok) + self.class::Racc_token_to_s_table[tok] or + raise "[Racc Bug] can't convert token #{tok} to string" + end + + def token_to_str(t) + self.class::Racc_token_to_s_table[t] + end + + end + +end +..end racc/parser.rb modeval..id5256434e8a +end +###### racc/parser.rb end + + +module Nagios + + class Parser < Racc::Parser + +module_eval <<'..end grammar.ry modeval..idcb2ea30b34', 'grammar.ry', 57 + +class ::Nagios::Parser::SyntaxError < RuntimeError; end + +def parse(src) + @src = src + + # state variables + @invar = false + @inobject = false + @done = false + + @line = 0 + @yydebug = true + + do_parse +end + +# The lexer. Very simple. 
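+# Tokens produced: [:DEFINE, text], [:NAME, text], [:LCURLY, '{'],
+# [:PARAM, text], [:VALUE, text], [:RCURLY, '}'], [:RETURN, "\n"],
+# [:COMMENT, ""] / [:INLINECOMMENT, ""] for stripped comments, and
+# [false, '$'] at end of input.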
+def token + @src.sub!(/\A\n/,'') + if $MATCH + @line += 1 + return [ :RETURN, "\n" ] + end + + return nil if @done + yytext = String.new + + + # remove comments from this line + @src.sub!(/\A[ \t]*;.*\n/,"\n") + return [:INLINECOMMENT, ""] if $MATCH + + @src.sub!(/\A#.*\n/,"\n") + return [:COMMENT, ""] if $MATCH + + @src.sub!(/#.*/,'') + + if @src.length == 0 + @done = true + return [false, '$'] + end + + if @invar + @src.sub!(/\A[ \t]+/,'') + @src.sub!(/\A([^;\n]+)(\n|;)/,'\2') + if $1 + yytext += $1 + end + @invar = false + return [:VALUE, yytext] + else + @src.sub!(/\A[\t ]*(\S+)([\t ]*|$)/,'') + if $1 + yytext = $1 + case yytext + when 'define' + #puts "got define" + return [:DEFINE, yytext] + when '{' + #puts "got {" + @inobject = true + return [:LCURLY, yytext] + else + unless @inobject + #puts "got type: #{yytext}" + if yytext =~ /\W/ + giveback = yytext.dup + giveback.sub!(/^\w+/,'') + #puts "giveback #{giveback}" + #puts "yytext #{yytext}" + yytext.sub!(/\W.*$/,'') + #puts "yytext #{yytext}" + #puts "all [#{giveback} #{yytext} #{orig}]" + @src = giveback + @src + end + return [:NAME, yytext] + else + if yytext == '}' + #puts "got closure: #{yytext}" + @inobject = false + return [:RCURLY, '}'] + end + + unless @invar + @invar = true + return [:PARAM, $1] + else + end + end + end + end + end +end + +def next_token + token +end + +def yydebug + 1 +end + +def yywrap + 0 +end + +def on_error(token, value, vstack ) + msg = "" + unless value.nil? + msg = "line #{@line}: syntax error at '#{value}'" + else + msg = "line #{@line}: syntax error at '#{token}'" + end + msg = "line #{@line}: Unexpected end of file" unless @src.size > 0 + if token == '$end'.intern + puts "okay, this is silly" + else + raise ::Nagios::Parser::SyntaxError, msg + end +end +..end grammar.ry modeval..idcb2ea30b34 + +##### racc 1.4.5 generates ### + +racc_reduce_table = [ + 0, 0, :racc_error, + 1, 13, :_reduce_1, + 2, 13, :_reduce_2, + 1, 14, :_reduce_3, + 1, 14, :_reduce_4, + 1, 14, :_reduce_none, + 2, 16, :_reduce_6, + 6, 15, :_reduce_7, + 1, 17, :_reduce_none, + 2, 17, :_reduce_9, + 4, 18, :_reduce_10, + 1, 20, :_reduce_none, + 2, 20, :_reduce_none, + 0, 19, :_reduce_none, + 1, 19, :_reduce_none ] + +racc_reduce_n = 15 + +racc_shift_n = 26 + +racc_action_table = [ + 9, 15, 1, 20, 1, 14, 12, 13, 11, 6, + 7, 6, 7, 15, 18, 8, 21, 23, 25 ] + +racc_action_check = [ + 2, 16, 2, 16, 0, 12, 8, 9, 7, 2, + 2, 0, 0, 14, 15, 1, 18, 22, 24 ] + +racc_action_pointer = [ + 2, 12, 0, nil, nil, nil, nil, -1, 0, 7, + nil, nil, -4, nil, 8, 6, -4, nil, 5, nil, + nil, nil, 8, nil, 9, nil ] + +racc_action_default = [ + -15, -15, -15, -1, -3, -5, -4, -15, -15, -15, + -2, -6, -15, 26, -15, -15, -15, -8, -13, -9, + -7, -14, -15, -11, -10, -12 ] + +racc_goto_table = [ 17, 3, 19, 10, 2, 16, 22, 24 ] + +racc_goto_check = [ 6, 2, 6, 2, 1, 5, 7, 8 ] + +racc_goto_pointer = [ nil, 4, 1, nil, nil, -9, -14, -12, -15 ] + +racc_goto_default = [ nil, nil, nil, 4, 5, nil, nil, nil, nil ] + +racc_token_table = { + false => 0, + Object.new => 1, + :DEFINE => 2, + :NAME => 3, + :STRING => 4, + :PARAM => 5, + :LCURLY => 6, + :RCURLY => 7, + :VALUE => 8, + :RETURN => 9, + :COMMENT => 10, + :INLINECOMMENT => 11 } + +racc_use_result_var = true + +racc_nt_base = 12 + +Racc_arg = [ + racc_action_table, + racc_action_check, + racc_action_default, + racc_action_pointer, + racc_goto_table, + racc_goto_check, + racc_goto_default, + racc_goto_pointer, + racc_nt_base, + racc_reduce_table, + racc_token_table, + racc_shift_n, + racc_reduce_n, + racc_use_result_var ] + 
+Racc_token_to_s_table = [ +'$end', +'error', +'DEFINE', +'NAME', +'STRING', +'PARAM', +'LCURLY', +'RCURLY', +'VALUE', +'RETURN', +'COMMENT', +'INLINECOMMENT', +'$start', +'decls', +'decl', +'object', +'comment', +'vars', +'var', +'icomment', +'returns'] + +Racc_debug_parser = false + +##### racc system variables end ##### + +# reduce 0 omitted + +module_eval <<'.,.,', 'grammar.ry', 6 + def _reduce_1( val, _values, result ) +return val[0] if val[0] + result +end +.,., + +module_eval <<'.,.,', 'grammar.ry', 18 + def _reduce_2( val, _values, result ) + if val[1].nil? + result = val[0] + else + if val[0].nil? + result = val[1] + else + result = [ val[0], val[1] ].flatten + end + end + result + end +.,., + +module_eval <<'.,.,', 'grammar.ry', 20 + def _reduce_3( val, _values, result ) +result = [val[0]] + result +end +.,., + +module_eval <<'.,.,', 'grammar.ry', 21 + def _reduce_4( val, _values, result ) +result = nil + result +end +.,., + +# reduce 5 omitted + +module_eval <<'.,.,', 'grammar.ry', 25 + def _reduce_6( val, _values, result ) +result = nil + result +end +.,., + +module_eval <<'.,.,', 'grammar.ry', 31 + def _reduce_7( val, _values, result ) + result = Nagios::Base.create(val[1],val[4]) + result + end +.,., + +# reduce 8 omitted + +module_eval <<'.,.,', 'grammar.ry', 40 + def _reduce_9( val, _values, result ) + val[1].each {|p,v| + val[0][p] = v + } + result = val[0] + result + end +.,., + +module_eval <<'.,.,', 'grammar.ry', 42 + def _reduce_10( val, _values, result ) +result = {val[0],val[1]} + result +end +.,., + +# reduce 11 omitted + +# reduce 12 omitted + +# reduce 13 omitted + +# reduce 14 omitted + +def _reduce_none( val, _values, result ) + result +end + + end + +end diff --git a/mcollective/lib/puppet/external/pson/common.rb b/mcollective/lib/puppet/external/pson/common.rb new file mode 100644 index 000000000..160a502d1 --- /dev/null +++ b/mcollective/lib/puppet/external/pson/common.rb @@ -0,0 +1,370 @@ +require 'puppet/external/pson/version' + +module PSON + class << self + # If _object_ is string-like parse the string and return the parsed result + # as a Ruby data structure. Otherwise generate a PSON text from the Ruby + # data structure object and return it. + # + # The _opts_ argument is passed through to generate/parse respectively, see + # generate and parse for their documentation. + def [](object, opts = {}) + if object.respond_to? :to_str + PSON.parse(object.to_str, opts => {}) + else + PSON.generate(object, opts => {}) + end + end + + # Returns the PSON parser class, that is used by PSON. This might be either + # PSON::Ext::Parser or PSON::Pure::Parser. + attr_reader :parser + + # Set the PSON parser class _parser_ to be used by PSON. + def parser=(parser) # :nodoc: + @parser = parser + remove_const :Parser if const_defined? :Parser + const_set :Parser, parser + end + + def registered_document_types + @registered_document_types ||= {} + end + + # Register a class-constant for deserializaion. + def register_document_type(name,klass) + registered_document_types[name.to_s] = klass + end + + # Return the constant located at _path_. + # Anything may be registered as a path by calling register_path, above. + # Otherwise, the format of _path_ has to be either ::A::B::C or A::B::C. + # In either of these cases A has to be defined in Object (e.g. the path + # must be an absolute namespace path. If the constant doesn't exist at + # the given path, an ArgumentError is raised. 
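+    # Sketch: deep_const_get("PSON::Pure::Parser") walks Object -> PSON ->
+    # Pure -> Parser, unless that name was registered first via
+    # register_document_type.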
+ def deep_const_get(path) # :nodoc: + path = path.to_s + registered_document_types[path] || path.split(/::/).inject(Object) do |p, c| + case + when c.empty? then p + when p.const_defined?(c) then p.const_get(c) + else raise ArgumentError, "can't find const for unregistered document type #{path}" + end + end + end + + # Set the module _generator_ to be used by PSON. + def generator=(generator) # :nodoc: + @generator = generator + generator_methods = generator::GeneratorMethods + for const in generator_methods.constants + klass = deep_const_get(const) + modul = generator_methods.const_get(const) + klass.class_eval do + instance_methods(false).each do |m| + m.to_s == 'to_pson' and remove_method m + end + include modul + end + end + self.state = generator::State + const_set :State, self.state + end + + # Returns the PSON generator modul, that is used by PSON. This might be + # either PSON::Ext::Generator or PSON::Pure::Generator. + attr_reader :generator + + # Returns the PSON generator state class, that is used by PSON. This might + # be either PSON::Ext::Generator::State or PSON::Pure::Generator::State. + attr_accessor :state + + # This is create identifier, that is used to decide, if the _pson_create_ + # hook of a class should be called. It defaults to 'document_type'. + attr_accessor :create_id + end + self.create_id = 'document_type' + + NaN = (-1.0) ** 0.5 + + Infinity = 1.0/0 + + MinusInfinity = -Infinity + + # The base exception for PSON errors. + class PSONError < StandardError; end + + # This exception is raised, if a parser error occurs. + class ParserError < PSONError; end + + # This exception is raised, if the nesting of parsed datastructures is too + # deep. + class NestingError < ParserError; end + + # This exception is raised, if a generator or unparser error occurs. + class GeneratorError < PSONError; end + # For backwards compatibility + UnparserError = GeneratorError + + # If a circular data structure is encountered while unparsing + # this exception is raised. + class CircularDatastructure < GeneratorError; end + + # This exception is raised, if the required unicode support is missing on the + # system. Usually this means, that the iconv library is not installed. + class MissingUnicodeSupport < PSONError; end + + module_function + + # Parse the PSON string _source_ into a Ruby data structure and return it. + # + # _opts_ can have the following + # keys: + # * *max_nesting*: The maximum depth of nesting allowed in the parsed data + # structures. Disable depth checking with :max_nesting => false, it defaults + # to 19. + # * *allow_nan*: If set to true, allow NaN, Infinity and -Infinity in + # defiance of RFC 4627 to be parsed by the Parser. This option defaults + # to false. + # * *create_additions*: If set to false, the Parser doesn't create + # additions even if a matchin class and create_id was found. This option + # defaults to true. + def parse(source, opts = {}) + PSON.parser.new(source, opts).parse + end + + # Parse the PSON string _source_ into a Ruby data structure and return it. + # The bang version of the parse method, defaults to the more dangerous values + # for the _opts_ hash, so be sure only to parse trusted _source_ strings. + # + # _opts_ can have the following keys: + # * *max_nesting*: The maximum depth of nesting allowed in the parsed data + # structures. Enable depth checking with :max_nesting => anInteger. The parse! + # methods defaults to not doing max depth checking: This can be dangerous, + # if someone wants to fill up your stack. 
+ # * *allow_nan*: If set to true, allow NaN, Infinity, and -Infinity in + # defiance of RFC 4627 to be parsed by the Parser. This option defaults + # to true. + # * *create_additions*: If set to false, the Parser doesn't create + # additions even if a matchin class and create_id was found. This option + # defaults to true. + def parse!(source, opts = {}) + opts = { + :max_nesting => false, + :allow_nan => true + }.update(opts) + PSON.parser.new(source, opts).parse + end + + # Unparse the Ruby data structure _obj_ into a single line PSON string and + # return it. _state_ is + # * a PSON::State object, + # * or a Hash like object (responding to to_hash), + # * an object convertible into a hash by a to_h method, + # that is used as or to configure a State object. + # + # It defaults to a state object, that creates the shortest possible PSON text + # in one line, checks for circular data structures and doesn't allow NaN, + # Infinity, and -Infinity. + # + # A _state_ hash can have the following keys: + # * *indent*: a string used to indent levels (default: ''), + # * *space*: a string that is put after, a : or , delimiter (default: ''), + # * *space_before*: a string that is put before a : pair delimiter (default: ''), + # * *object_nl*: a string that is put at the end of a PSON object (default: ''), + # * *array_nl*: a string that is put at the end of a PSON array (default: ''), + # * *check_circular*: true if checking for circular data structures + # should be done (the default), false otherwise. + # * *allow_nan*: true if NaN, Infinity, and -Infinity should be + # generated, otherwise an exception is thrown, if these values are + # encountered. This options defaults to false. + # * *max_nesting*: The maximum depth of nesting allowed in the data + # structures from which PSON is to be generated. Disable depth checking + # with :max_nesting => false, it defaults to 19. + # + # See also the fast_generate for the fastest creation method with the least + # amount of sanity checks, and the pretty_generate method for some + # defaults for a pretty output. + def generate(obj, state = nil) + if state + state = State.from_state(state) + else + state = State.new + end + obj.to_pson(state) + end + + # :stopdoc: + # I want to deprecate these later, so I'll first be silent about them, and + # later delete them. + alias unparse generate + module_function :unparse + # :startdoc: + + # Unparse the Ruby data structure _obj_ into a single line PSON string and + # return it. This method disables the checks for circles in Ruby objects, and + # also generates NaN, Infinity, and, -Infinity float values. + # + # *WARNING*: Be careful not to pass any Ruby data structures with circles as + # _obj_ argument, because this will cause PSON to go into an infinite loop. + def fast_generate(obj) + obj.to_pson(nil) + end + + # :stopdoc: + # I want to deprecate these later, so I'll first be silent about them, and later delete them. + alias fast_unparse fast_generate + module_function :fast_unparse + # :startdoc: + + # Unparse the Ruby data structure _obj_ into a PSON string and return it. The + # returned string is a prettier form of the string returned by #unparse. + # + # The _opts_ argument can be used to configure the generator, see the + # generate method for a more detailed explanation. + def pretty_generate(obj, opts = nil) + + state = PSON.state.new( + + :indent => ' ', + :space => ' ', + :object_nl => "\n", + :array_nl => "\n", + + :check_circular => true + ) + if opts + if opts.respond_to? 
:to_hash + opts = opts.to_hash + elsif opts.respond_to? :to_h + opts = opts.to_h + else + raise TypeError, "can't convert #{opts.class} into Hash" + end + state.configure(opts) + end + obj.to_pson(state) + end + + # :stopdoc: + # I want to deprecate these later, so I'll first be silent about them, and later delete them. + alias pretty_unparse pretty_generate + module_function :pretty_unparse + # :startdoc: + + # Load a ruby data structure from a PSON _source_ and return it. A source can + # either be a string-like object, an IO like object, or an object responding + # to the read method. If _proc_ was given, it will be called with any nested + # Ruby object as an argument recursively in depth first order. + # + # This method is part of the implementation of the load/dump interface of + # Marshal and YAML. + def load(source, proc = nil) + if source.respond_to? :to_str + source = source.to_str + elsif source.respond_to? :to_io + source = source.to_io.read + else + source = source.read + end + result = parse(source, :max_nesting => false, :allow_nan => true) + recurse_proc(result, &proc) if proc + result + end + + def recurse_proc(result, &proc) + case result + when Array + result.each { |x| recurse_proc x, &proc } + proc.call result + when Hash + result.each { |x, y| recurse_proc x, &proc; recurse_proc y, &proc } + proc.call result + else + proc.call result + end + end + private :recurse_proc + module_function :recurse_proc + + alias restore load + module_function :restore + + # Dumps _obj_ as a PSON string, i.e. calls generate on the object and returns + # the result. + # + # If anIO (an IO like object or an object that responds to the write method) + # was given, the resulting PSON is written to it. + # + # If the number of nested arrays or objects exceeds _limit_ an ArgumentError + # exception is raised. This argument is similar (but not exactly the + # same!) to the _limit_ argument in Marshal.dump. + # + # This method is part of the implementation of the load/dump interface of + # Marshal and YAML. + def dump(obj, anIO = nil, limit = nil) + if anIO and limit.nil? + anIO = anIO.to_io if anIO.respond_to?(:to_io) + unless anIO.respond_to?(:write) + limit = anIO + anIO = nil + end + end + limit ||= 0 + result = generate(obj, :allow_nan => true, :max_nesting => limit) + if anIO + anIO.write result + anIO + else + result + end + rescue PSON::NestingError + raise ArgumentError, "exceed depth limit" + end +end + +module ::Kernel + private + + # Outputs _objs_ to STDOUT as PSON strings in the shortest form, that is in + # one line. + def j(*objs) + objs.each do |obj| + puts PSON::generate(obj, :allow_nan => true, :max_nesting => false) + end + nil + end + + # Ouputs _objs_ to STDOUT as PSON strings in a pretty format, with + # indentation and over many lines. + def jj(*objs) + objs.each do |obj| + puts PSON::pretty_generate(obj, :allow_nan => true, :max_nesting => false) + end + nil + end + + # If _object_ is string-like parse the string and return the parsed result as + # a Ruby data structure. Otherwise generate a PSON text from the Ruby data + # structure object and return it. + # + # The _opts_ argument is passed through to generate/parse respectively, see + # generate and parse for their documentation. + def PSON(object, opts = {}) + if object.respond_to? :to_str + PSON.parse(object.to_str, opts) + else + PSON.generate(object, opts) + end + end +end + +class ::Class + # Returns true, if this class can be used to create an instance + # from a serialised PSON string. 
The class has to implement a class
+  # method _pson_create_ that expects a hash as first parameter, which includes
+  # the required data.
+  def pson_creatable?
+    respond_to?(:pson_create)
+  end
+end
diff --git a/mcollective/lib/puppet/external/pson/pure.rb b/mcollective/lib/puppet/external/pson/pure.rb
new file mode 100644
index 000000000..53d1ea2a7
--- /dev/null
+++ b/mcollective/lib/puppet/external/pson/pure.rb
@@ -0,0 +1,77 @@
+require 'puppet/external/pson/common'
+require 'puppet/external/pson/pure/parser'
+require 'puppet/external/pson/pure/generator'
+
+module PSON
+  begin
+    require 'iconv'
+    # An iconv instance to convert from UTF16 Big Endian to UTF8.
+    UTF16toUTF8 = Iconv.new('utf-8', 'utf-16be') # :nodoc:
+    # An iconv instance to convert from UTF8 to UTF16 Big Endian.
+    UTF8toUTF16 = Iconv.new('utf-16be', 'utf-8') # :nodoc:
+    UTF8toUTF16.iconv('no bom')
+  rescue LoadError
+    # We actually don't care
+    Puppet.warning "iconv couldn't be loaded, which is required for UTF-8/UTF-16 conversions"
+  rescue Errno::EINVAL, Iconv::InvalidEncoding
+    # Iconv doesn't support big endian utf-16. Let's try to hack this manually
+    # into the converters.
+    begin
+      old_verbose, $VERBOSE = $VERBOSE, nil
+      # An iconv instance to convert from UTF16 Big Endian to UTF8.
+      UTF16toUTF8 = Iconv.new('utf-8', 'utf-16') # :nodoc:
+      # An iconv instance to convert from UTF8 to UTF16 Big Endian.
+      UTF8toUTF16 = Iconv.new('utf-16', 'utf-8') # :nodoc:
+      UTF8toUTF16.iconv('no bom')
+      if UTF8toUTF16.iconv("\xe2\x82\xac") == "\xac\x20"
+        swapper = Class.new do
+          def initialize(iconv) # :nodoc:
+            @iconv = iconv
+          end
+
+          def iconv(string) # :nodoc:
+            result = @iconv.iconv(string)
+            PSON.swap!(result)
+          end
+        end
+        UTF8toUTF16 = swapper.new(UTF8toUTF16) # :nodoc:
+      end
+      if UTF16toUTF8.iconv("\xac\x20") == "\xe2\x82\xac"
+        swapper = Class.new do
+          def initialize(iconv) # :nodoc:
+            @iconv = iconv
+          end
+
+          def iconv(string) # :nodoc:
+            string = PSON.swap!(string.dup)
+            @iconv.iconv(string)
+          end
+        end
+        UTF16toUTF8 = swapper.new(UTF16toUTF8) # :nodoc:
+      end
+    rescue Errno::EINVAL, Iconv::InvalidEncoding
+      Puppet.warning "iconv doesn't seem to support UTF-8/UTF-16 conversions"
+    ensure
+      $VERBOSE = old_verbose
+    end
+  end
+
+  # Swap consecutive bytes of _string_ in place.
+  def self.swap!(string) # :nodoc:
+    0.upto(string.size / 2) do |i|
+      break unless string[2 * i + 1]
+      string[2 * i], string[2 * i + 1] = string[2 * i + 1], string[2 * i]
+    end
+    string
+  end
+
+  # This module holds all the modules/classes that implement PSON's
+  # functionality in pure ruby.
+  module Pure
+    $DEBUG and warn "Using pure library for PSON."
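+    # Editor's note (illustrative, not part of the original change set): the
+    # PSON.swap! helper defined above flips each pair of adjacent bytes in
+    # place, which is how the fallback converters fake a big-endian iconv.
+    # For example:
+    #
+    #   PSON.swap!("\x20\xac")   # => "\xac\x20"
+    #
+    # i.e. a UTF-16BE code unit becomes its little-endian byte order.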
+ PSON.parser = Parser + PSON.generator = Generator + end + + PSON_LOADED = true +end diff --git a/mcollective/lib/puppet/external/pson/pure/generator.rb b/mcollective/lib/puppet/external/pson/pure/generator.rb new file mode 100644 index 000000000..89a0c62e0 --- /dev/null +++ b/mcollective/lib/puppet/external/pson/pure/generator.rb @@ -0,0 +1,401 @@ +module PSON + MAP = { + "\x0" => '\u0000', + "\x1" => '\u0001', + "\x2" => '\u0002', + "\x3" => '\u0003', + "\x4" => '\u0004', + "\x5" => '\u0005', + "\x6" => '\u0006', + "\x7" => '\u0007', + "\b" => '\b', + "\t" => '\t', + "\n" => '\n', + "\xb" => '\u000b', + "\f" => '\f', + "\r" => '\r', + "\xe" => '\u000e', + "\xf" => '\u000f', + "\x10" => '\u0010', + "\x11" => '\u0011', + "\x12" => '\u0012', + "\x13" => '\u0013', + "\x14" => '\u0014', + "\x15" => '\u0015', + "\x16" => '\u0016', + "\x17" => '\u0017', + "\x18" => '\u0018', + "\x19" => '\u0019', + "\x1a" => '\u001a', + "\x1b" => '\u001b', + "\x1c" => '\u001c', + "\x1d" => '\u001d', + "\x1e" => '\u001e', + "\x1f" => '\u001f', + '"' => '\"', + '\\' => '\\\\', + } # :nodoc: + + # Convert a UTF8 encoded Ruby string _string_ to a PSON string, encoded with + # UTF16 big endian characters as \u????, and return it. + if String.method_defined?(:force_encoding) + def utf8_to_pson(string) # :nodoc: + string = string.dup + string << '' # XXX workaround: avoid buffer sharing + string.force_encoding(Encoding::ASCII_8BIT) + string.gsub!(/["\\\x0-\x1f]/) { MAP[$MATCH] } + string + rescue Iconv::Failure => e + raise GeneratorError, "Caught #{e.class}: #{e}" + end + else + def utf8_to_pson(string) # :nodoc: + string.gsub(/["\\\x0-\x1f]/n) { MAP[$MATCH] } + end + end + module_function :utf8_to_pson + + module Pure + module Generator + # This class is used to create State instances, that are use to hold data + # while generating a PSON text from a a Ruby data structure. + class State + # Creates a State object from _opts_, which ought to be Hash to create + # a new State instance configured by _opts_, something else to create + # an unconfigured instance. If _opts_ is a State object, it is just + # returned. + def self.from_state(opts) + case opts + when self + opts + when Hash + new(opts) + else + new + end + end + + # Instantiates a new State object, configured by _opts_. + # + # _opts_ can have the following keys: + # + # * *indent*: a string used to indent levels (default: ''), + # * *space*: a string that is put after, a : or , delimiter (default: ''), + # * *space_before*: a string that is put before a : pair delimiter (default: ''), + # * *object_nl*: a string that is put at the end of a PSON object (default: ''), + # * *array_nl*: a string that is put at the end of a PSON array (default: ''), + # * *check_circular*: true if checking for circular data structures + # should be done (the default), false otherwise. + # * *check_circular*: true if checking for circular data structures + # should be done, false (the default) otherwise. + # * *allow_nan*: true if NaN, Infinity, and -Infinity should be + # generated, otherwise an exception is thrown, if these values are + # encountered. This options defaults to false. + def initialize(opts = {}) + @seen = {} + @indent = '' + @space = '' + @space_before = '' + @object_nl = '' + @array_nl = '' + @check_circular = true + @allow_nan = false + configure opts + end + + # This string is used to indent levels in the PSON text. + attr_accessor :indent + + # This string is used to insert a space between the tokens in a PSON + # string. 
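+        # (Editor's example: with :space => ' ' a generated pair reads
+        # "a": 1 rather than "a":1.)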
+ attr_accessor :space + + # This string is used to insert a space before the ':' in PSON objects. + attr_accessor :space_before + + # This string is put at the end of a line that holds a PSON object (or + # Hash). + attr_accessor :object_nl + + # This string is put at the end of a line that holds a PSON array. + attr_accessor :array_nl + + # This integer returns the maximum level of data structure nesting in + # the generated PSON, max_nesting = 0 if no maximum is checked. + attr_accessor :max_nesting + + def check_max_nesting(depth) # :nodoc: + return if @max_nesting.zero? + current_nesting = depth + 1 + current_nesting > @max_nesting and + raise NestingError, "nesting of #{current_nesting} is too deep" + end + + # Returns true, if circular data structures should be checked, + # otherwise returns false. + def check_circular? + @check_circular + end + + # Returns true if NaN, Infinity, and -Infinity should be considered as + # valid PSON and output. + def allow_nan? + @allow_nan + end + + # Returns _true_, if _object_ was already seen during this generating + # run. + def seen?(object) + @seen.key?(object.__id__) + end + + # Remember _object_, to find out if it was already encountered (if a + # cyclic data structure is if a cyclic data structure is rendered). + def remember(object) + @seen[object.__id__] = true + end + + # Forget _object_ for this generating run. + def forget(object) + @seen.delete object.__id__ + end + + # Configure this State instance with the Hash _opts_, and return + # itself. + def configure(opts) + @indent = opts[:indent] if opts.key?(:indent) + @space = opts[:space] if opts.key?(:space) + @space_before = opts[:space_before] if opts.key?(:space_before) + @object_nl = opts[:object_nl] if opts.key?(:object_nl) + @array_nl = opts[:array_nl] if opts.key?(:array_nl) + @check_circular = !!opts[:check_circular] if opts.key?(:check_circular) + @allow_nan = !!opts[:allow_nan] if opts.key?(:allow_nan) + if !opts.key?(:max_nesting) # defaults to 19 + @max_nesting = 19 + elsif opts[:max_nesting] + @max_nesting = opts[:max_nesting] + else + @max_nesting = 0 + end + self + end + + # Returns the configuration instance variables as a hash, that can be + # passed to the configure method. + def to_h + result = {} + for iv in %w{indent space space_before object_nl array_nl check_circular allow_nan max_nesting} + result[iv.intern] = instance_variable_get("@#{iv}") + end + result + end + end + + module GeneratorMethods + module Object + # Converts this object to a string (calling #to_s), converts + # it to a PSON string, and returns the result. This is a fallback, if no + # special method #to_pson was defined for some object. + def to_pson(*) to_s.to_pson end + end + + module Hash + # Returns a PSON string containing a PSON object, that is unparsed from + # this Hash instance. + # _state_ is a PSON::State object, that can also be used to configure the + # produced PSON string output further. + # _depth_ is used to find out nesting depth, to indent accordingly. + def to_pson(state = nil, depth = 0, *) + if state + state = PSON.state.from_state(state) + state.check_max_nesting(depth) + pson_check_circular(state) { pson_transform(state, depth) } + else + pson_transform(state, depth) + end + end + + private + + def pson_check_circular(state) + if state and state.check_circular? + state.seen?(self) and raise PSON::CircularDatastructure, + "circular data structures not supported!" 
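+            # Editor's note: reaching this point means the hash has not been
+            # seen yet in this generating run; it is remembered for the
+            # duration of the yield below and forgotten again in the ensure
+            # clause.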
+ state.remember self + end + yield + ensure + state and state.forget self + end + + def pson_shift(state, depth) + state and not state.object_nl.empty? or return '' + state.indent * depth + end + + def pson_transform(state, depth) + delim = ',' + if state + delim << state.object_nl + result = '{' + result << state.object_nl + result << map { |key,value| + s = pson_shift(state, depth + 1) + s << key.to_s.to_pson(state, depth + 1) + s << state.space_before + s << ':' + s << state.space + s << value.to_pson(state, depth + 1) + }.join(delim) + result << state.object_nl + result << pson_shift(state, depth) + result << '}' + else + result = '{' + result << map { |key,value| + key.to_s.to_pson << ':' << value.to_pson + }.join(delim) + result << '}' + end + result + end + end + + module Array + # Returns a PSON string containing a PSON array, that is unparsed from + # this Array instance. + # _state_ is a PSON::State object, that can also be used to configure the + # produced PSON string output further. + # _depth_ is used to find out nesting depth, to indent accordingly. + def to_pson(state = nil, depth = 0, *) + if state + state = PSON.state.from_state(state) + state.check_max_nesting(depth) + pson_check_circular(state) { pson_transform(state, depth) } + else + pson_transform(state, depth) + end + end + + private + + def pson_check_circular(state) + if state and state.check_circular? + state.seen?(self) and raise PSON::CircularDatastructure, + "circular data structures not supported!" + state.remember self + end + yield + ensure + state and state.forget self + end + + def pson_shift(state, depth) + state and not state.array_nl.empty? or return '' + state.indent * depth + end + + def pson_transform(state, depth) + delim = ',' + if state + delim << state.array_nl + result = '[' + result << state.array_nl + result << map { |value| + pson_shift(state, depth + 1) << value.to_pson(state, depth + 1) + }.join(delim) + result << state.array_nl + result << pson_shift(state, depth) + result << ']' + else + '[' << map { |value| value.to_pson }.join(delim) << ']' + end + end + end + + module Integer + # Returns a PSON string representation for this Integer number. + def to_pson(*) to_s end + end + + module Float + # Returns a PSON string representation for this Float number. + def to_pson(state = nil, *) + case + when infinite? + if !state || state.allow_nan? + to_s + else + raise GeneratorError, "#{self} not allowed in PSON" + end + when nan? + if !state || state.allow_nan? + to_s + else + raise GeneratorError, "#{self} not allowed in PSON" + end + else + to_s + end + end + end + + module String + # This string should be encoded with UTF-8 A call to this method + # returns a PSON string encoded with UTF16 big endian characters as + # \u????. + def to_pson(*) + '"' << PSON.utf8_to_pson(self) << '"' + end + + # Module that holds the extinding methods if, the String module is + # included. + module Extend + # Raw Strings are PSON Objects (the raw bytes are stored in an array for the + # key "raw"). The Ruby String can be created by this module method. + def pson_create(o) + o['raw'].pack('C*') + end + end + + # Extends _modul_ with the String::Extend module. + def self.included(modul) + modul.extend Extend + end + + # This method creates a raw object hash, that can be nested into + # other data structures and will be unparsed as a raw string. This + # method should be used, if you want to convert raw strings to PSON + # instead of UTF-8 strings, e. g. binary data. 
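+          # Editor's example (derived from the method below; the exact key
+          # depends on PSON.create_id), once these generator methods are
+          # mixed into String:
+          #
+          #   "\xff\x00".to_pson_raw_object
+          #   # => { PSON.create_id => "String", "raw" => [255, 0] }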
+ def to_pson_raw_object + { + PSON.create_id => self.class.name, + 'raw' => self.unpack('C*'), + } + end + + # This method creates a PSON text from the result of + # a call to to_pson_raw_object of this String. + def to_pson_raw(*args) + to_pson_raw_object.to_pson(*args) + end + end + + module TrueClass + # Returns a PSON string for true: 'true'. + def to_pson(*) 'true' end + end + + module FalseClass + # Returns a PSON string for false: 'false'. + def to_pson(*) 'false' end + end + + module NilClass + # Returns a PSON string for nil: 'null'. + def to_pson(*) 'null' end + end + end + end + end +end diff --git a/mcollective/lib/puppet/external/pson/pure/parser.rb b/mcollective/lib/puppet/external/pson/pure/parser.rb new file mode 100644 index 000000000..6048f67e0 --- /dev/null +++ b/mcollective/lib/puppet/external/pson/pure/parser.rb @@ -0,0 +1,264 @@ +require 'strscan' + +module PSON + module Pure + # This class implements the PSON parser that is used to parse a PSON string + # into a Ruby data structure. + class Parser < StringScanner + STRING = /" ((?:[^\x0-\x1f"\\] | + # escaped special characters: + \\["\\\/bfnrt] | + \\u[0-9a-fA-F]{4} | + # match all but escaped special characters: + \\[\x20-\x21\x23-\x2e\x30-\x5b\x5d-\x61\x63-\x65\x67-\x6d\x6f-\x71\x73\x75-\xff])*) + "/nx + INTEGER = /(-?0|-?[1-9]\d*)/ + FLOAT = /(-? + (?:0|[1-9]\d*) + (?: + \.\d+(?i:e[+-]?\d+) | + \.\d+ | + (?i:e[+-]?\d+) + ) + )/x + NAN = /NaN/ + INFINITY = /Infinity/ + MINUS_INFINITY = /-Infinity/ + OBJECT_OPEN = /\{/ + OBJECT_CLOSE = /\}/ + ARRAY_OPEN = /\[/ + ARRAY_CLOSE = /\]/ + PAIR_DELIMITER = /:/ + COLLECTION_DELIMITER = /,/ + TRUE = /true/ + FALSE = /false/ + NULL = /null/ + IGNORE = %r( + (?: + //[^\n\r]*[\n\r]| # line comments + /\* # c-style comments + (?: + [^*/]| # normal chars + /[^*]| # slashes that do not start a nested comment + \*[^/]| # asterisks that do not end this comment + /(?=\*/) # single slash before this comment's end + )* + \*/ # the End of this comment + |[ \t\r\n]+ # whitespaces: space, horicontal tab, lf, cr + )+ + )mx + + UNPARSED = Object.new + + # Creates a new PSON::Pure::Parser instance for the string _source_. + # + # It will be configured by the _opts_ hash. _opts_ can have the following + # keys: + # * *max_nesting*: The maximum depth of nesting allowed in the parsed data + # structures. Disable depth checking with :max_nesting => false|nil|0, + # it defaults to 19. + # * *allow_nan*: If set to true, allow NaN, Infinity and -Infinity in + # defiance of RFC 4627 to be parsed by the Parser. This option defaults + # to false. + # * *create_additions*: If set to false, the Parser doesn't create + # additions even if a matchin class and create_id was found. This option + # defaults to true. + # * *object_class*: Defaults to Hash + # * *array_class*: Defaults to Array + def initialize(source, opts = {}) + super + if !opts.key?(:max_nesting) # defaults to 19 + @max_nesting = 19 + elsif opts[:max_nesting] + @max_nesting = opts[:max_nesting] + else + @max_nesting = 0 + end + @allow_nan = !!opts[:allow_nan] + ca = true + ca = opts[:create_additions] if opts.key?(:create_additions) + @create_id = ca ? PSON.create_id : nil + @object_class = opts[:object_class] || Hash + @array_class = opts[:array_class] || Array + end + + alias source string + + # Parses the current PSON string _source_ and returns the complete data + # structure as a result. + def parse + reset + obj = nil + until eos? + case + when scan(OBJECT_OPEN) + obj and raise ParserError, "source '#{peek(20)}' not in PSON!" 
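+            # Editor's note: a second top-level value in the source is
+            # rejected above; otherwise nesting-depth tracking starts at 1
+            # for this top-level object.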
+ @current_nesting = 1 + obj = parse_object + when scan(ARRAY_OPEN) + obj and raise ParserError, "source '#{peek(20)}' not in PSON!" + @current_nesting = 1 + obj = parse_array + when skip(IGNORE) + ; + else + raise ParserError, "source '#{peek(20)}' not in PSON!" + end + end + obj or raise ParserError, "source did not contain any PSON!" + obj + end + + private + + # Unescape characters in strings. + UNESCAPE_MAP = Hash.new { |h, k| h[k] = k.chr } + + UNESCAPE_MAP.update( + { + ?" => '"', + ?\\ => '\\', + ?/ => '/', + ?b => "\b", + ?f => "\f", + ?n => "\n", + ?r => "\r", + ?t => "\t", + ?u => nil, + + }) + + def parse_string + if scan(STRING) + return '' if self[1].empty? + string = self[1].gsub(%r{(?:\\[\\bfnrt"/]|(?:\\u(?:[A-Fa-f\d]{4}))+|\\[\x20-\xff])}n) do |c| + if u = UNESCAPE_MAP[$MATCH[1]] + u + else # \uXXXX + bytes = '' + i = 0 + while c[6 * i] == ?\\ && c[6 * i + 1] == ?u + bytes << c[6 * i + 2, 2].to_i(16) << c[6 * i + 4, 2].to_i(16) + i += 1 + end + PSON::UTF16toUTF8.iconv(bytes) + end + end + string.force_encoding(Encoding::UTF_8) if string.respond_to?(:force_encoding) + string + else + UNPARSED + end + rescue Iconv::Failure => e + raise GeneratorError, "Caught #{e.class}: #{e}" + end + + def parse_value + case + when scan(FLOAT) + Float(self[1]) + when scan(INTEGER) + Integer(self[1]) + when scan(TRUE) + true + when scan(FALSE) + false + when scan(NULL) + nil + when (string = parse_string) != UNPARSED + string + when scan(ARRAY_OPEN) + @current_nesting += 1 + ary = parse_array + @current_nesting -= 1 + ary + when scan(OBJECT_OPEN) + @current_nesting += 1 + obj = parse_object + @current_nesting -= 1 + obj + when @allow_nan && scan(NAN) + NaN + when @allow_nan && scan(INFINITY) + Infinity + when @allow_nan && scan(MINUS_INFINITY) + MinusInfinity + else + UNPARSED + end + end + + def parse_array + raise NestingError, "nesting of #@current_nesting is too deep" if + @max_nesting.nonzero? && @current_nesting > @max_nesting + result = @array_class.new + delim = false + until eos? + case + when (value = parse_value) != UNPARSED + delim = false + result << value + skip(IGNORE) + if scan(COLLECTION_DELIMITER) + delim = true + elsif match?(ARRAY_CLOSE) + ; + else + raise ParserError, "expected ',' or ']' in array at '#{peek(20)}'!" + end + when scan(ARRAY_CLOSE) + raise ParserError, "expected next element in array at '#{peek(20)}'!" if delim + break + when skip(IGNORE) + ; + else + raise ParserError, "unexpected token in array at '#{peek(20)}'!" + end + end + result + end + + def parse_object + raise NestingError, "nesting of #@current_nesting is too deep" if + @max_nesting.nonzero? && @current_nesting > @max_nesting + result = @object_class.new + delim = false + until eos? + case + when (string = parse_string) != UNPARSED + skip(IGNORE) + raise ParserError, "expected ':' in object at '#{peek(20)}'!" unless scan(PAIR_DELIMITER) + skip(IGNORE) + unless (value = parse_value).equal? UNPARSED + result[string] = value + delim = false + skip(IGNORE) + if scan(COLLECTION_DELIMITER) + delim = true + elsif match?(OBJECT_CLOSE) + ; + else + raise ParserError, "expected ',' or '}' in object at '#{peek(20)}'!" + end + else + raise ParserError, "expected value in object at '#{peek(20)}'!" + end + when scan(OBJECT_CLOSE) + raise ParserError, "expected next name, value pair in object at '#{peek(20)}'!" if delim + if @create_id and klassname = result[@create_id] + klass = PSON.deep_const_get klassname + break unless klass and klass.pson_creatable? 
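+              # Editor's note: this is the create_additions path - the class
+              # named under the create_id key rebuilds a full Ruby object
+              # from the parsed hash via its pson_create class method.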
+ result = klass.pson_create(result) + end + break + when skip(IGNORE) + ; + else + raise ParserError, "unexpected token in object at '#{peek(20)}'!" + end + end + result + end + end + end +end diff --git a/mcollective/lib/puppet/external/pson/version.rb b/mcollective/lib/puppet/external/pson/version.rb new file mode 100644 index 000000000..a5a8e4702 --- /dev/null +++ b/mcollective/lib/puppet/external/pson/version.rb @@ -0,0 +1,8 @@ +module PSON + # PSON version + VERSION = '1.1.9' + VERSION_ARRAY = VERSION.split(/\./).map { |x| x.to_i } # :nodoc: + VERSION_MAJOR = VERSION_ARRAY[0] # :nodoc: + VERSION_MINOR = VERSION_ARRAY[1] # :nodoc: + VERSION_BUILD = VERSION_ARRAY[2] # :nodoc: +end diff --git a/mcollective/lib/puppet/feature/base.rb b/mcollective/lib/puppet/feature/base.rb new file mode 100644 index 000000000..c983f5c12 --- /dev/null +++ b/mcollective/lib/puppet/feature/base.rb @@ -0,0 +1,54 @@ +# Created by Luke Kanies on 2006-04-30. +# Copyright (c) 2006. All rights reserved. + +require 'puppet/util/feature' + +# Add the simple features, all in one file. + +# We've got LDAP available. +Puppet.features.add(:ldap, :libs => ["ldap"]) + +# We have the Rdoc::Usage library. +Puppet.features.add(:usage, :libs => %w{rdoc/ri/ri_paths rdoc/usage}) + +# We have libshadow, useful for managing passwords. +Puppet.features.add(:libshadow, :libs => ["shadow"]) + +# We're running as root. +Puppet.features.add(:root) { require 'puppet/util/suidmanager'; Puppet::Util::SUIDManager.root? } + +# We've got mongrel available +Puppet.features.add(:mongrel, :libs => %w{rubygems mongrel puppet/network/http_server/mongrel}) + +# We have lcs diff +Puppet.features.add :diff, :libs => %w{diff/lcs diff/lcs/hunk} + +# We have augeas +Puppet.features.add(:augeas, :libs => ["augeas"]) + +# We have RRD available +Puppet.features.add(:rrd_legacy, :libs => ["RRDtool"]) +Puppet.features.add(:rrd, :libs => ["RRD"]) + +# We have OpenSSL +Puppet.features.add(:openssl, :libs => ["openssl"]) + +# We have a syslog implementation +Puppet.features.add(:syslog, :libs => ["syslog"]) + +# We can use POSIX user functions +Puppet.features.add(:posix) do + require 'etc' + Etc.getpwuid(0) != nil && Puppet.features.syslog? +end + +# We can use Microsoft Windows functions +Puppet.features.add(:microsoft_windows, :libs => ["sys/admin", "win32/process", "win32/dir"]) + +raise Puppet::Error,"Cannot determine basic system flavour" unless Puppet.features.posix? or Puppet.features.microsoft_windows? + +# We have CouchDB +Puppet.features.add(:couchdb, :libs => ["couchrest"]) + +# We have sqlite +Puppet.features.add(:sqlite, :libs => ["sqlite3"]) diff --git a/mcollective/lib/puppet/feature/pson.rb b/mcollective/lib/puppet/feature/pson.rb new file mode 100644 index 000000000..0ebb2806f --- /dev/null +++ b/mcollective/lib/puppet/feature/pson.rb @@ -0,0 +1,6 @@ +Puppet.features.add(:pson) do + require 'puppet/external/pson/common' + require 'puppet/external/pson/version' + require 'puppet/external/pson/pure' + true +end diff --git a/mcollective/lib/puppet/feature/rack.rb b/mcollective/lib/puppet/feature/rack.rb new file mode 100644 index 000000000..8d101ffc5 --- /dev/null +++ b/mcollective/lib/puppet/feature/rack.rb @@ -0,0 +1,24 @@ +require 'puppet/util/feature' + +# See if we have rack available, an HTTP Application Stack +# Explicitly depend on rack library version >= 1.0.0 +Puppet.features.add(:rack) do + begin + require 'rack' + rescue LoadError => detail + require 'rubygems' + require 'rack' + end + + if ! 
(defined?(::Rack) and defined?(::Rack.release)) + false + else + major_version = ::Rack.release.split('.')[0].to_i + if major_version >= 1 + true + else + false + end + end +end + diff --git a/mcollective/lib/puppet/feature/rails.rb b/mcollective/lib/puppet/feature/rails.rb new file mode 100644 index 000000000..74ed09aa6 --- /dev/null +++ b/mcollective/lib/puppet/feature/rails.rb @@ -0,0 +1,33 @@ +# Created by Luke Kanies on 2006-11-07. +# Copyright (c) 2006. All rights reserved. + +require 'puppet/util/feature' + +Puppet.features.rubygems? + +Puppet.features.add(:rails) do + begin + require 'active_record' + require 'active_record/version' + rescue LoadError => detail + if FileTest.exists?("/usr/share/rails") + count = 0 + Dir.entries("/usr/share/rails").each do |dir| + libdir = File.join("/usr/share/rails", dir, "lib") + if FileTest.exists?(libdir) and ! $LOAD_PATH.include?(libdir) + count += 1 + $LOAD_PATH << libdir + end + end + + retry if count > 0 + end + end + + unless (Puppet::Util.activerecord_version >= 2.1) + Puppet.info "ActiveRecord 2.1 or later required for StoreConfigs" + false + else + true + end +end diff --git a/mcollective/lib/puppet/feature/rubygems.rb b/mcollective/lib/puppet/feature/rubygems.rb new file mode 100644 index 000000000..639524ffe --- /dev/null +++ b/mcollective/lib/puppet/feature/rubygems.rb @@ -0,0 +1,6 @@ +# Created by Luke Kanies on 2006-11-07. +# Copyright (c) 2006. All rights reserved. + +require 'puppet/util/feature' + +Puppet.features.add(:rubygems, :libs => "rubygems") diff --git a/mcollective/lib/puppet/feature/selinux.rb b/mcollective/lib/puppet/feature/selinux.rb new file mode 100644 index 000000000..84be239fc --- /dev/null +++ b/mcollective/lib/puppet/feature/selinux.rb @@ -0,0 +1,3 @@ +require 'puppet/util/feature' + +Puppet.features.add(:selinux, :libs => ["selinux"]) diff --git a/mcollective/lib/puppet/feature/stomp.rb b/mcollective/lib/puppet/feature/stomp.rb new file mode 100644 index 000000000..4a77c0a25 --- /dev/null +++ b/mcollective/lib/puppet/feature/stomp.rb @@ -0,0 +1,6 @@ +require 'puppet/util/feature' + +# We want this to load if possible, but it's not automatically +# required. +Puppet.features.rubygems? +Puppet.features.add(:stomp, :libs => %{stomp}) diff --git a/mcollective/lib/puppet/feature/zlib.rb b/mcollective/lib/puppet/feature/zlib.rb new file mode 100644 index 000000000..ddd65b234 --- /dev/null +++ b/mcollective/lib/puppet/feature/zlib.rb @@ -0,0 +1,6 @@ +require 'puppet/util/feature' + +# We want this to load if possible, but it's not automatically +# required. +Puppet.features.rubygems? +Puppet.features.add(:zlib, :libs => %{zlib}) diff --git a/mcollective/lib/puppet/file_bucket.rb b/mcollective/lib/puppet/file_bucket.rb new file mode 100644 index 000000000..5eafcd269 --- /dev/null +++ b/mcollective/lib/puppet/file_bucket.rb @@ -0,0 +1,4 @@ +# stub +module Puppet::FileBucket + class BucketError < RuntimeError; end +end diff --git a/mcollective/lib/puppet/file_bucket/dipper.rb b/mcollective/lib/puppet/file_bucket/dipper.rb new file mode 100644 index 000000000..de4c01b78 --- /dev/null +++ b/mcollective/lib/puppet/file_bucket/dipper.rb @@ -0,0 +1,105 @@ +require 'puppet/file_bucket' +require 'puppet/file_bucket/file' +require 'puppet/indirector/request' + +class Puppet::FileBucket::Dipper + # This is a transitional implementation that uses REST + # to access remote filebucket files. 
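+  # Editor's sketch (not part of the original change set) of typical client
+  # usage, assuming a reachable puppet master on the default port:
+  #
+  #   dipper = Puppet::FileBucket::Dipper.new(:Server => "puppet.example.com")
+  #   sum = dipper.backup("/etc/hosts")    # => MD5 checksum of the file
+  #   dipper.restore("/etc/hosts", sum)    # writes the saved contents back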
+ + attr_accessor :name + + # Create our bucket client + def initialize(hash = {}) + # Emulate the XMLRPC client + server = hash[:Server] + port = hash[:Port] || Puppet[:masterport] + environment = Puppet[:environment] + + if hash.include?(:Path) + @local_path = hash[:Path] + @rest_path = nil + else + @local_path = nil + @rest_path = "https://#{server}:#{port}/#{environment}/file_bucket_file/" + end + end + + def local? + !! @local_path + end + + # Back up a file to our bucket + def backup(file) + raise(ArgumentError, "File #{file} does not exist") unless ::File.exist?(file) + contents = ::File.read(file) + begin + file_bucket_file = Puppet::FileBucket::File.new(contents, :bucket_path => @local_path) + files_original_path = absolutize_path(file) + dest_path = "#{@rest_path}#{file_bucket_file.name}#{files_original_path}" + + # Make a HEAD request for the file so that we don't waste time + # uploading it if it already exists in the bucket. + unless Puppet::FileBucket::File.head("#{@rest_path}#{file_bucket_file.checksum_type}/#{file_bucket_file.checksum_data}#{files_original_path}") + file_bucket_file.save(dest_path) + end + + return file_bucket_file.checksum_data + rescue => detail + puts detail.backtrace if Puppet[:trace] + raise Puppet::Error, "Could not back up #{file}: #{detail}" + end + end + + # Retrieve a file by sum. + def getfile(sum) + source_path = "#{@rest_path}md5/#{sum}" + file_bucket_file = Puppet::FileBucket::File.find(source_path, :bucket_path => @local_path) + + raise Puppet::Error, "File not found" unless file_bucket_file + file_bucket_file.to_s + end + + # Restore the file + def restore(file,sum) + restore = true + if FileTest.exists?(file) + cursum = Digest::MD5.hexdigest(::File.read(file)) + + # if the checksum has changed... + # this might be extra effort + if cursum == sum + restore = false + end + end + + if restore + if newcontents = getfile(sum) + tmp = "" + newsum = Digest::MD5.hexdigest(newcontents) + changed = nil + if FileTest.exists?(file) and ! FileTest.writable?(file) + changed = ::File.stat(file).mode + ::File.chmod(changed | 0200, file) + end + ::File.open(file, ::File::WRONLY|::File::TRUNC|::File::CREAT) { |of| + of.print(newcontents) + } + ::File.chmod(changed, file) if changed + else + Puppet.err "Could not find file with checksum #{sum}" + return nil + end + return newsum + else + return nil + end + end + + private + def absolutize_path( path ) + require 'pathname' + Pathname.new(path).realpath + end + +end + diff --git a/mcollective/lib/puppet/file_bucket/file.rb b/mcollective/lib/puppet/file_bucket/file.rb new file mode 100644 index 000000000..c8443bf68 --- /dev/null +++ b/mcollective/lib/puppet/file_bucket/file.rb @@ -0,0 +1,55 @@ +require 'puppet/file_bucket' +require 'puppet/indirector' +require 'puppet/util/checksums' +require 'digest/md5' + +class Puppet::FileBucket::File + # This class handles the abstract notion of a file in a filebucket. 
+ # There are mechanisms to save and load this file locally and remotely in puppet/indirector/filebucketfile/* + # There is a compatibility class that emulates pre-indirector filebuckets in Puppet::FileBucket::Dipper + extend Puppet::Indirector + indirects :file_bucket_file, :terminus_class => :selector + + attr :contents + attr :bucket_path + + def initialize( contents, options = {} ) + raise ArgumentError if !contents.is_a?(String) + @contents = contents + + @bucket_path = options.delete(:bucket_path) + raise ArgumentError if options != {} + end + + def checksum_type + 'md5' + end + + def checksum + "{#{checksum_type}}#{checksum_data}" + end + + def checksum_data + @checksum_data ||= Digest::MD5.hexdigest(contents) + end + + def to_s + contents + end + + def name + "#{checksum_type}/#{checksum_data}" + end + + def self.from_s(contents) + self.new(contents) + end + + def to_pson + { "contents" => contents }.to_pson + end + + def self.from_pson(pson) + self.new(pson["contents"]) + end +end diff --git a/mcollective/lib/puppet/file_collection.rb b/mcollective/lib/puppet/file_collection.rb new file mode 100644 index 000000000..a7bdd04a8 --- /dev/null +++ b/mcollective/lib/puppet/file_collection.rb @@ -0,0 +1,30 @@ +# A simple way to turn file names into singletons, +# so we don't have tons of copies of each file path around. +class Puppet::FileCollection + require 'puppet/file_collection/lookup' + + def self.collection + @collection + end + + def initialize + @paths = [] + @inverse = {} + end + + def index(path) + if i = @inverse[path] + return i + else + @paths << path + i = @inverse[path] = @paths.length - 1 + return i + end + end + + def path(index) + @paths[index] + end + + @collection = self.new +end diff --git a/mcollective/lib/puppet/file_collection/lookup.rb b/mcollective/lib/puppet/file_collection/lookup.rb new file mode 100644 index 000000000..30679480d --- /dev/null +++ b/mcollective/lib/puppet/file_collection/lookup.rb @@ -0,0 +1,20 @@ +require 'puppet/file_collection' + +# A simple module for looking up file paths and indexes +# in a file collection. +module Puppet::FileCollection::Lookup + attr_accessor :line, :file_index + + def file_collection + Puppet::FileCollection.collection + end + + def file=(path) + @file_index = file_collection.index(path) + end + + def file + return nil unless file_index + file_collection.path(file_index) + end +end diff --git a/mcollective/lib/puppet/file_serving.rb b/mcollective/lib/puppet/file_serving.rb new file mode 100644 index 000000000..e7e2b898e --- /dev/null +++ b/mcollective/lib/puppet/file_serving.rb @@ -0,0 +1,7 @@ +# +# Created by Luke Kanies on 2007-10-16. +# Copyright (c) 2007. All rights reserved. + +# Just a stub class. +class Puppet::FileServing # :nodoc: +end diff --git a/mcollective/lib/puppet/file_serving/base.rb b/mcollective/lib/puppet/file_serving/base.rb new file mode 100644 index 000000000..09cab97d9 --- /dev/null +++ b/mcollective/lib/puppet/file_serving/base.rb @@ -0,0 +1,88 @@ +# +# Created by Luke Kanies on 2007-10-22. +# Copyright (c) 2007. All rights reserved. + +require 'puppet/file_serving' + +# The base class for Content and Metadata; provides common +# functionality like the behaviour around links. +class Puppet::FileServing::Base + # This is for external consumers to store the source that was used + # to retrieve the metadata. + attr_accessor :source + + # Does our file exist? + def exist? + stat + return true + rescue => detail + return false + end + + # Return the full path to our file. 
Fails if there's no path set. + def full_path(dummy_argument=:work_arround_for_ruby_GC_bug) + (if relative_path.nil? or relative_path == "" or relative_path == "." + path + else + File.join(path, relative_path) + end).gsub(%r{/+}, "/") + end + + def initialize(path, options = {}) + self.path = path + @links = :manage + + options.each do |param, value| + begin + send param.to_s + "=", value + rescue NoMethodError + raise ArgumentError, "Invalid option #{param} for #{self.class}" + end + end + end + + # Determine how we deal with links. + attr_reader :links + def links=(value) + value = value.to_sym + value = :manage if value == :ignore + raise(ArgumentError, ":links can only be set to :manage or :follow") unless [:manage, :follow].include?(value) + @links = value + end + + # Set our base path. + attr_reader :path + def path=(path) + raise ArgumentError.new("Paths must be fully qualified") unless path =~ /^#{::File::SEPARATOR}/ + @path = path + end + + # Set a relative path; this is used for recursion, and sets + # the file's path relative to the initial recursion point. + attr_reader :relative_path + def relative_path=(path) + raise ArgumentError.new("Relative paths must not be fully qualified") if path =~ /^#{::File::SEPARATOR}/ + @relative_path = path + end + + # Stat our file, using the appropriate link-sensitive method. + def stat + @stat_method ||= self.links == :manage ? :lstat : :stat + File.send(@stat_method, full_path) + end + + def to_pson_data_hash + { + # No 'document_type' since we don't send these bare + 'data' => { + 'path' => @path, + 'relative_path' => @relative_path, + 'links' => @links + }, + 'metadata' => { + 'api_version' => 1 + } + } + end + +end diff --git a/mcollective/lib/puppet/file_serving/configuration.rb b/mcollective/lib/puppet/file_serving/configuration.rb new file mode 100644 index 000000000..5bcb029af --- /dev/null +++ b/mcollective/lib/puppet/file_serving/configuration.rb @@ -0,0 +1,128 @@ +# +# Created by Luke Kanies on 2007-10-16. +# Copyright (c) 2007. All rights reserved. + +require 'puppet' +require 'puppet/file_serving' +require 'puppet/file_serving/mount' +require 'puppet/file_serving/mount/file' +require 'puppet/file_serving/mount/modules' +require 'puppet/file_serving/mount/plugins' +require 'puppet/util/cacher' + +class Puppet::FileServing::Configuration + require 'puppet/file_serving/configuration/parser' + + class << self + include Puppet::Util::Cacher + cached_attr(:configuration) { new } + end + + Mount = Puppet::FileServing::Mount + + # Create our singleton configuration. + def self.create + configuration + end + + private_class_method :new + + attr_reader :mounts + #private :mounts + + # Find the right mount. Does some shenanigans to support old-style module + # mounts. + def find_mount(mount_name, environment) + # Reparse the configuration if necessary. + readconfig + + if mount = mounts[mount_name] + return mount + end + + if environment.module(mount_name) + Puppet::Util::Warnings.notice_once "DEPRECATION NOTICE: Files found in modules without specifying 'modules' in file path will be deprecated in the next major release. Please fix module '#{mount_name}' when no 0.24.x clients are present" + return mounts["modules"] + end + + # This can be nil. + mounts[mount_name] + end + + def initialize + @mounts = {} + @config_file = nil + + # We don't check to see if the file is modified the first time, + # because we always want to parse at first. + readconfig(false) + end + + # Is a given mount available? 
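+  # (Editor's example: config.mounted?("modules") should be true once the
+  # default mounts have been created by mk_default_mounts below.)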
+ def mounted?(name) + @mounts.include?(name) + end + + # Split the path into the separate mount point and path. + def split_path(request) + # Reparse the configuration if necessary. + readconfig + + mount_name, path = request.key.split(File::Separator, 2) + + raise(ArgumentError, "Cannot find file: Invalid mount '#{mount_name}'") unless mount_name =~ %r{^[-\w]+$} + raise(ArgumentError, "Cannot find file: Invalid relative path '#{path}'") if path and path.split('/').include?('..') + + return nil unless mount = find_mount(mount_name, request.environment) + if mount.name == "modules" and mount_name != "modules" + # yay backward-compatibility + path = "#{mount_name}/#{path}" + end + + if path == "" + path = nil + elsif path + # Remove any double slashes that might have occurred + path = path.gsub(/\/+/, "/") + end + + return mount, path + end + + def umount(name) + @mounts.delete(name) if @mounts.include? name + end + + private + + def mk_default_mounts + @mounts["modules"] ||= Mount::Modules.new("modules") + @mounts["modules"].allow('*') if @mounts["modules"].empty? + @mounts["plugins"] ||= Mount::Plugins.new("plugins") + @mounts["plugins"].allow('*') if @mounts["plugins"].empty? + end + + # Read the configuration file. + def readconfig(check = true) + config = Puppet[:fileserverconfig] + + return unless FileTest.exists?(config) + + @parser ||= Puppet::FileServing::Configuration::Parser.new(config) + + return if check and ! @parser.changed? + + # Don't assign the mounts hash until we're sure the parsing succeeded. + begin + newmounts = @parser.parse + @mounts = newmounts + rescue => detail + puts detail.backtrace if Puppet[:trace] + Puppet.err "Error parsing fileserver configuration: #{detail}; using old configuration" + end + + ensure + # Make sure we've got our plugins and modules. + mk_default_mounts + end +end diff --git a/mcollective/lib/puppet/file_serving/configuration/parser.rb b/mcollective/lib/puppet/file_serving/configuration/parser.rb new file mode 100644 index 000000000..334201d37 --- /dev/null +++ b/mcollective/lib/puppet/file_serving/configuration/parser.rb @@ -0,0 +1,122 @@ +require 'puppet/file_serving/configuration' +require 'puppet/util/loadedfile' + +class Puppet::FileServing::Configuration::Parser < Puppet::Util::LoadedFile + Mount = Puppet::FileServing::Mount + MODULES = 'modules' + + # Parse our configuration file. + def parse + raise("File server configuration #{self.file} does not exist") unless FileTest.exists?(self.file) + raise("Cannot read file server configuration #{self.file}") unless FileTest.readable?(self.file) + + @mounts = {} + @count = 0 + + File.open(self.file) { |f| + mount = nil + f.each { |line| + # Have the count increment at the top, in case we throw exceptions. + @count += 1 + + case line + when /^\s*#/; next # skip comments + when /^\s*$/; next # skip blank lines + when /\[([-\w]+)\]/ + mount = newmount($1) + when /^\s*(\w+)\s+(.+)$/ + var = $1 + value = $2 + raise(ArgumentError, "Fileserver configuration file does not use '=' as a separator") if value =~ /^=/ + case var + when "path" + path(mount, value) + when "allow" + allow(mount, value) + when "deny" + deny(mount, value) + else + raise ArgumentError.new("Invalid argument '#{var}'", @count, file) + end + else + raise ArgumentError.new("Invalid line '#{line.chomp}'", @count, file) + end + } + } + + validate + + @mounts + end + + private + + # Allow a given pattern access to a mount. 
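+  # Editor's note: the value is a comma-separated list, so a configuration
+  # line such as
+  #
+  #   allow *.example.com, 192.168.0.0/24
+  #
+  # results in one mount.allow call per pattern.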
+ def allow(mount, value) + # LAK:NOTE See http://snurl.com/21zf8 [groups_google_com] + x = value.split(/\s*,\s*/).each { |val| + begin + mount.info "allowing #{val} access" + mount.allow(val) + rescue AuthStoreError => detail + + raise ArgumentError.new( + detail.to_s, + + @count, file) + end + } + end + + # Deny a given pattern access to a mount. + def deny(mount, value) + # LAK:NOTE See http://snurl.com/21zf8 [groups_google_com] + x = value.split(/\s*,\s*/).each { |val| + begin + mount.info "denying #{val} access" + mount.deny(val) + rescue AuthStoreError => detail + + raise ArgumentError.new( + detail.to_s, + + @count, file) + end + } + end + + # Create a new mount. + def newmount(name) + raise ArgumentError, "#{@mounts[name]} is already mounted at #{name}", @count, file if @mounts.include?(name) + case name + when "modules" + mount = Mount::Modules.new(name) + when "plugins" + mount = Mount::Plugins.new(name) + else + mount = Mount::File.new(name) + end + @mounts[name] = mount + mount + end + + # Set the path for a mount. + def path(mount, value) + if mount.respond_to?(:path=) + begin + mount.path = value + rescue ArgumentError => detail + Puppet.err "Removing mount #{mount.name}: #{detail}" + @mounts.delete(mount.name) + end + else + Puppet.warning "The '#{mount.name}' module can not have a path. Ignoring attempt to set it" + end + end + + # Make sure all of our mounts are valid. We have to do this after the fact + # because details are added over time as the file is parsed. + def validate + @mounts.each { |name, mount| mount.validate } + end +end diff --git a/mcollective/lib/puppet/file_serving/content.rb b/mcollective/lib/puppet/file_serving/content.rb new file mode 100644 index 000000000..571f70f96 --- /dev/null +++ b/mcollective/lib/puppet/file_serving/content.rb @@ -0,0 +1,49 @@ +# +# Created by Luke Kanies on 2007-10-16. +# Copyright (c) 2007. All rights reserved. + +require 'puppet/indirector' +require 'puppet/file_serving' +require 'puppet/file_serving/base' + +# A class that handles retrieving file contents. +# It only reads the file when its content is specifically +# asked for. +class Puppet::FileServing::Content < Puppet::FileServing::Base + extend Puppet::Indirector + indirects :file_content, :terminus_class => :selector + + attr_writer :content + + def self.supported_formats + [:raw] + end + + def self.from_raw(content) + instance = new("/this/is/a/fake/path") + instance.content = content + instance + end + + # BF: we used to fetch the file content here, but this is counter-productive + # for puppetmaster streaming of file content. So collect just returns itself + def collect + return if stat.ftype == "directory" + self + end + + # Read the content of our file in. + def content + unless @content + # This stat can raise an exception, too. + raise(ArgumentError, "Cannot read the contents of links unless following links") if stat.ftype == "symlink" + + @content = ::File.read(full_path) + end + @content + end + + def to_raw + File.new(full_path, "r") + end +end diff --git a/mcollective/lib/puppet/file_serving/fileset.rb b/mcollective/lib/puppet/file_serving/fileset.rb new file mode 100644 index 000000000..f29f70a53 --- /dev/null +++ b/mcollective/lib/puppet/file_serving/fileset.rb @@ -0,0 +1,172 @@ +# +# Created by Luke Kanies on 2007-10-22. +# Copyright (c) 2007. All rights reserved. + +require 'find' +require 'puppet/file_serving' +require 'puppet/file_serving/metadata' + +# Operate recursively on a path, returning a set of file paths. 
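+#
+# Editor's sketch (illustrative, not part of the original change set),
+# assuming an existing directory /tmp/demo that contains sub/file.txt:
+#
+#   fileset = Puppet::FileServing::Fileset.new("/tmp/demo",
+#     :recurse => true, :ignore => ".svn")
+#   fileset.files   # => [".", "sub", "sub/file.txt"] (order may vary)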
+class Puppet::FileServing::Fileset + attr_reader :path, :ignore, :links + attr_accessor :recurse, :recurselimit, :checksum_type + + # Produce a hash of files, with merged so that earlier files + # with the same postfix win. E.g., /dir1/subfile beats /dir2/subfile. + # It's a hash because we need to know the relative path of each file, + # and the base directory. + # This will probably only ever be used for searching for plugins. + def self.merge(*filesets) + result = {} + + filesets.each do |fileset| + fileset.files.each do |file| + result[file] ||= fileset.path + end + end + + result + end + + # Return a list of all files in our fileset. This is different from the + # normal definition of find in that we support specific levels + # of recursion, which means we need to know when we're going another + # level deep, which Find doesn't do. + def files + files = perform_recursion + + # Now strip off the leading path, so each file becomes relative, and remove + # any slashes that might end up at the beginning of the path. + result = files.collect { |file| file.sub(%r{^#{Regexp.escape(@path)}/*}, '') } + + # And add the path itself. + result.unshift(".") + + result + end + + # Should we ignore this path? + def ignore?(path) + return false if @ignore == [nil] + + # 'detect' normally returns the found result, whereas we just want true/false. + ! @ignore.detect { |pattern| File.fnmatch?(pattern, path) }.nil? + end + + def ignore=(values) + values = [values] unless values.is_a?(Array) + @ignore = values + end + + def initialize(path, options = {}) + path = path.chomp(File::SEPARATOR) unless path == File::SEPARATOR + raise ArgumentError.new("Fileset paths must be fully qualified") unless File.expand_path(path) == path + + @path = path + + # Set our defaults. + @ignore = [] + @links = :manage + @recurse = false + @recurselimit = :infinite + + if options.is_a?(Puppet::Indirector::Request) + initialize_from_request(options) + else + initialize_from_hash(options) + end + + raise ArgumentError.new("Fileset paths must exist") unless stat = stat(path) + raise ArgumentError.new("Fileset recurse parameter must not be a number anymore, please use recurselimit") if @recurse.is_a?(Integer) + end + + def links=(links) + links = links.to_sym + raise(ArgumentError, "Invalid :links value '#{links}'") unless [:manage, :follow].include?(links) + @links = links + @stat_method = links == :manage ? :lstat : :stat + end + + # Should we recurse further? This is basically a single + # place for all of the logic around recursion. + def recurse?(depth) + # recurse if told to, and infinite recursion or current depth not at the limit + self.recurse and (self.recurselimit == :infinite or depth <= self.recurselimit) + end + + def initialize_from_hash(options) + options.each do |option, value| + method = option.to_s + "=" + begin + send(method, value) + rescue NoMethodError + raise ArgumentError, "Invalid option '#{option}'" + end + end + end + + def initialize_from_request(request) + [:links, :ignore, :recurse, :recurselimit, :checksum_type].each do |param| + if request.options.include?(param) # use 'include?' so the values can be false + value = request.options[param] + elsif request.options.include?(param.to_s) + value = request.options[param.to_s] + end + next if value.nil? + value = true if value == "true" + value = false if value == "false" + value = Integer(value) if value.is_a?(String) and value =~ /^\d+$/ + send(param.to_s + "=", value) + end + end + + private + + # Pull the recursion logic into one place. 
It's moderately hairy, and this + # allows us to keep the hairiness apart from what we do with the files. + def perform_recursion + # Start out with just our base directory. + current_dirs = [@path] + + next_dirs = [] + + depth = 1 + + result = [] + return result unless recurse?(depth) + + while dir_path = current_dirs.shift or ((depth += 1) and recurse?(depth) and current_dirs = next_dirs and next_dirs = [] and dir_path = current_dirs.shift) + next unless stat = stat(dir_path) + next unless stat.directory? + + Dir.entries(dir_path).each do |file_path| + next if [".", ".."].include?(file_path) + + # Note that this also causes matching directories not + # to be recursed into. + next if ignore?(file_path) + + # Add it to our list of files to return + result << File.join(dir_path, file_path) + + # And to our list of files/directories to iterate over. + next_dirs << File.join(dir_path, file_path) + end + end + + result + end + public + # Stat a given file, using the links-appropriate method. + def stat(path) + @stat_method ||= self.links == :manage ? :lstat : :stat + + begin + return File.send(@stat_method, path) + rescue + # If this happens, it is almost surely because we're + # trying to manage a link to a file that does not exist. + return nil + end + end +end diff --git a/mcollective/lib/puppet/file_serving/metadata.rb b/mcollective/lib/puppet/file_serving/metadata.rb new file mode 100644 index 000000000..87e8874d4 --- /dev/null +++ b/mcollective/lib/puppet/file_serving/metadata.rb @@ -0,0 +1,117 @@ +# +# Created by Luke Kanies on 2007-10-16. +# Copyright (c) 2007. All rights reserved. + +require 'puppet' +require 'puppet/indirector' +require 'puppet/file_serving' +require 'puppet/file_serving/base' +require 'puppet/util/checksums' + +# A class that handles retrieving file metadata. +class Puppet::FileServing::Metadata < Puppet::FileServing::Base + + include Puppet::Util::Checksums + + extend Puppet::Indirector + indirects :file_metadata, :terminus_class => :selector + + attr_reader :path, :owner, :group, :mode, :checksum_type, :checksum, :ftype, :destination + + PARAM_ORDER = [:mode, :ftype, :owner, :group] + + def attributes_with_tabs + raise(ArgumentError, "Cannot manage files of type #{ftype}") unless ['file','directory','link'].include? ftype + desc = [] + PARAM_ORDER.each { |check| + check = :ftype if check == :type + desc << send(check) + } + + desc << checksum + desc << @destination rescue nil if ftype == 'link' + + desc.join("\t") + end + + def checksum_type=(type) + raise(ArgumentError, "Unsupported checksum type #{type}") unless respond_to?("#{type}_file") + + @checksum_type = type + end + + # Retrieve the attributes for this file, relative to a base directory. + # Note that File.stat raises Errno::ENOENT if the file is absent and this + # method does not catch that exception. + def collect + real_path = full_path + stat = stat() + @owner = stat.uid + @group = stat.gid + @ftype = stat.ftype + + + # We have to mask the mode, yay. + @mode = stat.mode & 007777 + + case stat.ftype + when "file" + @checksum = ("{#{@checksum_type}}") + send("#{@checksum_type}_file", real_path).to_s + when "directory" # Always just timestamp the directory. 
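+      # Editor's note: directories get a ctime-based pseudo-checksum instead
+      # of a content hash, so the checksum_type is overridden here.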
+ @checksum_type = "ctime" + @checksum = ("{#{@checksum_type}}") + send("#{@checksum_type}_file", path).to_s + when "link" + @destination = File.readlink(real_path) + @checksum = ("{#{@checksum_type}}") + send("#{@checksum_type}_file", real_path).to_s rescue nil + else + raise ArgumentError, "Cannot manage files of type #{stat.ftype}" + end + end + + def initialize(path,data={}) + @owner = data.delete('owner') + @group = data.delete('group') + @mode = data.delete('mode') + if checksum = data.delete('checksum') + @checksum_type = checksum['type'] + @checksum = checksum['value'] + end + @checksum_type ||= "md5" + @ftype = data.delete('type') + @destination = data.delete('destination') + super(path,data) + end + + PSON.register_document_type('FileMetadata',self) + def to_pson_data_hash + { + 'document_type' => 'FileMetadata', + + 'data' => super['data'].update( + { + 'owner' => owner, + 'group' => group, + 'mode' => mode, + 'checksum' => { + 'type' => checksum_type, + 'value' => checksum + }, + 'type' => ftype, + 'destination' => destination, + + }), + 'metadata' => { + 'api_version' => 1 + } + } + end + + def to_pson(*args) + to_pson_data_hash.to_pson(*args) + end + + def self.from_pson(data) + new(data.delete('path'), data) + end + +end diff --git a/mcollective/lib/puppet/file_serving/mount.rb b/mcollective/lib/puppet/file_serving/mount.rb new file mode 100644 index 000000000..37dd89537 --- /dev/null +++ b/mcollective/lib/puppet/file_serving/mount.rb @@ -0,0 +1,44 @@ +# +# Created by Luke Kanies on 2007-10-16. +# Copyright (c) 2007. All rights reserved. + +require 'puppet/network/authstore' +require 'puppet/util/logging' +require 'puppet/util/cacher' +require 'puppet/file_serving' +require 'puppet/file_serving/metadata' +require 'puppet/file_serving/content' + +# Broker access to the filesystem, converting local URIs into metadata +# or content objects. +class Puppet::FileServing::Mount < Puppet::Network::AuthStore + include Puppet::Util::Logging + + attr_reader :name + + def find(path, options) + raise NotImplementedError + end + + # Create our object. It must have a name. + def initialize(name) + unless name =~ %r{^[-\w]+$} + raise ArgumentError, "Invalid mount name format '#{name}'" + end + @name = name + + super() + end + + def search(path, options) + raise NotImplementedError + end + + def to_s + "mount[#{@name}]" + end + + # A noop. + def validate + end +end diff --git a/mcollective/lib/puppet/file_serving/mount/file.rb b/mcollective/lib/puppet/file_serving/mount/file.rb new file mode 100644 index 000000000..7d622e4bf --- /dev/null +++ b/mcollective/lib/puppet/file_serving/mount/file.rb @@ -0,0 +1,124 @@ +require 'puppet/util/cacher' + +require 'puppet/file_serving/mount' + +class Puppet::FileServing::Mount::File < Puppet::FileServing::Mount + class << self + include Puppet::Util::Cacher + + cached_attr(:localmap) do + { "h" => Facter.value("hostname"), + "H" => [Facter.value("hostname"), + Facter.value("domain")].join("."), + "d" => Facter.value("domain") + } + end + end + + def complete_path(relative_path, node) + full_path = path(node) + + raise ArgumentError.new("Mounts without paths are not usable") unless full_path + + # If there's no relative path name, then we're serving the mount itself. 
+ return full_path unless relative_path + + file = ::File.join(full_path, relative_path) + + if !(FileTest.exist?(file) or FileTest.symlink?(file)) + Puppet.info("File does not exist or is not accessible: #{file}") + return nil + end + + file + end + + # Return an instance of the appropriate class. + def find(short_file, request) + complete_path(short_file, request.node) + end + + # Return the path as appropriate, expanding as necessary. + def path(node = nil) + if expandable? + return expand(@path, node) + else + return @path + end + end + + # Set the path. + def path=(path) + # FIXME: For now, just don't validate paths with replacement + # patterns in them. + if path =~ /%./ + # Mark that we're expandable. + @expandable = true + else + raise ArgumentError, "#{path} does not exist or is not a directory" unless FileTest.directory?(path) + raise ArgumentError, "#{path} is not readable" unless FileTest.readable?(path) + @expandable = false + end + @path = path + end + + def search(path, request) + return nil unless path = complete_path(path, request.node) + [path] + end + + # Verify our configuration is valid. This should really check to + # make sure at least someone will be allowed, but, eh. + def validate + raise ArgumentError.new("Mounts without paths are not usable") if @path.nil? + end + + private + + # Create a map for a specific node. + def clientmap(node) + { + "h" => node.sub(/\..*$/, ""), + "H" => node, + "d" => node.sub(/[^.]+\./, "") # domain name + } + end + + # Replace % patterns as appropriate. + def expand(path, node = nil) + # This map should probably be moved into a method. + map = nil + + if node + map = clientmap(node) + else + Puppet.notice "No client; expanding '#{path}' with local host" + # Else, use the local information + map = localmap + end + + path.gsub(/%(.)/) do |v| + key = $1 + if key == "%" + "%" + else + map[key] || v + end + end + end + + # Do we have any patterns in our path, yo? + def expandable? + if defined?(@expandable) + @expandable + else + false + end + end + + # Cache this manufactured map, since if it's used it's likely + # to get used a lot. + def localmap + self.class.localmap + end +end diff --git a/mcollective/lib/puppet/file_serving/mount/modules.rb b/mcollective/lib/puppet/file_serving/mount/modules.rb new file mode 100644 index 000000000..3cbacb276 --- /dev/null +++ b/mcollective/lib/puppet/file_serving/mount/modules.rb @@ -0,0 +1,24 @@ +require 'puppet/file_serving/mount' + +# This is the modules-specific mount: it knows how to search through +# modules for files. Yay. +class Puppet::FileServing::Mount::Modules < Puppet::FileServing::Mount + # Return an instance of the appropriate class. + def find(path, request) + raise "No module specified" if path.to_s.empty? + module_name, relative_path = path.split("/", 2) + return nil unless mod = request.environment.module(module_name) + + mod.file(relative_path) + end + + def search(path, request) + if result = find(path, request) + [result] + end + end + + def valid? + true + end +end diff --git a/mcollective/lib/puppet/file_serving/mount/plugins.rb b/mcollective/lib/puppet/file_serving/mount/plugins.rb new file mode 100644 index 000000000..d21d6e92e --- /dev/null +++ b/mcollective/lib/puppet/file_serving/mount/plugins.rb @@ -0,0 +1,26 @@ +require 'puppet/file_serving/mount' + +# Find files in the modules' plugins directories. +# This is a very strange mount because it merges +# many directories into one. 
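+#
+# Editor's note (based on the find/search methods below): a file requested
+# through this mount is served by the first module in the environment whose
+# plugin directory contains that relative path, and a search returns every
+# module's plugin directory.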
+class Puppet::FileServing::Mount::Plugins < Puppet::FileServing::Mount + # Return an instance of the appropriate class. + def find(relative_path, request) + return nil unless mod = request.environment.modules.find { |mod| mod.plugin(relative_path) } + + path = mod.plugin(relative_path) + + path + end + + def search(relative_path, request) + # We currently only support one kind of search on plugins - return + # them all. + paths = request.environment.modules.find_all { |mod| mod.plugins? }.collect { |mod| mod.plugin_directory } + return(paths.empty? ? nil : paths) + end + + def valid? + true + end +end diff --git a/mcollective/lib/puppet/file_serving/terminus_helper.rb b/mcollective/lib/puppet/file_serving/terminus_helper.rb new file mode 100644 index 000000000..4da285258 --- /dev/null +++ b/mcollective/lib/puppet/file_serving/terminus_helper.rb @@ -0,0 +1,25 @@ +# +# Created by Luke Kanies on 2007-10-22. +# Copyright (c) 2007. All rights reserved. + +require 'puppet/file_serving' +require 'puppet/file_serving/fileset' + +# Define some common methods for FileServing termini. +module Puppet::FileServing::TerminusHelper + # Create model instances for all files in a fileset. + def path2instances(request, *paths) + filesets = paths.collect do |path| + # Filesets support indirector requests as an options collection + Puppet::FileServing::Fileset.new(path, request) + end + + Puppet::FileServing::Fileset.merge(*filesets).collect do |file, base_path| + inst = model.new(base_path, :relative_path => file) + inst.checksum_type = request.options[:checksum_type] if request.options[:checksum_type] + inst.links = request.options[:links] if request.options[:links] + inst.collect + inst + end + end +end diff --git a/mcollective/lib/puppet/file_serving/terminus_selector.rb b/mcollective/lib/puppet/file_serving/terminus_selector.rb new file mode 100644 index 000000000..7b8c175ce --- /dev/null +++ b/mcollective/lib/puppet/file_serving/terminus_selector.rb @@ -0,0 +1,33 @@ +# +# Created by Luke Kanies on 2007-10-18. +# Copyright (c) 2007. All rights reserved. + +require 'uri' +require 'puppet/file_serving' + +# This module is used to pick the appropriate terminus +# in file-serving indirections. This is necessary because +# the terminus varies based on the URI asked for. +module Puppet::FileServing::TerminusSelector + PROTOCOL_MAP = {"puppet" => :rest, "file" => :file} + + def select(request) + # We rely on the request's parsing of the URI. + + # Short-circuit to :file if it's a fully-qualified path or specifies a 'file' protocol. + return PROTOCOL_MAP["file"] if request.key =~ /^#{::File::SEPARATOR}/ + return PROTOCOL_MAP["file"] if request.protocol == "file" + + # We're heading over the wire the protocol is 'puppet' and we've got a server name or we're not named 'apply' or 'puppet' + if request.protocol == "puppet" and (request.server or !["puppet","apply"].include?(Puppet.settings[:name])) + return PROTOCOL_MAP["puppet"] + end + + if request.protocol and PROTOCOL_MAP[request.protocol].nil? + raise(ArgumentError, "URI protocol '#{request.protocol}' is not currently supported for file serving") + end + + # If we're still here, we're using the file_server or modules. + :file_server + end +end diff --git a/mcollective/lib/puppet/indirector.rb b/mcollective/lib/puppet/indirector.rb new file mode 100644 index 000000000..fd6bf3010 --- /dev/null +++ b/mcollective/lib/puppet/indirector.rb @@ -0,0 +1,78 @@ +# Manage indirections to termini. 
They are organized in terms of indirections - +# - e.g., configuration, node, file, certificate -- and each indirection has one +# or more terminus types defined. The indirection is configured via the +# +indirects+ method, which will be called by the class extending itself +# with this module. +module Puppet::Indirector + # LAK:FIXME We need to figure out how to handle documentation for the + # different indirection types. + + require 'puppet/indirector/indirection' + require 'puppet/indirector/terminus' + require 'puppet/indirector/envelope' + require 'puppet/network/format_handler' + + # Declare that the including class indirects its methods to + # this terminus. The terminus name must be the name of a Puppet + # default, not the value -- if it's the value, then it gets + # evaluated at parse time, which is before the user has had a chance + # to override it. + def indirects(indirection, options = {}) + raise(ArgumentError, "Already handling indirection for #{@indirection.name}; cannot also handle #{indirection}") if @indirection + # populate this class with the various new methods + extend ClassMethods + include InstanceMethods + include Puppet::Indirector::Envelope + extend Puppet::Network::FormatHandler + + # instantiate the actual Terminus for that type and this name (:ldap, w/ args :node) + # & hook the instantiated Terminus into this class (Node: @indirection = terminus) + @indirection = Puppet::Indirector::Indirection.new(self, indirection, options) + end + + module ClassMethods + attr_reader :indirection + + def cache_class=(klass) + indirection.cache_class = klass + end + + def terminus_class=(klass) + indirection.terminus_class = klass + end + + # Expire any cached instance. + def expire(*args) + indirection.expire(*args) + end + + def find(*args) + indirection.find(*args) + end + + def head(*args) + indirection.head(*args) + end + + def destroy(*args) + indirection.destroy(*args) + end + + def search(*args) + indirection.search(*args) + end + end + + module InstanceMethods + def save(key = nil) + self.class.indirection.save key, self + end + end + + + # Helper definition for indirections that handle filenames. 
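+  # Illustrative examples (added comment, not part of the original source):
+  # keys such as "../secret", "foo/bar", "foo\bar" or "c:config" match the
+  # expression below and would be treated as bad file names.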
+ BadNameRegexp = Regexp.union(/^\.\./, + %r{[\\/]}, + "\0", + /(?i)^[a-z]:/) +end diff --git a/mcollective/lib/puppet/indirector/active_record.rb b/mcollective/lib/puppet/indirector/active_record.rb new file mode 100644 index 000000000..a9f05d683 --- /dev/null +++ b/mcollective/lib/puppet/indirector/active_record.rb @@ -0,0 +1,28 @@ +require 'puppet/indirector' + +class Puppet::Indirector::ActiveRecord < Puppet::Indirector::Terminus + class << self + attr_accessor :ar_model + end + + def self.use_ar_model(klass) + self.ar_model = klass + end + + def ar_model + self.class.ar_model + end + + def initialize + Puppet::Rails.init + end + + def find(request) + return nil unless instance = ar_model.find_by_name(request.key) + instance.to_puppet + end + + def save(request) + ar_model.from_puppet(request.instance).save + end +end diff --git a/mcollective/lib/puppet/indirector/catalog/active_record.rb b/mcollective/lib/puppet/indirector/catalog/active_record.rb new file mode 100644 index 000000000..f814f4aff --- /dev/null +++ b/mcollective/lib/puppet/indirector/catalog/active_record.rb @@ -0,0 +1,41 @@ +require 'puppet/rails/host' +require 'puppet/indirector/active_record' +require 'puppet/resource/catalog' + +class Puppet::Resource::Catalog::ActiveRecord < Puppet::Indirector::ActiveRecord + use_ar_model Puppet::Rails::Host + + # If we can find the host, then return a catalog with the host's resources + # as the vertices. + def find(request) + return nil unless request.options[:cache_integration_hack] + return nil unless host = ar_model.find_by_name(request.key) + + catalog = Puppet::Resource::Catalog.new(host.name) + + host.resources.each do |resource| + catalog.add_resource resource.to_transportable + end + + catalog + end + + # Save the values from a Facts instance as the facts on a Rails Host instance. + def save(request) + catalog = request.instance + + host = ar_model.find_by_name(catalog.name) || ar_model.create(:name => catalog.name) + + host.railsmark "Saved catalog to database" do + host.merge_resources(catalog.vertices) + host.last_compile = Time.now + + if node = Puppet::Node.find(catalog.name) + host.ip = node.parameters["ipaddress"] + host.environment = node.environment.to_s + end + + host.save + end + end +end diff --git a/mcollective/lib/puppet/indirector/catalog/compiler.rb b/mcollective/lib/puppet/indirector/catalog/compiler.rb new file mode 100644 index 000000000..6375e801f --- /dev/null +++ b/mcollective/lib/puppet/indirector/catalog/compiler.rb @@ -0,0 +1,170 @@ +require 'puppet/node' +require 'puppet/resource/catalog' +require 'puppet/indirector/code' +require 'yaml' + +class Puppet::Resource::Catalog::Compiler < Puppet::Indirector::Code + desc "Puppet's catalog compilation interface, and its back-end is + Puppet's compiler" + + include Puppet::Util + + attr_accessor :code + + def extract_facts_from_request(request) + return unless text_facts = request.options[:facts] + raise ArgumentError, "Facts but no fact format provided for #{request.name}" unless format = request.options[:facts_format] + + # If the facts were encoded as yaml, then the param reconstitution system + # in Network::HTTP::Handler will automagically deserialize the value. + if text_facts.is_a?(Puppet::Node::Facts) + facts = text_facts + else + facts = Puppet::Node::Facts.convert_from(format, text_facts) + end + facts.save + end + + # Compile a node's catalog. 
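+  # Rough flow (added illustrative comment): facts shipped with the request are
+  # saved first, the node object is then looked up (and normally merged with
+  # the server-side facts), and the catalog produced by Puppet::Parser::Compiler
+  # is returned, or nil if compilation yielded nothing.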
+ def find(request) + extract_facts_from_request(request) + + node = node_from_request(request) + + if catalog = compile(node) + return catalog + else + # This shouldn't actually happen; we should either return + # a config or raise an exception. + return nil + end + end + + # filter-out a catalog to remove exported resources + def filter(catalog) + return catalog.filter { |r| r.virtual? } if catalog.respond_to?(:filter) + catalog + end + + def initialize + set_server_facts + setup_database_backend if Puppet[:storeconfigs] + end + + # Is our compiler part of a network, or are we just local? + def networked? + Puppet.run_mode.master? + end + + private + + # Add any extra data necessary to the node. + def add_node_data(node) + # Merge in our server-side facts, so they can be used during compilation. + node.merge(@server_facts) + end + + # Compile the actual catalog. + def compile(node) + str = "Compiled catalog for #{node.name}" + str += " in environment #{node.environment}" if node.environment + config = nil + + loglevel = networked? ? :notice : :none + + benchmark(loglevel, str) do + begin + config = Puppet::Parser::Compiler.compile(node) + rescue Puppet::Error => detail + Puppet.err(detail.to_s) if networked? + raise + end + end + + config + end + + # Turn our host name into a node object. + def find_node(name) + begin + return nil unless node = Puppet::Node.find(name) + rescue => detail + puts detail.backtrace if Puppet[:trace] + raise Puppet::Error, "Failed when searching for node #{name}: #{detail}" + end + + + # Add any external data to the node. + add_node_data(node) + + node + end + + # Extract the node from the request, or use the request + # to find the node. + def node_from_request(request) + if node = request.options[:use_node] + return node + end + + # We rely on our authorization system to determine whether the connected + # node is allowed to compile the catalog's node referenced by key. + # By default the REST authorization system makes sure only the connected node + # can compile his catalog. + # This allows for instance monitoring systems or puppet-load to check several + # node's catalog with only one certificate and a modification to auth.conf + # If no key is provided we can only compile the currently connected node. + name = request.key || request.node + if node = find_node(name) + return node + end + + raise ArgumentError, "Could not find node '#{name}'; cannot compile" + end + + # Initialize our server fact hash; we add these to each client, and they + # won't change while we're running, so it's safe to cache the values. + def set_server_facts + @server_facts = {} + + # Add our server version to the fact list + @server_facts["serverversion"] = Puppet.version.to_s + + # And then add the server name and IP + {"servername" => "fqdn", + "serverip" => "ipaddress" + }.each do |var, fact| + if value = Facter.value(fact) + @server_facts[var] = value + else + Puppet.warning "Could not retrieve fact #{fact}" + end + end + + if @server_facts["servername"].nil? + host = Facter.value(:hostname) + if domain = Facter.value(:domain) + @server_facts["servername"] = [host, domain].join(".") + else + @server_facts["servername"] = host + end + end + end + + def setup_database_backend + raise Puppet::Error, "Rails is missing; cannot store configurations" unless Puppet.features.rails? + Puppet::Rails.init + end + + # Mark that the node has checked in. LAK:FIXME this needs to be moved into + # the Node class, or somewhere that's got abstract backends. 
+ def update_node_check(node) + if Puppet.features.rails? and Puppet[:storeconfigs] + Puppet::Rails.connect + + host = Puppet::Rails::Host.find_or_create_by_name(node.name) + host.last_freshcheck = Time.now + host.save + end + end +end diff --git a/mcollective/lib/puppet/indirector/catalog/queue.rb b/mcollective/lib/puppet/indirector/catalog/queue.rb new file mode 100644 index 000000000..581382e9e --- /dev/null +++ b/mcollective/lib/puppet/indirector/catalog/queue.rb @@ -0,0 +1,5 @@ +require 'puppet/resource/catalog' +require 'puppet/indirector/queue' + +class Puppet::Resource::Catalog::Queue < Puppet::Indirector::Queue +end diff --git a/mcollective/lib/puppet/indirector/catalog/rest.rb b/mcollective/lib/puppet/indirector/catalog/rest.rb new file mode 100644 index 000000000..b7a00a321 --- /dev/null +++ b/mcollective/lib/puppet/indirector/catalog/rest.rb @@ -0,0 +1,6 @@ +require 'puppet/resource/catalog' +require 'puppet/indirector/rest' + +class Puppet::Resource::Catalog::Rest < Puppet::Indirector::REST + desc "Find resource catalogs over HTTP via REST." +end diff --git a/mcollective/lib/puppet/indirector/catalog/yaml.rb b/mcollective/lib/puppet/indirector/catalog/yaml.rb new file mode 100644 index 000000000..177b4e23e --- /dev/null +++ b/mcollective/lib/puppet/indirector/catalog/yaml.rb @@ -0,0 +1,22 @@ +require 'puppet/resource/catalog' +require 'puppet/indirector/yaml' + +class Puppet::Resource::Catalog::Yaml < Puppet::Indirector::Yaml + desc "Store catalogs as flat files, serialized using YAML." + + private + + # Override these, because yaml doesn't want to convert our self-referential + # objects. This is hackish, but eh. + def from_yaml(text) + if config = YAML.load(text) + return config + end + end + + def to_yaml(config) + # We can't yaml-dump classes. + #config.edgelist_class = nil + YAML.dump(config) + end +end diff --git a/mcollective/lib/puppet/indirector/certificate/ca.rb b/mcollective/lib/puppet/indirector/certificate/ca.rb new file mode 100644 index 000000000..563bd3c02 --- /dev/null +++ b/mcollective/lib/puppet/indirector/certificate/ca.rb @@ -0,0 +1,9 @@ +require 'puppet/indirector/ssl_file' +require 'puppet/ssl/certificate' + +class Puppet::SSL::Certificate::Ca < Puppet::Indirector::SslFile + desc "Manage the CA collection of signed SSL certificates on disk." + + store_in :signeddir + store_ca_at :cacert +end diff --git a/mcollective/lib/puppet/indirector/certificate/file.rb b/mcollective/lib/puppet/indirector/certificate/file.rb new file mode 100644 index 000000000..4ce18f81a --- /dev/null +++ b/mcollective/lib/puppet/indirector/certificate/file.rb @@ -0,0 +1,9 @@ +require 'puppet/indirector/ssl_file' +require 'puppet/ssl/certificate' + +class Puppet::SSL::Certificate::File < Puppet::Indirector::SslFile + desc "Manage SSL certificates on disk." + + store_in :certdir + store_ca_at :localcacert +end diff --git a/mcollective/lib/puppet/indirector/certificate/rest.rb b/mcollective/lib/puppet/indirector/certificate/rest.rb new file mode 100644 index 000000000..921b85790 --- /dev/null +++ b/mcollective/lib/puppet/indirector/certificate/rest.rb @@ -0,0 +1,15 @@ +require 'puppet/ssl/certificate' +require 'puppet/indirector/rest' + +class Puppet::SSL::Certificate::Rest < Puppet::Indirector::REST + desc "Find and save certificates over HTTP via REST." 
+ + use_server_setting(:ca_server) + use_port_setting(:ca_port) + + def find(request) + return nil unless result = super + result.name = request.key unless result.name == request.key + result + end +end diff --git a/mcollective/lib/puppet/indirector/certificate_request/ca.rb b/mcollective/lib/puppet/indirector/certificate_request/ca.rb new file mode 100644 index 000000000..f4c924fe1 --- /dev/null +++ b/mcollective/lib/puppet/indirector/certificate_request/ca.rb @@ -0,0 +1,14 @@ +require 'puppet/indirector/ssl_file' +require 'puppet/ssl/certificate_request' + +class Puppet::SSL::CertificateRequest::Ca < Puppet::Indirector::SslFile + desc "Manage the CA collection of certificate requests on disk." + + store_in :csrdir + + def save(request) + result = super + Puppet.notice "#{request.key} has a waiting certificate request" + result + end +end diff --git a/mcollective/lib/puppet/indirector/certificate_request/file.rb b/mcollective/lib/puppet/indirector/certificate_request/file.rb new file mode 100644 index 000000000..9510e362b --- /dev/null +++ b/mcollective/lib/puppet/indirector/certificate_request/file.rb @@ -0,0 +1,8 @@ +require 'puppet/indirector/ssl_file' +require 'puppet/ssl/certificate_request' + +class Puppet::SSL::CertificateRequest::File < Puppet::Indirector::SslFile + desc "Manage the collection of certificate requests on disk." + + store_in :requestdir +end diff --git a/mcollective/lib/puppet/indirector/certificate_request/rest.rb b/mcollective/lib/puppet/indirector/certificate_request/rest.rb new file mode 100644 index 000000000..81810551f --- /dev/null +++ b/mcollective/lib/puppet/indirector/certificate_request/rest.rb @@ -0,0 +1,9 @@ +require 'puppet/ssl/certificate_request' +require 'puppet/indirector/rest' + +class Puppet::SSL::CertificateRequest::Rest < Puppet::Indirector::REST + desc "Find and save certificate requests over HTTP via REST." + + use_server_setting(:ca_server) + use_port_setting(:ca_port) +end diff --git a/mcollective/lib/puppet/indirector/certificate_revocation_list/ca.rb b/mcollective/lib/puppet/indirector/certificate_revocation_list/ca.rb new file mode 100644 index 000000000..1e2be6e0c --- /dev/null +++ b/mcollective/lib/puppet/indirector/certificate_revocation_list/ca.rb @@ -0,0 +1,8 @@ +require 'puppet/indirector/ssl_file' +require 'puppet/ssl/certificate_revocation_list' + +class Puppet::SSL::CertificateRevocationList::Ca < Puppet::Indirector::SslFile + desc "Manage the CA collection of certificate requests on disk." + + store_at :cacrl +end diff --git a/mcollective/lib/puppet/indirector/certificate_revocation_list/file.rb b/mcollective/lib/puppet/indirector/certificate_revocation_list/file.rb new file mode 100644 index 000000000..fbc437474 --- /dev/null +++ b/mcollective/lib/puppet/indirector/certificate_revocation_list/file.rb @@ -0,0 +1,8 @@ +require 'puppet/indirector/ssl_file' +require 'puppet/ssl/certificate_revocation_list' + +class Puppet::SSL::CertificateRevocationList::File < Puppet::Indirector::SslFile + desc "Manage the global certificate revocation list." 
+ + store_at :hostcrl +end diff --git a/mcollective/lib/puppet/indirector/certificate_revocation_list/rest.rb b/mcollective/lib/puppet/indirector/certificate_revocation_list/rest.rb new file mode 100644 index 000000000..06cbb19f2 --- /dev/null +++ b/mcollective/lib/puppet/indirector/certificate_revocation_list/rest.rb @@ -0,0 +1,9 @@ +require 'puppet/ssl/certificate_revocation_list' +require 'puppet/indirector/rest' + +class Puppet::SSL::CertificateRevocationList::Rest < Puppet::Indirector::REST + desc "Find and save certificate revocation lists over HTTP via REST." + + use_server_setting(:ca_server) + use_port_setting(:ca_port) +end diff --git a/mcollective/lib/puppet/indirector/code.rb b/mcollective/lib/puppet/indirector/code.rb new file mode 100644 index 000000000..0c0ee146b --- /dev/null +++ b/mcollective/lib/puppet/indirector/code.rb @@ -0,0 +1,6 @@ +require 'puppet/indirector/terminus' + +# Do nothing, requiring that the back-end terminus do all +# of the work. +class Puppet::Indirector::Code < Puppet::Indirector::Terminus +end diff --git a/mcollective/lib/puppet/indirector/couch.rb b/mcollective/lib/puppet/indirector/couch.rb new file mode 100644 index 000000000..fae934fd8 --- /dev/null +++ b/mcollective/lib/puppet/indirector/couch.rb @@ -0,0 +1,76 @@ +raise "Couch terminus not supported without couchrest gem" unless Puppet.features.couchdb? + +require 'couchrest' +class Puppet::Indirector::Couch < Puppet::Indirector::Terminus + + # The CouchRest database instance. One database instance per Puppet runtime + # should be sufficient. + # + def self.db; @db ||= CouchRest.database! Puppet[:couchdb_url] end + def db; self.class.db end + + def find(request) + attributes_of get(request) + end + + # Create or update the couchdb document with the request's data hash. + # + def save(request) + raise ArgumentError, "PUT does not accept options" unless request.options.empty? + update(request) || create(request) + end + + private + + # RKH:TODO: Do not depend on error handling, check if the document exists + # first. (Does couchrest support this?) + # + def get(request) + db.get(id_for(request)) + rescue RestClient::ResourceNotFound + Puppet.debug "No couchdb document with id: #{id_for(request)}" + return nil + end + + def update(request) + doc = get request + return unless doc + doc.merge!(hash_from(request)) + doc.save + true + end + + def create(request) + db.save_doc hash_from(request) + end + + # The attributes hash that is serialized to CouchDB as JSON. It includes + # metadata that is used to help aggregate data in couchdb. Add + # model-specific attributes in subclasses. + # + def hash_from(request) + { + "_id" => id_for(request), + "puppet_type" => document_type_for(request) + } + end + + # The couchdb response stripped of metadata, used to instantiate the model + # instance that is returned by save. + # + def attributes_of(response) + response && response.reject{|k,v| k =~ /^(_rev|puppet_)/ } + end + + def document_type_for(request) + request.indirection_name + end + + # The id used to store the object in couchdb. Implemented in subclasses. + # + def id_for(request) + raise NotImplementedError + end + +end + diff --git a/mcollective/lib/puppet/indirector/direct_file_server.rb b/mcollective/lib/puppet/indirector/direct_file_server.rb new file mode 100644 index 000000000..80c84eab5 --- /dev/null +++ b/mcollective/lib/puppet/indirector/direct_file_server.rb @@ -0,0 +1,23 @@ +# +# Created by Luke Kanies on 2007-10-24. +# Copyright (c) 2007. All rights reserved. 
+ +require 'puppet/file_serving/terminus_helper' +require 'puppet/indirector/terminus' + +class Puppet::Indirector::DirectFileServer < Puppet::Indirector::Terminus + + include Puppet::FileServing::TerminusHelper + + def find(request) + return nil unless FileTest.exists?(request.key) + instance = model.new(request.key) + instance.links = request.options[:links] if request.options[:links] + instance + end + + def search(request) + return nil unless FileTest.exists?(request.key) + path2instances(request, request.key) + end +end diff --git a/mcollective/lib/puppet/indirector/envelope.rb b/mcollective/lib/puppet/indirector/envelope.rb new file mode 100644 index 000000000..73c33e9d4 --- /dev/null +++ b/mcollective/lib/puppet/indirector/envelope.rb @@ -0,0 +1,11 @@ +require 'puppet/indirector' + +# Provide any attributes or functionality needed for indirected +# instances. +module Puppet::Indirector::Envelope + attr_accessor :expiration + + def expired? + expiration and expiration < Time.now + end +end diff --git a/mcollective/lib/puppet/indirector/exec.rb b/mcollective/lib/puppet/indirector/exec.rb new file mode 100644 index 000000000..8ea13ff95 --- /dev/null +++ b/mcollective/lib/puppet/indirector/exec.rb @@ -0,0 +1,48 @@ +require 'puppet/indirector/terminus' +require 'puppet/util' + +class Puppet::Indirector::Exec < Puppet::Indirector::Terminus + # Look for external node definitions. + def find(request) + # Run the command. + unless output = query(request.key) + return nil + end + + # Translate the output to ruby. + output + end + + private + + # Proxy the execution, so it's easier to test. + def execute(command, arguments) + Puppet::Util.execute(command,arguments) + end + + # Call the external command and see if it returns our output. + def query(name) + external_command = command + + # Make sure it's an arry + raise Puppet::DevError, "Exec commands must be an array" unless external_command.is_a?(Array) + + # Make sure it's fully qualified. + raise ArgumentError, "You must set the exec parameter to a fully qualified command" unless external_command[0][0] == File::SEPARATOR[0] + + # Add our name to it. + external_command << name + begin + output = execute(external_command, :combine => false) + rescue Puppet::ExecutionFailure => detail + raise Puppet::Error, "Failed to find #{name} via exec: #{detail}" + end + + if output =~ /\A\s*\Z/ # all whitespace + Puppet.debug "Empty response for #{name} from exec #{self.name} terminus" + return nil + else + return output + end + end +end diff --git a/mcollective/lib/puppet/indirector/facts/active_record.rb b/mcollective/lib/puppet/indirector/facts/active_record.rb new file mode 100644 index 000000000..96ed800da --- /dev/null +++ b/mcollective/lib/puppet/indirector/facts/active_record.rb @@ -0,0 +1,36 @@ +require 'puppet/rails/fact_name' +require 'puppet/rails/fact_value' +require 'puppet/rails/host' +require 'puppet/indirector/active_record' + +class Puppet::Node::Facts::ActiveRecord < Puppet::Indirector::ActiveRecord + use_ar_model Puppet::Rails::Host + + # Find the Rails host and pull its facts as a Facts instance. + def find(request) + return nil unless host = ar_model.find_by_name(request.key, :include => {:fact_values => :fact_name}) + + facts = Puppet::Node::Facts.new(host.name) + facts.values = host.get_facts_hash.inject({}) do |hash, ary| + # Convert all single-member arrays into plain values. 
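+        # e.g. (added illustrative comment): a fact stored as
+        # {"kernel" => ["Linux"]} comes back as the plain value "Linux",
+        # while a genuinely multi-valued fact keeps its array.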
+ param = ary[0] + values = ary[1].collect { |v| v.value } + values = values[0] if values.length == 1 + hash[param] = values + hash + end + + facts + end + + # Save the values from a Facts instance as the facts on a Rails Host instance. + def save(request) + facts = request.instance + + host = ar_model.find_by_name(facts.name) || ar_model.create(:name => facts.name) + + host.merge_facts(facts.values) + + host.save + end +end diff --git a/mcollective/lib/puppet/indirector/facts/couch.rb b/mcollective/lib/puppet/indirector/facts/couch.rb new file mode 100644 index 000000000..fda2b9191 --- /dev/null +++ b/mcollective/lib/puppet/indirector/facts/couch.rb @@ -0,0 +1,31 @@ +require 'puppet/node/facts' +require 'puppet/indirector/couch' +class Puppet::Node::Facts::Couch < Puppet::Indirector::Couch + + # Return the facts object or nil if there is no document + def find(request) + doc = super + doc ? model.new(doc['_id'], doc['facts']) : nil + end + + private + + # Facts values are stored to the document's 'facts' attribute. Hostname is + # stored to 'name' + # + def hash_from(request) + super.merge('facts' => request.instance.values) + end + + # Facts are stored to the 'node' document. + def document_type_for(request) + 'node' + end + + # The id used to store the object in couchdb. + def id_for(request) + request.key.to_s + end + +end + diff --git a/mcollective/lib/puppet/indirector/facts/facter.rb b/mcollective/lib/puppet/indirector/facts/facter.rb new file mode 100644 index 000000000..ab7378a34 --- /dev/null +++ b/mcollective/lib/puppet/indirector/facts/facter.rb @@ -0,0 +1,83 @@ +require 'puppet/node/facts' +require 'puppet/indirector/code' + +class Puppet::Node::Facts::Facter < Puppet::Indirector::Code + desc "Retrieve facts from Facter. This provides a somewhat abstract interface + between Puppet and Facter. It's only `somewhat` abstract because it always + returns the local host's facts, regardless of what you attempt to find." + + + def self.load_fact_plugins + # Add any per-module fact directories to the factpath + module_fact_dirs = Puppet[:modulepath].split(":").collect do |d| + ["lib", "plugins"].map do |subdirectory| + Dir.glob("#{d}/*/#{subdirectory}/facter") + end + end.flatten + dirs = module_fact_dirs + Puppet[:factpath].split(":") + x = dirs.each do |dir| + load_facts_in_dir(dir) + end + end + + def self.load_facts_in_dir(dir) + return unless FileTest.directory?(dir) + + Dir.chdir(dir) do + Dir.glob("*.rb").each do |file| + fqfile = ::File.join(dir, file) + begin + Puppet.info "Loading facts in #{::File.basename(file.sub(".rb",''))}" + Timeout::timeout(self.timeout) do + load file + end + rescue SystemExit,NoMemoryError + raise + rescue Exception => detail + Puppet.warning "Could not load fact file #{fqfile}: #{detail}" + end + end + end + end + + def self.timeout + timeout = Puppet[:configtimeout] + case timeout + when String + if timeout =~ /^\d+$/ + timeout = Integer(timeout) + else + raise ArgumentError, "Configuration timeout must be an integer" + end + when Integer # nothing + else + raise ArgumentError, "Configuration timeout must be an integer" + end + + timeout + end + + def initialize(*args) + super + self.class.load_fact_plugins + end + + def destroy(facts) + raise Puppet::DevError, "You cannot destroy facts in the code store; it is only used for getting facts from Facter" + end + + # Look a host's facts up in Facter. 
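+  # Illustrative usage (added comment, not part of the original source): with
+  # this terminus selected, Puppet::Node::Facts.find("web01.example.com") wraps
+  # the local machine's Facter output in a Facts object named
+  # "web01.example.com", whatever host was actually asked for.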
+ def find(request) + result = Puppet::Node::Facts.new(request.key, Facter.to_hash) + + result.add_local_facts + result.stringify + result.downcase_if_necessary + + result + end + + def save(facts) + raise Puppet::DevError, "You cannot save facts to the code store; it is only used for getting facts from Facter" + end +end diff --git a/mcollective/lib/puppet/indirector/facts/inventory_active_record.rb b/mcollective/lib/puppet/indirector/facts/inventory_active_record.rb new file mode 100644 index 000000000..db4c63f00 --- /dev/null +++ b/mcollective/lib/puppet/indirector/facts/inventory_active_record.rb @@ -0,0 +1,97 @@ +require 'puppet/rails' +require 'puppet/rails/inventory_node' +require 'puppet/rails/inventory_fact' +require 'puppet/indirector/active_record' + +class Puppet::Node::Facts::InventoryActiveRecord < Puppet::Indirector::ActiveRecord + def find(request) + node = Puppet::Rails::InventoryNode.find_by_name(request.key) + return nil unless node + facts = Puppet::Node::Facts.new(node.name, node.facts_to_hash) + facts.timestamp = node.timestamp + facts + end + + def save(request) + facts = request.instance + node = Puppet::Rails::InventoryNode.find_by_name(request.key) || Puppet::Rails::InventoryNode.create(:name => request.key, :timestamp => facts.timestamp) + node.timestamp = facts.timestamp + + ActiveRecord::Base.transaction do + Puppet::Rails::InventoryFact.delete_all(:node_id => node.id) + # We don't want to save internal values as facts, because those are + # metadata that belong on the node + facts.values.each do |name,value| + next if name.to_s =~ /^_/ + node.facts.build(:name => name, :value => value) + end + node.save + end + end + + def search(request) + return [] unless request.options + matching_nodes = [] + fact_names = [] + fact_filters = Hash.new {|h,k| h[k] = []} + meta_filters = Hash.new {|h,k| h[k] = []} + request.options.each do |key,value| + type, name, operator = key.to_s.split(".") + operator ||= "eq" + if type == "facts" + fact_filters[operator] << [name,value] + elsif type == "meta" and name == "timestamp" + meta_filters[operator] << [name,value] + end + end + + matching_nodes = nodes_matching_fact_filters(fact_filters) + nodes_matching_meta_filters(meta_filters) + + # to_a because [].inject == nil + matching_nodes.inject {|nodes,this_set| nodes & this_set}.to_a.sort + end + + private + + def nodes_matching_fact_filters(fact_filters) + node_sets = [] + fact_filters['eq'].each do |name,value| + node_sets << Puppet::Rails::InventoryNode.has_fact_with_value(name,value).map {|node| node.name} + end + fact_filters['ne'].each do |name,value| + node_sets << Puppet::Rails::InventoryNode.has_fact_without_value(name,value).map {|node| node.name} + end + { + 'gt' => '>', + 'lt' => '<', + 'ge' => '>=', + 'le' => '<=' + }.each do |operator_name,operator| + fact_filters[operator_name].each do |name,value| + facts = Puppet::Rails::InventoryFact.find_by_sql(["SELECT inventory_facts.value, inventory_nodes.name AS node_name + FROM inventory_facts INNER JOIN inventory_nodes + ON inventory_facts.node_id = inventory_nodes.id + WHERE inventory_facts.name = ?", name]) + node_sets << facts.select {|fact| fact.value.to_f.send(operator, value.to_f)}.map {|fact| fact.node_name} + end + end + node_sets + end + + def nodes_matching_meta_filters(meta_filters) + node_sets = [] + { + 'eq' => '=', + 'ne' => '!=', + 'gt' => '>', + 'lt' => '<', + 'ge' => '>=', + 'le' => '<=' + }.each do |operator_name,operator| + meta_filters[operator_name].each do |name,value| + node_sets << 
Puppet::Rails::InventoryNode.find(:all, :select => "name", :conditions => ["timestamp #{operator} ?", value]).map {|node| node.name} + end + end + node_sets + end +end diff --git a/mcollective/lib/puppet/indirector/facts/inventory_service.rb b/mcollective/lib/puppet/indirector/facts/inventory_service.rb new file mode 100644 index 000000000..988e57745 --- /dev/null +++ b/mcollective/lib/puppet/indirector/facts/inventory_service.rb @@ -0,0 +1,20 @@ +require 'puppet/node/facts' +require 'puppet/indirector/rest' + +class Puppet::Node::Facts::InventoryService < Puppet::Indirector::REST + desc "Find and save facts about nodes using a remote inventory service." + use_server_setting(:inventory_server) + use_port_setting(:inventory_port) + + # We don't want failing to upload to the inventory service to cause any + # failures, so we just suppress them and warn. + def save(request) + begin + super + true + rescue => e + Puppet.warning "Could not upload facts for #{request.key} to inventory service: #{e.to_s}" + false + end + end +end diff --git a/mcollective/lib/puppet/indirector/facts/memory.rb b/mcollective/lib/puppet/indirector/facts/memory.rb new file mode 100644 index 000000000..93682f456 --- /dev/null +++ b/mcollective/lib/puppet/indirector/facts/memory.rb @@ -0,0 +1,9 @@ +require 'puppet/node/facts' +require 'puppet/indirector/memory' + +class Puppet::Node::Facts::Memory < Puppet::Indirector::Memory + desc "Keep track of facts in memory but nowhere else. This is used for + one-time compiles, such as what the stand-alone `puppet` does. + To use this terminus, you must load it with the data you want it + to contain." +end diff --git a/mcollective/lib/puppet/indirector/facts/rest.rb b/mcollective/lib/puppet/indirector/facts/rest.rb new file mode 100644 index 000000000..e2afa14b2 --- /dev/null +++ b/mcollective/lib/puppet/indirector/facts/rest.rb @@ -0,0 +1,8 @@ +require 'puppet/node/facts' +require 'puppet/indirector/rest' + +class Puppet::Node::Facts::Rest < Puppet::Indirector::REST + desc "Find and save facts about nodes over HTTP via REST." + use_server_setting(:inventory_server) + use_port_setting(:inventory_port) +end diff --git a/mcollective/lib/puppet/indirector/facts/yaml.rb b/mcollective/lib/puppet/indirector/facts/yaml.rb new file mode 100644 index 000000000..65bd78354 --- /dev/null +++ b/mcollective/lib/puppet/indirector/facts/yaml.rb @@ -0,0 +1,82 @@ +require 'puppet/node/facts' +require 'puppet/indirector/yaml' + +class Puppet::Node::Facts::Yaml < Puppet::Indirector::Yaml + desc "Store client facts as flat files, serialized using YAML, or + return deserialized facts from disk." + + def search(request) + node_names = [] + Dir.glob(yaml_dir_path).each do |file| + facts = YAML.load_file(file) + node_names << facts.name if node_matches?(facts, request.options) + end + node_names + end + + private + + # Return the path to a given node's file. + def yaml_dir_path + base = Puppet.run_mode.master? ? 
Puppet[:yamldir] : Puppet[:clientyamldir] + File.join(base, 'facts', '*.yaml') + end + + def node_matches?(facts, options) + options.each do |key, value| + type, name, operator = key.to_s.split(".") + operator ||= 'eq' + + return false unless node_matches_option?(type, name, operator, value, facts) + end + return true + end + + def node_matches_option?(type, name, operator, value, facts) + case type + when "meta" + case name + when "timestamp" + compare_timestamp(operator, facts.timestamp, Time.parse(value)) + end + when "facts" + compare_facts(operator, facts.values[name], value) + end + end + + def compare_facts(operator, value1, value2) + return false unless value1 + + case operator + when "eq" + value1.to_s == value2.to_s + when "le" + value1.to_f <= value2.to_f + when "ge" + value1.to_f >= value2.to_f + when "lt" + value1.to_f < value2.to_f + when "gt" + value1.to_f > value2.to_f + when "ne" + value1.to_s != value2.to_s + end + end + + def compare_timestamp(operator, value1, value2) + case operator + when "eq" + value1 == value2 + when "le" + value1 <= value2 + when "ge" + value1 >= value2 + when "lt" + value1 < value2 + when "gt" + value1 > value2 + when "ne" + value1 != value2 + end + end +end diff --git a/mcollective/lib/puppet/indirector/file_bucket_file/file.rb b/mcollective/lib/puppet/indirector/file_bucket_file/file.rb new file mode 100644 index 000000000..0fd8a914f --- /dev/null +++ b/mcollective/lib/puppet/indirector/file_bucket_file/file.rb @@ -0,0 +1,135 @@ +require 'puppet/indirector/code' +require 'puppet/file_bucket/file' +require 'puppet/util/checksums' +require 'fileutils' + +module Puppet::FileBucketFile + class File < Puppet::Indirector::Code + include Puppet::Util::Checksums + + desc "Store files in a directory set based on their checksums." 
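+    # On-disk layout (added illustrative comment): for the md5 checksum
+    # d41d8cd98f00b204e9800998ecf8427e, path_for below yields roughly
+    #   <bucketdir>/d/4/1/d/8/c/d/9/d41d8cd98f00b204e9800998ecf8427e/contents
+    # for the file data, with the original file names listed in a sibling
+    # 'paths' file.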
+ + def initialize + Puppet.settings.use(:filebucket) + end + + def find( request ) + checksum, files_original_path = request_to_checksum_and_path( request ) + dir_path = path_for(request.options[:bucket_path], checksum) + file_path = ::File.join(dir_path, 'contents') + + return nil unless ::File.exists?(file_path) + return nil unless path_match(dir_path, files_original_path) + + if request.options[:diff_with] + hash_protocol = sumtype(checksum) + file2_path = path_for(request.options[:bucket_path], request.options[:diff_with], 'contents') + raise "could not find diff_with #{request.options[:diff_with]}" unless ::File.exists?(file2_path) + return `diff #{file_path.inspect} #{file2_path.inspect}` + else + contents = ::File.read file_path + Puppet.info "FileBucket read #{checksum}" + model.new(contents) + end + end + + def head(request) + checksum, files_original_path = request_to_checksum_and_path(request) + dir_path = path_for(request.options[:bucket_path], checksum) + + ::File.exists?(::File.join(dir_path, 'contents')) and path_match(dir_path, files_original_path) + end + + def save( request ) + instance = request.instance + checksum, files_original_path = request_to_checksum_and_path(request) + + save_to_disk(instance, files_original_path) + instance.to_s + end + + private + + def path_match(dir_path, files_original_path) + return true unless files_original_path # if no path was provided, it's a match + paths_path = ::File.join(dir_path, 'paths') + return false unless ::File.exists?(paths_path) + ::File.open(paths_path) do |f| + f.each do |line| + return true if line.chomp == files_original_path + end + end + return false + end + + def save_to_disk( bucket_file, files_original_path ) + filename = path_for(bucket_file.bucket_path, bucket_file.checksum_data, 'contents') + dir_path = path_for(bucket_file.bucket_path, bucket_file.checksum_data) + paths_path = ::File.join(dir_path, 'paths') + + # If the file already exists, do nothing. + if ::File.exist?(filename) + verify_identical_file!(bucket_file) + else + # Make the directories if necessary. + unless ::File.directory?(dir_path) + Puppet::Util.withumask(0007) do + ::FileUtils.mkdir_p(dir_path) + end + end + + Puppet.info "FileBucket adding #{bucket_file.checksum}" + + # Write the file to disk. + Puppet::Util.withumask(0007) do + ::File.open(filename, ::File::WRONLY|::File::CREAT, 0440) do |of| + of.print bucket_file.contents + end + ::File.open(paths_path, ::File::WRONLY|::File::CREAT, 0640) do |of| + # path will be written below + end + end + end + + unless path_match(dir_path, files_original_path) + ::File.open(paths_path, 'a') do |f| + f.puts(files_original_path) + end + end + end + + def request_to_checksum_and_path( request ) + checksum_type, checksum, path = request.key.split(/\//, 3) + if path == '' # Treat "md5//" like "md5/" + path = nil + end + raise "Unsupported checksum type #{checksum_type.inspect}" if checksum_type != 'md5' + raise "Invalid checksum #{checksum.inspect}" if checksum !~ /^[0-9a-f]{32}$/ + [checksum, path] + end + + def path_for(bucket_path, digest, subfile = nil) + bucket_path ||= Puppet[:bucketdir] + + dir = ::File.join(digest[0..7].split("")) + basedir = ::File.join(bucket_path, dir, digest) + + return basedir unless subfile + ::File.join(basedir, subfile) + end + + # If conflict_check is enabled, verify that the passed text is + # the same as the text in our file. 
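+      # In practice (added clarifying comment): if an existing bucket entry
+      # holds different contents for the same checksum, a BucketError is
+      # raised; if the contents match, the file is simply logged as a duplicate.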
+ def verify_identical_file!(bucket_file) + disk_contents = ::File.read(path_for(bucket_file.bucket_path, bucket_file.checksum_data, 'contents')) + + # If the contents don't match, then we've found a conflict. + # Unlikely, but quite bad. + if disk_contents != bucket_file.contents + raise Puppet::FileBucket::BucketError, "Got passed new contents for sum #{bucket_file.checksum}" + else + Puppet.info "FileBucket got a duplicate file #{bucket_file.checksum}" + end + end + end +end diff --git a/mcollective/lib/puppet/indirector/file_bucket_file/rest.rb b/mcollective/lib/puppet/indirector/file_bucket_file/rest.rb new file mode 100644 index 000000000..783d43586 --- /dev/null +++ b/mcollective/lib/puppet/indirector/file_bucket_file/rest.rb @@ -0,0 +1,8 @@ +require 'puppet/indirector/rest' +require 'puppet/file_bucket/file' + +module Puppet::FileBucketFile + class Rest < Puppet::Indirector::REST + desc "This is a REST based mechanism to send/retrieve file to/from the filebucket" + end +end diff --git a/mcollective/lib/puppet/indirector/file_bucket_file/selector.rb b/mcollective/lib/puppet/indirector/file_bucket_file/selector.rb new file mode 100644 index 000000000..51fc7711f --- /dev/null +++ b/mcollective/lib/puppet/indirector/file_bucket_file/selector.rb @@ -0,0 +1,49 @@ +require 'puppet/indirector/code' + +module Puppet::FileBucketFile + class Selector < Puppet::Indirector::Code + desc "Select the terminus based on the request" + + def select(request) + if request.protocol == 'https' + :rest + else + :file + end + end + + def get_terminus(request) + indirection.terminus(select(request)) + end + + def head(request) + get_terminus(request).head(request) + end + + def find(request) + get_terminus(request).find(request) + end + + def save(request) + get_terminus(request).save(request) + end + + def search(request) + get_terminus(request).search(request) + end + + def destroy(request) + get_terminus(request).destroy(request) + end + + def authorized?(request) + terminus = get_terminus(request) + if terminus.respond_to?(:authorized?) + terminus.authorized?(request) + else + true + end + end + end +end + diff --git a/mcollective/lib/puppet/indirector/file_content.rb b/mcollective/lib/puppet/indirector/file_content.rb new file mode 100644 index 000000000..5261ddc05 --- /dev/null +++ b/mcollective/lib/puppet/indirector/file_content.rb @@ -0,0 +1,5 @@ +require 'puppet/file_serving/content' + +# A stub class, so our constants work. +class Puppet::Indirector::FileContent # :nodoc: +end diff --git a/mcollective/lib/puppet/indirector/file_content/file.rb b/mcollective/lib/puppet/indirector/file_content/file.rb new file mode 100644 index 000000000..75fc9981c --- /dev/null +++ b/mcollective/lib/puppet/indirector/file_content/file.rb @@ -0,0 +1,11 @@ +# +# Created by Luke Kanies on 2007-10-16. +# Copyright (c) 2007. All rights reserved. + +require 'puppet/file_serving/content' +require 'puppet/indirector/file_content' +require 'puppet/indirector/direct_file_server' + +class Puppet::Indirector::FileContent::File < Puppet::Indirector::DirectFileServer + desc "Retrieve file contents from disk." +end diff --git a/mcollective/lib/puppet/indirector/file_content/file_server.rb b/mcollective/lib/puppet/indirector/file_content/file_server.rb new file mode 100644 index 000000000..21cfe7324 --- /dev/null +++ b/mcollective/lib/puppet/indirector/file_content/file_server.rb @@ -0,0 +1,11 @@ +# +# Created by Luke Kanies on 2007-10-18. +# Copyright (c) 2007. All rights reserved. 
+ +require 'puppet/file_serving/content' +require 'puppet/indirector/file_content' +require 'puppet/indirector/file_server' + +class Puppet::Indirector::FileContent::FileServer < Puppet::Indirector::FileServer + desc "Retrieve file contents using Puppet's fileserver." +end diff --git a/mcollective/lib/puppet/indirector/file_content/rest.rb b/mcollective/lib/puppet/indirector/file_content/rest.rb new file mode 100644 index 000000000..2fd39b7e5 --- /dev/null +++ b/mcollective/lib/puppet/indirector/file_content/rest.rb @@ -0,0 +1,11 @@ +# +# Created by Luke Kanies on 2007-10-18. +# Copyright (c) 2007. All rights reserved. + +require 'puppet/file_serving/content' +require 'puppet/indirector/file_content' +require 'puppet/indirector/rest' + +class Puppet::Indirector::FileContent::Rest < Puppet::Indirector::REST + desc "Retrieve file contents via a REST HTTP interface." +end diff --git a/mcollective/lib/puppet/indirector/file_content/selector.rb b/mcollective/lib/puppet/indirector/file_content/selector.rb new file mode 100644 index 000000000..32374ed74 --- /dev/null +++ b/mcollective/lib/puppet/indirector/file_content/selector.rb @@ -0,0 +1,30 @@ +require 'puppet/file_serving/content' +require 'puppet/indirector/file_content' +require 'puppet/indirector/code' +require 'puppet/file_serving/terminus_selector' + +class Puppet::Indirector::FileContent::Selector < Puppet::Indirector::Code + desc "Select the terminus based on the request" + include Puppet::FileServing::TerminusSelector + + def get_terminus(request) + indirection.terminus(select(request)) + end + + def find(request) + get_terminus(request).find(request) + end + + def search(request) + get_terminus(request).search(request) + end + + def authorized?(request) + terminus = get_terminus(request) + if terminus.respond_to?(:authorized?) + terminus.authorized?(request) + else + true + end + end +end diff --git a/mcollective/lib/puppet/indirector/file_metadata.rb b/mcollective/lib/puppet/indirector/file_metadata.rb new file mode 100644 index 000000000..c43579d24 --- /dev/null +++ b/mcollective/lib/puppet/indirector/file_metadata.rb @@ -0,0 +1,5 @@ +require 'puppet/file_serving/metadata' + +# A stub class, so our constants work. +class Puppet::Indirector::FileMetadata # :nodoc: +end diff --git a/mcollective/lib/puppet/indirector/file_metadata/file.rb b/mcollective/lib/puppet/indirector/file_metadata/file.rb new file mode 100644 index 000000000..4d6b0b335 --- /dev/null +++ b/mcollective/lib/puppet/indirector/file_metadata/file.rb @@ -0,0 +1,26 @@ +# +# Created by Luke Kanies on 2007-10-16. +# Copyright (c) 2007. All rights reserved. + +require 'puppet/file_serving/metadata' +require 'puppet/indirector/file_metadata' +require 'puppet/indirector/direct_file_server' + +class Puppet::Indirector::FileMetadata::File < Puppet::Indirector::DirectFileServer + desc "Retrieve file metadata directly from the local filesystem." + + def find(request) + return unless data = super + data.collect + + data + end + + def search(request) + return unless result = super + + result.each { |instance| instance.collect } + + result + end +end diff --git a/mcollective/lib/puppet/indirector/file_metadata/file_server.rb b/mcollective/lib/puppet/indirector/file_metadata/file_server.rb new file mode 100644 index 000000000..cef81f0a5 --- /dev/null +++ b/mcollective/lib/puppet/indirector/file_metadata/file_server.rb @@ -0,0 +1,11 @@ +# +# Created by Luke Kanies on 2007-10-18. +# Copyright (c) 2007. All rights reserved. 
+ +require 'puppet/file_serving/metadata' +require 'puppet/indirector/file_metadata' +require 'puppet/indirector/file_server' + +class Puppet::Indirector::FileMetadata::FileServer < Puppet::Indirector::FileServer + desc "Retrieve file metadata using Puppet's fileserver." +end diff --git a/mcollective/lib/puppet/indirector/file_metadata/rest.rb b/mcollective/lib/puppet/indirector/file_metadata/rest.rb new file mode 100644 index 000000000..023edb8ff --- /dev/null +++ b/mcollective/lib/puppet/indirector/file_metadata/rest.rb @@ -0,0 +1,11 @@ +# +# Created by Luke Kanies on 2007-10-18. +# Copyright (c) 2007. All rights reserved. + +require 'puppet/file_serving/metadata' +require 'puppet/indirector/file_metadata' +require 'puppet/indirector/rest' + +class Puppet::Indirector::FileMetadata::Rest < Puppet::Indirector::REST + desc "Retrieve file metadata via a REST HTTP interface." +end diff --git a/mcollective/lib/puppet/indirector/file_metadata/selector.rb b/mcollective/lib/puppet/indirector/file_metadata/selector.rb new file mode 100644 index 000000000..08e4fbb88 --- /dev/null +++ b/mcollective/lib/puppet/indirector/file_metadata/selector.rb @@ -0,0 +1,30 @@ +require 'puppet/file_serving/metadata' +require 'puppet/indirector/file_metadata' +require 'puppet/indirector/code' +require 'puppet/file_serving/terminus_selector' + +class Puppet::Indirector::FileMetadata::Selector < Puppet::Indirector::Code + desc "Select the terminus based on the request" + include Puppet::FileServing::TerminusSelector + + def get_terminus(request) + indirection.terminus(select(request)) + end + + def find(request) + get_terminus(request).find(request) + end + + def search(request) + get_terminus(request).search(request) + end + + def authorized?(request) + terminus = get_terminus(request) + if terminus.respond_to?(:authorized?) + terminus.authorized?(request) + else + true + end + end +end diff --git a/mcollective/lib/puppet/indirector/file_server.rb b/mcollective/lib/puppet/indirector/file_server.rb new file mode 100644 index 000000000..46a08c97d --- /dev/null +++ b/mcollective/lib/puppet/indirector/file_server.rb @@ -0,0 +1,69 @@ +# +# Created by Luke Kanies on 2007-10-19. +# Copyright (c) 2007. All rights reserved. + +require 'puppet/file_serving/configuration' +require 'puppet/file_serving/fileset' +require 'puppet/file_serving/terminus_helper' +require 'puppet/indirector/terminus' + +# Look files up using the file server. +class Puppet::Indirector::FileServer < Puppet::Indirector::Terminus + include Puppet::FileServing::TerminusHelper + + # Is the client authorized to perform this action? + def authorized?(request) + return false unless [:find, :search].include?(request.method) + + mount, file_path = configuration.split_path(request) + + # If we're not serving this mount, then access is denied. + return false unless mount + mount.allowed?(request.node, request.ip) + end + + # Find our key using the fileserver. + def find(request) + mount, relative_path = configuration.split_path(request) + + return nil unless mount + + # The mount checks to see if the file exists, and returns nil + # if not. + return nil unless path = mount.find(relative_path, request) + result = model.new(path) + result.links = request.options[:links] if request.options[:links] + result.collect + result + end + + # Search for files. This returns an array rather than a single + # file. 
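+  # For instance (illustrative comment, not part of the original source): a
+  # recursive request against a mount such as "files" returns one model
+  # instance per file found beneath the matched directories, each with the
+  # request's :links option applied.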
+ def search(request) + mount, relative_path = configuration.split_path(request) + + unless mount and paths = mount.search(relative_path, request) + Puppet.info "Could not find filesystem info for file '#{request.key}' in environment #{request.environment}" + return nil + end + + filesets = paths.collect do |path| + # Filesets support indirector requests as an options collection + Puppet::FileServing::Fileset.new(path, request) + end + + Puppet::FileServing::Fileset.merge(*filesets).collect do |file, base_path| + inst = model.new(base_path, :relative_path => file) + inst.links = request.options[:links] if request.options[:links] + inst.collect + inst + end + end + + private + + # Our fileserver configuration, if needed. + def configuration + Puppet::FileServing::Configuration.create + end +end diff --git a/mcollective/lib/puppet/indirector/indirection.rb b/mcollective/lib/puppet/indirector/indirection.rb new file mode 100644 index 000000000..3d17e6e47 --- /dev/null +++ b/mcollective/lib/puppet/indirector/indirection.rb @@ -0,0 +1,319 @@ +require 'puppet/util/docs' +require 'puppet/indirector/envelope' +require 'puppet/indirector/request' +require 'puppet/util/cacher' + +# The class that connects functional classes with their different collection +# back-ends. Each indirection has a set of associated terminus classes, +# each of which is a subclass of Puppet::Indirector::Terminus. +class Puppet::Indirector::Indirection + include Puppet::Util::Cacher + include Puppet::Util::Docs + + @@indirections = [] + + # Find an indirection by name. This is provided so that Terminus classes + # can specifically hook up with the indirections they are associated with. + def self.instance(name) + @@indirections.find { |i| i.name == name } + end + + # Return a list of all known indirections. Used to generate the + # reference. + def self.instances + @@indirections.collect { |i| i.name } + end + + # Find an indirected model by name. This is provided so that Terminus classes + # can specifically hook up with the indirections they are associated with. + def self.model(name) + return nil unless match = @@indirections.find { |i| i.name == name } + match.model + end + + attr_accessor :name, :model + + # Create and return our cache terminus. + def cache + raise(Puppet::DevError, "Tried to cache when no cache class was set") unless cache_class + terminus(cache_class) + end + + # Should we use a cache? + def cache? + cache_class ? true : false + end + + attr_reader :cache_class + # Define a terminus class to be used for caching. + def cache_class=(class_name) + validate_terminus_class(class_name) if class_name + @cache_class = class_name + end + + # This is only used for testing. + def delete + @@indirections.delete(self) if @@indirections.include?(self) + end + + # Set the time-to-live for instances created through this indirection. + def ttl=(value) + raise ArgumentError, "Indirection TTL must be an integer" unless value.is_a?(Fixnum) + @ttl = value + end + + # Default to the runinterval for the ttl. + def ttl + @ttl ||= Puppet[:runinterval].to_i + end + + # Calculate the expiration date for a returned instance. + def expiration + Time.now + ttl + end + + # Generate the full doc string. 
+ def doc + text = "" + + text += scrub(@doc) + "\n\n" if @doc + + if s = terminus_setting + text += "* **Terminus Setting**: #{terminus_setting}" + end + + text + end + + def initialize(model, name, options = {}) + @model = model + @name = name + + @cache_class = nil + @terminus_class = nil + + raise(ArgumentError, "Indirection #{@name} is already defined") if @@indirections.find { |i| i.name == @name } + @@indirections << self + + if mod = options[:extend] + extend(mod) + options.delete(:extend) + end + + # This is currently only used for cache_class and terminus_class. + options.each do |name, value| + begin + send(name.to_s + "=", value) + rescue NoMethodError + raise ArgumentError, "#{name} is not a valid Indirection parameter" + end + end + end + + # Set up our request object. + def request(*args) + Puppet::Indirector::Request.new(self.name, *args) + end + + # Return the singleton terminus for this indirection. + def terminus(terminus_name = nil) + # Get the name of the terminus. + raise Puppet::DevError, "No terminus specified for #{self.name}; cannot redirect" unless terminus_name ||= terminus_class + + termini[terminus_name] ||= make_terminus(terminus_name) + end + + # This can be used to select the terminus class. + attr_accessor :terminus_setting + + # Determine the terminus class. + def terminus_class + unless @terminus_class + if setting = self.terminus_setting + self.terminus_class = Puppet.settings[setting].to_sym + else + raise Puppet::DevError, "No terminus class nor terminus setting was provided for indirection #{self.name}" + end + end + @terminus_class + end + + def reset_terminus_class + @terminus_class = nil + end + + # Specify the terminus class to use. + def terminus_class=(klass) + validate_terminus_class(klass) + @terminus_class = klass + end + + # This is used by terminus_class= and cache=. + def validate_terminus_class(terminus_class) + raise ArgumentError, "Invalid terminus name #{terminus_class.inspect}" unless terminus_class and terminus_class.to_s != "" + unless Puppet::Indirector::Terminus.terminus_class(self.name, terminus_class) + raise ArgumentError, "Could not find terminus #{terminus_class} for indirection #{self.name}" + end + end + + # Expire a cached object, if one is cached. Note that we don't actually + # remove it, we expire it and write it back out to disk. This way people + # can still use the expired object if they want. + def expire(key, *args) + request = request(:expire, key, *args) + + return nil unless cache? + + return nil unless instance = cache.find(request(:find, key, *args)) + + Puppet.info "Expiring the #{self.name} cache of #{instance.name}" + + # Set an expiration date in the past + instance.expiration = Time.now - 60 + + cache.save(request(:save, instance, *args)) + end + + # Search for an instance in the appropriate terminus, caching the + # results if caching is configured.. + def find(key, *args) + request = request(:find, key, *args) + terminus = prepare(request) + + if result = find_in_cache(request) + return result + end + + # Otherwise, return the result from the terminus, caching if appropriate. + if ! request.ignore_terminus? and result = terminus.find(request) + result.expiration ||= self.expiration if result.respond_to?(:expiration) + if cache? and request.use_cache? + Puppet.info "Caching #{self.name} for #{request.key}" + cache.save request(:save, result, *args) + end + + return terminus.respond_to?(:filter) ? 
terminus.filter(result) : result + end + + nil + end + + # Search for an instance in the appropriate terminus, and return a + # boolean indicating whether the instance was found. + def head(key, *args) + request = request(:head, key, *args) + terminus = prepare(request) + + # Look in the cache first, then in the terminus. Force the result + # to be a boolean. + !!(find_in_cache(request) || terminus.head(request)) + end + + def find_in_cache(request) + # See if our instance is in the cache and up to date. + return nil unless cache? and ! request.ignore_cache? and cached = cache.find(request) + if cached.expired? + Puppet.info "Not using expired #{self.name} for #{request.key} from cache; expired at #{cached.expiration}" + return nil + end + + Puppet.debug "Using cached #{self.name} for #{request.key}" + cached + rescue => detail + puts detail.backtrace if Puppet[:trace] + Puppet.err "Cached #{self.name} for #{request.key} failed: #{detail}" + nil + end + + # Remove something via the terminus. + def destroy(key, *args) + request = request(:destroy, key, *args) + terminus = prepare(request) + + result = terminus.destroy(request) + + if cache? and cached = cache.find(request(:find, key, *args)) + # Reuse the existing request, since it's equivalent. + cache.destroy(request) + end + + result + end + + # Search for more than one instance. Should always return an array. + def search(key, *args) + request = request(:search, key, *args) + terminus = prepare(request) + + if result = terminus.search(request) + raise Puppet::DevError, "Search results from terminus #{terminus.name} are not an array" unless result.is_a?(Array) + result.each do |instance| + next unless instance.respond_to? :expiration + instance.expiration ||= self.expiration + end + return result + end + end + + # Save the instance in the appropriate terminus. This method is + # normally an instance method on the indirected class. + def save(key, instance = nil) + request = request(:save, key, instance) + terminus = prepare(request) + + result = terminus.save(request) + + # If caching is enabled, save our document there + cache.save(request) if cache? + + result + end + + private + + # Check authorization if there's a hook available; fail if there is one + # and it returns false. + def check_authorization(request, terminus) + # At this point, we're assuming authorization makes no sense without + # client information. + return unless request.node + + # This is only to authorize via a terminus-specific authorization hook. + return unless terminus.respond_to?(:authorized?) + + unless terminus.authorized?(request) + msg = "Not authorized to call #{request.method} on #{request}" + msg += " with #{request.options.inspect}" unless request.options.empty? + raise ArgumentError, msg + end + end + + # Setup a request, pick the appropriate terminus, check the request's authorization, and return it. + def prepare(request) + # Pick our terminus. + if respond_to?(:select_terminus) + unless terminus_name = select_terminus(request) + raise ArgumentError, "Could not determine appropriate terminus for #{request}" + end + else + terminus_name = terminus_class + end + + dest_terminus = terminus(terminus_name) + check_authorization(request, dest_terminus) + + dest_terminus + end + + # Create a new terminus instance. + def make_terminus(terminus_class) + # Load our terminus class. 
+ unless klass = Puppet::Indirector::Terminus.terminus_class(self.name, terminus_class) + raise ArgumentError, "Could not find terminus #{terminus_class} for indirection #{self.name}" + end + klass.new + end + + # Cache our terminus instances indefinitely, but make it easy to clean them up. + cached_attr(:termini) { Hash.new } +end diff --git a/mcollective/lib/puppet/indirector/key/ca.rb b/mcollective/lib/puppet/indirector/key/ca.rb new file mode 100644 index 000000000..056d037dd --- /dev/null +++ b/mcollective/lib/puppet/indirector/key/ca.rb @@ -0,0 +1,12 @@ +require 'puppet/indirector/ssl_file' +require 'puppet/ssl/key' + +class Puppet::SSL::Key::Ca < Puppet::Indirector::SslFile + desc "Manage the CA's private on disk. This terminus *only* works + with the CA key, because that's the only key that the CA ever interacts + with." + + store_in :privatekeydir + + store_ca_at :cakey +end diff --git a/mcollective/lib/puppet/indirector/key/file.rb b/mcollective/lib/puppet/indirector/key/file.rb new file mode 100644 index 000000000..1990f1a46 --- /dev/null +++ b/mcollective/lib/puppet/indirector/key/file.rb @@ -0,0 +1,42 @@ +require 'puppet/indirector/ssl_file' +require 'puppet/ssl/key' + +class Puppet::SSL::Key::File < Puppet::Indirector::SslFile + desc "Manage SSL private and public keys on disk." + + store_in :privatekeydir + store_ca_at :cakey + + # Where should we store the public key? + def public_key_path(name) + if ca?(name) + Puppet[:capub] + else + File.join(Puppet[:publickeydir], name.to_s + ".pem") + end + end + + # Remove the public key, in addition to the private key + def destroy(request) + super + + return unless FileTest.exist?(public_key_path(request.key)) + + begin + File.unlink(public_key_path(request.key)) + rescue => detail + raise Puppet::Error, "Could not remove #{request.key} public key: #{detail}" + end + end + + # Save the public key, in addition to the private key. + def save(request) + super + + begin + Puppet.settings.writesub(:publickeydir, public_key_path(request.key)) { |f| f.print request.instance.content.public_key.to_pem } + rescue => detail + raise Puppet::Error, "Could not write #{request.key}: #{detail}" + end + end +end diff --git a/mcollective/lib/puppet/indirector/ldap.rb b/mcollective/lib/puppet/indirector/ldap.rb new file mode 100644 index 000000000..8d7cd076a --- /dev/null +++ b/mcollective/lib/puppet/indirector/ldap.rb @@ -0,0 +1,78 @@ +require 'puppet/indirector/terminus' +require 'puppet/util/ldap/connection' + +class Puppet::Indirector::Ldap < Puppet::Indirector::Terminus + # Perform our ldap search and process the result. + def find(request) + ldapsearch(search_filter(request.key)) { |entry| return process(entry) } || nil + end + + # Process the found entry. We assume that we don't just want the + # ldap object. + def process(entry) + raise Puppet::DevError, "The 'process' method has not been overridden for the LDAP terminus for #{self.name}" + end + + # Default to all attributes. + def search_attributes + nil + end + + def search_base + Puppet[:ldapbase] + end + + # The ldap search filter to use. + def search_filter(name) + raise Puppet::DevError, "No search string set for LDAP terminus for #{self.name}" + end + + # Find the ldap node, return the class list and parent node specially, + # and everything else in a parameter hash. + def ldapsearch(filter) + raise ArgumentError.new("You must pass a block to ldapsearch") unless block_given? 
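# Editor's note (illustrative sketch, not part of this patch): a concrete
# LDAP terminus is expected to override search_filter and process, which
# raise above when left unimplemented; ldapsearch and connection are
# inherited. The class name and filter below are invented for illustration.
#
#   class Puppet::Node::SimpleLdap < Puppet::Indirector::Ldap
#     def search_filter(name)
#       "(&(objectclass=puppetClient)(cn=#{name}))"
#     end
#
#     def process(entry)
#       Puppet::Node.new(entry.vals("cn").first)
#     end
#   end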
+ + found = false + count = 0 + + begin + connection.search(search_base, 2, filter, search_attributes) do |entry| + found = true + yield entry + end + rescue SystemExit,NoMemoryError + raise + rescue Exception => detail + if count == 0 + # Try reconnecting to ldap if we get an exception and we haven't yet retried. + count += 1 + @connection = nil + Puppet.warning "Retrying LDAP connection" + retry + else + error = Puppet::Error.new("LDAP Search failed") + error.set_backtrace(detail.backtrace) + raise error + end + end + + found + end + + # Create an ldap connection. + def connection + unless @connection + raise Puppet::Error, "Could not set up LDAP Connection: Missing ruby/ldap libraries" unless Puppet.features.ldap? + begin + conn = Puppet::Util::Ldap::Connection.instance + conn.start + @connection = conn.connection + rescue => detail + puts detail.backtrace if Puppet[:trace] + raise Puppet::Error, "Could not connect to LDAP: #{detail}" + end + end + + @connection + end +end diff --git a/mcollective/lib/puppet/indirector/memory.rb b/mcollective/lib/puppet/indirector/memory.rb new file mode 100644 index 000000000..c44d62de2 --- /dev/null +++ b/mcollective/lib/puppet/indirector/memory.rb @@ -0,0 +1,21 @@ +require 'puppet/indirector/terminus' + +# Manage a memory-cached list of instances. +class Puppet::Indirector::Memory < Puppet::Indirector::Terminus + def initialize + @instances = {} + end + + def destroy(request) + raise ArgumentError.new("Could not find #{request.key} to destroy") unless @instances.include?(request.key) + @instances.delete(request.key) + end + + def find(request) + @instances[request.key] + end + + def save(request) + @instances[request.key] = request.instance + end +end diff --git a/mcollective/lib/puppet/indirector/node/active_record.rb b/mcollective/lib/puppet/indirector/node/active_record.rb new file mode 100644 index 000000000..08bc7e67a --- /dev/null +++ b/mcollective/lib/puppet/indirector/node/active_record.rb @@ -0,0 +1,13 @@ +require 'puppet/rails/host' +require 'puppet/indirector/active_record' +require 'puppet/node' + +class Puppet::Node::ActiveRecord < Puppet::Indirector::ActiveRecord + use_ar_model Puppet::Rails::Host + + def find(request) + node = super + node.fact_merge + node + end +end diff --git a/mcollective/lib/puppet/indirector/node/exec.rb b/mcollective/lib/puppet/indirector/node/exec.rb new file mode 100644 index 000000000..6e065c6f3 --- /dev/null +++ b/mcollective/lib/puppet/indirector/node/exec.rb @@ -0,0 +1,48 @@ +require 'puppet/node' +require 'puppet/indirector/exec' + +class Puppet::Node::Exec < Puppet::Indirector::Exec + desc "Call an external program to get node information. See + the [External Nodes](http://docs.puppetlabs.com/guides/external_nodes.html) page for more information." + include Puppet::Util + + def command + command = Puppet[:external_nodes] + raise ArgumentError, "You must set the 'external_nodes' parameter to use the external node terminus" unless command != "none" + command.split + end + + # Look for external node definitions. + def find(request) + output = super or return nil + + # Translate the output to ruby. + result = translate(request.key, output) + + create_node(request.key, result) + end + + private + + # Turn our outputted objects into a Puppet::Node instance. 
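# Editor's note (illustrative example, not part of this patch): an
# external_nodes script prints YAML such as the following to stdout (class
# and parameter names invented); translate() symbolizes the top-level keys
# and create_node() below copies :classes, :parameters and :environment onto
# the node.
#
#   classes:
#     - common
#     - ntp
#   parameters:
#     datacenter: eu-west
#   environment: production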
+ def create_node(name, result) + node = Puppet::Node.new(name) + set = false + [:parameters, :classes, :environment].each do |param| + if value = result[param] + node.send(param.to_s + "=", value) + set = true + end + end + + node.fact_merge + node + end + + # Translate the yaml string into Ruby objects. + def translate(name, output) + YAML.load(output).inject({}) { |hash, data| hash[symbolize(data[0])] = data[1]; hash } + rescue => detail + raise Puppet::Error, "Could not load external node results for #{name}: #{detail}" + end +end diff --git a/mcollective/lib/puppet/indirector/node/ldap.rb b/mcollective/lib/puppet/indirector/node/ldap.rb new file mode 100644 index 000000000..5fd738511 --- /dev/null +++ b/mcollective/lib/puppet/indirector/node/ldap.rb @@ -0,0 +1,256 @@ +require 'puppet/node' +require 'puppet/indirector/ldap' + +class Puppet::Node::Ldap < Puppet::Indirector::Ldap + desc "Search in LDAP for node configuration information. See + the [LDAP Nodes](http://projects.puppetlabs.com/projects/puppet/wiki/Ldap_Nodes) page for more information. This will first + search for whatever the certificate name is, then (if that name + contains a `.`) for the short name, then `default`." + + # The attributes that Puppet class information is stored in. + def class_attributes + # LAK:NOTE See http://snurl.com/21zf8 [groups_google_com] + x = Puppet[:ldapclassattrs].split(/\s*,\s*/) + end + + # Separate this out so it's relatively atomic. It's tempting to call + # process instead of name2hash() here, but it ends up being + # difficult to test because all exceptions get caught by ldapsearch. + # LAK:NOTE Unfortunately, the ldap support is too stupid to throw anything + # but LDAP::ResultError, even on bad connections, so we are rough handed + # with our error handling. + def name2hash(name) + info = nil + ldapsearch(search_filter(name)) { |entry| info = entry2hash(entry) } + info + end + + # Look for our node in ldap. + def find(request) + names = [request.key] + names << request.key.sub(/\..+/, '') if request.key.include?(".") # we assume it's an fqdn + names << "default" + + node = nil + names.each do |name| + next unless info = name2hash(name) + + break if node = info2node(request.key, info) + end + + node + end + + # Find more than one node. LAK:NOTE This is a bit of a clumsy API, because the 'search' + # method currently *requires* a key. It seems appropriate in some cases but not others, + # and I don't really know how to get rid of it as a requirement but allow it when desired. + def search(request) + if classes = request.options[:class] + classes = [classes] unless classes.is_a?(Array) + filter = "(&(objectclass=puppetClient)(puppetclass=" + classes.join(")(puppetclass=") + "))" + else + filter = "(objectclass=puppetClient)" + end + + infos = [] + ldapsearch(filter) { |entry| infos << entry2hash(entry, request.options[:fqdn]) } + + return infos.collect do |info| + info2node(info[:name], info) + end + end + + # The parent attribute, if we have one. + def parent_attribute + if pattr = Puppet[:ldapparentattr] and ! pattr.empty? + pattr + else + nil + end + end + + # The attributes that Puppet will stack as array over the full + # hierarchy. + def stacked_attributes(dummy_argument=:work_arround_for_ruby_GC_bug) + Puppet[:ldapstackedattrs].split(/\s*,\s*/) + end + + # Convert the found entry into a simple hash. + def entry2hash(entry, fqdn = false) + result = {} + + cn = entry.dn[ /cn\s*=\s*([^,\s]+)/i,1] + dcs = entry.dn.scan(/dc\s*=\s*([^,\s]+)/i) + result[:name] = fqdn ? 
([cn]+dcs).join('.') : cn + result[:parent] = get_parent_from_entry(entry) if parent_attribute + result[:classes] = get_classes_from_entry(entry) + result[:stacked] = get_stacked_values_from_entry(entry) + result[:parameters] = get_parameters_from_entry(entry) + + result[:environment] = result[:parameters]["environment"] if result[:parameters]["environment"] + + result[:stacked_parameters] = {} + + if result[:stacked] + result[:stacked].each do |value| + param = value.split('=', 2) + result[:stacked_parameters][param[0]] = param[1] + end + end + + if result[:stacked_parameters] + result[:stacked_parameters].each do |param, value| + result[:parameters][param] = value unless result[:parameters].include?(param) + end + end + + result[:parameters] = convert_parameters(result[:parameters]) + + result + end + + # Default to all attributes. + def search_attributes + ldapattrs = Puppet[:ldapattrs] + + # results in everything getting returned + return nil if ldapattrs == "all" + + search_attrs = class_attributes + ldapattrs.split(/\s*,\s*/) + + if pattr = parent_attribute + search_attrs << pattr + end + + search_attrs + end + + # The ldap search filter to use. + def search_filter(name) + filter = Puppet[:ldapstring] + + if filter.include? "%s" + # Don't replace the string in-line, since that would hard-code our node + # info. + filter = filter.gsub('%s', name) + end + filter + end + + private + + # Add our hash of ldap information to the node instance. + def add_to_node(node, information) + node.classes = information[:classes].uniq unless information[:classes].nil? or information[:classes].empty? + node.parameters = information[:parameters] unless information[:parameters].nil? or information[:parameters].empty? + node.environment = information[:environment] if information[:environment] + end + + def convert_parameters(parameters) + result = {} + parameters.each do |param, value| + if value.is_a?(Array) + result[param] = value.collect { |v| convert(v) } + else + result[param] = convert(value) + end + end + result + end + + # Convert any values if necessary. + def convert(value) + case value + when Integer, Fixnum, Bignum; value + when "true"; true + when "false"; false + else + value + end + end + + # Find information for our parent and merge it into the current info. + def find_and_merge_parent(parent, information) + parent_info = name2hash(parent) || raise(Puppet::Error.new("Could not find parent node '#{parent}'")) + information[:classes] += parent_info[:classes] + parent_info[:parameters].each do |param, value| + # Specifically test for whether it's set, so false values are handled correctly. + information[:parameters][param] = value unless information[:parameters].include?(param) + end + information[:environment] ||= parent_info[:environment] + parent_info[:parent] + end + + # Take a name and a hash, and return a node instance. + def info2node(name, info) + merge_parent(info) if info[:parent] + + node = Puppet::Node.new(name) + + add_to_node(node, info) + + node.fact_merge + + node + end + + def merge_parent(info) + parent_info = nil + parent = info[:parent] + + # Preload the parent array with the node name. 
+ parents = [info[:name]] + while parent + raise ArgumentError, "Found loop in LDAP node parents; #{parent} appears twice" if parents.include?(parent) + parents << parent + parent = find_and_merge_parent(parent, info) + end + + info + end + + def get_classes_from_entry(entry) + result = class_attributes.inject([]) do |array, attr| + if values = entry.vals(attr) + values.each do |v| array << v end + end + array + end + result.uniq + end + + def get_parameters_from_entry(entry) + stacked_params = stacked_attributes + entry.to_hash.inject({}) do |hash, ary| + unless stacked_params.include?(ary[0]) # don't add our stacked parameters to the main param list + if ary[1].length == 1 + hash[ary[0]] = ary[1].shift + else + hash[ary[0]] = ary[1] + end + end + hash + end + end + + def get_parent_from_entry(entry) + pattr = parent_attribute + + return nil unless values = entry.vals(pattr) + + if values.length > 1 + raise Puppet::Error, + "Node entry #{entry.dn} specifies more than one parent: #{values.inspect}" + end + return(values.empty? ? nil : values.shift) + end + + def get_stacked_values_from_entry(entry) + stacked_attributes.inject([]) do |result, attr| + if values = entry.vals(attr) + result += values + end + result + end + end +end diff --git a/mcollective/lib/puppet/indirector/node/memory.rb b/mcollective/lib/puppet/indirector/node/memory.rb new file mode 100644 index 000000000..029926af8 --- /dev/null +++ b/mcollective/lib/puppet/indirector/node/memory.rb @@ -0,0 +1,10 @@ +require 'puppet/node' +require 'puppet/indirector/memory' + +class Puppet::Node::Memory < Puppet::Indirector::Memory + desc "Keep track of nodes in memory but nowhere else. This is used for + one-time compiles, such as what the stand-alone `puppet` does. + To use this terminus, you must load it with the data you want it + to contain; it is only useful for developers and should generally not + be chosen by a normal user." +end diff --git a/mcollective/lib/puppet/indirector/node/plain.rb b/mcollective/lib/puppet/indirector/node/plain.rb new file mode 100644 index 000000000..d648cce8d --- /dev/null +++ b/mcollective/lib/puppet/indirector/node/plain.rb @@ -0,0 +1,19 @@ +require 'puppet/node' +require 'puppet/indirector/plain' + +class Puppet::Node::Plain < Puppet::Indirector::Plain + desc "Always return an empty node object. Assumes you keep track of nodes + in flat file manifests. You should use it when you don't have some other, + functional source you want to use, as the compiler will not work without a + valid node terminus. + + Note that class is responsible for merging the node's facts into the + node instance before it is returned." + + # Just return an empty node. + def find(request) + node = super + node.fact_merge + node + end +end diff --git a/mcollective/lib/puppet/indirector/node/rest.rb b/mcollective/lib/puppet/indirector/node/rest.rb new file mode 100644 index 000000000..6aa8025a6 --- /dev/null +++ b/mcollective/lib/puppet/indirector/node/rest.rb @@ -0,0 +1,7 @@ +require 'puppet/node' +require 'puppet/indirector/rest' + +class Puppet::Node::Rest < Puppet::Indirector::REST + desc "This will eventually be a REST-based mechanism for finding nodes. It is currently non-functional." 
+ # TODO/FIXME +end diff --git a/mcollective/lib/puppet/indirector/node/yaml.rb b/mcollective/lib/puppet/indirector/node/yaml.rb new file mode 100644 index 000000000..5a316b62e --- /dev/null +++ b/mcollective/lib/puppet/indirector/node/yaml.rb @@ -0,0 +1,7 @@ +require 'puppet/node' +require 'puppet/indirector/yaml' + +class Puppet::Node::Yaml < Puppet::Indirector::Yaml + desc "Store node information as flat files, serialized using YAML, + or deserialize stored YAML nodes." +end diff --git a/mcollective/lib/puppet/indirector/plain.rb b/mcollective/lib/puppet/indirector/plain.rb new file mode 100644 index 000000000..e11fc68c8 --- /dev/null +++ b/mcollective/lib/puppet/indirector/plain.rb @@ -0,0 +1,9 @@ +require 'puppet/indirector/terminus' + +# An empty terminus type, meant to just return empty objects. +class Puppet::Indirector::Plain < Puppet::Indirector::Terminus + # Just return nothing. + def find(request) + indirection.model.new(request.key) + end +end diff --git a/mcollective/lib/puppet/indirector/queue.rb b/mcollective/lib/puppet/indirector/queue.rb new file mode 100644 index 000000000..85ffacacc --- /dev/null +++ b/mcollective/lib/puppet/indirector/queue.rb @@ -0,0 +1,81 @@ +require 'puppet/indirector/terminus' +require 'puppet/util/queue' +require 'puppet/util' + +# Implements the :queue abstract indirector terminus type, for storing +# model instances to a message queue, presumably for the purpose of out-of-process +# handling of changes related to the model. +# +# Relies upon Puppet::Util::Queue for registry and client object management, +# and specifies a default queue type of :stomp, appropriate for use with a variety of message brokers. +# +# It's up to the queue client type to instantiate itself correctly based on Puppet configuration information. +# +# A single queue client is maintained for the abstract terminus, meaning that you can only use one type +# of queue client, one message broker solution, etc., with the indirection mechanism. +# +# Per-indirection queues are assumed, based on the indirection name. If the :catalog indirection makes +# use of this :queue terminus, queue operations work against the "catalog" queue. It is up to the queue +# client library to handle queue creation as necessary (for a number of popular queuing solutions, queue +# creation is automatic and not a concern). +class Puppet::Indirector::Queue < Puppet::Indirector::Terminus + extend ::Puppet::Util::Queue + include Puppet::Util + + def initialize(*args) + super + raise ArgumentError, "Queueing requires pson support" unless Puppet.features.pson? + end + + # Queue has no idiomatic "find" + def find(request) + nil + end + + # Place the request on the queue + def save(request) + result = nil + benchmark :info, "Queued #{indirection.name} for #{request.key}" do + result = client.publish_message(queue, request.instance.render(:pson)) + end + result + rescue => detail + raise Puppet::Error, "Could not write #{request.key} to queue: #{detail}\nInstance::#{request.instance}\n client : #{client}" + end + + def self.queue + indirection_name + end + + def queue + self.class.queue + end + + # Returns the singleton queue client object. + def client + self.class.client + end + + # converts the _message_ from deserialized format to an actual model instance. 
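# Editor's note (illustrative sketch, not part of this patch): intern() and
# subscribe() below are the consuming side of this terminus. A worker process
# would use them roughly like this, where Puppet::Resource::Catalog::Queue is
# assumed to be a subclass of this terminus defined elsewhere:
#
#   Puppet::Resource::Catalog::Queue.subscribe do |catalog|
#     # the block receives each queued message already intern()ed back into a
#     # model instance; persist it, hand it to storeconfigs, etc.
#   end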
+ def self.intern(message) + result = nil + benchmark :info, "Loaded queued #{indirection.name}" do + result = model.convert_from(:pson, message) + end + result + end + + # Provides queue subscription functionality; for a given indirection, use this method on the terminus + # to subscribe to the indirection-specific queue. Your _block_ will be executed per new indirection + # model received from the queue, with _obj_ being the model instance. + def self.subscribe + client.subscribe(queue) do |msg| + begin + yield(self.intern(msg)) + rescue => detail + puts detail.backtrace if Puppet[:trace] + Puppet.err "Error occured with subscription to queue #{queue} for indirection #{indirection_name}: #{detail}" + end + end + end +end diff --git a/mcollective/lib/puppet/indirector/report/processor.rb b/mcollective/lib/puppet/indirector/report/processor.rb new file mode 100644 index 000000000..81b379eb8 --- /dev/null +++ b/mcollective/lib/puppet/indirector/report/processor.rb @@ -0,0 +1,50 @@ +require 'puppet/transaction/report' +require 'puppet/indirector/code' +require 'puppet/reports' + +class Puppet::Transaction::Report::Processor < Puppet::Indirector::Code + desc "Puppet's report processor. Processes the report with each of + the report types listed in the 'reports' setting." + + def initialize + Puppet.settings.use(:main, :reporting, :metrics) + end + + def save(request) + process(request.instance) + end + + private + + # Process the report with each of the configured report types. + # LAK:NOTE This isn't necessarily the best design, but it's backward + # compatible and that's good enough for now. + def process(report) + Puppet.debug "Recieved report to process from #{report.host}" + return if Puppet[:reports] == "none" + + reports.each do |name| + Puppet.debug "Processing report from #{report.host} with processor #{name}" + if mod = Puppet::Reports.report(name) + # We have to use a dup because we're including a module in the + # report. + newrep = report.dup + begin + newrep.extend(mod) + newrep.process + rescue => detail + puts detail.backtrace if Puppet[:trace] + Puppet.err "Report #{name} failed: #{detail}" + end + else + Puppet.warning "No report named '#{name}'" + end + end + end + + # Handle the parsing of the reports attribute. + def reports + # LAK:NOTE See http://snurl.com/21zf8 [groups_google_com] + x = Puppet[:reports].gsub(/(^\s+)|(\s+$)/, '').split(/\s*,\s*/) + end +end diff --git a/mcollective/lib/puppet/indirector/report/rest.rb b/mcollective/lib/puppet/indirector/report/rest.rb new file mode 100644 index 000000000..601da9edb --- /dev/null +++ b/mcollective/lib/puppet/indirector/report/rest.rb @@ -0,0 +1,7 @@ +require 'puppet/indirector/rest' + +class Puppet::Transaction::Report::Rest < Puppet::Indirector::REST + desc "Get server report over HTTP via REST." + use_server_setting(:report_server) + use_port_setting(:report_port) +end diff --git a/mcollective/lib/puppet/indirector/report/yaml.rb b/mcollective/lib/puppet/indirector/report/yaml.rb new file mode 100644 index 000000000..bf7bf4fe5 --- /dev/null +++ b/mcollective/lib/puppet/indirector/report/yaml.rb @@ -0,0 +1,11 @@ +require 'puppet/transaction/report' +require 'puppet/indirector/yaml' + +class Puppet::Transaction::Report::Yaml < Puppet::Indirector::Yaml + desc "Store last report as a flat file, serialized using YAML." 
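# Editor's note (illustrative sketch, not part of this patch): the Processor
# terminus above looks processors up with Puppet::Reports.report(name), so a
# custom processor named in the 'reports' setting is registered roughly like
# this (the :logsize name is invented):
#
#   require 'puppet/reports'
#
#   Puppet::Reports.register_report(:logsize) do
#     desc "Log how many log lines each report carried."
#
#     def process
#       Puppet.notice "#{host} sent #{logs.length} log lines"
#     end
#   end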
+ + # Force report to be saved there + def path(name,ext='.yaml') + Puppet[:lastrunreport] + end +end diff --git a/mcollective/lib/puppet/indirector/request.rb b/mcollective/lib/puppet/indirector/request.rb new file mode 100644 index 000000000..fd8d654dd --- /dev/null +++ b/mcollective/lib/puppet/indirector/request.rb @@ -0,0 +1,199 @@ +require 'cgi' +require 'uri' +require 'puppet/indirector' + +# This class encapsulates all of the information you need to make an +# Indirection call, and as a a result also handles REST calls. It's somewhat +# analogous to an HTTP Request object, except tuned for our Indirector. +class Puppet::Indirector::Request + attr_accessor :key, :method, :options, :instance, :node, :ip, :authenticated, :ignore_cache, :ignore_terminus + + attr_accessor :server, :port, :uri, :protocol + + attr_reader :indirection_name + + OPTION_ATTRIBUTES = [:ip, :node, :authenticated, :ignore_terminus, :ignore_cache, :instance, :environment] + + # Is this an authenticated request? + def authenticated? + # Double negative, so we just get true or false + ! ! authenticated + end + + def environment + @environment ||= Puppet::Node::Environment.new + end + + def environment=(env) + @environment = if env.is_a?(Puppet::Node::Environment) + env + else + Puppet::Node::Environment.new(env) + end + end + + def escaped_key + URI.escape(key) + end + + # LAK:NOTE This is a messy interface to the cache, and it's only + # used by the Configurer class. I decided it was better to implement + # it now and refactor later, when we have a better design, than + # to spend another month coming up with a design now that might + # not be any better. + def ignore_cache? + ignore_cache + end + + def ignore_terminus? + ignore_terminus + end + + def initialize(indirection_name, method, key_or_instance, options_or_instance = {}) + if options_or_instance.is_a? Hash + options = options_or_instance + @instance = nil + else + options = {} + @instance = options_or_instance + end + + self.indirection_name = indirection_name + self.method = method + + set_attributes(options) + + @options = options.inject({}) { |hash, ary| hash[ary[0].to_sym] = ary[1]; hash } + + if key_or_instance.is_a?(String) || key_or_instance.is_a?(Symbol) + key = key_or_instance + else + @instance ||= key_or_instance + end + + if key + # If the request key is a URI, then we need to treat it specially, + # because it rewrites the key. We could otherwise strip server/port/etc + # info out in the REST class, but it seemed bad design for the REST + # class to rewrite the key. + if key.to_s =~ /^\w+:\/\// # it's a URI + set_uri_key(key) + else + @key = key + end + end + + @key = @instance.name if ! @key and @instance + end + + # Look up the indirection based on the name provided. + def indirection + Puppet::Indirector::Indirection.instance(indirection_name) + end + + def indirection_name=(name) + @indirection_name = name.to_sym + end + + + def model + raise ArgumentError, "Could not find indirection '#{indirection_name}'" unless i = indirection + i.model + end + + # Should we allow use of the cached object? + def use_cache? + if defined?(@use_cache) + ! ! use_cache + else + true + end + end + + # Are we trying to interact with multiple resources, or just one? + def plural? + method == :search + end + + # Create the query string, if options are present. + def query_string + return "" unless options and ! options.empty? + "?" 
+ options.collect do |key, value| + case value + when nil; next + when true, false; value = value.to_s + when Fixnum, Bignum, Float; value = value # nothing + when String; value = CGI.escape(value) + when Symbol; value = CGI.escape(value.to_s) + when Array; value = CGI.escape(YAML.dump(value)) + else + raise ArgumentError, "HTTP REST queries cannot handle values of type '#{value.class}'" + end + + "#{key}=#{value}" + end.join("&") + end + + def to_hash + result = options.dup + + OPTION_ATTRIBUTES.each do |attribute| + if value = send(attribute) + result[attribute] = value + end + end + result + end + + def to_s + return(uri ? uri : "/#{indirection_name}/#{key}") + end + + private + + def set_attributes(options) + OPTION_ATTRIBUTES.each do |attribute| + if options.include?(attribute) + send(attribute.to_s + "=", options[attribute]) + options.delete(attribute) + end + end + end + + # Parse the key as a URI, setting attributes appropriately. + def set_uri_key(key) + @uri = key + begin + uri = URI.parse(URI.escape(key)) + rescue => detail + raise ArgumentError, "Could not understand URL #{key}: #{detail}" + end + + # Just short-circuit these to full paths + if uri.scheme == "file" + @key = URI.unescape(uri.path) + return + end + + @server = uri.host if uri.host + + # If the URI class can look up the scheme, it will provide a port, + # otherwise it will default to '0'. + if uri.port.to_i == 0 and uri.scheme == "puppet" + @port = Puppet.settings[:masterport].to_i + else + @port = uri.port.to_i + end + + @protocol = uri.scheme + + if uri.scheme == 'puppet' + @key = URI.unescape(uri.path.sub(/^\//, '')) + return + end + + env, indirector, @key = URI.unescape(uri.path.sub(/^\//, '')).split('/',3) + @key ||= '' + self.environment = env unless env == '' + end +end diff --git a/mcollective/lib/puppet/indirector/resource/ral.rb b/mcollective/lib/puppet/indirector/resource/ral.rb new file mode 100644 index 000000000..bc41d14ae --- /dev/null +++ b/mcollective/lib/puppet/indirector/resource/ral.rb @@ -0,0 +1,53 @@ +class Puppet::Resource::Ral < Puppet::Indirector::Code + def find( request ) + # find by name + res = type(request).instances.find { |o| o.name == resource_name(request) } + res ||= type(request).new(:name => resource_name(request), :audit => type(request).properties.collect { |s| s.name }) + + res.to_resource + end + + def search( request ) + conditions = request.options.dup + conditions[:name] = resource_name(request) if resource_name(request) + + type(request).instances.map do |res| + res.to_resource + end.find_all do |res| + conditions.all? {|property, value| res.to_resource[property].to_s == value.to_s} + end.sort do |a,b| + a.title <=> b.title + end + end + + def save( request ) + # In RAL-land, to "save" means to actually try to change machine state + res = request.instance + ral_res = res.to_ral + + catalog = Puppet::Resource::Catalog.new + catalog.add_resource ral_res + catalog.apply + + ral_res.to_resource + end + + private + + # {type,resource}_name: the resource name may contain slashes: + # File["/etc/hosts"]. To handle, assume the type name does + # _not_ have any slashes in it, and split only on the first. 
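# Editor's note (illustrative example, not part of this patch): a request key
# such as "file//etc/hosts" therefore splits into the type name "file" and
# the resource name "/etc/hosts":
#
#   "file//etc/hosts".split('/', 2)   #=> ["file", "/etc/hosts"]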
+ + def type_name( request ) + request.key.split('/', 2)[0] + end + + def resource_name( request ) + name = request.key.split('/', 2)[1] + name unless name == "" + end + + def type( request ) + Puppet::Type.type(type_name(request)) or raise Puppet::Error, "Could not find type #{type}" + end +end diff --git a/mcollective/lib/puppet/indirector/resource/rest.rb b/mcollective/lib/puppet/indirector/resource/rest.rb new file mode 100644 index 000000000..7848ae65e --- /dev/null +++ b/mcollective/lib/puppet/indirector/resource/rest.rb @@ -0,0 +1,5 @@ +require 'puppet/indirector/status' +require 'puppet/indirector/rest' + +class Puppet::Resource::Rest < Puppet::Indirector::REST +end diff --git a/mcollective/lib/puppet/indirector/resource_type.rb b/mcollective/lib/puppet/indirector/resource_type.rb new file mode 100644 index 000000000..0564dc2b3 --- /dev/null +++ b/mcollective/lib/puppet/indirector/resource_type.rb @@ -0,0 +1,5 @@ +require 'puppet/resource/type' + +# A stub class, so our constants work. +class Puppet::Indirector::ResourceType # :nodoc: +end diff --git a/mcollective/lib/puppet/indirector/resource_type/parser.rb b/mcollective/lib/puppet/indirector/resource_type/parser.rb new file mode 100644 index 000000000..8b1bed0a9 --- /dev/null +++ b/mcollective/lib/puppet/indirector/resource_type/parser.rb @@ -0,0 +1,27 @@ +require 'puppet/resource/type' +require 'puppet/indirector/code' +require 'puppet/indirector/resource_type' + +class Puppet::Indirector::ResourceType::Parser < Puppet::Indirector::Code + desc "Return the data-form of a resource type." + + def find(request) + krt = request.environment.known_resource_types + + # This is a bit ugly. + [:hostclass, :definition, :node].each do |type| + if r = krt.send(type, request.key) + return r + end + end + nil + end + + def search(request) + raise ArgumentError, "Only '*' is acceptable as a search request" unless request.key == "*" + krt = request.environment.known_resource_types + result = [krt.hostclasses.values, krt.definitions.values, krt.nodes.values].flatten + return nil if result.empty? + result + end +end diff --git a/mcollective/lib/puppet/indirector/resource_type/rest.rb b/mcollective/lib/puppet/indirector/resource_type/rest.rb new file mode 100644 index 000000000..f372ecc87 --- /dev/null +++ b/mcollective/lib/puppet/indirector/resource_type/rest.rb @@ -0,0 +1,7 @@ +require 'puppet/resource/type' +require 'puppet/indirector/rest' +require 'puppet/indirector/resource_type' + +class Puppet::Indirector::ResourceType::Rest < Puppet::Indirector::REST + desc "Retrieve resource types via a REST HTTP interface." +end diff --git a/mcollective/lib/puppet/indirector/rest.rb b/mcollective/lib/puppet/indirector/rest.rb new file mode 100644 index 000000000..e50dc68ae --- /dev/null +++ b/mcollective/lib/puppet/indirector/rest.rb @@ -0,0 +1,115 @@ +require 'net/http' +require 'uri' + +require 'puppet/network/http_pool' +require 'puppet/network/http/api/v1' +require 'puppet/network/http/compression' + +# Access objects via REST +class Puppet::Indirector::REST < Puppet::Indirector::Terminus + include Puppet::Network::HTTP::API::V1 + include Puppet::Network::HTTP::Compression.module + + class << self + attr_reader :server_setting, :port_setting + end + + # Specify the setting that we should use to get the server name. + def self.use_server_setting(setting) + @server_setting = setting + end + + def self.server + Puppet.settings[server_setting || :server] + end + + # Specify the setting that we should use to get the port. 
+ def self.use_port_setting(setting) + @port_setting = setting + end + + def self.port + Puppet.settings[port_setting || :masterport].to_i + end + + # Figure out the content type, turn that into a format, and use the format + # to extract the body of the response. + def deserialize(response, multiple = false) + case response.code + when "404" + return nil + when /^2/ + raise "No content type in http response; cannot parse" unless response['content-type'] + + content_type = response['content-type'].gsub(/\s*;.*$/,'') # strip any appended charset + + body = uncompress_body(response) + + # Convert the response to a deserialized object. + if multiple + model.convert_from_multiple(content_type, body) + else + model.convert_from(content_type, body) + end + else + # Raise the http error if we didn't get a 'success' of some kind. + raise convert_to_http_error(response) + end + end + + def convert_to_http_error(response) + message = "Error #{response.code} on SERVER: #{(response.body||'').empty? ? response.message : uncompress_body(response)}" + Net::HTTPError.new(message, response) + end + + # Provide appropriate headers. + def headers + add_accept_encoding({"Accept" => model.supported_formats.join(", ")}) + end + + def network(request) + Puppet::Network::HttpPool.http_instance(request.server || self.class.server, request.port || self.class.port) + end + + def find(request) + return nil unless result = deserialize(network(request).get(indirection2uri(request), headers)) + result.name = request.key if result.respond_to?(:name=) + result + end + + def head(request) + response = network(request).head(indirection2uri(request), headers) + case response.code + when "404" + return false + when /^2/ + return true + else + # Raise the http error if we didn't get a 'success' of some kind. + raise convert_to_http_error(response) + end + end + + def search(request) + unless result = deserialize(network(request).get(indirection2uri(request), headers), true) + return [] + end + result + end + + def destroy(request) + raise ArgumentError, "DELETE does not accept options" unless request.options.empty? + deserialize network(request).delete(indirection2uri(request), headers) + end + + def save(request) + raise ArgumentError, "PUT does not accept options" unless request.options.empty? + deserialize network(request).put(indirection2uri(request), request.instance.render, headers.merge({ "Content-Type" => request.instance.mime })) + end + + private + + def environment + Puppet::Node::Environment.new + end +end diff --git a/mcollective/lib/puppet/indirector/run/local.rb b/mcollective/lib/puppet/indirector/run/local.rb new file mode 100644 index 000000000..8cf65d179 --- /dev/null +++ b/mcollective/lib/puppet/indirector/run/local.rb @@ -0,0 +1,8 @@ +require 'puppet/run' +require 'puppet/indirector/code' + +class Puppet::Run::Local < Puppet::Indirector::Code + def save( request ) + request.instance.run + end +end diff --git a/mcollective/lib/puppet/indirector/run/rest.rb b/mcollective/lib/puppet/indirector/run/rest.rb new file mode 100644 index 000000000..cbd34814a --- /dev/null +++ b/mcollective/lib/puppet/indirector/run/rest.rb @@ -0,0 +1,6 @@ +require 'puppet/run' +require 'puppet/indirector/rest' + +class Puppet::Run::Rest < Puppet::Indirector::REST + desc "Trigger Agent runs via REST." 
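# Editor's note (illustrative sketch, not part of this patch): REST termini
# fall back to the 'server' and 'masterport' settings; a terminus can point
# at a different service by naming other settings, as the report terminus
# above does with :report_server and :report_port. The class and setting
# names below are invented:
#
#   class Puppet::Status::Remote < Puppet::Indirector::REST
#     use_server_setting(:status_server)
#     use_port_setting(:status_port)
#   end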
+end diff --git a/mcollective/lib/puppet/indirector/ssl_file.rb b/mcollective/lib/puppet/indirector/ssl_file.rb new file mode 100644 index 000000000..45104999a --- /dev/null +++ b/mcollective/lib/puppet/indirector/ssl_file.rb @@ -0,0 +1,178 @@ +require 'puppet/ssl' + +class Puppet::Indirector::SslFile < Puppet::Indirector::Terminus + # Specify the directory in which multiple files are stored. + def self.store_in(setting) + @directory_setting = setting + end + + # Specify a single file location for storing just one file. + # This is used for things like the CRL. + def self.store_at(setting) + @file_setting = setting + end + + # Specify where a specific ca file should be stored. + def self.store_ca_at(setting) + @ca_setting = setting + end + + class << self + attr_reader :directory_setting, :file_setting, :ca_setting + end + + # The full path to where we should store our files. + def self.collection_directory + return nil unless directory_setting + Puppet.settings[directory_setting] + end + + # The full path to an individual file we would be managing. + def self.file_location + return nil unless file_setting + Puppet.settings[file_setting] + end + + # The full path to a ca file we would be managing. + def self.ca_location + return nil unless ca_setting + Puppet.settings[ca_setting] + end + + # We assume that all files named 'ca' are pointing to individual ca files, + # rather than normal host files. It's a bit hackish, but all the other + # solutions seemed even more hackish. + def ca?(name) + name == Puppet::SSL::Host.ca_name + end + + def initialize + Puppet.settings.use(:main, :ssl) + + (collection_directory || file_location) or raise Puppet::DevError, "No file or directory setting provided; terminus #{self.class.name} cannot function" + end + + def path(name) + if name =~ Puppet::Indirector::BadNameRegexp then + Puppet.crit("directory traversal detected in #{self.class}: #{name.inspect}") + raise ArgumentError, "invalid key" + end + + if ca?(name) and ca_location + ca_location + elsif collection_directory + File.join(collection_directory, name.to_s + ".pem") + else + file_location + end + end + + # Remove our file. + def destroy(request) + path = path(request.key) + return false unless FileTest.exist?(path) + + Puppet.notice "Removing file #{model} #{request.key} at '#{path}'" + begin + File.unlink(path) + rescue => detail + raise Puppet::Error, "Could not remove #{request.key}: #{detail}" + end + end + + # Find the file on disk, returning an instance of the model. + def find(request) + path = path(request.key) + + return nil unless FileTest.exist?(path) or rename_files_with_uppercase(path) + + result = model.new(request.key) + result.read(path) + result + end + + # Save our file to disk. + def save(request) + path = path(request.key) + dir = File.dirname(path) + + raise Puppet::Error.new("Cannot save #{request.key}; parent directory #{dir} does not exist") unless FileTest.directory?(dir) + raise Puppet::Error.new("Cannot save #{request.key}; parent directory #{dir} is not writable") unless FileTest.writable?(dir) + + write(request.key, path) { |f| f.print request.instance.to_s } + end + + # Search for more than one file. At this point, it just returns + # an instance for every file in the directory. + def search(request) + dir = collection_directory + Dir.entries(dir).reject { |file| file !~ /\.pem$/ }.collect do |file| + name = file.sub(/\.pem$/, '') + result = model.new(name) + result.read(File.join(dir, file)) + result + end + end + + private + + # Demeterish pointers to class info. 
+ def collection_directory + self.class.collection_directory + end + + def file_location + self.class.file_location + end + + def ca_location + self.class.ca_location + end + + # A hack method to deal with files that exist with a different case. + # Just renames it; doesn't read it in or anything. + # LAK:NOTE This is a copy of the method in sslcertificates/support.rb, + # which we'll be EOL'ing at some point. This method was added at 20080702 + # and should be removed at some point. + def rename_files_with_uppercase(file) + dir, short = File.split(file) + return nil unless FileTest.exist?(dir) + + raise ArgumentError, "Tried to fix SSL files to a file containing uppercase" unless short.downcase == short + real_file = Dir.entries(dir).reject { |f| f =~ /^\./ }.find do |other| + other.downcase == short + end + + return nil unless real_file + + full_file = File.join(dir, real_file) + + Puppet.notice "Fixing case in #{full_file}; renaming to #{file}" + File.rename(full_file, file) + + true + end + + # Yield a filehandle set up appropriately, either with our settings doing + # the work or opening a filehandle manually. + def write(name, path) + if ca?(name) and ca_location + Puppet.settings.write(self.class.ca_setting) { |f| yield f } + elsif file_location + Puppet.settings.write(self.class.file_setting) { |f| yield f } + elsif setting = self.class.directory_setting + begin + Puppet.settings.writesub(setting, path) { |f| yield f } + rescue => detail + raise Puppet::Error, "Could not write #{path} to #{setting}: #{detail}" + end + else + raise Puppet::DevError, "You must provide a setting to determine where the files are stored" + end + end +end + +# LAK:NOTE This has to be at the end, because classes like SSL::Key use this +# class, and this require statement loads those, which results in a load loop +# and lots of failures. +require 'puppet/ssl/host' diff --git a/mcollective/lib/puppet/indirector/status.rb b/mcollective/lib/puppet/indirector/status.rb new file mode 100644 index 000000000..f40bbc4d8 --- /dev/null +++ b/mcollective/lib/puppet/indirector/status.rb @@ -0,0 +1,3 @@ +# A stub class, so our constants work. +class Puppet::Indirector::Status +end diff --git a/mcollective/lib/puppet/indirector/status/local.rb b/mcollective/lib/puppet/indirector/status/local.rb new file mode 100644 index 000000000..9951f7e22 --- /dev/null +++ b/mcollective/lib/puppet/indirector/status/local.rb @@ -0,0 +1,7 @@ +require 'puppet/indirector/status' + +class Puppet::Indirector::Status::Local < Puppet::Indirector::Code + def find( *anything ) + model.new + end +end diff --git a/mcollective/lib/puppet/indirector/status/rest.rb b/mcollective/lib/puppet/indirector/status/rest.rb new file mode 100644 index 000000000..22e70429b --- /dev/null +++ b/mcollective/lib/puppet/indirector/status/rest.rb @@ -0,0 +1,5 @@ +require 'puppet/indirector/status' +require 'puppet/indirector/rest' + +class Puppet::Indirector::Status::Rest < Puppet::Indirector::REST +end diff --git a/mcollective/lib/puppet/indirector/terminus.rb b/mcollective/lib/puppet/indirector/terminus.rb new file mode 100644 index 000000000..4ebd0d004 --- /dev/null +++ b/mcollective/lib/puppet/indirector/terminus.rb @@ -0,0 +1,148 @@ +require 'puppet/indirector' +require 'puppet/indirector/indirection' +require 'puppet/util/instance_loader' + +# A simple class that can function as the base class for indirected types. 
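# Editor's note (illustrative example, not part of this patch): the class
# name of a concrete terminus is what wires it up. The trailing constants are
# converted to underscored symbols (see const2name and inherited below), for
# example:
#
#   Puppet::Node::Exec         # indirection :node, terminus :exec
#   Puppet::Node::ActiveRecord # indirection :node, terminus :active_record
#
# and the terminus file is expected at
# puppet/indirector/<indirection>/<terminus>.rb so the instance loader used
# by register_terminus_class and terminus_class can find it.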
+class Puppet::Indirector::Terminus + require 'puppet/util/docs' + extend Puppet::Util::Docs + + class << self + include Puppet::Util::InstanceLoader + + attr_accessor :name, :terminus_type + attr_reader :abstract_terminus, :indirection + + # Are we an abstract terminus type, rather than an instance with an + # associated indirection? + def abstract_terminus? + abstract_terminus + end + + # Convert a constant to a short name. + def const2name(const) + const.sub(/^[A-Z]/) { |i| i.downcase }.gsub(/[A-Z]/) { |i| "_#{i.downcase}" }.intern + end + + # Look up the indirection if we were only provided a name. + def indirection=(name) + if name.is_a?(Puppet::Indirector::Indirection) + @indirection = name + elsif ind = Puppet::Indirector::Indirection.instance(name) + @indirection = ind + else + raise ArgumentError, "Could not find indirection instance #{name} for #{self.name}" + end + end + + def indirection_name + @indirection.name + end + + # Register our subclass with the appropriate indirection. + # This follows the convention that our terminus is named after the + # indirection. + def inherited(subclass) + longname = subclass.to_s + if longname =~ /#<Class/ + raise ArgumentError, "Terminus subclasses must have associated constants" + end + names = longname.split("::") + + # Convert everything to a downcased symbol, converting camelcase to underscore word separation. + name = names.pop.sub(/^[A-Z]/) { |i| i.downcase }.gsub(/[A-Z]/) { |i| "_#{i.downcase}" }.intern + + subclass.name = name + + # Short-circuit the abstract types, which are those that directly subclass the terminus class. + if self == Puppet::Indirector::Terminus + subclass.mark_as_abstract_terminus + return + end + + # Set the terminus type to be the name of the terminus class. + subclass.terminus_type = name + + # This subclass is specifically associated with an indirection. + raise("Invalid name #{longname}") unless names.length > 0 + indirection_name = names.pop.sub(/^[A-Z]/) { |i| i.downcase }.gsub(/[A-Z]/) { |i| "_#{i.downcase}" }.intern + + if indirection_name == "" or indirection_name.nil? + raise Puppet::DevError, "Could not discern indirection model from class constant" + end + + # This will throw an exception if the indirection instance cannot be found. + # Do this last, because it also registers the terminus type with the indirection, + # which needs the above information. + subclass.indirection = indirection_name + + # And add this instance to the instance hash. + Puppet::Indirector::Terminus.register_terminus_class(subclass) + end + + # Mark that this instance is abstract. + def mark_as_abstract_terminus + @abstract_terminus = true + end + + def model + indirection.model + end + + # Convert a short name to a constant. + def name2const(name) + name.to_s.capitalize.sub(/_(.)/) { |i| $1.upcase } + end + + # Register a class, probably autoloaded. + def register_terminus_class(klass) + setup_instance_loading klass.indirection_name + instance_hash(klass.indirection_name)[klass.name] = klass + end + + # Return a terminus by name, using the autoloader. + def terminus_class(indirection_name, terminus_type) + setup_instance_loading indirection_name + loaded_instance(indirection_name, terminus_type) + end + + # Return all terminus classes for a given indirection. + def terminus_classes(indirection_name) + setup_instance_loading indirection_name + + # Load them all. + instance_loader(indirection_name).loadall + + # And return the list of names. + loaded_instances(indirection_name) + end + + private + + def setup_instance_loading(type) + instance_load type, "puppet/indirector/#{type}" unless instance_loading?(type) + end + end + + def indirection + self.class.indirection + end + + def initialize + raise Puppet::DevError, "Cannot create instances of abstract terminus types" if self.class.abstract_terminus?
+ end + + def model + self.class.model + end + + def name + self.class.name + end + + def terminus_type + self.class.terminus_type + end +end diff --git a/mcollective/lib/puppet/indirector/yaml.rb b/mcollective/lib/puppet/indirector/yaml.rb new file mode 100644 index 000000000..4c488da62 --- /dev/null +++ b/mcollective/lib/puppet/indirector/yaml.rb @@ -0,0 +1,70 @@ +require 'puppet/indirector/terminus' +require 'puppet/util/file_locking' + +# The base class for YAML indirection termini. +class Puppet::Indirector::Yaml < Puppet::Indirector::Terminus + include Puppet::Util::FileLocking + + # Read a given name's file in and convert it from YAML. + def find(request) + file = path(request.key) + return nil unless FileTest.exist?(file) + + yaml = nil + begin + readlock(file) { |fh| yaml = fh.read } + rescue => detail + raise Puppet::Error, "Could not read YAML data for #{indirection.name} #{request.key}: #{detail}" + end + begin + return from_yaml(yaml) + rescue => detail + raise Puppet::Error, "Could not parse YAML data for #{indirection.name} #{request.key}: #{detail}" + end + end + + # Convert our object to YAML and store it to the disk. + def save(request) + raise ArgumentError.new("You can only save objects that respond to :name") unless request.instance.respond_to?(:name) + + file = path(request.key) + + basedir = File.dirname(file) + + # This is quite likely a bad idea, since we're not managing ownership or modes. + Dir.mkdir(basedir) unless FileTest.exist?(basedir) + + begin + writelock(file, 0660) { |f| f.print to_yaml(request.instance) } + rescue TypeError => detail + Puppet.err "Could not save #{self.name} #{request.key}: #{detail}" + end + end + + # Return the path to a given node's file. + def path(name,ext='.yaml') + if name =~ Puppet::Indirector::BadNameRegexp then + Puppet.crit("directory traversal detected in #{self.class}: #{name.inspect}") + raise ArgumentError, "invalid key" + end + + base = Puppet.run_mode.master? ? Puppet[:yamldir] : Puppet[:clientyamldir] + File.join(base, self.class.indirection_name.to_s, name.to_s + ext) + end + + def search(request) + Dir.glob(path(request.key,'')).collect do |file| + YAML.load_file(file) + end + end + + private + + def from_yaml(text) + YAML.load(text) + end + + def to_yaml(object) + YAML.dump(object) + end +end diff --git a/mcollective/lib/puppet/metatype/manager.rb b/mcollective/lib/puppet/metatype/manager.rb new file mode 100644 index 000000000..597a89f31 --- /dev/null +++ b/mcollective/lib/puppet/metatype/manager.rb @@ -0,0 +1,134 @@ +require 'puppet' +require 'puppet/util/classgen' + +# Methods dealing with Type management. This module gets included into the +# Puppet::Type class, it's just split out here for clarity. +module Puppet::MetaType +module Manager + include Puppet::Util::ClassGen + + # remove all type instances; this is mostly only useful for testing + def allclear + @types.each { |name, type| + type.clear + } + end + + # iterate across all of the subclasses of Type + def eachtype + @types.each do |name, type| + # Only consider types that have names + #if ! type.parameters.empty? or ! type.validproperties.empty? + yield type + #end + end + end + + # Load all types. Only currently used for documentation. + def loadall + typeloader.loadall + end + + # Define a new type. 
+ def newtype(name, options = {}, &block) + # Handle backward compatibility + unless options.is_a?(Hash) + Puppet.warning "Puppet::Type.newtype(#{name}) now expects a hash as the second argument, not #{options.inspect}" + options = {:parent => options} + end + + # First make sure we don't have a method sitting around + name = symbolize(name) + newmethod = "new#{name.to_s}" + + # Used for method manipulation. + selfobj = singleton_class + + @types ||= {} + + if @types.include?(name) + if self.respond_to?(newmethod) + # Remove the old newmethod + selfobj.send(:remove_method,newmethod) + end + end + + options = symbolize_options(options) + + if parent = options[:parent] + options.delete(:parent) + end + + # Then create the class. + + klass = genclass( + name, + :parent => (parent || Puppet::Type), + :overwrite => true, + :hash => @types, + :attributes => options, + &block + ) + + # Now define a "new" method for convenience. + if self.respond_to? newmethod + # Refuse to overwrite existing methods like 'newparam' or 'newtype'. + Puppet.warning "'new#{name.to_s}' method already exists; skipping" + else + selfobj.send(:define_method, newmethod) do |*args| + klass.new(*args) + end + end + + # If they've got all the necessary methods defined and they haven't + # already added the property, then do so now. + klass.ensurable if klass.ensurable? and ! klass.validproperty?(:ensure) + + # Now set up autoload any providers that might exist for this type. + + klass.providerloader = Puppet::Util::Autoload.new(klass, "puppet/provider/#{klass.name.to_s}") + + # We have to load everything so that we can figure out the default provider. + klass.providerloader.loadall + klass.providify unless klass.providers.empty? + + klass + end + + # Remove an existing defined type. Largely used for testing. + def rmtype(name) + # Then create the class. + + klass = rmclass(name, :hash => @types) + + singleton_class.send(:remove_method, "new#{name}") if respond_to?("new#{name}") + end + + # Return a Type instance by name. + def type(name) + @types ||= {} + + name = name.to_s.downcase.to_sym + + if t = @types[name] + return t + else + if typeloader.load(name) + Puppet.warning "Loaded puppet/type/#{name} but no class was created" unless @types.include? name + end + + return @types[name] + end + end + + # Create a loader for Puppet types. + def typeloader + unless defined?(@typeloader) + @typeloader = Puppet::Util::Autoload.new(self, "puppet/type", :wrap => false) + end + + @typeloader + end +end +end + diff --git a/mcollective/lib/puppet/module.rb b/mcollective/lib/puppet/module.rb new file mode 100644 index 000000000..43266b2b5 --- /dev/null +++ b/mcollective/lib/puppet/module.rb @@ -0,0 +1,204 @@ +require 'puppet/util/logging' + +# Support for modules +class Puppet::Module + class Error < Puppet::Error; end + class MissingModule < Error; end + class IncompatibleModule < Error; end + class UnsupportedPlatform < Error; end + class IncompatiblePlatform < Error; end + class MissingMetadata < Error; end + class InvalidName < Error; end + + include Puppet::Util::Logging + + TEMPLATES = "templates" + FILES = "files" + MANIFESTS = "manifests" + PLUGINS = "plugins" + + FILETYPES = [MANIFESTS, FILES, TEMPLATES, PLUGINS] + + # Return an array of paths by splitting the +modulepath+ config + # parameter. Only consider paths that are absolute and existing + # directories + def self.modulepath(environment = nil) + Puppet::Node::Environment.new(environment).modulepath + end + + # Find and return the +module+ that +path+ belongs to. 
If +path+ is + # absolute, or if there is no module whose name is the first component + # of +path+, return +nil+ + def self.find(modname, environment = nil) + return nil unless modname + Puppet::Node::Environment.new(environment).module(modname) + end + + attr_reader :name, :environment + attr_writer :environment + + attr_accessor :source, :author, :version, :license, :puppetversion, :summary, :description, :project_page + + def has_metadata? + return false unless metadata_file + + FileTest.exist?(metadata_file) + end + + def initialize(name, environment = nil) + @name = name + + assert_validity + + if environment.is_a?(Puppet::Node::Environment) + @environment = environment + else + @environment = Puppet::Node::Environment.new(environment) + end + + load_metadata if has_metadata? + + validate_puppet_version + validate_dependencies + end + + FILETYPES.each do |type| + # A boolean method to let external callers determine if + # we have files of a given type. + define_method(type +'?') do + return false unless path + return false unless FileTest.exist?(subpath(type)) + return true + end + + # A method for returning a given file of a given type. + # e.g., file = mod.manifest("my/manifest.pp") + # + # If the file name is nil, then the base directory for the + # file type is passed; this is used for fileserving. + define_method(type.to_s.sub(/s$/, '')) do |file| + return nil unless path + + # If 'file' is nil then they're asking for the base path. + # This is used for things like fileserving. + if file + full_path = File.join(subpath(type), file) + else + full_path = subpath(type) + end + + return nil unless FileTest.exist?(full_path) + return full_path + end + end + + def exist? + ! path.nil? + end + + # Find the first 'files' directory. This is used by the XMLRPC fileserver. + def file_directory + subpath("files") + end + + def license_file + return @license_file if defined?(@license_file) + + return @license_file = nil unless path + @license_file = File.join(path, "License") + end + + def load_metadata + data = PSON.parse File.read(metadata_file) + [:source, :author, :version, :license, :puppetversion].each do |attr| + unless value = data[attr.to_s] + unless attr == :puppetversion + raise MissingMetadata, "No #{attr} module metadata provided for #{self.name}" + end + end + send(attr.to_s + "=", value) + end + end + + # Return the list of manifests matching the given glob pattern, + # defaulting to 'init.{pp,rb}' for empty modules. + def match_manifests(rest) + pat = File.join(path, MANIFESTS, rest || 'init') + [manifest("init.pp"),manifest("init.rb")].compact + Dir. + glob(pat + (File.extname(pat).empty? ? '.{pp,rb}' : '')). + reject { |f| FileTest.directory?(f) } + end + + def metadata_file + return @metadata_file if defined?(@metadata_file) + + return @metadata_file = nil unless path + @metadata_file = File.join(path, "metadata.json") + end + + # Find this module in the modulepath. + def path + environment.modulepath.collect { |path| File.join(path, name) }.find { |d| FileTest.exist?(d) } + end + + # Find all plugin directories. This is used by the Plugins fileserving mount. 
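Before the plugin helpers below, a rough sketch of how the Puppet::Module accessors generated above are used. The "ntp" module name is purely illustrative, and each call returns nil or false when the module or file is not actually present on the modulepath.

    require 'puppet'
    require 'puppet/module'

    mod = Puppet::Module.new("ntp")   # hypothetical module name

    mod.path                  # first modulepath entry containing an "ntp" directory, or nil
    mod.files?                # true only if the module has a files/ subdirectory
    mod.manifest("init.pp")   # absolute path to manifests/init.pp, or nil if missing
    mod.file("ntp.conf")      # absolute path to files/ntp.conf, or nil if missing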
+ def plugin_directory + subpath("plugins") + end + + def requires(name, version = nil) + @requires ||= [] + @requires << [name, version] + end + + def supports(name, version = nil) + @supports ||= [] + @supports << [name, version] + end + + def to_s + result = "Module #{name}" + result += "(#{path})" if path + result + end + + def validate_dependencies + return unless defined?(@requires) + + @requires.each do |name, version| + unless mod = environment.module(name) + raise MissingModule, "Missing module #{name} required by #{self.name}" + end + + if version and mod.version != version + raise IncompatibleModule, "Required module #{name} is version #{mod.version} but #{self.name} requires #{version}" + end + end + end + + def validate_puppet_version + return unless puppetversion and puppetversion != Puppet.version + raise IncompatibleModule, "Module #{self.name} is only compatible with Puppet version #{puppetversion}, not #{Puppet.version}" + end + + private + + def subpath(type) + return File.join(path, type) unless type.to_s == "plugins" + + backward_compatible_plugins_dir + end + + def backward_compatible_plugins_dir + if dir = File.join(path, "plugins") and FileTest.exist?(dir) + Puppet.warning "using the deprecated 'plugins' directory for ruby extensions; please move to 'lib'" + return dir + else + return File.join(path, "lib") + end + end + + def assert_validity + raise InvalidName, "Invalid module name; module names must be alphanumeric (plus '-'), not '#{name}'" unless name =~ /^[-\w]+$/ + end +end diff --git a/mcollective/lib/puppet/network.rb b/mcollective/lib/puppet/network.rb new file mode 100644 index 000000000..8993b8869 --- /dev/null +++ b/mcollective/lib/puppet/network.rb @@ -0,0 +1,3 @@ +# Just a stub, so we can correctly scope other classes. +module Puppet::Network # :nodoc: +end diff --git a/mcollective/lib/puppet/network/authconfig.rb b/mcollective/lib/puppet/network/authconfig.rb new file mode 100644 index 000000000..4ba89fa71 --- /dev/null +++ b/mcollective/lib/puppet/network/authconfig.rb @@ -0,0 +1,172 @@ +require 'puppet/util/loadedfile' +require 'puppet/network/rights' + +module Puppet + class ConfigurationError < Puppet::Error; end + class Network::AuthConfig < Puppet::Util::LoadedFile + + def self.main + @main ||= self.new + end + + # Just proxy the setting methods to our rights stuff + [:allow, :deny].each do |method| + define_method(method) do |*args| + @rights.send(method, *args) + end + end + + # Here we add a little bit of semantics. They can set auth on a whole + # namespace or on just a single method in the namespace. + def allowed?(request) + name = request.call.intern + namespace = request.handler.intern + method = request.method.intern + + read + + if @rights.include?(name) + return @rights[name].allowed?(request.name, request.ip) + elsif @rights.include?(namespace) + return @rights[namespace].allowed?(request.name, request.ip) + end + false + end + + # Does the file exist? Puppetmasterd does not require it, but + # puppet agent does. + def exists? + FileTest.exists?(@file) + end + + def initialize(file = nil, parsenow = true) + @file = file || Puppet[:authconfig] + + raise Puppet::DevError, "No authconfig file defined" unless @file + return unless self.exists? + super(@file) + @rights = Puppet::Network::Rights.new + @configstamp = @configstatted = nil + @configtimeout = 60 + + read if parsenow + end + + # Read the configuration file. 
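For reference, the read and parse methods that follow consume an authconfig file of roughly the following shape; the handler names, network and regex below are invented examples, not defaults shipped with this tree.

    # [namespace] or [namespace.method] sections for XMLRPC handlers
    [fileserver]
        allow *.example.com
        deny  badhost.example.com

    [puppetmaster.getconfig]
        allow 192.168.0.0/24

    # path-based sections (plain or regex) for the REST interface
    path ~ ^/catalog/([^/]+)$
        allow $1
        method find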
+ def read + return unless FileTest.exists?(@file) + + if @configstamp + if @configtimeout and @configstatted + if Time.now - @configstatted > @configtimeout + @configstatted = Time.now + tmp = File.stat(@file).ctime + + if tmp == @configstamp + return + else + Puppet.notice "#{tmp} vs #{@configstamp}" + end + else + return + end + else + Puppet.notice "#{@configtimeout} and #{@configstatted}" + end + end + + parse + + @configstamp = File.stat(@file).ctime + @configstatted = Time.now + end + + private + + def parse + newrights = Puppet::Network::Rights.new + begin + File.open(@file) { |f| + right = nil + count = 1 + f.each { |line| + case line + when /^\s*#/ # skip comments + count += 1 + next + when /^\s*$/ # skip blank lines + count += 1 + next + when /^(?:(\[[\w.]+\])|(path)\s+((?:~\s+)?[^ ]+))\s*$/ # "namespace" or "namespace.method" or "path /path" or "path ~ regex" + name = $1 + name = $3 if $2 == "path" + name.chomp! + right = newrights.newright(name, count, @file) + when /^\s*(allow|deny|method|environment|auth(?:enticated)?)\s+(.+)$/ + parse_right_directive(right, $1, $2, count) + else + raise ConfigurationError, "Invalid line #{count}: #{line}" + end + count += 1 + } + } + rescue Errno::EACCES => detail + Puppet.err "Configuration error: Cannot read #{@file}; cannot serve" + #raise Puppet::Error, "Cannot read #{@config}" + rescue Errno::ENOENT => detail + Puppet.err "Configuration error: '#{@file}' does not exit; cannot serve" + #raise Puppet::Error, "#{@config} does not exit" + #rescue FileServerError => detail + # Puppet.err "FileServer error: #{detail}" + end + + # Verify each of the rights are valid. + # We let the check raise an error, so that it can raise an error + # pointing to the specific problem. + newrights.each { |name, right| + right.valid? 
+ } + @rights = newrights + end + + def parse_right_directive(right, var, value, count) + case var + when "allow" + modify_right(right, :allow, value, "allowing %s access", count) + when "deny" + modify_right(right, :deny, value, "denying %s access", count) + when "method" + unless right.acl_type == :regex + raise ConfigurationError, "'method' directive not allowed in namespace ACL at line #{count} of #{@config}" + end + modify_right(right, :restrict_method, value, "allowing 'method' %s", count) + when "environment" + unless right.acl_type == :regex + raise ConfigurationError, "'environment' directive not allowed in namespace ACL at line #{count} of #{@config}" + end + modify_right(right, :restrict_environment, value, "adding environment %s", count) + when /auth(?:enticated)?/ + unless right.acl_type == :regex + raise ConfigurationError, "'authenticated' directive not allowed in namespace ACL at line #{count} of #{@config}" + end + modify_right(right, :restrict_authenticated, value, "adding authentication %s", count) + else + raise ConfigurationError, + "Invalid argument '#{var}' at line #{count}" + end + end + + def modify_right(right, method, value, msg, count) + value.split(/\s*,\s*/).each do |val| + begin + right.info msg % val + right.send(method, val) + rescue AuthStoreError => detail + raise ConfigurationError, "#{detail} at line #{count} of #{@file}" + end + end + end + + end +end + diff --git a/mcollective/lib/puppet/network/authorization.rb b/mcollective/lib/puppet/network/authorization.rb new file mode 100644 index 000000000..b9cab2132 --- /dev/null +++ b/mcollective/lib/puppet/network/authorization.rb @@ -0,0 +1,76 @@ +require 'puppet/network/client_request' +require 'puppet/network/authconfig' + +module Puppet::Network + # Most of our subclassing is just so that we can get + # access to information from the request object, like + # the client name and IP address. + class InvalidClientRequest < Puppet::Error; end + module Authorization + # Create our config object if necessary. This works even if + # there's no configuration file. + def authconfig + @authconfig ||= Puppet::Network::AuthConfig.main + + @authconfig + end + + # Verify that our client has access. We allow untrusted access to + # puppetca methods but no others. + def authorized?(request) + msg = "#{request.authenticated? ? "authenticated" : "unauthenticated"} client #{request} access to #{request.call}" + + if request.authenticated? + if authconfig.exists? + if authconfig.allowed?(request) + Puppet.debug "Allowing #{msg}" + return true + else + Puppet.notice "Denying #{msg}" + return false + end + else + if Puppet.run_mode.master? + Puppet.debug "Allowing #{msg}" + return true + else + Puppet.notice "Denying #{msg}" + return false + end + end + else + if request.handler == "puppetca" + Puppet.notice "Allowing #{msg}" + else + Puppet.notice "Denying #{msg}" + return false + end + end + end + + # Is this functionality available? + def available?(request) + if handler_loaded?(request.handler) + return true + else + Puppet.warning "Client #{request} requested unavailable functionality #{request.handler}" + return false + end + end + + # Make sure that this method is available and authorized. 
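A sketch of how an including handler might drive the verify method defined just below. DemoHandler, the host name and the handler name are all invented; handler_loaded? is stubbed because verify expects the including class to provide it.

    require 'puppet'
    require 'puppet/network/authorization'
    require 'puppet/network/client_request'

    class DemoHandler                       # hypothetical handler, not part of this diff
      include Puppet::Network::Authorization

      def handler_loaded?(name)             # available?() calls this hook on the includer
        name.to_s == "status"
      end
    end

    req = Puppet::Network::ClientRequest.new("agent01.example.com", "192.168.0.10", true)
    req.handler = "status"
    req.method  = "status"

    # raises InvalidClientRequest unless an authconfig entry (or master run mode) allows it
    DemoHandler.new.verify(req)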
+ def verify(request) + unless available?(request) + raise InvalidClientRequest.new( + "Functionality #{request.handler} not available" + ) + end + unless authorized?(request) + raise InvalidClientRequest.new( + "Host #{request} not authorized to call #{request.call}" + ) + end + end + end +end + diff --git a/mcollective/lib/puppet/network/authstore.rb b/mcollective/lib/puppet/network/authstore.rb new file mode 100755 index 000000000..4ddd14feb --- /dev/null +++ b/mcollective/lib/puppet/network/authstore.rb @@ -0,0 +1,250 @@ +# standard module for determining whether a given hostname or IP has access to +# the requested resource + +require 'ipaddr' +require 'puppet/util/logging' + +module Puppet + class AuthStoreError < Puppet::Error; end + class AuthorizationError < Puppet::Error; end + + class Network::AuthStore + include Puppet::Util::Logging + + # Mark a given pattern as allowed. + def allow(pattern) + # a simple way to allow anyone at all to connect + if pattern == "*" + @globalallow = true + else + store(:allow, pattern) + end + + nil + end + + # Is a given combination of name and ip address allowed? If either input + # is non-nil, then both inputs must be provided. If neither input + # is provided, then the authstore is considered local and defaults to "true". + def allowed?(name, ip) + if name or ip + # This is probably unnecessary, and can cause some weirdnesses in + # cases where we're operating over localhost but don't have a real + # IP defined. + raise Puppet::DevError, "Name and IP must be passed to 'allowed?'" unless name and ip + # else, we're networked and such + else + # we're local + return true + end + + # yay insecure overrides + return true if globalallow? + + if decl = declarations.find { |d| d.match?(name, ip) } + return decl.result + end + + info "defaulting to no access for #{name}" + false + end + + # Deny a given pattern. + def deny(pattern) + store(:deny, pattern) + end + + # Is global allow enabled? + def globalallow? + @globalallow + end + + # does this auth store has any rules? + def empty? + @globalallow.nil? && @declarations.size == 0 + end + + def initialize + @globalallow = nil + @declarations = [] + end + + def to_s + "authstore" + end + + def interpolate(match) + Thread.current[:declarations] = @declarations.collect { |ace| ace.interpolate(match) }.sort + end + + def reset_interpolation + Thread.current[:declarations] = nil + end + + private + + # returns our ACEs list, but if we have a modification of it + # in our current thread, let's return it + # this is used if we want to override the this purely immutable list + # by a modified version in a multithread safe way. + def declarations + Thread.current[:declarations] || @declarations + end + + # Store the results of a pattern into our hash. Basically just + # converts the pattern and sticks it into the hash. + def store(type, pattern) + @declarations << Declaration.new(type, pattern) + @declarations.sort! + + nil + end + + # A single declaration. Stores the info for a given declaration, + # provides the methods for determining whether a declaration matches, + # and handles sorting the declarations appropriately. + class Declaration + include Puppet::Util + include Comparable + + # The type of declaration: either :allow or :deny + attr_reader :type + + # The name: :ip or :domain + attr_accessor :name + + # The pattern we're matching against. Can be an IPAddr instance, + # or an array of strings, resulting from reversing a hostname + # or domain name. + attr_reader :pattern + + # The length. 
Only used for iprange and domain. + attr_accessor :length + + # Sort the declarations most specific first. + def <=>(other) + compare(exact?, other.exact?) || + compare(ip?, other.ip?) || + ((length != other.length) && (other.length <=> length)) || + compare(deny?, other.deny?) || + ( ip? ? pattern.to_s <=> other.pattern.to_s : pattern <=> other.pattern) + end + + def deny? + type == :deny + end + + def exact? + @exact == :exact + end + + def initialize(type, pattern) + self.type = type + self.pattern = pattern + end + + # Are we an IP type? + def ip? + name == :ip + end + + # Does this declaration match the name/ip combo? + def match?(name, ip) + ip? ? pattern.include?(IPAddr.new(ip)) : matchname?(name) + end + + # Set the pattern appropriately. Also sets the name and length. + def pattern=(pattern) + parse(pattern) + @orig = pattern + end + + # Mapping a type of statement into a return value. + def result + type == :allow + end + + def to_s + "#{type}: #{pattern}" + end + + # Set the declaration type. Either :allow or :deny. + def type=(type) + type = symbolize(type) + raise ArgumentError, "Invalid declaration type #{type}" unless [:allow, :deny].include?(type) + @type = type + end + + # interpolate a pattern to replace any + # backreferences by the given match + # for instance if our pattern is $1.reductivelabs.com + # and we're called with a MatchData whose capture 1 is puppet + # we'll return a pattern of puppet.reductivelabs.com + def interpolate(match) + clone = dup + clone.pattern = clone.pattern.reverse.collect do |p| + p.gsub(/\$(\d)/) { |m| match[$1.to_i] } + end.join(".") + clone + end + + private + + # Returns nil if both values are true or both are false, returns + # -1 if the first is true, and 1 if the second is true. Used + # in the <=> operator. + def compare(me, them) + (me and them) ? nil : me ? -1 : them ? 1 : nil + end + + # Does the name match our pattern? + def matchname?(name) + name = munge_name(name) + (pattern == name) or (not exact? and pattern.zip(name).all? { |p,n| p == n }) + end + + # Convert the name to a common pattern. + def munge_name(name) + # LAK:NOTE http://snurl.com/21zf8 [groups_google_com] + # Change to name.downcase.split(".",-1).reverse for FQDN support + name.downcase.split(".").reverse + end + + # Parse our input pattern and figure out what kind of allowal + # statement it is. The output of this is used for later matching. + Octet = '(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])' + IPv4 = "#{Octet}\.#{Octet}\.#{Octet}\.#{Octet}" + IPv6_full = "_:_:_:_:_:_:_:_|_:_:_:_:_:_::_?|_:_:_:_:_::((_:)?_)?|_:_:_:_::((_:){0,2}_)?|_:_:_::((_:){0,3}_)?|_:_::((_:){0,4}_)?|_::((_:){0,5}_)?|::((_:){0,6}_)?" 
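These regex constants, together with IPv6_partial just below, feed the parse method that classifies each allow/deny pattern. In practice the patterns look like the following; the host names and networks are invented, and the sketch only exercises behaviour visible in the code above.

    require 'puppet'
    require 'puppet/network/authstore'

    store = Puppet::Network::AuthStore.new
    store.allow("*.example.com")        # domain suffix, matched right-to-left
    store.allow("192.168.0.0/24")       # CIDR range
    store.deny("badhost.example.com")   # exact declarations sort first, so deny wins

    store.allowed?("agent01.example.com", "10.0.0.5")     # => true  (domain match)
    store.allowed?("badhost.example.com", "192.168.0.9")  # => false (exact deny)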
+ IPv6_partial = "_:_:_:_:_:_:|_:_:_:_::(_:)?|_:_::(_:){0,2}|_::(_:){0,3}" + # It should be: + # IP = "#{IPv4}|#{IPv6_full}|(#{IPv6_partial}#{IPv4})".gsub(/_/,'([0-9a-fA-F]{1,4})').gsub(/\(/,'(?:') + # but ruby's ipaddr lib doesn't support the hybrid format + IP = "#{IPv4}|#{IPv6_full}".gsub(/_/,'([0-9a-fA-F]{1,4})').gsub(/\(/,'(?:') + def parse(value) + @name,@exact,@length,@pattern = *case value + when /^(?:#{IP})\/(\d+)$/ # 12.34.56.78/24, a001:b002::efff/120, c444:1000:2000::9:192.168.0.1/112 + [:ip,:inexact,$1.to_i,IPAddr.new(value)] + when /^(#{IP})$/ # 10.20.30.40, + [:ip,:exact,nil,IPAddr.new(value)] + when /^(#{Octet}\.){1,3}\*$/ # an ip address with a '*' at the end + segments = value.split(".")[0..-2] + bits = 8*segments.length + [:ip,:inexact,bits,IPAddr.new((segments+[0,0,0])[0,4].join(".") + "/#{bits}")] + when /^(\w[-\w]*\.)+[-\w]+$/ # a full hostname + # Change to /^(\w[-\w]*\.)+[-\w]+\.?$/ for FQDN support + [:domain,:exact,nil,munge_name(value)] + when /^\*(\.(\w[-\w]*)){1,}$/ # *.domain.com + host_sans_star = munge_name(value)[0..-2] + [:domain,:inexact,host_sans_star.length,host_sans_star] + when /\$\d+/ # a backreference pattern ala $1.reductivelabs.com or 192.168.0.$1 or $1.$2 + [:dynamic,:exact,nil,munge_name(value)] + when /^\w[-.@\w]*$/ # ? Just like a host name but allow '@'s and ending '.'s + [:opaque,:exact,nil,[value]] + else + raise AuthStoreError, "Invalid pattern #{value}" + end + end + end + end +end + diff --git a/mcollective/lib/puppet/network/client_request.rb b/mcollective/lib/puppet/network/client_request.rb new file mode 100644 index 000000000..e3df471f4 --- /dev/null +++ b/mcollective/lib/puppet/network/client_request.rb @@ -0,0 +1,29 @@ +module Puppet::Network # :nodoc: + # A struct-like class for passing around a client request. It's mostly + # just used for validation and authorization. + class ClientRequest + attr_accessor :name, :ip, :authenticated, :handler, :method + + def authenticated? + self.authenticated + end + + # A common way of talking about the full call. Individual servers + # are responsible for setting the values correctly, but this common + # format makes it possible to check rights. + def call + raise ArgumentError, "Request is not set up; cannot build call" unless handler and method + + [handler, method].join(".") + end + + def initialize(name, ip, authenticated) + @name, @ip, @authenticated = name, ip, authenticated + end + + def to_s + "#{self.name}(#{self.ip})" + end + end +end + diff --git a/mcollective/lib/puppet/network/format.rb b/mcollective/lib/puppet/network/format.rb new file mode 100644 index 000000000..9cd6cf0b5 --- /dev/null +++ b/mcollective/lib/puppet/network/format.rb @@ -0,0 +1,111 @@ +require 'puppet/provider' +require 'puppet/provider/confiner' + +# A simple class for modeling encoding formats for moving +# instances around the network. 
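The Format class introduced by the comment above is normally driven through FormatHandler, which appears later in this diff, but it can be sketched directly. The :demo format, its mime type and the overridden intern are invented for illustration; by default the class dispatches to from_demo / to_demo on the model class.

    require 'puppet'
    require 'puppet/network/format'

    format = Puppet::Network::Format.new(:demo, :mime => "text/x-demo") do
      def intern(klass, text)    # override the default from_demo dispatch
        klass.new(text)
      end
    end

    format.name     # => :demo
    format.mime     # => "text/x-demo"
    format.weight   # => 5, the default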
+class Puppet::Network::Format + include Puppet::Provider::Confiner + + attr_reader :name, :mime + attr_accessor :intern_method, :render_method, :intern_multiple_method, :render_multiple_method, :weight, :required_methods, :extension + + def init_attribute(name, default) + if value = @options[name] + @options.delete(name) + else + value = default + end + self.send(name.to_s + "=", value) + end + + def initialize(name, options = {}, &block) + @name = name.to_s.downcase.intern + + @options = options + + # This must be done early the values can be used to set required_methods + define_method_names + + method_list = { + :intern_method => "from_#{name}", + :intern_multiple_method => "from_multiple_#{name}", + :render_multiple_method => "to_multiple_#{name}", + :render_method => "to_#{name}" + } + + init_attribute(:mime, "text/#{name}") + init_attribute(:weight, 5) + init_attribute(:required_methods, method_list.keys) + init_attribute(:extension, name.to_s) + + method_list.each do |method, value| + init_attribute(method, value) + end + + raise ArgumentError, "Unsupported option(s) #{@options.keys}" unless @options.empty? + + @options = nil + + instance_eval(&block) if block_given? + end + + def intern(klass, text) + return klass.send(intern_method, text) if klass.respond_to?(intern_method) + raise NotImplementedError, "#{klass} does not respond to #{intern_method}; can not intern instances from #{mime}" + end + + def intern_multiple(klass, text) + return klass.send(intern_multiple_method, text) if klass.respond_to?(intern_multiple_method) + raise NotImplementedError, "#{klass} does not respond to #{intern_multiple_method}; can not intern multiple instances from #{mime}" + end + + def mime=(mime) + @mime = mime.to_s.downcase + end + + def render(instance) + return instance.send(render_method) if instance.respond_to?(render_method) + raise NotImplementedError, "#{instance.class} does not respond to #{render_method}; can not render instances to #{mime}" + end + + def render_multiple(instances) + # This method implicitly assumes that all instances are of the same type. + return instances[0].class.send(render_multiple_method, instances) if instances[0].class.respond_to?(render_multiple_method) + raise NotImplementedError, "#{instances[0].class} does not respond to #{render_multiple_method}; can not intern multiple instances to #{mime}" + end + + def required_methods_present?(klass) + [:intern_method, :intern_multiple_method, :render_multiple_method].each do |name| + return false unless required_method_present?(name, klass, :class) + end + + return false unless required_method_present?(:render_method, klass, :instance) + + true + end + + def supported?(klass) + suitable? and required_methods_present?(klass) + end + + def to_s + "Puppet::Network::Format[#{name}]" + end + + private + + def define_method_names + @intern_method = "from_#{name}" + @render_method = "to_#{name}" + @intern_multiple_method = "from_multiple_#{name}" + @render_multiple_method = "to_multiple_#{name}" + end + + def required_method_present?(name, klass, type) + return true unless required_methods.include?(name) + + method = send(name) + + return(type == :class ? 
klass.respond_to?(method) : klass.instance_methods.include?(method)) + end +end diff --git a/mcollective/lib/puppet/network/format_handler.rb b/mcollective/lib/puppet/network/format_handler.rb new file mode 100644 index 000000000..b94a4f902 --- /dev/null +++ b/mcollective/lib/puppet/network/format_handler.rb @@ -0,0 +1,181 @@ +require 'yaml' +require 'puppet/network' +require 'puppet/network/format' + +module Puppet::Network::FormatHandler + class FormatError < Puppet::Error; end + + class FormatProtector + attr_reader :format + + def protect(method, args) + Puppet::Network::FormatHandler.format(format).send(method, *args) + rescue => details + direction = method.to_s.include?("intern") ? "from" : "to" + error = FormatError.new("Could not #{method} #{direction} #{format}: #{details}") + error.set_backtrace(details.backtrace) + raise error + end + + def initialize(format) + @format = format + end + + [:intern, :intern_multiple, :render, :render_multiple, :mime].each do |method| + define_method(method) do |*args| + protect(method, args) + end + end + end + + @formats = {} + def self.create(*args, &block) + instance = Puppet::Network::Format.new(*args) + instance.instance_eval(&block) if block_given? + + @formats[instance.name] = instance + instance + end + + def self.create_serialized_formats(name,options = {},&block) + ["application/x-#{name}", "application/#{name}", "text/x-#{name}", "text/#{name}"].each { |mime_type| + create name, {:mime => mime_type}.update(options), &block + } + end + + def self.extended(klass) + klass.extend(ClassMethods) + + # LAK:NOTE This won't work in 1.9 ('send' won't be able to send + # private methods, but I don't know how else to do it. + klass.send(:include, InstanceMethods) + end + + def self.format(name) + @formats[name.to_s.downcase.intern] + end + + def self.format_by_extension(ext) + @formats.each do |name, format| + return format if format.extension == ext + end + nil + end + + # Provide a list of all formats. + def self.formats + @formats.keys + end + + # Return a format capable of handling the provided mime type. + def self.mime(mimetype) + mimetype = mimetype.to_s.downcase + @formats.values.find { |format| format.mime == mimetype } + end + + # Use a delegator to make sure any exceptions generated by our formats are + # handled intelligently. + def self.protected_format(name) + name = format_to_canonical_name(name) + @format_protectors ||= {} + @format_protectors[name] ||= FormatProtector.new(name) + @format_protectors[name] + end + + # Return a format name given: + # * a format name + # * a mime-type + # * a format instance + def self.format_to_canonical_name(format) + case format + when Puppet::Network::Format + out = format + when %r{\w+/\w+} + out = mime(format) + else + out = format(format) + end + raise ArgumentError, "No format match the given format name or mime-type (#{format})" if out.nil? 
+ out.name + end + + module ClassMethods + def format_handler + Puppet::Network::FormatHandler + end + + def convert_from(format, data) + format_handler.protected_format(format).intern(self, data) + end + + def convert_from_multiple(format, data) + format_handler.protected_format(format).intern_multiple(self, data) + end + + def render_multiple(format, instances) + format_handler.protected_format(format).render_multiple(instances) + end + + def default_format + supported_formats[0] + end + + def support_format?(name) + Puppet::Network::FormatHandler.format(name).supported?(self) + end + + def supported_formats + result = format_handler.formats.collect { |f| format_handler.format(f) }.find_all { |f| f.supported?(self) }.collect { |f| f.name }.sort do |a, b| + # It's an inverse sort -- higher weight formats go first. + format_handler.format(b).weight <=> format_handler.format(a).weight + end + + result = put_preferred_format_first(result) + + Puppet.debug "#{friendly_name} supports formats: #{result.map{ |f| f.to_s }.sort.join(' ')}; using #{result.first}" + + result + end + + private + + def friendly_name + if self.respond_to? :indirection + indirection.name + else + self + end + end + + def put_preferred_format_first(list) + preferred_format = Puppet.settings[:preferred_serialization_format].to_sym + if list.include?(preferred_format) + list.delete(preferred_format) + list.unshift(preferred_format) + else + Puppet.debug "Value of 'preferred_serialization_format' (#{preferred_format}) is invalid for #{friendly_name}, using default (#{list.first})" + end + list + end + end + + module InstanceMethods + def render(format = nil) + format ||= self.class.default_format + + Puppet::Network::FormatHandler.protected_format(format).render(self) + end + + def mime(format = nil) + format ||= self.class.default_format + + Puppet::Network::FormatHandler.protected_format(format).mime + end + + def support_format?(name) + self.class.support_format?(name) + end + end +end + +require 'puppet/network/formats' diff --git a/mcollective/lib/puppet/network/formats.rb b/mcollective/lib/puppet/network/formats.rb new file mode 100644 index 000000000..dd196921c --- /dev/null +++ b/mcollective/lib/puppet/network/formats.rb @@ -0,0 +1,135 @@ +require 'puppet/network/format_handler' + +Puppet::Network::FormatHandler.create_serialized_formats(:yaml) do + # Yaml doesn't need the class name; it's serialized. + def intern(klass, text) + YAML.load(text) + end + + # Yaml doesn't need the class name; it's serialized. + def intern_multiple(klass, text) + YAML.load(text) + end + + def render(instance) + instance.to_yaml + end + + # Yaml monkey-patches Array, so this works. + def render_multiple(instances) + instances.to_yaml + end + + # Unlike core's yaml, ZAML should support 1.8.1 just fine + def supported?(klass) + true + end +end + +# This is a "special" format which is used for the moment only when sending facts +# as REST GET parameters (see Puppet::Configurer::FactHandler). +# This format combines a yaml serialization, then zlib compression and base64 encoding. +Puppet::Network::FormatHandler.create_serialized_formats(:b64_zlib_yaml) do + require 'base64' + + def use_zlib? + Puppet.features.zlib? && Puppet[:zlib] + end + + def requiring_zlib + if use_zlib? + yield + else + raise Puppet::Error, "the zlib library is not installed or is disabled." 
+ end + end + + def intern(klass, text) + decode(text) + end + + def intern_multiple(klass, text) + decode(text) + end + + def render(instance) + encode(instance.to_yaml) + end + + def render_multiple(instances) + encode(instances.to_yaml) + end + + def supported?(klass) + true + end + + def encode(text) + requiring_zlib do + Base64.encode64(Zlib::Deflate.deflate(text, Zlib::BEST_COMPRESSION)) + end + end + + def decode(yaml) + requiring_zlib do + YAML.load(Zlib::Inflate.inflate(Base64.decode64(yaml))) + end + end +end + +Puppet::Network::FormatHandler.create(:s, :mime => "text/plain", :extension => "txt") + +# A very low-weight format so it'll never get chosen automatically. +Puppet::Network::FormatHandler.create(:raw, :mime => "application/x-raw", :weight => 1) do + def intern_multiple(klass, text) + raise NotImplementedError + end + + def render_multiple(instances) + raise NotImplementedError + end + + # LAK:NOTE The format system isn't currently flexible enough to handle + # what I need to support raw formats just for individual instances (rather + # than both individual and collections), but we don't yet have enough data + # to make a "correct" design. + # So, we hack it so it works for singular but fail if someone tries it + # on plurals. + def supported?(klass) + true + end +end + +Puppet::Network::FormatHandler.create_serialized_formats(:pson, :weight => 10, :required_methods => [:render_method, :intern_method]) do + confine :true => Puppet.features.pson? + + def intern(klass, text) + data_to_instance(klass, PSON.parse(text)) + end + + def intern_multiple(klass, text) + PSON.parse(text).collect do |data| + data_to_instance(klass, data) + end + end + + # PSON monkey-patches Array, so this works. + def render_multiple(instances) + instances.to_pson + end + + # If they pass class information, we want to ignore it. By default, + # we'll include class information but we won't rely on it - we don't + # want class names to be required because we then can't change our + # internal class names, which is bad. + def data_to_instance(klass, data) + if data.is_a?(Hash) and d = data['data'] + data = d + end + return data if data.is_a?(klass) + klass.from_pson(data) + end +end + +# This is really only ever going to be used for Catalogs. +Puppet::Network::FormatHandler.create_serialized_formats(:dot, :required_methods => [:render_method]) diff --git a/mcollective/lib/puppet/network/handler.rb b/mcollective/lib/puppet/network/handler.rb new file mode 100644 index 000000000..3cad3872f --- /dev/null +++ b/mcollective/lib/puppet/network/handler.rb @@ -0,0 +1,50 @@ +require 'puppet/util/docs' +require 'puppet/util/subclass_loader' + +module Puppet::Network + # The base class for the different handlers. The handlers are each responsible + # for separate xmlrpc namespaces. + class Handler + extend Puppet::Util::Docs + + # This is so that the handlers can subclass just 'Handler', rather + # then having to specify the full class path. + Handler = self + attr_accessor :server, :local + + extend Puppet::Util::SubclassLoader + extend Puppet::Util + + handle_subclasses :handler, "puppet/network/handler" + + # Return the xmlrpc interface. + def self.interface + if defined?(@interface) + return @interface + else + raise Puppet::DevError, "Handler #{self} has no defined interface" + end + end + + # Set/Determine whether we're a client- or server-side handler. 
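Returning briefly to the b64_zlib_yaml format defined above: it is plain YAML, deflated and then base64-encoded. A standalone restatement of that round trip (the sample facts are invented and nothing here calls into the format object itself):

    require 'yaml'
    require 'zlib'
    require 'base64'

    facts   = { "hostname" => "agent01", "osfamily" => "Debian" }
    encoded = Base64.encode64(Zlib::Deflate.deflate(YAML.dump(facts), Zlib::BEST_COMPRESSION))
    decoded = YAML.load(Zlib::Inflate.inflate(Base64.decode64(encoded)))
    decoded == facts   # => true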
+ def self.side(side = nil) + if side + side = side.intern if side.is_a?(String) + raise ArgumentError, "Invalid side registration '#{side}' for #{self.name}" unless [:client, :server].include?(side) + @side = side + else + @side ||= :server + return @side + end + end + + # Create an empty init method with the same signature. + def initialize(hash = {}) + end + + def local? + self.local + end + end +end + diff --git a/mcollective/lib/puppet/network/handler/ca.rb b/mcollective/lib/puppet/network/handler/ca.rb new file mode 100644 index 000000000..1dabeee2f --- /dev/null +++ b/mcollective/lib/puppet/network/handler/ca.rb @@ -0,0 +1,61 @@ +require 'openssl' +require 'puppet' +require 'xmlrpc/server' +require 'puppet/network/handler' + +class Puppet::Network::Handler + class CA < Handler + attr_reader :ca + + desc "Provides an interface for signing CSRs. Accepts a CSR and returns + the CA certificate and the signed certificate, or returns nil if + the cert is not signed." + + @interface = XMLRPC::Service::Interface.new("puppetca") { |iface| + iface.add_method("array getcert(csr)") + } + + def initialize(hash = {}) + Puppet.settings.use(:main, :ssl, :ca) + + @ca = Puppet::SSL::CertificateAuthority.instance + end + + # our client sends us a csr, and we either store it for later signing, + # or we sign it right away + def getcert(csrtext, client = nil, clientip = nil) + csr = Puppet::SSL::CertificateRequest.from_s(csrtext) + hostname = csr.name + + unless @ca + Puppet.notice "Host #{hostname} asked for signing from non-CA master" + return "" + end + + # We used to save the public key, but it's basically unnecessary + # and it mucks with the permissions requirements. + + # first check to see if we already have a signed cert for the host + cert = Puppet::SSL::Certificate.find(hostname) + cacert = Puppet::SSL::Certificate.find(@ca.host.name) + + if cert + Puppet.info "Retrieving existing certificate for #{hostname}" + unless csr.content.public_key.to_s == cert.content.public_key.to_s + raise Puppet::Error, "Certificate request does not match existing certificate; run 'puppetca --clean #{hostname}'." + end + [cert.to_s, cacert.to_s] + else + csr.save + + # We determine whether we signed the csr by checking if there's a certificate for it + if cert = Puppet::SSL::Certificate.find(hostname) + [cert.to_s, cacert.to_s] + else + nil + end + end + end + end +end + diff --git a/mcollective/lib/puppet/network/handler/filebucket.rb b/mcollective/lib/puppet/network/handler/filebucket.rb new file mode 100755 index 000000000..a1b99fdf4 --- /dev/null +++ b/mcollective/lib/puppet/network/handler/filebucket.rb @@ -0,0 +1,53 @@ +require 'fileutils' +require 'digest/md5' +require 'puppet/external/base64' +require 'puppet/network/handler' +require 'xmlrpc/server' + +class Puppet::Network::Handler # :nodoc: + # Accept files and store them by md5 sum, returning the md5 sum back + # to the client. Alternatively, accept an md5 sum and return the + # associated content. + class FileBucket < Handler + desc "The interface to Puppet's FileBucket system. Can be used to store + files in and retrieve files from a filebucket." 
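The addfile and getfile methods that follow store content by its MD5 sum and base64-encode it for remote clients. A minimal restatement of that addressing scheme, with invented file contents (the handlers themselves go through Puppet::FileBucket::File rather than raw files):

    require 'digest/md5'
    require 'base64'

    contents = "server ntp.example.com\n"         # invented sample contents
    checksum = Digest::MD5.hexdigest(contents)    # the key the bucket hands back

    wire  = Base64.encode64(contents)             # what a remote client sends/receives
    plain = Base64.decode64(wire)
    plain == contents && checksum == Digest::MD5.hexdigest(plain)   # => true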
+ + @interface = XMLRPC::Service::Interface.new("puppetbucket") { |iface| + iface.add_method("string addfile(string, string)") + iface.add_method("string getfile(string)") + } + + Puppet::Util.logmethods(self, true) + attr_reader :name, :path + + def initialize(hash) + @path = hash[:Path] || Puppet[:bucketdir] + @name = "Filebucket[#{@path}]" + end + + # Accept a file from a client and store it by md5 sum, returning + # the sum. + def addfile(contents, path, client = nil, clientip = nil) + contents = Base64.decode64(contents) if client + bucket = Puppet::FileBucket::File.new(contents) + bucket.save + end + + # Return the contents associated with a given md5 sum. + def getfile(md5, client = nil, clientip = nil) + bucket = Puppet::FileBucket::File.find("md5:#{md5}") + contents = bucket.contents + + if client + return Base64.encode64(contents) + else + return contents + end + end + + def to_s + self.name + end + end +end + diff --git a/mcollective/lib/puppet/network/handler/fileserver.rb b/mcollective/lib/puppet/network/handler/fileserver.rb new file mode 100755 index 000000000..10e1c801d --- /dev/null +++ b/mcollective/lib/puppet/network/handler/fileserver.rb @@ -0,0 +1,728 @@ +require 'puppet' +require 'puppet/network/authstore' +require 'webrick/httpstatus' +require 'cgi' +require 'delegate' +require 'sync' +require 'puppet/network/handler' + +require 'puppet/network/handler' +require 'puppet/network/xmlrpc/server' +require 'puppet/file_serving' +require 'puppet/file_serving/metadata' + +class Puppet::Network::Handler + AuthStoreError = Puppet::AuthStoreError + class FileServerError < Puppet::Error; end + class FileServer < Handler + desc "The interface to Puppet's fileserving abilities." + + attr_accessor :local + + CHECKPARAMS = [:mode, :type, :owner, :group, :checksum] + + # Special filserver module for puppet's module system + MODULES = "modules" + PLUGINS = "plugins" + + @interface = XMLRPC::Service::Interface.new("fileserver") { |iface| + iface.add_method("string describe(string, string)") + iface.add_method("string list(string, string, boolean, array)") + iface.add_method("string retrieve(string, string)") + } + + def self.params + CHECKPARAMS.dup + end + + # If the configuration file exists, then create (if necessary) a LoadedFile + # object to manage it; else, return nil. + def configuration + # Short-circuit the default case. + return @configuration if defined?(@configuration) + + config_path = @passed_configuration_path || Puppet[:fileserverconfig] + return nil unless FileTest.exist?(config_path) + + # The file exists but we don't have a LoadedFile instance for it. + @configuration = Puppet::Util::LoadedFile.new(config_path) + end + + # Create our default mounts for modules and plugins. This is duplicated code, + # but I'm not really worried about that. + def create_default_mounts + @mounts = {} + Puppet.debug "No file server configuration file; autocreating #{MODULES} mount with default permissions" + mount = Mount.new(MODULES) + mount.allow("*") + @mounts[MODULES] = mount + + Puppet.debug "No file server configuration file; autocreating #{PLUGINS} mount with default permissions" + mount = PluginMount.new(PLUGINS) + mount.allow("*") + @mounts[PLUGINS] = mount + end + + # Describe a given file. This returns all of the manageable aspects + # of that file. + def describe(url, links = :follow, client = nil, clientip = nil) + links = links.intern if links.is_a? 
String + + mount, path = convert(url, client, clientip) + + mount.debug("Describing #{url} for #{client}") if client + + # use the mount to resolve the path for us. + return "" unless full_path = mount.file_path(path, client) + + metadata = Puppet::FileServing::Metadata.new(url, :path => full_path, :links => links) + + return "" unless metadata.exist? + + begin + metadata.collect + rescue => detail + puts detail.backtrace if Puppet[:trace] + Puppet.err detail + return "" + end + + metadata.attributes_with_tabs + end + + # Create a new fileserving module. + def initialize(hash = {}) + @mounts = {} + @files = {} + + @local = hash[:Local] + + @noreadconfig = true if hash[:Config] == false + + @passed_configuration_path = hash[:Config] + + if hash.include?(:Mount) + @passedconfig = true + raise Puppet::DevError, "Invalid mount hash #{hash[:Mount].inspect}" unless hash[:Mount].is_a?(Hash) + + hash[:Mount].each { |dir, name| + self.mount(dir, name) if FileTest.exists?(dir) + } + self.mount(nil, MODULES) + self.mount(nil, PLUGINS) + else + @passedconfig = false + if configuration + readconfig(false) # don't check the file the first time. + else + create_default_mounts + end + end + end + + # List a specific directory's contents. + def list(url, links = :ignore, recurse = false, ignore = false, client = nil, clientip = nil) + mount, path = convert(url, client, clientip) + + mount.debug "Listing #{url} for #{client}" if client + + return "" unless mount.path_exists?(path, client) + + desc = mount.list(path, recurse, ignore, client) + + if desc.length == 0 + mount.notice "Got no information on //#{mount}/#{path}" + return "" + end + + desc.collect { |sub| sub.join("\t") }.join("\n") + end + + def local? + self.local + end + + # Is a given mount available? + def mounted?(name) + @mounts.include?(name) + end + + # Mount a new directory with a name. + def mount(path, name) + if @mounts.include?(name) + if @mounts[name] != path + raise FileServerError, "#{@mounts[name].path} is already mounted at #{name}" + else + # it's already mounted; no problem + return + end + end + + # Let the mounts do their own error-checking. + @mounts[name] = Mount.new(name, path) + @mounts[name].info "Mounted #{path}" + + @mounts[name] + end + + # Retrieve a file from the local disk and pass it to the remote + # client. + def retrieve(url, links = :ignore, client = nil, clientip = nil) + links = links.intern if links.is_a? String + + mount, path = convert(url, client, clientip) + + mount.info "Sending #{url} to #{client}" if client + + unless mount.path_exists?(path, client) + mount.debug "#{mount} reported that #{path} does not exist" + return "" + end + + links = links.intern if links.is_a? String + + if links == :ignore and FileTest.symlink?(path) + mount.debug "I think that #{path} is a symlink and we're ignoring them" + return "" + end + + str = mount.read_file(path, client) + + if @local + return str + else + return CGI.escape(str) + end + end + + def umount(name) + @mounts.delete(name) if @mounts.include? name + end + + private + + def authcheck(file, mount, client, clientip) + # If we're local, don't bother passing in information. + if local? + client = nil + clientip = nil + end + unless mount.allowed?(client, clientip) + mount.warning "#{client} cannot access #{file}" + raise Puppet::AuthorizationError, "Cannot access #{mount}" + end + end + + # Take a URL and some client info and return a mount and relative + # path pair. 
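The readconfig method further below parses a fileserver.conf of roughly the following shape; mount names, paths and hosts are invented. Note that a [plugins] section without a path, as here, triggers the special PluginMount handling described in the code.

    # fileserver.conf (illustrative)
    [files]
        path  /etc/puppet/files
        allow *.example.com
        deny  *.evil.example.com

    [plugins]
        allow *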
+ # + def convert(url, client, clientip) + readconfig + + url = URI.unescape(url) + + mount, stub = splitpath(url, client) + + authcheck(url, mount, client, clientip) + + return mount, stub + end + + # Return the mount for the Puppet modules; allows file copying from + # the modules. + def modules_mount(module_name, client) + # Find our environment, if we have one. + unless hostname = (client || Facter.value("hostname")) + raise ArgumentError, "Could not find hostname" + end + env = (node = Puppet::Node.find(hostname)) ? node.environment : nil + + # And use the environment to look up the module. + (mod = Puppet::Node::Environment.new(env).module(module_name) and mod.files?) ? @mounts[MODULES].copy(mod.name, mod.file_directory) : nil + end + + # Read the configuration file. + def readconfig(check = true) + return if @noreadconfig + + return unless configuration + + return if check and ! @configuration.changed? + + newmounts = {} + begin + File.open(@configuration.file) { |f| + mount = nil + count = 1 + f.each { |line| + case line + when /^\s*#/; next # skip comments + when /^\s*$/; next # skip blank lines + when /\[([-\w]+)\]/ + name = $1 + raise FileServerError, "#{newmounts[name]} is already mounted as #{name} in #{@configuration.file}" if newmounts.include?(name) + mount = Mount.new(name) + newmounts[name] = mount + when /^\s*(\w+)\s+(.+)$/ + var = $1 + value = $2 + case var + when "path" + if mount.name == MODULES + Puppet.warning "The '#{mount.name}' module can not have a path. Ignoring attempt to set it" + else + begin + mount.path = value + rescue FileServerError => detail + Puppet.err "Removing mount #{mount.name}: #{detail}" + newmounts.delete(mount.name) + end + end + when "allow" + value.split(/\s*,\s*/).each { |val| + begin + mount.info "allowing #{val} access" + mount.allow(val) + rescue AuthStoreError => detail + puts detail.backtrace if Puppet[:trace] + + raise FileServerError.new( + detail.to_s, + + count, @configuration.file) + end + } + when "deny" + value.split(/\s*,\s*/).each { |val| + begin + mount.info "denying #{val} access" + mount.deny(val) + rescue AuthStoreError => detail + + raise FileServerError.new( + detail.to_s, + + count, @configuration.file) + end + } + else + raise FileServerError.new("Invalid argument '#{var}'", count, @configuration.file) + end + else + raise FileServerError.new("Invalid line '#{line.chomp}'", count, @configuration.file) + end + count += 1 + } + } + rescue Errno::EACCES => detail + Puppet.err "FileServer error: Cannot read #{@configuration}; cannot serve" + #raise Puppet::Error, "Cannot read #{@configuration}" + rescue Errno::ENOENT => detail + Puppet.err "FileServer error: '#{@configuration}' does not exist; cannot serve" + end + + unless newmounts[MODULES] + Puppet.debug "No #{MODULES} mount given; autocreating with default permissions" + mount = Mount.new(MODULES) + mount.allow("*") + newmounts[MODULES] = mount + end + + unless newmounts[PLUGINS] + Puppet.debug "No #{PLUGINS} mount given; autocreating with default permissions" + mount = PluginMount.new(PLUGINS) + mount.allow("*") + newmounts[PLUGINS] = mount + end + + unless newmounts[PLUGINS].valid? + Puppet.debug "No path given for #{PLUGINS} mount; creating a special PluginMount" + # We end up here if the user has specified access rules for + # the plugins mount, without specifying a path (which means + # they want to have the default behaviour for the mount, but + # special access control). 
So we need to move all the + # user-specified access controls into the new PluginMount + # object... + mount = PluginMount.new(PLUGINS) + # Yes, you're allowed to hate me for this. + + mount.instance_variable_set( + :@declarations, + + newmounts[PLUGINS].instance_variable_get(:@declarations) + ) + newmounts[PLUGINS] = mount + end + + # Verify each of the mounts are valid. + # We let the check raise an error, so that it can raise an error + # pointing to the specific problem. + newmounts.each { |name, mount| + raise FileServerError, "Invalid mount #{name}" unless mount.valid? + } + @mounts = newmounts + end + + # Split the path into the separate mount point and path. + def splitpath(dir, client) + # the dir is based on one of the mounts + # so first retrieve the mount path + mount = nil + path = nil + if dir =~ %r{/([-\w]+)} + # Strip off the mount name. + mount_name, path = dir.sub(%r{^/}, '').split(File::Separator, 2) + + unless mount = modules_mount(mount_name, client) + unless mount = @mounts[mount_name] + raise FileServerError, "Fileserver module '#{mount_name}' not mounted" + end + end + else + raise FileServerError, "Fileserver error: Invalid path '#{dir}'" + end + + if path.nil? or path == '' + path = '/' + elsif path + # Remove any double slashes that might have occurred + path = URI.unescape(path.gsub(/\/\//, "/")) + end + + return mount, path + end + + def to_s + "fileserver" + end + + # A simple class for wrapping mount points. Instances of this class + # don't know about the enclosing object; they're mainly just used for + # authorization. + class Mount < Puppet::Network::AuthStore + attr_reader :name + + @@syncs = {} + + @@files = {} + + Puppet::Util.logmethods(self, true) + + # Create a map for a specific client. + def clientmap(client) + { + "h" => client.sub(/\..*$/, ""), + "H" => client, + "d" => client.sub(/[^.]+\./, "") # domain name + } + end + + # Replace % patterns as appropriate. + def expand(path, client = nil) + # This map should probably be moved into a method. + map = nil + + if client + map = clientmap(client) + else + Puppet.notice "No client; expanding '#{path}' with local host" + # Else, use the local information + map = localmap + end + path.gsub(/%(.)/) do |v| + key = $1 + if key == "%" + "%" + else + map[key] || v + end + end + end + + # Do we have any patterns in our path, yo? + def expandable? + if defined?(@expandable) + @expandable + else + false + end + end + + # Return a fully qualified path, given a short path and + # possibly a client name. + def file_path(relative_path, node = nil) + full_path = path(node) + + unless full_path + p self + raise ArgumentError.new("Mounts without paths are not usable") unless full_path + end + + # If there's no relative path name, then we're serving the mount itself. + return full_path unless relative_path and relative_path != "/" + + File.join(full_path, relative_path) + end + + # Create out object. It must have a name. + def initialize(name, path = nil) + unless name =~ %r{^[-\w]+$} + raise FileServerError, "Invalid name format '#{name}'" + end + @name = name + + if path + self.path = path + else + @path = nil + end + + @files = {} + + super() + end + + def fileobj(path, links, client) + obj = nil + if obj = @files[file_path(path, client)] + # This can only happen in local fileserving, but it's an + # important one. It'd be nice if we didn't just set + # the check params every time, but I'm not sure it's worth + # the effort. 
+ obj[:audit] = CHECKPARAMS + else + + obj = Puppet::Type.type(:file).new( + + :name => file_path(path, client), + + :audit => CHECKPARAMS + ) + @files[file_path(path, client)] = obj + end + + if links == :manage + links = :follow + end + + # This, ah, might be completely redundant + obj[:links] = links unless obj[:links] == links + + obj + end + + # Read the contents of the file at the relative path given. + def read_file(relpath, client) + File.read(file_path(relpath, client)) + end + + # Cache this manufactured map, since if it's used it's likely + # to get used a lot. + def localmap + unless defined?(@@localmap) + @@localmap = { + "h" => Facter.value("hostname"), + "H" => [Facter.value("hostname"), + Facter.value("domain")].join("."), + "d" => Facter.value("domain") + } + end + @@localmap + end + + # Return the path as appropriate, expanding as necessary. + def path(client = nil) + if expandable? + return expand(@path, client) + else + return @path + end + end + + # Set the path. + def path=(path) + # FIXME: For now, just don't validate paths with replacement + # patterns in them. + if path =~ /%./ + # Mark that we're expandable. + @expandable = true + else + raise FileServerError, "#{path} does not exist" unless FileTest.exists?(path) + raise FileServerError, "#{path} is not a directory" unless FileTest.directory?(path) + raise FileServerError, "#{path} is not readable" unless FileTest.readable?(path) + @expandable = false + end + @path = path + end + + # Verify that the path given exists within this mount's subtree. + # + def path_exists?(relpath, client = nil) + File.exists?(file_path(relpath, client)) + end + + # Return the current values for the object. + def properties(obj) + obj.retrieve.inject({}) { |props, ary| props[ary[0].name] = ary[1]; props } + end + + # Retrieve a specific directory relative to a mount point. + # If they pass in a client, then expand as necessary. + def subdir(dir = nil, client = nil) + basedir = self.path(client) + + dirname = if dir + File.join(basedir, *dir.split("/")) + else + basedir + end + + dirname + end + + def sync(path) + @@syncs[path] ||= Sync.new + @@syncs[path] + end + + def to_s + "mount[#{@name}]" + end + + # Verify our configuration is valid. This should really check to + # make sure at least someone will be allowed, but, eh. + def valid? + if name == MODULES + return @path.nil? + else + return ! @path.nil? + end + end + + # Return a new mount with the same properties as +self+, except + # with a different name and path. + def copy(name, path) + result = self.clone + result.path = path + result.instance_variable_set(:@name, name) + result + end + + # List the contents of the relative path +relpath+ of this mount. + # + # +recurse+ is the number of levels to recurse into the tree, + # or false to provide no recursion or true if you just want to + # go for broke. + # + # +ignore+ is an array of filenames to ignore when traversing + # the list. + # + # The return value of this method is a complex nest of arrays, + # which describes a directory tree. Each file or directory is + # represented by an array, where the first element is the path + # of the file (relative to the root of the mount), and the + # second element is the type. A directory is represented by an + # array as well, where the first element is a "directory" array, + # while the remaining elements are other file or directory + # arrays. Confusing? Hell yes. As an added bonus, all names + # must start with a slash, because... 
well, I'm fairly certain + # a complete explanation would involve the words "crack pipe" + # and "bad batch". + # + def list(relpath, recurse, ignore, client = nil) + abspath = file_path(relpath, client) + if FileTest.exists?(abspath) + if FileTest.directory?(abspath) and recurse + return reclist(abspath, recurse, ignore) + else + return [["/", File.stat(abspath).ftype]] + end + end + nil + end + + def reclist(abspath, recurse, ignore) + require 'puppet/file_serving' + require 'puppet/file_serving/fileset' + if recurse.is_a?(Fixnum) + args = { :recurse => true, :recurselimit => recurse, :links => :follow } + else + args = { :recurse => recurse, :links => :follow } + end + args[:ignore] = ignore if ignore + fs = Puppet::FileServing::Fileset.new(abspath, args) + ary = fs.files.collect do |file| + if file == "." + file = "/" + else + file = File.join("/", file ) + end + stat = fs.stat(File.join(abspath, file)) + next if stat.nil? + [ file, stat.ftype ] + end + + ary.compact + end + + end + + # A special mount class specifically for the plugins mount -- just + # has some magic to effectively do a union mount of the 'plugins' + # directory of all modules. + # + class PluginMount < Mount + def path(client) + '' + end + + def mod_path_exists?(mod, relpath, client = nil) + ! mod.plugin(relpath).nil? + end + + def path_exists?(relpath, client = nil) + !valid_modules(client).find { |mod| mod.plugin(relpath) }.nil? + end + + def valid? + true + end + + def mod_file_path(mod, relpath, client = nil) + File.join(mod, PLUGINS, relpath) + end + + def file_path(relpath, client = nil) + return nil unless mod = valid_modules(client).find { |m| m.plugin(relpath) } + mod.plugin(relpath) + end + + # create a list of files by merging all modules + def list(relpath, recurse, ignore, client = nil) + result = [] + valid_modules(client).each do |mod| + if modpath = mod.plugin(relpath) + if FileTest.directory?(modpath) and recurse + ary = reclist(modpath, recurse, ignore) + ary ||= [] + result += ary + else + result += [["/", File.stat(modpath).ftype]] + end + end + end + result + end + + private + def valid_modules(client) + Puppet::Node::Environment.new.modules.find_all { |mod| mod.exist? } + end + + def add_to_filetree(f, filetree) + first, rest = f.split(File::SEPARATOR, 2) + end + end + end +end + diff --git a/mcollective/lib/puppet/network/handler/master.rb b/mcollective/lib/puppet/network/handler/master.rb new file mode 100644 index 000000000..9ea24f2ad --- /dev/null +++ b/mcollective/lib/puppet/network/handler/master.rb @@ -0,0 +1,87 @@ +require 'openssl' +require 'puppet' +require 'xmlrpc/server' +require 'yaml' +require 'puppet/network/handler' + +class Puppet::Network::Handler + class MasterError < Puppet::Error; end + class Master < Handler + desc "Puppet's configuration interface. Used for all interactions related to + generating client configurations." + + include Puppet::Util + + attr_accessor :ast + attr_reader :ca + + @interface = XMLRPC::Service::Interface.new("puppetmaster") { |iface| + iface.add_method("string getconfig(string)") + iface.add_method("int freshness()") + } + + # Tell a client whether there's a fresh config for it + def freshness(client = nil, clientip = nil) + # Always force a recompile. Newer clients shouldn't do this (as of April 2008). + Time.now.to_i + end + + def initialize(hash = {}) + args = {} + + @local = hash[:Local] + + args[:Local] = true + + # This is only used by the cfengine module, or if --loadclasses was + # specified in +puppet+. 
+ args[:Classes] = hash[:Classes] if hash.include?(:Classes) + end + + # Call our various handlers; this handler is getting deprecated. + def getconfig(facts, format = "marshal", client = nil, clientip = nil) + facts = decode_facts(facts) + + client ||= facts["hostname"] + + # Pass the facts to the fact handler + Puppet::Node::Facts.new(client, facts).save unless local? + + catalog = Puppet::Resource::Catalog.find(client) + + case format + when "yaml" + return CGI.escape(catalog.extract.to_yaml) + when "marshal" + return CGI.escape(Marshal.dump(catalog.extract)) + else + raise "Invalid markup format '#{format}'" + end + end + + # + def decode_facts(facts) + if @local + # we don't need to do anything, since we should already + # have raw objects + Puppet.debug "Our client is local" + else + Puppet.debug "Our client is remote" + + begin + facts = YAML.load(CGI.unescape(facts)) + rescue => detail + raise XMLRPC::FaultException.new( + 1, "Could not rebuild facts" + ) + end + end + + facts + end + + # Translate our configuration appropriately for sending back to a client. + def translate(config) + end + end +end diff --git a/mcollective/lib/puppet/network/handler/report.rb b/mcollective/lib/puppet/network/handler/report.rb new file mode 100755 index 000000000..5e3ee266d --- /dev/null +++ b/mcollective/lib/puppet/network/handler/report.rb @@ -0,0 +1,82 @@ +require 'puppet/util/instance_loader' +require 'puppet/reports' +require 'puppet/network/handler' +require 'xmlrpc/server' + +# A simple server for triggering a new run on a Puppet client. +class Puppet::Network::Handler + class Report < Handler + desc "Accepts a Puppet transaction report and processes it." + + @interface = XMLRPC::Service::Interface.new("puppetreports") { |iface| + iface.add_method("string report(array)") + } + + # Add a new report type. + def self.newreport(name, options = {}, &block) + Puppet.warning "The interface for registering report types has changed; use Puppet::Reports.register_report for report type #{name}" + Puppet::Reports.register_report(name, options, &block) + end + + def initialize(*args) + super + Puppet.settings.use(:main, :reporting, :metrics) + end + + # Accept a report from a client. + def report(report, client = nil, clientip = nil) + # Unescape the report + report = CGI.unescape(report) unless @local + + Puppet.info "Processing reports #{reports().join(", ")} for #{client}" + begin + process(report) + rescue => detail + Puppet.err "Could not process report for #{client}: #{detail}" + puts detail.backtrace if Puppet[:trace] + end + end + + private + + # Process the report using all of the existing hooks. + def process(yaml) + return if Puppet[:reports] == "none" + + # First convert the report to real objects + begin + report = YAML.load(yaml) + rescue => detail + Puppet.warning "Could not load report: #{detail}" + return + end + + # Used for those reports that accept yaml + client = report.host + + reports.each do |name| + if mod = Puppet::Reports.report(name) + # We have to use a dup because we're including a module in the + # report. + newrep = report.dup + begin + newrep.extend(mod) + newrep.process + rescue => detail + puts detail.backtrace if Puppet[:trace] + Puppet.err "Report #{name} failed: #{detail}" + end + else + Puppet.warning "No report named '#{name}'" + end + end + end + + # Handle the parsing of the reports attribute. 
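The reports helper just below turns the comma-separated reports setting into an array of report names. As a quick illustration of the strip-and-split it performs (the sample value is made up):

    value = "  store, http ,log  "
    value.gsub(/(^\s+)|(\s+$)/, '').split(/\s*,\s*/)
    # => ["store", "http", "log"]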
+ def reports + # LAK:NOTE See http://snurl.com/21zf8 [groups_google_com] + x = Puppet[:reports].gsub(/(^\s+)|(\s+$)/, '').split(/\s*,\s*/) + end + end +end + diff --git a/mcollective/lib/puppet/network/handler/runner.rb b/mcollective/lib/puppet/network/handler/runner.rb new file mode 100755 index 000000000..1bc62bcd9 --- /dev/null +++ b/mcollective/lib/puppet/network/handler/runner.rb @@ -0,0 +1,33 @@ +require 'puppet/run' +require 'puppet/network/handler' +require 'xmlrpc/server' + +class Puppet::Network::Handler + class MissingMasterError < RuntimeError; end # Cannot find the master client + # A simple server for triggering a new run on a Puppet client. + class Runner < Handler + desc "An interface for triggering client configuration runs." + + @interface = XMLRPC::Service::Interface.new("puppetrunner") { |iface| + iface.add_method("string run(string, string)") + } + + side :client + + # Run the client configuration right now, optionally specifying + # tags and whether to ignore schedules + def run(tags = nil, ignoreschedules = false, fg = true, client = nil, clientip = nil) + options = {} + options[:tags] = tags if tags + options[:ignoreschedules] = ignoreschedules if ignoreschedules + options[:background] = !fg + + runner = Puppet::Run.new(options) + + runner.run + + runner.status + end + end +end + diff --git a/mcollective/lib/puppet/network/handler/status.rb b/mcollective/lib/puppet/network/handler/status.rb new file mode 100644 index 000000000..df6215f9c --- /dev/null +++ b/mcollective/lib/puppet/network/handler/status.rb @@ -0,0 +1,18 @@ +require 'puppet/network/handler' +require 'xmlrpc/server' +class Puppet::Network::Handler + class Status < Handler + desc "A simple interface for testing Puppet connectivity." + + side :client + + @interface = XMLRPC::Service::Interface.new("status") { |iface| + iface.add_method("int status()") + } + + def status(client = nil, clientip = nil) + 1 + end + end +end + diff --git a/mcollective/lib/puppet/network/http.rb b/mcollective/lib/puppet/network/http.rb new file mode 100644 index 000000000..512055b63 --- /dev/null +++ b/mcollective/lib/puppet/network/http.rb @@ -0,0 +1,15 @@ +module Puppet::Network::HTTP + def self.server_class_by_type(kind) + case kind.to_sym + when :webrick + require 'puppet/network/http/webrick' + return Puppet::Network::HTTP::WEBrick + when :mongrel + raise ArgumentError, "Mongrel is not installed on this platform" unless Puppet.features.mongrel? + require 'puppet/network/http/mongrel' + return Puppet::Network::HTTP::Mongrel + else + raise ArgumentError, "Unknown HTTP server name [#{kind}]" + end + end +end diff --git a/mcollective/lib/puppet/network/http/api.rb b/mcollective/lib/puppet/network/http/api.rb new file mode 100644 index 000000000..8b1b747ac --- /dev/null +++ b/mcollective/lib/puppet/network/http/api.rb @@ -0,0 +1,4 @@ +require 'puppet/network/http' + +class Puppet::Network::HTTP::API +end diff --git a/mcollective/lib/puppet/network/http/api/v1.rb b/mcollective/lib/puppet/network/http/api/v1.rb new file mode 100644 index 000000000..219e0cb80 --- /dev/null +++ b/mcollective/lib/puppet/network/http/api/v1.rb @@ -0,0 +1,73 @@ +require 'puppet/network/http/api' + +module Puppet::Network::HTTP::API::V1 + # How we map http methods and the indirection name in the URI + # to an indirection method. 
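The METHOD_MAP table and helpers below translate an HTTP verb plus the singular or plural indirection name from the URI into an indirector method. A minimal sketch of that translation, assuming Puppet's lib directory is on the load path (the V1Probe class is purely illustrative):

    require 'puppet'
    require 'puppet/network/http/api/v1'

    class V1Probe
      include Puppet::Network::HTTP::API::V1
    end

    probe = V1Probe.new
    probe.indirection_method("GET", "catalog")    # => :find   (singular GET)
    probe.indirection_method("GET", "catalogs")   # => :search (plural GET)
    probe.indirection_method("PUT", "report")     # => :save
    # uri2indirection("GET", "/production/catalog/web01.example.com", {}) then
    # builds an indirector request for the "catalog" indirection using :find.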
+ METHOD_MAP = { + "GET" => { + :plural => :search, + :singular => :find + }, + "PUT" => { + :singular => :save + }, + "DELETE" => { + :singular => :destroy + }, + "HEAD" => { + :singular => :head + } + } + + def uri2indirection(http_method, uri, params) + environment, indirection, key = uri.split("/", 4)[1..-1] # the first field is always nil because of the leading slash + + raise ArgumentError, "The environment must be purely alphanumeric, not '#{environment}'" unless environment =~ /^\w+$/ + raise ArgumentError, "The indirection name must be purely alphanumeric, not '#{indirection}'" unless indirection =~ /^\w+$/ + + method = indirection_method(http_method, indirection) + + params[:environment] = environment + params.delete(:bucket_path) + + raise ArgumentError, "No request key specified in #{uri}" if key == "" or key.nil? + + key = URI.unescape(key) + + Puppet::Indirector::Request.new(indirection, method, key, params) + end + + def indirection2uri(request) + indirection = request.method == :search ? pluralize(request.indirection_name.to_s) : request.indirection_name.to_s + "/#{request.environment.to_s}/#{indirection}/#{request.escaped_key}#{request.query_string}" + end + + def indirection_method(http_method, indirection) + raise ArgumentError, "No support for http method #{http_method}" unless METHOD_MAP[http_method] + + unless method = METHOD_MAP[http_method][plurality(indirection)] + raise ArgumentError, "No support for plural #{http_method} operations" + end + + method + end + + def pluralize(indirection) + return(indirection == "status" ? "statuses" : indirection + "s") + end + + def plurality(indirection) + # NOTE This specific hook for facts is ridiculous, but it's a *many*-line + # fix to not need this, and our goal is to move away from the complication + # that leads to the fix being too long. + return :singular if indirection == "facts" + return :singular if indirection == "status" + return :plural if indirection == "inventory" + + result = (indirection =~ /s$|_search$/) ? :plural : :singular + + indirection.sub!(/s$|_search$|es$/, '') + + result + end +end diff --git a/mcollective/lib/puppet/network/http/compression.rb b/mcollective/lib/puppet/network/http/compression.rb new file mode 100644 index 000000000..c8d001169 --- /dev/null +++ b/mcollective/lib/puppet/network/http/compression.rb @@ -0,0 +1,114 @@ +require 'puppet/network/http' + +module Puppet::Network::HTTP::Compression + + # this module function allows to use the right underlying + # methods depending on zlib presence + def module + return(Puppet.features.zlib? ? Active : None) + end + module_function :module + + module Active + require 'zlib' + require 'stringio' + + # return an uncompressed body if the response has been + # compressed + def uncompress_body(response) + case response['content-encoding'] + when 'gzip' + return Zlib::GzipReader.new(StringIO.new(response.body)).read + when 'deflate' + return Zlib::Inflate.new.inflate(response.body) + when nil, 'identity' + return response.body + else + raise Net::HTTPError.new("Unknown content encoding - #{response['content-encoding']}", response) + end + end + + def uncompress(response) + raise Net::HTTPError.new("No block passed") unless block_given? 
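As an aside on the gzip branch of uncompress_body above, the same round trip can be reproduced with Ruby's bundled zlib; the payload here is only an example:

    require 'zlib'
    require 'stringio'

    buffer = StringIO.new
    Zlib::GzipWriter.wrap(buffer) { |gz| gz.write("hello") }    # gzip-compress
    Zlib::GzipReader.new(StringIO.new(buffer.string)).read      # => "hello"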
+ + case response['content-encoding'] + when 'gzip','deflate' + uncompressor = ZlibAdapter.new + when nil, 'identity' + uncompressor = IdentityAdapter.new + else + raise Net::HTTPError.new("Unknown content encoding - #{response['content-encoding']}", response) + end + + yield uncompressor + + uncompressor.close + end + + def add_accept_encoding(headers={}) + headers['accept-encoding'] = 'gzip; q=1.0, deflate; q=1.0; identity' if Puppet.settings[:http_compression] + headers + end + + # This adapters knows how to uncompress both 'zlib' stream (the deflate algorithm from Content-Encoding) + # and GZip streams. + class ZlibAdapter + def initialize + # Create an inflater that knows to parse GZip streams and zlib streams. + # This uses a property of the C Zlib library, documented as follow: + # windowBits can also be greater than 15 for optional gzip decoding. Add + # 32 to windowBits to enable zlib and gzip decoding with automatic header + # detection, or add 16 to decode only the gzip format (the zlib format will + # return a Z_DATA_ERROR). If a gzip stream is being decoded, strm->adler is + # a crc32 instead of an adler32. + @uncompressor = Zlib::Inflate.new(15 + 32) + @first = true + end + + def uncompress(chunk) + out = @uncompressor.inflate(chunk) + @first = false + return out + rescue Zlib::DataError => z + # it can happen that we receive a raw deflate stream + # which might make our inflate throw a data error. + # in this case, we try with a verbatim (no header) + # deflater. + @uncompressor = Zlib::Inflate.new + if @first then + @first = false + retry + end + raise + end + + def close + @uncompressor.finish + @uncompressor.close + end + end + end + + module None + def uncompress_body(response) + response.body + end + + def add_accept_encoding(headers) + headers + end + + def uncompress(response) + yield IdentityAdapter.new + end + end + + class IdentityAdapter + def uncompress(chunk) + chunk + end + + def close + end + end +end diff --git a/mcollective/lib/puppet/network/http/handler.rb b/mcollective/lib/puppet/network/http/handler.rb new file mode 100644 index 000000000..e192613ad --- /dev/null +++ b/mcollective/lib/puppet/network/http/handler.rb @@ -0,0 +1,244 @@ +module Puppet::Network::HTTP +end + +require 'puppet/network/http/api/v1' +require 'puppet/network/rest_authorization' +require 'puppet/network/rights' +require 'resolv' + +module Puppet::Network::HTTP::Handler + include Puppet::Network::HTTP::API::V1 + include Puppet::Network::RestAuthorization + + attr_reader :server, :handler + + # Retrieve the accept header from the http request. + def accept_header(request) + raise NotImplementedError + end + + # Retrieve the Content-Type header from the http request. + def content_type_header(request) + raise NotImplementedError + end + + # Which format to use when serializing our response or interpreting the request. + # IF the client provided a Content-Type use this, otherwise use the Accept header + # and just pick the first value. + def format_to_use(request) + unless header = accept_header(request) + raise ArgumentError, "An Accept header must be provided to pick the right format" + end + + format = nil + header.split(/,\s*/).each do |name| + next unless format = Puppet::Network::FormatHandler.format(name) + next unless format.suitable? 
+ return format + end + + raise "No specified acceptable formats (#{header}) are functional on this machine" + end + + def request_format(request) + if header = content_type_header(request) + header.gsub!(/\s*;.*$/,'') # strip any charset + format = Puppet::Network::FormatHandler.mime(header) + raise "Client sent a mime-type (#{header}) that doesn't correspond to a format we support" if format.nil? + return format.name.to_s if format.suitable? + end + + raise "No Content-Type header was received, it isn't possible to unserialize the request" + end + + def format_to_mime(format) + format.is_a?(Puppet::Network::Format) ? format.mime : format + end + + def initialize_for_puppet(server) + @server = server + end + + # handle an HTTP request + def process(request, response) + indirection_request = uri2indirection(http_method(request), path(request), params(request)) + + check_authorization(indirection_request) + + send("do_#{indirection_request.method}", indirection_request, request, response) + rescue SystemExit,NoMemoryError + raise + rescue Exception => e + return do_exception(response, e) + end + + # Set the response up, with the body and status. + def set_response(response, body, status = 200) + raise NotImplementedError + end + + # Set the specified format as the content type of the response. + def set_content_type(response, format) + raise NotImplementedError + end + + def do_exception(response, exception, status=400) + if exception.is_a?(Puppet::Network::AuthorizationError) + # make sure we return the correct status code + # for authorization issues + status = 403 if status == 400 + end + if exception.is_a?(Exception) + puts exception.backtrace if Puppet[:trace] + Puppet.err(exception) + end + set_content_type(response, "text/plain") + set_response(response, exception.to_s, status) + end + + # Execute our find. + def do_find(indirection_request, request, response) + unless result = indirection_request.model.find(indirection_request.key, indirection_request.to_hash) + Puppet.info("Could not find #{indirection_request.indirection_name} for '#{indirection_request.key}'") + return do_exception(response, "Could not find #{indirection_request.indirection_name} #{indirection_request.key}", 404) + end + + # The encoding of the result must include the format to use, + # and it needs to be used for both the rendering and as + # the content type. + format = format_to_use(request) + set_content_type(response, format) + + if result.respond_to?(:render) + set_response(response, result.render(format)) + else + set_response(response, result) + end + end + + # Execute our head. + def do_head(indirection_request, request, response) + unless indirection_request.model.head(indirection_request.key, indirection_request.to_hash) + Puppet.info("Could not find #{indirection_request.indirection_name} for '#{indirection_request.key}'") + return do_exception(response, "Could not find #{indirection_request.indirection_name} #{indirection_request.key}", 404) + end + + # No need to set a response because no response is expected from a + # HEAD request. All we need to do is not die. + end + + # Execute our search. + def do_search(indirection_request, request, response) + result = indirection_request.model.search(indirection_request.key, indirection_request.to_hash) + + if result.nil? 
+ return do_exception(response, "Could not find instances in #{indirection_request.indirection_name} with '#{indirection_request.key}'", 404) + end + + format = format_to_use(request) + set_content_type(response, format) + + set_response(response, indirection_request.model.render_multiple(format, result)) + end + + # Execute our destroy. + def do_destroy(indirection_request, request, response) + result = indirection_request.model.destroy(indirection_request.key, indirection_request.to_hash) + + return_yaml_response(response, result) + end + + # Execute our save. + def do_save(indirection_request, request, response) + data = body(request).to_s + raise ArgumentError, "No data to save" if !data or data.empty? + + format = request_format(request) + obj = indirection_request.model.convert_from(format, data) + result = save_object(indirection_request, obj) + return_yaml_response(response, result) + end + + # resolve node name from peer's ip address + # this is used when the request is unauthenticated + def resolve_node(result) + begin + return Resolv.getname(result[:ip]) + rescue => detail + Puppet.err "Could not resolve #{result[:ip]}: #{detail}" + end + result[:ip] + end + + private + + def return_yaml_response(response, body) + set_content_type(response, Puppet::Network::FormatHandler.format("yaml")) + set_response(response, body.to_yaml) + end + + # LAK:NOTE This has to be here for testing; it's a stub-point so + # we keep infinite recursion from happening. + def save_object(ind_request, object) + object.save(ind_request.key) + end + + def get?(request) + http_method(request) == 'GET' + end + + def put?(request) + http_method(request) == 'PUT' + end + + def delete?(request) + http_method(request) == 'DELETE' + end + + # methods to be overridden by the including web server class + + def http_method(request) + raise NotImplementedError + end + + def path(request) + raise NotImplementedError + end + + def request_key(request) + raise NotImplementedError + end + + def body(request) + raise NotImplementedError + end + + def params(request) + raise NotImplementedError + end + + def decode_params(params) + params.inject({}) do |result, ary| + param, value = ary + next result if param.nil? || param.empty? + + param = param.to_sym + + # These shouldn't be allowed to be set by clients + # in the query string, for security reasons. + next result if param == :node + next result if param == :ip + value = CGI.unescape(value) + if value =~ /^---/ + value = YAML.load(value) + else + value = true if value == "true" + value = false if value == "false" + value = Integer(value) if value =~ /^\d+$/ + value = value.to_f if value =~ /^\d+\.\d+$/ + end + result[param] = value + result + end + end +end diff --git a/mcollective/lib/puppet/network/http/mongrel.rb b/mcollective/lib/puppet/network/http/mongrel.rb new file mode 100644 index 000000000..2a638b229 --- /dev/null +++ b/mcollective/lib/puppet/network/http/mongrel.rb @@ -0,0 +1,55 @@ +require 'mongrel' if Puppet.features.mongrel? + +require 'puppet/network/http/mongrel/rest' + +class Puppet::Network::HTTP::Mongrel + def initialize(args = {}) + @listening = false + end + + def listen(args = {}) + raise ArgumentError, ":protocols must be specified." if !args[:protocols] or args[:protocols].empty? + raise ArgumentError, ":address must be specified." unless args[:address] + raise ArgumentError, ":port must be specified." unless args[:port] + raise "Mongrel server is already listening" if listening? 
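Stepping back to decode_params in the shared handler code above: query-string values arrive as strings and are coerced before use. A standalone restatement of those rules (the coerce helper is hypothetical, not part of the patch):

    require 'cgi'
    require 'yaml'

    # Mirrors the coercions performed by decode_params.
    def coerce(value)
      value = CGI.unescape(value)
      return YAML.load(value) if value =~ /^---/
      return true  if value == "true"
      return false if value == "false"
      return Integer(value) if value =~ /^\d+$/
      return value.to_f     if value =~ /^\d+\.\d+$/
      value
    end

    coerce("true")    # => true
    coerce("25")      # => 25
    coerce("2.5")     # => 2.5
    coerce("web01")   # => "web01"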
+ + @protocols = args[:protocols] + @xmlrpc_handlers = args[:xmlrpc_handlers] + @server = Mongrel::HttpServer.new(args[:address], args[:port]) + setup_handlers + + @listening = true + @server.run + end + + def unlisten + raise "Mongrel server is not listening" unless listening? + @server.stop + @server = nil + @listening = false + end + + def listening? + @listening + end + + private + + def setup_handlers + # Register our REST support at / + klass = class_for_protocol(:rest) + @server.register('/', klass.new(:server => @server)) + + setup_xmlrpc_handlers if @protocols.include?(:xmlrpc) and ! @xmlrpc_handlers.empty? + end + + # Use our existing code to provide the xmlrpc backward compatibility. + def setup_xmlrpc_handlers + @server.register('/RPC2', Puppet::Network::HTTPServer::Mongrel.new(@xmlrpc_handlers)) + end + + def class_for_protocol(protocol) + return Puppet::Network::HTTP::MongrelREST if protocol.to_sym == :rest + raise ArgumentError, "Unknown protocol [#{protocol}]." + end +end diff --git a/mcollective/lib/puppet/network/http/mongrel/rest.rb b/mcollective/lib/puppet/network/http/mongrel/rest.rb new file mode 100644 index 000000000..7ef13f046 --- /dev/null +++ b/mcollective/lib/puppet/network/http/mongrel/rest.rb @@ -0,0 +1,92 @@ +require 'puppet/network/http/handler' + +class Puppet::Network::HTTP::MongrelREST < Mongrel::HttpHandler + + include Puppet::Network::HTTP::Handler + + ACCEPT_HEADER = "HTTP_ACCEPT".freeze # yay, zed's a crazy-man + + def initialize(args={}) + super() + initialize_for_puppet(args) + end + + def accept_header(request) + request.params[ACCEPT_HEADER] + end + + def content_type_header(request) + request.params["HTTP_CONTENT_TYPE"] + end + + # which HTTP verb was used in this request + def http_method(request) + request.params[Mongrel::Const::REQUEST_METHOD] + end + + # Return the query params for this request. We had to expose this method for + # testing purposes. + def params(request) + params = Mongrel::HttpRequest.query_parse(request.params["QUERY_STRING"]) + params = decode_params(params) + params.merge(client_info(request)) + end + + # what path was requested? + def path(request) + # LAK:NOTE See http://snurl.com/21zf8 [groups_google_com] + #x = '/' + request.params[Mongrel::Const::REQUEST_PATH] + request.params[Mongrel::Const::REQUEST_PATH] + end + + # return the request body + def body(request) + request.body.read + end + + def set_content_type(response, format) + response.header['Content-Type'] = format_to_mime(format) + end + + # produce the body of the response + def set_response(response, result, status = 200) + # Set the 'reason' (or 'message', as it's called in Webrick), when + # we have a failure, unless we're on a version of mongrel that doesn't + # support this. + if status < 300 + unless result.is_a?(File) + response.start(status) { |head, body| body.write(result) } + else + response.start(status) { |head, body| } + response.send_status(result.stat.size) + response.send_header + response.send_file(result.path) + end + else + begin + response.start(status,false,result) { |head, body| body.write(result) } + rescue ArgumentError + response.start(status) { |head, body| body.write(result) } + end + end + end + + def client_info(request) + result = {} + params = request.params + result[:ip] = params["HTTP_X_FORWARDED_FOR"] ? 
params["HTTP_X_FORWARDED_FOR"].split(',').last.strip : params["REMOTE_ADDR"] + + # JJM #906 The following dn.match regular expression is forgiving + # enough to match the two Distinguished Name string contents + # coming from Apache, Pound or other reverse SSL proxies. + if dn = params[Puppet[:ssl_client_header]] and dn_matchdata = dn.match(/^.*?CN\s*=\s*(.*)/) + result[:node] = dn_matchdata[1].to_str + result[:authenticated] = (params[Puppet[:ssl_client_verify_header]] == 'SUCCESS') + else + result[:node] = resolve_node(result) + result[:authenticated] = false + end + + result + end +end diff --git a/mcollective/lib/puppet/network/http/rack.rb b/mcollective/lib/puppet/network/http/rack.rb new file mode 100644 index 000000000..5b4ef7e1c --- /dev/null +++ b/mcollective/lib/puppet/network/http/rack.rb @@ -0,0 +1,65 @@ +require 'rack' +require 'rack/request' +require 'rack/response' + +require 'puppet/network/http' +require 'puppet/network/http/rack/rest' +require 'puppet/network/http/rack/xmlrpc' + +# An rack application, for running the Puppet HTTP Server. +class Puppet::Network::HTTP::Rack + + def initialize(args) + raise ArgumentError, ":protocols must be specified." if !args[:protocols] or args[:protocols].empty? + protocols = args[:protocols] + + # Always prepare a REST handler + @rest_http_handler = Puppet::Network::HTTP::RackREST.new + protocols.delete :rest + + # Prepare the XMLRPC handler, for backward compatibility (if requested) + @xmlrpc_path = '/RPC2' + if args[:protocols].include?(:xmlrpc) + raise ArgumentError, "XMLRPC was requested, but no handlers were given" if !args.include?(:xmlrpc_handlers) + + @xmlrpc_http_handler = Puppet::Network::HTTP::RackXMLRPC.new(args[:xmlrpc_handlers]) + protocols.delete :xmlrpc + end + + raise ArgumentError, "there were unknown :protocols specified." if !protocols.empty? + end + + # The real rack application (which needs to respond to call). + # The work we need to do, roughly is: + # * Read request (from env) and prepare a response + # * Route the request to the correct handler + # * Return the response (in rack-format) to our caller. + def call(env) + request = Rack::Request.new(env) + response = Rack::Response.new + Puppet.debug 'Handling request: %s %s' % [request.request_method, request.fullpath] + + # if we shall serve XMLRPC, have /RPC2 go to the xmlrpc handler + if @xmlrpc_http_handler and @xmlrpc_path == request.path_info[0, @xmlrpc_path.size] + handler = @xmlrpc_http_handler + else + # everything else is handled by the new REST handler + handler = @rest_http_handler + end + + begin + handler.process(request, response) + rescue => detail + # Send a Status 500 Error on unhandled exceptions. 
+ response.status = 500 + response['Content-Type'] = 'text/plain' + response.write 'Internal Server Error: "%s"' % detail.message + # log what happened + Puppet.err "Puppet Server (Rack): Internal Server Error: Unhandled Exception: \"%s\"" % detail.message + Puppet.err "Backtrace:" + detail.backtrace.each { |line| Puppet.err " > #{line}" } + end + response.finish + end +end + diff --git a/mcollective/lib/puppet/network/http/rack/httphandler.rb b/mcollective/lib/puppet/network/http/rack/httphandler.rb new file mode 100644 index 000000000..c54062357 --- /dev/null +++ b/mcollective/lib/puppet/network/http/rack/httphandler.rb @@ -0,0 +1,16 @@ +require 'openssl' +require 'puppet/ssl/certificate' + +class Puppet::Network::HTTP::RackHttpHandler + + def initialize + end + + # do something useful with request (a Rack::Request) and use + # response to fill your Rack::Response + def process(request, response) + raise NotImplementedError, "Your RackHttpHandler subclass is supposed to override service(request)" + end + +end + diff --git a/mcollective/lib/puppet/network/http/rack/rest.rb b/mcollective/lib/puppet/network/http/rack/rest.rb new file mode 100644 index 000000000..602927a78 --- /dev/null +++ b/mcollective/lib/puppet/network/http/rack/rest.rb @@ -0,0 +1,100 @@ +require 'puppet/network/http/handler' +require 'puppet/network/http/rack/httphandler' + +class Puppet::Network::HTTP::RackREST < Puppet::Network::HTTP::RackHttpHandler + + include Puppet::Network::HTTP::Handler + + HEADER_ACCEPT = 'HTTP_ACCEPT'.freeze + ContentType = 'Content-Type'.freeze + + CHUNK_SIZE = 8192 + + class RackFile + def initialize(file) + @file = file + end + + def each + while chunk = @file.read(CHUNK_SIZE) + yield chunk + end + end + + def close + @file.close + end + end + + def initialize(args={}) + super() + initialize_for_puppet(args) + end + + def set_content_type(response, format) + response[ContentType] = format_to_mime(format) + end + + # produce the body of the response + def set_response(response, result, status = 200) + response.status = status + unless result.is_a?(File) + response.write result + else + response["Content-Length"] = result.stat.size.to_s + response.body = RackFile.new(result) + end + end + + # Retrieve the accept header from the http request. + def accept_header(request) + request.env[HEADER_ACCEPT] + end + + # Retrieve the accept header from the http request. + def content_type_header(request) + request.content_type + end + + # Return which HTTP verb was used in this request. + def http_method(request) + request.request_method + end + + # Return the query params for this request. + def params(request) + result = decode_params(request.params) + result.merge(extract_client_info(request)) + end + + # what path was requested? (this is, without any query parameters) + def path(request) + request.path + end + + # return the request body + # request.body has some limitiations, so we need to concat it back + # into a regular string, which is something puppet can use. + def body(request) + request.body.read + end + + def extract_client_info(request) + result = {} + result[:ip] = request.ip + + # if we find SSL info in the headers, use them to get a hostname. + # try this with :ssl_client_header, which defaults should work for + # Apache with StdEnvVars. 
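Stepping back to the Rack application defined above: it is a plain object whose #call routes /RPC2 to the XMLRPC handler and everything else to REST, so it can be mounted straight from a rackup file. A minimal config.ru-style sketch (the handler list and the use of :status are assumptions, not taken from this patch):

    # config.ru (sketch)
    require 'puppet'
    require 'puppet/network/http/rack'

    app = Puppet::Network::HTTP::Rack.new(
      :protocols       => [:rest, :xmlrpc],
      :xmlrpc_handlers => [:status]
    )
    run app   # Rack invokes app.call(env) for every request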
+ if dn = request.env[Puppet[:ssl_client_header]] and dn_matchdata = dn.match(/^.*?CN\s*=\s*(.*)/) + result[:node] = dn_matchdata[1].to_str + result[:authenticated] = (request.env[Puppet[:ssl_client_verify_header]] == 'SUCCESS') + else + result[:node] = resolve_node(result) + result[:authenticated] = false + end + + result + end + +end diff --git a/mcollective/lib/puppet/network/http/rack/xmlrpc.rb b/mcollective/lib/puppet/network/http/rack/xmlrpc.rb new file mode 100644 index 000000000..f75342783 --- /dev/null +++ b/mcollective/lib/puppet/network/http/rack/xmlrpc.rb @@ -0,0 +1,65 @@ +require 'puppet/network/http/rack/httphandler' +require 'puppet/network/xmlrpc/server' +require 'resolv' + +class Puppet::Network::HTTP::RackXMLRPC < Puppet::Network::HTTP::RackHttpHandler + def initialize(handlers) + @xmlrpc_server = Puppet::Network::XMLRPCServer.new + handlers.each do |name| + Puppet.debug " -> register xmlrpc namespace #{name}" + unless handler = Puppet::Network::Handler.handler(name) + raise ArgumentError, "Invalid XMLRPC handler #{name}" + end + @xmlrpc_server.add_handler(handler.interface, handler.new({})) + end + super() + end + + def process(request, response) + # errors are sent as text/plain + response['Content-Type'] = 'text/plain' + if not request.post? + response.status = 405 + response.write 'Method Not Allowed' + return + end + if request.media_type != "text/xml" + response.status = 400 + response.write 'Bad Request' + return + end + + # get auth/certificate data + client_request = build_client_request(request) + + response_body = @xmlrpc_server.process(request.body.read, client_request) + + response.status = 200 + response['Content-Type'] = 'text/xml; charset=utf-8' + response.write response_body + end + + def build_client_request(request) + ip = request.ip + + # if we find SSL info in the headers, use them to get a hostname. + # try this with :ssl_client_header, which defaults should work for + # Apache with StdEnvVars. + if dn = request.env[Puppet[:ssl_client_header]] and dn_matchdata = dn.match(/^.*?CN\s*=\s*(.*)/) + node = dn_matchdata[1].to_str + authenticated = (request.env[Puppet[:ssl_client_verify_header]] == 'SUCCESS') + else + begin + node = Resolv.getname(ip) + rescue => detail + Puppet.err "Could not resolve #{ip}: #{detail}" + node = "unknown" + end + authenticated = false + end + + Puppet::Network::ClientRequest.new(node, ip, authenticated) + end + +end + diff --git a/mcollective/lib/puppet/network/http/webrick.rb b/mcollective/lib/puppet/network/http/webrick.rb new file mode 100644 index 000000000..8ed0b28ca --- /dev/null +++ b/mcollective/lib/puppet/network/http/webrick.rb @@ -0,0 +1,139 @@ +require 'webrick' +require 'webrick/https' +require 'puppet/network/http/webrick/rest' +require 'puppet/network/xmlrpc/webrick_servlet' +require 'thread' + +require 'puppet/ssl/certificate' +require 'puppet/ssl/certificate_revocation_list' + +class Puppet::Network::HTTP::WEBrick + def initialize(args = {}) + @listening = false + @mutex = Mutex.new + end + + def self.class_for_protocol(protocol) + return Puppet::Network::HTTP::WEBrickREST if protocol.to_sym == :rest + raise "Unknown protocol [#{protocol}]." + end + + def listen(args = {}) + raise ArgumentError, ":protocols must be specified." if !args[:protocols] or args[:protocols].empty? + raise ArgumentError, ":address must be specified." unless args[:address] + raise ArgumentError, ":port must be specified." 
unless args[:port] + + @protocols = args[:protocols] + @xmlrpc_handlers = args[:xmlrpc_handlers] + + arguments = {:BindAddress => args[:address], :Port => args[:port]} + arguments.merge!(setup_logger) + arguments.merge!(setup_ssl) + + @server = WEBrick::HTTPServer.new(arguments) + @server.listeners.each { |l| l.start_immediately = false } + + setup_handlers + + @mutex.synchronize do + raise "WEBrick server is already listening" if @listening + @listening = true + @thread = Thread.new { + @server.start { |sock| + raise "Client disconnected before connection could be established" unless IO.select([sock],nil,nil,0.1) + sock.accept + @server.run(sock) + } + } + sleep 0.1 until @server.status == :Running + end + end + + def unlisten + @mutex.synchronize do + raise "WEBrick server is not listening" unless @listening + @server.shutdown + @thread.join + @server = nil + @listening = false + end + end + + def listening? + @mutex.synchronize do + @listening + end + end + + # Configure our http log file. + def setup_logger + # Make sure the settings are all ready for us. + Puppet.settings.use(:main, :ssl, Puppet[:name]) + + if Puppet.run_mode.master? + file = Puppet[:masterhttplog] + else + file = Puppet[:httplog] + end + + # open the log manually to prevent file descriptor leak + file_io = ::File.open(file, "a+") + file_io.sync + file_io.fcntl(Fcntl::F_SETFD, Fcntl::FD_CLOEXEC) + + args = [file_io] + args << WEBrick::Log::DEBUG if Puppet::Util::Log.level == :debug + + logger = WEBrick::Log.new(*args) + return :Logger => logger, :AccessLog => [ + [logger, WEBrick::AccessLog::COMMON_LOG_FORMAT ], + [logger, WEBrick::AccessLog::REFERER_LOG_FORMAT ] + ] + end + + # Add all of the ssl cert information. + def setup_ssl + results = {} + + # Get the cached copy. We know it's been generated, too. + host = Puppet::SSL::Host.localhost + + raise Puppet::Error, "Could not retrieve certificate for #{host.name} and not running on a valid certificate authority" unless host.certificate + + results[:SSLPrivateKey] = host.key.content + results[:SSLCertificate] = host.certificate.content + results[:SSLStartImmediately] = true + results[:SSLEnable] = true + + raise Puppet::Error, "Could not find CA certificate" unless Puppet::SSL::Certificate.find(Puppet::SSL::CA_NAME) + + results[:SSLCACertificateFile] = Puppet[:localcacert] + results[:SSLVerifyClient] = OpenSSL::SSL::VERIFY_PEER + + results[:SSLCertificateStore] = host.ssl_store + + results + end + + private + + def setup_handlers + # Set up the new-style protocols. + klass = self.class.class_for_protocol(:rest) + @server.mount('/', klass, :this_value_is_apparently_necessary_but_unused) + + # And then set up xmlrpc, if configured. + @server.mount("/RPC2", xmlrpc_servlet) if @protocols.include?(:xmlrpc) and ! @xmlrpc_handlers.empty? + end + + # Create our xmlrpc servlet, which provides backward compatibility. 
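Judging from the listen signature above, the WEBrick front end is driven roughly as follows. This is a hedged sketch: it assumes a host with Puppet SSL certificates in place, since setup_ssl raises without a local certificate, and the port and handler names are examples.

    require 'puppet'
    require 'puppet/network/http/webrick'

    server = Puppet::Network::HTTP::WEBrick.new
    server.listen(
      :address         => "0.0.0.0",
      :port            => 8140,
      :protocols       => [:rest, :xmlrpc],
      :xmlrpc_handlers => [:status]
    )
    # ... later
    server.unlisten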
+ def xmlrpc_servlet + handlers = @xmlrpc_handlers.collect { |handler| + unless hclass = Puppet::Network::Handler.handler(handler) + raise "Invalid xmlrpc handler #{handler}" + end + hclass.new({}) + } + Puppet::Network::XMLRPC::WEBrickServlet.new handlers + end +end diff --git a/mcollective/lib/puppet/network/http/webrick/rest.rb b/mcollective/lib/puppet/network/http/webrick/rest.rb new file mode 100644 index 000000000..d5c146d88 --- /dev/null +++ b/mcollective/lib/puppet/network/http/webrick/rest.rb @@ -0,0 +1,81 @@ +require 'puppet/network/http/handler' +require 'resolv' +require 'webrick' + +class Puppet::Network::HTTP::WEBrickREST < WEBrick::HTTPServlet::AbstractServlet + + include Puppet::Network::HTTP::Handler + + def initialize(server, handler) + raise ArgumentError, "server is required" unless server + super(server) + initialize_for_puppet(:server => server, :handler => handler) + end + + # Retrieve the request parameters, including authentication information. + def params(request) + result = request.query + result = decode_params(result) + result.merge(client_information(request)) + end + + # WEBrick uses a service method to respond to requests. Simply delegate to the handler response method. + def service(request, response) + process(request, response) + end + + def accept_header(request) + request["accept"] + end + + def content_type_header(request) + request["content-type"] + end + + def http_method(request) + request.request_method + end + + def path(request) + request.path + end + + def body(request) + request.body + end + + # Set the specified format as the content type of the response. + def set_content_type(response, format) + response["content-type"] = format_to_mime(format) + end + + def set_response(response, result, status = 200) + response.status = status + if status >= 200 and status != 304 + response.body = result + response["content-length"] = result.stat.size if result.is_a?(File) + end + response.reason_phrase = result if status < 200 or status >= 300 + end + + # Retrieve node/cert/ip information from the request object. + def client_information(request) + result = {} + if peer = request.peeraddr and ip = peer[3] + result[:ip] = ip + end + + # If they have a certificate (which will almost always be true) + # then we get the hostname from the cert, instead of via IP + # info + result[:authenticated] = false + if cert = request.client_cert and nameary = cert.subject.to_a.find { |ary| ary[0] == "CN" } + result[:node] = nameary[1] + result[:authenticated] = true + else + result[:node] = resolve_node(result) + end + + result + end +end diff --git a/mcollective/lib/puppet/network/http_pool.rb b/mcollective/lib/puppet/network/http_pool.rb new file mode 100644 index 000000000..7d227b4d4 --- /dev/null +++ b/mcollective/lib/puppet/network/http_pool.rb @@ -0,0 +1,104 @@ +require 'puppet/ssl/host' +require 'net/https' +require 'puppet/util/cacher' + +module Puppet::Network; end + +# Manage Net::HTTP instances for keep-alive. +module Puppet::Network::HttpPool + class << self + include Puppet::Util::Cacher + + private + + cached_attr(:http_cache) { Hash.new } + end + + # Use the global localhost instance. + def self.ssl_host + Puppet::SSL::Host.localhost + end + + # 2008/03/23 + # LAK:WARNING: Enabling this has a high propability of + # causing corrupt files and who knows what else. See #1010. + HTTP_KEEP_ALIVE = false + + def self.keep_alive? + HTTP_KEEP_ALIVE + end + + # Clear our http cache, closing all connections. 
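The http_instance helper defined a little further down is the piece most callers touch: it hands back a Net::HTTP object already configured for SSL against the local Puppet certificates. A small usage sketch; the host name and request path are examples, and client certificates are assumed to exist:

    require 'puppet'
    require 'puppet/network/http_pool'

    http = Puppet::Network::HttpPool.http_instance("puppet.example.com", 8140)
    response = http.get("/production/certificate/ca", "Accept" => "s")
    puts response.code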
+ def self.clear_http_instances + http_cache.each do |name, connection| + connection.finish if connection.started? + end + Puppet::Util::Cacher.expire + end + + # Make sure we set the driver up when we read the cert in. + def self.read_cert + if val = super # This calls read_cert from the Puppet::SSLCertificates::Support module. + # Clear out all of our connections, since they previously had no cert and now they + # should have them. + clear_http_instances + return val + else + return false + end + end + + # Use cert information from a Puppet client to set up the http object. + def self.cert_setup(http) + # Just no-op if we don't have certs. + return false unless FileTest.exist?(Puppet[:hostcert]) and FileTest.exist?(Puppet[:localcacert]) + + http.cert_store = ssl_host.ssl_store + http.ca_file = Puppet[:localcacert] + http.cert = ssl_host.certificate.content + http.verify_mode = OpenSSL::SSL::VERIFY_PEER + http.key = ssl_host.key.content + end + + # Retrieve a cached http instance if caching is enabled, else return + # a new one. + def self.http_instance(host, port, reset = false) + # We overwrite the uninitialized @http here with a cached one. + key = "#{host}:#{port}" + + # Return our cached instance if we've got a cache, as long as we're not + # resetting the instance. + if keep_alive? + return http_cache[key] if ! reset and http_cache[key] + + # Clean up old connections if we have them. + if http = http_cache[key] + http_cache.delete(key) + http.finish if http.started? + end + end + + args = [host, port] + if Puppet[:http_proxy_host] == "none" + args << nil << nil + else + args << Puppet[:http_proxy_host] << Puppet[:http_proxy_port] + end + http = Net::HTTP.new(*args) + + # Pop open the http client a little; older versions of Net::HTTP(s) didn't + # give us a reader for ca_file... Grr... + class << http; attr_accessor :ca_file; end + + http.use_ssl = true + # Use configured timeout (#1176) + http.read_timeout = Puppet[:configtimeout] + http.open_timeout = Puppet[:configtimeout] + + cert_setup(http) + + http_cache[key] = http if keep_alive? + + http + end +end diff --git a/mcollective/lib/puppet/network/http_server.rb b/mcollective/lib/puppet/network/http_server.rb new file mode 100644 index 000000000..e3826a654 --- /dev/null +++ b/mcollective/lib/puppet/network/http_server.rb @@ -0,0 +1,3 @@ +# Just a stub, so we can correctly scope other classes. +module Puppet::Network::HTTPServer # :nodoc: +end diff --git a/mcollective/lib/puppet/network/http_server/mongrel.rb b/mcollective/lib/puppet/network/http_server/mongrel.rb new file mode 100644 index 000000000..fb9516461 --- /dev/null +++ b/mcollective/lib/puppet/network/http_server/mongrel.rb @@ -0,0 +1,129 @@ +#!/usr/bin/env ruby +# File: 06-11-14-mongrel_xmlrpc.rb +# Author: Manuel Holtgrewe +# +# Copyright (c) 2006 Manuel Holtgrewe, 2007 Luke Kanies +# +# This file is based heavily on a file retrieved from +# http://ttt.ggnore.net/2006/11/15/xmlrpc-with-mongrel-and-ruby-off-rails/ + +require 'rubygems' +require 'mongrel' +require 'xmlrpc/server' +require 'puppet/network/xmlrpc/server' +require 'puppet/network/http_server' +require 'puppet/network/client_request' +require 'puppet/network/handler' + +require 'resolv' + +# This handler can be hooked into Mongrel to accept HTTP requests. After +# checking whether the request itself is sane, the handler forwards it +# to an internal instance of XMLRPC::BasicServer to process it. 
+# You can access the server by calling the Handler's "xmlrpc_server"
+# attribute accessor method and add XMLRPC handlers there. For example:
+#
+# handler = XmlRpcHandler.new
+# handler.xmlrpc_server.add_handler("my.add") { |a, b| a.to_i + b.to_i }
+#
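Elsewhere in this patch (http/mongrel.rb) this handler is registered under /RPC2 on a Mongrel::HttpServer. A condensed sketch of that wiring; the address, port and the :status handler are example values:

    require 'mongrel'
    require 'puppet'
    require 'puppet/network/http_server/mongrel'

    httpd = Mongrel::HttpServer.new("127.0.0.1", 8140)
    httpd.register("/RPC2", Puppet::Network::HTTPServer::Mongrel.new([:status]))
    httpd.run.join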
+module Puppet::Network + class HTTPServer::Mongrel < ::Mongrel::HttpHandler + attr_reader :xmlrpc_server + + def initialize(handlers) + if Puppet[:debug] + $mongrel_debug_client = true + Puppet.debug 'Mongrel client debugging enabled. [$mongrel_debug_client = true].' + end + # Create a new instance of BasicServer. We are supposed to subclass it + # but that does not make sense since we would not introduce any new + # behaviour and we have to subclass Mongrel::HttpHandler so our handler + # works for Mongrel. + @xmlrpc_server = Puppet::Network::XMLRPCServer.new + handlers.each do |name| + unless handler = Puppet::Network::Handler.handler(name) + raise ArgumentError, "Invalid handler #{name}" + end + @xmlrpc_server.add_handler(handler.interface, handler.new({})) + end + end + + # This method produces the same results as XMLRPC::CGIServer.serve + # from Ruby's stdlib XMLRPC implementation. + def process(request, response) + # Make sure this has been a POST as required for XMLRPC. + request_method = request.params[Mongrel::Const::REQUEST_METHOD] || Mongrel::Const::GET + if request_method != "POST" + response.start(405) { |head, out| out.write("Method Not Allowed") } + return + end + + # Make sure the user has sent text/xml data. + request_mime = request.params["CONTENT_TYPE"] || "text/plain" + if parse_content_type(request_mime).first != "text/xml" + response.start(400) { |head, out| out.write("Bad Request") } + return + end + + # Make sure there is data in the body at all. + length = request.params[Mongrel::Const::CONTENT_LENGTH].to_i + if length <= 0 + response.start(411) { |head, out| out.write("Length Required") } + return + end + + # Check the body to be valid. + if request.body.nil? or request.body.size != length + response.start(400) { |head, out| out.write("Bad Request") } + return + end + + info = client_info(request) + + # All checks above passed through + response.start(200) do |head, out| + head["Content-Type"] = "text/xml; charset=utf-8" + begin + out.write(@xmlrpc_server.process(request.body, info)) + rescue => detail + puts detail.backtrace + raise + end + end + end + + private + + def client_info(request) + params = request.params + ip = params["HTTP_X_FORWARDED_FOR"] ? params["HTTP_X_FORWARDED_FOR"].split(',').last.strip : params["REMOTE_ADDR"] + # JJM #906 The following dn.match regular expression is forgiving + # enough to match the two Distinguished Name string contents + # coming from Apache, Pound or other reverse SSL proxies. 
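For instance, with an Apache-style subject string the capture group picks out the common name; the DN below is a made-up example:

    dn = "/C=US/ST=Oregon/O=Example/CN=agent01.example.com"
    dn.match(/^.*?CN\s*=\s*(.*)/)[1]   # => "agent01.example.com"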
+ if dn = params[Puppet[:ssl_client_header]] and dn_matchdata = dn.match(/^.*?CN\s*=\s*(.*)/) + client = dn_matchdata[1].to_str + valid = (params[Puppet[:ssl_client_verify_header]] == 'SUCCESS') + else + begin + client = Resolv.getname(ip) + rescue => detail + Puppet.err "Could not resolve #{ip}: #{detail}" + client = "unknown" + end + valid = false + end + + info = Puppet::Network::ClientRequest.new(client, ip, valid) + + info + end + + # Taken from XMLRPC::ParseContentType + def parse_content_type(str) + a, *b = str.split(";") + return a.strip, *b + end + end +end diff --git a/mcollective/lib/puppet/network/rest_authconfig.rb b/mcollective/lib/puppet/network/rest_authconfig.rb new file mode 100644 index 000000000..e6067612a --- /dev/null +++ b/mcollective/lib/puppet/network/rest_authconfig.rb @@ -0,0 +1,88 @@ +require 'puppet/network/authconfig' + +module Puppet + class Network::RestAuthConfig < Network::AuthConfig + + extend MonitorMixin + attr_accessor :rights + + DEFAULT_ACL = [ + { :acl => "~ ^\/catalog\/([^\/]+)$", :method => :find, :allow => '$1', :authenticated => true }, + # this one will allow all file access, and thus delegate + # to fileserver.conf + { :acl => "/file" }, + { :acl => "/certificate_revocation_list/ca", :method => :find, :authenticated => true }, + { :acl => "/report", :method => :save, :authenticated => true }, + { :acl => "/certificate/ca", :method => :find, :authenticated => false }, + { :acl => "/certificate/", :method => :find, :authenticated => false }, + { :acl => "/certificate_request", :method => [:find, :save], :authenticated => false }, + { :acl => "/status", :method => [:find], :authenticated => true }, + ] + + def self.main + synchronize do + add_acl = @main.nil? + super + @main.insert_default_acl if add_acl and !@main.exists? + end + @main + end + + # check wether this request is allowed in our ACL + # raise an Puppet::Network::AuthorizedError if the request + # is denied. + def allowed?(request) + read + + # we're splitting the request in part because + # fail_on_deny could as well be called in the XMLRPC context + # with a ClientRequest. + + if authorization_failure_exception = @rights.is_request_forbidden_and_why?(request) + Puppet.warning("Denying access: #{authorization_failure_exception}") + raise authorization_failure_exception + end + end + + def initialize(file = nil, parsenow = true) + super(file || Puppet[:rest_authconfig], parsenow) + + # if we didn't read a file (ie it doesn't exist) + # make sure we can create some default rights + @rights ||= Puppet::Network::Rights.new + end + + def parse + super() + insert_default_acl + end + + # force regular ACLs to be present + def insert_default_acl + DEFAULT_ACL.each do |acl| + unless rights[acl[:acl]] + Puppet.info "Inserting default '#{acl[:acl]}'(#{acl[:authenticated] ? "auth" : "non-auth"}) ACL because #{( !exists? ? 
"#{Puppet[:rest_authconfig]} doesn't exist" : "none were found in '#{@file}'")}" + mk_acl(acl) + end + end + # queue an empty (ie deny all) right for every other path + # actually this is not strictly necessary as the rights system + # denies not explicitely allowed paths + unless rights["/"] + rights.newright("/") + rights.restrict_authenticated("/", :any) + end + end + + def mk_acl(acl) + @rights.newright(acl[:acl]) + @rights.allow(acl[:acl], acl[:allow] || "*") + + if method = acl[:method] + method = [method] unless method.is_a?(Array) + method.each { |m| @rights.restrict_method(acl[:acl], m) } + end + @rights.restrict_authenticated(acl[:acl], acl[:authenticated]) unless acl[:authenticated].nil? + end + end +end diff --git a/mcollective/lib/puppet/network/rest_authorization.rb b/mcollective/lib/puppet/network/rest_authorization.rb new file mode 100644 index 000000000..e052245eb --- /dev/null +++ b/mcollective/lib/puppet/network/rest_authorization.rb @@ -0,0 +1,23 @@ +require 'puppet/network/client_request' +require 'puppet/network/rest_authconfig' + +module Puppet::Network + + module RestAuthorization + + + # Create our config object if necessary. If there's no configuration file + # we install our defaults + def authconfig + @authconfig ||= Puppet::Network::RestAuthConfig.main + + @authconfig + end + + # Verify that our client has access. + def check_authorization(request) + authconfig.allowed?(request) + end + end +end + diff --git a/mcollective/lib/puppet/network/rest_controller.rb b/mcollective/lib/puppet/network/rest_controller.rb new file mode 100644 index 000000000..76a9830ea --- /dev/null +++ b/mcollective/lib/puppet/network/rest_controller.rb @@ -0,0 +1,2 @@ +class Puppet::Network::RESTController # :nodoc: +end diff --git a/mcollective/lib/puppet/network/rights.rb b/mcollective/lib/puppet/network/rights.rb new file mode 100755 index 000000000..b1daef67c --- /dev/null +++ b/mcollective/lib/puppet/network/rights.rb @@ -0,0 +1,281 @@ +require 'puppet/network/authstore' +require 'puppet/error' + +module Puppet::Network + +# this exception is thrown when a request is not authenticated +class AuthorizationError < Puppet::Error; end + +# Define a set of rights and who has access to them. +# There are two types of rights: +# * named rights (ie a common string) +# * path based rights (which are matched on a longest prefix basis) +class Rights + + # We basically just proxy directly to our rights. Each Right stores + # its own auth abilities. + [:allow, :deny, :restrict_method, :restrict_environment, :restrict_authenticated].each do |method| + define_method(method) do |name, *args| + if obj = self[name] + obj.send(method, *args) + else + raise ArgumentError, "Unknown right '#{name}'" + end + end + end + + # Check that name is allowed or not + def allowed?(name, *args) + !is_forbidden_and_why?(name, :node => args[0], :ip => args[1]) + end + + def is_request_forbidden_and_why?(request) + methods_to_check = if request.method == :head + # :head is ok if either :find or :save is ok. + [:find, :save] + else + [request.method] + end + authorization_failure_exceptions = methods_to_check.map do |method| + is_forbidden_and_why?("/#{request.indirection_name}/#{request.key}", + :node => request.node, + :ip => request.ip, + :method => method, + :environment => request.environment, + :authenticated => request.authenticated) + end + if authorization_failure_exceptions.include? nil + # One of the methods we checked is ok, therefore this request is ok. 
+ nil + else + # Just need to return any of the failure exceptions. + authorization_failure_exceptions.first + end + end + + def is_forbidden_and_why?(name, args = {}) + res = :nomatch + right = @rights.find do |acl| + found = false + # an acl can return :dunno, which means "I'm not qualified to answer your question, + # please ask someone else". This is used when for instance an acl matches, but not for the + # current rest method, where we might think some other acl might be more specific. + if match = acl.match?(name) + args[:match] = match + if (res = acl.allowed?(args[:node], args[:ip], args)) != :dunno + # return early if we're allowed + return nil if res + # we matched, select this acl + found = true + end + end + found + end + + # if we end here, then that means we either didn't match + # or failed, in any case will throw an error to the outside world + if name =~ /^\// or right + # we're a patch ACL, let's fail + msg = "#{(args[:node].nil? ? args[:ip] : "#{args[:node]}(#{args[:ip]})")} access to #{name} [#{args[:method]}]" + + msg += " authenticated " if args[:authenticated] + + error = AuthorizationError.new("Forbidden request: #{msg}") + if right + error.file = right.file + error.line = right.line + end + else + # there were no rights allowing/denying name + # if name is not a path, let's throw + raise ArgumentError, "Unknown namespace right '#{name}'" + end + error + end + + def initialize + @rights = [] + end + + def [](name) + @rights.find { |acl| acl == name } + end + + def include?(name) + @rights.include?(name) + end + + def each + @rights.each { |r| yield r.name,r } + end + + # Define a new right to which access can be provided. + def newright(name, line=nil, file=nil) + add_right( Right.new(name, line, file) ) + end + + private + + def add_right(right) + if right.acl_type == :name and include?(right.key) + raise ArgumentError, "Right '%s' already exists" + end + @rights << right + sort_rights + right + end + + def sort_rights + @rights.sort! + end + + # Retrieve a right by name. + def right(name) + self[name] + end + + # A right. + class Right < Puppet::Network::AuthStore + include Puppet::FileCollection::Lookup + + attr_accessor :name, :key, :acl_type + attr_accessor :methods, :environment, :authentication + + ALL = [:save, :destroy, :find, :search] + + Puppet::Util.logmethods(self, true) + + def initialize(name, line, file) + @methods = [] + @environment = [] + @authentication = true # defaults to authenticated + @name = name + @line = line || 0 + @file = file + + case name + when Symbol + @acl_type = :name + @key = name + when /^\[(.+)\]$/ + @acl_type = :name + @key = $1.intern if name.is_a?(String) + when /^\// + @acl_type = :regex + @key = Regexp.new("^" + Regexp.escape(name)) + @methods = ALL + when /^~/ # this is a regex + @acl_type = :regex + @name = name.gsub(/^~\s+/,'') + @key = Regexp.new(@name) + @methods = ALL + else + raise ArgumentError, "Unknown right type '#{name}'" + end + super() + end + + def to_s + "access[#{@name}]" + end + + # There's no real check to do at this point + def valid? + true + end + + def regex? + acl_type == :regex + end + + # does this right is allowed for this triplet? 
+ # if this right is too restrictive (ie we don't match this access method) + # then return :dunno so that upper layers have a chance to try another right + # tailored to the given method + def allowed?(name, ip, args = {}) + return :dunno if acl_type == :regex and not @methods.include?(args[:method]) + return :dunno if acl_type == :regex and @environment.size > 0 and not @environment.include?(args[:environment]) + return :dunno if acl_type == :regex and not @authentication.nil? and args[:authenticated] != @authentication + + begin + # make sure any capture are replaced if needed + interpolate(args[:match]) if acl_type == :regex and args[:match] + res = super(name,ip) + ensure + reset_interpolation if acl_type == :regex + end + res + end + + # restrict this right to some method only + def restrict_method(m) + m = m.intern if m.is_a?(String) + + raise ArgumentError, "'#{m}' is not an allowed value for method directive" unless ALL.include?(m) + + # if we were allowing all methods, then starts from scratch + if @methods === ALL + @methods = [] + end + + raise ArgumentError, "'#{m}' is already in the '#{name}' ACL" if @methods.include?(m) + + @methods << m + end + + def restrict_environment(env) + env = Puppet::Node::Environment.new(env) + raise ArgumentError, "'#{env}' is already in the '#{name}' ACL" if @environment.include?(env) + + @environment << env + end + + def restrict_authenticated(authentication) + case authentication + when "yes", "on", "true", true + authentication = true + when "no", "off", "false", false + authentication = false + when "all","any", :all, :any + authentication = nil + else + raise ArgumentError, "'#{name}' incorrect authenticated value: #{authentication}" + end + @authentication = authentication + end + + def match?(key) + # if we are a namespace compare directly + return self.key == namespace_to_key(key) if acl_type == :name + + # otherwise match with the regex + self.key.match(key) + end + + def namespace_to_key(key) + key = key.intern if key.is_a?(String) + key + end + + # this is where all the magic happens. + # we're sorting the rights array with this scheme: + # * namespace rights are all in front + # * regex path rights are then all queued in file order + def <=>(rhs) + # move namespace rights at front + return self.acl_type == :name ? -1 : 1 if self.acl_type != rhs.acl_type + + # sort by creation order (ie first match appearing in the file will win) + # that is don't sort, in which case the sort algorithm will order in the + # natural array order (ie the creation order) + 0 + end + + def ==(name) + return(acl_type == :name ? self.key == namespace_to_key(name) : self.name == name.gsub(/^~\s+/,'')) + end + + end + +end +end diff --git a/mcollective/lib/puppet/network/server.rb b/mcollective/lib/puppet/network/server.rb new file mode 100644 index 000000000..80987e4b0 --- /dev/null +++ b/mcollective/lib/puppet/network/server.rb @@ -0,0 +1,165 @@ +require 'puppet/network/http' +require 'puppet/util/pidlock' + +class Puppet::Network::Server + attr_reader :server_type, :protocols, :address, :port + + # Put the daemon into the background. 
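Looking back at the Rights class completed above: rest_authconfig.rb builds its default ACLs through exactly this interface via mk_acl. A hedged sketch of what the /certificate_request entry expands to, assuming Puppet's lib directory is loadable:

    require 'puppet'
    require 'puppet/network/rights'

    rights = Puppet::Network::Rights.new
    rights.newright("/certificate_request")      # path-based right (leading slash)
    rights.allow("/certificate_request", "*")    # no :allow given, so "*"
    [:find, :save].each { |m| rights.restrict_method("/certificate_request", m) }
    rights.restrict_authenticated("/certificate_request", false)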
+ def daemonize + if pid = fork + Process.detach(pid) + exit(0) + end + + # Get rid of console logging + Puppet::Util::Log.close(:console) + + Process.setsid + Dir.chdir("/") + begin + $stdin.reopen "/dev/null" + $stdout.reopen "/dev/null", "a" + $stderr.reopen $stdout + Puppet::Util::Log.reopen + rescue => detail + Puppet::Util.replace_file("/tmp/daemonout", 0644) { |f| + f.puts "Could not start #{Puppet[:name]}: #{detail}" + } + raise "Could not start #{Puppet[:name]}: #{detail}" + end + end + + # Create a pidfile for our daemon, so we can be stopped and others + # don't try to start. + def create_pidfile + Puppet::Util.synchronize_on(Puppet[:name],Sync::EX) do + raise "Could not create PID file: #{pidfile}" unless Puppet::Util::Pidlock.new(pidfile).lock + end + end + + # Remove the pid file for our daemon. + def remove_pidfile + Puppet::Util.synchronize_on(Puppet[:name],Sync::EX) do + locker = Puppet::Util::Pidlock.new(pidfile) + locker.unlock or Puppet.err "Could not remove PID file #{pidfile}" if locker.locked? + end + end + + # Provide the path to our pidfile. + def pidfile + Puppet[:pidfile] + end + + def initialize(args = {}) + valid_args = [:handlers, :xmlrpc_handlers, :port] + bad_args = args.keys.find_all { |p| ! valid_args.include?(p) }.collect { |p| p.to_s }.join(",") + raise ArgumentError, "Invalid argument(s) #{bad_args}" unless bad_args == "" + @server_type = Puppet[:servertype] or raise "No servertype configuration found." # e.g., WEBrick, Mongrel, etc. + http_server_class || raise(ArgumentError, "Could not determine HTTP Server class for server type [#{@server_type}]") + + @port = args[:port] || Puppet[:masterport] || raise(ArgumentError, "Must specify :port or configure Puppet :masterport") + @address = determine_bind_address + + @protocols = [ :rest, :xmlrpc ] + @listening = false + @routes = {} + @xmlrpc_routes = {} + self.register(args[:handlers]) if args[:handlers] + self.register_xmlrpc(args[:xmlrpc_handlers]) if args[:xmlrpc_handlers] + + # Make sure we have all of the directories we need to function. + Puppet.settings.use(:main, :ssl, Puppet[:name]) + end + + # Register handlers for REST networking, based on the Indirector. + def register(*indirections) + raise ArgumentError, "Indirection names are required." if indirections.empty? + indirections.flatten.each do |name| + Puppet::Indirector::Indirection.model(name) || raise(ArgumentError, "Cannot locate indirection '#{name}'.") + @routes[name.to_sym] = true + end + end + + # Unregister Indirector handlers. + def unregister(*indirections) + raise "Cannot unregister indirections while server is listening." if listening? + indirections = @routes.keys if indirections.empty? + + indirections.flatten.each do |i| + raise(ArgumentError, "Indirection [#{i}] is unknown.") unless @routes[i.to_sym] + end + + indirections.flatten.each do |i| + @routes.delete(i.to_sym) + end + end + + # Register xmlrpc handlers for backward compatibility. + def register_xmlrpc(*namespaces) + raise ArgumentError, "XMLRPC namespaces are required." if namespaces.empty? + namespaces.flatten.each do |name| + Puppet::Network::Handler.handler(name) || raise(ArgumentError, "Cannot locate XMLRPC handler for namespace '#{name}'.") + @xmlrpc_routes[name.to_sym] = true + end + end + + # Unregister xmlrpc handlers. + def unregister_xmlrpc(*namespaces) + raise "Cannot unregister xmlrpc handlers while server is listening." if listening? + namespaces = @xmlrpc_routes.keys if namespaces.empty? 
+ + namespaces.flatten.each do |i| + raise(ArgumentError, "XMLRPC handler '#{i}' is unknown.") unless @xmlrpc_routes[i.to_sym] + end + + namespaces.flatten.each do |i| + @xmlrpc_routes.delete(i.to_sym) + end + end + + def listening? + @listening + end + + def listen + raise "Cannot listen -- already listening." if listening? + @listening = true + http_server.listen(:address => address, :port => port, :handlers => @routes.keys, :xmlrpc_handlers => @xmlrpc_routes.keys, :protocols => protocols) + end + + def unlisten + raise "Cannot unlisten -- not currently listening." unless listening? + http_server.unlisten + @listening = false + end + + def http_server_class + http_server_class_by_type(@server_type) + end + + def start + create_pidfile + listen + end + + def stop + unlisten + remove_pidfile + end + + private + + def http_server + @http_server ||= http_server_class.new + end + + def http_server_class_by_type(kind) + Puppet::Network::HTTP.server_class_by_type(kind) + end + + def determine_bind_address + tmp = Puppet[:bindaddress] + return tmp if tmp != "" + server_type.to_s == "webrick" ? "0.0.0.0" : "127.0.0.1" + end +end diff --git a/mcollective/lib/puppet/network/xmlrpc/processor.rb b/mcollective/lib/puppet/network/xmlrpc/processor.rb new file mode 100644 index 000000000..dea8a02fa --- /dev/null +++ b/mcollective/lib/puppet/network/xmlrpc/processor.rb @@ -0,0 +1,86 @@ +require 'puppet/network/authorization' +require 'xmlrpc/server' + +# Just silly. +class ::XMLRPC::FaultException + def to_s + self.message + end +end + +module Puppet::Network + # Most of our subclassing is just so that we can get + # access to information from the request object, like + # the client name and IP address. + module XMLRPCProcessor + include Puppet::Network::Authorization + + ERR_UNAUTHORIZED = 30 + + def add_handler(interface, handler) + @loadedhandlers << interface.prefix + super(interface, handler) + end + + def handler_loaded?(handler) + @loadedhandlers.include?(handler.to_s) + end + + # Convert our data and client request into xmlrpc calls, and verify + # they're authorized and such-like. This method differs from the + # default in that it expects a ClientRequest object in addition to the + # data. + def process(data, request) + call, params = parser.parseMethodCall(data) + params << request.name << request.ip + handler, method = call.split(".") + request.handler = handler + request.method = method + begin + verify(request) + rescue InvalidClientRequest => detail + raise ::XMLRPC::FaultException.new(ERR_UNAUTHORIZED, detail.to_s) + end + handle(request.call, *params) + end + + private + + # Provide error handling for method calls. + def protect_service(obj, *args) + begin + obj.call(*args) + rescue ::XMLRPC::FaultException + raise + rescue Puppet::AuthorizationError => detail + Puppet.err "Permission denied: #{detail}" + raise ::XMLRPC::FaultException.new( + 1, detail.to_s + ) + rescue Puppet::Error => detail + puts detail.backtrace if Puppet[:trace] + Puppet.err detail.to_s + error = ::XMLRPC::FaultException.new( + 1, detail.to_s + ) + error.set_backtrace detail.backtrace + raise error + rescue => detail + puts detail.backtrace if Puppet[:trace] + Puppet.err "Could not call: #{detail}" + error = ::XMLRPC::FaultException.new(1, detail.to_s) + error.set_backtrace detail.backtrace + raise error + end + end + + # Set up our service hook and init our handler list. 
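+ # Once this hook is installed, every dispatched XMLRPC method call runs
+ # through protect_service above, so an exception raised inside a
+ # handler (for instance a Puppet::AuthorizationError) is logged and
+ # returned to the client as an ::XMLRPC::FaultException instead of
+ # taking down the server thread.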
+ def setup_processor + @loadedhandlers = [] + self.set_service_hook do |obj, *args| + protect_service(obj, *args) + end + end + end +end + diff --git a/mcollective/lib/puppet/network/xmlrpc/server.rb b/mcollective/lib/puppet/network/xmlrpc/server.rb new file mode 100644 index 000000000..e54881756 --- /dev/null +++ b/mcollective/lib/puppet/network/xmlrpc/server.rb @@ -0,0 +1,19 @@ +require 'xmlrpc/server' +require 'puppet/network/authorization' +require 'puppet/network/xmlrpc/processor' + +module Puppet::Network + # Most of our subclassing is just so that we can get + # access to information from the request object, like + # the client name and IP address. + class XMLRPCServer < ::XMLRPC::BasicServer + include Puppet::Util + include Puppet::Network::XMLRPCProcessor + + def initialize + super() + setup_processor + end + end +end + diff --git a/mcollective/lib/puppet/network/xmlrpc/webrick_servlet.rb b/mcollective/lib/puppet/network/xmlrpc/webrick_servlet.rb new file mode 100644 index 000000000..c538cf74c --- /dev/null +++ b/mcollective/lib/puppet/network/xmlrpc/webrick_servlet.rb @@ -0,0 +1,114 @@ +require 'xmlrpc/server' +require 'puppet/network/authorization' +require 'puppet/network/xmlrpc/processor' + +module Puppet::Network::XMLRPC + class ServletError < RuntimeError; end + class WEBrickServlet < ::XMLRPC::WEBrickServlet + include Puppet::Network::XMLRPCProcessor + + # This is a hackish way to avoid an auth message every time we have a + # normal operation + def self.log(msg) + @logs ||= {} + if @logs.include?(msg) + @logs[msg] += 1 + else + Puppet.info msg + @logs[msg] = 1 + end + end + + # Accept a list of handlers and register them all. + def initialize(handlers) + # the servlet base class does not consume any arguments + # and its BasicServer base class only accepts a 'class_delim' + # option which won't change in Puppet at all + # thus, we don't need to pass any args to our base class, + # and we can consume them all ourselves + super() + + setup_processor + + # Set up each of the passed handlers. + handlers.each do |handler| + add_handler(handler.class.interface, handler) + end + end + + # Handle the actual request. We can't use the super() method, because + # we need to pass a ClientRequest object to process so we can do + # authorization. It's the only way to stay thread-safe. + def service(request, response) + if @valid_ip + raise WEBrick::HTTPStatus::Forbidden unless @valid_ip.any? { |ip| request.peeraddr[3] =~ ip } + end + + if request.request_method != "POST" + raise WEBrick::HTTPStatus::MethodNotAllowed, + "unsupported method `#{request.request_method}'." + end + + raise WEBrick::HTTPStatus::BadRequest if parse_content_type(request['Content-type']).first != "text/xml" + + length = (request['Content-length'] || 0).to_i + + raise WEBrick::HTTPStatus::LengthRequired unless length > 0 + + data = request.body + + raise WEBrick::HTTPStatus::BadRequest if data.nil? or data.size != length + + resp = process(data, client_request(request)) + raise WEBrick::HTTPStatus::InternalServerError if resp.nil? or resp.size <= 0 + + response.status = 200 + response['Content-Length'] = resp.size + response['Content-Type'] = "text/xml; charset=utf-8" + response.body = resp + end + + private + + # Generate a ClientRequest object for later validation. 
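+ # Sketch of the result (hostnames below are made up): for a request
+ # from 10.0.0.1 whose client certificate carries CN=agent01.example.com,
+ # this returns ClientRequest.new("agent01.example.com", "10.0.0.1", true);
+ # without a certificate the hostname reported by peeraddr is kept and
+ # the request stays marked as unauthenticated (valid = false).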
+ def client_request(request) + if peer = request.peeraddr + client = peer[2] + clientip = peer[3] + else + + raise ::XMLRPC::FaultException.new( + + ERR_UNCAUGHT_EXCEPTION, + + "Could not retrieve client information" + ) + end + + # If they have a certificate (which will almost always be true) + # then we get the hostname from the cert, instead of via IP + # info + valid = false + if cert = request.client_cert + nameary = cert.subject.to_a.find { |ary| + ary[0] == "CN" + } + + if nameary.nil? + Puppet.warning "Could not retrieve server name from cert" + else + unless client == nameary[1] + Puppet.debug "Overriding #{client} with cert name #{nameary[1]}" + client = nameary[1] + end + valid = true + end + end + + info = Puppet::Network::ClientRequest.new(client, clientip, valid) + + info + end + end +end + diff --git a/mcollective/lib/puppet/node.rb b/mcollective/lib/puppet/node.rb new file mode 100644 index 000000000..74095bb96 --- /dev/null +++ b/mcollective/lib/puppet/node.rb @@ -0,0 +1,125 @@ +require 'puppet/indirector' +require 'logger' + +# A class for managing nodes, including their facts and environment. +class Puppet::Node + require 'puppet/node/facts' + require 'puppet/node/environment' + + # Set up indirection, so that nodes can be looked for in + # the node sources. + extend Puppet::Indirector + + # Adds the environment getter and setter, with some instance/string conversion + include Puppet::Node::Environment::Helper + + # Use the node source as the indirection terminus. + indirects :node, :terminus_setting => :node_terminus, :doc => "Where to find node information. + A node is composed of its name, its facts, and its environment." + + attr_accessor :name, :classes, :source, :ipaddress, :parameters + attr_reader :time + + def environment + return super if @environment + + if env = parameters["environment"] + self.environment = env + return super + end + + # Else, return the default + Puppet::Node::Environment.new + end + + def initialize(name, options = {}) + @logger = ::Logger.new(STDOUT) + @logger.level = ::Logger::DEBUG + + + raise ArgumentError, "Node names cannot be nil" unless name + @name = name + + if classes = options[:classes] + if classes.is_a?(String) + @classes = [classes] + else + @classes = classes + end + else + @classes = [] + end + + @parameters = options[:parameters] || {} + + + if env = options[:environment] + self.environment = env + end + + @time = Time.now + end + + # Merge the node facts with parameters from the node source. + def fact_merge + if facts = Puppet::Node::Facts.find(name) + merge(facts.values) + end + rescue => detail + error = Puppet::Error.new("Could not retrieve facts for #{name}: #{detail}") + error.set_backtrace(detail.backtrace) + raise error + end + + # Merge any random parameters into our parameter list. + def merge(params) + params.each do |name, value| + @parameters[name] = value unless @parameters.include?(name) + end + + @parameters["environment"] ||= self.environment.name.to_s if self.environment + end + + # Calculate the list of names we might use for looking + # up our node. This is only used for AST nodes. + def names + return [name] if Puppet.settings[:strict_hostname_checking] + + names = [] + + names += split_name(name) if name.include?(".") + + # First, get the fqdn + unless fqdn = parameters["fqdn"] + if parameters["hostname"] and parameters["domain"] + fqdn = parameters["hostname"] + "." 
+ parameters["domain"] + else + Puppet.warning "Host is missing hostname and/or domain: #{name}" + end + end + + # Now that we (might) have the fqdn, add each piece to the name + # list to search, in order of longest to shortest. + names += split_name(fqdn) if fqdn + + # And make sure the node name is first, since that's the most + # likely usage. + # The name is usually the Certificate CN, but it can be + # set to the 'facter' hostname instead. + if Puppet[:node_name] == 'cert' + names.unshift name + else + names.unshift parameters["hostname"] + end + names.uniq + end + + def split_name(name) + list = name.split(".") + tmp = [] + list.each_with_index do |short, i| + tmp << list[0..i].join(".") + end + tmp.reverse + end +end diff --git a/mcollective/lib/puppet/node/environment.rb b/mcollective/lib/puppet/node/environment.rb new file mode 100644 index 000000000..807479bc8 --- /dev/null +++ b/mcollective/lib/puppet/node/environment.rb @@ -0,0 +1,151 @@ +require 'puppet/util/cacher' +require 'monitor' + +# Just define it, so this class has fewer load dependencies. +class Puppet::Node +end + +# Model the environment that a node can operate in. This class just +# provides a simple wrapper for the functionality around environments. +class Puppet::Node::Environment + module Helper + def environment + Puppet::Node::Environment.new(@environment) + end + + def environment=(env) + if env.is_a?(String) or env.is_a?(Symbol) + @environment = env + else + @environment = env.name + end + end + end + + include Puppet::Util::Cacher + + @seen = {} + + # Return an existing environment instance, or create a new one. + def self.new(name = nil) + return name if name.is_a?(self) + name ||= Puppet.settings.value(:environment) + + raise ArgumentError, "Environment name must be specified" unless name + + symbol = name.to_sym + + return @seen[symbol] if @seen[symbol] + + obj = self.allocate + obj.send :initialize, symbol + @seen[symbol] = obj + end + + def self.current + Thread.current[:environment] || root + end + + def self.current=(env) + Thread.current[:environment] = new(env) + end + + def self.root + @root + end + + # This is only used for testing. + def self.clear + @seen.clear + end + + attr_reader :name + + # Return an environment-specific setting. + def [](param) + Puppet.settings.value(param, self.name) + end + + def initialize(name) + @name = name + extend MonitorMixin + end + + def known_resource_types + # This makes use of short circuit evaluation to get the right thread-safe + # per environment semantics with an efficient most common cases; we almost + # always just return our thread's known-resource types. Only at the start + # of a compilation (after our thread var has been set to nil) or when the + # environment has changed do we delve deeper. + Thread.current[:known_resource_types] = nil if (krt = Thread.current[:known_resource_types]) && krt.environment != self + Thread.current[:known_resource_types] ||= synchronize { + if @known_resource_types.nil? or @known_resource_types.require_reparse? + @known_resource_types = Puppet::Resource::TypeCollection.new(self) + @known_resource_types.perform_initial_import + end + @known_resource_types + } + end + + def module(name) + mod = Puppet::Module.new(name, self) + return nil unless mod.exist? + mod + end + + # Cache the modulepath, so that we aren't searching through + # all known directories all the time. 
+ cached_attr(:modulepath, :ttl => Puppet[:filetimeout]) do + dirs = self[:modulepath].split(File::PATH_SEPARATOR) + dirs = ENV["PUPPETLIB"].split(File::PATH_SEPARATOR) + dirs if ENV["PUPPETLIB"] + validate_dirs(dirs) + end + + # Return all modules from this environment. + # Cache the list, because it can be expensive to create. + cached_attr(:modules, :ttl => Puppet[:filetimeout]) do + module_names = modulepath.collect { |path| Dir.entries(path) }.flatten.uniq + module_names.collect do |path| + begin + Puppet::Module.new(path, self) + rescue Puppet::Module::Error => e + nil + end + end.compact + end + + # Cache the manifestdir, so that we aren't searching through + # all known directories all the time. + cached_attr(:manifestdir, :ttl => Puppet[:filetimeout]) do + validate_dirs(self[:manifestdir].split(File::PATH_SEPARATOR)) + end + + def to_s + name.to_s + end + + def to_sym + to_s.to_sym + end + + # The only thing we care about when serializing an environment is its + # identity; everything else is ephemeral and should not be stored or + # transmitted. + def to_zaml(z) + self.to_s.to_zaml(z) + end + + def validate_dirs(dirs) + dirs.collect do |dir| + if dir !~ /^#{File::SEPARATOR}/ + File.join(Dir.getwd, dir) + else + dir + end + end.find_all do |p| + p =~ /^#{File::SEPARATOR}/ && FileTest.directory?(p) + end + end + + @root = new(:'*root*') +end diff --git a/mcollective/lib/puppet/node/facts.rb b/mcollective/lib/puppet/node/facts.rb new file mode 100755 index 000000000..0a96e553b --- /dev/null +++ b/mcollective/lib/puppet/node/facts.rb @@ -0,0 +1,99 @@ +require 'time' + +require 'puppet/node' +require 'puppet/indirector' + +require 'puppet/util/pson' + +# Manage a given node's facts. This either accepts facts and stores them, or +# returns facts for a given node. +class Puppet::Node::Facts + # Set up indirection, so that nodes can be looked for in + # the node sources. + extend Puppet::Indirector + extend Puppet::Util::Pson + + # We want to expire any cached nodes if the facts are saved. + module NodeExpirer + def save(key, instance) + Puppet::Node.expire(instance.name) + super + end + end + + indirects :facts, :terminus_setting => :facts_terminus, :extend => NodeExpirer + + attr_accessor :name, :values + + def add_local_facts + values["clientcert"] = Puppet.settings[:certname] + values["clientversion"] = Puppet.version.to_s + values["environment"] ||= Puppet.settings[:environment] + end + + def initialize(name, values = {}) + @name = name + @values = values + + add_internal + end + + def downcase_if_necessary + return unless Puppet.settings[:downcasefacts] + + Puppet.warning "DEPRECATION NOTICE: Fact downcasing is deprecated; please disable (20080122)" + values.each do |fact, value| + values[fact] = value.downcase if value.is_a?(String) + end + end + + # Convert all fact values into strings. 
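+ # e.g. { "processorcount" => 4, "is_virtual" => true } becomes
+ # { "processorcount" => "4", "is_virtual" => "true" } (made-up facts).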
+ def stringify + values.each do |fact, value| + values[fact] = value.to_s + end + end + + def ==(other) + return false unless self.name == other.name + strip_internal == other.send(:strip_internal) + end + + def timestamp=(time) + self.values[:_timestamp] = time + end + + def timestamp + self.values[:_timestamp] + end + + def self.from_pson(data) + result = new(data['name'], data['values']) + result.timestamp = Time.parse(data['timestamp']) + result.expiration = Time.parse(data['expiration']) + result + end + + def to_pson(*args) + { + 'expiration' => expiration, + 'name' => name, + 'timestamp' => timestamp, + 'values' => strip_internal, + }.to_pson(*args) + end + + private + + # Add internal data to the facts for storage. + def add_internal + self.values[:_timestamp] = Time.now + end + + # Strip out that internal data. + def strip_internal + newvals = values.dup + newvals.find_all { |name, value| name.to_s =~ /^_/ }.each { |name, value| newvals.delete(name) } + newvals + end +end diff --git a/mcollective/lib/puppet/parameter.rb b/mcollective/lib/puppet/parameter.rb new file mode 100644 index 000000000..29d60fc66 --- /dev/null +++ b/mcollective/lib/puppet/parameter.rb @@ -0,0 +1,304 @@ +require 'puppet/util/methodhelper' +require 'puppet/util/log_paths' +require 'puppet/util/logging' +require 'puppet/util/docs' +require 'puppet/util/cacher' + +class Puppet::Parameter + include Puppet::Util + include Puppet::Util::Errors + include Puppet::Util::LogPaths + include Puppet::Util::Logging + include Puppet::Util::MethodHelper + include Puppet::Util::Cacher + + require 'puppet/parameter/value_collection' + + class << self + include Puppet::Util + include Puppet::Util::Docs + attr_reader :validater, :munger, :name, :default, :required_features, :value_collection + attr_accessor :metaparam + + # Define the default value for a given parameter or parameter. This + # means that 'nil' is an invalid default value. This defines + # the 'default' instance method. + def defaultto(value = nil, &block) + if block + define_method(:default, &block) + else + if value.nil? + raise Puppet::DevError, + "Either a default value or block must be provided" + end + define_method(:default) do value end + end + end + + # Return a documentation string. If there are valid values, + # then tack them onto the string. + def doc + @doc ||= "" + + unless defined?(@addeddocvals) + @doc += value_collection.doc + + if f = self.required_features + @doc += " Requires features #{f.flatten.collect { |f| f.to_s }.join(" ")}." + end + @addeddocvals = true + end + + @doc + end + + def nodefault + undef_method :default if public_method_defined? :default + end + + # Store documentation for this parameter. + def desc(str) + @doc = str + end + + def initvars + @value_collection = ValueCollection.new + end + + # This is how we munge the value. Basically, this is our + # opportunity to convert the value from one form into another. + def munge(&block) + # I need to wrap the unsafe version in begin/rescue parameterments, + # but if I directly call the block then it gets bound to the + # class's context, not the instance's, thus the two methods, + # instead of just one. + define_method(:unsafe_munge, &block) + end + + # Does the parameter supports reverse munge? + # This will be called when something wants to access the parameter + # in a canonical form different to what the storage form is. + def unmunge(&block) + define_method(:unmunge, &block) + end + + # Mark whether we're the namevar. 
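+ # Typically invoked from inside a type's parameter definition, along
+ # these lines (a hypothetical type, shown only for illustration):
+ #
+ #   newparam(:path) do
+ #     desc "The path to manage."
+ #     isnamevar
+ #   end
+ #
+ # Marking a parameter as the namevar also marks it as required.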
+ def isnamevar + @isnamevar = true + @required = true + end + + # Is this parameter the namevar? Defaults to false. + def isnamevar? + @isnamevar + end + + # This parameter is required. + def isrequired + @required = true + end + + # Specify features that are required for this parameter to work. + def required_features=(*args) + @required_features = args.flatten.collect { |a| a.to_s.downcase.intern } + end + + # Is this parameter required? Defaults to false. + def required? + @required + end + + # Verify that we got a good value + def validate(&block) + define_method(:unsafe_validate, &block) + end + + # Define a new value for our parameter. + def newvalues(*names) + @value_collection.newvalues(*names) + end + + def aliasvalue(name, other) + @value_collection.aliasvalue(name, other) + end + end + + # Just a simple method to proxy instance methods to class methods + def self.proxymethods(*values) + values.each { |val| + define_method(val) do + self.class.send(val) + end + } + end + + # And then define one of these proxies for each method in our + # ParamHandler class. + proxymethods("required?", "isnamevar?") + + attr_accessor :resource + # LAK 2007-05-09: Keep the @parent around for backward compatibility. + attr_accessor :parent + + [:line, :file, :version].each do |param| + define_method(param) do + resource.send(param) + end + end + + def devfail(msg) + self.fail(Puppet::DevError, msg) + end + + def expirer + resource.catalog + end + + def fail(*args) + type = nil + if args[0].is_a?(Class) + type = args.shift + else + type = Puppet::Error + end + + error = type.new(args.join(" ")) + + error.line = @resource.line if @resource and @resource.line + + error.file = @resource.file if @resource and @resource.file + + raise error + end + + # Basic parameter initialization. + def initialize(options = {}) + options = symbolize_options(options) + if resource = options[:resource] + self.resource = resource + options.delete(:resource) + else + raise Puppet::DevError, "No resource set for #{self.class.name}" + end + + set_options(options) + end + + def log(msg) + send_log(resource[:loglevel], msg) + end + + # Is this parameter a metaparam? + def metaparam? + self.class.metaparam + end + + # each parameter class must define the name method, and parameter + # instances do not change that name this implicitly means that a given + # object can only have one parameter instance of a given parameter + # class + def name + self.class.name + end + + # for testing whether we should actually do anything + def noop + @noop ||= false + tmp = @noop || self.resource.noop || Puppet[:noop] || false + #debug "noop is #{tmp}" + tmp + end + + # return the full path to us, for logging and rollback; not currently + # used + def pathbuilder + if @resource + return [@resource.pathbuilder, self.name] + else + return [self.name] + end + end + + # If the specified value is allowed, then munge appropriately. + # If the developer uses a 'munge' hook, this method will get overridden. + def unsafe_munge(value) + self.class.value_collection.munge(value) + end + + # no unmunge by default + def unmunge(value) + value + end + + # A wrapper around our munging that makes sure we raise useful exceptions. + def munge(value) + begin + ret = unsafe_munge(value) + rescue Puppet::Error => detail + Puppet.debug "Reraising #{detail}" + raise + rescue => detail + raise Puppet::DevError, "Munging failed for value #{value.inspect} in class #{self.name}: #{detail}", detail.backtrace + end + ret + end + + # Verify that the passed value is valid. 
+ # If the developer uses a 'validate' hook, this method will get overridden. + def unsafe_validate(value) + self.class.value_collection.validate(value) + end + + # A protected validation method that only ever raises useful exceptions. + def validate(value) + begin + unsafe_validate(value) + rescue ArgumentError => detail + fail detail.to_s + rescue Puppet::Error, TypeError + raise + rescue => detail + raise Puppet::DevError, "Validate method failed for class #{self.name}: #{detail}", detail.backtrace + end + end + + def remove + @resource = nil + end + + def value + unmunge(@value) unless @value.nil? + end + + # Store the value provided. All of the checking should possibly be + # late-binding (e.g., users might not exist when the value is assigned + # but might when it is asked for). + def value=(value) + validate(value) + + @value = munge(value) + end + + # Retrieve the resource's provider. Some types don't have providers, in which + # case we return the resource object itself. + def provider + @resource.provider + end + + # The properties need to return tags so that logs correctly collect them. + def tags + unless defined?(@tags) + @tags = [] + # This might not be true in testing + @tags = @resource.tags if @resource.respond_to? :tags + @tags << self.name.to_s + end + @tags + end + + def to_s + name.to_s + end +end + +require 'puppet/parameter/path' diff --git a/mcollective/lib/puppet/parameter/path.rb b/mcollective/lib/puppet/parameter/path.rb new file mode 100644 index 000000000..44886afd0 --- /dev/null +++ b/mcollective/lib/puppet/parameter/path.rb @@ -0,0 +1,42 @@ +require 'puppet/parameter' + +class Puppet::Parameter::Path < Puppet::Parameter + def self.accept_arrays(bool = true) + @accept_arrays = !!bool + end + def self.arrays? + @accept_arrays + end + + def validate_path(paths) + if paths.is_a?(Array) and ! self.class.arrays? then + fail "#{name} only accepts a single path, not an array of paths" + end + + # We *always* support Unix path separators, as Win32 does now too. + absolute = "[/#{::Regexp.quote(::File::SEPARATOR)}]" + win32 = Puppet.features.microsoft_windows? + + Array(paths).each do |path| + next if path =~ %r{^#{absolute}} + next if win32 and path =~ %r{^(?:[a-zA-Z]:)?#{absolute}} + fail("#{name} must be a fully qualified path") + end + + paths + end + + # This will be overridden if someone uses the validate option, which is why + # it just delegates to the other, useful, method. + def unsafe_validate(paths) + validate_path(paths) + end + + # Likewise, this might be overridden, but by default... + def unsafe_munge(paths) + if paths.is_a?(Array) and ! self.class.arrays? then + fail "#{name} only accepts a single path, not an array of paths" + end + paths + end +end diff --git a/mcollective/lib/puppet/parameter/value.rb b/mcollective/lib/puppet/parameter/value.rb new file mode 100644 index 000000000..d9bfbafe2 --- /dev/null +++ b/mcollective/lib/puppet/parameter/value.rb @@ -0,0 +1,63 @@ +require 'puppet/parameter/value_collection' + +# An individual Value class. +class Puppet::Parameter::Value + attr_reader :name, :options, :event + attr_accessor :block, :call, :method, :required_features + + # Add an alias for this value. + def alias(name) + @aliases << convert(name) + end + + # Return all aliases. + def aliases + @aliases.dup + end + + # Store the event that our value generates, if it does so. 
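+ # e.g. a value declared with :event => :service_started (a made-up
+ # event name) ends up with @event == :service_started via this settor,
+ # after being normalized by convert below.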
+ def event=(value) + @event = convert(value) + end + + def initialize(name) + if name.is_a?(Regexp) + @name = name + else + # Convert to a string and then a symbol, so things like true/false + # still show up as symbols. + @name = convert(name) + end + + @aliases = [] + + @call = :instead + end + + # Does a provided value match our value? + def match?(value) + if regex? + return true if name =~ value.to_s + else + return(name == convert(value) ? true : @aliases.include?(convert(value))) + end + end + + # Is our value a regex? + def regex? + @name.is_a?(Regexp) + end + + private + + # A standard way of converting all of our values, so we're always + # comparing apples to apples. + def convert(value) + if value == '' + # We can't intern an empty string, yay. + value + else + value.to_s.to_sym + end + end +end diff --git a/mcollective/lib/puppet/parameter/value_collection.rb b/mcollective/lib/puppet/parameter/value_collection.rb new file mode 100644 index 000000000..619e0731d --- /dev/null +++ b/mcollective/lib/puppet/parameter/value_collection.rb @@ -0,0 +1,143 @@ +require 'puppet/parameter/value' + +# A collection of values and regexes, used for specifying +# what values are allowed in a given parameter. +class Puppet::Parameter::ValueCollection + + def aliasvalue(name, other) + other = other.to_sym + unless value = match?(other) + raise Puppet::DevError, "Cannot alias nonexistent value #{other}" + end + + value.alias(name) + end + + # Return a doc string for all of the values in this parameter/property. + def doc + unless defined?(@doc) + @doc = "" + unless values.empty? + @doc += " Valid values are " + @doc += @strings.collect do |value| + if aliases = value.aliases and ! aliases.empty? + "`#{value.name}` (also called `#{aliases.join(", ")}`)" + else + "`#{value.name}`" + end + end.join(", ") + "." + end + + @doc += " Values can match `" + regexes.join("`, `") + "`." unless regexes.empty? + end + + @doc + end + + # Does this collection contain any value definitions? + def empty? + @values.empty? + end + + def initialize + # We often look values up by name, so a hash makes more sense. + @values = {} + + # However, we want to retain the ability to match values in order, + # but we always prefer directly equality (i.e., strings) over regex matches. + @regexes = [] + @strings = [] + end + + # Can we match a given value? + def match?(test_value) + # First look for normal values + if value = @strings.find { |v| v.match?(test_value) } + return value + end + + # Then look for a regex match + @regexes.find { |v| v.match?(test_value) } + end + + # If the specified value is allowed, then munge appropriately. + def munge(value) + return value if empty? + + if instance = match?(value) + if instance.regex? + return value + else + return instance.name + end + else + return value + end + end + + # Define a new valid value for a property. You must provide the value itself, + # usually as a symbol, or a regex to match the value. + # + # The first argument to the method is either the value itself or a regex. + # The second argument is an option hash; valid options are: + # * :event: The event that should be returned when this value is set. + # * :call: When to call any associated block. The default value + # is ``instead``, which means to call the value instead of calling the + # provider. You can also specify ``before`` or ``after``, which will + # call both the block and the provider, according to the order you specify + # (the ``first`` refers to when the block is called, not the provider). 
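+ #
+ # A hypothetical call, to make the options concrete (not taken from any
+ # real type):
+ #
+ #   newvalue(:present, :event => :file_created) do
+ #     provider.create
+ #   end
+ #
+ # Because a block is given and :present is not a regex, the value's
+ # method defaults to "set_present".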
+ def newvalue(name, options = {}, &block) + value = Puppet::Parameter::Value.new(name) + @values[value.name] = value + if value.regex? + @regexes << value + else + @strings << value + end + + options.each { |opt, arg| value.send(opt.to_s + "=", arg) } + if block_given? + value.block = block + else + value.call = options[:call] || :none + end + + value.method ||= "set_#{value.name}" if block_given? and ! value.regex? + + value + end + + # Define one or more new values for our parameter. + def newvalues(*names) + names.each { |name| newvalue(name) } + end + + def regexes + @regexes.collect { |r| r.name.inspect } + end + + # Verify that the passed value is valid. + def validate(value) + return if empty? + + unless @values.detect { |name, v| v.match?(value) } + str = "Invalid value #{value.inspect}. " + + str += "Valid values are #{values.join(", ")}. " unless values.empty? + + str += "Valid values match #{regexes.join(", ")}." unless regexes.empty? + + raise ArgumentError, str + end + end + + # Return a single value instance. + def value(name) + @values[name] + end + + # Return the list of valid values. + def values + @strings.collect { |s| s.name } + end +end diff --git a/mcollective/lib/puppet/parser.rb b/mcollective/lib/puppet/parser.rb new file mode 100644 index 000000000..4d274b43c --- /dev/null +++ b/mcollective/lib/puppet/parser.rb @@ -0,0 +1,4 @@ +require 'puppet/parser/parser' +require 'puppet/parser/compiler' +require 'puppet/resource/type_collection' + diff --git a/mcollective/lib/puppet/parser/ast.rb b/mcollective/lib/puppet/parser/ast.rb new file mode 100644 index 000000000..a5aaeddc4 --- /dev/null +++ b/mcollective/lib/puppet/parser/ast.rb @@ -0,0 +1,138 @@ +# the parent class for all of our syntactical objects + +require 'puppet' +require 'puppet/util/autoload' +require 'puppet/file_collection/lookup' + +# The base class for all of the objects that make up the parse trees. +# Handles things like file name, line #, and also does the initialization +# for all of the parameters of all of the child objects. +class Puppet::Parser::AST + # Do this so I don't have to type the full path in all of the subclasses + AST = Puppet::Parser::AST + + include Puppet::FileCollection::Lookup + + include Puppet::Util::Errors + include Puppet::Util::MethodHelper + include Puppet::Util::Docs + + attr_accessor :parent, :scope + + def inspect + "( #{self.class} #{self.to_s} #{@children.inspect} )" + end + + # don't fetch lexer comment by default + def use_docs + self.class.use_docs + end + + # allow our subclass to specify they want documentation + class << self + attr_accessor :use_docs + def associates_doc + self.use_docs = true + end + end + + # Does this ast object set something? If so, it gets evaluated first. + def self.settor? + if defined?(@settor) + @settor + else + false + end + end + + # Evaluate the current object. Just a stub method, since the subclass + # should override this method. + # of the contained children and evaluates them in turn, returning a + # list of all of the collected values, rejecting nil values + def evaluate(*options) + raise Puppet::DevError, "Did not override #evaluate in #{self.class}" + end + + # Throw a parse error. + def parsefail(message) + self.fail(Puppet::ParseError, message) + end + + # Wrap a statemp in a reusable way so we always throw a parse error. + def parsewrap + exceptwrap :type => Puppet::ParseError do + yield + end + end + + # The version of the evaluate method that should be called, because it + # correctly handles errors. 
It is critical to use this method because + # it can enable you to catch the error where it happens, rather than + # much higher up the stack. + def safeevaluate(*options) + # We duplicate code here, rather than using exceptwrap, because this + # is called so many times during parsing. + begin + return self.evaluate(*options) + rescue Puppet::Error => detail + raise adderrorcontext(detail) + rescue => detail + error = Puppet::Error.new(detail.to_s) + # We can't use self.fail here because it always expects strings, + # not exceptions. + raise adderrorcontext(error, detail) + end + end + + # Initialize the object. Requires a hash as the argument, and + # takes each of the parameters of the hash and calls the settor + # method for them. This is probably pretty inefficient and should + # likely be changed at some point. + def initialize(args) + set_options(args) + end + + # evaluate ourselves, and match + def evaluate_match(value, scope) + obj = self.safeevaluate(scope) + + obj = obj.downcase if obj.respond_to?(:downcase) + value = value.downcase if value.respond_to?(:downcase) + + obj = Puppet::Parser::Scope.number?(obj) || obj + value = Puppet::Parser::Scope.number?(value) || value + + # "" == undef for case/selector/if + obj == value or (obj == "" and value == :undef) + end +end + +# And include all of the AST subclasses. +require 'puppet/parser/ast/arithmetic_operator' +require 'puppet/parser/ast/astarray' +require 'puppet/parser/ast/asthash' +require 'puppet/parser/ast/branch' +require 'puppet/parser/ast/boolean_operator' +require 'puppet/parser/ast/caseopt' +require 'puppet/parser/ast/casestatement' +require 'puppet/parser/ast/collection' +require 'puppet/parser/ast/collexpr' +require 'puppet/parser/ast/comparison_operator' +require 'puppet/parser/ast/else' +require 'puppet/parser/ast/function' +require 'puppet/parser/ast/ifstatement' +require 'puppet/parser/ast/in_operator' +require 'puppet/parser/ast/leaf' +require 'puppet/parser/ast/match_operator' +require 'puppet/parser/ast/minus' +require 'puppet/parser/ast/nop' +require 'puppet/parser/ast/not' +require 'puppet/parser/ast/resource' +require 'puppet/parser/ast/resource_defaults' +require 'puppet/parser/ast/resource_override' +require 'puppet/parser/ast/resource_reference' +require 'puppet/parser/ast/resourceparam' +require 'puppet/parser/ast/selector' +require 'puppet/parser/ast/tag' +require 'puppet/parser/ast/vardef' +require 'puppet/parser/ast/relationship' diff --git a/mcollective/lib/puppet/parser/ast/arithmetic_operator.rb b/mcollective/lib/puppet/parser/ast/arithmetic_operator.rb new file mode 100644 index 000000000..33352d727 --- /dev/null +++ b/mcollective/lib/puppet/parser/ast/arithmetic_operator.rb @@ -0,0 +1,39 @@ +require 'puppet' +require 'puppet/parser/ast/branch' + +class Puppet::Parser::AST + class ArithmeticOperator < AST::Branch + + attr_accessor :operator, :lval, :rval + + # Iterate across all of our children. 
+ def each + [@lval,@rval,@operator].each { |child| yield child } + end + + # Returns a boolean which is the result of the boolean operation + # of lval and rval operands + def evaluate(scope) + # evaluate the operands, should return a boolean value + lval = @lval.safeevaluate(scope) + lval = Puppet::Parser::Scope.number?(lval) + if lval == nil + raise ArgumentError, "left operand of #{@operator} is not a number" + end + rval = @rval.safeevaluate(scope) + rval = Puppet::Parser::Scope.number?(rval) + if rval == nil + raise ArgumentError, "right operand of #{@operator} is not a number" + end + + # compute result + lval.send(@operator, rval) + end + + def initialize(hash) + super + + raise ArgumentError, "Invalid arithmetic operator #{@operator}" unless %w{+ - * / << >>}.include?(@operator) + end + end +end diff --git a/mcollective/lib/puppet/parser/ast/astarray.rb b/mcollective/lib/puppet/parser/ast/astarray.rb new file mode 100644 index 000000000..529998e3c --- /dev/null +++ b/mcollective/lib/puppet/parser/ast/astarray.rb @@ -0,0 +1,61 @@ +require 'puppet/parser/ast/branch' + +class Puppet::Parser::AST + # The basic container class. This object behaves almost identically + # to a normal array except at initialization time. Note that its name + # is 'AST::ASTArray', rather than plain 'AST::Array'; I had too many + # bugs when it was just 'AST::Array', because things like + # 'object.is_a?(Array)' never behaved as I expected. + class ASTArray < Branch + include Enumerable + + # Return a child by index. Probably never used. + def [](index) + @children[index] + end + + # Evaluate our children. + def evaluate(scope) + # Make a new array, so we don't have to deal with the details of + # flattening and such + items = [] + + # First clean out any AST::ASTArrays + @children.each { |child| + if child.instance_of?(AST::ASTArray) + child.each do |ac| + items << ac + end + else + items << child + end + } + + rets = items.flatten.collect { |child| + child.safeevaluate(scope) + } + rets.reject { |o| o.nil? } + end + + def push(*ary) + ary.each { |child| + #Puppet.debug "adding %s(%s) of type %s to %s" % + # [child, child.object_id, child.class.to_s.sub(/.+::/,''), + # self.object_id] + @children.push(child) + } + + self + end + + def to_s + "[" + @children.collect { |c| c.to_s }.join(', ') + "]" + end + end + + # A simple container class, containing the parameters for an object. + # Used for abstracting the grammar declarations. Basically unnecessary + # except that I kept finding bugs because I had too many arrays that + # meant completely different things. + class ResourceInstance < ASTArray; end +end diff --git a/mcollective/lib/puppet/parser/ast/asthash.rb b/mcollective/lib/puppet/parser/ast/asthash.rb new file mode 100644 index 000000000..ae81d35dd --- /dev/null +++ b/mcollective/lib/puppet/parser/ast/asthash.rb @@ -0,0 +1,37 @@ +require 'puppet/parser/ast/leaf' + +class Puppet::Parser::AST + class ASTHash < Leaf + include Enumerable + + # Evaluate our children. + def evaluate(scope) + items = {} + + @value.each_pair do |k,v| + key = k.respond_to?(:safeevaluate) ? 
k.safeevaluate(scope) : k + items.merge!({ key => v.safeevaluate(scope) }) + end + + items + end + + def merge(hash) + case hash + when ASTHash + @value = @value.merge(hash.value) + when Hash + @value = @value.merge(hash) + end + end + + def to_s + "{" + @value.collect { |v| v.collect { |a| a.to_s }.join(' => ') }.join(', ') + "}" + end + + def initialize(args) + super(args) + @value ||= {} + end + end +end diff --git a/mcollective/lib/puppet/parser/ast/boolean_operator.rb b/mcollective/lib/puppet/parser/ast/boolean_operator.rb new file mode 100644 index 000000000..8481e4f8d --- /dev/null +++ b/mcollective/lib/puppet/parser/ast/boolean_operator.rb @@ -0,0 +1,46 @@ +require 'puppet' +require 'puppet/parser/ast/branch' + +class Puppet::Parser::AST + class BooleanOperator < AST::Branch + + attr_accessor :operator, :lval, :rval + + # Iterate across all of our children. + def each + [@lval,@rval,@operator].each { |child| yield child } + end + + # Returns a boolean which is the result of the boolean operation + # of lval and rval operands + def evaluate(scope) + # evaluate the first operand, should return a boolean value + lval = @lval.safeevaluate(scope) + + # return result + # lazy evaluate right operand + case @operator + when "and" + if Puppet::Parser::Scope.true?(lval) + rval = @rval.safeevaluate(scope) + Puppet::Parser::Scope.true?(rval) + else # false and false == false + false + end + when "or" + if Puppet::Parser::Scope.true?(lval) + true + else + rval = @rval.safeevaluate(scope) + Puppet::Parser::Scope.true?(rval) + end + end + end + + def initialize(hash) + super + + raise ArgumentError, "Invalid boolean operator #{@operator}" unless %w{and or}.include?(@operator) + end + end +end diff --git a/mcollective/lib/puppet/parser/ast/branch.rb b/mcollective/lib/puppet/parser/ast/branch.rb new file mode 100644 index 000000000..73a2f674b --- /dev/null +++ b/mcollective/lib/puppet/parser/ast/branch.rb @@ -0,0 +1,37 @@ +class Puppet::Parser::AST + # The parent class of all AST objects that contain other AST objects. + # Everything but the really simple objects descend from this. It is + # important to note that Branch objects contain other AST objects only -- + # if you want to contain values, use a descendent of the AST::Leaf class. + class Branch < AST + include Enumerable + attr_accessor :pin, :children + + # Yield each contained AST node in turn. Used mostly by 'evaluate'. + # This definition means that I don't have to override 'evaluate' + # every time, but each child of Branch will likely need to override + # this method. + def each + @children.each { |child| + yield child + } + end + + # Initialize our object. Largely relies on the method from the base + # class, but also does some verification. + def initialize(arghash) + super(arghash) + + # Create the hash, if it was not set at initialization time. + @children ||= [] + + # Verify that we only got valid AST nodes. + @children.each { |child| + unless child.is_a?(AST) + raise Puppet::DevError, + "child #{child} is a #{child.class} instead of ast" + end + } + end + end +end diff --git a/mcollective/lib/puppet/parser/ast/caseopt.rb b/mcollective/lib/puppet/parser/ast/caseopt.rb new file mode 100644 index 000000000..4e296e82f --- /dev/null +++ b/mcollective/lib/puppet/parser/ast/caseopt.rb @@ -0,0 +1,64 @@ +require 'puppet/parser/ast/branch' + +class Puppet::Parser::AST + # Each individual option in a case statement. 
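+ # In manifest terms (an illustrative snippet), each branch of
+ #
+ #   case $operatingsystem {
+ #     'centos', 'redhat': { $package = 'httpd' }
+ #     default:            { $package = 'apache2' }
+ #   }
+ #
+ # becomes one CaseOpt: @value holds the comparison values for the
+ # branch and @statements holds its body; default? reports whether the
+ # branch is the "default" one.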
+ class CaseOpt < AST::Branch + attr_accessor :value, :statements + + # CaseOpt is a bit special -- we just want the value first, + # so that CaseStatement can compare, and then it will selectively + # decide whether to fully evaluate this option + + def each + [@value,@statements].each { |child| yield child } + end + + # Are we the default option? + def default? + # Cache the @default value. + return @default if defined?(@default) + + if @value.is_a?(AST::ASTArray) + @value.each { |subval| + if subval.is_a?(AST::Default) + @default = true + break + end + } + else + @default = true if @value.is_a?(AST::Default) + end + + @default ||= false + + @default + end + + # You can specify a list of values; return each in turn. + def eachvalue(scope) + if @value.is_a?(AST::ASTArray) + @value.each { |subval| + yield subval.safeevaluate(scope) + } + else + yield @value.safeevaluate(scope) + end + end + + def eachopt + if @value.is_a?(AST::ASTArray) + @value.each { |subval| + yield subval + } + else + yield @value + end + end + + # Evaluate the actual statements; this only gets called if + # our option matched. + def evaluate(scope) + @statements.safeevaluate(scope) + end + end +end diff --git a/mcollective/lib/puppet/parser/ast/casestatement.rb b/mcollective/lib/puppet/parser/ast/casestatement.rb new file mode 100644 index 000000000..8370d11f3 --- /dev/null +++ b/mcollective/lib/puppet/parser/ast/casestatement.rb @@ -0,0 +1,44 @@ +require 'puppet/parser/ast/branch' + +class Puppet::Parser::AST + # The basic logical structure in Puppet. Supports a list of + # tests and statement arrays. + class CaseStatement < AST::Branch + attr_accessor :test, :options, :default + + associates_doc + + # Short-curcuit evaluation. Return the value of the statements for + # the first option that matches. + def evaluate(scope) + level = scope.ephemeral_level + + value = @test.safeevaluate(scope) + + retvalue = nil + found = false + + # Iterate across the options looking for a match. + default = nil + @options.each do |option| + option.eachopt do |opt| + return option.safeevaluate(scope) if opt.evaluate_match(value, scope) + end + + default = option if option.default? + end + + # Unless we found something, look for the default. + return default.safeevaluate(scope) if default + + Puppet.debug "No true answers and no default" + return nil + ensure + scope.unset_ephemeral_var(level) + end + + def each + [@test,@options].each { |child| yield child } + end + end +end diff --git a/mcollective/lib/puppet/parser/ast/collection.rb b/mcollective/lib/puppet/parser/ast/collection.rb new file mode 100644 index 000000000..565b83195 --- /dev/null +++ b/mcollective/lib/puppet/parser/ast/collection.rb @@ -0,0 +1,49 @@ +require 'puppet' +require 'puppet/parser/ast/branch' +require 'puppet/parser/collector' + +# An object that collects stored objects from the central cache and returns +# them to the current host, yo. +class Puppet::Parser::AST + class Collection < AST::Branch + attr_accessor :type, :query, :form + attr_reader :override + + associates_doc + + # We return an object that does a late-binding evaluation. + def evaluate(scope) + str, code = query && query.safeevaluate(scope) + + resource_type = scope.find_resource_type(@type) + fail "Resource type #{@type} doesn't exist" unless resource_type + newcoll = Puppet::Parser::Collector.new(scope, resource_type.name, str, code, self.form) + + scope.compiler.add_collection(newcoll) + + # overrides if any + # Evaluate all of the specified params. 
+ if @override + params = @override.collect { |param| param.safeevaluate(scope) } + newcoll.add_override( + :parameters => params, + :file => @file, + :line => @line, + :source => scope.source, + :scope => scope + ) + end + + newcoll + end + + # Handle our parameter ourselves + def override=(override) + @override = if override.is_a?(AST::ASTArray) + override + else + AST::ASTArray.new(:line => override.line,:file => override.file,:children => [override]) + end + end + end +end diff --git a/mcollective/lib/puppet/parser/ast/collexpr.rb b/mcollective/lib/puppet/parser/ast/collexpr.rb new file mode 100644 index 000000000..f912b4b33 --- /dev/null +++ b/mcollective/lib/puppet/parser/ast/collexpr.rb @@ -0,0 +1,86 @@ +require 'puppet' +require 'puppet/parser/ast/branch' +require 'puppet/parser/collector' + +# An object that collects stored objects from the central cache and returns +# them to the current host, yo. +class Puppet::Parser::AST +class CollExpr < AST::Branch + attr_accessor :test1, :test2, :oper, :form, :type, :parens + + # We return an object that does a late-binding evaluation. + def evaluate(scope) + # Make sure our contained expressions have all the info they need. + [@test1, @test2].each do |t| + if t.is_a?(self.class) + t.form ||= self.form + t.type ||= self.type + end + end + + # The code is only used for virtual lookups + str1, code1 = @test1.safeevaluate scope + str2, code2 = @test2.safeevaluate scope + + # First build up the virtual code. + # If we're a conjunction operator, then we're calling code. I did + # some speed comparisons, and it's at least twice as fast doing these + # case statements as doing an eval here. + code = proc do |resource| + case @oper + when "and"; code1.call(resource) and code2.call(resource) + when "or"; code1.call(resource) or code2.call(resource) + when "==" + if str1 == "tag" + resource.tagged?(str2) + else + if resource[str1].is_a?(Array) + resource[str1].include?(str2) + else + resource[str1] == str2 + end + end + when "!="; resource[str1] != str2 + end + end + + # Now build up the rails conditions code + if self.parens and self.form == :exported + Puppet.warning "Parentheses are ignored in Rails searches" + end + + case @oper + when "and", "or" + if form == :exported + raise Puppet::ParseError, "Puppet does not currently support collecting exported resources with more than one condition" + end + oper = @oper.upcase + when "=="; oper = "=" + else + oper = @oper + end + + if oper == "=" or oper == "!=" + # Add the rails association info where necessary + case str1 + when "title" + str = "title #{oper} '#{str2}'" + when "tag" + str = "puppet_tags.name #{oper} '#{str2}'" + else + str = "param_values.value #{oper} '#{str2}' and param_names.name = '#{str1}'" + end + else + str = "(#{str1}) #{oper} (#{str2})" + end + + return str, code + end + + def initialize(hash = {}) + super + + raise ArgumentError, "Invalid operator #{@oper}" unless %w{== != and or}.include?(@oper) + end +end +end diff --git a/mcollective/lib/puppet/parser/ast/comparison_operator.rb b/mcollective/lib/puppet/parser/ast/comparison_operator.rb new file mode 100644 index 000000000..039c81df8 --- /dev/null +++ b/mcollective/lib/puppet/parser/ast/comparison_operator.rb @@ -0,0 +1,38 @@ +require 'puppet' +require 'puppet/parser/ast/branch' + +class Puppet::Parser::AST + class ComparisonOperator < AST::Branch + + attr_accessor :operator, :lval, :rval + + # Iterate across all of our children. 
+ def each + [@lval,@rval,@operator].each { |child| yield child } + end + + # Returns a boolean which is the result of the boolean operation + # of lval and rval operands + def evaluate(scope) + # evaluate the operands, should return a boolean value + lval = @lval.safeevaluate(scope) + + case @operator + when "==","!=" + @rval.evaluate_match(lval, scope) ? @operator == '==' : @operator == '!=' + else + rval = @rval.safeevaluate(scope) + rval = Puppet::Parser::Scope.number?(rval) || rval + lval = Puppet::Parser::Scope.number?(lval) || lval + + lval.send(@operator,rval) + end + end + + def initialize(hash) + super + + raise ArgumentError, "Invalid comparison operator #{@operator}" unless %w{== != < > <= >=}.include?(@operator) + end + end +end diff --git a/mcollective/lib/puppet/parser/ast/else.rb b/mcollective/lib/puppet/parser/ast/else.rb new file mode 100644 index 000000000..172149116 --- /dev/null +++ b/mcollective/lib/puppet/parser/ast/else.rb @@ -0,0 +1,22 @@ +require 'puppet/parser/ast/branch' + +class Puppet::Parser::AST + # A separate ElseIf statement; can function as an 'else' if there's no + # test. + class Else < AST::Branch + + associates_doc + + attr_accessor :statements + + def each + yield @statements + end + + # Evaluate the actual statements; this only gets called if + # our test was true matched. + def evaluate(scope) + @statements.safeevaluate(scope) + end + end +end diff --git a/mcollective/lib/puppet/parser/ast/function.rb b/mcollective/lib/puppet/parser/ast/function.rb new file mode 100644 index 000000000..80e6e6512 --- /dev/null +++ b/mcollective/lib/puppet/parser/ast/function.rb @@ -0,0 +1,50 @@ +require 'puppet/parser/ast/branch' + +class Puppet::Parser::AST + # An AST object to call a function. + class Function < AST::Branch + + associates_doc + + attr_accessor :name, :arguments + + @settor = true + + def evaluate(scope) + # Make sure it's a defined function + raise Puppet::ParseError, "Unknown function #{@name}" unless Puppet::Parser::Functions.function(@name) + + # Now check that it's been used correctly + case @ftype + when :rvalue + raise Puppet::ParseError, "Function '#{@name}' does not return a value" unless Puppet::Parser::Functions.rvalue?(@name) + when :statement + if Puppet::Parser::Functions.rvalue?(@name) + raise Puppet::ParseError, + "Function '#{@name}' must be the value of a statement" + end + else + raise Puppet::DevError, "Invalid function type #{@ftype.inspect}" + end + + # We don't need to evaluate the name, because it's plaintext + args = @arguments.safeevaluate(scope).map { |x| x == :undef ? '' : x } + + scope.send("function_#{@name}", args) + end + + def initialize(hash) + @ftype = hash[:ftype] || :rvalue + hash.delete(:ftype) if hash.include? :ftype + + super(hash) + + # Lastly, check the parity + end + + def to_s + args = arguments.is_a?(ASTArray) ? arguments.to_s.gsub(/\[(.*)\]/,'\1') : arguments + "#{name}(#{args})" + end + end +end diff --git a/mcollective/lib/puppet/parser/ast/ifstatement.rb b/mcollective/lib/puppet/parser/ast/ifstatement.rb new file mode 100644 index 000000000..7fd8a576a --- /dev/null +++ b/mcollective/lib/puppet/parser/ast/ifstatement.rb @@ -0,0 +1,34 @@ +require 'puppet/parser/ast/branch' + +class Puppet::Parser::AST + # A basic 'if/elsif/else' statement. + class IfStatement < AST::Branch + + associates_doc + + attr_accessor :test, :else, :statements + + def each + [@test,@else,@statements].each { |child| yield child } + end + + # Short-curcuit evaluation. 
If we're true, evaluate our statements, + # else if there's an 'else' setting, evaluate it. + # the first option that matches. + def evaluate(scope) + level = scope.ephemeral_level + value = @test.safeevaluate(scope) + + # let's emulate a new scope for each branches + begin + if Puppet::Parser::Scope.true?(value) + return @statements.safeevaluate(scope) + else + return defined?(@else) ? @else.safeevaluate(scope) : nil + end + ensure + scope.unset_ephemeral_var(level) + end + end + end +end diff --git a/mcollective/lib/puppet/parser/ast/in_operator.rb b/mcollective/lib/puppet/parser/ast/in_operator.rb new file mode 100644 index 000000000..2a163e726 --- /dev/null +++ b/mcollective/lib/puppet/parser/ast/in_operator.rb @@ -0,0 +1,24 @@ +require 'puppet' +require 'puppet/parser/ast/branch' + +class Puppet::Parser::AST + class InOperator < AST::Branch + + attr_accessor :lval, :rval + + # Returns a boolean which is the result of the 'in' operation + # of lval and rval operands + def evaluate(scope) + + # evaluate the operands, should return a boolean value + lval = @lval.safeevaluate(scope) + raise ArgumentError, "'#{lval}' from left operand of 'in' expression is not a string" unless lval.is_a?(::String) + + rval = @rval.safeevaluate(scope) + unless rval.respond_to?(:include?) + raise ArgumentError, "'#{rval}' from right operand of 'in' expression is not of a supported type (string, array or hash)" + end + rval.include?(lval) + end + end +end diff --git a/mcollective/lib/puppet/parser/ast/leaf.rb b/mcollective/lib/puppet/parser/ast/leaf.rb new file mode 100644 index 000000000..77617e992 --- /dev/null +++ b/mcollective/lib/puppet/parser/ast/leaf.rb @@ -0,0 +1,221 @@ +class Puppet::Parser::AST + # The base class for all of the leaves of the parse trees. These + # basically just have types and values. Both of these parameters + # are simple values, not AST objects. + class Leaf < AST + attr_accessor :value, :type + + # Return our value. + def evaluate(scope) + @value + end + + def match(value) + @value == value + end + + def to_s + @value.to_s unless @value.nil? + end + end + + # The boolean class. True or false. Converts the string it receives + # to a Ruby boolean. + class Boolean < AST::Leaf + + # Use the parent method, but then convert to a real boolean. + def initialize(hash) + super + + unless @value == true or @value == false + raise Puppet::DevError, + "'#{@value}' is not a boolean" + end + @value + end + + def to_s + @value ? "true" : "false" + end + end + + # The base string class. + class String < AST::Leaf + def evaluate(scope) + @value.dup + end + + def to_s + "\"#{@value}\"" + end + end + + # An uninterpreted string. + class FlatString < AST::Leaf + def evaluate(scope) + @value + end + + def to_s + "\"#{@value}\"" + end + end + + class Concat < AST::Leaf + def evaluate(scope) + @value.collect { |x| x.evaluate(scope) }.collect{ |x| x == :undef ? '' : x }.join + end + + def to_s + "#{@value.map { |s| s.to_s.gsub(/^"(.*)"$/, '\1') }.join}" + end + end + + # The 'default' option on case statements and selectors. + class Default < AST::Leaf; end + + # Capitalized words; used mostly for type-defaults, but also + # get returned by the lexer any other time an unquoted capitalized + # word is found. + class Type < AST::Leaf; end + + # Lower-case words. 
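+ # (Illustrative note, not from the original source: a bare word in a
+ # manifest, e.g. the `present` in `ensure => present`, is lexed into a Name
+ # leaf, while a quoted "present" becomes a String leaf and an interpolated
+ # "puppet-${version}" becomes a Concat of leaves.)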
+ class Name < AST::Leaf; end + + # double-colon separated class names + class ClassName < AST::Leaf; end + + # undef values; equiv to nil + class Undef < AST::Leaf; end + + # Host names, either fully qualified or just the short name, or even a regex + class HostName < AST::Leaf + def initialize(hash) + super + + # Note that this is an AST::Regex, not a Regexp + @value = @value.to_s.downcase unless @value.is_a?(Regex) + if @value =~ /[^-\w.]/ + raise Puppet::DevError, + "'#{@value}' is not a valid hostname" + end + end + + # implementing eql? and hash so that when an HostName is stored + # in a hash it has the same hashing properties as the underlying value + def eql?(value) + value = value.value if value.is_a?(HostName) + @value.eql?(value) + end + + def hash + @value.hash + end + + def to_s + @value.to_s + end + end + + # A simple variable. This object is only used during interpolation; + # the VarDef class is used for assignment. + class Variable < Name + # Looks up the value of the object in the scope tree (does + # not include syntactical constructs, like '$' and '{}'). + def evaluate(scope) + parsewrap do + if (var = scope.lookupvar(@value, false)) == :undefined + var = :undef + end + var + end + end + + def to_s + "\$#{value}" + end + end + + class HashOrArrayAccess < AST::Leaf + attr_accessor :variable, :key + + def evaluate_container(scope) + container = variable.respond_to?(:evaluate) ? variable.safeevaluate(scope) : variable + (container.is_a?(Hash) or container.is_a?(Array)) ? container : scope.lookupvar(container) + end + + def evaluate_key(scope) + key.respond_to?(:evaluate) ? key.safeevaluate(scope) : key + end + + def array_index_or_key(object, key) + if object.is_a?(Array) + raise Puppet::ParserError, "#{key} is not an integer, but is used as an index of an array" unless key = Puppet::Parser::Scope.number?(key) + end + key + end + + def evaluate(scope) + object = evaluate_container(scope) + accesskey = evaluate_key(scope) + + raise Puppet::ParseError, "#{variable} is not an hash or array when accessing it with #{accesskey}" unless object.is_a?(Hash) or object.is_a?(Array) + + object[array_index_or_key(object, accesskey)] + end + + # Assign value to this hashkey or array index + def assign(scope, value) + object = evaluate_container(scope) + accesskey = evaluate_key(scope) + + if object.is_a?(Hash) and object.include?(accesskey) + raise Puppet::ParseError, "Assigning to the hash '#{variable}' with an existing key '#{accesskey}' is forbidden" + end + + # assign to hash or array + object[array_index_or_key(object, accesskey)] = value + end + + def to_s + "\$#{variable.to_s}[#{key.to_s}]" + end + end + + class Regex < AST::Leaf + def initialize(hash) + super + @value = Regexp.new(@value) unless @value.is_a?(Regexp) + end + + # we're returning self here to wrap the regexp and to be used in places + # where a string would have been used, without modifying any client code. + # For instance, in many places we have the following code snippet: + # val = @val.safeevaluate(@scope) + # if val.match(otherval) + # ... + # end + # this way, we don't have to modify this test specifically for handling + # regexes. + def evaluate(scope) + self + end + + def evaluate_match(value, scope, options = {}) + value = value.is_a?(String) ? 
value : value.to_s + + if matched = @value.match(value) + scope.ephemeral_from(matched, options[:file], options[:line]) + end + matched + end + + def match(value) + @value.match(value) + end + + def to_s + "/#{@value.source}/" + end + end +end diff --git a/mcollective/lib/puppet/parser/ast/match_operator.rb b/mcollective/lib/puppet/parser/ast/match_operator.rb new file mode 100644 index 000000000..6207a8c2c --- /dev/null +++ b/mcollective/lib/puppet/parser/ast/match_operator.rb @@ -0,0 +1,28 @@ +require 'puppet' +require 'puppet/parser/ast/branch' + +class Puppet::Parser::AST + class MatchOperator < AST::Branch + + attr_accessor :lval, :rval, :operator + + # Iterate across all of our children. + def each + [@lval,@rval].each { |child| yield child } + end + + # Returns a boolean which is the result of the boolean operation + # of lval and rval operands + def evaluate(scope) + lval = @lval.safeevaluate(scope) + + return(rval.evaluate_match(lval, scope) ? @operator == "=~" : @operator == "!~") + end + + def initialize(hash) + super + + raise ArgumentError, "Invalid regexp operator #{@operator}" unless %w{!~ =~}.include?(@operator) + end + end +end diff --git a/mcollective/lib/puppet/parser/ast/minus.rb b/mcollective/lib/puppet/parser/ast/minus.rb new file mode 100644 index 000000000..d7a362aa1 --- /dev/null +++ b/mcollective/lib/puppet/parser/ast/minus.rb @@ -0,0 +1,23 @@ +require 'puppet' +require 'puppet/parser/ast/branch' + +# An object that returns a boolean which is the boolean not +# of the given value. +class Puppet::Parser::AST + class Minus < AST::Branch + attr_accessor :value + + def each + yield @value + end + + def evaluate(scope) + val = @value.safeevaluate(scope) + val = Puppet::Parser::Scope.number?(val) + if val == nil + raise ArgumentError, "minus operand #{val} is not a number" + end + -val + end + end +end diff --git a/mcollective/lib/puppet/parser/ast/nop.rb b/mcollective/lib/puppet/parser/ast/nop.rb new file mode 100644 index 000000000..bf35c6a5c --- /dev/null +++ b/mcollective/lib/puppet/parser/ast/nop.rb @@ -0,0 +1,11 @@ +require 'puppet/parser/ast/branch' + +class Puppet::Parser::AST + # This class is a no-op, it doesn't produce anything + # when evaluated, hence it's name :-) + class Nop < AST::Leaf + def evaluate(scope) + # nothing to do + end + end +end diff --git a/mcollective/lib/puppet/parser/ast/not.rb b/mcollective/lib/puppet/parser/ast/not.rb new file mode 100644 index 000000000..30fa6d503 --- /dev/null +++ b/mcollective/lib/puppet/parser/ast/not.rb @@ -0,0 +1,19 @@ +require 'puppet' +require 'puppet/parser/ast/branch' + +# An object that returns a boolean which is the boolean not +# of the given value. +class Puppet::Parser::AST + class Not < AST::Branch + attr_accessor :value + + def each + yield @value + end + + def evaluate(scope) + val = @value.safeevaluate(scope) + ! Puppet::Parser::Scope.true?(val) + end + end +end diff --git a/mcollective/lib/puppet/parser/ast/relationship.rb b/mcollective/lib/puppet/parser/ast/relationship.rb new file mode 100644 index 000000000..a7134a04f --- /dev/null +++ b/mcollective/lib/puppet/parser/ast/relationship.rb @@ -0,0 +1,60 @@ +require 'puppet/parser/ast' +require 'puppet/parser/ast/branch' +require 'puppet/parser/relationship' + +class Puppet::Parser::AST::Relationship < Puppet::Parser::AST::Branch + RELATIONSHIP_TYPES = %w{-> <- ~> <~} + + attr_accessor :left, :right, :arrow, :type + + def actual_left + chained? ? 
left.right : left + end + + # Evaluate our object, but just return a simple array of the type + # and name. + def evaluate(scope) + if chained? + real_left = left.safeevaluate(scope) + left_dep = left_dep.shift if left_dep.is_a?(Array) + else + real_left = left.safeevaluate(scope) + end + real_right = right.safeevaluate(scope) + + source, target = sides2edge(real_left, real_right) + result = Puppet::Parser::Relationship.new(source, target, type) + scope.compiler.add_relationship(result) + real_right + end + + def initialize(left, right, arrow, args = {}) + super(args) + unless RELATIONSHIP_TYPES.include?(arrow) + raise ArgumentError, "Invalid relationship type #{arrow.inspect}; valid types are #{RELATIONSHIP_TYPES.collect { |r| r.to_s }.join(", ")}" + end + @left, @right, @arrow = left, right, arrow + end + + def type + subscription? ? :subscription : :relationship + end + + def sides2edge(left, right) + out_edge? ? [left, right] : [right, left] + end + + private + + def chained? + left.is_a?(self.class) + end + + def out_edge? + ["->", "~>"].include?(arrow) + end + + def subscription? + ["~>", "<~"].include?(arrow) + end +end diff --git a/mcollective/lib/puppet/parser/ast/resource.rb b/mcollective/lib/puppet/parser/ast/resource.rb new file mode 100644 index 000000000..b019e6aac --- /dev/null +++ b/mcollective/lib/puppet/parser/ast/resource.rb @@ -0,0 +1,75 @@ +require 'puppet/parser/ast/resource_reference' + +# Any normal puppet resource declaration. Can point to a definition or a +# builtin type. +class Puppet::Parser::AST +class Resource < AST::ResourceReference + + associates_doc + + attr_accessor :title, :type, :exported, :virtual + attr_reader :parameters + + # Does not actually return an object; instead sets an object + # in the current scope. + def evaluate(scope) + # Evaluate all of the specified params. + paramobjects = parameters.collect { |param| + param.safeevaluate(scope) + } + + resource_titles = @title.safeevaluate(scope) + + # it's easier to always use an array, even for only one name + resource_titles = [resource_titles] unless resource_titles.is_a?(Array) + + # We want virtual to be true if exported is true. We can't + # just set :virtual => self.virtual in the initialization, + # because sometimes the :virtual attribute is set *after* + # :exported, in which case it clobbers :exported if :exported + # is true. Argh, this was a very tough one to track down. + virt = self.virtual || self.exported + + # This is where our implicit iteration takes place; if someone + # passed an array as the name, then we act just like the called us + # many times. + fully_qualified_type, resource_titles = scope.resolve_type_and_titles(type, resource_titles) + + resource_titles.flatten.collect { |resource_title| + exceptwrap :type => Puppet::ParseError do + resource = Puppet::Parser::Resource.new( + fully_qualified_type, resource_title, + :parameters => paramobjects, + :file => self.file, + :line => self.line, + :exported => self.exported, + :virtual => virt, + :source => scope.source, + :scope => scope, + :strict => true + ) + + if resource.resource_type.is_a? Puppet::Resource::Type + resource.resource_type.instantiate_resource(scope, resource) + end + scope.compiler.add_resource(scope, resource) + scope.compiler.evaluate_classes([resource_title],scope,false) if fully_qualified_type == 'class' + resource + end + }.reject { |resource| resource.nil? } + end + + # Set the parameters for our object. 
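+ # Illustrative note (not part of the original source): a declaration with a
+ # single attribute, e.g.
+ #
+ #   file { '/tmp/example': ensure => file }
+ #
+ # hands this setter one ResourceParam rather than an ASTArray, so it is
+ # wrapped below to keep the parameters.collect call in evaluate uniform.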
+ def parameters=(params) + if params.is_a?(AST::ASTArray) + @parameters = params + else + @parameters = AST::ASTArray.new( + :line => params.line, + :file => params.file, + :children => [params] + ) + end + end +end +end diff --git a/mcollective/lib/puppet/parser/ast/resource_defaults.rb b/mcollective/lib/puppet/parser/ast/resource_defaults.rb new file mode 100644 index 000000000..812b979e9 --- /dev/null +++ b/mcollective/lib/puppet/parser/ast/resource_defaults.rb @@ -0,0 +1,24 @@ +require 'puppet/parser/ast/branch' + +class Puppet::Parser::AST + # A statement syntactically similar to an ResourceDef, but uses a + # capitalized object type and cannot have a name. + class ResourceDefaults < AST::Branch + attr_accessor :type, :parameters + + associates_doc + + # As opposed to ResourceDef, this stores each default for the given + # object type. + def evaluate(scope) + # Use a resource reference to canonize the type + ref = Puppet::Resource.new(@type, "whatever") + type = ref.type + params = @parameters.safeevaluate(scope) + + parsewrap do + scope.setdefaults(type, params) + end + end + end +end diff --git a/mcollective/lib/puppet/parser/ast/resource_override.rb b/mcollective/lib/puppet/parser/ast/resource_override.rb new file mode 100644 index 000000000..e0be889ff --- /dev/null +++ b/mcollective/lib/puppet/parser/ast/resource_override.rb @@ -0,0 +1,68 @@ +require 'puppet/parser/ast/resource' + +class Puppet::Parser::AST + # Set a parameter on a resource specification created somewhere else in the + # configuration. The object is responsible for verifying that this is allowed. + class ResourceOverride < Resource + + associates_doc + + attr_accessor :object + attr_reader :parameters + + # Iterate across all of our children. + def each + [@object,@parameters].flatten.each { |param| + #Puppet.debug("yielding param #{param}") + yield param + } + end + + # Does not actually return an object; instead sets an object + # in the current scope. + def evaluate(scope) + # Get our object reference. + resource = @object.safeevaluate(scope) + + hash = {} + + # Evaluate all of the specified params. + params = @parameters.collect { |param| + param.safeevaluate(scope) + } + + # Now we just create a normal resource, but we call a very different + # method on the scope. + resource = [resource] unless resource.is_a?(Array) + + resource = resource.collect do |r| + + res = Puppet::Parser::Resource.new( + r.type, r.title, + :parameters => params, + :file => file, + :line => line, + :source => scope.source, + + :scope => scope + ) + + # Now we tell the scope that it's an override, and it behaves as + # necessary. + scope.compiler.add_override(res) + + res + end + # decapsulate array in case of only one item + return(resource.length == 1 ? resource.pop : resource) + end + + # Create our ResourceDef. Handles type checking for us. + def initialize(hash) + @checked = false + super + + #self.typecheck(@type.value) + end + end +end diff --git a/mcollective/lib/puppet/parser/ast/resource_reference.rb b/mcollective/lib/puppet/parser/ast/resource_reference.rb new file mode 100644 index 000000000..0f8e655bf --- /dev/null +++ b/mcollective/lib/puppet/parser/ast/resource_reference.rb @@ -0,0 +1,28 @@ +require 'puppet/parser/ast' +require 'puppet/parser/ast/branch' + +class Puppet::Parser::AST::ResourceReference < Puppet::Parser::AST::Branch + attr_accessor :title, :type + + # Evaluate our object, but just return a simple array of the type + # and name. 
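+ # Illustrative sketch (not from the original source): a reference such as
+ #
+ #   File['/etc/motd']
+ #
+ # evaluates to a single Puppet::Resource, while
+ # File['/etc/motd', '/etc/issue'] evaluates to an array of two, thanks to
+ # the length check on the last line of the method.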
+ def evaluate(scope) + titles = Array(title.safeevaluate(scope)) + + a_type, titles = scope.resolve_type_and_titles(type, titles) + + resources = titles.collect{ |a_title| + Puppet::Resource.new(a_type, a_title) + } + + return(resources.length == 1 ? resources.pop : resources) + end + + def to_s + if title.is_a?(Puppet::Parser::AST::ASTArray) + "#{type.to_s.capitalize}#{title}" + else + "#{type.to_s.capitalize}[#{title}]" + end + end +end diff --git a/mcollective/lib/puppet/parser/ast/resourceparam.rb b/mcollective/lib/puppet/parser/ast/resourceparam.rb new file mode 100644 index 000000000..4073a197b --- /dev/null +++ b/mcollective/lib/puppet/parser/ast/resourceparam.rb @@ -0,0 +1,29 @@ +require 'puppet/parser/ast/branch' + +class Puppet::Parser::AST + # The AST object for the parameters inside ResourceDefs and Selectors. + class ResourceParam < AST::Branch + attr_accessor :value, :param, :add + + def each + [@param,@value].each { |child| yield child } + end + + # Return the parameter and the value. + def evaluate(scope) + + return Puppet::Parser::Resource::Param.new( + + :name => @param, + :value => @value.safeevaluate(scope), + + :source => scope.source, :line => self.line, :file => self.file, + :add => self.add + ) + end + + def to_s + "#{@param} => #{@value.to_s}" + end + end +end diff --git a/mcollective/lib/puppet/parser/ast/selector.rb b/mcollective/lib/puppet/parser/ast/selector.rb new file mode 100644 index 000000000..d6a4ea436 --- /dev/null +++ b/mcollective/lib/puppet/parser/ast/selector.rb @@ -0,0 +1,44 @@ +require 'puppet/parser/ast/branch' + +class Puppet::Parser::AST + # The inline conditional operator. Unlike CaseStatement, which executes + # code, we just return a value. + class Selector < AST::Branch + attr_accessor :param, :values + + def each + [@param,@values].each { |child| yield child } + end + + # Find the value that corresponds with the test. + def evaluate(scope) + level = scope.ephemeral_level + # Get our parameter. + paramvalue = @param.safeevaluate(scope) + + default = nil + + @values = [@values] unless @values.instance_of? AST::ASTArray or @values.instance_of? Array + + # Then look for a match in the options. + @values.each do |obj| + # short circuit asap if we have a match + return obj.value.safeevaluate(scope) if obj.param.evaluate_match(paramvalue, scope) + + # Store the default, in case it's necessary. + default = obj if obj.param.is_a?(Default) + end + + # Unless we found something, look for the default. + return default.value.safeevaluate(scope) if default + + self.fail Puppet::ParseError, "No matching value for selector param '#{paramvalue}'" + ensure + scope.unset_ephemeral_var(level) + end + + def to_s + param.to_s + " ? { " + values.collect { |v| v.to_s }.join(', ') + " }" + end + end +end diff --git a/mcollective/lib/puppet/parser/ast/tag.rb b/mcollective/lib/puppet/parser/ast/tag.rb new file mode 100644 index 000000000..6f906a1c6 --- /dev/null +++ b/mcollective/lib/puppet/parser/ast/tag.rb @@ -0,0 +1,24 @@ +require 'puppet/parser/ast/branch' + +class Puppet::Parser::AST + # The code associated with a class. This is different from components + # in that each class is a singleton -- only one will exist for a given + # node. + class Tag < AST::Branch + @name = :class + attr_accessor :type + + def evaluate(scope) + types = @type.safeevaluate(scope) + + types = [types] unless types.is_a? Array + + types.each do |type| + # Now set our class. We don't have to worry about checking + # whether we've been evaluated because we're not evaluating + # any code. 
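+ # (Illustrative note, not in the original source: whether @type evaluated
+ # to a single name or an array of names, each one is simply recorded on the
+ # scope below, keyed on this AST node's object_id, so evaluating the same
+ # node twice is harmless.)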
+ scope.setclass(self.object_id, type) + end + end + end +end diff --git a/mcollective/lib/puppet/parser/ast/vardef.rb b/mcollective/lib/puppet/parser/ast/vardef.rb new file mode 100644 index 000000000..6de1860c8 --- /dev/null +++ b/mcollective/lib/puppet/parser/ast/vardef.rb @@ -0,0 +1,33 @@ +require 'puppet/parser/ast/branch' + +class Puppet::Parser::AST + # Define a variable. Stores the value in the current scope. + class VarDef < AST::Branch + + associates_doc + + attr_accessor :name, :value, :append + + @settor = true + + # Look up our name and value, and store them appropriately. The + # lexer strips off the syntax stuff like '$'. + def evaluate(scope) + value = @value.safeevaluate(scope) + if name.is_a?(HashOrArrayAccess) + name.assign(scope, value) + else + name = @name.safeevaluate(scope) + + parsewrap do + scope.setvar(name,value, :file => @file, :line => @line, :append => @append) + end + end + end + + def each + [@name,@value].each { |child| yield child } + end + end + +end diff --git a/mcollective/lib/puppet/parser/collector.rb b/mcollective/lib/puppet/parser/collector.rb new file mode 100644 index 000000000..de60cb170 --- /dev/null +++ b/mcollective/lib/puppet/parser/collector.rb @@ -0,0 +1,223 @@ +# An object that collects stored objects from the central cache and returns +# them to the current host, yo. +class Puppet::Parser::Collector + attr_accessor :type, :scope, :vquery, :equery, :form, :resources, :overrides, :collected + + # Call the collection method, mark all of the returned objects as non-virtual, + # optionally applying parameter overrides. The collector can also delete himself + # from the compiler if there is no more resources to collect (valid only for resource fixed-set collector + # which get their resources from +collect_resources+ and not from the catalog) + def evaluate + # Shortcut if we're not using storeconfigs and they're trying to collect + # exported resources. + if form == :exported and Puppet[:storeconfigs] != true + Puppet.warning "Not collecting exported resources without storeconfigs" + return false + end + + if self.resources + unless objects = collect_resources and ! objects.empty? + return false + end + else + method = "collect_#{@form.to_s}" + objects = send(method).each do |obj| + obj.virtual = false + end + return false if objects.empty? + end + + # we have an override for the collected resources + if @overrides and !objects.empty? + + # force the resource to be always child of any other resource + overrides[:source].meta_def(:child_of?) do + true + end + + # tell the compiler we have some override for him unless we already + # overrided those resources + objects.each do |res| + unless @collected.include?(res.ref) + + newres = Puppet::Parser::Resource.new( + res.type, res.title, + :parameters => overrides[:parameters], + :file => overrides[:file], + :line => overrides[:line], + :source => overrides[:source], + + :scope => overrides[:scope] + ) + + scope.compiler.add_override(newres) + end + end + end + + # filter out object that already have been collected by ourself + objects.reject! { |o| @collected.include?(o.ref) } + + return false if objects.empty? 
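+ # (Illustrative sketch, not part of the original source: a collection with
+ # an override block such as
+ #
+ #   User <| title == 'luke' |> { groups +> 'admin' }
+ #
+ # reaches this point with the matching virtual users in `objects`, the
+ # groups override already queued through add_override above, and anything
+ # picked up on an earlier pass filtered back out again.)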
+ + # keep an eye on the resources we have collected + objects.inject(@collected) { |c,o| c[o.ref]=o; c } + + # return our newly collected resources + objects + end + + def initialize(scope, type, equery, vquery, form) + @scope = scope + + # initialisation + @collected = {} + + # Canonize the type + @type = Puppet::Resource.new(type, "whatever").type + @equery = equery + @vquery = vquery + + raise(ArgumentError, "Invalid query form #{form}") unless [:exported, :virtual].include?(form) + @form = form + end + + # add a resource override to the soon to be exported/realized resources + def add_override(hash) + raise ArgumentError, "Exported resource try to override without parameters" unless hash[:parameters] + + # schedule an override for an upcoming collection + @overrides = hash + end + + private + + # Create our active record query. + def build_active_record_query + Puppet::Rails.init unless ActiveRecord::Base.connected? + + raise Puppet::DevError, "Cannot collect resources for a nil host" unless @scope.host + host = Puppet::Rails::Host.find_by_name(@scope.host) + + search = "(exported=? AND restype=?)" + values = [true, @type] + + search += " AND (#{@equery})" if @equery + + # note: + # we're not eagerly including any relations here because + # it can creates so much objects we'll throw out later. + # We used to eagerly include param_names/values but the way + # the search filter is built ruined those efforts and we + # were eagerly loading only the searched parameter and not + # the other ones. + query = {} + case search + when /puppet_tags/ + query = {:joins => {:resource_tags => :puppet_tag}} + when /param_name/ + query = {:joins => {:param_values => :param_name}} + end + + # We're going to collect objects from rails, but we don't want any + # objects from this host. + search = ("host_id != ? AND #{search}") and values.unshift(host.id) if host + + query[:conditions] = [search, *values] + + query + end + + # Collect exported objects. + def collect_exported + # First get everything from the export table. Just reuse our + # collect_virtual method but tell it to use 'exported? for the test. + resources = collect_virtual(true).reject { |r| ! r.virtual? } + + count = resources.length + + query = build_active_record_query + + # Now look them up in the rails db. When we support attribute comparison + # and such, we'll need to vary the conditions, but this works with no + # attributes, anyway. + time = Puppet::Util.thinmark do + Puppet::Rails::Resource.find(:all, query).each do |obj| + if resource = exported_resource(obj) + count += 1 + resources << resource + end + end + end + + scope.debug("Collected %s %s resource%s in %.2f seconds" % [count, @type, count == 1 ? "" : "s", time]) + + resources + end + + def collect_resources + @resources = [@resources] unless @resources.is_a?(Array) + method = "collect_#{form.to_s}_resources" + send(method) + end + + def collect_exported_resources + raise Puppet::ParseError, "realize() is not yet implemented for exported resources" + end + + # Collect resources directly; this is the result of using 'realize', + # which specifies resources, rather than using a normal collection. + def collect_virtual_resources + return [] unless defined?(@resources) and ! @resources.empty? + result = @resources.dup.collect do |ref| + if res = @scope.findresource(ref.to_s) + @resources.delete(ref) + res + end + end.reject { |r| r.nil? }.each do |res| + res.virtual = false + end + + # If there are no more resources to find, delete this from the list + # of collections. 
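+ # (Illustrative example, not from the original source: once
+ # `realize User[luke]` has located User[luke], this fixed-set collector has
+ # nothing left to look for and removes itself from the compiler.)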
+ @scope.compiler.delete_collection(self) if @resources.empty? + + result + end + + # Collect just virtual objects, from our local compiler. + def collect_virtual(exported = false) + scope.compiler.resources.find_all do |resource| + resource.type == @type and (exported ? resource.exported? : true) and match?(resource) + end + end + + # Seek a specific exported resource. + def exported_resource(obj) + if existing = @scope.findresource(obj.restype, obj.title) + # Next see if we've already collected this resource + return nil if existing.rails_id == obj.id + + # This is the one we've already collected + raise Puppet::ParseError, "Exported resource #{obj.ref} cannot override local resource" + end + + resource = obj.to_resource(self.scope) + + resource.exported = false + + scope.compiler.add_resource(scope, resource) + + resource + end + + # Does the resource match our tests? We don't yet support tests, + # so it's always true at the moment. + def match?(resource) + if self.vquery + return self.vquery.call(resource) + else + return true + end + end +end diff --git a/mcollective/lib/puppet/parser/compiler.rb b/mcollective/lib/puppet/parser/compiler.rb new file mode 100644 index 000000000..2d065dad5 --- /dev/null +++ b/mcollective/lib/puppet/parser/compiler.rb @@ -0,0 +1,490 @@ +# Created by Luke A. Kanies on 2007-08-13. +# Copyright (c) 2007. All rights reserved. + +require 'puppet/node' +require 'puppet/resource/catalog' +require 'puppet/util/errors' + +require 'puppet/resource/type_collection_helper' + +# Maintain a graph of scopes, along with a bunch of data +# about the individual catalog we're compiling. +class Puppet::Parser::Compiler + include Puppet::Util + include Puppet::Util::Errors + include Puppet::Resource::TypeCollectionHelper + + def self.compile(node) + # We get these from the environment and only cache them in a thread + # variable for the duration of the compilation. If nothing else is using + # the thread, though, we can leave 'em hanging round with no ill effects, + # and this is safer than cleaning them at the end and assuming that will + # stick until the next entry to this function. + Thread.current[:known_resource_types] = nil + Thread.current[:env_module_directories] = nil + + # ...and we actually do the compile now we have caching ready. + new(node).compile.to_resource + rescue => detail + puts detail.backtrace if Puppet[:trace] + raise Puppet::Error, "#{detail} on node #{node.name}" + end + + attr_reader :node, :facts, :collections, :catalog, :node_scope, :resources, :relationships + + # Add a collection to the global list. + def add_collection(coll) + @collections << coll + end + + def add_relationship(dep) + @relationships << dep + end + + # Store a resource override. + def add_override(override) + # If possible, merge the override in immediately. + if resource = @catalog.resource(override.ref) + resource.merge(override) + else + # Otherwise, store the override for later; these + # get evaluated in Resource#finish. + @resource_overrides[override.ref] << override + end + end + + # Store a resource in our resource table. + def add_resource(scope, resource) + @resources << resource + + # Note that this will fail if the resource is not unique. + @catalog.add_resource(resource) + + if resource.type.to_s.downcase != "class" && resource[:stage] + raise ArgumentError, "Only classes can set 'stage'; normal resources like #{resource} cannot change run stage" + end + + # Stages should not be inside of classes. 
They are always a + # top-level container, regardless of where they appear in the + # manifest. + return if resource.type.to_s.downcase == "stage" + + # This adds a resource to the class it lexically appears in in the + # manifest. + if resource.type.to_s.downcase != "class" + return @catalog.add_edge(scope.resource, resource) + end + end + + # Do we use nodes found in the code, vs. the external node sources? + def ast_nodes? + known_resource_types.nodes? + end + + # Store the fact that we've evaluated a class + def add_class(name) + @catalog.add_class(name) unless name == "" + end + + + # Return a list of all of the defined classes. + def classlist + @catalog.classes + end + + # Compiler our catalog. This mostly revolves around finding and evaluating classes. + # This is the main entry into our catalog. + def compile + # Set the client's parameters into the top scope. + set_node_parameters + create_settings_scope + + evaluate_main + + evaluate_ast_node + + evaluate_node_classes + + evaluate_generators + + finish + + fail_on_unevaluated + + @catalog + end + + # LAK:FIXME There are no tests for this. + def delete_collection(coll) + @collections.delete(coll) if @collections.include?(coll) + end + + # Return the node's environment. + def environment + unless defined?(@environment) + @environment = (node.environment and node.environment != "") ? node.environment : nil + end + Puppet::Node::Environment.current = @environment + @environment + end + + # Evaluate all of the classes specified by the node. + def evaluate_node_classes + evaluate_classes(@node.classes, topscope) + end + + # Evaluate each specified class in turn. If there are any classes we can't + # find, just tag the catalog and move on. This method really just + # creates resource objects that point back to the classes, and then the + # resources are themselves evaluated later in the process. + def evaluate_classes(classes, scope, lazy_evaluate = true) + raise Puppet::DevError, "No source for scope passed to evaluate_classes" unless scope.source + found = [] + param_classes = nil + # if we are a param class, save the classes hash + # and transform classes to be the keys + if classes.class == Hash + param_classes = classes + classes = classes.keys + end + classes.each do |name| + # If we can find the class, then make a resource that will evaluate it. + if klass = scope.find_hostclass(name) + + if param_classes + resource = klass.ensure_in_catalog(scope, param_classes[name] || {}) + else + found << name and next if scope.class_scope(klass) + resource = klass.ensure_in_catalog(scope) + end + + # If they've disabled lazy evaluation (which the :include function does), + # then evaluate our resource immediately. + resource.evaluate unless lazy_evaluate + found << name + else + Puppet.warning "Could not find class #{name} for #{node.name}" + @catalog.tag(name) + end + end + found + end + + def evaluate_relationships + @relationships.each { |rel| rel.evaluate(catalog) } + end + + # Return a resource by either its ref or its type and title. + def findresource(*args) + @catalog.resource(*args) + end + + def initialize(node, options = {}) + @node = node + + options.each do |param, value| + begin + send(param.to_s + "=", value) + rescue NoMethodError + raise ArgumentError, "Compiler objects do not accept #{param}" + end + end + + initvars + end + + # Create a new scope, with either a specified parent scope or + # using the top scope. 
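+ # Illustrative sketch (not part of the original source):
+ #
+ #   child = compiler.newscope(compiler.topscope)
+ #
+ # returns a fresh Puppet::Parser::Scope tied back to this compiler whose
+ # variable lookups fall through to topscope via the parent link set below.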
+ def newscope(parent, options = {}) + parent ||= topscope + options[:compiler] = self + scope = Puppet::Parser::Scope.new(options) + scope.parent = parent + scope + end + + # Return any overrides for the given resource. + def resource_overrides(resource) + @resource_overrides[resource.ref] + end + + # The top scope is usually the top-level scope, but if we're using AST nodes, + # then it is instead the node's scope. + def topscope + node_scope || @topscope + end + + private + + # If ast nodes are enabled, then see if we can find and evaluate one. + def evaluate_ast_node + return unless ast_nodes? + + # Now see if we can find the node. + astnode = nil + @node.names.each do |name| + break if astnode = known_resource_types.node(name.to_s.downcase) + end + + unless (astnode ||= known_resource_types.node("default")) + raise Puppet::ParseError, "Could not find default node or by name with '#{node.names.join(", ")}'" + end + + # Create a resource to model this node, and then add it to the list + # of resources. + resource = astnode.ensure_in_catalog(topscope) + + resource.evaluate + + # Now set the node scope appropriately, so that :topscope can + # behave differently. + @node_scope = topscope.class_scope(astnode) + end + + # Evaluate our collections and return true if anything returned an object. + # The 'true' is used to continue a loop, so it's important. + def evaluate_collections + return false if @collections.empty? + + found_something = false + exceptwrap do + # We have to iterate over a dup of the array because + # collections can delete themselves from the list, which + # changes its length and causes some collections to get missed. + @collections.dup.each do |collection| + found_something = true if collection.evaluate + end + end + + found_something + end + + # Make sure all of our resources have been evaluated into native resources. + # We return true if any resources have, so that we know to continue the + # evaluate_generators loop. + def evaluate_definitions + exceptwrap do + !unevaluated_resources.each { |resource| resource.evaluate }.empty? + end + end + + # Iterate over collections and resources until we're sure that the whole + # compile is evaluated. This is necessary because both collections + # and defined resources can generate new resources, which themselves could + # be defined resources. + def evaluate_generators + count = 0 + loop do + done = true + + # Call collections first, then definitions. + done = false if evaluate_collections + done = false if evaluate_definitions + break if done + + count += 1 + + if count > 1000 + raise Puppet::ParseError, "Somehow looped more than 1000 times while evaluating host catalog" + end + end + end + + # Find and evaluate our main object, if possible. + def evaluate_main + @main = known_resource_types.find_hostclass([""], "") || known_resource_types.add(Puppet::Resource::Type.new(:hostclass, "")) + @topscope.source = @main + @main_resource = Puppet::Parser::Resource.new("class", :main, :scope => @topscope, :source => @main) + @topscope.resource = @main_resource + + add_resource(@topscope, @main_resource) + + @main_resource.evaluate + end + + # Make sure the entire catalog is evaluated. + def fail_on_unevaluated + fail_on_unevaluated_overrides + fail_on_unevaluated_resource_collections + end + + # If there are any resource overrides remaining, then we could + # not find the resource they were supposed to override, so we + # want to throw an exception. 
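+ # Illustrative example (not from the original source): an override such as
+ #
+ #   File['/tmp/never-declared'] { mode => '0644' }
+ #
+ # never matches a declared resource, so it is still sitting in
+ # @resource_overrides when compilation finishes and the ParseError below
+ # fires.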
+ def fail_on_unevaluated_overrides + remaining = [] + @resource_overrides.each do |name, overrides| + remaining += overrides + end + + unless remaining.empty? + fail Puppet::ParseError, + "Could not find resource(s) %s for overriding" % remaining.collect { |o| + o.ref + }.join(", ") + end + end + + # Make sure we don't have any remaining collections that specifically + # look for resources, because we want to consider those to be + # parse errors. + def fail_on_unevaluated_resource_collections + remaining = [] + @collections.each do |coll| + # We're only interested in the 'resource' collections, + # which result from direct calls of 'realize'. Anything + # else is allowed not to return resources. + # Collect all of them, so we have a useful error. + if r = coll.resources + if r.is_a?(Array) + remaining += r + else + remaining << r + end + end + end + + raise Puppet::ParseError, "Failed to realize virtual resources #{remaining.join(', ')}" unless remaining.empty? + end + + # Make sure all of our resources and such have done any last work + # necessary. + def finish + evaluate_relationships + + resources.each do |resource| + # Add in any resource overrides. + if overrides = resource_overrides(resource) + overrides.each do |over| + resource.merge(over) + end + + # Remove the overrides, so that the configuration knows there + # are none left. + overrides.clear + end + + resource.finish if resource.respond_to?(:finish) + end + + add_resource_metaparams + end + + def add_resource_metaparams + unless main = catalog.resource(:class, :main) + raise "Couldn't find main" + end + + names = [] + Puppet::Type.eachmetaparam do |name| + next if Puppet::Parser::Resource.relationship_parameter?(name) + names << name + end + + data = {} + catalog.walk(main, :out) do |source, target| + if source_data = data[source] || metaparams_as_data(source, names) + # only store anything in the data hash if we've actually got + # data + data[source] ||= source_data + source_data.each do |param, value| + target[param] = value if target[param].nil? + end + data[target] = source_data.merge(metaparams_as_data(target, names)) + end + + target.tag(*(source.tags)) + end + end + + def metaparams_as_data(resource, params) + data = nil + params.each do |param| + unless resource[param].nil? + # Because we could be creating a hash for every resource, + # and we actually probably don't often have any data here at all, + # we're optimizing a bit by only creating a hash if there's + # any data to put in it. + data ||= {} + data[param] = resource[param] + end + end + data + end + + # Set up all of our internal variables. + def initvars + # The list of objects that will available for export. + @exported_resources = {} + + # The list of overrides. This is used to cache overrides on objects + # that don't exist yet. We store an array of each override. + @resource_overrides = Hash.new do |overs, ref| + overs[ref] = [] + end + + # The list of collections that have been created. This is a global list, + # but they each refer back to the scope that created them. + @collections = [] + + # The list of relationships to evaluate. + @relationships = [] + + # For maintaining the relationship between scopes and their resources. + @catalog = Puppet::Resource::Catalog.new(@node.name) + @catalog.version = known_resource_types.version + + # Create our initial scope and a resource that will evaluate main. 
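+ # (Illustrative note, not in the original source: the implicit Stage[main]
+ # resource is created just below, which is what add_resource above protects
+ # when it only lets classes set the 'stage' metaparameter.)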
+ @topscope = Puppet::Parser::Scope.new(:compiler => self) + + @main_stage_resource = Puppet::Parser::Resource.new("stage", :main, :scope => @topscope) + @catalog.add_resource(@main_stage_resource) + + # local resource array to maintain resource ordering + @resources = [] + + # Make sure any external node classes are in our class list + if @node.classes.class == Hash + @catalog.add_class(*@node.classes.keys) + else + @catalog.add_class(*@node.classes) + end + end + + # Set the node's parameters into the top-scope as variables. + def set_node_parameters + node.parameters.each do |param, value| + @topscope.setvar(param, value) + end + + # These might be nil. + catalog.client_version = node.parameters["clientversion"] + catalog.server_version = node.parameters["serverversion"] + end + + def create_settings_scope + unless settings_type = environment.known_resource_types.hostclass("settings") + settings_type = Puppet::Resource::Type.new :hostclass, "settings" + environment.known_resource_types.add(settings_type) + end + + settings_resource = Puppet::Parser::Resource.new("class", "settings", :scope => @topscope) + settings_type.evaluate_code(settings_resource) + + @catalog.add_resource(settings_resource) + + scope = @topscope.class_scope(settings_type) + + Puppet.settings.each do |name, setting| + next if name.to_s == "name" + scope.setvar name.to_s, environment[name] + end + end + + # Return an array of all of the unevaluated resources. These will be definitions, + # which need to get evaluated into native resources. + def unevaluated_resources + # The order of these is significant for speed due to short-circuting + resources.reject { |resource| resource.evaluated? or resource.virtual? or resource.builtin_type? } + end +end diff --git a/mcollective/lib/puppet/parser/files.rb b/mcollective/lib/puppet/parser/files.rb new file mode 100644 index 000000000..f34683153 --- /dev/null +++ b/mcollective/lib/puppet/parser/files.rb @@ -0,0 +1,88 @@ +require 'puppet/module' +require 'puppet/parser/parser' + +# This is a silly central module for finding +# different kinds of files while parsing. This code +# doesn't really belong in the Puppet::Module class, +# but it doesn't really belong anywhere else, either. +module Puppet::Parser::Files + module_function + + # Return a list of manifests (as absolute filenames) that match +pat+ + # with the current directory set to +cwd+. If the first component of + # +pat+ does not contain any wildcards and is an existing module, return + # a list of manifests in that module matching the rest of +pat+ + # Otherwise, try to find manifests matching +pat+ relative to +cwd+ + def find_manifests(start, options = {}) + cwd = options[:cwd] || Dir.getwd + module_name, pattern = split_file_path(start) + begin + if mod = Puppet::Module.find(module_name, options[:environment]) + return [mod.name, mod.match_manifests(pattern)] + end + rescue Puppet::Module::InvalidName + # Than that would be a "no." + end + abspat = File::expand_path(start, cwd) + [nil, Dir.glob(abspat + (File.extname(abspat).empty? ? '{.pp,.rb}' : '' )).uniq.reject { |f| FileTest.directory?(f) }] + end + + # Find the concrete file denoted by +file+. If +file+ is absolute, + # return it directly. Otherwise try to find it as a template in a + # module. If that fails, return it relative to the +templatedir+ config + # param. 
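+ # (Illustrative example, not part of the original docs:
+ # find_template("mymod/motd.erb") returns either an entry from
+ # templatepath() joined with "mymod/motd.erb" or the motd.erb template
+ # shipped by a module named mymod, whichever turns up first.)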
+ # In all cases, an absolute path is returned, which does not + # necessarily refer to an existing file + def find_template(template, environment = nil) + if template == File.expand_path(template) + return template + end + + if template_paths = templatepath(environment) + # If we can find the template in :templatedir, we return that. + template_paths.collect { |path| + File::join(path, template) + }.each do |f| + return f if FileTest.exist?(f) + end + end + + # check in the default template dir, if there is one + if td_file = find_template_in_module(template, environment) + return td_file + end + + nil + end + + def find_template_in_module(template, environment = nil) + path, file = split_file_path(template) + + # Because templates don't have an assumed template name, like manifests do, + # we treat templates with no name as being templates in the main template + # directory. + return nil unless file + + if mod = Puppet::Module.find(path, environment) and t = mod.template(file) + return t + end + nil + end + + # Return an array of paths by splitting the +templatedir+ config + # parameter. + def templatepath(environment = nil) + dirs = Puppet.settings.value(:templatedir, environment).split(File::PATH_SEPARATOR) + dirs.select do |p| + File::directory?(p) + end + end + + # Split the path into the module and the rest of the path, or return + # nil if the path is empty or absolute (starts with a /). + # This method can return nil & anyone calling it needs to handle that. + def split_file_path(path) + path.split(File::SEPARATOR, 2) unless path == "" or path == File.expand_path(path) + end + +end diff --git a/mcollective/lib/puppet/parser/functions.rb b/mcollective/lib/puppet/parser/functions.rb new file mode 100644 index 000000000..5807c0bbe --- /dev/null +++ b/mcollective/lib/puppet/parser/functions.rb @@ -0,0 +1,124 @@ +require 'puppet/util/autoload' +require 'puppet/parser/scope' +require 'monitor' + +# A module for managing parser functions. Each specified function +# is added to a central module that then gets included into the Scope +# class. +module Puppet::Parser::Functions + + (@functions = Hash.new { |h,k| h[k] = {} }).extend(MonitorMixin) + (@modules = {} ).extend(MonitorMixin) + + class << self + include Puppet::Util + end + + def self.autoloader + unless defined?(@autoloader) + + @autoloader = Puppet::Util::Autoload.new( + self, + "puppet/parser/functions", + + :wrap => false + ) + end + + @autoloader + end + + Environment = Puppet::Node::Environment + + def self.environment_module(env = nil) + @modules.synchronize { + @modules[ env || Environment.current || Environment.root ] ||= Module.new + } + end + + # Create a new function type. + def self.newfunction(name, options = {}, &block) + name = symbolize(name) + + raise Puppet::DevError, "Function #{name} already defined" if functions.include?(name) + + ftype = options[:type] || :statement + + unless ftype == :statement or ftype == :rvalue + raise Puppet::DevError, "Invalid statement type #{ftype.inspect}" + end + + fname = "function_#{name}" + environment_module.send(:define_method, fname, &block) + + # Someday we'll support specifying an arity, but for now, nope + #functions[name] = {:arity => arity, :type => ftype} + functions[name] = {:type => ftype, :name => fname} + functions[name][:doc] = options[:doc] if options[:doc] + end + + # Remove a function added by newfunction + def self.rmfunction(name) + name = symbolize(name) + + raise Puppet::DevError, "Function #{name} is not defined" unless functions.include? 
name + + functions.delete name + + fname = "function_#{name}" + environment_module.send(:remove_method, fname) + end + + # Determine if a given name is a function + def self.function(name) + name = symbolize(name) + + @functions.synchronize do + unless functions.include?(name) or functions(Puppet::Node::Environment.root).include?(name) + autoloader.load(name,Environment.current || Environment.root) + end + end + + ( functions(Environment.root)[name] || functions[name] || {:name => false} )[:name] + end + + def self.functiondocs + autoloader.loadall + + ret = "" + + functions.sort { |a,b| a[0].to_s <=> b[0].to_s }.each do |name, hash| + #ret += "#{name}\n#{hash[:type]}\n" + ret += "#{name}\n#{"-" * name.to_s.length}\n" + if hash[:doc] + ret += Puppet::Util::Docs.scrub(hash[:doc]) + else + ret += "Undocumented.\n" + end + + ret += "\n\n- *Type*: #{hash[:type]}\n\n" + end + + ret + end + + def self.functions(env = nil) + @functions.synchronize { + @functions[ env || Environment.current || Environment.root ] + } + end + + # Determine if a given function returns a value or not. + def self.rvalue?(name) + (functions[symbolize(name)] || {})[:type] == :rvalue + end + + # Runs a newfunction to create a function for each of the log levels + + Puppet::Util::Log.levels.each do |level| + newfunction(level, :doc => "Log a message on the server at level #{level.to_s}.") do |vals| + send(level, vals.join(" ")) + end + end + +end diff --git a/mcollective/lib/puppet/parser/functions/defined.rb b/mcollective/lib/puppet/parser/functions/defined.rb new file mode 100644 index 000000000..2aeaa9ba0 --- /dev/null +++ b/mcollective/lib/puppet/parser/functions/defined.rb @@ -0,0 +1,49 @@ +# Test whether a given class or definition is defined +Puppet::Parser::Functions::newfunction(:defined, :type => :rvalue, :doc => "Determine whether + a given class or resource type is defined. This function can also determine whether a + specific resource has been declared. Returns true or false. Accepts class names, + type names, and resource references. + + The `defined` function checks both native and defined types, including types + provided as plugins via modules. Types and classes are both checked using their names: + + defined(\"file\") + defined(\"customtype\") + defined(\"foo\") + defined(\"foo::bar\") + + Resource declarations are checked using resource references, e.g. + `defined( File['/tmp/myfile'] )`. Checking whether a given resource + has been declared is, unfortunately, dependent on the parse order of + the configuration, and the following code will not work: + + if defined(File['/tmp/foo']) { + notify(\"This configuration includes the /tmp/foo file.\") + } + file {\"/tmp/foo\": + ensure => present, + } + + However, this order requirement refers to parse order only, and ordering of + resources in the configuration graph (e.g. 
with `before` or `require`) does not + affect the behavior of `defined`.") do |vals| + result = false + vals = [vals] unless vals.is_a?(Array) + vals.each do |val| + case val + when String + if Puppet::Type.type(val) or find_definition(val) or find_hostclass(val) + result = true + break + end + when Puppet::Resource + if findresource(val.to_s) + result = true + break + end + else + raise ArgumentError, "Invalid argument of type '#{val.class}' to 'defined'" + end + end + result +end diff --git a/mcollective/lib/puppet/parser/functions/extlookup.rb b/mcollective/lib/puppet/parser/functions/extlookup.rb new file mode 100644 index 000000000..bc55410b9 --- /dev/null +++ b/mcollective/lib/puppet/parser/functions/extlookup.rb @@ -0,0 +1,157 @@ +require 'csv' + +module Puppet::Parser::Functions + newfunction(:extlookup, + :type => :rvalue, + :doc => "This is a parser function to read data from external files, this version +uses CSV files but the concept can easily be adjust for databases, yaml +or any other queryable data source. + +The object of this is to make it obvious when it's being used, rather than +magically loading data in when an module is loaded I prefer to look at the code +and see statements like: + + $snmp_contact = extlookup(\"snmp_contact\") + +The above snippet will load the snmp_contact value from CSV files, this in its +own is useful but a common construct in puppet manifests is something like this: + + case $domain { + \"myclient.com\": { $snmp_contact = \"John Doe \" } + default: { $snmp_contact = \"My Support \" } + } + +Over time there will be a lot of this kind of thing spread all over your manifests +and adding an additional client involves grepping through manifests to find all the +places where you have constructs like this. + +This is a data problem and shouldn't be handled in code, a using this function you +can do just that. + +First you configure it in site.pp: + + $extlookup_datadir = \"/etc/puppet/manifests/extdata\" + $extlookup_precedence = [\"%{fqdn}\", \"domain_%{domain}\", \"common\"] + +The array tells the code how to resolve values, first it will try to find it in +web1.myclient.com.csv then in domain_myclient.com.csv and finally in common.csv + +Now create the following data files in /etc/puppet/manifests/extdata: + + domain_myclient.com.csv: + snmp_contact,John Doe + root_contact,support@%{domain} + client_trusted_ips,192.168.1.130,192.168.10.0/24 + + common.csv: + snmp_contact,My Support + root_contact,support@my.com + +Now you can replace the case statement with the simple single line to achieve +the exact same outcome: + + $snmp_contact = extlookup(\"snmp_contact\") + +The above code shows some other features, you can use any fact or variable that +is in scope by simply using %{varname} in your data files, you can return arrays +by just having multiple values in the csv after the initial variable name. + +In the event that a variable is nowhere to be found a critical error will be raised +that will prevent your manifest from compiling, this is to avoid accidentally putting +in empty values etc. You can however specify a default value: + + $ntp_servers = extlookup(\"ntp_servers\", \"1.${country}.pool.ntp.org\") + +In this case it will default to \"1.${country}.pool.ntp.org\" if nothing is defined in +any data file. 
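+
+(Illustrative aside, not part of the original documentation: the default is an
+ordinary value, so it can itself be assembled from facts in scope, e.g.
+
+    $mta = extlookup(\"mail_relay\", \"mail.${domain}\")
+
+falls back to a per-domain relay name when no data file defines mail_relay.)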
+ +You can also specify an additional data file to search first before any others at use +time, for example: + + $version = extlookup(\"rsyslog_version\", \"present\", \"packages\") + package{\"rsyslog\": ensure => $version } + +This will look for a version configured in packages.csv and then in the rest as configured +by $extlookup_precedence if it's not found anywhere it will default to `present`, this kind +of use case makes puppet a lot nicer for managing large amounts of packages since you do not +need to edit a load of manifests to do simple things like adjust a desired version number. + +Precedence values can have variables embedded in them in the form %{fqdn}, you could for example do: + + $extlookup_precedence = [\"hosts/%{fqdn}\", \"common\"] + +This will result in /path/to/extdata/hosts/your.box.com.csv being searched. + +This is for back compatibility to interpolate variables with %. % interpolation is a workaround for a problem that has been fixed: Puppet variable interpolation at top scope used to only happen on each run.") do |args| + + key = args[0] + + default = args[1] + datafile = args[2] + + raise Puppet::ParseError, ("extlookup(): wrong number of arguments (#{args.length}; must be <= 3)") if args.length > 3 + + extlookup_datadir = lookupvar('extlookup_datadir') + extlookup_precedence = Array.new + + extlookup_precedence = lookupvar('extlookup_precedence').collect do |var| + var.gsub(/%\{(.+?)\}/) do |capture| + lookupvar($1) + end + end + + datafiles = Array.new + + # if we got a custom data file, put it first in the array of search files + if datafile != "" + datafiles << extlookup_datadir + "/#{datafile}.csv" if File.exists?(extlookup_datadir + "/#{datafile}.csv") + end + + extlookup_precedence.each do |d| + datafiles << extlookup_datadir + "/#{d}.csv" + end + + desired = nil + + datafiles.each do |file| + if desired.nil? + if File.exists?(file) + result = CSV.read(file).find_all do |r| + r[0] == key + end + + # return just the single result if theres just one, + # else take all the fields in the csv and build an array + if result.length > 0 + if result[0].length == 2 + val = result[0][1].to_s + + # parse %{}'s in the CSV into local variables using lookupvar() + while val =~ /%\{(.+?)\}/ + val.gsub!(/%\{#{$1}\}/, lookupvar($1)) + end + + desired = val + elsif result[0].length > 1 + length = result[0].length + cells = result[0][1,length] + + # Individual cells in a CSV result are a weird data type and throws + # puppets yaml parsing, so just map it all to plain old strings + desired = cells.map do |c| + # parse %{}'s in the CSV into local variables using lookupvar() + while c =~ /%\{(.+?)\}/ + c.gsub!(/%\{#{$1}\}/, lookupvar($1)) + end + + c.to_s + end + end + end + end + end + end + + desired || default or raise Puppet::ParseError, "No match found for '#{key}' in any data file during extlookup()" + end +end diff --git a/mcollective/lib/puppet/parser/functions/fail.rb b/mcollective/lib/puppet/parser/functions/fail.rb new file mode 100644 index 000000000..5bef6c7e3 --- /dev/null +++ b/mcollective/lib/puppet/parser/functions/fail.rb @@ -0,0 +1,4 @@ +Puppet::Parser::Functions::newfunction(:fail, :doc => "Fail with a parse error.") do |vals| + vals = vals.collect { |s| s.to_s }.join(" ") if vals.is_a? 
Array + raise Puppet::ParseError, vals.to_s +end diff --git a/mcollective/lib/puppet/parser/functions/file.rb b/mcollective/lib/puppet/parser/functions/file.rb new file mode 100644 index 000000000..19ab9ba2e --- /dev/null +++ b/mcollective/lib/puppet/parser/functions/file.rb @@ -0,0 +1,23 @@ +# Returns the contents of a file + + Puppet::Parser::Functions::newfunction( + :file, :type => :rvalue, + + :doc => "Return the contents of a file. Multiple files + can be passed, and the first file that exists will be read in.") do |vals| + ret = nil + vals.each do |file| + unless file =~ /^#{File::SEPARATOR}/ + raise Puppet::ParseError, "Files must be fully qualified" + end + if FileTest.exists?(file) + ret = File.read(file) + break + end + end + if ret + ret + else + raise Puppet::ParseError, "Could not find any files from #{vals.join(", ")}" + end +end diff --git a/mcollective/lib/puppet/parser/functions/fqdn_rand.rb b/mcollective/lib/puppet/parser/functions/fqdn_rand.rb new file mode 100644 index 000000000..52946f2c1 --- /dev/null +++ b/mcollective/lib/puppet/parser/functions/fqdn_rand.rb @@ -0,0 +1,12 @@ +Puppet::Parser::Functions::newfunction(:fqdn_rand, :type => :rvalue, :doc => + "Generates random numbers based on the node's fqdn. Generated random values + will be a range from 0 up to and excluding n, where n is the first parameter. + The second argument specifies a number to add to the seed and is optional, for example: + + $random_number = fqdn_rand(30) + $random_number_seed = fqdn_rand(30,30)") do |args| + require 'md5' + max = args.shift + srand MD5.new([lookupvar('fqdn'),args].join(':')).to_s.hex + rand(max).to_s +end diff --git a/mcollective/lib/puppet/parser/functions/generate.rb b/mcollective/lib/puppet/parser/functions/generate.rb new file mode 100644 index 000000000..91f7b2240 --- /dev/null +++ b/mcollective/lib/puppet/parser/functions/generate.rb @@ -0,0 +1,31 @@ +# Runs an external command and returns the results +Puppet::Parser::Functions::newfunction(:generate, :type => :rvalue, + :doc => "Calls an external command on the Puppet master and returns + the results of the command. Any arguments are passed to the external command as + arguments. If the generator does not exit with return code of 0, + the generator is considered to have failed and a parse error is + thrown. Generators can only have file separators, alphanumerics, dashes, + and periods in them. This function will attempt to protect you from + malicious generator calls (e.g., those with '..' in them), but it can + never be entirely safe. No subshell is used to execute + generators, so all shell metacharacters are passed directly to + the generator.") do |args| + + raise Puppet::ParseError, "Generators must be fully qualified" unless args[0] =~ /^#{File::SEPARATOR}/ + + unless args[0] =~ /^[-#{File::SEPARATOR}\w.]+$/ + raise Puppet::ParseError, + "Generators can only contain alphanumerics, file separators, and dashes" + end + + if args[0] =~ /\.\./ + raise Puppet::ParseError, + "Can not use generators with '..' in them." 
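+    # Illustrative calls only (the command path is made up): the checks above let
+    # generate('/usr/local/bin/mkdata', 'web01') through, while generate('mkdata')
+    # or generate('../mkdata') is rejected with a parse error.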
+ end + + begin + Dir.chdir(File.dirname(args[0])) { Puppet::Util.execute(args) } + rescue Puppet::ExecutionFailure => detail + raise Puppet::ParseError, "Failed to execute generator #{args[0]}: #{detail}" + end +end diff --git a/mcollective/lib/puppet/parser/functions/include.rb b/mcollective/lib/puppet/parser/functions/include.rb new file mode 100644 index 000000000..1e12a6a88 --- /dev/null +++ b/mcollective/lib/puppet/parser/functions/include.rb @@ -0,0 +1,24 @@ +# Include the specified classes +Puppet::Parser::Functions::newfunction(:include, :doc => "Evaluate one or more classes.") do |vals| + vals = [vals] unless vals.is_a?(Array) + + # The 'false' disables lazy evaluation. + klasses = compiler.evaluate_classes(vals, self, false) + + missing = vals.find_all do |klass| + ! klasses.include?(klass) + end + + unless missing.empty? + # Throw an error if we didn't evaluate all of the classes. + str = "Could not find class" + str += "es" if missing.length > 1 + + str += " " + missing.join(", ") + + if n = namespaces and ! n.empty? and n != [""] + str += " in namespaces #{@namespaces.join(", ")}" + end + self.fail Puppet::ParseError, str + end +end diff --git a/mcollective/lib/puppet/parser/functions/inline_template.rb b/mcollective/lib/puppet/parser/functions/inline_template.rb new file mode 100644 index 000000000..9759ff6e1 --- /dev/null +++ b/mcollective/lib/puppet/parser/functions/inline_template.rb @@ -0,0 +1,21 @@ +Puppet::Parser::Functions::newfunction(:inline_template, :type => :rvalue, :doc => + "Evaluate a template string and return its value. See + [the templating docs](http://docs.puppetlabs.com/guides/templating.html) for + more information. Note that if multiple template strings are specified, their + output is all concatenated and returned as the output of the function.") do |vals| + + require 'erb' + + vals.collect do |string| + # Use a wrapper, so the template can't get access to the full + # Scope object. + + wrapper = Puppet::Parser::TemplateWrapper.new(self) + begin + wrapper.result(string) + rescue => detail + raise Puppet::ParseError, + "Failed to parse inline template: #{detail}" + end + end.join("") +end diff --git a/mcollective/lib/puppet/parser/functions/md5.rb b/mcollective/lib/puppet/parser/functions/md5.rb new file mode 100644 index 000000000..f7a4f7222 --- /dev/null +++ b/mcollective/lib/puppet/parser/functions/md5.rb @@ -0,0 +1,5 @@ +Puppet::Parser::Functions::newfunction(:md5, :type => :rvalue, :doc => "Returns a MD5 hash value from a provided string.") do |args| + require 'md5' + + Digest::MD5.hexdigest(args[0]) +end diff --git a/mcollective/lib/puppet/parser/functions/realize.rb b/mcollective/lib/puppet/parser/functions/realize.rb new file mode 100644 index 000000000..c21ccd14a --- /dev/null +++ b/mcollective/lib/puppet/parser/functions/realize.rb @@ -0,0 +1,14 @@ +# This is just syntactic sugar for a collection, although it will generally +# be a good bit faster. + +Puppet::Parser::Functions::newfunction(:realize, :doc => "Make a virtual object real. This is useful + when you want to know the name of the virtual object and don't want to + bother with a full collection. It is slightly faster than a collection, + and, of course, is a bit shorter. You must pass the object using a + reference; e.g.: `realize User[luke]`." 
) do |vals| + coll = Puppet::Parser::Collector.new(self, :nomatter, nil, nil, :virtual) + vals = [vals] unless vals.is_a?(Array) + coll.resources = vals.flatten + + compiler.add_collection(coll) +end diff --git a/mcollective/lib/puppet/parser/functions/regsubst.rb b/mcollective/lib/puppet/parser/functions/regsubst.rb new file mode 100644 index 000000000..b6bb5afcf --- /dev/null +++ b/mcollective/lib/puppet/parser/functions/regsubst.rb @@ -0,0 +1,93 @@ +module Puppet::Parser::Functions + + newfunction( + :regsubst, :type => :rvalue, + + :doc => " +Perform regexp replacement on a string or array of strings. + +* *Parameters* (in order): + * _target_ The string or array of strings to operate on. If an array, the replacement will be performed on each of the elements in the array, and the return value will be an array. + * _regexp_ The regular expression matching the target string. If you want it anchored at the start and or end of the string, you must do that with ^ and $ yourself. + * _replacement_ Replacement string. Can contain backreferences to what was matched using \\0 (whole match), \\1 (first set of parentheses), and so on. + * _flags_ Optional. String of single letter flags for how the regexp is interpreted: + - *E* Extended regexps + - *I* Ignore case in regexps + - *M* Multiline regexps + - *G* Global replacement; all occurrences of the regexp in each target string will be replaced. Without this, only the first occurrence will be replaced. + * _encoding_ Optional. How to handle multibyte characters. A single-character string with the following values: + - *N* None + - *E* EUC + - *S* SJIS + - *U* UTF-8 + +* *Examples* + +Get the third octet from the node's IP address: + + $i3 = regsubst($ipaddress,'^(\\d+)\\.(\\d+)\\.(\\d+)\\.(\\d+)$','\\3') + +Put angle brackets around each octet in the node's IP address: + + $x = regsubst($ipaddress, '([0-9]+)', '<\\1>', 'G') +") \ + do |args| + unless args.length.between?(3, 5) + + raise( + Puppet::ParseError, + + "regsubst(): got #{args.length} arguments, expected 3 to 5") + end + target, regexp, replacement, flags, lang = args + reflags = 0 + operation = :sub + if flags == nil + flags = [] + elsif flags.respond_to?(:split) + flags = flags.split('') + else + + raise( + Puppet::ParseError, + + "regsubst(): bad flags parameter #{flags.class}:`#{flags}'") + end + flags.each do |f| + case f + when 'G' then operation = :gsub + when 'E' then reflags |= Regexp::EXTENDED + when 'I' then reflags |= Regexp::IGNORECASE + when 'M' then reflags |= Regexp::MULTILINE + else raise(Puppet::ParseError, "regsubst(): bad flag `#{f}'") + end + end + begin + re = Regexp.compile(regexp, reflags, lang) + rescue RegexpError, TypeError + + raise( + Puppet::ParseError, + + "regsubst(): Bad regular expression `#{regexp}'") + end + if target.respond_to?(operation) + # String parameter -> string result + result = target.send(operation, re, replacement) + elsif target.respond_to?(:collect) and + target.respond_to?(:all?) and + target.all? 
{ |e| e.respond_to?(operation) } + # Array parameter -> array result + result = target.collect { |e| + e.send(operation, re, replacement) + } + else + + raise( + Puppet::ParseError, + + "regsubst(): bad target #{target.class}:`#{target}'") + end + return result + end +end diff --git a/mcollective/lib/puppet/parser/functions/require.rb b/mcollective/lib/puppet/parser/functions/require.rb new file mode 100644 index 000000000..64285307e --- /dev/null +++ b/mcollective/lib/puppet/parser/functions/require.rb @@ -0,0 +1,57 @@ +# Requires the specified classes + + Puppet::Parser::Functions::newfunction( + :require, + + :doc =>"Evaluate one or more classes, adding the required class as a dependency. + +The relationship metaparameters work well for specifying relationships +between individual resources, but they can be clumsy for specifying +relationships between classes. This function is a superset of the +'include' function, adding a class relationship so that the requiring +class depends on the required class. + +Warning: using require in place of include can lead to unwanted dependency cycles. + +For instance the following manifest, with 'require' instead of 'include' would produce a nasty dependence cycle, because notify imposes a before between File[/foo] and Service[foo]: + + class myservice { + service { foo: ensure => running } + } + + class otherstuff { + include myservice + file { '/foo': notify => Service[foo] } + } + +Note that this function only works with clients 0.25 and later, and it will +fail if used with earlier clients. + +") do |vals| + # Verify that the 'include' function is loaded + method = Puppet::Parser::Functions.function(:include) + + send(method, vals) + if resource.metaparam_compatibility_mode? + warning "The 'require' function is only compatible with clients at 0.25 and above; including class but not adding dependency" + else + vals = [vals] unless vals.is_a?(Array) + + vals.each do |klass| + # lookup the class in the scopes + if classobj = find_hostclass(klass) + klass = classobj.name + else + raise Puppet::ParseError, "Could not find class #{klass}" + end + + # This is a bit hackish, in some ways, but it's the only way + # to configure a dependency that will make it to the client. + # The 'obvious' way is just to add an edge in the catalog, + # but that is considered a containment edge, not a dependency + # edge, so it usually gets lost on the client. + ref = Puppet::Resource.new(:class, klass) + resource.set_parameter(:require, [resource[:require]].flatten.compact << ref) + end + end +end diff --git a/mcollective/lib/puppet/parser/functions/search.rb b/mcollective/lib/puppet/parser/functions/search.rb new file mode 100644 index 000000000..8a9c7c8be --- /dev/null +++ b/mcollective/lib/puppet/parser/functions/search.rb @@ -0,0 +1,7 @@ +Puppet::Parser::Functions::newfunction(:search, :doc => "Add another namespace for this class to search. 
+ This allows you to create classes with sets of definitions and add + those classes to another class's search path.") do |vals| + vals.each do |val| + add_namespace(val) + end +end diff --git a/mcollective/lib/puppet/parser/functions/sha1.rb b/mcollective/lib/puppet/parser/functions/sha1.rb new file mode 100644 index 000000000..10cc55cfe --- /dev/null +++ b/mcollective/lib/puppet/parser/functions/sha1.rb @@ -0,0 +1,5 @@ +Puppet::Parser::Functions::newfunction(:sha1, :type => :rvalue, :doc => "Returns a SHA1 hash value from a provided string.") do |args| + require 'sha1' + + Digest::SHA1.hexdigest(args[0]) +end diff --git a/mcollective/lib/puppet/parser/functions/shellquote.rb b/mcollective/lib/puppet/parser/functions/shellquote.rb new file mode 100644 index 000000000..3ddb988f2 --- /dev/null +++ b/mcollective/lib/puppet/parser/functions/shellquote.rb @@ -0,0 +1,39 @@ +module Puppet::Parser::Functions + + Safe = 'a-zA-Z0-9@%_+=:,./-' # Safe unquoted + Dangerous = '!"`$\\' # Unsafe inside double quotes + + newfunction(:shellquote, :type => :rvalue, :doc => "\ + Quote and concatenate arguments for use in Bourne shell. + + Each argument is quoted separately, and then all are concatenated + with spaces. If an argument is an array, the elements of that + array is interpolated within the rest of the arguments; this makes + it possible to have an array of arguments and pass that array to + shellquote instead of having to specify each argument + individually in the call. + ") \ + do |args| + + result = [] + args.flatten.each do |word| + if word.length != 0 and word.count(Safe) == word.length + result << word + elsif word.count(Dangerous) == 0 + result << ('"' + word + '"') + elsif word.count("'") == 0 + result << ("'" + word + "'") + else + r = '"' + word.each_byte do |c| + r += "\\" if Dangerous.include?(c) + r += c.chr + end + r += '"' + result << r + end + end + + return result.join(" ") + end +end diff --git a/mcollective/lib/puppet/parser/functions/split.rb b/mcollective/lib/puppet/parser/functions/split.rb new file mode 100644 index 000000000..ad027865b --- /dev/null +++ b/mcollective/lib/puppet/parser/functions/split.rb @@ -0,0 +1,29 @@ +module Puppet::Parser::Functions + + newfunction( + :split, :type => :rvalue, + + :doc => "\ +Split a string variable into an array using the specified split regexp. + +*Example:* + + $string = 'v1.v2:v3.v4' + $array_var1 = split($string, ':') + $array_var2 = split($string, '[.]') + $array_var3 = split($string, '[.:]') + +`$array_var1` now holds the result `['v1.v2', 'v3.v4']`, +while `$array_var2` holds `['v1', 'v2:v3', 'v4']`, and +`$array_var3` holds `['v1', 'v2', 'v3', 'v4']`. + +Note that in the second example, we split on a literal string that contains +a regexp meta-character (.), which must be escaped. A simple +way to do that for a single character is to enclose it in square +brackets; a backslash will also escape a single character.") do |args| + + raise Puppet::ParseError, ("split(): wrong number of arguments (#{args.length}; must be 2)") if args.length != 2 + + return args[0].split(Regexp.compile(args[1])) + end +end diff --git a/mcollective/lib/puppet/parser/functions/sprintf.rb b/mcollective/lib/puppet/parser/functions/sprintf.rb new file mode 100644 index 000000000..5eb4a4f9d --- /dev/null +++ b/mcollective/lib/puppet/parser/functions/sprintf.rb @@ -0,0 +1,13 @@ +module Puppet::Parser::Functions + + newfunction( + :sprintf, :type => :rvalue, + + :doc => "Perform printf-style formatting of text. 
+ + The first parameter is format string describing how the rest of the parameters should be formatted. See the documentation for the `Kernel::sprintf` function in Ruby for all the details.") do |args| + raise Puppet::ParseError, 'sprintf() needs at least one argument' if args.length < 1 + fmt = args.shift + return sprintf(fmt, *args) + end +end diff --git a/mcollective/lib/puppet/parser/functions/tag.rb b/mcollective/lib/puppet/parser/functions/tag.rb new file mode 100644 index 000000000..84df175eb --- /dev/null +++ b/mcollective/lib/puppet/parser/functions/tag.rb @@ -0,0 +1,6 @@ +# Tag the current scope with each passed name +Puppet::Parser::Functions::newfunction(:tag, :doc => "Add the specified tags to the containing class + or definition. All contained objects will then acquire that tag, also. + ") do |vals| + self.resource.tag(*vals) +end diff --git a/mcollective/lib/puppet/parser/functions/tagged.rb b/mcollective/lib/puppet/parser/functions/tagged.rb new file mode 100644 index 000000000..aaa2adfad --- /dev/null +++ b/mcollective/lib/puppet/parser/functions/tagged.rb @@ -0,0 +1,18 @@ +# Test whether a given tag is set. This functions as a big OR -- if any of the specified tags are unset, we return false. +Puppet::Parser::Functions::newfunction(:tagged, :type => :rvalue, :doc => "A boolean function that + tells you whether the current container is tagged with the specified tags. + The tags are ANDed, so that all of the specified tags must be included for + the function to return true.") do |vals| + configtags = compiler.catalog.tags + resourcetags = resource.tags + + retval = true + vals.each do |val| + unless configtags.include?(val) or resourcetags.include?(val) + retval = false + break + end + end + + return retval +end diff --git a/mcollective/lib/puppet/parser/functions/template.rb b/mcollective/lib/puppet/parser/functions/template.rb new file mode 100644 index 000000000..6fa110332 --- /dev/null +++ b/mcollective/lib/puppet/parser/functions/template.rb @@ -0,0 +1,24 @@ +Puppet::Parser::Functions::newfunction(:template, :type => :rvalue, :doc => + "Evaluate a template and return its value. See + [the templating docs](http://docs.puppetlabs.com/guides/templating.html) for + more information. + + Note that if multiple templates are specified, their output is all + concatenated and returned as the output of the function.") do |vals| + require 'erb' + + vals.collect do |file| + # Use a wrapper, so the template can't get access to the full + # Scope object. 
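+      # For example, a manifest might call content => template("mymodule/motd.erb");
+      # the ERB code then only sees what the wrapper exposes (in-scope variables),
+      # not the Scope object itself. The module and file names here are illustrative.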
+ debug "Retrieving template #{file}" + + wrapper = Puppet::Parser::TemplateWrapper.new(self) + wrapper.file = file + begin + wrapper.result + rescue => detail + raise Puppet::ParseError, + "Failed to parse template #{file}: #{detail}" + end + end.join("") +end diff --git a/mcollective/lib/puppet/parser/functions/versioncmp.rb b/mcollective/lib/puppet/parser/functions/versioncmp.rb new file mode 100644 index 000000000..6091e0923 --- /dev/null +++ b/mcollective/lib/puppet/parser/functions/versioncmp.rb @@ -0,0 +1,34 @@ +require 'puppet/util/package' + + + Puppet::Parser::Functions::newfunction( + :versioncmp, :type => :rvalue, + + :doc => "Compares two versions + +Prototype: + + \$result = versioncmp(a, b) + +Where a and b are arbitrary version strings + +This functions returns a number: + +* Greater than 0 if version a is greater than version b +* Equal to 0 if both version are equals +* Less than 0 if version a is less than version b + +Example: + + if versioncmp('2.6-1', '2.4.5') > 0 { + notice('2.6-1 is > than 2.4.5') + } + +") do |args| + + unless args.length == 2 + raise Puppet::ParseError, "versioncmp should have 2 arguments" + end + + return Puppet::Util::Package.versioncmp(args[0], args[1]) +end diff --git a/mcollective/lib/puppet/parser/grammar.ra b/mcollective/lib/puppet/parser/grammar.ra new file mode 100644 index 000000000..af0ab182b --- /dev/null +++ b/mcollective/lib/puppet/parser/grammar.ra @@ -0,0 +1,873 @@ +# vim: syntax=ruby + +# the parser + +class Puppet::Parser::Parser + +token STRING DQPRE DQMID DQPOST +token LBRACK RBRACK LBRACE RBRACE SYMBOL FARROW COMMA TRUE +token FALSE EQUALS APPENDS LESSEQUAL NOTEQUAL DOT COLON LLCOLLECT RRCOLLECT +token QMARK LPAREN RPAREN ISEQUAL GREATEREQUAL GREATERTHAN LESSTHAN +token IF ELSE IMPORT DEFINE ELSIF VARIABLE CLASS INHERITS NODE BOOLEAN +token NAME SEMIC CASE DEFAULT AT LCOLLECT RCOLLECT CLASSNAME CLASSREF +token NOT OR AND UNDEF PARROW PLUS MINUS TIMES DIV LSHIFT RSHIFT UMINUS +token MATCH NOMATCH REGEX IN_EDGE OUT_EDGE IN_EDGE_SUB OUT_EDGE_SUB +token IN + +prechigh + right NOT + nonassoc UMINUS + left IN MATCH NOMATCH + left TIMES DIV + left MINUS PLUS + left LSHIFT RSHIFT + left NOTEQUAL ISEQUAL + left GREATEREQUAL GREATERTHAN LESSTHAN LESSEQUAL + left AND + left OR +preclow + +rule +program: statements { + if val[0] + # Make sure we always return an array. + if val[0].is_a?(AST::ASTArray) + if val[0].children.empty? 
+ result = nil + else + result = val[0] + end + else + result = aryfy(val[0]) + end + else + result = nil + end +} + | nil + +statements: statement + | statements statement { + if val[0] and val[1] + if val[0].instance_of?(AST::ASTArray) + val[0].push(val[1]) + result = val[0] + else + result = ast AST::ASTArray, :children => [val[0],val[1]] + end + elsif obj = (val[0] || val[1]) + result = obj + else result = nil + end +} + +# The main list of valid statements +statement: resource + | virtualresource + | collection + | assignment + | casestatement + | ifstatement_begin + | import + | fstatement + | definition + | hostclass + | nodedef + | resourceoverride + | append + | relationship + +relationship: relationship_side edge relationship_side { + result = AST::Relationship.new(val[0], val[2], val[1][:value], ast_context) +} + | relationship edge relationship_side { + result = AST::Relationship.new(val[0], val[2], val[1][:value], ast_context) +} + +relationship_side: resource | resourceref | collection + +edge: IN_EDGE | OUT_EDGE | IN_EDGE_SUB | OUT_EDGE_SUB + +fstatement: NAME LPAREN funcvalues RPAREN { + args = aryfy(val[2]) + result = ast AST::Function, + :name => val[0][:value], + :line => val[0][:line], + :arguments => args, + :ftype => :statement +} +| NAME LPAREN funcvalues COMMA RPAREN { + args = aryfy(val[2]) + result = ast AST::Function, + :name => val[0][:value], + :line => val[0][:line], + :arguments => args, + :ftype => :statement +} | NAME LPAREN RPAREN { + result = ast AST::Function, + :name => val[0][:value], + :line => val[0][:line], + :arguments => AST::ASTArray.new({}), + :ftype => :statement +} + | NAME funcvalues { + args = aryfy(val[1]) + result = ast AST::Function, + :name => val[0][:value], + :line => val[0][:line], + :arguments => args, + :ftype => :statement +} + +funcvalues: namestring + | resourceref + | funcvalues COMMA namestring { + result = aryfy(val[0], val[2]) + result.line = @lexer.line + result.file = @lexer.file +} + | funcvalues COMMA resourceref { + unless val[0].is_a?(AST::ASTArray) + val[0] = aryfy(val[0]) + end + + val[0].push(val[2]) + + result = val[0] +} + +# This is *almost* an rvalue, but I couldn't get a full +# rvalue to work without scads of shift/reduce conflicts. +namestring: name + | variable + | type + | boolean + | funcrvalue + | selector + | quotedtext + | hasharrayaccesses + | CLASSNAME { + result = ast AST::Name, :value => val[0][:value] + } + +resource: classname LBRACE resourceinstances endsemi RBRACE { + @lexer.commentpop + array = val[2] + array = [array] if array.instance_of?(AST::ResourceInstance) + result = ast AST::ASTArray + + # this iterates across each specified resourceinstance + array.each { |instance| + raise Puppet::Dev, "Got something that isn't an instance" unless instance.instance_of?(AST::ResourceInstance) + # now, i need to somehow differentiate between those things with + # arrays in their names, and normal things + + result.push ast( + AST::Resource, + :type => val[0], + :title => instance[0], + + :parameters => instance[1]) + } +} | classname LBRACE params endcomma RBRACE { + # This is a deprecated syntax. + error "All resource specifications require names" +} | classref LBRACE params endcomma RBRACE { + # a defaults setting for a type + @lexer.commentpop + result = ast(AST::ResourceDefaults, :type => val[0], :parameters => val[2]) +} + +# Override a value set elsewhere in the configuration. 
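+# e.g. an override such as  File['/etc/motd'] { mode => '0644' }  parses as a
+# resourceref followed by a brace-delimited parameter list (values illustrative).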
+resourceoverride: resourceref LBRACE anyparams endcomma RBRACE { + @lexer.commentpop + result = ast AST::ResourceOverride, :object => val[0], :parameters => val[2] +} + +# Exported and virtual resources; these don't get sent to the client +# unless they get collected elsewhere in the db. +virtualresource: at resource { + type = val[0] + + if (type == :exported and ! Puppet[:storeconfigs]) and ! Puppet[:parseonly] + Puppet.warning addcontext("You cannot collect without storeconfigs being set") + end + + error "Defaults are not virtualizable" if val[1].is_a? AST::ResourceDefaults + + method = type.to_s + "=" + + # Just mark our resources as exported and pass them through. + if val[1].instance_of?(AST::ASTArray) + val[1].each do |obj| + obj.send(method, true) + end + else + val[1].send(method, true) + end + + result = val[1] +} + +at: AT { result = :virtual } + | AT AT { result = :exported } + +# A collection statement. Currently supports no arguments at all, but eventually +# will, I assume. +collection: classref collectrhand LBRACE anyparams endcomma RBRACE { + @lexer.commentpop + Puppet.warning addcontext("Collection names must now be capitalized") if val[0] =~ /^[a-z]/ + type = val[0].downcase + args = {:type => type} + + if val[1].is_a?(AST::CollExpr) + args[:query] = val[1] + args[:query].type = type + args[:form] = args[:query].form + else + args[:form] = val[1] + end + if args[:form] == :exported and ! Puppet[:storeconfigs] and ! Puppet[:parseonly] + Puppet.warning addcontext("You cannot collect exported resources without storeconfigs being set; the collection will be ignored") + end + args[:override] = val[3] + result = ast AST::Collection, args +} + | classref collectrhand { + if val[0] =~ /^[a-z]/ + Puppet.warning addcontext("Collection names must now be capitalized") + end + type = val[0].downcase + args = {:type => type } + + if val[1].is_a?(AST::CollExpr) + args[:query] = val[1] + args[:query].type = type + args[:form] = args[:query].form + else + args[:form] = val[1] + end + if args[:form] == :exported and ! Puppet[:storeconfigs] and ! Puppet[:parseonly] + Puppet.warning addcontext("You cannot collect exported resources without storeconfigs being set; the collection will be ignored") + end + result = ast AST::Collection, args +} + + +collectrhand: LCOLLECT collstatements RCOLLECT { + if val[1] + result = val[1] + result.form = :virtual + else + result = :virtual + end +} + | LLCOLLECT collstatements RRCOLLECT { + if val[1] + result = val[1] + result.form = :exported + else + result = :exported + end +} + +# A mini-language for handling collection comparisons. This is organized +# to avoid the need for precedence indications. 
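+# e.g.  User <| group == 'admin' or title == 'root' |>  combines simple
+# comparisons with 'and'/'or', optionally grouped by parentheses
+# (the attribute names and values shown are illustrative).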
+collstatements: nil + | collstatement + | collstatements colljoin collstatement { + result = ast AST::CollExpr, :test1 => val[0], :oper => val[1], :test2 => val[2] +} + +collstatement: collexpr + | LPAREN collstatements RPAREN { + result = val[1] + result.parens = true +} + +colljoin: AND { result=val[0][:value] } + | OR { result=val[0][:value] } + +collexpr: colllval ISEQUAL simplervalue { + result = ast AST::CollExpr, :test1 => val[0], :oper => val[1][:value], :test2 => val[2] + #result = ast AST::CollExpr + #result.push *val +} + | colllval NOTEQUAL simplervalue { + result = ast AST::CollExpr, :test1 => val[0], :oper => val[1][:value], :test2 => val[2] + #result = ast AST::CollExpr + #result.push *val +} + +colllval: variable + | name + +resourceinst: resourcename COLON params endcomma { + result = ast AST::ResourceInstance, :children => [val[0],val[2]] +} + +resourceinstances: resourceinst + | resourceinstances SEMIC resourceinst { + if val[0].instance_of?(AST::ResourceInstance) + result = ast AST::ASTArray, :children => [val[0],val[2]] + else + val[0].push val[2] + result = val[0] + end +} + +endsemi: # nothing + | SEMIC + +undef: UNDEF { + result = ast AST::Undef, :value => :undef +} + +name: NAME { + result = ast AST::Name, :value => val[0][:value], :line => val[0][:line] +} + +type: CLASSREF { + result = ast AST::Type, :value => val[0][:value], :line => val[0][:line] +} + +resourcename: quotedtext + | name + | type + | selector + | variable + | array + | hasharrayaccesses + +assignment: VARIABLE EQUALS expression { + raise Puppet::ParseError, "Cannot assign to variables in other namespaces" if val[0][:value] =~ /::/ + # this is distinct from referencing a variable + variable = ast AST::Name, :value => val[0][:value], :line => val[0][:line] + result = ast AST::VarDef, :name => variable, :value => val[2], :line => val[0][:line] +} + | hasharrayaccess EQUALS expression { + result = ast AST::VarDef, :name => val[0], :value => val[2] +} + +append: VARIABLE APPENDS expression { + variable = ast AST::Name, :value => val[0][:value], :line => val[0][:line] + result = ast AST::VarDef, :name => variable, :value => val[2], :append => true, :line => val[0][:line] +} + +params: # nothing +{ + result = ast AST::ASTArray +} + | param { result = val[0] } + | params COMMA param { + if val[0].instance_of?(AST::ASTArray) + val[0].push(val[2]) + result = val[0] + else + result = ast AST::ASTArray, :children => [val[0],val[2]] + end +} + +param: NAME FARROW rvalue { + result = ast AST::ResourceParam, :param => val[0][:value], :line => val[0][:line], :value => val[2] +} + +addparam: NAME PARROW rvalue { + result = ast AST::ResourceParam, :param => val[0][:value], :line => val[0][:line], :value => val[2], + :add => true +} + +anyparam: param + | addparam + +anyparams: # nothing +{ + result = ast AST::ASTArray +} + | anyparam { result = val[0] } + | anyparams COMMA anyparam { + if val[0].instance_of?(AST::ASTArray) + val[0].push(val[2]) + result = val[0] + else + result = ast AST::ASTArray, :children => [val[0],val[2]] + end +} + +rvalues: rvalue + | rvalues comma rvalue { + if val[0].instance_of?(AST::ASTArray) + result = val[0].push(val[2]) + else + result = ast AST::ASTArray, :children => [val[0],val[2]] + end +} + +simplervalue: quotedtext + | name + | type + | boolean + | selector + | variable + +rvalue: quotedtext + | name + | type + | boolean + | selector + | variable + | array + | hash + | hasharrayaccesses + | resourceref + | funcrvalue + | undef + +# We currently require arguments in these 
functions. +funcrvalue: NAME LPAREN funcvalues RPAREN { + args = aryfy(val[2]) + result = ast AST::Function, + :name => val[0][:value], :line => val[0][:line], + :arguments => args, + :ftype => :rvalue +} | NAME LPAREN RPAREN { + result = ast AST::Function, + :name => val[0][:value], :line => val[0][:line], + :arguments => AST::ASTArray.new({}), + :ftype => :rvalue +} + +quotedtext: STRING { result = ast AST::String, :value => val[0][:value], :line => val[0][:line] } + | DQPRE dqrval { result = ast AST::Concat, :value => [ast(AST::String,val[0])]+val[1], :line => val[0][:line] } + +dqrval: expression dqtail { result = [val[0]] + val[1] } + +dqtail: DQPOST { result = [ast(AST::String,val[0])] } + | DQMID dqrval { result = [ast(AST::String,val[0])] + val[1] } + +boolean: BOOLEAN { + result = ast AST::Boolean, :value => val[0][:value], :line => val[0][:line] +} + +resourceref: NAME LBRACK rvalues RBRACK { + Puppet.warning addcontext("Deprecation notice: Resource references should now be capitalized") + result = ast AST::ResourceReference, :type => val[0][:value], :line => val[0][:line], :title => val[2] +} | classref LBRACK rvalues RBRACK { + result = ast AST::ResourceReference, :type => val[0], :title => val[2] +} + +ifstatement_begin: IF ifstatement { + result = val[1] +} + +ifstatement: expression LBRACE statements RBRACE else { + @lexer.commentpop + args = { + :test => val[0], + :statements => val[2] + } + + args[:else] = val[4] if val[4] + + result = ast AST::IfStatement, args +} + | expression LBRACE RBRACE else { + @lexer.commentpop + args = { + :test => val[0], + :statements => ast(AST::Nop) + } + + args[:else] = val[3] if val[3] + + result = ast AST::IfStatement, args +} + +else: # nothing + | ELSIF ifstatement { + result = ast AST::Else, :statements => val[1] +} + | ELSE LBRACE statements RBRACE { + @lexer.commentpop + result = ast AST::Else, :statements => val[2] +} + | ELSE LBRACE RBRACE { + @lexer.commentpop + result = ast AST::Else, :statements => ast(AST::Nop) +} + +# Unlike yacc/bison, it seems racc +# gives tons of shift/reduce warnings +# with the following syntax: +# +# expression: ... +# | expression arithop expressio { ... } +# +# arithop: PLUS | MINUS | DIVIDE | TIMES ... 
+# +# So I had to develop the expression by adding one rule +# per operator :-( + +expression: rvalue + | expression IN rvalue { + result = ast AST::InOperator, :lval => val[0], :rval => val[2] +} + | expression MATCH regex { + result = ast AST::MatchOperator, :operator => val[1][:value], :lval => val[0], :rval => val[2] +} + | expression NOMATCH regex { + result = ast AST::MatchOperator, :operator => val[1][:value], :lval => val[0], :rval => val[2] +} + | expression PLUS expression { + result = ast AST::ArithmeticOperator, :operator => val[1][:value], :lval => val[0], :rval => val[2] +} + | expression MINUS expression { + result = ast AST::ArithmeticOperator, :operator => val[1][:value], :lval => val[0], :rval => val[2] +} + | expression DIV expression { + result = ast AST::ArithmeticOperator, :operator => val[1][:value], :lval => val[0], :rval => val[2] +} + | expression TIMES expression { + result = ast AST::ArithmeticOperator, :operator => val[1][:value], :lval => val[0], :rval => val[2] +} + | expression LSHIFT expression { + result = ast AST::ArithmeticOperator, :operator => val[1][:value], :lval => val[0], :rval => val[2] +} + | expression RSHIFT expression { + result = ast AST::ArithmeticOperator, :operator => val[1][:value], :lval => val[0], :rval => val[2] +} + | MINUS expression =UMINUS { + result = ast AST::Minus, :value => val[1] +} + | expression NOTEQUAL expression { + result = ast AST::ComparisonOperator, :operator => val[1][:value], :lval => val[0], :rval => val[2] +} + | expression ISEQUAL expression { + result = ast AST::ComparisonOperator, :operator => val[1][:value], :lval => val[0], :rval => val[2] +} + | expression GREATERTHAN expression { + result = ast AST::ComparisonOperator, :operator => val[1][:value], :lval => val[0], :rval => val[2] +} + | expression GREATEREQUAL expression { + result = ast AST::ComparisonOperator, :operator => val[1][:value], :lval => val[0], :rval => val[2] +} + | expression LESSTHAN expression { + result = ast AST::ComparisonOperator, :operator => val[1][:value], :lval => val[0], :rval => val[2] +} + | expression LESSEQUAL expression { + result = ast AST::ComparisonOperator, :operator => val[1][:value], :lval => val[0], :rval => val[2] +} + | NOT expression { + result = ast AST::Not, :value => val[1] +} + | expression AND expression { + result = ast AST::BooleanOperator, :operator => val[1][:value], :lval => val[0], :rval => val[2] +} + | expression OR expression { + result = ast AST::BooleanOperator, :operator => val[1][:value], :lval => val[0], :rval => val[2] +} + | LPAREN expression RPAREN { + result = val[1] +} + +casestatement: CASE rvalue LBRACE caseopts RBRACE { + @lexer.commentpop + options = val[3] + options = ast AST::ASTArray, :children => [val[3]] unless options.instance_of?(AST::ASTArray) + result = ast AST::CaseStatement, :test => val[1], :options => options +} + +caseopts: caseopt + | caseopts caseopt { + if val[0].instance_of?(AST::ASTArray) + val[0].push val[1] + result = val[0] + else + result = ast AST::ASTArray, :children => [val[0], val[1]] + end +} + +caseopt: casevalues COLON LBRACE statements RBRACE { + @lexer.commentpop + result = ast AST::CaseOpt, :value => val[0], :statements => val[3] +} | casevalues COLON LBRACE RBRACE { + @lexer.commentpop + + result = ast( + AST::CaseOpt, + :value => val[0], + + :statements => ast(AST::ASTArray) + ) +} + +casevalues: selectlhand + | casevalues COMMA selectlhand { + if val[0].instance_of?(AST::ASTArray) + val[0].push(val[2]) + result = val[0] + else + result = ast 
AST::ASTArray, :children => [val[0],val[2]] + end +} + +selector: selectlhand QMARK svalues { + result = ast AST::Selector, :param => val[0], :values => val[2] +} + +svalues: selectval + | LBRACE sintvalues endcomma RBRACE { + @lexer.commentpop + result = val[1] +} + +sintvalues: selectval + | sintvalues comma selectval { + if val[0].instance_of?(AST::ASTArray) + val[0].push(val[2]) + result = val[0] + else + result = ast AST::ASTArray, :children => [val[0],val[2]] + end +} + +selectval: selectlhand FARROW rvalue { + result = ast AST::ResourceParam, :param => val[0], :value => val[2] +} + +selectlhand: name + | type + | quotedtext + | variable + | funcrvalue + | boolean + | undef + | hasharrayaccess + | DEFAULT { + result = ast AST::Default, :value => val[0][:value], :line => val[0][:line] +} + | regex + +# These are only used for importing, and we don't interpolate there. +string: STRING { result = [val[0][:value]] } +strings: string + | strings COMMA string { result = val[0] += val[2] } + +import: IMPORT strings { + val[1].each do |file| + import(file) + end + + result = AST::ASTArray.new(:children => []) +} + +# Disable definition inheritance for now. 8/27/06, luke +#definition: DEFINE NAME argumentlist parent LBRACE statements RBRACE { +definition: DEFINE classname argumentlist LBRACE statements RBRACE { + @lexer.commentpop + newdefine classname(val[1]), :arguments => val[2], :code => val[4], :line => val[0][:line] + @lexer.indefine = false + result = nil + +#} | DEFINE NAME argumentlist parent LBRACE RBRACE { +} | DEFINE classname argumentlist LBRACE RBRACE { + @lexer.commentpop + newdefine classname(val[1]), :arguments => val[2], :line => val[0][:line] + @lexer.indefine = false + result = nil +} + +#hostclass: CLASS NAME argumentlist parent LBRACE statements RBRACE { +hostclass: CLASS classname argumentlist classparent LBRACE statements RBRACE { + @lexer.commentpop + # Our class gets defined in the parent namespace, not our own. + @lexer.namepop + newclass classname(val[1]), :arguments => val[2], :parent => val[3], :code => val[5], :line => val[0][:line] + result = nil +} | CLASS classname argumentlist classparent LBRACE RBRACE { + @lexer.commentpop + # Our class gets defined in the parent namespace, not our own. + @lexer.namepop + newclass classname(val[1]), :arguments => val[2], :parent => val[3], :line => val[0][:line] + result = nil +} + +nodedef: NODE hostnames nodeparent LBRACE statements RBRACE { + @lexer.commentpop + newnode val[1], :parent => val[2], :code => val[4], :line => val[0][:line] + result = nil +} | NODE hostnames nodeparent LBRACE RBRACE { + @lexer.commentpop + newnode val[1], :parent => val[2], :line => val[0][:line] + result = nil +} + +classref: CLASSREF { result = val[0][:value] } + +classname: NAME { result = val[0][:value] } + | CLASSNAME { result = val[0][:value] } + | CLASS { result = "class" } + +# Multiple hostnames, as used for node names. These are all literal +# strings, not AST objects. 
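+# e.g.  node 'web1.example.com', 'web2.example.com' { ... }  where each name may
+# be a bare NAME, a quoted string, the keyword 'default', or a regex
+# (the hostnames shown are made up).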
+hostnames: nodename + | hostnames COMMA nodename { + result = val[0] + result = [result] unless result.is_a?(Array) + result << val[2] +} + +nodename: hostname { + result = ast AST::HostName, :value => val[0] +} + +hostname: NAME { result = val[0][:value] } + | STRING { result = val[0][:value] } + | DEFAULT { result = val[0][:value] } + | regex + +nil: { + result = nil +} + +nothing: { + result = ast AST::ASTArray, :children => [] +} + +argumentlist: nil + | LPAREN nothing RPAREN { + result = nil +} + | LPAREN arguments RPAREN { + result = val[1] + result = [result] unless result[0].is_a?(Array) +} + +arguments: argument + | arguments COMMA argument { + result = val[0] + result = [result] unless result[0].is_a?(Array) + result << val[2] +} + +argument: NAME EQUALS rvalue { + Puppet.warning addcontext("Deprecation notice: must now include '$' in prototype") + result = [val[0][:value], val[2]] +} + | NAME { + Puppet.warning addcontext("Deprecation notice: must now include '$' in prototype") + result = [val[0][:value]] +} | VARIABLE EQUALS rvalue { + result = [val[0][:value], val[2]] +} | VARIABLE { + result = [val[0][:value]] +} + +nodeparent: nil + | INHERITS hostname { + result = val[1] +} + +classparent: nil + | INHERITS classnameordefault { + result = val[1] +} + +classnameordefault: classname | DEFAULT + +variable: VARIABLE { + result = ast AST::Variable, :value => val[0][:value], :line => val[0][:line] +} + +array: LBRACK rvalues RBRACK { + if val[1].instance_of?(AST::ASTArray) + result = val[1] + else + result = ast AST::ASTArray, :children => [val[1]] + end +} + | LBRACK rvalues COMMA RBRACK { + if val[1].instance_of?(AST::ASTArray) + result = val[1] + else + result = ast AST::ASTArray, :children => [val[1]] + end +} | LBRACK RBRACK { + result = ast AST::ASTArray +} + +comma: FARROW + | COMMA + +endcomma: # nothing + | COMMA { result = nil } + +regex: REGEX { + result = ast AST::Regex, :value => val[0][:value] +} + +hash: LBRACE hashpairs RBRACE { + if val[1].instance_of?(AST::ASTHash) + result = val[1] + else + result = ast AST::ASTHash, { :value => val[1] } + end +} + | LBRACE hashpairs COMMA RBRACE { + if val[1].instance_of?(AST::ASTHash) + result = val[1] + else + result = ast AST::ASTHash, { :value => val[1] } + end +} | LBRACE RBRACE { + result = ast AST::ASTHash +} + +hashpairs: hashpair + | hashpairs COMMA hashpair { + if val[0].instance_of?(AST::ASTHash) + result = val[0].merge(val[2]) + else + result = ast AST::ASTHash, :value => val[0] + result.merge(val[2]) + end +} + +hashpair: key FARROW rvalue { + result = ast AST::ASTHash, { :value => { val[0] => val[2] } } +} + +key: NAME { result = val[0][:value] } + | quotedtext { result = val[0] } + +hasharrayaccess: VARIABLE LBRACK rvalue RBRACK { + result = ast AST::HashOrArrayAccess, :variable => val[0][:value], :key => val[2] +} + +hasharrayaccesses: hasharrayaccess + | hasharrayaccesses LBRACK rvalue RBRACK { + result = ast AST::HashOrArrayAccess, :variable => val[0], :key => val[2] +} + +end +---- header ---- +require 'puppet' +require 'puppet/util/loadedfile' +require 'puppet/parser/lexer' +require 'puppet/parser/ast' + +module Puppet + class ParseError < Puppet::Error; end + class ImportError < Racc::ParseError; end + class AlreadyImportedError < ImportError; end +end + +---- inner ---- + +# It got too annoying having code in a file that needs to be compiled. 
+require 'puppet/parser/parser_support' + +# Make emacs happy +# Local Variables: +# mode: ruby +# End: diff --git a/mcollective/lib/puppet/parser/lexer.rb b/mcollective/lib/puppet/parser/lexer.rb new file mode 100644 index 000000000..71d9440ff --- /dev/null +++ b/mcollective/lib/puppet/parser/lexer.rb @@ -0,0 +1,583 @@ +# the scanner/lexer + +require 'strscan' +require 'puppet' + + +module Puppet + class LexError < RuntimeError; end +end + +module Puppet::Parser; end + +class Puppet::Parser::Lexer + attr_reader :last, :file, :lexing_context, :token_queue + + attr_accessor :line, :indefine + + def lex_error msg + raise Puppet::LexError.new(msg) + end + + class Token + attr_accessor :regex, :name, :string, :skip, :incr_line, :skip_text, :accumulate + + def initialize(regex, name) + if regex.is_a?(String) + @name, @string = name, regex + @regex = Regexp.new(Regexp.escape(@string)) + else + @name, @regex = name, regex + end + end + + # MQR: Why not just alias? + %w{skip accumulate}.each do |method| + define_method(method+"?") do + self.send(method) + end + end + + def to_s + if self.string + @string + else + @name.to_s + end + end + + def acceptable?(context={}) + # By default tokens are aceeptable in any context + true + end + end + + # Maintain a list of tokens. + class TokenList + attr_reader :regex_tokens, :string_tokens + + def [](name) + @tokens[name] + end + + # Create a new token. + def add_token(name, regex, options = {}, &block) + token = Token.new(regex, name) + raise(ArgumentError, "Token #{name} already exists") if @tokens.include?(name) + @tokens[token.name] = token + if token.string + @string_tokens << token + @tokens_by_string[token.string] = token + else + @regex_tokens << token + end + + options.each do |name, option| + token.send(name.to_s + "=", option) + end + + token.meta_def(:convert, &block) if block_given? + + token + end + + def initialize + @tokens = {} + @regex_tokens = [] + @string_tokens = [] + @tokens_by_string = {} + end + + # Look up a token by its value, rather than name. + def lookup(string) + @tokens_by_string[string] + end + + # Define more tokens. + def add_tokens(hash) + hash.each do |regex, name| + add_token(name, regex) + end + end + + # Sort our tokens by length, so we know once we match, we're done. + # This helps us avoid the O(n^2) nature of token matching. + def sort_tokens + @string_tokens.sort! { |a, b| b.string.length <=> a.string.length } + end + end + + TOKENS = TokenList.new + + TOKENS.add_tokens( + + '[' => :LBRACK, + ']' => :RBRACK, + '{' => :LBRACE, + '}' => :RBRACE, + '(' => :LPAREN, + + ')' => :RPAREN, + '=' => :EQUALS, + '+=' => :APPENDS, + '==' => :ISEQUAL, + '>=' => :GREATEREQUAL, + '>' => :GREATERTHAN, + '<' => :LESSTHAN, + '<=' => :LESSEQUAL, + '!=' => :NOTEQUAL, + '!' => :NOT, + ',' => :COMMA, + '.' => :DOT, + ':' => :COLON, + '@' => :AT, + '<<|' => :LLCOLLECT, + '->' => :IN_EDGE, + '<-' => :OUT_EDGE, + '~>' => :IN_EDGE_SUB, + '<~' => :OUT_EDGE_SUB, + '|>>' => :RRCOLLECT, + '<|' => :LCOLLECT, + '|>' => :RCOLLECT, + ';' => :SEMIC, + '?' 
=> :QMARK, + '\\' => :BACKSLASH, + '=>' => :FARROW, + '+>' => :PARROW, + '+' => :PLUS, + '-' => :MINUS, + '/' => :DIV, + '*' => :TIMES, + '<<' => :LSHIFT, + '>>' => :RSHIFT, + '=~' => :MATCH, + '!~' => :NOMATCH, + %r{([a-z][-\w]*)?(::[a-z][-\w]*)+} => :CLASSNAME, # Require '::' in the class name, else we'd compete with NAME + %r{((::){0,1}[A-Z][-\w]*)+} => :CLASSREF, + "" => :STRING, + "" => :DQPRE, + "" => :DQMID, + "" => :DQPOST, + "" => :BOOLEAN + ) + + TOKENS.add_token :NUMBER, %r{\b(?:0[xX][0-9A-Fa-f]+|0?\d+(?:\.\d+)?(?:[eE]-?\d+)?)\b} do |lexer, value| + [TOKENS[:NAME], value] + end + #:stopdoc: # Issue #4161 + def (TOKENS[:NUMBER]).acceptable?(context={}) + ![:DQPRE,:DQMID].include? context[:after] + end + #:startdoc: + + TOKENS.add_token :NAME, %r{[a-z0-9][-\w]*} do |lexer, value| + string_token = self + # we're looking for keywords here + if tmp = KEYWORDS.lookup(value) + string_token = tmp + if [:TRUE, :FALSE].include?(string_token.name) + value = eval(value) + string_token = TOKENS[:BOOLEAN] + end + end + [string_token, value] + end + [:NAME,:CLASSNAME,:CLASSREF].each { |name_token| + #:stopdoc: # Issue #4161 + def (TOKENS[name_token]).acceptable?(context={}) + ![:DQPRE,:DQMID].include? context[:after] + end + #:startdoc: + } + + TOKENS.add_token :COMMENT, %r{#.*}, :accumulate => true, :skip => true do |lexer,value| + value.sub!(/# ?/,'') + [self, value] + end + + TOKENS.add_token :MLCOMMENT, %r{/\*(.*?)\*/}m, :accumulate => true, :skip => true do |lexer, value| + lexer.line += value.count("\n") + value.sub!(/^\/\* ?/,'') + value.sub!(/ ?\*\/$/,'') + [self,value] + end + + TOKENS.add_token :REGEX, %r{/[^/\n]*/} do |lexer, value| + # Make sure we haven't matched an escaped / + while value[-2..-2] == '\\' + other = lexer.scan_until(%r{/}) + value += other + end + regex = value.sub(%r{\A/}, "").sub(%r{/\Z}, '').gsub("\\/", "/") + [self, Regexp.new(regex)] + end + + #:stopdoc: # Issue #4161 + def (TOKENS[:REGEX]).acceptable?(context={}) + [:NODE,:LBRACE,:RBRACE,:MATCH,:NOMATCH,:COMMA].include? context[:after] + end + #:startdoc: + + TOKENS.add_token :RETURN, "\n", :skip => true, :incr_line => true, :skip_text => true + + TOKENS.add_token :SQUOTE, "'" do |lexer, value| + [TOKENS[:STRING], lexer.slurpstring(value,["'"],:ignore_invalid_escapes).first ] + end + + DQ_initial_token_types = {'$' => :DQPRE,'"' => :STRING} + DQ_continuation_token_types = {'$' => :DQMID,'"' => :DQPOST} + + TOKENS.add_token :DQUOTE, /"/ do |lexer, value| + lexer.tokenize_interpolated_string(DQ_initial_token_types) + end + + TOKENS.add_token :DQCONT, /\}/ do |lexer, value| + lexer.tokenize_interpolated_string(DQ_continuation_token_types) + end + #:stopdoc: # Issue #4161 + def (TOKENS[:DQCONT]).acceptable?(context={}) + context[:string_interpolation_depth] > 0 + end + #:startdoc: + + TOKENS.add_token :DOLLAR_VAR, %r{\$(\w*::)*\w+} do |lexer, value| + [TOKENS[:VARIABLE],value[1..-1]] + end + + TOKENS.add_token :VARIABLE, %r{(\w*::)*\w+} + #:stopdoc: # Issue #4161 + def (TOKENS[:VARIABLE]).acceptable?(context={}) + [:DQPRE,:DQMID].include? 
context[:after] + end + #:startdoc: + + + TOKENS.sort_tokens + + @@pairs = { + "{" => "}", + "(" => ")", + "[" => "]", + "<|" => "|>", + "<<|" => "|>>" + } + + KEYWORDS = TokenList.new + + + KEYWORDS.add_tokens( + + "case" => :CASE, + "class" => :CLASS, + "default" => :DEFAULT, + "define" => :DEFINE, + "import" => :IMPORT, + "if" => :IF, + "elsif" => :ELSIF, + "else" => :ELSE, + "inherits" => :INHERITS, + "node" => :NODE, + "and" => :AND, + "or" => :OR, + "undef" => :UNDEF, + "false" => :FALSE, + "true" => :TRUE, + + "in" => :IN + ) + + def clear + initvars + end + + def expected + return nil if @expected.empty? + name = @expected[-1] + TOKENS.lookup(name) or lex_error "Could not find expected token #{name}" + end + + # scan the whole file + # basically just used for testing + def fullscan + array = [] + + self.scan { |token, str| + # Ignore any definition nesting problems + @indefine = false + array.push([token,str]) + } + array + end + + def file=(file) + @file = file + @line = 1 + contents = File.exists?(file) ? File.read(file) : "" + @scanner = StringScanner.new(contents) + end + + def shift_token + @token_queue.shift + end + + def find_string_token + # We know our longest string token is three chars, so try each size in turn + # until we either match or run out of chars. This way our worst-case is three + # tries, where it is otherwise the number of string token we have. Also, + # the lookups are optimized hash lookups, instead of regex scans. + # + s = @scanner.peek(3) + token = TOKENS.lookup(s[0,3]) || TOKENS.lookup(s[0,2]) || TOKENS.lookup(s[0,1]) + [ token, token && @scanner.scan(token.regex) ] + end + + # Find the next token that matches a regex. We look for these first. + def find_regex_token + @regex += 1 + best_token = nil + best_length = 0 + + # I tried optimizing based on the first char, but it had + # a slightly negative affect and was a good bit more complicated. + TOKENS.regex_tokens.each do |token| + if length = @scanner.match?(token.regex) and token.acceptable?(lexing_context) + # We've found a longer match + if length > best_length + best_length = length + best_token = token + end + end + end + + return best_token, @scanner.scan(best_token.regex) if best_token + end + + # Find the next token, returning the string and the token. + def find_token + @find += 1 + shift_token || find_regex_token || find_string_token + end + + def indefine? + if defined?(@indefine) + @indefine + else + false + end + end + + def initialize + @find = 0 + @regex = 0 + initvars + end + + def initvars + @line = 1 + @previous_token = nil + @scanner = nil + @file = nil + # AAARRGGGG! okay, regexes in ruby are bloody annoying + # no one else has "\n" =~ /\s/ + @skip = %r{[ \t\r]+} + + @namestack = [] + @token_queue = [] + @indefine = false + @expected = [] + @commentstack = [ ['', @line] ] + @lexing_context = { + :after => nil, + :start_of_line => true, + :string_interpolation_depth => 0 + } + end + + # Make any necessary changes to the token and/or value. + def munge_token(token, value) + @line += 1 if token.incr_line + + skip if token.skip_text + + return if token.skip and not token.accumulate? + + token, value = token.convert(self, value) if token.respond_to?(:convert) + + return unless token + + if token.accumulate? + comment = @commentstack.pop + comment[0] << value + "\n" + @commentstack.push(comment) + end + + return if token.skip + + return token, { :value => value, :line => @line } + end + + # Go up one in the namespace. 
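+  # Sketch of the intended stack behaviour (the names used are hypothetical):
+  #   namestack("foo"); namestack("bar::baz")   # namespace => "foo::bar::baz"
+  #   namepop                                   # namespace => "foo"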
+ def namepop + @namestack.pop + end + + # Collect the current namespace. + def namespace + @namestack.join("::") + end + + # This value might have :: in it, but we don't care -- it'll be + # handled normally when joining, and when popping we want to pop + # this full value, however long the namespace is. + def namestack(value) + @namestack << value + end + + def rest + @scanner.rest + end + + # this is the heart of the lexer + def scan + #Puppet.debug("entering scan") + lex_error "Invalid or empty string" unless @scanner + + # Skip any initial whitespace. + skip + + until token_queue.empty? and @scanner.eos? do + yielded = false + matched_token, value = find_token + + # error out if we didn't match anything at all + lex_error "Could not match #{@scanner.rest[/^(\S+|\s+|.*)/]}" unless matched_token + + newline = matched_token.name == :RETURN + + # this matches a blank line; eat the previously accumulated comments + getcomment if lexing_context[:start_of_line] and newline + lexing_context[:start_of_line] = newline + + final_token, token_value = munge_token(matched_token, value) + + unless final_token + skip + next + end + + lexing_context[:after] = final_token.name unless newline + lexing_context[:string_interpolation_depth] += 1 if final_token.name == :DQPRE + lexing_context[:string_interpolation_depth] -= 1 if final_token.name == :DQPOST + + value = token_value[:value] + + if match = @@pairs[value] and final_token.name != :DQUOTE and final_token.name != :SQUOTE + @expected << match + elsif exp = @expected[-1] and exp == value and final_token.name != :DQUOTE and final_token.name != :SQUOTE + @expected.pop + end + + if final_token.name == :LBRACE or final_token.name == :LPAREN + commentpush + end + if final_token.name == :RPAREN + commentpop + end + + yield [final_token.name, token_value] + + if @previous_token + namestack(value) if @previous_token.name == :CLASS and value != '{' + + if @previous_token.name == :DEFINE + if indefine? + msg = "Cannot nest definition #{value} inside #{@indefine}" + self.indefine = false + raise Puppet::ParseError, msg + end + + @indefine = value + end + end + @previous_token = final_token + skip + end + @scanner = nil + + # This indicates that we're done parsing. + yield [false,false] + end + + # Skip any skipchars in our remaining string. + def skip + @scanner.skip(@skip) + end + + # Provide some limited access to the scanner, for those + # tokens that need it. + def scan_until(regex) + @scanner.scan_until(regex) + end + + # we've encountered the start of a string... + # slurp in the rest of the string and return it + def slurpstring(terminators,escapes=%w{ \\ $ ' " n t s }+["\n"],ignore_invalid_escapes=false) + # we search for the next quote that isn't preceded by a + # backslash; the caret is there to match empty strings + str = @scanner.scan_until(/([^\\]|^|[^\\])([\\]{2})*[#{terminators}]/) or lex_error "Unclosed quote after '#{last}' in '#{rest}'" + @line += str.count("\n") # literal carriage returns add to the line count. + str.gsub!(/\\(.)/m) { + ch = $1 + if escapes.include? 
ch + case ch + when 'n'; "\n" + when 't'; "\t" + when 's'; " " + when "\n": '' + else ch + end + else + Puppet.warning "Unrecognised escape sequence '\\#{ch}'#{file && " in file #{file}"}#{line && " at line #{line}"}" unless ignore_invalid_escapes + "\\#{ch}" + end + } + [ str[0..-2],str[-1,1] ] + end + + def tokenize_interpolated_string(token_type,preamble='') + value,terminator = slurpstring('"$') + token_queue << [TOKENS[token_type[terminator]],preamble+value] + if terminator != '$' or @scanner.scan(/\{/) + token_queue.shift + elsif var_name = @scanner.scan(%r{(\w*::)*\w+|[0-9]}) + token_queue << [TOKENS[:VARIABLE],var_name] + tokenize_interpolated_string(DQ_continuation_token_types) + else + tokenize_interpolated_string(token_type,token_queue.pop.last + terminator) + end + end + + # just parse a string, not a whole file + def string=(string) + @scanner = StringScanner.new(string) + end + + # returns the content of the currently accumulated content cache + def commentpop + @commentstack.pop[0] + end + + def getcomment(line = nil) + comment = @commentstack.last + if line.nil? or comment[1] <= line + @commentstack.pop + @commentstack.push(['', @line]) + return comment[0] + end + '' + end + + def commentpush + @commentstack.push(['', @line]) + end +end diff --git a/mcollective/lib/puppet/parser/makefile b/mcollective/lib/puppet/parser/makefile new file mode 100644 index 000000000..c4911d097 --- /dev/null +++ b/mcollective/lib/puppet/parser/makefile @@ -0,0 +1,8 @@ +#parser.rb: grammar.ry +# ryacc --output parser grammar + +parser.rb: grammar.ra + racc -o$@ grammar.ra + +grammar.output: grammar.ra + racc -v -o$@ grammar.ra diff --git a/mcollective/lib/puppet/parser/parser.rb b/mcollective/lib/puppet/parser/parser.rb new file mode 100644 index 000000000..c2fbf976d --- /dev/null +++ b/mcollective/lib/puppet/parser/parser.rb @@ -0,0 +1,2688 @@ +# +# DO NOT MODIFY!!!! +# This file is automatically generated by Racc 1.4.6 +# from Racc grammer file "". +# + +require 'racc/parser.rb' + +require 'puppet' +require 'puppet/util/loadedfile' +require 'puppet/parser/lexer' +require 'puppet/parser/ast' + +module Puppet + class ParseError < Puppet::Error; end + class ImportError < Racc::ParseError; end + class AlreadyImportedError < ImportError; end +end + +module Puppet + module Parser + class Parser < Racc::Parser + +module_eval(<<'...end grammar.ra/module_eval...', 'grammar.ra', 866) + +# It got too annoying having code in a file that needs to be compiled. +require 'puppet/parser/parser_support' + +# Make emacs happy +# Local Variables: +# mode: ruby +# End: + +# $Id$ +...end grammar.ra/module_eval... 
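+# The tables below are emitted by racc from grammar.ra (see the makefile above,
+# roughly `racc -o parser.rb grammar.ra`); hand edits here are lost on regeneration.
+# A minimal smoke test of the generated parser, assuming the 2.6-era API where the
+# constructor takes an environment name, might look roughly like:
+#   require 'puppet/parser/parser'
+#   parser = Puppet::Parser::Parser.new("production")
+#   parser.string = "notice('hello')"
+#   ast = parser.parse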
+##### State transition tables begin ### + +racc_action_table = [ + 256, 257, 228, 63, 327, 64, 156, 54, 82, 356, + -166, 245, 176, 205, 210, 254, 37, 357, 65, 244, + 38, -168, 201, 203, 206, 209, 184, 11, 255, 241, + 242, 158, 54, 251, 72, 75, 72, 75, 102, 117, + 106, -170, 62, 194, 230, 58, 204, 208, 60, 306, + 213, 196, 197, 198, 200, 202, 97, 207, 211, 72, + 75, 241, 242, 102, 199, 106, 163, 71, 59, 307, + 58, 83, 86, 60, 193, 92, 54, 162, 72, 75, + 78, 100, 169, 163, 89, 72, 75, 94, 308, 102, + 163, 106, 71, 59, 162, 59, 83, 86, 59, 169, + 92, 162, 311, 72, 75, 78, 169, 97, 181, 89, + 353, 71, 228, 352, 58, 83, 269, 60, 71, 92, + 59, 345, 83, 86, 137, 184, 92, -171, 89, 72, + 75, 78, 100, 246, 368, 89, 71, 59, 94, 59, + 83, 86, 309, 173, 92, 314, 59, 163, 76, 78, + 72, 75, -167, 89, 102, 310, 106, 37, 162, 173, + 218, 127, 71, 169, 59, 220, 83, 269, 11, 14, + 92, 63, 97, 152, 37, 137, 72, 75, 127, 89, + 102, 319, 106, 71, 218, 11, 14, 83, 86, 220, + 59, 92, 72, 75, 72, 75, 78, 100, 270, 279, + 89, 349, 278, 94, 353, 207, 211, 352, 320, 71, + -169, 59, 199, 83, 86, 197, 198, 92, 72, 75, + 207, 211, 78, -169, 37, 71, 89, 199, 38, 83, + 269, -167, 193, 92, -166, 11, 14, 59, 137, 72, + 75, 272, 89, 102, 182, 106, 37, 207, 211, -186, + 38, 71, 181, 59, 199, 83, 86, 11, 337, 92, + 231, 97, 339, 76, 78, 72, 75, 37, 89, 82, + 48, 38, 71, 48, 323, 176, 83, 86, 11, 59, + 92, 342, 46, 47, 184, 78, 100, 74, -168, 89, + 72, 75, 94, -172, 102, 346, 106, -173, 71, 175, + 59, 59, 83, 86, 240, -171, 92, -170, 241, 242, + 76, 78, 97, 197, 198, 89, 72, 75, 207, 211, + 102, 214, 106, 71, 64, 199, 59, 83, 86, 276, + 215, 92, 217, 246, 275, 173, 78, 100, 97, 82, + 89, 72, 75, 94, 155, 102, 122, 106, 152, 71, + 223, 59, -168, 83, 86, 249, 277, 92, 176, 246, + 247, 122, 78, 100, 225, -166, 89, 72, 75, 94, + 117, 102, 226, 106, 71, -169, 271, 59, 83, 86, + 246, 247, 92, -21, -21, -21, -21, 78, 226, 97, + -167, 89, 72, 75, 52, -168, 102, -166, 106, -169, + 71, -167, 59, -171, 83, 86, 366, 152, 92, -23, + -23, -23, -23, 78, 100, 228, 226, 89, 72, 75, + 94, 50, 102, 373, 106, 71, 49, 375, 59, 83, + 86, 229, -221, 92, 237, 378, 72, 75, 78, 40, + 97, 39, 89, 355, 44, 45, 41, 42, 231, 234, + nil, 71, nil, 59, nil, 83, 86, nil, nil, 92, + 44, 45, 41, 42, 78, 100, 72, 75, 89, 71, + 102, 94, 106, 83, 269, nil, nil, 92, nil, 59, + nil, nil, 137, nil, nil, nil, 89, nil, 97, nil, + nil, nil, 72, 75, nil, nil, 102, 59, 106, 71, + nil, nil, nil, 83, 86, nil, nil, 92, nil, nil, + nil, nil, 78, 100, 97, nil, 89, nil, nil, 94, + nil, nil, 72, 75, nil, 71, 102, 59, 106, 83, + 86, nil, nil, 92, nil, nil, nil, nil, 78, 100, + nil, nil, 89, nil, 97, 94, nil, nil, 72, 75, + nil, nil, 102, 59, 106, 71, nil, nil, nil, 83, + 86, nil, nil, 92, nil, nil, 72, 75, 78, 100, + 97, nil, 89, 72, 75, 94, nil, 102, nil, 106, + nil, 71, nil, 59, nil, 83, 86, 72, 75, 92, + nil, 102, nil, nil, 78, 100, nil, nil, 89, 71, + nil, 94, nil, 83, 269, nil, 71, 92, nil, 59, + 83, 86, 137, nil, 92, nil, 89, nil, nil, 78, + 71, nil, nil, 89, 83, 143, nil, 59, 92, nil, + nil, nil, nil, 137, 59, 72, 75, 89, nil, 102, + nil, 106, 213, 196, 197, 198, 200, 202, 59, 207, + 211, nil, 213, 196, 197, 198, 199, 97, nil, 207, + 211, 72, 75, nil, nil, 102, 199, 106, 71, nil, + nil, nil, 83, 86, nil, nil, 92, nil, nil, nil, + nil, 78, 100, 97, nil, 89, nil, nil, 94, nil, + nil, 72, 75, nil, 71, 102, 59, 106, 83, 86, + nil, nil, 92, nil, nil, nil, nil, 78, 100, nil, + nil, 89, nil, 97, 94, nil, nil, nil, nil, nil, + nil, nil, 59, nil, 71, nil, nil, nil, 83, 86, + 72, 
75, 92, nil, 102, 189, 106, 78, 100, nil, + nil, 89, nil, nil, 94, nil, nil, nil, nil, 72, + 75, nil, 59, 102, nil, 106, 72, 75, nil, nil, + 102, nil, 106, 71, nil, nil, nil, 83, 86, nil, + nil, 92, nil, nil, nil, nil, 78, nil, 97, nil, + 89, nil, 71, nil, nil, nil, 83, 86, nil, 71, + 92, 59, nil, 83, 86, 78, nil, 92, nil, 89, + nil, nil, 78, 100, nil, nil, 89, 72, 75, 94, + 59, 102, nil, 106, nil, nil, nil, 59, nil, nil, + nil, nil, nil, nil, nil, nil, 72, 75, nil, 97, + 102, nil, 106, 72, 75, nil, nil, 102, nil, 106, + 71, nil, nil, nil, 83, 86, nil, nil, 92, nil, + nil, nil, nil, 78, 100, 97, nil, 89, nil, 71, + 94, nil, nil, 83, 86, nil, 71, 92, 59, nil, + 83, 86, 78, nil, 92, nil, 89, nil, nil, 78, + 100, nil, nil, 89, 72, 75, 94, 59, 102, nil, + 106, nil, nil, nil, 59, nil, nil, nil, nil, nil, + nil, nil, nil, 72, 75, nil, 97, nil, nil, 72, + 75, nil, nil, nil, nil, nil, nil, 71, nil, nil, + nil, 83, 86, nil, nil, 92, 340, nil, nil, nil, + 78, 100, 177, nil, 89, nil, 71, 94, nil, nil, + 83, 86, 71, nil, 92, 59, 83, 86, 76, 78, + 92, nil, nil, 89, 76, 78, 72, 75, nil, 89, + 102, nil, 106, nil, 59, nil, 213, 196, 197, 198, + 59, nil, nil, 207, 211, 72, 75, nil, 97, 102, + 199, 106, 72, 75, nil, nil, nil, nil, nil, 71, + nil, nil, nil, 83, 86, nil, nil, 92, nil, nil, + 72, 75, 78, 100, nil, nil, 89, nil, 71, 94, + nil, nil, 83, 86, nil, 71, 92, 59, nil, 83, + 269, 78, nil, 92, nil, 89, nil, nil, 137, nil, + nil, nil, 89, 71, nil, nil, 59, 83, 269, nil, + nil, 92, nil, 59, nil, nil, 137, 72, 75, nil, + 89, 102, nil, 106, nil, nil, nil, nil, nil, nil, + nil, 59, nil, nil, nil, nil, 72, 75, nil, 97, + 102, nil, 106, 72, 75, nil, nil, 102, nil, 106, + 71, nil, nil, nil, 83, 86, nil, nil, 92, nil, + nil, nil, nil, 78, 100, 72, 75, 89, nil, 71, + 94, nil, nil, 83, 86, nil, 71, 92, 59, nil, + 83, 86, 78, nil, 92, nil, 89, nil, nil, 78, + 72, 75, nil, 89, 102, nil, 106, 59, 71, nil, + nil, nil, 83, 269, 59, nil, 92, nil, 72, 75, + nil, 137, 102, nil, 106, 89, nil, nil, nil, nil, + nil, nil, nil, 71, nil, nil, 59, 83, 86, nil, + 97, 92, nil, 72, 75, nil, 78, 102, nil, 106, + 89, 71, nil, 72, 75, 83, 86, 102, nil, 92, + nil, 59, nil, nil, 78, 100, nil, nil, 89, nil, + nil, 94, nil, nil, nil, nil, 71, nil, nil, 59, + 83, 86, nil, nil, 92, nil, 71, nil, nil, 78, + 83, 269, nil, 89, 92, nil, 72, 75, nil, 137, + 102, nil, 106, 89, 59, nil, nil, nil, nil, nil, + nil, nil, nil, nil, 59, 72, 75, nil, 97, 102, + nil, 106, 72, 75, nil, nil, 102, nil, 106, 71, + nil, nil, nil, 83, 86, nil, nil, 92, nil, nil, + nil, nil, 78, 100, 97, nil, 89, nil, 71, 94, + nil, nil, 83, 86, nil, 71, 92, 59, nil, 83, + 86, 78, nil, 92, nil, 89, nil, nil, 78, 100, + 212, nil, 89, nil, nil, 94, 59, nil, nil, 205, + 210, nil, nil, 59, nil, nil, nil, nil, 201, 203, + 206, 209, nil, nil, 205, 210, nil, nil, nil, nil, + nil, nil, 274, 201, 203, 206, 209, nil, nil, nil, + nil, nil, 204, 208, nil, nil, 213, 196, 197, 198, + 200, 202, nil, 207, 211, nil, nil, 204, 208, nil, + 199, 213, 196, 197, 198, 200, 202, nil, 207, 211, + 205, 210, nil, nil, nil, 199, nil, nil, nil, 201, + 203, 206, 209, nil, nil, 205, 210, nil, nil, nil, + nil, nil, nil, nil, 201, 203, 206, 209, nil, nil, + nil, nil, nil, nil, 208, nil, nil, 213, 196, 197, + 198, 200, 202, nil, 207, 211, nil, nil, 204, 208, + nil, 199, 213, 196, 197, 198, 200, 202, nil, 207, + 211, 205, 210, nil, nil, nil, 199, nil, nil, nil, + 201, 203, 206, 209, nil, nil, 205, 210, nil, nil, + nil, nil, nil, nil, nil, 201, 203, 206, 209, nil, + nil, nil, nil, nil, 204, 208, nil, nil, 213, 
196, + 197, 198, 200, 202, nil, 207, 211, nil, nil, nil, + nil, nil, 199, 213, 196, 197, 198, 200, 202, nil, + 207, 211, 205, 210, nil, nil, nil, 199, nil, nil, + nil, 201, 203, 206, 209, nil, nil, nil, 210, nil, + 213, 196, 197, 198, 200, 202, 201, 207, 211, nil, + nil, nil, nil, nil, 199, 204, 208, 210, nil, 213, + 196, 197, 198, 200, 202, 201, 207, 211, nil, nil, + nil, nil, 210, 199, 213, 196, 197, 198, 200, 202, + 201, 207, 211, nil, nil, nil, nil, nil, 199, nil, + nil, 210, nil, 213, 196, 197, 198, 200, 202, 201, + 207, 211, nil, nil, nil, nil, nil, 199, 213, 196, + 197, 198, 200, 202, nil, 207, 211, nil, nil, 384, + nil, nil, 199, nil, nil, nil, nil, 213, 196, 197, + 198, 200, 202, nil, 207, 211, nil, nil, 297, nil, + 26, 199, 33, 1, nil, 7, 12, nil, 17, nil, + 23, nil, 29, nil, 3, nil, nil, 11, 14, 26, + 305, 33, 1, nil, 7, 12, nil, 17, nil, 23, + nil, 29, nil, 3, nil, nil, 11, 14, nil, 383, + nil, 26, nil, 33, 1, nil, 7, 12, nil, 17, + nil, 23, nil, 29, nil, 3, nil, nil, 11, 14, + 26, 325, 33, 1, nil, 7, 12, nil, 17, nil, + 23, nil, 29, nil, 3, nil, nil, 11, 14, nil, + 382, nil, 26, nil, 33, 1, nil, 7, 12, nil, + 17, nil, 23, nil, 29, nil, 3, nil, nil, 11, + 14, 26, 380, 33, 1, nil, 7, 12, nil, 17, + nil, 23, nil, 29, nil, 3, nil, nil, 11, 14, + nil, 376, nil, 26, nil, 33, 1, nil, 7, 12, + nil, 17, nil, 23, nil, 29, nil, 3, nil, nil, + 11, 14, 26, 350, 33, 1, nil, 7, 12, nil, + 17, nil, 23, nil, 29, nil, 3, nil, nil, 11, + 14, nil, 358, nil, 26, nil, 33, 1, nil, 7, + 12, nil, 17, nil, 23, nil, 29, nil, 3, nil, + nil, 11, 14, 26, 365, 33, 1, nil, 7, 12, + nil, 17, nil, 23, nil, 29, nil, 3, nil, nil, + 11, 14, nil, 364, nil, 26, nil, 33, 1, nil, + 7, 12, nil, 17, nil, 23, nil, 29, nil, 3, + nil, nil, 11, 14, 26, nil, 33, 1, nil, 7, + 12, nil, 17, nil, 23, nil, 29, nil, 3, nil, + nil, 11, 14, 26, nil, 33, 1, nil, 7, 12, + nil, 17, nil, 23, nil, 29, nil, 3, nil, nil, + 11, 14, 26, nil, 33, 1, nil, 7, 12, nil, + 17, nil, 23, nil, 29, nil, 3, nil, nil, 11, + 14 ] + +racc_action_check = [ + 180, 180, 152, 22, 243, 22, 55, 17, 86, 301, + 81, 165, 96, 180, 180, 178, 12, 301, 22, 165, + 12, 95, 180, 180, 180, 180, 86, 12, 178, 243, + 243, 55, 156, 174, 208, 208, 106, 106, 208, 215, + 208, 91, 22, 106, 152, 17, 180, 180, 17, 218, + 180, 180, 180, 180, 180, 180, 208, 180, 180, 176, + 176, 174, 174, 176, 180, 176, 62, 208, 17, 219, + 156, 208, 208, 156, 106, 208, 158, 62, 369, 369, + 208, 208, 62, 239, 208, 205, 205, 208, 220, 205, + 65, 205, 176, 156, 239, 208, 176, 176, 211, 239, + 176, 65, 224, 181, 181, 176, 65, 205, 273, 176, + 350, 369, 143, 350, 158, 369, 369, 158, 205, 369, + 176, 273, 205, 205, 369, 143, 205, 90, 369, 356, + 356, 205, 205, 344, 344, 205, 181, 158, 205, 369, + 181, 181, 221, 226, 181, 227, 205, 163, 181, 181, + 352, 352, 87, 181, 352, 221, 352, 120, 163, 229, + 309, 120, 356, 163, 181, 309, 356, 356, 120, 120, + 356, 85, 352, 231, 43, 356, 342, 342, 43, 356, + 342, 232, 342, 352, 122, 43, 43, 352, 352, 122, + 356, 352, 182, 182, 278, 278, 352, 352, 182, 195, + 352, 278, 195, 352, 297, 281, 281, 297, 233, 342, + 103, 352, 281, 342, 342, 280, 280, 342, 184, 184, + 280, 280, 342, 84, 30, 182, 342, 280, 30, 182, + 182, 105, 278, 182, 101, 30, 30, 342, 182, 204, + 204, 184, 182, 204, 80, 204, 1, 282, 282, 78, + 1, 184, 77, 182, 282, 184, 184, 1, 250, 184, + 252, 204, 253, 184, 184, 23, 23, 234, 184, 23, + 71, 234, 204, 7, 234, 70, 204, 204, 234, 184, + 204, 264, 7, 7, 269, 204, 204, 23, 68, 204, + 26, 26, 204, 107, 26, 275, 26, 67, 23, 66, + 204, 207, 23, 23, 160, 108, 23, 
109, 160, 160, + 23, 23, 26, 298, 298, 23, 196, 196, 298, 298, + 196, 114, 196, 26, 115, 298, 23, 26, 26, 188, + 119, 26, 121, 188, 188, 64, 26, 26, 196, 127, + 26, 29, 29, 26, 52, 29, 51, 29, 50, 196, + 132, 26, 133, 196, 196, 171, 192, 196, 135, 171, + 171, 36, 196, 196, 136, 138, 196, 213, 213, 196, + 33, 213, 139, 213, 29, 140, 183, 196, 29, 29, + 183, 183, 29, 28, 28, 28, 28, 29, 316, 213, + 142, 29, 306, 306, 16, 328, 306, 329, 306, 331, + 213, 332, 29, 333, 213, 213, 338, 175, 213, 35, + 35, 35, 35, 213, 213, 173, 172, 213, 197, 197, + 213, 9, 197, 353, 197, 306, 8, 357, 213, 306, + 306, 144, 368, 306, 157, 370, 299, 299, 306, 3, + 197, 2, 306, 299, 34, 34, 34, 34, 153, 154, + nil, 197, nil, 306, nil, 197, 197, nil, nil, 197, + 4, 4, 4, 4, 197, 197, 39, 39, 197, 299, + 39, 197, 39, 299, 299, nil, nil, 299, nil, 197, + nil, nil, 299, nil, nil, nil, 299, nil, 39, nil, + nil, nil, 206, 206, nil, nil, 206, 299, 206, 39, + nil, nil, nil, 39, 39, nil, nil, 39, nil, nil, + nil, nil, 39, 39, 206, nil, 39, nil, nil, 39, + nil, nil, 46, 46, nil, 206, 46, 39, 46, 206, + 206, nil, nil, 206, nil, nil, nil, nil, 206, 206, + nil, nil, 206, nil, 46, 206, nil, nil, 47, 47, + nil, nil, 47, 206, 47, 46, nil, nil, nil, 46, + 46, nil, nil, 46, nil, nil, 270, 270, 46, 46, + 47, nil, 46, 48, 48, 46, nil, 48, nil, 48, + nil, 47, nil, 46, nil, 47, 47, 49, 49, 47, + nil, 49, nil, nil, 47, 47, nil, nil, 47, 270, + nil, 47, nil, 270, 270, nil, 48, 270, nil, 47, + 48, 48, 270, nil, 48, nil, 270, nil, nil, 48, + 49, nil, nil, 48, 49, 49, nil, 270, 49, nil, + nil, nil, nil, 49, 48, 203, 203, 49, nil, 203, + nil, 203, 294, 294, 294, 294, 294, 294, 49, 294, + 294, nil, 284, 284, 284, 284, 294, 203, nil, 284, + 284, 209, 209, nil, nil, 209, 284, 209, 203, nil, + nil, nil, 203, 203, nil, nil, 203, nil, nil, nil, + nil, 203, 203, 209, nil, 203, nil, nil, 203, nil, + nil, 210, 210, nil, 209, 210, 203, 210, 209, 209, + nil, nil, 209, nil, nil, nil, nil, 209, 209, nil, + nil, 209, nil, 210, 209, nil, nil, nil, nil, nil, + nil, nil, 209, nil, 210, nil, nil, nil, 210, 210, + 102, 102, 210, nil, 102, 102, 102, 210, 210, nil, + nil, 210, nil, nil, 210, nil, nil, nil, nil, 63, + 63, nil, 210, 63, nil, 63, 202, 202, nil, nil, + 202, nil, 202, 102, nil, nil, nil, 102, 102, nil, + nil, 102, nil, nil, nil, nil, 102, nil, 202, nil, + 102, nil, 63, nil, nil, nil, 63, 63, nil, 202, + 63, 102, nil, 202, 202, 63, nil, 202, nil, 63, + nil, nil, 202, 202, nil, nil, 202, 100, 100, 202, + 63, 100, nil, 100, nil, nil, nil, 202, nil, nil, + nil, nil, nil, nil, nil, nil, 277, 277, nil, 100, + 277, nil, 277, 198, 198, nil, nil, 198, nil, 198, + 100, nil, nil, nil, 100, 100, nil, nil, 100, nil, + nil, nil, nil, 100, 100, 198, nil, 100, nil, 277, + 100, nil, nil, 277, 277, nil, 198, 277, 100, nil, + 198, 198, 277, nil, 198, nil, 277, nil, nil, 198, + 198, nil, nil, 198, 256, 256, 198, 277, 256, nil, + 256, nil, nil, nil, 198, nil, nil, nil, nil, nil, + nil, nil, nil, 254, 254, nil, 256, nil, nil, 74, + 74, nil, nil, nil, nil, nil, nil, 256, nil, nil, + nil, 256, 256, nil, nil, 256, 254, nil, nil, nil, + 256, 256, 74, nil, 256, nil, 254, 256, nil, nil, + 254, 254, 74, nil, 254, 256, 74, 74, 254, 254, + 74, nil, nil, 254, 74, 74, 75, 75, nil, 74, + 75, nil, 75, nil, 254, nil, 286, 286, 286, 286, + 74, nil, nil, 286, 286, 248, 248, nil, 75, 248, + 286, 248, 245, 245, nil, nil, nil, nil, nil, 75, + nil, nil, nil, 75, 75, nil, nil, 75, nil, nil, + 244, 244, 75, 75, nil, nil, 75, nil, 248, 75, + nil, nil, 248, 248, nil, 245, 248, 75, nil, 245, + 245, 
248, nil, 245, nil, 248, nil, nil, 245, nil, + nil, nil, 245, 244, nil, nil, 248, 244, 244, nil, + nil, 244, nil, 245, nil, nil, 244, 97, 97, nil, + 244, 97, nil, 97, nil, nil, nil, nil, nil, nil, + nil, 244, nil, nil, nil, nil, 82, 82, nil, 97, + 82, nil, 82, 199, 199, nil, nil, 199, nil, 199, + 97, nil, nil, nil, 97, 97, nil, nil, 97, nil, + nil, nil, nil, 97, 97, 214, 214, 97, nil, 82, + 97, nil, nil, 82, 82, nil, 199, 82, 97, nil, + 199, 199, 82, nil, 199, nil, 82, nil, nil, 199, + 230, 230, nil, 199, 230, nil, 230, 82, 214, nil, + nil, nil, 214, 214, 199, nil, 214, nil, 200, 200, + nil, 214, 200, nil, 200, 214, nil, nil, nil, nil, + nil, nil, nil, 230, nil, nil, 214, 230, 230, nil, + 200, 230, nil, 228, 228, nil, 230, 228, nil, 228, + 230, 200, nil, 225, 225, 200, 200, 225, nil, 200, + nil, 230, nil, nil, 200, 200, nil, nil, 200, nil, + nil, 200, nil, nil, nil, nil, 228, nil, nil, 200, + 228, 228, nil, nil, 228, nil, 225, nil, nil, 228, + 225, 225, nil, 228, 225, nil, 201, 201, nil, 225, + 201, nil, 201, 225, 228, nil, nil, nil, nil, nil, + nil, nil, nil, nil, 225, 308, 308, nil, 201, 308, + nil, 308, 94, 94, nil, nil, 94, nil, 94, 201, + nil, nil, nil, 201, 201, nil, nil, 201, nil, nil, + nil, nil, 201, 201, 94, nil, 201, nil, 308, 201, + nil, nil, 308, 308, nil, 94, 308, 201, nil, 94, + 94, 308, nil, 94, nil, 308, nil, nil, 94, 94, + 111, nil, 94, nil, nil, 94, 308, nil, nil, 111, + 111, nil, nil, 94, nil, nil, nil, nil, 111, 111, + 111, 111, nil, nil, 186, 186, nil, nil, nil, nil, + nil, nil, 186, 186, 186, 186, 186, nil, nil, nil, + nil, nil, 111, 111, nil, nil, 111, 111, 111, 111, + 111, 111, nil, 111, 111, nil, nil, 186, 186, nil, + 111, 186, 186, 186, 186, 186, 186, nil, 186, 186, + 288, 288, nil, nil, nil, 186, nil, nil, nil, 288, + 288, 288, 288, nil, nil, 131, 131, nil, nil, nil, + nil, nil, nil, nil, 131, 131, 131, 131, nil, nil, + nil, nil, nil, nil, 288, nil, nil, 288, 288, 288, + 288, 288, 288, nil, 288, 288, nil, nil, 131, 131, + nil, 288, 131, 131, 131, 131, 131, 131, nil, 131, + 131, 130, 130, nil, nil, nil, 131, nil, nil, nil, + 130, 130, 130, 130, nil, nil, 292, 292, nil, nil, + nil, nil, nil, nil, nil, 292, 292, 292, 292, nil, + nil, nil, nil, nil, 130, 130, nil, nil, 130, 130, + 130, 130, 130, 130, nil, 130, 130, nil, nil, nil, + nil, nil, 130, 292, 292, 292, 292, 292, 292, nil, + 292, 292, 124, 124, nil, nil, nil, 292, nil, nil, + nil, 124, 124, 124, 124, nil, nil, nil, 293, nil, + 285, 285, 285, 285, 285, 285, 293, 285, 285, nil, + nil, nil, nil, nil, 285, 124, 124, 289, nil, 124, + 124, 124, 124, 124, 124, 289, 124, 124, nil, nil, + nil, nil, 287, 124, 293, 293, 293, 293, 293, 293, + 287, 293, 293, nil, nil, nil, nil, nil, 293, nil, + nil, 290, nil, 289, 289, 289, 289, 289, 289, 290, + 289, 289, nil, nil, nil, nil, nil, 289, 287, 287, + 287, 287, 287, 287, nil, 287, 287, nil, nil, 381, + nil, nil, 287, nil, nil, nil, nil, 290, 290, 290, + 290, 290, 290, nil, 290, 290, nil, nil, 212, nil, + 381, 290, 381, 381, nil, 381, 381, nil, 381, nil, + 381, nil, 381, nil, 381, nil, nil, 381, 381, 212, + 217, 212, 212, nil, 212, 212, nil, 212, nil, 212, + nil, 212, nil, 212, nil, nil, 212, 212, nil, 379, + nil, 217, nil, 217, 217, nil, 217, 217, nil, 217, + nil, 217, nil, 217, nil, 217, nil, nil, 217, 217, + 379, 237, 379, 379, nil, 379, 379, nil, 379, nil, + 379, nil, 379, nil, 379, nil, nil, 379, 379, nil, + 375, nil, 237, nil, 237, 237, nil, 237, 237, nil, + 237, nil, 237, nil, 237, nil, 237, nil, nil, 237, + 237, 375, 373, 375, 375, nil, 375, 375, nil, 375, + nil, 375, 
nil, 375, nil, 375, nil, nil, 375, 375, + nil, 363, nil, 373, nil, 373, 373, nil, 373, 373, + nil, 373, nil, 373, nil, 373, nil, 373, nil, nil, + 373, 373, 363, 296, 363, 363, nil, 363, 363, nil, + 363, nil, 363, nil, 363, nil, 363, nil, nil, 363, + 363, nil, 304, nil, 296, nil, 296, 296, nil, 296, + 296, nil, 296, nil, 296, nil, 296, nil, 296, nil, + nil, 296, 296, 304, 324, 304, 304, nil, 304, 304, + nil, 304, nil, 304, nil, 304, nil, 304, nil, nil, + 304, 304, nil, 320, nil, 324, nil, 324, 324, nil, + 324, 324, nil, 324, nil, 324, nil, 324, nil, 324, + nil, nil, 324, 324, 320, nil, 320, 320, nil, 320, + 320, nil, 320, nil, 320, nil, 320, nil, 320, nil, + nil, 320, 320, 19, nil, 19, 19, nil, 19, 19, + nil, 19, nil, 19, nil, 19, nil, 19, nil, nil, + 19, 19, 0, nil, 0, 0, nil, 0, 0, nil, + 0, nil, 0, nil, 0, nil, 0, nil, nil, 0, + 0 ] + +racc_action_pointer = [ + 1832, 210, 426, 395, 396, nil, nil, 267, 418, 413, + nil, nil, -20, nil, nil, nil, 394, 5, nil, 1813, + nil, nil, -3, 263, nil, nil, 288, nil, 319, 339, + 188, nil, nil, 368, 380, 345, 337, nil, nil, 464, + nil, nil, nil, 138, nil, nil, 520, 546, 571, 585, + 308, 322, 344, nil, nil, -6, nil, nil, nil, nil, + nil, nil, 42, 747, 295, 66, 291, 274, 265, nil, + 269, 264, nil, nil, 907, 954, nil, 240, 243, nil, + 221, -13, 1064, nil, 200, 165, 2, 129, nil, nil, + 104, 18, nil, nil, 1240, -2, 6, 1045, nil, nil, + 805, 211, 728, 187, nil, 208, 34, 270, 282, 284, + nil, 1282, nil, nil, 313, 316, nil, nil, nil, 318, + 121, 324, 149, nil, 1465, nil, nil, 333, nil, nil, + 1404, 1358, 343, 329, nil, 352, 323, nil, 342, 360, + 352, nil, 367, 101, 411, nil, nil, nil, nil, nil, + nil, nil, -9, 436, 412, nil, 30, 426, 74, nil, + 258, nil, nil, 123, nil, -7, nil, nil, nil, nil, + nil, 348, 404, 404, 11, 367, 57, nil, 3, nil, + -4, 101, 190, 369, 216, nil, 1297, nil, 322, nil, + nil, nil, 345, nil, nil, 190, 314, 416, 831, 1071, + 1136, 1214, 754, 633, 237, 83, 490, 238, 32, 659, + 689, 35, 1589, 365, 1093, 37, nil, 1611, 34, 44, + 73, 130, nil, nil, 93, 1171, 103, 136, 1161, 119, + 1118, 133, 172, 200, 231, nil, nil, 1652, nil, 59, + nil, nil, nil, -21, 998, 980, nil, nil, 973, nil, + 249, nil, 248, 255, 901, nil, 882, nil, nil, nil, + nil, nil, nil, nil, 270, nil, nil, nil, nil, 260, + 564, nil, nil, 96, nil, 288, nil, 824, 192, nil, + 159, 144, 186, nil, 598, 1446, 912, 1514, 1343, 1499, + 1533, nil, 1419, 1480, 588, nil, 1734, 173, 257, 434, + nil, -3, nil, nil, 1753, nil, 390, nil, 1233, 125, + nil, nil, nil, nil, nil, nil, 376, nil, nil, nil, + 1794, nil, nil, nil, 1775, nil, nil, nil, 372, 374, + nil, 376, 378, 380, nil, nil, nil, nil, 397, nil, + nil, nil, 174, nil, 122, nil, nil, nil, nil, nil, + 79, nil, 148, 415, nil, nil, 127, 419, nil, nil, + nil, nil, nil, 1712, nil, nil, nil, nil, 423, 76, + 426, nil, nil, 1693, nil, 1671, nil, nil, nil, 1630, + nil, 1570, nil, nil, nil ] + +racc_action_default = [ + -197, -234, -234, -50, -234, -8, -9, -234, -234, -22, + -10, -188, -189, -11, -186, -12, -234, -234, -13, -1, + -14, -2, -234, -187, -15, -3, -234, -16, -5, -234, + -234, -17, -6, -234, -18, -7, -197, -189, -187, -234, + -51, -26, -27, -234, -24, -25, -234, -234, -234, -85, + -92, -197, -234, -196, -194, -197, -190, -192, -193, -222, + -195, -4, -197, -234, -85, -197, -53, -232, -42, -175, + -43, -214, -117, -33, -234, -234, -44, -31, -74, -32, + -234, -36, -234, -122, -37, -234, -73, -38, -172, -72, + -39, -40, -174, -41, -234, -103, -111, -234, -132, -112, + -234, -104, -234, -108, -110, -105, -234, -114, -106, -113, + -109, -234, 
-125, -107, -234, -234, -49, -176, -177, -179, + -234, -234, -198, -199, -83, -19, -22, -187, -21, -23, + -82, -84, -234, -75, -86, -81, -70, -74, -76, -220, + -79, -68, -77, -73, -234, -171, -170, -80, -78, -90, + -91, -93, -234, -220, -197, 385, -234, -234, -234, -208, + -234, -57, -214, -197, -59, -234, -66, -65, -56, -73, + -95, -234, -220, -234, -234, -92, -234, -30, -234, -118, + -234, -234, -234, -234, -234, -142, -234, -149, -234, -217, + -230, -226, -234, -229, -225, -234, -234, -234, -234, -234, + -234, -234, -234, -234, -234, -234, -234, -234, -234, -234, + -234, -234, -234, -234, -234, -234, -20, -234, -207, -234, + -205, -234, -202, -231, -234, -71, -221, -234, -234, -85, + -234, -221, -234, -234, -234, -210, -191, -234, -209, -234, + -54, -62, -61, -234, -234, -234, -218, -219, -234, -124, + -234, -55, -220, -234, -234, -28, -234, -120, -119, -35, + -34, -173, -168, -166, -234, -169, -160, -167, -161, -73, + -234, -123, -116, -234, -152, -219, -215, -234, -234, -223, + -137, -139, -138, -133, -140, -144, -141, -146, -151, -148, + -145, -134, -150, -147, -143, -135, -234, -128, -136, -234, + -154, -234, -158, -178, -234, -181, -234, -200, -234, -234, + -201, -45, -69, -87, -46, -88, -220, -89, -94, -48, + -234, -212, -211, -213, -234, -185, -58, -60, -97, -98, + -63, -102, -99, -100, -101, -64, -96, -47, -234, -233, + -29, -121, -234, -163, -220, -115, -216, -228, -227, -224, + -128, -127, -234, -234, -155, -153, -234, -234, -180, -206, + -204, -203, -67, -234, -183, -184, -52, -165, -219, -234, + -234, -126, -129, -234, -159, -234, -182, -164, -162, -234, + -131, -234, -157, -130, -156 ] + +racc_goto_table = [ + 22, 9, 68, 112, 222, 264, 61, 36, 53, 179, + 268, 141, 70, 19, 2, 77, 191, 118, 51, 22, + 9, 139, 116, 21, 73, 91, 56, 147, 133, 149, + 115, 227, 153, 2, 300, 128, 172, 302, 135, 160, + 125, 129, 174, 22, 126, 232, 351, 43, 171, 299, + 260, 146, 369, 68, 121, 330, 335, 258, 266, 123, + 318, 344, 136, 70, 250, 119, 178, 183, 224, 154, + 233, 55, 157, 66, 123, 73, 91, 120, 159, 238, + 219, 221, 326, 322, 195, 190, 16, 188, nil, nil, + nil, nil, nil, 264, nil, nil, nil, nil, 343, 371, + nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, + nil, nil, 128, nil, nil, 88, nil, 216, 129, 354, + 22, 126, 302, 260, nil, nil, nil, nil, nil, nil, + nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, + 243, 88, nil, nil, 338, nil, nil, 53, nil, 53, + nil, nil, nil, nil, 149, nil, nil, 252, nil, nil, + 68, 262, nil, 68, nil, 236, 88, nil, nil, nil, + 70, nil, nil, 70, nil, nil, 273, 235, nil, 374, + nil, nil, 259, 91, 146, 73, 91, 312, 348, nil, + 341, 361, 264, 262, 87, nil, 261, 377, 291, 303, + nil, 316, 295, 147, 133, 313, nil, nil, 362, nil, + 149, nil, 22, 9, 135, nil, 146, 22, 9, nil, + 142, nil, nil, 328, 328, 296, 2, 146, 261, nil, + 304, 2, nil, 68, nil, nil, 370, 22, 9, nil, + 321, nil, nil, 70, nil, 87, 146, 146, nil, 262, + 324, 2, nil, nil, nil, 259, 91, 190, 261, 261, + nil, 81, nil, nil, nil, nil, nil, nil, nil, nil, + nil, nil, 146, 88, 88, nil, 88, nil, 262, nil, + nil, nil, nil, 61, 261, nil, nil, 138, 84, nil, + nil, 61, nil, nil, nil, nil, 22, 9, nil, nil, + 166, 146, nil, 166, 22, 9, 88, nil, nil, nil, + 2, 61, 81, 261, 140, nil, nil, 88, 2, nil, + 22, 9, nil, nil, 22, 9, nil, 167, nil, 372, + 167, nil, nil, 363, 2, 262, 88, 88, 2, 84, + nil, 90, nil, nil, nil, nil, 88, nil, 262, nil, + 61, nil, 87, 267, nil, 87, nil, nil, 146, nil, + nil, nil, 88, 22, 9, nil, 61, 145, 61, nil, + 261, 146, nil, 22, 9, 22, 9, 2, 114, 22, + 9, 22, 9, 261, 93, 267, 379, 2, 381, 2, + 
nil, 88, 90, 2, nil, 2, 142, 132, nil, nil, + nil, 166, nil, nil, nil, nil, nil, nil, nil, nil, + 148, nil, 170, nil, nil, 332, 332, nil, nil, 81, + 263, nil, 81, nil, nil, 87, nil, nil, 167, nil, + nil, 170, nil, nil, nil, 93, nil, nil, nil, nil, + nil, 267, nil, nil, nil, nil, 84, 265, 88, 84, + nil, 170, 263, nil, nil, nil, nil, nil, nil, nil, + nil, 88, nil, 138, nil, nil, nil, nil, nil, nil, + 267, nil, nil, nil, nil, nil, nil, 166, 124, 265, + nil, nil, 329, 329, nil, 130, 131, nil, nil, nil, + 140, nil, 81, nil, nil, nil, nil, nil, nil, 90, + 145, nil, 90, nil, 167, nil, nil, nil, 263, 331, + 331, nil, nil, nil, 180, nil, nil, nil, nil, 84, + nil, nil, nil, nil, nil, 253, nil, 267, nil, nil, + nil, nil, 145, 185, nil, 265, 186, 263, nil, 187, + 267, nil, 93, 145, nil, 93, nil, nil, 283, nil, + nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, + nil, nil, 333, 333, 265, nil, nil, nil, nil, nil, + nil, nil, 90, nil, nil, nil, nil, 315, nil, 317, + nil, nil, nil, nil, nil, nil, 148, nil, 145, nil, + nil, nil, nil, nil, 263, nil, nil, 336, nil, nil, + nil, nil, nil, nil, nil, 334, 334, 263, nil, nil, + nil, nil, nil, nil, nil, 93, nil, 145, nil, nil, + nil, 265, nil, nil, nil, nil, 347, nil, nil, nil, + nil, nil, nil, nil, 265, 280, 281, 282, nil, 284, + 285, 286, 287, 288, 289, 290, nil, 292, 293, 294, + nil, nil, 298, nil, nil, 359, nil, 360, nil, nil, + nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, + nil, nil, nil, nil, 145, nil, nil, nil, nil, nil, + nil, nil, nil, nil, nil, nil, nil, 145, nil, nil, + nil, 367, nil, nil, nil, 180 ] + +racc_goto_check = [ + 37, 21, 30, 62, 82, 68, 4, 32, 64, 60, + 70, 47, 31, 2, 52, 22, 85, 72, 32, 37, + 21, 35, 5, 3, 21, 28, 78, 50, 30, 53, + 37, 36, 38, 52, 66, 5, 35, 68, 31, 41, + 19, 7, 41, 37, 21, 36, 63, 20, 57, 65, + 23, 28, 58, 30, 74, 46, 46, 61, 69, 3, + 56, 71, 33, 31, 36, 73, 22, 57, 34, 74, + 75, 76, 77, 40, 3, 21, 28, 20, 3, 79, + 80, 81, 42, 83, 84, 30, 1, 57, nil, nil, + nil, nil, nil, 68, nil, nil, nil, nil, 70, 63, + nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, + nil, nil, 5, nil, nil, 49, nil, 19, 7, 66, + 37, 21, 68, 23, nil, nil, nil, nil, nil, nil, + nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, + 41, 49, nil, nil, 36, nil, nil, 64, nil, 64, + nil, nil, nil, nil, 53, nil, nil, 38, nil, nil, + 30, 30, nil, 30, nil, 78, 49, nil, nil, nil, + 31, nil, nil, 31, nil, nil, 22, 3, nil, 68, + nil, nil, 21, 28, 28, 21, 28, 47, 85, nil, + 60, 82, 68, 30, 26, nil, 52, 70, 64, 72, + nil, 35, 64, 50, 30, 53, nil, nil, 36, nil, + 53, nil, 37, 21, 31, nil, 28, 37, 21, nil, + 26, nil, nil, 30, 30, 2, 52, 28, 52, nil, + 2, 52, nil, 30, nil, nil, 36, 37, 21, nil, + 32, nil, nil, 31, nil, 26, 28, 28, nil, 30, + 2, 52, nil, nil, nil, 21, 28, 30, 52, 52, + nil, 24, nil, nil, nil, nil, nil, nil, nil, nil, + nil, nil, 28, 49, 49, nil, 49, nil, 30, nil, + nil, nil, nil, 4, 52, nil, nil, 24, 25, nil, + nil, 4, nil, nil, nil, nil, 37, 21, nil, nil, + 24, 28, nil, 24, 37, 21, 49, nil, nil, nil, + 52, 4, 24, 52, 25, nil, nil, 49, 52, nil, + 37, 21, nil, nil, 37, 21, nil, 25, nil, 62, + 25, nil, nil, 2, 52, 30, 49, 49, 52, 25, + nil, 27, nil, nil, nil, nil, 49, nil, 30, nil, + 4, nil, 26, 26, nil, 26, nil, nil, 28, nil, + nil, nil, 49, 37, 21, nil, 4, 27, 4, nil, + 52, 28, nil, 37, 21, 37, 21, 52, 54, 37, + 21, 37, 21, 52, 29, 26, 2, 52, 2, 52, + nil, 49, 27, 52, nil, 52, 26, 54, nil, nil, + nil, 24, nil, nil, nil, nil, nil, nil, nil, nil, + 29, nil, 54, nil, nil, 26, 26, nil, nil, 24, + 24, nil, 24, nil, nil, 26, nil, nil, 25, nil, + nil, 54, 
nil, nil, nil, 29, nil, nil, nil, nil, + nil, 26, nil, nil, nil, nil, 25, 25, 49, 25, + nil, 54, 24, nil, nil, nil, nil, nil, nil, nil, + nil, 49, nil, 24, nil, nil, nil, nil, nil, nil, + 26, nil, nil, nil, nil, nil, nil, 24, 51, 25, + nil, nil, 24, 24, nil, 51, 51, nil, nil, nil, + 25, nil, 24, nil, nil, nil, nil, nil, nil, 27, + 27, nil, 27, nil, 25, nil, nil, nil, 24, 25, + 25, nil, nil, nil, 51, nil, nil, nil, nil, 25, + nil, nil, nil, nil, nil, 54, nil, 26, nil, nil, + nil, nil, 27, 51, nil, 25, 51, 24, nil, 51, + 26, nil, 29, 27, nil, 29, nil, nil, 54, nil, + nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, + nil, nil, 27, 27, 25, nil, nil, nil, nil, nil, + nil, nil, 27, nil, nil, nil, nil, 54, nil, 54, + nil, nil, nil, nil, nil, nil, 29, nil, 27, nil, + nil, nil, nil, nil, 24, nil, nil, 54, nil, nil, + nil, nil, nil, nil, nil, 29, 29, 24, nil, nil, + nil, nil, nil, nil, nil, 29, nil, 27, nil, nil, + nil, 25, nil, nil, nil, nil, 54, nil, nil, nil, + nil, nil, nil, nil, 25, 51, 51, 51, nil, 51, + 51, 51, 51, 51, 51, 51, nil, 51, 51, 51, + nil, nil, 51, nil, nil, 54, nil, 54, nil, nil, + nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, + nil, nil, nil, nil, 27, nil, nil, nil, nil, nil, + nil, nil, nil, nil, nil, nil, nil, 27, nil, nil, + nil, 54, nil, nil, nil, 51 ] + +racc_goto_pointer = [ + nil, 86, 13, 23, -13, -8, nil, -2, nil, nil, + nil, nil, nil, nil, nil, nil, nil, nil, nil, -3, + 43, 1, -8, -131, 238, 265, 171, 318, 2, 361, + -21, -11, 6, 13, -68, -28, -108, 0, -18, nil, + 51, -23, -157, nil, nil, nil, -189, -38, nil, 92, + -22, 439, 14, -21, 349, nil, -171, -15, -292, nil, + -66, -123, -23, -251, -9, -165, -180, nil, -177, -124, + -172, -209, -16, 32, 18, -84, 54, 17, 9, -79, + -42, -41, -118, -151, -22, -90, nil ] + +racc_goto_default = [ + nil, nil, nil, 168, 25, 28, 32, 35, 5, 6, + 10, 13, 15, 18, 20, 24, 27, 31, 34, 4, + nil, 99, nil, 79, 101, 103, 105, 108, 109, 113, + 95, 96, 8, nil, nil, nil, nil, 85, nil, 30, + nil, nil, 161, 239, 164, 165, nil, nil, 144, 107, + 110, 111, 67, 134, 98, 150, 151, nil, 248, 104, + nil, nil, nil, nil, 69, nil, nil, 301, 80, nil, + nil, nil, nil, nil, nil, nil, nil, nil, nil, 57, + nil, nil, nil, nil, nil, nil, 192 ] + +racc_reduce_table = [ + 0, 0, :racc_error, + 1, 70, :_reduce_1, + 1, 70, :_reduce_none, + 1, 71, :_reduce_none, + 2, 71, :_reduce_4, + 1, 73, :_reduce_none, + 1, 73, :_reduce_none, + 1, 73, :_reduce_none, + 1, 73, :_reduce_none, + 1, 73, :_reduce_none, + 1, 73, :_reduce_none, + 1, 73, :_reduce_none, + 1, 73, :_reduce_none, + 1, 73, :_reduce_none, + 1, 73, :_reduce_none, + 1, 73, :_reduce_none, + 1, 73, :_reduce_none, + 1, 73, :_reduce_none, + 1, 73, :_reduce_none, + 3, 87, :_reduce_19, + 3, 87, :_reduce_20, + 1, 88, :_reduce_none, + 1, 88, :_reduce_none, + 1, 88, :_reduce_none, + 1, 89, :_reduce_none, + 1, 89, :_reduce_none, + 1, 89, :_reduce_none, + 1, 89, :_reduce_none, + 4, 81, :_reduce_28, + 5, 81, :_reduce_29, + 3, 81, :_reduce_30, + 2, 81, :_reduce_31, + 1, 91, :_reduce_none, + 1, 91, :_reduce_none, + 3, 91, :_reduce_34, + 3, 91, :_reduce_35, + 1, 92, :_reduce_none, + 1, 92, :_reduce_none, + 1, 92, :_reduce_none, + 1, 92, :_reduce_none, + 1, 92, :_reduce_none, + 1, 92, :_reduce_none, + 1, 92, :_reduce_none, + 1, 92, :_reduce_none, + 1, 92, :_reduce_44, + 5, 74, :_reduce_45, + 5, 74, :_reduce_46, + 5, 74, :_reduce_47, + 5, 85, :_reduce_48, + 2, 75, :_reduce_49, + 1, 108, :_reduce_50, + 2, 108, :_reduce_51, + 6, 76, :_reduce_52, + 2, 76, :_reduce_53, + 3, 109, :_reduce_54, + 3, 109, :_reduce_55, + 1, 110, 
:_reduce_none, + 1, 110, :_reduce_none, + 3, 110, :_reduce_58, + 1, 111, :_reduce_none, + 3, 111, :_reduce_60, + 1, 112, :_reduce_61, + 1, 112, :_reduce_62, + 3, 113, :_reduce_63, + 3, 113, :_reduce_64, + 1, 114, :_reduce_none, + 1, 114, :_reduce_none, + 4, 116, :_reduce_67, + 1, 102, :_reduce_none, + 3, 102, :_reduce_69, + 0, 103, :_reduce_none, + 1, 103, :_reduce_none, + 1, 118, :_reduce_72, + 1, 93, :_reduce_73, + 1, 95, :_reduce_74, + 1, 117, :_reduce_none, + 1, 117, :_reduce_none, + 1, 117, :_reduce_none, + 1, 117, :_reduce_none, + 1, 117, :_reduce_none, + 1, 117, :_reduce_none, + 1, 117, :_reduce_none, + 3, 77, :_reduce_82, + 3, 77, :_reduce_83, + 3, 86, :_reduce_84, + 0, 104, :_reduce_85, + 1, 104, :_reduce_86, + 3, 104, :_reduce_87, + 3, 122, :_reduce_88, + 3, 124, :_reduce_89, + 1, 125, :_reduce_none, + 1, 125, :_reduce_none, + 0, 107, :_reduce_92, + 1, 107, :_reduce_93, + 3, 107, :_reduce_94, + 1, 126, :_reduce_none, + 3, 126, :_reduce_96, + 1, 115, :_reduce_none, + 1, 115, :_reduce_none, + 1, 115, :_reduce_none, + 1, 115, :_reduce_none, + 1, 115, :_reduce_none, + 1, 115, :_reduce_none, + 1, 123, :_reduce_none, + 1, 123, :_reduce_none, + 1, 123, :_reduce_none, + 1, 123, :_reduce_none, + 1, 123, :_reduce_none, + 1, 123, :_reduce_none, + 1, 123, :_reduce_none, + 1, 123, :_reduce_none, + 1, 123, :_reduce_none, + 1, 123, :_reduce_none, + 1, 123, :_reduce_none, + 1, 123, :_reduce_none, + 4, 97, :_reduce_115, + 3, 97, :_reduce_116, + 1, 99, :_reduce_117, + 2, 99, :_reduce_118, + 2, 129, :_reduce_119, + 1, 130, :_reduce_120, + 2, 130, :_reduce_121, + 1, 96, :_reduce_122, + 4, 90, :_reduce_123, + 4, 90, :_reduce_124, + 2, 79, :_reduce_125, + 5, 131, :_reduce_126, + 4, 131, :_reduce_127, + 0, 132, :_reduce_none, + 2, 132, :_reduce_129, + 4, 132, :_reduce_130, + 3, 132, :_reduce_131, + 1, 120, :_reduce_none, + 3, 120, :_reduce_133, + 3, 120, :_reduce_134, + 3, 120, :_reduce_135, + 3, 120, :_reduce_136, + 3, 120, :_reduce_137, + 3, 120, :_reduce_138, + 3, 120, :_reduce_139, + 3, 120, :_reduce_140, + 3, 120, :_reduce_141, + 2, 120, :_reduce_142, + 3, 120, :_reduce_143, + 3, 120, :_reduce_144, + 3, 120, :_reduce_145, + 3, 120, :_reduce_146, + 3, 120, :_reduce_147, + 3, 120, :_reduce_148, + 2, 120, :_reduce_149, + 3, 120, :_reduce_150, + 3, 120, :_reduce_151, + 3, 120, :_reduce_152, + 5, 78, :_reduce_153, + 1, 134, :_reduce_none, + 2, 134, :_reduce_155, + 5, 135, :_reduce_156, + 4, 135, :_reduce_157, + 1, 136, :_reduce_none, + 3, 136, :_reduce_159, + 3, 98, :_reduce_160, + 1, 138, :_reduce_none, + 4, 138, :_reduce_162, + 1, 140, :_reduce_none, + 3, 140, :_reduce_164, + 3, 139, :_reduce_165, + 1, 137, :_reduce_none, + 1, 137, :_reduce_none, + 1, 137, :_reduce_none, + 1, 137, :_reduce_none, + 1, 137, :_reduce_none, + 1, 137, :_reduce_none, + 1, 137, :_reduce_none, + 1, 137, :_reduce_none, + 1, 137, :_reduce_174, + 1, 137, :_reduce_none, + 1, 141, :_reduce_176, + 1, 142, :_reduce_none, + 3, 142, :_reduce_178, + 2, 80, :_reduce_179, + 6, 82, :_reduce_180, + 5, 82, :_reduce_181, + 7, 83, :_reduce_182, + 6, 83, :_reduce_183, + 6, 84, :_reduce_184, + 5, 84, :_reduce_185, + 1, 106, :_reduce_186, + 1, 101, :_reduce_187, + 1, 101, :_reduce_188, + 1, 101, :_reduce_189, + 1, 145, :_reduce_none, + 3, 145, :_reduce_191, + 1, 147, :_reduce_192, + 1, 148, :_reduce_193, + 1, 148, :_reduce_194, + 1, 148, :_reduce_195, + 1, 148, :_reduce_none, + 0, 72, :_reduce_197, + 0, 149, :_reduce_198, + 1, 143, :_reduce_none, + 3, 143, :_reduce_200, + 3, 143, :_reduce_201, + 1, 150, :_reduce_none, + 3, 150, :_reduce_203, + 
3, 151, :_reduce_204, + 1, 151, :_reduce_205, + 3, 151, :_reduce_206, + 1, 151, :_reduce_207, + 1, 146, :_reduce_none, + 2, 146, :_reduce_209, + 1, 144, :_reduce_none, + 2, 144, :_reduce_211, + 1, 152, :_reduce_none, + 1, 152, :_reduce_none, + 1, 94, :_reduce_214, + 3, 119, :_reduce_215, + 4, 119, :_reduce_216, + 2, 119, :_reduce_217, + 1, 127, :_reduce_none, + 1, 127, :_reduce_none, + 0, 105, :_reduce_none, + 1, 105, :_reduce_221, + 1, 133, :_reduce_222, + 3, 128, :_reduce_223, + 4, 128, :_reduce_224, + 2, 128, :_reduce_225, + 1, 153, :_reduce_none, + 3, 153, :_reduce_227, + 3, 154, :_reduce_228, + 1, 155, :_reduce_229, + 1, 155, :_reduce_230, + 4, 121, :_reduce_231, + 1, 100, :_reduce_none, + 4, 100, :_reduce_233 ] + +racc_reduce_n = 234 + +racc_shift_n = 385 + +racc_token_table = { + false => 0, + :error => 1, + :STRING => 2, + :DQPRE => 3, + :DQMID => 4, + :DQPOST => 5, + :LBRACK => 6, + :RBRACK => 7, + :LBRACE => 8, + :RBRACE => 9, + :SYMBOL => 10, + :FARROW => 11, + :COMMA => 12, + :TRUE => 13, + :FALSE => 14, + :EQUALS => 15, + :APPENDS => 16, + :LESSEQUAL => 17, + :NOTEQUAL => 18, + :DOT => 19, + :COLON => 20, + :LLCOLLECT => 21, + :RRCOLLECT => 22, + :QMARK => 23, + :LPAREN => 24, + :RPAREN => 25, + :ISEQUAL => 26, + :GREATEREQUAL => 27, + :GREATERTHAN => 28, + :LESSTHAN => 29, + :IF => 30, + :ELSE => 31, + :IMPORT => 32, + :DEFINE => 33, + :ELSIF => 34, + :VARIABLE => 35, + :CLASS => 36, + :INHERITS => 37, + :NODE => 38, + :BOOLEAN => 39, + :NAME => 40, + :SEMIC => 41, + :CASE => 42, + :DEFAULT => 43, + :AT => 44, + :LCOLLECT => 45, + :RCOLLECT => 46, + :CLASSNAME => 47, + :CLASSREF => 48, + :NOT => 49, + :OR => 50, + :AND => 51, + :UNDEF => 52, + :PARROW => 53, + :PLUS => 54, + :MINUS => 55, + :TIMES => 56, + :DIV => 57, + :LSHIFT => 58, + :RSHIFT => 59, + :UMINUS => 60, + :MATCH => 61, + :NOMATCH => 62, + :REGEX => 63, + :IN_EDGE => 64, + :OUT_EDGE => 65, + :IN_EDGE_SUB => 66, + :OUT_EDGE_SUB => 67, + :IN => 68 } + +racc_nt_base = 69 + +racc_use_result_var = true + +Racc_arg = [ + racc_action_table, + racc_action_check, + racc_action_default, + racc_action_pointer, + racc_goto_table, + racc_goto_check, + racc_goto_default, + racc_goto_pointer, + racc_nt_base, + racc_reduce_table, + racc_token_table, + racc_shift_n, + racc_reduce_n, + racc_use_result_var ] + +Racc_token_to_s_table = [ + "$end", + "error", + "STRING", + "DQPRE", + "DQMID", + "DQPOST", + "LBRACK", + "RBRACK", + "LBRACE", + "RBRACE", + "SYMBOL", + "FARROW", + "COMMA", + "TRUE", + "FALSE", + "EQUALS", + "APPENDS", + "LESSEQUAL", + "NOTEQUAL", + "DOT", + "COLON", + "LLCOLLECT", + "RRCOLLECT", + "QMARK", + "LPAREN", + "RPAREN", + "ISEQUAL", + "GREATEREQUAL", + "GREATERTHAN", + "LESSTHAN", + "IF", + "ELSE", + "IMPORT", + "DEFINE", + "ELSIF", + "VARIABLE", + "CLASS", + "INHERITS", + "NODE", + "BOOLEAN", + "NAME", + "SEMIC", + "CASE", + "DEFAULT", + "AT", + "LCOLLECT", + "RCOLLECT", + "CLASSNAME", + "CLASSREF", + "NOT", + "OR", + "AND", + "UNDEF", + "PARROW", + "PLUS", + "MINUS", + "TIMES", + "DIV", + "LSHIFT", + "RSHIFT", + "UMINUS", + "MATCH", + "NOMATCH", + "REGEX", + "IN_EDGE", + "OUT_EDGE", + "IN_EDGE_SUB", + "OUT_EDGE_SUB", + "IN", + "$start", + "program", + "statements", + "nil", + "statement", + "resource", + "virtualresource", + "collection", + "assignment", + "casestatement", + "ifstatement_begin", + "import", + "fstatement", + "definition", + "hostclass", + "nodedef", + "resourceoverride", + "append", + "relationship", + "relationship_side", + "edge", + "resourceref", + "funcvalues", + "namestring", + "name", 
+ "variable", + "type", + "boolean", + "funcrvalue", + "selector", + "quotedtext", + "hasharrayaccesses", + "classname", + "resourceinstances", + "endsemi", + "params", + "endcomma", + "classref", + "anyparams", + "at", + "collectrhand", + "collstatements", + "collstatement", + "colljoin", + "collexpr", + "colllval", + "simplervalue", + "resourceinst", + "resourcename", + "undef", + "array", + "expression", + "hasharrayaccess", + "param", + "rvalue", + "addparam", + "anyparam", + "rvalues", + "comma", + "hash", + "dqrval", + "dqtail", + "ifstatement", + "else", + "regex", + "caseopts", + "caseopt", + "casevalues", + "selectlhand", + "svalues", + "selectval", + "sintvalues", + "string", + "strings", + "argumentlist", + "classparent", + "hostnames", + "nodeparent", + "nodename", + "hostname", + "nothing", + "arguments", + "argument", + "classnameordefault", + "hashpairs", + "hashpair", + "key" ] + +Racc_debug_parser = false + +##### State transition tables end ##### + +# reduce 0 omitted + +module_eval(<<'.,.,', 'grammar.ra', 31) + def _reduce_1(val, _values, result) + if val[0] + # Make sure we always return an array. + if val[0].is_a?(AST::ASTArray) + if val[0].children.empty? + result = nil + else + result = val[0] + end + else + result = aryfy(val[0]) + end + else + result = nil + end + + result + end +.,., + +# reduce 2 omitted + +# reduce 3 omitted + +module_eval(<<'.,.,', 'grammar.ra', 50) + def _reduce_4(val, _values, result) + if val[0] and val[1] + if val[0].instance_of?(AST::ASTArray) + val[0].push(val[1]) + result = val[0] + else + result = ast AST::ASTArray, :children => [val[0],val[1]] + end + elsif obj = (val[0] || val[1]) + result = obj + else result = nil + end + + result + end +.,., + +# reduce 5 omitted + +# reduce 6 omitted + +# reduce 7 omitted + +# reduce 8 omitted + +# reduce 9 omitted + +# reduce 10 omitted + +# reduce 11 omitted + +# reduce 12 omitted + +# reduce 13 omitted + +# reduce 14 omitted + +# reduce 15 omitted + +# reduce 16 omitted + +# reduce 17 omitted + +# reduce 18 omitted + +module_eval(<<'.,.,', 'grammar.ra', 80) + def _reduce_19(val, _values, result) + result = AST::Relationship.new(val[0], val[2], val[1][:value], ast_context) + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 83) + def _reduce_20(val, _values, result) + result = AST::Relationship.new(val[0], val[2], val[1][:value], ast_context) + + result + end +.,., + +# reduce 21 omitted + +# reduce 22 omitted + +# reduce 23 omitted + +# reduce 24 omitted + +# reduce 25 omitted + +# reduce 26 omitted + +# reduce 27 omitted + +module_eval(<<'.,.,', 'grammar.ra', 91) + def _reduce_28(val, _values, result) + args = aryfy(val[2]) + result = ast AST::Function, + :name => val[0][:value], + :line => val[0][:line], + :arguments => args, + :ftype => :statement + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 99) + def _reduce_29(val, _values, result) + args = aryfy(val[2]) + result = ast AST::Function, + :name => val[0][:value], + :line => val[0][:line], + :arguments => args, + :ftype => :statement + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 106) + def _reduce_30(val, _values, result) + result = ast AST::Function, + :name => val[0][:value], + :line => val[0][:line], + :arguments => AST::ASTArray.new({}), + :ftype => :statement + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 113) + def _reduce_31(val, _values, result) + args = aryfy(val[1]) + result = ast AST::Function, + :name => val[0][:value], + :line => val[0][:line], + :arguments => args, + :ftype 
=> :statement + + result + end +.,., + +# reduce 32 omitted + +# reduce 33 omitted + +module_eval(<<'.,.,', 'grammar.ra', 124) + def _reduce_34(val, _values, result) + result = aryfy(val[0], val[2]) + result.line = @lexer.line + result.file = @lexer.file + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 129) + def _reduce_35(val, _values, result) + unless val[0].is_a?(AST::ASTArray) + val[0] = aryfy(val[0]) + end + + val[0].push(val[2]) + + result = val[0] + + result + end +.,., + +# reduce 36 omitted + +# reduce 37 omitted + +# reduce 38 omitted + +# reduce 39 omitted + +# reduce 40 omitted + +# reduce 41 omitted + +# reduce 42 omitted + +# reduce 43 omitted + +module_eval(<<'.,.,', 'grammar.ra', 149) + def _reduce_44(val, _values, result) + result = ast AST::Name, :value => val[0][:value] + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 153) + def _reduce_45(val, _values, result) + @lexer.commentpop + array = val[2] + array = [array] if array.instance_of?(AST::ResourceInstance) + result = ast AST::ASTArray + + # this iterates across each specified resourceinstance + array.each { |instance| + raise Puppet::Dev, "Got something that isn't an instance" unless instance.instance_of?(AST::ResourceInstance) + # now, i need to somehow differentiate between those things with + # arrays in their names, and normal things + + result.push ast( + AST::Resource, + :type => val[0], + :title => instance[0], + + :parameters => instance[1]) + } + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 172) + def _reduce_46(val, _values, result) + # This is a deprecated syntax. + error "All resource specifications require names" + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 175) + def _reduce_47(val, _values, result) + # a defaults setting for a type + @lexer.commentpop + result = ast(AST::ResourceDefaults, :type => val[0], :parameters => val[2]) + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 182) + def _reduce_48(val, _values, result) + @lexer.commentpop + result = ast AST::ResourceOverride, :object => val[0], :parameters => val[2] + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 189) + def _reduce_49(val, _values, result) + type = val[0] + + if (type == :exported and ! Puppet[:storeconfigs]) and ! Puppet[:parseonly] + Puppet.warning addcontext("You cannot collect without storeconfigs being set") + end + + error "Defaults are not virtualizable" if val[1].is_a? AST::ResourceDefaults + + method = type.to_s + "=" + + # Just mark our resources as exported and pass them through. + if val[1].instance_of?(AST::ASTArray) + val[1].each do |obj| + obj.send(method, true) + end + else + val[1].send(method, true) + end + + result = val[1] + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 211) + def _reduce_50(val, _values, result) + result = :virtual + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 212) + def _reduce_51(val, _values, result) + result = :exported + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 217) + def _reduce_52(val, _values, result) + @lexer.commentpop + Puppet.warning addcontext("Collection names must now be capitalized") if val[0] =~ /^[a-z]/ + type = val[0].downcase + args = {:type => type} + + if val[1].is_a?(AST::CollExpr) + args[:query] = val[1] + args[:query].type = type + args[:form] = args[:query].form + else + args[:form] = val[1] + end + if args[:form] == :exported and ! Puppet[:storeconfigs] and ! 
Puppet[:parseonly] + Puppet.warning addcontext("You cannot collect exported resources without storeconfigs being set; the collection will be ignored") + end + args[:override] = val[3] + result = ast AST::Collection, args + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 236) + def _reduce_53(val, _values, result) + if val[0] =~ /^[a-z]/ + Puppet.warning addcontext("Collection names must now be capitalized") + end + type = val[0].downcase + args = {:type => type } + + if val[1].is_a?(AST::CollExpr) + args[:query] = val[1] + args[:query].type = type + args[:form] = args[:query].form + else + args[:form] = val[1] + end + if args[:form] == :exported and ! Puppet[:storeconfigs] and ! Puppet[:parseonly] + Puppet.warning addcontext("You cannot collect exported resources without storeconfigs being set; the collection will be ignored") + end + result = ast AST::Collection, args + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 257) + def _reduce_54(val, _values, result) + if val[1] + result = val[1] + result.form = :virtual + else + result = :virtual + end + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 265) + def _reduce_55(val, _values, result) + if val[1] + result = val[1] + result.form = :exported + else + result = :exported + end + + result + end +.,., + +# reduce 56 omitted + +# reduce 57 omitted + +module_eval(<<'.,.,', 'grammar.ra', 278) + def _reduce_58(val, _values, result) + result = ast AST::CollExpr, :test1 => val[0], :oper => val[1], :test2 => val[2] + + result + end +.,., + +# reduce 59 omitted + +module_eval(<<'.,.,', 'grammar.ra', 283) + def _reduce_60(val, _values, result) + result = val[1] + result.parens = true + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 287) + def _reduce_61(val, _values, result) + result=val[0][:value] + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 288) + def _reduce_62(val, _values, result) + result=val[0][:value] + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 291) + def _reduce_63(val, _values, result) + result = ast AST::CollExpr, :test1 => val[0], :oper => val[1][:value], :test2 => val[2] + #result = ast AST::CollExpr + #result.push *val + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 296) + def _reduce_64(val, _values, result) + result = ast AST::CollExpr, :test1 => val[0], :oper => val[1][:value], :test2 => val[2] + #result = ast AST::CollExpr + #result.push *val + + result + end +.,., + +# reduce 65 omitted + +# reduce 66 omitted + +module_eval(<<'.,.,', 'grammar.ra', 305) + def _reduce_67(val, _values, result) + result = ast AST::ResourceInstance, :children => [val[0],val[2]] + + result + end +.,., + +# reduce 68 omitted + +module_eval(<<'.,.,', 'grammar.ra', 310) + def _reduce_69(val, _values, result) + if val[0].instance_of?(AST::ResourceInstance) + result = ast AST::ASTArray, :children => [val[0],val[2]] + else + val[0].push val[2] + result = val[0] + end + + result + end +.,., + +# reduce 70 omitted + +# reduce 71 omitted + +module_eval(<<'.,.,', 'grammar.ra', 322) + def _reduce_72(val, _values, result) + result = ast AST::Undef, :value => :undef + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 326) + def _reduce_73(val, _values, result) + result = ast AST::Name, :value => val[0][:value], :line => val[0][:line] + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 330) + def _reduce_74(val, _values, result) + result = ast AST::Type, :value => val[0][:value], :line => val[0][:line] + + result + end +.,., + +# reduce 75 omitted + +# 
reduce 76 omitted + +# reduce 77 omitted + +# reduce 78 omitted + +# reduce 79 omitted + +# reduce 80 omitted + +# reduce 81 omitted + +module_eval(<<'.,.,', 'grammar.ra', 342) + def _reduce_82(val, _values, result) + raise Puppet::ParseError, "Cannot assign to variables in other namespaces" if val[0][:value] =~ /::/ + # this is distinct from referencing a variable + variable = ast AST::Name, :value => val[0][:value], :line => val[0][:line] + result = ast AST::VarDef, :name => variable, :value => val[2], :line => val[0][:line] + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 348) + def _reduce_83(val, _values, result) + result = ast AST::VarDef, :name => val[0], :value => val[2] + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 352) + def _reduce_84(val, _values, result) + variable = ast AST::Name, :value => val[0][:value], :line => val[0][:line] + result = ast AST::VarDef, :name => variable, :value => val[2], :append => true, :line => val[0][:line] + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 358) + def _reduce_85(val, _values, result) + result = ast AST::ASTArray + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 360) + def _reduce_86(val, _values, result) + result = val[0] + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 362) + def _reduce_87(val, _values, result) + if val[0].instance_of?(AST::ASTArray) + val[0].push(val[2]) + result = val[0] + else + result = ast AST::ASTArray, :children => [val[0],val[2]] + end + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 371) + def _reduce_88(val, _values, result) + result = ast AST::ResourceParam, :param => val[0][:value], :line => val[0][:line], :value => val[2] + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 375) + def _reduce_89(val, _values, result) + result = ast AST::ResourceParam, :param => val[0][:value], :line => val[0][:line], :value => val[2], + :add => true + + result + end +.,., + +# reduce 90 omitted + +# reduce 91 omitted + +module_eval(<<'.,.,', 'grammar.ra', 384) + def _reduce_92(val, _values, result) + result = ast AST::ASTArray + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 386) + def _reduce_93(val, _values, result) + result = val[0] + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 388) + def _reduce_94(val, _values, result) + if val[0].instance_of?(AST::ASTArray) + val[0].push(val[2]) + result = val[0] + else + result = ast AST::ASTArray, :children => [val[0],val[2]] + end + + result + end +.,., + +# reduce 95 omitted + +module_eval(<<'.,.,', 'grammar.ra', 398) + def _reduce_96(val, _values, result) + if val[0].instance_of?(AST::ASTArray) + result = val[0].push(val[2]) + else + result = ast AST::ASTArray, :children => [val[0],val[2]] + end + + result + end +.,., + +# reduce 97 omitted + +# reduce 98 omitted + +# reduce 99 omitted + +# reduce 100 omitted + +# reduce 101 omitted + +# reduce 102 omitted + +# reduce 103 omitted + +# reduce 104 omitted + +# reduce 105 omitted + +# reduce 106 omitted + +# reduce 107 omitted + +# reduce 108 omitted + +# reduce 109 omitted + +# reduce 110 omitted + +# reduce 111 omitted + +# reduce 112 omitted + +# reduce 113 omitted + +# reduce 114 omitted + +module_eval(<<'.,.,', 'grammar.ra', 427) + def _reduce_115(val, _values, result) + args = aryfy(val[2]) + result = ast AST::Function, + :name => val[0][:value], :line => val[0][:line], + :arguments => args, + :ftype => :rvalue + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 433) + def _reduce_116(val, _values, 
result) + result = ast AST::Function, + :name => val[0][:value], :line => val[0][:line], + :arguments => AST::ASTArray.new({}), + :ftype => :rvalue + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 439) + def _reduce_117(val, _values, result) + result = ast AST::String, :value => val[0][:value], :line => val[0][:line] + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 440) + def _reduce_118(val, _values, result) + result = ast AST::Concat, :value => [ast(AST::String,val[0])]+val[1], :line => val[0][:line] + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 442) + def _reduce_119(val, _values, result) + result = [val[0]] + val[1] + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 444) + def _reduce_120(val, _values, result) + result = [ast(AST::String,val[0])] + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 445) + def _reduce_121(val, _values, result) + result = [ast(AST::String,val[0])] + val[1] + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 448) + def _reduce_122(val, _values, result) + result = ast AST::Boolean, :value => val[0][:value], :line => val[0][:line] + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 452) + def _reduce_123(val, _values, result) + Puppet.warning addcontext("Deprecation notice: Resource references should now be capitalized") + result = ast AST::ResourceReference, :type => val[0][:value], :line => val[0][:line], :title => val[2] + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 455) + def _reduce_124(val, _values, result) + result = ast AST::ResourceReference, :type => val[0], :title => val[2] + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 459) + def _reduce_125(val, _values, result) + result = val[1] + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 463) + def _reduce_126(val, _values, result) + @lexer.commentpop + args = { + :test => val[0], + :statements => val[2] + } + + args[:else] = val[4] if val[4] + + result = ast AST::IfStatement, args + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 474) + def _reduce_127(val, _values, result) + @lexer.commentpop + args = { + :test => val[0], + :statements => ast(AST::Nop) + } + + args[:else] = val[3] if val[3] + + result = ast AST::IfStatement, args + + result + end +.,., + +# reduce 128 omitted + +module_eval(<<'.,.,', 'grammar.ra', 487) + def _reduce_129(val, _values, result) + result = ast AST::Else, :statements => val[1] + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 490) + def _reduce_130(val, _values, result) + @lexer.commentpop + result = ast AST::Else, :statements => val[2] + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 494) + def _reduce_131(val, _values, result) + @lexer.commentpop + result = ast AST::Else, :statements => ast(AST::Nop) + + result + end +.,., + +# reduce 132 omitted + +module_eval(<<'.,.,', 'grammar.ra', 512) + def _reduce_133(val, _values, result) + result = ast AST::InOperator, :lval => val[0], :rval => val[2] + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 515) + def _reduce_134(val, _values, result) + result = ast AST::MatchOperator, :operator => val[1][:value], :lval => val[0], :rval => val[2] + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 518) + def _reduce_135(val, _values, result) + result = ast AST::MatchOperator, :operator => val[1][:value], :lval => val[0], :rval => val[2] + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 521) + def _reduce_136(val, _values, result) + result = ast 
AST::ArithmeticOperator, :operator => val[1][:value], :lval => val[0], :rval => val[2] + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 524) + def _reduce_137(val, _values, result) + result = ast AST::ArithmeticOperator, :operator => val[1][:value], :lval => val[0], :rval => val[2] + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 527) + def _reduce_138(val, _values, result) + result = ast AST::ArithmeticOperator, :operator => val[1][:value], :lval => val[0], :rval => val[2] + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 530) + def _reduce_139(val, _values, result) + result = ast AST::ArithmeticOperator, :operator => val[1][:value], :lval => val[0], :rval => val[2] + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 533) + def _reduce_140(val, _values, result) + result = ast AST::ArithmeticOperator, :operator => val[1][:value], :lval => val[0], :rval => val[2] + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 536) + def _reduce_141(val, _values, result) + result = ast AST::ArithmeticOperator, :operator => val[1][:value], :lval => val[0], :rval => val[2] + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 539) + def _reduce_142(val, _values, result) + result = ast AST::Minus, :value => val[1] + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 542) + def _reduce_143(val, _values, result) + result = ast AST::ComparisonOperator, :operator => val[1][:value], :lval => val[0], :rval => val[2] + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 545) + def _reduce_144(val, _values, result) + result = ast AST::ComparisonOperator, :operator => val[1][:value], :lval => val[0], :rval => val[2] + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 548) + def _reduce_145(val, _values, result) + result = ast AST::ComparisonOperator, :operator => val[1][:value], :lval => val[0], :rval => val[2] + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 551) + def _reduce_146(val, _values, result) + result = ast AST::ComparisonOperator, :operator => val[1][:value], :lval => val[0], :rval => val[2] + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 554) + def _reduce_147(val, _values, result) + result = ast AST::ComparisonOperator, :operator => val[1][:value], :lval => val[0], :rval => val[2] + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 557) + def _reduce_148(val, _values, result) + result = ast AST::ComparisonOperator, :operator => val[1][:value], :lval => val[0], :rval => val[2] + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 560) + def _reduce_149(val, _values, result) + result = ast AST::Not, :value => val[1] + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 563) + def _reduce_150(val, _values, result) + result = ast AST::BooleanOperator, :operator => val[1][:value], :lval => val[0], :rval => val[2] + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 566) + def _reduce_151(val, _values, result) + result = ast AST::BooleanOperator, :operator => val[1][:value], :lval => val[0], :rval => val[2] + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 569) + def _reduce_152(val, _values, result) + result = val[1] + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 573) + def _reduce_153(val, _values, result) + @lexer.commentpop + options = val[3] + options = ast AST::ASTArray, :children => [val[3]] unless options.instance_of?(AST::ASTArray) + result = ast AST::CaseStatement, :test => val[1], :options => options + + result + end 
+.,., + +# reduce 154 omitted + +module_eval(<<'.,.,', 'grammar.ra', 581) + def _reduce_155(val, _values, result) + if val[0].instance_of?(AST::ASTArray) + val[0].push val[1] + result = val[0] + else + result = ast AST::ASTArray, :children => [val[0], val[1]] + end + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 590) + def _reduce_156(val, _values, result) + @lexer.commentpop + result = ast AST::CaseOpt, :value => val[0], :statements => val[3] + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 593) + def _reduce_157(val, _values, result) + @lexer.commentpop + + result = ast( + AST::CaseOpt, + :value => val[0], + + :statements => ast(AST::ASTArray) + ) + + result + end +.,., + +# reduce 158 omitted + +module_eval(<<'.,.,', 'grammar.ra', 605) + def _reduce_159(val, _values, result) + if val[0].instance_of?(AST::ASTArray) + val[0].push(val[2]) + result = val[0] + else + result = ast AST::ASTArray, :children => [val[0],val[2]] + end + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 614) + def _reduce_160(val, _values, result) + result = ast AST::Selector, :param => val[0], :values => val[2] + + result + end +.,., + +# reduce 161 omitted + +module_eval(<<'.,.,', 'grammar.ra', 619) + def _reduce_162(val, _values, result) + @lexer.commentpop + result = val[1] + + result + end +.,., + +# reduce 163 omitted + +module_eval(<<'.,.,', 'grammar.ra', 625) + def _reduce_164(val, _values, result) + if val[0].instance_of?(AST::ASTArray) + val[0].push(val[2]) + result = val[0] + else + result = ast AST::ASTArray, :children => [val[0],val[2]] + end + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 634) + def _reduce_165(val, _values, result) + result = ast AST::ResourceParam, :param => val[0], :value => val[2] + + result + end +.,., + +# reduce 166 omitted + +# reduce 167 omitted + +# reduce 168 omitted + +# reduce 169 omitted + +# reduce 170 omitted + +# reduce 171 omitted + +# reduce 172 omitted + +# reduce 173 omitted + +module_eval(<<'.,.,', 'grammar.ra', 646) + def _reduce_174(val, _values, result) + result = ast AST::Default, :value => val[0][:value], :line => val[0][:line] + + result + end +.,., + +# reduce 175 omitted + +module_eval(<<'.,.,', 'grammar.ra', 651) + def _reduce_176(val, _values, result) + result = [val[0][:value]] + result + end +.,., + +# reduce 177 omitted + +module_eval(<<'.,.,', 'grammar.ra', 653) + def _reduce_178(val, _values, result) + result = val[0] += val[2] + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 656) + def _reduce_179(val, _values, result) + val[1].each do |file| + import(file) + end + + result = AST::ASTArray.new(:children => []) + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 666) + def _reduce_180(val, _values, result) + @lexer.commentpop + newdefine classname(val[1]), :arguments => val[2], :code => val[4], :line => val[0][:line] + @lexer.indefine = false + result = nil + +#} | DEFINE NAME argumentlist parent LBRACE RBRACE { + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 673) + def _reduce_181(val, _values, result) + @lexer.commentpop + newdefine classname(val[1]), :arguments => val[2], :line => val[0][:line] + @lexer.indefine = false + result = nil + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 681) + def _reduce_182(val, _values, result) + @lexer.commentpop + # Our class gets defined in the parent namespace, not our own. 
+ @lexer.namepop + newclass classname(val[1]), :arguments => val[2], :parent => val[3], :code => val[5], :line => val[0][:line] + result = nil + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 687) + def _reduce_183(val, _values, result) + @lexer.commentpop + # Our class gets defined in the parent namespace, not our own. + @lexer.namepop + newclass classname(val[1]), :arguments => val[2], :parent => val[3], :line => val[0][:line] + result = nil + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 695) + def _reduce_184(val, _values, result) + @lexer.commentpop + newnode val[1], :parent => val[2], :code => val[4], :line => val[0][:line] + result = nil + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 699) + def _reduce_185(val, _values, result) + @lexer.commentpop + newnode val[1], :parent => val[2], :line => val[0][:line] + result = nil + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 704) + def _reduce_186(val, _values, result) + result = val[0][:value] + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 706) + def _reduce_187(val, _values, result) + result = val[0][:value] + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 707) + def _reduce_188(val, _values, result) + result = val[0][:value] + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 708) + def _reduce_189(val, _values, result) + result = "class" + result + end +.,., + +# reduce 190 omitted + +module_eval(<<'.,.,', 'grammar.ra', 714) + def _reduce_191(val, _values, result) + result = val[0] + result = [result] unless result.is_a?(Array) + result << val[2] + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 720) + def _reduce_192(val, _values, result) + result = ast AST::HostName, :value => val[0] + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 723) + def _reduce_193(val, _values, result) + result = val[0][:value] + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 724) + def _reduce_194(val, _values, result) + result = val[0][:value] + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 725) + def _reduce_195(val, _values, result) + result = val[0][:value] + result + end +.,., + +# reduce 196 omitted + +module_eval(<<'.,.,', 'grammar.ra', 729) + def _reduce_197(val, _values, result) + result = nil + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 733) + def _reduce_198(val, _values, result) + result = ast AST::ASTArray, :children => [] + + result + end +.,., + +# reduce 199 omitted + +module_eval(<<'.,.,', 'grammar.ra', 738) + def _reduce_200(val, _values, result) + result = nil + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 741) + def _reduce_201(val, _values, result) + result = val[1] + result = [result] unless result[0].is_a?(Array) + + result + end +.,., + +# reduce 202 omitted + +module_eval(<<'.,.,', 'grammar.ra', 747) + def _reduce_203(val, _values, result) + result = val[0] + result = [result] unless result[0].is_a?(Array) + result << val[2] + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 753) + def _reduce_204(val, _values, result) + Puppet.warning addcontext("Deprecation notice: must now include '$' in prototype") + result = [val[0][:value], val[2]] + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 757) + def _reduce_205(val, _values, result) + Puppet.warning addcontext("Deprecation notice: must now include '$' in prototype") + result = [val[0][:value]] + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 760) + def _reduce_206(val, _values, result) 
+ result = [val[0][:value], val[2]] + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 762) + def _reduce_207(val, _values, result) + result = [val[0][:value]] + + result + end +.,., + +# reduce 208 omitted + +module_eval(<<'.,.,', 'grammar.ra', 767) + def _reduce_209(val, _values, result) + result = val[1] + + result + end +.,., + +# reduce 210 omitted + +module_eval(<<'.,.,', 'grammar.ra', 772) + def _reduce_211(val, _values, result) + result = val[1] + + result + end +.,., + +# reduce 212 omitted + +# reduce 213 omitted + +module_eval(<<'.,.,', 'grammar.ra', 778) + def _reduce_214(val, _values, result) + result = ast AST::Variable, :value => val[0][:value], :line => val[0][:line] + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 782) + def _reduce_215(val, _values, result) + if val[1].instance_of?(AST::ASTArray) + result = val[1] + else + result = ast AST::ASTArray, :children => [val[1]] + end + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 789) + def _reduce_216(val, _values, result) + if val[1].instance_of?(AST::ASTArray) + result = val[1] + else + result = ast AST::ASTArray, :children => [val[1]] + end + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 795) + def _reduce_217(val, _values, result) + result = ast AST::ASTArray + + result + end +.,., + +# reduce 218 omitted + +# reduce 219 omitted + +# reduce 220 omitted + +module_eval(<<'.,.,', 'grammar.ra', 802) + def _reduce_221(val, _values, result) + result = nil + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 805) + def _reduce_222(val, _values, result) + result = ast AST::Regex, :value => val[0][:value] + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 809) + def _reduce_223(val, _values, result) + if val[1].instance_of?(AST::ASTHash) + result = val[1] + else + result = ast AST::ASTHash, { :value => val[1] } + end + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 816) + def _reduce_224(val, _values, result) + if val[1].instance_of?(AST::ASTHash) + result = val[1] + else + result = ast AST::ASTHash, { :value => val[1] } + end + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 822) + def _reduce_225(val, _values, result) + result = ast AST::ASTHash + + result + end +.,., + +# reduce 226 omitted + +module_eval(<<'.,.,', 'grammar.ra', 827) + def _reduce_227(val, _values, result) + if val[0].instance_of?(AST::ASTHash) + result = val[0].merge(val[2]) + else + result = ast AST::ASTHash, :value => val[0] + result.merge(val[2]) + end + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 836) + def _reduce_228(val, _values, result) + result = ast AST::ASTHash, { :value => { val[0] => val[2] } } + + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 839) + def _reduce_229(val, _values, result) + result = val[0][:value] + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 840) + def _reduce_230(val, _values, result) + result = val[0] + result + end +.,., + +module_eval(<<'.,.,', 'grammar.ra', 843) + def _reduce_231(val, _values, result) + result = ast AST::HashOrArrayAccess, :variable => val[0][:value], :key => val[2] + + result + end +.,., + +# reduce 232 omitted + +module_eval(<<'.,.,', 'grammar.ra', 848) + def _reduce_233(val, _values, result) + result = ast AST::HashOrArrayAccess, :variable => val[0], :key => val[2] + + result + end +.,., + +def _reduce_none(val, _values, result) + val[0] +end + + end # class Parser + end # module Parser + end # module Puppet diff --git a/mcollective/lib/puppet/parser/parser_support.rb 
b/mcollective/lib/puppet/parser/parser_support.rb new file mode 100644 index 000000000..9e580efb2 --- /dev/null +++ b/mcollective/lib/puppet/parser/parser_support.rb @@ -0,0 +1,236 @@ +# I pulled this into a separate file, because I got +# tired of rebuilding the parser.rb file all the time. +class Puppet::Parser::Parser + require 'puppet/parser/functions' + require 'puppet/parser/files' + require 'puppet/resource/type_collection' + require 'puppet/resource/type_collection_helper' + require 'puppet/resource/type' + require 'monitor' + + AST = Puppet::Parser::AST + + include Puppet::Resource::TypeCollectionHelper + + attr_reader :version, :environment + attr_accessor :files + + attr_accessor :lexer + + # Add context to a message; useful for error messages and such. + def addcontext(message, obj = nil) + obj ||= @lexer + + message += " on line #{obj.line}" + if file = obj.file + message += " in file #{file}" + end + + message + end + + # Create an AST array out of all of the args + def aryfy(*args) + if args[0].instance_of?(AST::ASTArray) + result = args.shift + args.each { |arg| + result.push arg + } + else + result = ast AST::ASTArray, :children => args + end + + result + end + + # Create an AST object, and automatically add the file and line information if + # available. + def ast(klass, hash = {}) + klass.new ast_context(klass.use_docs, hash[:line]).merge(hash) + end + + def ast_context(include_docs = false, ast_line = nil) + result = { + :line => ast_line || lexer.line, + :file => lexer.file + } + result[:doc] = lexer.getcomment(result[:line]) if include_docs + result + end + + # The fully qualifed name, with the full namespace. + def classname(name) + [@lexer.namespace, name].join("::").sub(/^::/, '') + end + + def clear + initvars + end + + # Raise a Parse error. + def error(message) + if brace = @lexer.expected + message += "; expected '%s'" + end + except = Puppet::ParseError.new(message) + except.line = @lexer.line + except.file = @lexer.file if @lexer.file + + raise except + end + + def file + @lexer.file + end + + def file=(file) + unless FileTest.exist?(file) + unless file =~ /\.pp$/ + file = file + ".pp" + end + end + raise Puppet::AlreadyImportedError, "Import loop detected" if known_resource_types.watching_file?(file) + + watch_file(file) + @lexer.file = file + end + + [:hostclass, :definition, :node, :nodes?].each do |method| + define_method(method) do |*args| + known_resource_types.send(method, *args) + end + end + + def find_hostclass(namespace, name) + known_resource_types.find_or_load(namespace, name, :hostclass) + end + + def find_definition(namespace, name) + known_resource_types.find_or_load(namespace, name, :definition) + end + + def import(file) + known_resource_types.loader.import(file, @lexer.file) + end + + def initialize(env) + # The environment is needed to know how to find the resource type collection. + @environment = env.is_a?(String) ? Puppet::Node::Environment.new(env) : env + initvars + end + + # Initialize or reset all of our variables. + def initvars + @lexer = Puppet::Parser::Lexer.new + end + + # Split an fq name into a namespace and name + def namesplit(fullname) + ary = fullname.split("::") + n = ary.pop || "" + ns = ary.join("::") + return ns, n + end + + # Create a new class, or merge with an existing class. + def newclass(name, options = {}) + known_resource_types.add Puppet::Resource::Type.new(:hostclass, name, ast_context(true, options[:line]).merge(options)) + end + + # Create a new definition. 
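For illustration, here is how the helpers above surface through the public parsing API: feeding the parser a manifest string registers classes (via newclass above) and defines (via newdefine below) in the environment's known_resource_types collection. A minimal sketch, assuming the vendored Puppet libraries in this tree are on the load path; the environment name and manifest text are made up:

    require 'puppet'
    require 'puppet/parser/parser'

    parser = Puppet::Parser::Parser.new("production")
    parser.string = 'define motd::entry($content) { notify { $content: } }'
    types = parser.parse                 # returns the known_resource_types collection
    types.definition("motd::entry")      # => the Puppet::Resource::Type registered by newdefine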
+ def newdefine(name, options = {}) + known_resource_types.add Puppet::Resource::Type.new(:definition, name, ast_context(true, options[:line]).merge(options)) + end + + # Create a new node. Nodes are special, because they're stored in a global + # table, not according to namespaces. + def newnode(names, options = {}) + names = [names] unless names.instance_of?(Array) + context = ast_context(true, options[:line]) + names.collect do |name| + known_resource_types.add(Puppet::Resource::Type.new(:node, name, context.merge(options))) + end + end + + def on_error(token,value,stack) + if token == 0 # denotes end of file + value = 'end of file' + else + value = "'#{value[:value]}'" + end + error = "Syntax error at #{value}" + + if brace = @lexer.expected + error += "; expected '#{brace}'" + end + + except = Puppet::ParseError.new(error) + except.line = @lexer.line + except.file = @lexer.file if @lexer.file + + raise except + end + + # how should I do error handling here? + def parse(string = nil) + return parse_ruby_file if self.file =~ /\.rb$/ + self.string = string if string + begin + @yydebug = false + main = yyparse(@lexer,:scan) + rescue Racc::ParseError => except + error = Puppet::ParseError.new(except) + error.line = @lexer.line + error.file = @lexer.file + error.set_backtrace except.backtrace + raise error + rescue Puppet::ParseError => except + except.line ||= @lexer.line + except.file ||= @lexer.file + raise except + rescue Puppet::Error => except + # and this is a framework error + except.line ||= @lexer.line + except.file ||= @lexer.file + raise except + rescue Puppet::DevError => except + except.line ||= @lexer.line + except.file ||= @lexer.file + raise except + rescue => except + error = Puppet::DevError.new(except.message) + error.line = @lexer.line + error.file = @lexer.file + error.set_backtrace except.backtrace + raise error + end + if main + # Store the results as the top-level class. + newclass("", :code => main) + end + return known_resource_types + ensure + @lexer.clear + end + + def parse_ruby_file + # Execute the contents of the file inside its own "main" object so + # that it can call methods in the resource type API. + Puppet::DSL::ResourceTypeAPI.new.instance_eval(File.read(self.file)) + end + + def string=(string) + @lexer.string = string + end + + def version + known_resource_types.version + end + + # Add a new file to be checked when we're checking to see if we should be + # reparsed. This is basically only used by the TemplateWrapper to let the + # parser know about templates that should be parsed. 
+ def watch_file(filename) + known_resource_types.watch_file(filename) + end +end diff --git a/mcollective/lib/puppet/parser/relationship.rb b/mcollective/lib/puppet/parser/relationship.rb new file mode 100644 index 000000000..6190df52c --- /dev/null +++ b/mcollective/lib/puppet/parser/relationship.rb @@ -0,0 +1,43 @@ +class Puppet::Parser::Relationship + attr_accessor :source, :target, :type + + PARAM_MAP = {:relationship => :before, :subscription => :notify} + + def evaluate(catalog) + if source.is_a?(Puppet::Parser::Collector) + sources = source.collected.values + else + sources = [source] + end + if target.is_a?(Puppet::Parser::Collector) + targets = target.collected.values + else + targets = [target] + end + sources.each do |s| + targets.each do |t| + mk_relationship(s, t, catalog) + end + end + end + + def initialize(source, target, type) + @source, @target, @type = source, target, type + end + + def param_name + PARAM_MAP[type] || raise(ArgumentError, "Invalid relationship type #{type}") + end + + def mk_relationship(source, target, catalog) + unless source_resource = catalog.resource(source.to_s) + raise ArgumentError, "Could not find resource '#{source}' for relationship on '#{target}'" + end + unless target_resource = catalog.resource(target.to_s) + raise ArgumentError, "Could not find resource '#{target}' for relationship from '#{source}'" + end + Puppet.debug "Adding relationship from #{source.to_s} to #{target.to_s} with '#{param_name}'" + source_resource[param_name] ||= [] + source_resource[param_name] << target.to_s + end +end diff --git a/mcollective/lib/puppet/parser/resource.rb b/mcollective/lib/puppet/parser/resource.rb new file mode 100644 index 000000000..e4f913013 --- /dev/null +++ b/mcollective/lib/puppet/parser/resource.rb @@ -0,0 +1,343 @@ +require 'puppet/resource' + +# The primary difference between this class and its +# parent is that this class has rules on who can set +# parameters +class Puppet::Parser::Resource < Puppet::Resource + require 'puppet/parser/resource/param' + require 'puppet/util/tagging' + require 'puppet/file_collection/lookup' + require 'puppet/parser/yaml_trimmer' + require 'puppet/resource/type_collection_helper' + + include Puppet::FileCollection::Lookup + include Puppet::Resource::TypeCollectionHelper + + include Puppet::Util + include Puppet::Util::MethodHelper + include Puppet::Util::Errors + include Puppet::Util::Logging + include Puppet::Util::Tagging + include Puppet::Parser::YamlTrimmer + + attr_accessor :source, :scope, :rails_id + attr_accessor :virtual, :override, :translated, :catalog, :evaluated + + attr_reader :exported, :parameters + + # Determine whether the provided parameter name is a relationship parameter. + def self.relationship_parameter?(name) + @relationship_names ||= Puppet::Type.relationship_params.collect { |p| p.name } + @relationship_names.include?(name) + end + + # Set up some boolean test methods + def translated?; !!@translated; end + def override?; !!@override; end + def evaluated?; !!@evaluated; end + + def [](param) + param = symbolize(param) + if param == :title + return self.title + end + if @parameters.has_key?(param) + @parameters[param].value + else + nil + end + end + + def []=(param, value) + set_parameter(param, value) + end + + def eachparam + @parameters.each do |name, param| + yield param + end + end + + def environment + scope.environment + end + + # Process the stage metaparameter for a class. A containment edge + # is drawn from the class to the stage. 
The stage for containment + # defaults to main, if none is specified. + def add_edge_to_stage + return unless self.type.to_s.downcase == "class" + + unless stage = catalog.resource(:stage, self[:stage] || (scope && scope.resource && scope.resource[:stage]) || :main) + raise ArgumentError, "Could not find stage #{self[:stage] || :main} specified by #{self}" + end + + self[:stage] ||= stage.title unless stage.title == :main + catalog.add_edge(stage, self) + end + + # Retrieve the associated definition and evaluate it. + def evaluate + return if evaluated? + @evaluated = true + if klass = resource_type and ! builtin_type? + finish + evaluated_code = klass.evaluate_code(self) + add_edge_to_stage + + return evaluated_code + elsif builtin? + devfail "Cannot evaluate a builtin type (#{type})" + else + self.fail "Cannot find definition #{type}" + end + end + + # Mark this resource as both exported and virtual, + # or remove the exported mark. + def exported=(value) + if value + @virtual = true + @exported = value + else + @exported = value + end + end + + # Do any finishing work on this object, called before evaluation or + # before storage/translation. + def finish + return if finished? + @finished = true + add_defaults + add_metaparams + add_scope_tags + validate + end + + # Has this resource already been finished? + def finished? + @finished + end + + def initialize(*args) + raise ArgumentError, "Resources require a scope" unless args.last[:scope] + super + + @source ||= scope.source + end + + # Is this resource modeling an isomorphic resource type? + def isomorphic? + if builtin_type? + return resource_type.isomorphic? + else + return true + end + end + + # Merge an override resource in. This will throw exceptions if + # any overrides aren't allowed. + def merge(resource) + # Test the resource scope, to make sure the resource is even allowed + # to override. + unless self.source.object_id == resource.source.object_id || resource.source.child_of?(self.source) + raise Puppet::ParseError.new("Only subclasses can override parameters", resource.line, resource.file) + end + # Some of these might fail, but they'll fail in the way we want. + resource.parameters.each do |name, param| + override_parameter(param) + end + end + + # Unless we're running >= 0.25, we're in compat mode. + def metaparam_compatibility_mode? + ! (catalog and ver = (catalog.client_version||'0.0.0').split(".") and (ver[0] > "0" or ver[1].to_i >= 25)) + end + + def name + self[:name] || self.title + end + + # A temporary occasion, until I get paths in the scopes figured out. + def path + to_s + end + + # Define a parameter in our resource. + # if we ever receive a parameter named 'tag', set + # the resource tags with its value. + def set_parameter(param, value = nil) + if ! value.nil? + param = Puppet::Parser::Resource::Param.new( + :name => param, :value => value, :source => self.source + ) + elsif ! param.is_a?(Puppet::Parser::Resource::Param) + raise ArgumentError, "Must pass a parameter or all necessary values" + end + + tag(*param.value) if param.name == :tag + + # And store it in our parameter hash. + @parameters[param.name] = param + end + + def to_hash + @parameters.inject({}) do |hash, ary| + param = ary[1] + # Skip "undef" values. + hash[param.name] = param.value if param.value != :undef + hash + end + end + + + # Create a Puppet::Resource instance from this parser resource. + # We plan, at some point, on not needing to do this conversion, but + # it's sufficient for now. 
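To make the conversion concrete, a sketch of what to_resource (defined next) produces; pres is assumed to be a parser resource built during an active compilation and the values are illustrative:

    pres[:require] = [Puppet::Resource.new("File", "/etc/motd")]
    plain = pres.to_resource
    plain[:require].to_s     # => "File[/etc/motd]"  -- the one-element array is flattened
    plain.file               # file and line are carried over from the parser resource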
+ def to_resource + result = Puppet::Resource.new(type, title) + + to_hash.each do |p, v| + if v.is_a?(Puppet::Resource) + v = Puppet::Resource.new(v.type, v.title) + elsif v.is_a?(Array) + # flatten resource references arrays + v = v.flatten if v.flatten.find { |av| av.is_a?(Puppet::Resource) } + v = v.collect do |av| + av = Puppet::Resource.new(av.type, av.title) if av.is_a?(Puppet::Resource) + av + end + end + + # If the value is an array with only one value, then + # convert it to a single value. This is largely so that + # the database interaction doesn't have to worry about + # whether it returns an array or a string. + result[p] = if v.is_a?(Array) and v.length == 1 + v[0] + else + v + end + end + + result.file = self.file + result.line = self.line + result.exported = self.exported + result.virtual = self.virtual + result.tag(*self.tags) + + result + end + + # Translate our object to a transportable object. + def to_trans + return nil if virtual? + + to_resource.to_trans + end + + # Convert this resource to a RAL resource. We hackishly go via the + # transportable stuff. + def to_ral + to_resource.to_ral + end + + private + + # Add default values from our definition. + def add_defaults + scope.lookupdefaults(self.type).each do |name, param| + unless @parameters.include?(name) + self.debug "Adding default for #{name}" + + @parameters[name] = param.dup + end + end + end + + def add_backward_compatible_relationship_param(name) + # Skip metaparams for which we get no value. + return unless val = scope.lookupvar(name.to_s, false) and val != :undefined + + # The default case: just set the value + set_parameter(name, val) and return unless @parameters[name] + + # For relationship params, though, join the values (a la #446). + @parameters[name].value = [@parameters[name].value, val].flatten + end + + # Add any metaparams defined in our scope. This actually adds any metaparams + # from any parent scope, and there's currently no way to turn that off. + def add_metaparams + compat_mode = metaparam_compatibility_mode? + + Puppet::Type.eachmetaparam do |name| + next unless self.class.relationship_parameter?(name) + add_backward_compatible_relationship_param(name) if compat_mode + end + end + + def add_scope_tags + if scope_resource = scope.resource + tag(*scope_resource.tags) + end + end + + # Accept a parameter from an override. + def override_parameter(param) + # This can happen if the override is defining a new parameter, rather + # than replacing an existing one. + (set_parameter(param) and return) unless current = @parameters[param.name] + + # The parameter is already set. Fail if they're not allowed to override it. + unless param.source.child_of?(current.source) + puts caller if Puppet[:trace] + msg = "Parameter '#{param.name}' is already set on #{self}" + msg += " by #{current.source}" if current.source.to_s != "" + if current.file or current.line + fields = [] + fields << current.file if current.file + fields << current.line.to_s if current.line + msg += " at #{fields.join(":")}" + end + msg += "; cannot redefine" + raise Puppet::ParseError.new(msg, param.line, param.file) + end + + # If we've gotten this far, we're allowed to override. + + # Merge with previous value, if the parameter was generated with the +> + # syntax. 
It's important that we use a copy of the new param instance + # here, not the old one, and not the original new one, so that the source + # is registered correctly for later overrides but the values aren't + # implcitly shared when multiple resources are overrriden at once (see + # ticket #3556). + if param.add + param = param.dup + param.value = [current.value, param.value].flatten + end + + set_parameter(param) + end + + # Make sure the resource's parameters are all valid for the type. + def validate + @parameters.each do |name, param| + validate_parameter(name) + end + rescue => detail + fail Puppet::ParseError, detail.to_s + end + + private + + def extract_parameters(params) + params.each do |param| + # Don't set the same parameter twice + self.fail Puppet::ParseError, "Duplicate parameter '#{param.name}' for on #{self}" if @parameters[param.name] + + set_parameter(param) + end + end +end diff --git a/mcollective/lib/puppet/parser/resource/param.rb b/mcollective/lib/puppet/parser/resource/param.rb new file mode 100644 index 000000000..c28322337 --- /dev/null +++ b/mcollective/lib/puppet/parser/resource/param.rb @@ -0,0 +1,27 @@ +require 'puppet/file_collection/lookup' +require 'puppet/parser/yaml_trimmer' + +# The parameters we stick in Resources. +class Puppet::Parser::Resource::Param + attr_accessor :name, :value, :source, :add + include Puppet::Util + include Puppet::Util::Errors + include Puppet::Util::MethodHelper + + include Puppet::FileCollection::Lookup + include Puppet::Parser::YamlTrimmer + + def initialize(hash) + set_options(hash) + requiredopts(:name, :value) + @name = symbolize(@name) + end + + def line_to_i + line ? Integer(line) : nil + end + + def to_s + "#{self.name} => #{self.value}" + end +end diff --git a/mcollective/lib/puppet/parser/scope.rb b/mcollective/lib/puppet/parser/scope.rb new file mode 100644 index 000000000..c369f129a --- /dev/null +++ b/mcollective/lib/puppet/parser/scope.rb @@ -0,0 +1,518 @@ +# The scope class, which handles storing and retrieving variables and types and +# such. + +require 'puppet/parser/parser' +require 'puppet/parser/templatewrapper' +require 'puppet/transportable' +require 'strscan' + +require 'puppet/resource/type_collection_helper' + +class Puppet::Parser::Scope + include Puppet::Resource::TypeCollectionHelper + require 'puppet/parser/resource' + + AST = Puppet::Parser::AST + + Puppet::Util.logmethods(self) + + include Enumerable + include Puppet::Util::Errors + attr_accessor :level, :source, :resource + attr_accessor :base, :keyword + attr_accessor :top, :translated, :compiler + attr_accessor :parent + attr_reader :namespaces + + # thin wrapper around an ephemeral + # symbol table. + # when a symbol + class Ephemeral + def initialize(parent=nil) + @symbols = {} + @parent = parent + end + + [:include?, :delete, :[]=].each do |m| + define_method(m) do |*args| + @symbols.send(m, *args) + end + end + + def [](name) + unless @symbols.include?(name) or @parent.nil? + @parent[name] + else + @symbols[name] + end + end + end + + # A demeterific shortcut to the catalog. + def catalog + compiler.catalog + end + + def environment + compiler.environment + end + + # Proxy accessors + def host + @compiler.node.name + end + + # Is the value true? This allows us to control the definition of truth + # in one place. 
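The next two class-level helpers pin down the language's coercion rules, so it is worth spelling them out; the behaviour shown follows directly from the code below, the input values are illustrative:

    Puppet::Parser::Scope.true?(false)     # => false
    Puppet::Parser::Scope.true?("")        # => false
    Puppet::Parser::Scope.true?(:undef)    # => false
    Puppet::Parser::Scope.true?("false")   # => true   -- any non-empty string is truthy
    Puppet::Parser::Scope.number?("0x1f")  # => 31     -- hex literal
    Puppet::Parser::Scope.number?("010")   # => 8      -- octal literal
    Puppet::Parser::Scope.number?("1.5e3") # => 1500.0
    Puppet::Parser::Scope.number?("abc")   # => nil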
+ def self.true?(value) + (value != false and value != "" and value != :undef) + end + + # Is the value a number?, return the correct object or nil if not a number + def self.number?(value) + return nil unless value.is_a?(Fixnum) or value.is_a?(Bignum) or value.is_a?(Float) or value.is_a?(String) + + if value.is_a?(String) + if value =~ /^-?\d+(:?\.\d+|(:?\.\d+)?e\d+)$/ + return value.to_f + elsif value =~ /^0x[0-9a-f]+$/i + return value.to_i(16) + elsif value =~ /^0[0-7]+$/ + return value.to_i(8) + elsif value =~ /^-?\d+$/ + return value.to_i + else + return nil + end + end + # it is one of Fixnum,Bignum or Float + value + end + + # Add to our list of namespaces. + def add_namespace(ns) + return false if @namespaces.include?(ns) + if @namespaces == [""] + @namespaces = [ns] + else + @namespaces << ns + end + end + + # Remove this when rebasing + def environment + compiler ? compiler.environment : nil + end + + # Are we the top scope? + def topscope? + @level == 1 + end + + def find_hostclass(name) + known_resource_types.find_hostclass(namespaces, name) + end + + def find_definition(name) + known_resource_types.find_definition(namespaces, name) + end + + def findresource(string, name = nil) + compiler.findresource(string, name) + end + + # Initialize our new scope. Defaults to having no parent. + def initialize(hash = {}) + if hash.include?(:namespace) + if n = hash[:namespace] + @namespaces = [n] + end + hash.delete(:namespace) + else + @namespaces = [""] + end + hash.each { |name, val| + method = name.to_s + "=" + if self.respond_to? method + self.send(method, val) + else + raise Puppet::DevError, "Invalid scope argument #{name}" + end + } + + extend_with_functions_module + + @tags = [] + + # The symbol table for this scope. This is where we store variables. + @symtable = {} + + # the ephemeral symbol tables + # those should not persist long, and are used for the moment only + # for $0..$xy capture variables of regexes + # this is actually implemented as a stack, with each ephemeral scope + # shadowing the previous one + @ephemeral = [ Ephemeral.new ] + + # All of the defaults set for types. It's a hash of hashes, + # with the first key being the type, then the second key being + # the parameter. + @defaults = Hash.new { |dhash,type| + dhash[type] = {} + } + + # The table for storing class singletons. This will only actually + # be used by top scopes and node scopes. + @class_scopes = {} + end + + # Store the fact that we've evaluated a class, and store a reference to + # the scope in which it was evaluated, so that we can look it up later. + def class_set(name, scope) + return parent.class_set(name,scope) if parent + @class_scopes[name] = scope + end + + # Return the scope associated with a class. This is just here so + # that subclasses can set their parent scopes to be the scope of + # their parent class, and it's also used when looking up qualified + # variables. + def class_scope(klass) + # They might pass in either the class or class name + k = klass.respond_to?(:name) ? klass.name : klass + @class_scopes[k] || (parent && parent.class_scope(k)) + end + + # Collect all of the defaults set at any higher scopes. + # This is a different type of lookup because it's additive -- + # it collects all of the defaults, with defaults in closer scopes + # overriding those in later scopes. + def lookupdefaults(type) + values = {} + + # first collect the values from the parents + unless parent.nil? 
+ parent.lookupdefaults(type).each { |var,value| + values[var] = value + } + end + + # then override them with any current values + # this should probably be done differently + if @defaults.include?(type) + @defaults[type].each { |var,value| + values[var] = value + } + end + + #Puppet.debug "Got defaults for %s: %s" % + # [type,values.inspect] + values + end + + # Look up a defined type. + def lookuptype(name) + find_definition(name) || find_hostclass(name) + end + + def lookup_qualified_var(name, usestring) + parts = name.split(/::/) + shortname = parts.pop + klassname = parts.join("::") + klass = find_hostclass(klassname) + unless klass + warning "Could not look up qualified variable '#{name}'; class #{klassname} could not be found" + return usestring ? "" : :undefined + end + unless kscope = class_scope(klass) + warning "Could not look up qualified variable '#{name}'; class #{klassname} has not been evaluated" + return usestring ? "" : :undefined + end + kscope.lookupvar(shortname, usestring) + end + + private :lookup_qualified_var + + # Look up a variable. The simplest value search we do. Default to returning + # an empty string for missing values, but support returning a constant. + def lookupvar(name, usestring = true) + table = ephemeral?(name) ? @ephemeral.last : @symtable + # If the variable is qualified, then find the specified scope and look the variable up there instead. + if name =~ /::/ + return lookup_qualified_var(name, usestring) + end + # We can't use "if table[name]" here because the value might be false + if ephemeral_include?(name) or table.include?(name) + if usestring and table[name] == :undef + return "" + else + return table[name] + end + elsif self.parent + return parent.lookupvar(name, usestring) + elsif usestring + return "" + else + return :undefined + end + end + + # Return a hash containing our variables and their values, optionally (and + # by default) including the values defined in our parent. Local values + # shadow parent values. + def to_hash(recursive = true) + target = parent.to_hash(recursive) if recursive and parent + target ||= Hash.new + @symtable.keys.each { |name| + value = @symtable[name] + if value == :undef + target.delete(name) + else + target[name] = value + end + } + target + end + + def namespaces + @namespaces.dup + end + + # Create a new scope and set these options. + def newscope(options = {}) + compiler.newscope(self, options) + end + + def parent_module_name + return nil unless @parent + return nil unless @parent.source + @parent.source.module_name + end + + # Return the list of scopes up to the top scope, ordered with our own first. + # This is used for looking up variables and defaults. + def scope_path + if parent + [self, parent.scope_path].flatten.compact + else + [self] + end + end + + # Set defaults for a type. The typename should already be downcased, + # so that the syntax is isolated. We don't do any kind of type-checking + # here; instead we let the resource do it when the defaults are used. + def setdefaults(type, params) + table = @defaults[type] + + # if we got a single param, it'll be in its own array + params = [params] unless params.is_a?(Array) + + params.each { |param| + #Puppet.debug "Default for %s is %s => %s" % + # [type,ary[0].inspect,ary[1].inspect] + if table.include?(param.name) + raise Puppet::ParseError.new("Default already defined for #{type} { #{param.name} }; cannot redefine", param.line, param.file) + end + table[param.name] = param + } + end + + # Set a variable in the current scope. 
This will override settings + # in scopes above, but will not allow variables in the current scope + # to be reassigned. + def setvar(name,value, options = {}) + table = options[:ephemeral] ? @ephemeral.last : @symtable + #Puppet.debug "Setting %s to '%s' at level %s mode append %s" % + # [name.inspect,value,self.level, append] + if table.include?(name) + unless options[:append] + error = Puppet::ParseError.new("Cannot reassign variable #{name}") + else + error = Puppet::ParseError.new("Cannot append, variable #{name} is defined in this scope") + end + error.file = options[:file] if options[:file] + error.line = options[:line] if options[:line] + raise error + end + + unless options[:append] + table[name] = value + else # append case + # lookup the value in the scope if it exists and insert the var + table[name] = lookupvar(name) + # concatenate if string, append if array, nothing for other types + case value + when Array + table[name] += value + when Hash + raise ArgumentError, "Trying to append to a hash with something which is not a hash is unsupported" unless value.is_a?(Hash) + table[name].merge!(value) + else + table[name] << value + end + end + end + + # Return an interpolated string. + def strinterp(string, file = nil, line = nil) + # Most strings won't have variables in them. + ss = StringScanner.new(string) + out = "" + while not ss.eos? + if ss.scan(/^\$\{((\w*::)*\w+|[0-9]+)\}|^\$([0-9])|^\$((\w*::)*\w+)/) + # If it matches the backslash, then just retun the dollar sign. + if ss.matched == '\\$' + out << '$' + else # look the variable up + # make sure $0-$9 are lookupable only if ephemeral + var = ss[1] || ss[3] || ss[4] + if var and var =~ /^[0-9]+$/ and not ephemeral_include?(var) + next + end + out << lookupvar(var).to_s || "" + end + elsif ss.scan(/^\\(.)/) + # Puppet.debug("Got escape: pos:%d; m:%s" % [ss.pos, ss.matched]) + case ss[1] + when 'n' + out << "\n" + when 't' + out << "\t" + when 's' + out << " " + when '\\' + out << '\\' + when '$' + out << '$' + else + str = "Unrecognised escape sequence '#{ss.matched}'" + str += " in file #{file}" if file + str += " at line #{line}" if line + Puppet.warning str + out << ss.matched + end + elsif ss.scan(/^\$/) + out << '$' + elsif ss.scan(/^\\\n/) # an escaped carriage return + next + else + tmp = ss.scan(/[^\\$]+/) + # Puppet.debug("Got other: pos:%d; m:%s" % [ss.pos, tmp]) + unless tmp + error = Puppet::ParseError.new("Could not parse string #{string.inspect}") + {:file= => file, :line= => line}.each do |m,v| + error.send(m, v) if v + end + raise error + end + out << tmp + end + end + + out + end + + # Return the tags associated with this scope. It's basically + # just our parents' tags, plus our type. We don't cache this value + # because our parent tags might change between calls. + def tags + resource.tags + end + + # Used mainly for logging + def to_s + "Scope(#{@resource})" + end + + # Undefine a variable; only used for testing. + def unsetvar(var) + table = ephemeral?(var) ? @ephemeral.last : @symtable + table.delete(var) if table.include?(var) + end + + # remove ephemeral scope up to level + def unset_ephemeral_var(level=:all) + if level == :all + @ephemeral = [ Ephemeral.new ] + else + (@ephemeral.size - level).times do + @ephemeral.pop + end + end + end + + # check if name exists in one of the ephemeral scope. + def ephemeral_include?(name) + @ephemeral.reverse.each do |eph| + return true if eph.include?(name) + end + false + end + + # is name an ephemeral variable? 
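Before the predicate below, an end-to-end sketch of the ephemeral machinery: regex captures are pushed as a short-lived table, read back through the ordinary lookupvar path, then popped. Here scope is assumed to be a Puppet::Parser::Scope obtained from the current compilation:

    level = scope.ephemeral_level
    scope.ephemeral_from(/^(\w+)-(\d+)$/.match("web-01"))
    scope.lookupvar("0")             # => "web-01"
    scope.lookupvar("1")             # => "web"
    scope.lookupvar("2")             # => "01"
    scope.unset_ephemeral_var(level) # drop the capture variables again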
+ def ephemeral?(name) + name =~ /^\d+$/ + end + + def ephemeral_level + @ephemeral.size + end + + def new_ephemeral + @ephemeral.push(Ephemeral.new(@ephemeral.last)) + end + + def ephemeral_from(match, file = nil, line = nil) + raise(ArgumentError,"Invalid regex match data") unless match.is_a?(MatchData) + + new_ephemeral + + setvar("0", match[0], :file => file, :line => line, :ephemeral => true) + match.captures.each_with_index do |m,i| + setvar("#{i+1}", m, :file => file, :line => line, :ephemeral => true) + end + end + + def find_resource_type(type) + # It still works fine without the type == 'class' short-cut, but it is a lot slower. + return nil if ["class", "node"].include? type.to_s.downcase + find_builtin_resource_type(type) || find_defined_resource_type(type) + end + + def find_builtin_resource_type(type) + Puppet::Type.type(type.to_s.downcase.to_sym) + end + + def find_defined_resource_type(type) + environment.known_resource_types.find_definition(namespaces, type.to_s.downcase) + end + + def resolve_type_and_titles(type, titles) + raise ArgumentError, "titles must be an array" unless titles.is_a?(Array) + + case type.downcase + when "class" + # resolve the titles + titles = titles.collect do |a_title| + hostclass = find_hostclass(a_title) + hostclass ? hostclass.name : a_title + end + when "node" + # no-op + else + # resolve the type + resource_type = find_resource_type(type) + type = resource_type.name if resource_type + end + + return [type, titles] + end + + private + + def extend_with_functions_module + extend Puppet::Parser::Functions.environment_module(Puppet::Node::Environment.root) + extend Puppet::Parser::Functions.environment_module(environment) + end +end diff --git a/mcollective/lib/puppet/parser/templatewrapper.rb b/mcollective/lib/puppet/parser/templatewrapper.rb new file mode 100644 index 000000000..6864aa1a9 --- /dev/null +++ b/mcollective/lib/puppet/parser/templatewrapper.rb @@ -0,0 +1,115 @@ +# A simple wrapper for templates, so they don't have full access to +# the scope objects. +require 'puppet/parser/files' +require 'erb' + +class Puppet::Parser::TemplateWrapper + attr_writer :scope + attr_reader :file + attr_accessor :string + include Puppet::Util + Puppet::Util.logmethods(self) + + def initialize(scope) + @__scope__ = scope + end + + def scope + @__scope__ + end + + # Should return true if a variable is defined, false if it is not + def has_variable?(name) + if scope.lookupvar(name.to_s, false) != :undefined + true + else + false + end + end + + # Allow templates to access the defined classes + def classes + scope.catalog.classes + end + + # Allow templates to access the tags defined in the current scope + def tags + scope.tags + end + + # Allow templates to access the all the defined tags + def all_tags + scope.catalog.tags + end + + # Ruby treats variables like methods, so we used to expose variables + # within scope to the ERB code via method_missing. As per RedMine #1427, + # though, this means that conflicts between methods in our inheritance + # tree (Kernel#fork) and variable names (fork => "yes/no") could arise. + # + # Worse, /new/ conflicts could pop up when a new kernel or object method + # was added to Ruby, causing templates to suddenly fail mysteriously when + # Ruby was upgraded. + # + # To ensure that legacy templates using unqualified names work we retain + # the missing_method definition here until we declare the syntax finally + # dead. 
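In practice both of the following work against this wrapper: the unqualified form goes through method_missing below, the @-form uses the instance variables bound in result(). A sketch; scope is assumed to come from an active compilation with $operatingsystem set:

    wrapper = Puppet::Parser::TemplateWrapper.new(scope)
    wrapper.result("OS is <%= @operatingsystem %>")   # preferred: bound instance variable
    wrapper.result("OS is <%= operatingsystem %>")    # legacy: resolved via method_missing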
+ def method_missing(name, *args) + # We have to tell lookupvar to return :undefined to us when + # appropriate; otherwise it converts to "". + value = scope.lookupvar(name.to_s, false) + if value != :undefined + return value + else + # Just throw an error immediately, instead of searching for + # other missingmethod things or whatever. + raise Puppet::ParseError, "Could not find value for '#{name}'" + end + end + + def file=(filename) + unless @file = Puppet::Parser::Files.find_template(filename, scope.compiler.environment.to_s) + raise Puppet::ParseError, "Could not find template '#{filename}'" + end + + # We'll only ever not have a parser in testing, but, eh. + scope.known_resource_types.watch_file(file) + + @string = File.read(file) + end + + def result(string = nil) + if string + self.string = string + template_source = "inline template" + else + template_source = file + end + + # Expose all the variables in our scope as instance variables of the + # current object, making it possible to access them without conflict + # to the regular methods. + benchmark(:debug, "Bound template variables for #{template_source}") do + scope.to_hash.each { |name, value| + if name.kind_of?(String) + realname = name.gsub(/[^\w]/, "_") + else + realname = name + end + instance_variable_set("@#{realname}", value) + } + end + + result = nil + benchmark(:debug, "Interpolated template #{template_source}") do + template = ERB.new(self.string, 0, "-") + result = template.result(binding) + end + + result + end + + def to_s + "template[#{(file ? file : "inline")}]" + end +end diff --git a/mcollective/lib/puppet/parser/type_loader.rb b/mcollective/lib/puppet/parser/type_loader.rb new file mode 100644 index 000000000..bae560381 --- /dev/null +++ b/mcollective/lib/puppet/parser/type_loader.rb @@ -0,0 +1,148 @@ +require 'puppet/node/environment' + +class Puppet::Parser::TypeLoader + include Puppet::Node::Environment::Helper + + # Helper class that makes sure we don't try to import the same file + # more than once from either the same thread or different threads. + class Helper + include MonitorMixin + def initialize + super + # These hashes are indexed by filename + @state = {} # :doing or :done + @thread = {} # if :doing, thread that's doing the parsing + @cond_var = {} # if :doing, condition var that will be signaled when done. + end + + # Execute the supplied block exactly once per file, no matter how + # many threads have asked for it to run. If another thread is + # already executing it, wait for it to finish. If this thread is + # already executing it, return immediately without executing the + # block. + # + # Note: the reason for returning immediately if this thread is + # already executing the block is to handle the case of a circular + # import--when this happens, we attempt to recursively re-parse a + # file that we are already in the process of parsing. To prevent + # an infinite regress we need to simply do nothing when the + # recursive import is attempted. + def do_once(file) + need_to_execute = synchronize do + case @state[file] + when :doing + if @thread[file] != Thread.current + @cond_var[file].wait + end + false + when :done + false + else + @state[file] = :doing + @thread[file] = Thread.current + @cond_var[file] = new_cond + true + end + end + if need_to_execute + begin + yield + ensure + synchronize do + @state[file] = :done + @thread.delete(file) + @cond_var.delete(file).broadcast + end + end + end + end + end + + # Import our files. 
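For example (the environment name, pattern, and paths are illustrative), importing a glob relative to the manifest that contains the import statement:

    loader = Puppet::Parser::TypeLoader.new("production")
    loader.import("nodes/*.pp", "/etc/puppet/manifests/site.pp")
    # each matching file is parsed exactly once, guarded by Helper#do_once above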
+ def import(file, current_file = nil) + return if Puppet[:ignoreimport] + + # use a path relative to the file doing the importing + if current_file + dir = current_file.sub(%r{[^/]+$},'').sub(/\/$/, '') + else + dir = "." + end + if dir == "" + dir = "." + end + + pat = file + modname, files = Puppet::Parser::Files.find_manifests(pat, :cwd => dir, :environment => environment) + if files.size == 0 + raise Puppet::ImportError.new("No file(s) found for import of '#{pat}'") + end + + files.each do |file| + unless file =~ /^#{File::SEPARATOR}/ + file = File.join(dir, file) + end + @loading_helper.do_once(file) do + parse_file(file) + end + end + + modname + end + + def known_resource_types + environment.known_resource_types + end + + def initialize(env) + self.environment = env + @loading_helper = Helper.new + end + + def load_until(namespaces, name) + return nil if name == "" # special-case main. + name2files(namespaces, name).each do |filename| + modname = begin + import(filename) + rescue Puppet::ImportError => detail + # We couldn't load the item + # I'm not convienced we should just drop these errors, but this + # preserves existing behaviours. + nil + end + if result = yield(filename) + Puppet.debug "Automatically imported #{name} from #{filename} into #{environment}" + result.module_name = modname if modname and result.respond_to?(:module_name=) + return result + end + end + nil + end + + def name2files(namespaces, name) + return [name.sub(/^::/, '').gsub("::", File::SEPARATOR)] if name =~ /^::/ + + result = namespaces.inject([]) do |names_to_try, namespace| + fullname = (namespace + "::#{name}").sub(/^::/, '') + + # Try to load the module init file if we're a qualified name + names_to_try << fullname.split("::")[0] if fullname.include?("::") + + # Then the fully qualified name + names_to_try << fullname + end + + # Otherwise try to load the bare name on its own. This + # is appropriate if the class we're looking for is in a + # module that's different from our namespace. + result << name + result.uniq.collect { |f| f.gsub("::", File::SEPARATOR) } + end + + def parse_file(file) + Puppet.debug("importing '#{file}' in environment #{environment}") + parser = Puppet::Parser::Parser.new(environment) + parser.file = file + parser.parse + end +end diff --git a/mcollective/lib/puppet/parser/yaml_trimmer.rb b/mcollective/lib/puppet/parser/yaml_trimmer.rb new file mode 100644 index 000000000..cf7870916 --- /dev/null +++ b/mcollective/lib/puppet/parser/yaml_trimmer.rb @@ -0,0 +1,9 @@ +module Puppet::Parser::YamlTrimmer + REMOVE = %w{@scope @source} + + def to_yaml_properties + r = instance_variables - REMOVE + r -= skip_for_yaml if respond_to?(:skip_for_yaml) + r + end +end diff --git a/mcollective/lib/puppet/property.rb b/mcollective/lib/puppet/property.rb new file mode 100644 index 000000000..12f496a6e --- /dev/null +++ b/mcollective/lib/puppet/property.rb @@ -0,0 +1,339 @@ +# The virtual base class for properties, which are the self-contained building +# blocks for actually doing work on the system. + +require 'puppet' +require 'puppet/parameter' + +class Puppet::Property < Puppet::Parameter + require 'puppet/property/ensure' + + # Because 'should' uses an array, we have a special method for handling + # it. We also want to keep copies of the original values, so that + # they can be retrieved and compared later when merging. 
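Concretely, with prop standing for any property instance and an illustrative value:

    prop.should = "0644"   # stored internally as the array ["0644"], after munging
    prop.should            # => "0644"   -- first value, unless array_matching == :all
    prop.shouldorig        # => ["0644"] -- unmunged copy kept for later comparison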
+ attr_reader :shouldorig + + attr_writer :noop + + class << self + attr_accessor :unmanaged + attr_reader :name + + # Return array matching info, defaulting to just matching + # the first value. + def array_matching + @array_matching ||= :first + end + + # Set whether properties should match all values or just the first one. + def array_matching=(value) + value = value.intern if value.is_a?(String) + raise ArgumentError, "Supported values for Property#array_matching are 'first' and 'all'" unless [:first, :all].include?(value) + @array_matching = value + end + end + + # Look up a value's name, so we can find options and such. + def self.value_name(name) + if value = value_collection.match?(name) + value.name + end + end + + # Retrieve an option set when a value was defined. + def self.value_option(name, option) + if value = value_collection.value(name) + value.send(option) + end + end + + # Define a new valid value for a property. You must provide the value itself, + # usually as a symbol, or a regex to match the value. + # + # The first argument to the method is either the value itself or a regex. + # The second argument is an option hash; valid options are: + # * :method: The name of the method to define. Defaults to 'set_'. + # * :required_features: A list of features this value requires. + # * :event: The event that should be returned when this value is set. + # * :call: When to call any associated block. The default value + # is `instead`, which means to call the value instead of calling the + # provider. You can also specify `before` or `after`, which will + # call both the block and the provider, according to the order you specify + # (the `first` refers to when the block is called, not the provider). + def self.newvalue(name, options = {}, &block) + value = value_collection.newvalue(name, options, &block) + + define_method(value.method, &value.block) if value.method and value.block + value + end + + # Call the provider method. + def call_provider(value) + provider.send(self.class.name.to_s + "=", value) + rescue NoMethodError + self.fail "The #{provider.class.name} provider can not handle attribute #{self.class.name}" + end + + # Call the dynamically-created method associated with our value, if + # there is one. + def call_valuemethod(name, value) + if method = self.class.value_option(name, :method) and self.respond_to?(method) + begin + event = self.send(method) + rescue Puppet::Error + raise + rescue => detail + puts detail.backtrace if Puppet[:trace] + error = Puppet::Error.new("Could not set '#{value} on #{self.class.name}: #{detail}", @resource.line, @resource.file) + error.set_backtrace detail.backtrace + raise error + end + elsif block = self.class.value_option(name, :block) + # FIXME It'd be better here to define a method, so that + # the blocks could return values. + self.instance_eval(&block) + else + devfail "Could not find method for value '#{name}'" + end + end + + # How should a property change be printed as a string? 
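Sample renderings, assuming a hypothetical property named mode:

    prop.change_to_s(:absent, "0600")   # => "defined 'mode' as '0600'"
    prop.change_to_s("0644", :absent)   # => "undefined 'mode' from '0644'"
    prop.change_to_s("0644", "0600")    # => "mode changed '0644' to '0600'"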
+ def change_to_s(current_value, newvalue) + begin + if current_value == :absent + return "defined '#{name}' as '#{should_to_s(newvalue)}'" + elsif newvalue == :absent or newvalue == [:absent] + return "undefined '#{name}' from '#{is_to_s(current_value)}'" + else + return "#{name} changed '#{is_to_s(current_value)}' to '#{should_to_s(newvalue)}'" + end + rescue Puppet::Error, Puppet::DevError + raise + rescue => detail + puts detail.backtrace if Puppet[:trace] + raise Puppet::DevError, "Could not convert change '#{name}' to string: #{detail}" + end + end + + # Figure out which event to return. + def event_name + value = self.should + + event_name = self.class.value_option(value, :event) and return event_name + + name == :ensure or return (name.to_s + "_changed").to_sym + + return (resource.type.to_s + case value + when :present; "_created" + when :absent; "_removed" + else + "_changed" + end).to_sym + end + + # Return a modified form of the resource event. + def event + resource.event :name => event_name, :desired_value => should, :property => self, :source_description => path + end + + attr_reader :shadow + + # initialize our property + def initialize(hash = {}) + super + + if ! self.metaparam? and klass = Puppet::Type.metaparamclass(self.class.name) + setup_shadow(klass) + end + end + + # Determine whether the property is in-sync or not. If @should is + # not defined or is set to a non-true value, then we do not have + # a valid value for it and thus consider the property to be in-sync + # since we cannot fix it. Otherwise, we expect our should value + # to be an array, and if @is matches any of those values, then + # we consider it to be in-sync. + # + # Don't override this method. + def safe_insync?(is) + # If there is no @should value, consider the property to be in sync. + return true unless @should + + # Otherwise delegate to the (possibly derived) insync? method. + insync?(is) + end + + def self.method_added(sym) + raise "Puppet::Property#safe_insync? shouldn't be overridden; please override insync? instead" if sym == :safe_insync? + end + + # This method should be overridden by derived classes if necessary + # to provide extra logic to determine whether the property is in + # sync. + def insync?(is) + self.devfail "#{self.class.name}'s should is not array" unless @should.is_a?(Array) + + # an empty array is analogous to no should values + return true if @should.empty? + + # Look for a matching value + return (is == @should or is == @should.collect { |v| v.to_s }) if match_all? + + @should.each { |val| return true if is == val or is == val.to_s } + + # otherwise, return false + false + end + + # because the @should and @is vars might be in weird formats, + # we need to set up a mechanism for pretty printing of the values + # default to just the values, but this way individual properties can + # override these methods + def is_to_s(currentvalue) + currentvalue + end + + # Send a log message. + def log(msg) + + Puppet::Util::Log.create( + + :level => resource[:loglevel], + :message => msg, + + :source => self + ) + end + + # Should we match all values, or just the first? + def match_all? + self.class.array_matching == :all + end + + # Execute our shadow's munge code, too, if we have one. 
+ def munge(value) + self.shadow.munge(value) if self.shadow + + super + end + + # each property class must define the name method, and property instances + # do not change that name + # this implicitly means that a given object can only have one property + # instance of a given property class + def name + self.class.name + end + + # for testing whether we should actually do anything + def noop + # This is only here to make testing easier. + if @resource.respond_to?(:noop?) + @resource.noop? + else + if defined?(@noop) + @noop + else + Puppet[:noop] + end + end + end + + # By default, call the method associated with the property name on our + # provider. In other words, if the property name is 'gid', we'll call + # 'provider.gid' to retrieve the current value. + def retrieve + provider.send(self.class.name) + end + + # Set our value, using the provider, an associated block, or both. + def set(value) + # Set a name for looking up associated options like the event. + name = self.class.value_name(value) + + call = self.class.value_option(name, :call) || :none + + if call == :instead + call_valuemethod(name, value) + elsif call == :none + # They haven't provided a block, and our parent does not have + # a provider, so we have no idea how to handle this. + self.fail "#{self.class.name} cannot handle values of type #{value.inspect}" unless @resource.provider + call_provider(value) + else + # LAK:NOTE 20081031 This is a change in behaviour -- you could + # previously specify :call => [;before|:after], which would call + # the setter *in addition to* the block. I'm convinced this + # was never used, and it makes things unecessarily complicated. + # If you want to specify a block and still call the setter, then + # do so in the block. + devfail "Cannot use obsolete :call value '#{call}' for property '#{self.class.name}'" + end + end + + # If there's a shadowing metaparam, instantiate it now. + # This allows us to create a property or parameter with the + # same name as a metaparameter, and the metaparam will only be + # stored as a shadow. + def setup_shadow(klass) + @shadow = klass.new(:resource => self.resource) + end + + # Only return the first value + def should + return nil unless defined?(@should) + + self.devfail "should for #{self.class.name} on #{resource.name} is not an array" unless @should.is_a?(Array) + + if match_all? + return @should.collect { |val| self.unmunge(val) } + else + return self.unmunge(@should[0]) + end + end + + # Set the should value. + def should=(values) + values = [values] unless values.is_a?(Array) + + @shouldorig = values + + values.each { |val| validate(val) } + @should = values.collect { |val| self.munge(val) } + end + + def should_to_s(newvalue) + [newvalue].flatten.join(" ") + end + + def sync + devfail "Got a nil value for should" unless should + set(should) + end + + # Verify that the passed value is valid. + # If the developer uses a 'validate' hook, this method will get overridden. + def unsafe_validate(value) + super + validate_features_per_value(value) + end + + # Make sure that we've got all of the required features for a given value. 
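The required_features check just below is the property-side half of a contract with providers; a hypothetical pairing is sketched here (the type, feature, and provider names are invented for illustration):

Puppet::Type.newtype(:demo_thing) do
  feature :freezable, "The provider can freeze the thing."

  newproperty(:state) do
    newvalue(:frozen, :required_features => :freezable)
    newvalue(:thawed)
  end
end

Puppet::Type.type(:demo_thing).provide(:demo_provider) do
  has_features :freezable   # without this, setting state => frozen raises ArgumentError
end
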
+ def validate_features_per_value(value) + if features = self.class.value_option(self.class.value_name(value), :required_features) + features = Array(features) + needed_features = features.collect { |f| f.to_s }.join(", ") + raise ArgumentError, "Provider must have features '#{needed_features}' to set '#{self.class.name}' to '#{value}'" unless provider.satisfies?(features) + end + end + + # Just return any should value we might have. + def value + self.should + end + + # Match the Parameter interface, but we really just use 'should' internally. + # Note that the should= method does all of the validation and such. + def value=(value) + self.should = value + end +end diff --git a/mcollective/lib/puppet/property/ensure.rb b/mcollective/lib/puppet/property/ensure.rb new file mode 100644 index 000000000..8b97ddeab --- /dev/null +++ b/mcollective/lib/puppet/property/ensure.rb @@ -0,0 +1,92 @@ +require 'puppet/property' + +# This property will get automatically added to any type that responds +# to the methods 'exists?', 'create', and 'destroy'. +class Puppet::Property::Ensure < Puppet::Property + @name = :ensure + + def self.defaultvalues + newvalue(:present) do + if @resource.provider and @resource.provider.respond_to?(:create) + @resource.provider.create + else + @resource.create + end + nil # return nil so the event is autogenerated + end + + newvalue(:absent) do + if @resource.provider and @resource.provider.respond_to?(:destroy) + @resource.provider.destroy + else + @resource.destroy + end + nil # return nil so the event is autogenerated + end + + defaultto do + if @resource.managed? + :present + else + nil + end + end + + # This doc will probably get overridden + @doc ||= "The basic property that the resource should be in." + end + + def self.inherited(sub) + # Add in the two properties that everyone will have. + sub.class_eval do + end + end + + def change_to_s(currentvalue, newvalue) + begin + if currentvalue == :absent or currentvalue.nil? + return "created" + elsif newvalue == :absent + return "removed" + else + return "#{self.name} changed '#{self.is_to_s(currentvalue)}' to '#{self.should_to_s(newvalue)}'" + end + rescue Puppet::Error, Puppet::DevError + raise + rescue => detail + raise Puppet::DevError, "Could not convert change #{self.name} to string: #{detail}" + end + end + + def retrieve + # XXX This is a problem -- whether the object exists or not often + # depends on the results of other properties, yet we're the first property + # to get checked, which means that those other properties do not have + # @is values set. This seems to be the source of quite a few bugs, + # although they're mostly logging bugs, not functional ones. + if prov = @resource.provider and prov.respond_to?(:exists?) + result = prov.exists? + elsif @resource.respond_to?(:exists?) + result = @resource.exists? + else + raise Puppet::DevError, "No ability to determine if #{@resource.class.name} exists" + end + if result + return :present + else + return :absent + end + end + + # If they're talking about the thing at all, they generally want to + # say it should exist. + #defaultto :present + defaultto do + if @resource.managed? + :present + else + nil + end + end +end + diff --git a/mcollective/lib/puppet/property/keyvalue.rb b/mcollective/lib/puppet/property/keyvalue.rb new file mode 100644 index 000000000..57d0ea2d9 --- /dev/null +++ b/mcollective/lib/puppet/property/keyvalue.rb @@ -0,0 +1,86 @@ +#This subclass of property manages string key value pairs. 
+ +#In order to use this property: +# - the @should value must be an array of keyvalue pairs separated by the 'separator' +# - the retrieve method should return a hash with the keys as symbols +# IMPORTANT NOTE: In order for this property to work there must also be a 'membership' parameter +# The class that inherits from property should override that method with the symbol for the membership + +require 'puppet/property' + +module Puppet + class Property + class KeyValue < Property + + def hash_to_key_value_s(hash) + hash.select { |k,v| true }.map { |pair| pair.join(separator) }.join(delimiter) + end + + def should_to_s(should_value) + hash_to_key_value_s(should_value) + end + + def is_to_s(current_value) + hash_to_key_value_s(current_value) + end + + def membership + :key_value_membership + end + + def inclusive? + @resource[membership] == :inclusive + end + + def hashify(key_value_array) + #turns string array into a hash + key_value_array.inject({}) do |hash, key_value| + tmp = key_value.split(separator) + hash[tmp[0].intern] = tmp[1] + hash + end + end + + def process_current_hash(current) + return {} if current == :absent + + #inclusive means we are managing everything so if it isn't in should, its gone + current.each_key { |key| current[key] = nil } if inclusive? + current + end + + def should + return nil unless @should + + members = hashify(@should) + current = process_current_hash(retrieve) + + #shared keys will get overwritten by members + current.merge(members) + end + + def separator + "=" + end + + def delimiter + ";" + end + + def retrieve + #ok, some 'convention' if the keyvalue property is named properties, provider should implement a properties method + if key_hash = provider.send(name) and key_hash != :absent + return key_hash + else + return :absent + end + end + + def insync?(is) + return true unless is + + (is == self.should) + end + end + end +end diff --git a/mcollective/lib/puppet/property/list.rb b/mcollective/lib/puppet/property/list.rb new file mode 100644 index 000000000..b86dc87f2 --- /dev/null +++ b/mcollective/lib/puppet/property/list.rb @@ -0,0 +1,75 @@ +require 'puppet/property' + +module Puppet + class Property + class List < Property + + def should_to_s(should_value) + #just return the should value + should_value + end + + def is_to_s(currentvalue) + if currentvalue == :absent + return "absent" + else + return currentvalue.join(delimiter) + end + end + + def membership + :membership + end + + def add_should_with_current(should, current) + should += current if current.is_a?(Array) + should.uniq + end + + def inclusive? + @resource[membership] == :inclusive + end + + #dearrayify was motivated because to simplify the implementation of the OrderedList property + def dearrayify(array) + array.sort.join(delimiter) + end + + def should + return nil unless @should + + members = @should + #inclusive means we are managing everything so if it isn't in should, its gone + members = add_should_with_current(members, retrieve) if ! inclusive? 
+ + dearrayify(members) + end + + def delimiter + "," + end + + def retrieve + #ok, some 'convention' if the list property is named groups, provider should implement a groups method + if tmp = provider.send(name) and tmp != :absent + return tmp.split(delimiter) + else + return :absent + end + end + + def prepare_is_for_comparison(is) + if is == :absent + is = [] + end + dearrayify(is) + end + + def insync?(is) + return true unless is + + (prepare_is_for_comparison(is) == self.should) + end + end + end +end diff --git a/mcollective/lib/puppet/property/ordered_list.rb b/mcollective/lib/puppet/property/ordered_list.rb new file mode 100644 index 000000000..7408b3019 --- /dev/null +++ b/mcollective/lib/puppet/property/ordered_list.rb @@ -0,0 +1,22 @@ +require 'puppet/property/list' + +module Puppet + class Property + class OrderedList < List + + def add_should_with_current(should, current) + if current.is_a?(Array) + #tricky trick + #Preserve all the current items in the list + #but move them to the back of the line + should = should + (current - should) + end + should + end + + def dearrayify(array) + array.join(delimiter) + end + end + end +end diff --git a/mcollective/lib/puppet/provider.rb b/mcollective/lib/puppet/provider.rb new file mode 100644 index 000000000..4456feb4e --- /dev/null +++ b/mcollective/lib/puppet/provider.rb @@ -0,0 +1,281 @@ +# The container class for implementations. +class Puppet::Provider + include Puppet::Util + include Puppet::Util::Errors + include Puppet::Util::Warnings + extend Puppet::Util::Warnings + + require 'puppet/provider/confiner' + + extend Puppet::Provider::Confiner + + Puppet::Util.logmethods(self, true) + + class << self + # Include the util module so we have access to things like 'which' + include Puppet::Util, Puppet::Util::Docs + include Puppet::Util::Logging + attr_accessor :name + + # The source parameter exists so that providers using the same + # source can specify this, so reading doesn't attempt to read the + # same package multiple times. + attr_writer :source + + # LAK 2007-05-09: Keep the model stuff around for backward compatibility + attr_reader :model + attr_accessor :resource_type + attr_writer :doc + end + + # LAK 2007-05-09: Keep the model stuff around for backward compatibility + attr_reader :model + attr_accessor :resource + + def self.command(name) + name = symbolize(name) + + if defined?(@commands) and command = @commands[name] + # nothing + elsif superclass.respond_to? :command and command = superclass.command(name) + # nothing + else + raise Puppet::DevError, "No command #{name} defined for provider #{self.name}" + end + + which(command) + end + + # Define commands that are not optional. + def self.commands(hash) + optional_commands(hash) do |name, path| + confine :exists => path, :for_binary => true + end + end + + # Is the provided feature a declared feature? + def self.declared_feature?(name) + defined?(@declared_features) and @declared_features.include?(name) + end + + # Does this implementation match all of the default requirements? If + # defaults are empty, we return false. + def self.default? + return false if @defaults.empty? + if @defaults.find do |fact, values| + values = [values] unless values.is_a? Array + if fval = Facter.value(fact).to_s and fval != "" + fval = fval.to_s.downcase.intern + else + return false + end + + # If any of the values match, we're a default. 
+ if values.find do |value| fval == value.to_s.downcase.intern end + false + else + true + end + end + return false + else + return true + end + end + + # Store how to determine defaults. + def self.defaultfor(hash) + hash.each do |d,v| + @defaults[d] = v + end + end + + def self.specificity + (@defaults.length * 100) + ancestors.select { |a| a.is_a? Class }.length + end + + def self.initvars + @defaults = {} + @commands = {} + end + + # The method for returning a list of provider instances. Note that it returns providers, preferably with values already + # filled in, not resources. + def self.instances + raise Puppet::DevError, "Provider #{self.name} has not defined the 'instances' class method" + end + + # Create the methods for a given command. + def self.make_command_methods(name) + # Now define a method for that command + unless singleton_class.method_defined?(name) + meta_def(name) do |*args| + raise Puppet::Error, "Command #{name} is missing" unless command(name) + if args.empty? + cmd = [command(name)] + else + cmd = [command(name)] + args + end + # This might throw an ExecutionFailure, but the system above + # will catch it, if so. + return execute(cmd) + end + + # And then define an instance method that just calls the class method. + # We need both, so both instances and classes can easily run the commands. + unless method_defined?(name) + define_method(name) do |*args| + self.class.send(name, *args) + end + end + end + end + + # Create getter/setter methods for each property our resource type supports. + # They all get stored in @property_hash. This method is useful + # for those providers that use prefetch and flush. + def self.mkmodelmethods + warnonce "Provider.mkmodelmethods is deprecated; use Provider.mk_resource_methods" + mk_resource_methods + end + + # Create getter/setter methods for each property our resource type supports. + # They all get stored in @property_hash. This method is useful + # for those providers that use prefetch and flush. + def self.mk_resource_methods + [resource_type.validproperties, resource_type.parameters].flatten.each do |attr| + attr = symbolize(attr) + next if attr == :name + define_method(attr) do + @property_hash[attr] || :absent + end + + define_method(attr.to_s + "=") do |val| + @property_hash[attr] = val + end + end + end + + self.initvars + + # Define one or more binaries we'll be using. If a block is passed, yield the name + # and path to the block (really only used by 'commands'). + def self.optional_commands(hash) + hash.each do |name, path| + name = symbolize(name) + @commands[name] = path + + yield(name, path) if block_given? + + # Now define the class and instance methods. + make_command_methods(name) + end + end + + # Retrieve the data source. Defaults to the provider name. + def self.source + @source ||= self.name + end + + # Does this provider support the specified parameter? 
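A sketch of a provider that exercises the class-level helpers defined above (commands, defaultfor, instances, mk_resource_methods); the :demo_pkg provider, the :demo_os fact value, and the /usr/bin/pkgtool binary are invented for illustration:

Puppet::Type.type(:package).provide(:demo_pkg) do
  desc "Hypothetical provider, for illustration only."

  commands   :pkgtool => "/usr/bin/pkgtool"   # confines on the binary and defines pkgtool()
  defaultfor :operatingsystem => :demo_os     # feeds the default? scoring above
  mk_resource_methods                         # getters/setters backed by @property_hash

  def self.instances
    # pkgtool() is the wrapper generated by `commands`; it raises if the binary is missing.
    pkgtool("--list").split("\n").collect do |line|
      new(:name => line.strip, :ensure => :installed)
    end
  end
end
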
+ def self.supports_parameter?(param) + if param.is_a?(Class) + klass = param + else + unless klass = resource_type.attrclass(param) + raise Puppet::DevError, "'#{param}' is not a valid parameter for #{resource_type.name}" + end + end + return true unless features = klass.required_features + + !!satisfies?(*features) + end + +# def self.to_s +# unless defined?(@str) +# if self.resource_type +# @str = "#{resource_type.name} provider #{self.name}" +# else +# @str = "unattached provider #{self.name}" +# end +# end +# @str +# end + + dochook(:defaults) do + if @defaults.length > 0 + return " Default for " + @defaults.collect do |f, v| + "`#{f}` == `#{v}`" + end.join(" and ") + "." + end + end + + dochook(:commands) do + if @commands.length > 0 + return " Required binaries: " + @commands.collect do |n, c| + "`#{c}`" + end.join(", ") + "." + end + end + + dochook(:features) do + if features.length > 0 + return " Supported features: " + features.collect do |f| + "`#{f}`" + end.join(", ") + "." + end + end + + # Remove the reference to the resource, so GC can clean up. + def clear + @resource = nil + @model = nil + end + + # Retrieve a named command. + def command(name) + self.class.command(name) + end + + # Get a parameter value. + def get(param) + @property_hash[symbolize(param)] || :absent + end + + def initialize(resource = nil) + if resource.is_a?(Hash) + # We don't use a duplicate here, because some providers (ParsedFile, at least) + # use the hash here for later events. + @property_hash = resource + elsif resource + @resource = resource + # LAK 2007-05-09: Keep the model stuff around for backward compatibility + @model = resource + @property_hash = {} + else + @property_hash = {} + end + end + + def name + if n = @property_hash[:name] + return n + elsif self.resource + resource.name + else + raise Puppet::DevError, "No resource and no name in property hash in #{self.class.name} instance" + end + end + + # Set passed params as the current values. + def set(params) + params.each do |param, value| + @property_hash[symbolize(param)] = value + end + end + + def to_s + "#{@resource}(provider=#{self.class.name})" + end +end + diff --git a/mcollective/lib/puppet/provider/augeas/augeas.rb b/mcollective/lib/puppet/provider/augeas/augeas.rb new file mode 100644 index 000000000..7dbd06240 --- /dev/null +++ b/mcollective/lib/puppet/provider/augeas/augeas.rb @@ -0,0 +1,361 @@ +#-- +# Copyright (C) 2008 Red Hat Inc. +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public +# License as published by the Free Software Foundation; either +# version 2 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# General Public License for more details. +# +# You should have received a copy of the GNU General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +# +# Author: Bryan Kearney + +require 'augeas' if Puppet.features.augeas? +require 'strscan' + +Puppet::Type.type(:augeas).provide(:augeas) do + include Puppet::Util + + confine :true => Puppet.features.augeas? 
+ + has_features :parse_commands, :need_to_run?,:execute_changes + + SAVE_NOOP = "noop" + SAVE_OVERWRITE = "overwrite" + + COMMANDS = { + "set" => [ :path, :string ], + "rm" => [ :path ], + "clear" => [ :path ], + "insert" => [ :string, :string, :path ], + "get" => [ :path, :comparator, :string ], + "match" => [ :path, :glob ], + "size" => [:comparator, :int], + "include" => [:string], + "not_include" => [:string], + "==" => [:glob], + "!=" => [:glob] + } + + COMMANDS["ins"] = COMMANDS["insert"] + COMMANDS["remove"] = COMMANDS["rm"] + + attr_accessor :aug + + # Extracts an 2 dimensional array of commands which are in the + # form of command path value. + # The input can be + # - A string with one command + # - A string with many commands per line + # - An array of strings. + def parse_commands(data) + context = resource[:context] + # Add a trailing / if it is not there + if (context.length > 0) + context << "/" if context[-1, 1] != "/" + end + + data = data.split($/) if data.is_a?(String) + data = data.flatten + args = [] + data.each do |line| + line.strip! + next if line.nil? || line.empty? + argline = [] + sc = StringScanner.new(line) + cmd = sc.scan(/\w+|==|!=/) + formals = COMMANDS[cmd] + fail("Unknown command #{cmd}") unless formals + argline << cmd + narg = 0 + formals.each do |f| + sc.skip(/\s+/) + narg += 1 + if f == :path + start = sc.pos + nbracket = 0 + inSingleTick = false + inDoubleTick = false + begin + sc.skip(/([^\]\[\s\\'"]|\\.)+/) + ch = sc.getch + nbracket += 1 if ch == "[" + nbracket -= 1 if ch == "]" + inSingleTick = !inSingleTick if ch == "'" + inDoubleTick = !inDoubleTick if ch == "\"" + fail("unmatched [") if nbracket < 0 + end until ((nbracket == 0 && !inSingleTick && !inDoubleTick && (ch =~ /\s/)) || sc.eos?) + len = sc.pos - start + len -= 1 unless sc.eos? + unless p = sc.string[start, len] + fail("missing path argument #{narg} for #{cmd}") + end + # Rip off any ticks if they are there. + p = p[1, (p.size - 2)] if p[0,1] == "'" || p[0,1] == "\"" + p.chomp!("/") + if p[0,1] != '$' && p[0,1] != "/" + argline << context + p + else + argline << p + end + elsif f == :string + delim = sc.peek(1) + if delim == "'" || delim == "\"" + sc.getch + argline << sc.scan(/([^\\#{delim}]|(\\.))*/) + sc.getch + else + argline << sc.scan(/[^\s]+/) + end + fail("missing string argument #{narg} for #{cmd}") unless argline[-1] + elsif f == :comparator + argline << sc.scan(/(==|!=|=~|<|<=|>|>=)/) + unless argline[-1] + puts sc.rest + fail("invalid comparator for command #{cmd}") + end + elsif f == :int + argline << sc.scan(/\d+/).to_i + elsif f== :glob + argline << sc.rest + end + end + args << argline + end + args + end + + + def open_augeas + unless @aug + flags = Augeas::NONE + flags = Augeas::TYPE_CHECK if resource[:type_check] == :true + flags |= Augeas::NO_MODL_AUTOLOAD if resource[:incl] + root = resource[:root] + load_path = resource[:load_path] + debug("Opening augeas with root #{root}, lens path #{load_path}, flags #{flags}") + @aug = Augeas::open(root, load_path,flags) + + debug("Augeas version #{get_augeas_version} is installed") if get_augeas_version >= "0.3.6" + + if resource[:incl] + aug.set("/augeas/load/Xfm/lens", resource[:lens]) + aug.set("/augeas/load/Xfm/incl", resource[:incl]) + aug.load + end + end + @aug + end + + def close_augeas + if @aug + @aug.close + debug("Closed the augeas connection") + @aug = nil + end + end + + # Used by the need_to_run? method to process get filters. 
Returns + # true if there is a match, false if otherwise + # Assumes a syntax of get /files/path [COMPARATOR] value + def process_get(cmd_array) + return_value = false + + #validate and tear apart the command + fail ("Invalid command: #{cmd_array.join(" ")}") if cmd_array.length < 4 + cmd = cmd_array.shift + path = cmd_array.shift + comparator = cmd_array.shift + arg = cmd_array.join(" ") + + #check the value in augeas + result = @aug.get(path) || '' + case comparator + when "!=" + return_value = (result != arg) + when "=~" + regex = Regexp.new(arg) + return_value = (result =~ regex) + else + return_value = (result.send(comparator, arg)) + end + !!return_value + end + + # Used by the need_to_run? method to process match filters. Returns + # true if there is a match, false if otherwise + def process_match(cmd_array) + return_value = false + + #validate and tear apart the command + fail("Invalid command: #{cmd_array.join(" ")}") if cmd_array.length < 3 + cmd = cmd_array.shift + path = cmd_array.shift + + # Need to break apart the clause + clause_array = parse_commands(cmd_array.shift)[0] + verb = clause_array.shift + + #Get the values from augeas + result = @aug.match(path) || [] + fail("Error trying to match path '#{path}'") if (result == -1) + + # Now do the work + case verb + when "size" + fail("Invalid command: #{cmd_array.join(" ")}") if clause_array.length != 2 + comparator = clause_array.shift + arg = clause_array.shift + return_value = (result.size.send(comparator, arg)) + when "include" + arg = clause_array.shift + return_value = result.include?(arg) + when "not_include" + arg = clause_array.shift + return_value = !result.include?(arg) + when "==" + begin + arg = clause_array.shift + new_array = eval arg + return_value = (result == new_array) + rescue + fail("Invalid array in command: #{cmd_array.join(" ")}") + end + when "!=" + begin + arg = clause_array.shift + new_array = eval arg + return_value = (result != new_array) + rescue + fail("Invalid array in command: #{cmd_array.join(" ")}") + end + end + !!return_value + end + + def get_augeas_version + @aug.get("/augeas/version") || "" + end + + def set_augeas_save_mode(mode) + @aug.set("/augeas/save", mode) + end + + def files_changed? + saved_files = @aug.match("/augeas/events/saved") + saved_files.size > 0 + end + + # Determines if augeas acutally needs to run. + def need_to_run? + force = resource[:force] + return_value = true + begin + open_augeas + filter = resource[:onlyif] + unless filter == "" + cmd_array = parse_commands(filter)[0] + command = cmd_array[0]; + begin + case command + when "get"; return_value = process_get(cmd_array) + when "match"; return_value = process_match(cmd_array) + end + rescue SystemExit,NoMemoryError + raise + rescue Exception => e + fail("Error sending command '#{command}' with params #{cmd_array[1..-1].inspect}/#{e.message}") + end + end + + unless force + # If we have a verison of augeas which is at least 0.3.6 then we + # can make the changes now, see if changes were made, and + # actually do the save. + if return_value and get_augeas_version >= "0.3.6" + debug("Will attempt to save and only run if files changed") + set_augeas_save_mode(SAVE_NOOP) + do_execute_changes + save_result = @aug.save + saved_files = @aug.match("/augeas/events/saved") + if save_result and not files_changed? 
+ debug("Skipping because no files were changed") + return_value = false + else + debug("Files changed, should execute") + end + end + end + ensure + close_augeas + end + return_value + end + + def execute_changes + # Re-connect to augeas, and re-execute the changes + begin + open_augeas + set_augeas_save_mode(SAVE_OVERWRITE) if get_augeas_version >= "0.3.6" + + do_execute_changes + + success = @aug.save + fail("Save failed with return code #{success}") if success != true + ensure + close_augeas + end + + :executed + end + + # Actually execute the augeas changes. + def do_execute_changes + commands = parse_commands(resource[:changes]) + commands.each do |cmd_array| + fail("invalid command #{cmd_array.join[" "]}") if cmd_array.length < 2 + command = cmd_array[0] + cmd_array.shift + begin + case command + when "set" + debug("sending command '#{command}' with params #{cmd_array.inspect}") + rv = aug.set(cmd_array[0], cmd_array[1]) + fail("Error sending command '#{command}' with params #{cmd_array.inspect}") if (!rv) + when "rm", "remove" + debug("sending command '#{command}' with params #{cmd_array.inspect}") + rv = aug.rm(cmd_array[0]) + fail("Error sending command '#{command}' with params #{cmd_array.inspect}") if (rv == -1) + when "clear" + debug("sending command '#{command}' with params #{cmd_array.inspect}") + rv = aug.clear(cmd_array[0]) + fail("Error sending command '#{command}' with params #{cmd_array.inspect}") if (!rv) + when "insert", "ins" + label = cmd_array[0] + where = cmd_array[1] + path = cmd_array[2] + case where + when "before"; before = true + when "after"; before = false + else fail("Invalid value '#{where}' for where param") + end + debug("sending command '#{command}' with params #{[label, where, path].inspect}") + rv = aug.insert(path, label, before) + fail("Error sending command '#{command}' with params #{cmd_array.inspect}") if (rv == -1) + else fail("Command '#{command}' is not supported") + end + rescue SystemExit,NoMemoryError + raise + rescue Exception => e + fail("Error sending command '#{command}' with params #{cmd_array.inspect}/#{e.message}") + end + end + end +end diff --git a/mcollective/lib/puppet/provider/computer/computer.rb b/mcollective/lib/puppet/provider/computer/computer.rb new file mode 100644 index 000000000..dd055beb3 --- /dev/null +++ b/mcollective/lib/puppet/provider/computer/computer.rb @@ -0,0 +1,20 @@ +require 'puppet/provider/nameservice/directoryservice' + +Puppet::Type.type(:computer).provide :directoryservice, :parent => Puppet::Provider::NameService::DirectoryService do + desc "Computer object management using DirectoryService on OS X. + Note that these are distinctly different kinds of objects to 'hosts', + as they require a MAC address and can have all sorts of policy attached to + them. + + This provider only manages Computer objects in the local directory service + domain, not in remote directories. + + If you wish to manage /etc/hosts on Mac OS X, then simply use the host + type as per other platforms." + + confine :operatingsystem => :darwin + defaultfor :operatingsystem => :darwin + + # hurray for abstraction. The nameservice directoryservice provider can + # handle everything we need. super. +end diff --git a/mcollective/lib/puppet/provider/confine.rb b/mcollective/lib/puppet/provider/confine.rb new file mode 100644 index 000000000..6825def7e --- /dev/null +++ b/mcollective/lib/puppet/provider/confine.rb @@ -0,0 +1,80 @@ +# The class that handles testing whether our providers +# actually work or not. 
+require 'puppet/util' + +class Puppet::Provider::Confine + include Puppet::Util + + @tests = {} + + class << self + attr_accessor :name + end + + def self.inherited(klass) + name = klass.to_s.split("::").pop.downcase.to_sym + raise "Test #{name} is already defined" if @tests.include?(name) + + klass.name = name + + @tests[name] = klass + end + + def self.test(name) + unless @tests[name] + begin + require "puppet/provider/confine/#{name}" + rescue LoadError => detail + unless detail.to_s =~ /No such file/i + warn "Could not load confine test '#{name}': #{detail}" + end + # Could not find file + end + end + @tests[name] + end + + attr_reader :values + + # Mark that this confine is used for testing binary existence. + attr_accessor :for_binary + def for_binary? + for_binary + end + + # Used for logging. + attr_accessor :label + + def initialize(values) + values = [values] unless values.is_a?(Array) + @values = values + end + + # Provide a hook for the message when there's a failure. + def message(value) + "" + end + + # Collect the results of all of them. + def result + values.collect { |value| pass?(value) } + end + + # Test whether our confine matches. + def valid? + values.each do |value| + unless pass?(value) + Puppet.debug(label + ": " + message(value)) + return false + end + end + + return true + ensure + reset + end + + # Provide a hook for subclasses. + def reset + end +end diff --git a/mcollective/lib/puppet/provider/confine/exists.rb b/mcollective/lib/puppet/provider/confine/exists.rb new file mode 100644 index 000000000..09f94dfd9 --- /dev/null +++ b/mcollective/lib/puppet/provider/confine/exists.rb @@ -0,0 +1,19 @@ +require 'puppet/provider/confine' + +class Puppet::Provider::Confine::Exists < Puppet::Provider::Confine + def self.summarize(confines) + confines.inject([]) { |total, confine| total + confine.summary } + end + + def pass?(value) + value && (for_binary? ? which(value) : FileTest.exist?(value)) + end + + def message(value) + "file #{value} does not exist" + end + + def summary + result.zip(values).inject([]) { |array, args| val, f = args; array << f unless val; array } + end +end diff --git a/mcollective/lib/puppet/provider/confine/false.rb b/mcollective/lib/puppet/provider/confine/false.rb new file mode 100644 index 000000000..1c11dd40f --- /dev/null +++ b/mcollective/lib/puppet/provider/confine/false.rb @@ -0,0 +1,19 @@ +require 'puppet/provider/confine' + +class Puppet::Provider::Confine::False < Puppet::Provider::Confine + def self.summarize(confines) + confines.inject(0) { |count, confine| count + confine.summary } + end + + def pass?(value) + ! value + end + + def message(value) + "true value when expecting false" + end + + def summary + result.find_all { |v| v == false }.length + end +end diff --git a/mcollective/lib/puppet/provider/confine/feature.rb b/mcollective/lib/puppet/provider/confine/feature.rb new file mode 100644 index 000000000..b223b8b11 --- /dev/null +++ b/mcollective/lib/puppet/provider/confine/feature.rb @@ -0,0 +1,17 @@ +require 'puppet/provider/confine' + +class Puppet::Provider::Confine::Feature < Puppet::Provider::Confine + def self.summarize(confines) + confines.collect { |c| c.values }.flatten.uniq.find_all { |value| ! confines[0].pass?(value) } + end + + # Is the named feature available? 
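Confine::Feature defers to Puppet.features, the same registry queried directly elsewhere in this patch:

Puppet.features.posix?    # => true on POSIX platforms
Puppet.features.augeas?   # => true only when the augeas library can be loaded
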
+ def pass?(value) + Puppet.features.send(value.to_s + "?") + end + + def message(value) + "feature #{value} is missing" + end +end + diff --git a/mcollective/lib/puppet/provider/confine/true.rb b/mcollective/lib/puppet/provider/confine/true.rb new file mode 100644 index 000000000..559f2675f --- /dev/null +++ b/mcollective/lib/puppet/provider/confine/true.rb @@ -0,0 +1,20 @@ +require 'puppet/provider/confine' + +class Puppet::Provider::Confine::True < Puppet::Provider::Confine + def self.summarize(confines) + confines.inject(0) { |count, confine| count + confine.summary } + end + + def pass?(value) + # Double negate, so we only get true or false. + ! ! value + end + + def message(value) + "false value when expecting true" + end + + def summary + result.find_all { |v| v == true }.length + end +end diff --git a/mcollective/lib/puppet/provider/confine/variable.rb b/mcollective/lib/puppet/provider/confine/variable.rb new file mode 100644 index 000000000..af8e5d314 --- /dev/null +++ b/mcollective/lib/puppet/provider/confine/variable.rb @@ -0,0 +1,59 @@ +require 'puppet/provider/confine' + +# Require a specific value for a variable, either a Puppet setting +# or a Facter value. This class is a bit weird because the name +# is set explicitly by the ConfineCollection class -- from this class, +# it's not obvious how the name would ever get set. +class Puppet::Provider::Confine::Variable < Puppet::Provider::Confine + # Provide a hash summary of failing confines -- the key of the hash + # is the name of the confine, and the value is the missing yet required values. + # Only returns failed values, not all required values. + def self.summarize(confines) + result = Hash.new { |hash, key| hash[key] = [] } + confines.inject(result) { |total, confine| total[confine.name] += confine.values unless confine.valid?; total } + end + + # This is set by ConfineCollection. + attr_accessor :name + + # Retrieve the value from facter + def facter_value + @facter_value ||= ::Facter.value(name).to_s.downcase + end + + def initialize(values) + super + @values = @values.collect { |v| v.to_s.downcase } + end + + def message(value) + "facter value '#{test_value}' for '#{self.name}' not in required list '#{values.join(",")}'" + end + + # Compare the passed-in value to the retrieved value. + def pass?(value) + test_value.downcase.to_s == value.to_s.downcase + end + + def reset + # Reset the cache. We want to cache it during a given + # run, but not across runs. + @facter_value = nil + end + + def valid? + @values.include?(test_value.to_s.downcase) + ensure + reset + end + + private + + def setting? + Puppet.settings.valid?(name) + end + + def test_value + setting? ? Puppet.settings[name] : facter_value + end +end diff --git a/mcollective/lib/puppet/provider/confine_collection.rb b/mcollective/lib/puppet/provider/confine_collection.rb new file mode 100644 index 000000000..46fd3baaf --- /dev/null +++ b/mcollective/lib/puppet/provider/confine_collection.rb @@ -0,0 +1,50 @@ +# Manage a collection of confines, returning a boolean or +# helpful information. 
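The collection built below is what ultimately backs a provider's suitability check; an illustrative use of the two return modes, using the exec :posix provider added elsewhere in this patch:

provider = Puppet::Type.type(:exec).provider(:posix)
provider.suitable?          # => true/false -- short form, ConfineCollection#valid?
provider.suitable?(false)   # => hash of failing confines, ConfineCollection#summary
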
+require 'puppet/provider/confine' + +class Puppet::Provider::ConfineCollection + def confine(hash) + if hash.include?(:for_binary) + for_binary = true + hash.delete(:for_binary) + else + for_binary = false + end + hash.each do |test, values| + if klass = Puppet::Provider::Confine.test(test) + @confines << klass.new(values) + @confines[-1].for_binary = true if for_binary + else + confine = Puppet::Provider::Confine.test(:variable).new(values) + confine.name = test + @confines << confine + end + @confines[-1].label = self.label + end + end + + attr_reader :label + def initialize(label) + @label = label + @confines = [] + end + + # Return a hash of the whole confine set, used for the Provider + # reference. + def summary + confines = Hash.new { |hash, key| hash[key] = [] } + @confines.each { |confine| confines[confine.class] << confine } + result = {} + confines.each do |klass, list| + value = klass.summarize(list) + next if (value.respond_to?(:length) and value.length == 0) or (value == 0) + result[klass.name] = value + + end + result + end + + def valid? + ! @confines.detect { |c| ! c.valid? } + end +end diff --git a/mcollective/lib/puppet/provider/confiner.rb b/mcollective/lib/puppet/provider/confiner.rb new file mode 100644 index 000000000..6e1fb23ab --- /dev/null +++ b/mcollective/lib/puppet/provider/confiner.rb @@ -0,0 +1,16 @@ +require 'puppet/provider/confine_collection' + +module Puppet::Provider::Confiner + def confine(hash) + confine_collection.confine(hash) + end + + def confine_collection + @confine_collection ||= Puppet::Provider::ConfineCollection.new(self.to_s) + end + + # Check whether this implementation is suitable for our platform. + def suitable?(short = true) + return(short ? confine_collection.valid? : confine_collection.summary) + end +end diff --git a/mcollective/lib/puppet/provider/cron/crontab.rb b/mcollective/lib/puppet/provider/cron/crontab.rb new file mode 100755 index 000000000..8a347b331 --- /dev/null +++ b/mcollective/lib/puppet/provider/cron/crontab.rb @@ -0,0 +1,206 @@ +require 'puppet/provider/parsedfile' + +tab = case Facter.value(:operatingsystem) + when "Solaris" + :suntab + when "AIX" + :aixtab + else + :crontab + end + + + + Puppet::Type.type(:cron).provide( + :crontab, + :parent => Puppet::Provider::ParsedFile, + :default_target => ENV["USER"] || "root", + + :filetype => tab +) do + commands :crontab => "crontab" + + text_line :comment, :match => %r{^#}, :post_parse => proc { |record| + record[:name] = $1 if record[:line] =~ /Puppet Name: (.+)\s*$/ + } + + text_line :blank, :match => %r{^\s*$} + + text_line :environment, :match => %r{^\w+=} + + record_line :freebsd_special, :fields => %w{special command}, + :match => %r{^@(\w+)\s+(.+)$}, :pre_gen => proc { |record| + record[:special] = "@" + record[:special] + } + + crontab = record_line :crontab, :fields => %w{minute hour monthday month weekday command}, + :match => %r{^\s*(\S+)\s+(\S+)\s+(\S+)\s+(\S+)\s+(\S+)\s+(.+)$}, + :optional => %w{minute hour weekday month monthday}, :absent => "*" + + class << crontab + def numeric_fields + fields - [:command] + end + # Do some post-processing of the parsed record. Basically just + # split the numeric fields on ','. + def post_parse(record) + numeric_fields.each do |field| + if val = record[field] and val != :absent + record[field] = record[field].split(",") + end + end + end + + # Join the fields back up based on ','. 
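A sketch of what the :crontab record_line and post_parse above produce for one entry; the values are approximate and shown only to make the field handling concrete:

# Parsing the line:  */5 * * * 1-5 /usr/bin/backup
example_record = {
  :record_type => :crontab,
  :minute      => ["*/5"],       # numeric fields are split on ","
  :hour        => :absent,       # "*" is recorded as :absent
  :monthday    => :absent,
  :month       => :absent,
  :weekday     => ["1-5"],
  :command     => "/usr/bin/backup"
}
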
+ def pre_gen(record) + numeric_fields.each do |field| + if vals = record[field] and vals.is_a?(Array) + record[field] = vals.join(",") + end + end + end + + + # Add name and environments as necessary. + def to_line(record) + str = "" + str = "# Puppet Name: #{record[:name]}\n" if record[:name] + if record[:environment] and record[:environment] != :absent and record[:environment] != [:absent] + record[:environment].each do |env| + str += env + "\n" + end + end + + if record[:special] + str += "@#{record[:special]} #{record[:command]}" + else + str += join(record) + end + str + end + end + + + # Return the header placed at the top of each generated file, warning + # users that modifying this file manually is probably a bad idea. + def self.header +%{# HEADER: This file was autogenerated at #{Time.now} by puppet. +# HEADER: While it can still be managed manually, it is definitely not recommended. +# HEADER: Note particularly that the comments starting with 'Puppet Name' should +# HEADER: not be deleted, as doing so could cause duplicate cron jobs.\n} + end + + # See if we can match the record against an existing cron job. + def self.match(record, resources) + resources.each do |name, resource| + # Match the command first, since it's the most important one. + next unless record[:target] == resource.value(:target) + next unless record[:command] == resource.value(:command) + + # Then check the @special stuff + if record[:special] + next unless resource.value(:special) == record[:special] + end + + # Then the normal fields. + matched = true + record_type(record[:record_type]).fields.each do |field| + next if field == :command + next if field == :special + if record[field] and ! resource.value(field) + #Puppet.info "Cron is missing %s: %s and %s" % + # [field, record[field].inspect, resource.value(field).inspect] + matched = false + break + end + + if ! record[field] and resource.value(field) + #Puppet.info "Hash is missing %s: %s and %s" % + # [field, resource.value(field).inspect, record[field].inspect] + matched = false + break + end + + # Yay differing definitions of absent. + next if (record[field] == :absent and resource.value(field) == "*") + + # Everything should be in the form of arrays, not the normal text. + next if (record[field] == resource.value(field)) + #Puppet.info "Did not match %s: %s vs %s" % + # [field, resource.value(field).inspect, record[field].inspect] + matched = false + break + end + return resource if matched + end + + false + end + + # Collapse name and env records. + def self.prefetch_hook(records) + name = nil + envs = nil + result = records.each { |record| + case record[:record_type] + when :comment + if record[:name] + name = record[:name] + record[:skip] = true + + # Start collecting env values + envs = [] + end + when :environment + # If we're collecting env values (meaning we're in a named cronjob), + # store the line and skip the record. + if envs + envs << record[:line] + record[:skip] = true + end + when :blank + # nothing + else + if name + record[:name] = name + name = nil + end + if envs.nil? or envs.empty? + record[:environment] = :absent + else + # Collect all of the environment lines, and mark the records to be skipped, + # since their data is included in our crontab record. 
+ record[:environment] = envs + + # And turn off env collection again + envs = nil + end + end + }.reject { |record| record[:skip] } + result + end + + def self.to_file(records) + text = super + # Apparently Freebsd will "helpfully" add a new TZ line to every + # single cron line, but not in all cases (e.g., it doesn't do it + # on my machine). This is my attempt to fix it so the TZ lines don't + # multiply. + if text =~ /(^TZ=.+\n)/ + tz = $1 + text.sub!(tz, '') + text = tz + text + end + text + end + + def user=(user) + @property_hash[:user] = user + @property_hash[:target] = user + end + + def user + @property_hash[:user] || @property_hash[:target] + end +end + diff --git a/mcollective/lib/puppet/provider/exec/posix.rb b/mcollective/lib/puppet/provider/exec/posix.rb new file mode 100644 index 000000000..157d0f28d --- /dev/null +++ b/mcollective/lib/puppet/provider/exec/posix.rb @@ -0,0 +1,115 @@ +Puppet::Type.type(:exec).provide :posix do + include Puppet::Util::Execution + + confine :feature => :posix + defaultfor :feature => :posix + + desc <<-EOT + Executes external binaries directly, without passing through a shell or + performing any interpolation. This is a safer and more predictable way + to execute most commands, but prevents the use of globbing and shell + built-ins (including control logic like "for" and "if" statements). + EOT + + def run(command, check = false) + output = nil + status = nil + dir = nil + + checkexe(command) + + if dir = resource[:cwd] + unless File.directory?(dir) + if check + dir = nil + else + self.fail "Working directory '#{dir}' does not exist" + end + end + end + + dir ||= Dir.pwd + + debug "Executing#{check ? " check": ""} '#{command}'" + begin + # Do our chdir + Dir.chdir(dir) do + environment = {} + + environment[:PATH] = resource[:path].join(":") if resource[:path] + + if envlist = resource[:environment] + envlist = [envlist] unless envlist.is_a? Array + envlist.each do |setting| + if setting =~ /^(\w+)=((.|\n)+)$/ + env_name = $1 + value = $2 + if environment.include?(env_name) || environment.include?(env_name.to_sym) + warning "Overriding environment setting '#{env_name}' with '#{value}'" + end + environment[env_name] = value + else + warning "Cannot understand environment setting #{setting.inspect}" + end + end + end + + withenv environment do + Timeout::timeout(resource[:timeout]) do + output, status = Puppet::Util::SUIDManager. + run_and_capture([command], resource[:user], resource[:group]) + end + # The shell returns 127 if the command is missing. + if status.exitstatus == 127 + raise ArgumentError, output + end + end + end + rescue Errno::ENOENT => detail + self.fail detail.to_s + end + + return output, status + end + + # Verify that we have the executable + def checkexe(command) + exe = extractexe(command) + + if resource[:path] + if Puppet.features.posix? and !File.exists?(exe) + withenv :PATH => resource[:path].join(File::PATH_SEPARATOR) do + exe = which(exe) || raise(ArgumentError,"Could not find command '#{exe}'") + end + elsif Puppet.features.microsoft_windows? 
and !File.exists?(exe) + resource[:path].each do |path| + [".exe", ".ps1", ".bat", ".com", ""].each do |extension| + file = File.join(path, exe+extension) + return if File.exists?(file) + end + end + end + end + + raise ArgumentError, "Could not find command '#{exe}'" unless File.exists?(exe) + unless File.executable?(exe) + raise ArgumentError, + "'#{exe}' is not executable" + end + end + + def extractexe(command) + # easy case: command was quoted + if command =~ /^"([^"]+)"/ + $1 + else + command.split(/ /)[0] + end + end + + def validatecmd(command) + exe = extractexe(command) + # if we're not fully qualified, require a path + self.fail "'#{command}' is not qualified and no path was specified. Please qualify the command or specify a path." if File.expand_path(exe) != exe and resource[:path].nil? + end +end diff --git a/mcollective/lib/puppet/provider/exec/shell.rb b/mcollective/lib/puppet/provider/exec/shell.rb new file mode 100644 index 000000000..ad2171005 --- /dev/null +++ b/mcollective/lib/puppet/provider/exec/shell.rb @@ -0,0 +1,26 @@ +Puppet::Type.type(:exec).provide :shell, :parent => :posix do + include Puppet::Util::Execution + + confine :feature => :posix + + desc <<-EOT + Passes the provided command through `/bin/sh`; only available on + POSIX systems. This allows the use of shell globbing and built-ins, and + does not require that the path to a command be fully-qualified. Although + this can be more convenient than the `posix` provider, it also means that + you need to be more careful with escaping; as ever, with great power comes + etc. etc. + + This provider closely resembles the behavior of the `exec` type + in Puppet 0.25.x. + EOT + + def run(command, check = false) + command = %Q{/bin/sh -c "#{command.gsub(/"/,'\"')}"} + super(command, check) + end + + def validatecmd(command) + true + end +end diff --git a/mcollective/lib/puppet/provider/file/posix.rb b/mcollective/lib/puppet/provider/file/posix.rb new file mode 100644 index 000000000..f7b8c9797 --- /dev/null +++ b/mcollective/lib/puppet/provider/file/posix.rb @@ -0,0 +1,97 @@ +Puppet::Type.type(:file).provide :posix do + desc "Uses POSIX functionality to manage file's users and rights." + + confine :feature => :posix + + include Puppet::Util::POSIX + include Puppet::Util::Warnings + + require 'etc' + + def id2name(id) + return id.to_s if id.is_a?(Symbol) + return nil if id > Puppet[:maximum_uid].to_i + + begin + user = Etc.getpwuid(id) + rescue TypeError + return nil + rescue ArgumentError + return nil + end + + if user.uid == "" + return nil + else + return user.name + end + end + + def is_owner_insync?(current, should) + should.each do |value| + if value =~ /^\d+$/ + uid = Integer(value) + elsif value.is_a?(String) + fail "Could not find user #{value}" unless uid = uid(value) + else + uid = value + end + + return true if uid == current + end + + unless Puppet.features.root? + warnonce "Cannot manage ownership unless running as root" + return true + end + + false + end + + # Determine if the user is valid, and if so, return the UID + def validuser?(value) + Integer(value) rescue uid(value) || false + end + + def retrieve(resource) + unless stat = resource.stat(false) + return :absent + end + + currentvalue = stat.uid + + # On OS X, files that are owned by -2 get returned as really + # large UIDs instead of negative ones. This isn't a Ruby bug, + # it's an OS X bug, since it shows up in perl, too. 
+ if currentvalue > Puppet[:maximum_uid].to_i + self.warning "Apparently using negative UID (#{currentvalue}) on a platform that does not consistently handle them" + currentvalue = :silly + end + + currentvalue + end + + def sync(path, links, should) + # Set our method appropriately, depending on links. + if links == :manage + method = :lchown + else + method = :chown + end + + uid = nil + should.each do |user| + break if uid = validuser?(user) + end + + raise Puppet::Error, "Could not find user(s) #{should.join(",")}" unless uid + + begin + File.send(method, uid, nil, path) + rescue => detail + raise Puppet::Error, "Failed to set owner to '#{uid}': #{detail}" + end + + :file_changed + end +end diff --git a/mcollective/lib/puppet/provider/file/win32.rb b/mcollective/lib/puppet/provider/file/win32.rb new file mode 100644 index 000000000..21e7ca974 --- /dev/null +++ b/mcollective/lib/puppet/provider/file/win32.rb @@ -0,0 +1,72 @@ +Puppet::Type.type(:file).provide :microsoft_windows do + desc "Uses Microsoft Windows functionality to manage file's users and rights." + + confine :feature => :microsoft_windows + + include Puppet::Util::Warnings + + require 'sys/admin' if Puppet.features.microsoft_windows? + + def id2name(id) + return id.to_s if id.is_a?(Symbol) + return nil if id > Puppet[:maximum_uid].to_i + # should translate ID numbers to usernames + id + end + + def is_owner_insync?(current, should) + should.each do |value| + if value =~ /^\d+$/ + uid = Integer(value) + elsif value.is_a?(String) + fail "Could not find user #{value}" unless uid = uid(value) + else + uid = value + end + + return true if uid == current + end + + unless Puppet.features.root? + warnonce "Cannot manage ownership unless running as root" + return true + end + + false + end + + # Determine if the user is valid, and if so, return the UID + def validuser?(value) + info "Is '#{value}' a valid user?" + return 0 + begin + number = Integer(value) + return number + rescue ArgumentError + number = nil + end + (number = uid(value)) && number + end + + def retrieve(resource) + unless stat = resource.stat(false) + return :absent + end + + currentvalue = stat.uid + + # On OS X, files that are owned by -2 get returned as really + # large UIDs instead of negative ones. This isn't a Ruby bug, + # it's an OS X bug, since it shows up in perl, too. + if currentvalue > Puppet[:maximum_uid].to_i + self.warning "Apparently using negative UID (#{currentvalue}) on a platform that does not consistently handle them" + currentvalue = :silly + end + + currentvalue + end + + def sync(path, links, should) + info("should set '%s'%%owner to '%s'" % [path, should]) + end +end diff --git a/mcollective/lib/puppet/provider/group/directoryservice.rb b/mcollective/lib/puppet/provider/group/directoryservice.rb new file mode 100644 index 000000000..97fee883d --- /dev/null +++ b/mcollective/lib/puppet/provider/group/directoryservice.rb @@ -0,0 +1,26 @@ +# Created by Jeff McCune on 2007-07-22 +# Copyright (c) 2007. All rights reserved. +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation (version 2 of the License) +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. 
+# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston MA 02110-1301 USA + +require 'puppet/provider/nameservice/directoryservice' + +Puppet::Type.type(:group).provide :directoryservice, :parent => Puppet::Provider::NameService::DirectoryService do + desc "Group management using DirectoryService on OS X. + + " + + commands :dscl => "/usr/bin/dscl" + confine :operatingsystem => :darwin + defaultfor :operatingsystem => :darwin + has_feature :manages_members +end diff --git a/mcollective/lib/puppet/provider/group/groupadd.rb b/mcollective/lib/puppet/provider/group/groupadd.rb new file mode 100644 index 000000000..bcc08d9f7 --- /dev/null +++ b/mcollective/lib/puppet/provider/group/groupadd.rb @@ -0,0 +1,32 @@ +require 'puppet/provider/nameservice/objectadd' + +Puppet::Type.type(:group).provide :groupadd, :parent => Puppet::Provider::NameService::ObjectAdd do + desc "Group management via `groupadd` and its ilk. + + The default for most platforms + + " + + commands :add => "groupadd", :delete => "groupdel", :modify => "groupmod" + + has_feature :system_groups + + verify :gid, "GID must be an integer" do |value| + value.is_a? Integer + end + + def addcmd + cmd = [command(:add)] + if gid = @resource.should(:gid) + unless gid == :absent + cmd << flag(:gid) << gid + end + end + cmd << "-o" if @resource.allowdupe? + cmd << "-r" if @resource.system? + cmd << @resource[:name] + + cmd + end +end + diff --git a/mcollective/lib/puppet/provider/group/ldap.rb b/mcollective/lib/puppet/provider/group/ldap.rb new file mode 100644 index 000000000..86c72a5d3 --- /dev/null +++ b/mcollective/lib/puppet/provider/group/ldap.rb @@ -0,0 +1,47 @@ +require 'puppet/provider/ldap' + +Puppet::Type.type(:group).provide :ldap, :parent => Puppet::Provider::Ldap do + desc "Group management via `ldap`. + + This provider requires that you have valid values for all of the + ldap-related settings, including `ldapbase`. You will also almost + definitely need settings for `ldapuser` and `ldappassword`, so that + your clients can write to ldap. + + Note that this provider will automatically generate a GID for you if you do + not specify one, but it is a potentially expensive operation, as it + iterates across all existing groups to pick the appropriate next one. + + " + + confine :true => Puppet.features.ldap?, :false => (Puppet[:ldapuser] == "") + + # We're mapping 'members' here because we want to make it + # easy for the ldap user provider to manage groups. This + # way it can just use the 'update' method in the group manager, + # whereas otherwise it would need to replicate that code. + manages(:posixGroup).at("ou=Groups").and.maps :name => :cn, :gid => :gidNumber, :members => :memberUid + + # Find the next gid after the current largest gid. + provider = self + manager.generates(:gidNumber).with do + largest = 500 + if existing = provider.manager.search + existing.each do |hash| + next unless value = hash[:gid] + num = value[0].to_i + largest = num if num > largest + end + end + largest + 1 + end + + # Convert a group name to an id. + def self.name2id(group) + return nil unless result = manager.search("cn=#{group}") and result.length > 0 + + # Only use the first result. 
+ group = result[0] + gid = group[:gid][0] + end +end diff --git a/mcollective/lib/puppet/provider/group/pw.rb b/mcollective/lib/puppet/provider/group/pw.rb new file mode 100644 index 000000000..a054d1ff1 --- /dev/null +++ b/mcollective/lib/puppet/provider/group/pw.rb @@ -0,0 +1,34 @@ +require 'puppet/provider/nameservice/pw' + +Puppet::Type.type(:group).provide :pw, :parent => Puppet::Provider::NameService::PW do + desc "Group management via `pw`. + + Only works on FreeBSD. + + " + + commands :pw => "/usr/sbin/pw" + defaultfor :operatingsystem => :freebsd + + verify :gid, "GID must be an integer" do |value| + value.is_a? Integer + end + + def addcmd + cmd = [command(:pw), "groupadd", @resource[:name]] + if gid = @resource.should(:gid) + unless gid == :absent + cmd << flag(:gid) << gid + end + end + + # Apparently, contrary to the man page, groupadd does + # not accept -o. + #if @parent[:allowdupe] == :true + # cmd << "-o" + #end + + cmd + end +end + diff --git a/mcollective/lib/puppet/provider/host/parsed.rb b/mcollective/lib/puppet/provider/host/parsed.rb new file mode 100644 index 000000000..4f15eff3f --- /dev/null +++ b/mcollective/lib/puppet/provider/host/parsed.rb @@ -0,0 +1,73 @@ +require 'puppet/provider/parsedfile' + +hosts = nil +case Facter.value(:operatingsystem) +when "Solaris"; hosts = "/etc/inet/hosts" +else + hosts = "/etc/hosts" +end + + + Puppet::Type.type(:host).provide( + :parsed, + :parent => Puppet::Provider::ParsedFile, + :default_target => hosts, + + :filetype => :flat +) do + confine :exists => hosts + + text_line :comment, :match => /^#/ + text_line :blank, :match => /^\s*$/ + + record_line :parsed, :fields => %w{ip name host_aliases}, + :optional => %w{host_aliases}, + :rts => true do |line| + hash = {} + if line.sub!(/^(\S+)\s+(\S+)\s*/, '') + hash[:ip] = $1 + hash[:name] = $2 + + if line.empty? + hash[:host_aliases] = [] + else + line.sub!(/\s*/, '') + line.sub!(/^([^#]+)\s*/) do |value| + aliases = $1 + unless aliases =~ /^\s*$/ + hash[:host_aliases] = aliases.split(/\s+/) + end + + "" + end + end + else + raise Puppet::Error, "Could not match '#{line}'" + end + + hash[:host_aliases] = [] if hash[:host_aliases] == "" + + return hash + end + + # Convert the current object into a host-style string. + def self.to_line(hash) + return super unless hash[:record_type] == :parsed + [:ip, :name].each do |n| + raise ArgumentError, "#{n} is a required attribute for hosts" unless hash[n] and hash[n] != :absent + end + + str = "#{hash[:ip]}\t#{hash[:name]}" + + if hash.include? :host_aliases and !hash[:host_aliases].empty? + if hash[:host_aliases].is_a? Array + str += "\t#{hash[:host_aliases].join("\t")}" + else + raise ArgumentError, "Host aliases must be specified as an array" + end + end + + str + end +end + diff --git a/mcollective/lib/puppet/provider/ldap.rb b/mcollective/lib/puppet/provider/ldap.rb new file mode 100644 index 000000000..b7d6baa9e --- /dev/null +++ b/mcollective/lib/puppet/provider/ldap.rb @@ -0,0 +1,133 @@ +require 'puppet/provider' + +# The base class for LDAP providers. +class Puppet::Provider::Ldap < Puppet::Provider + require 'puppet/util/ldap/manager' + + class << self + attr_reader :manager + end + + # Look up all instances at our location. Yay. + def self.instances + return [] unless list = manager.search + + list.collect { |entry| new(entry) } + end + + # Specify the ldap manager for this provider, which is + # used to figure out how we actually interact with ldap. 
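For reference, the group ldap provider earlier in this patch drives this method with:

manages(:posixGroup).at("ou=Groups").and.maps :name    => :cn,
                                              :gid     => :gidNumber,
                                              :members => :memberUid
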
+ def self.manages(*args) + @manager = Puppet::Util::Ldap::Manager.new + @manager.manages(*args) + + # Set up our getter/setter methods. + mk_resource_methods + @manager + end + + # Query all of our resources from ldap. + def self.prefetch(resources) + resources.each do |name, resource| + if result = manager.find(name) + result[:ensure] = :present + resource.provider = new(result) + else + resource.provider = new(:ensure => :absent) + end + end + end + + def manager + self.class.manager + end + + def create + @property_hash[:ensure] = :present + self.class.resource_type.validproperties.each do |property| + if val = resource.should(property) + @property_hash[property] = val + end + end + end + + def delete + @property_hash[:ensure] = :absent + end + + def exists? + @property_hash[:ensure] != :absent + end + + # Apply our changes to ldap, yo. + def flush + # Just call the manager's update() method. + @property_hash.delete(:groups) + @ldap_properties.delete(:groups) + manager.update(name, ldap_properties, properties) + @property_hash.clear + @ldap_properties.clear + end + + def initialize(*args) + raise(Puppet::DevError, "No LDAP Configuration defined for #{self.class}") unless self.class.manager + raise(Puppet::DevError, "Invalid LDAP Configuration defined for #{self.class}") unless self.class.manager.valid? + super + + @property_hash = @property_hash.inject({}) do |result, ary| + param, values = ary + + # Skip any attributes we don't manage. + next result unless self.class.resource_type.valid_parameter?(param) + + paramclass = self.class.resource_type.attrclass(param) + + unless values.is_a?(Array) + result[param] = values + next result + end + + # Only use the first value if the attribute class doesn't manage + # arrays of values. + if paramclass.superclass == Puppet::Parameter or paramclass.array_matching == :first + result[param] = values[0] + else + result[param] = values + end + result + end + + # Make a duplicate, so that we have a copy for comparison + # at the end. + @ldap_properties = @property_hash.dup + end + + # Return the current state of ldap. + def ldap_properties + @ldap_properties.dup + end + + # Return (and look up if necessary) the desired state. + def properties + if @property_hash.empty? + @property_hash = query || {:ensure => :absent} + @property_hash[:ensure] = :absent if @property_hash.empty? + end + @property_hash.dup + end + + # Collect the current attributes from ldap. Returns + # the results, but also stores the attributes locally, + # so we have something to compare against when we update. + # LAK:NOTE This is normally not used, because we rely on prefetching. + def query + # Use the module function. + unless attributes = manager.find(name) + @ldap_properties = {} + return nil + end + + @ldap_properties = attributes + @ldap_properties.dup + end +end diff --git a/mcollective/lib/puppet/provider/macauthorization/macauthorization.rb b/mcollective/lib/puppet/provider/macauthorization/macauthorization.rb new file mode 100644 index 000000000..fdf9fd18c --- /dev/null +++ b/mcollective/lib/puppet/provider/macauthorization/macauthorization.rb @@ -0,0 +1,313 @@ +require 'facter' +require 'facter/util/plist' +require 'puppet' +require 'tempfile' + +Puppet::Type.type(:macauthorization).provide :macauthorization, :parent => Puppet::Provider do + + desc "Manage Mac OS X authorization database rules and rights. 
+ + " + + commands :security => "/usr/bin/security" + commands :sw_vers => "/usr/bin/sw_vers" + + confine :operatingsystem => :darwin + + # This should be confined based on macosx_productversion once + # http://projects.reductivelabs.com/issues/show/1796 + # is resolved. + if FileTest.exists?("/usr/bin/sw_vers") + product_version = sw_vers "-productVersion" + + confine :true => if /^10.5/.match(product_version) or /^10.6/.match(product_version) + true + end + end + + defaultfor :operatingsystem => :darwin + + AuthDB = "/etc/authorization" + + @rights = {} + @rules = {} + @parsed_auth_db = {} + @comment = "" # Not implemented yet. Is there any real need to? + + # This map exists due to the use of hyphens and reserved words in + # the authorization schema. + PuppetToNativeAttributeMap = { :allow_root => "allow-root", + :authenticate_user => "authenticate-user", + :auth_class => "class", + :k_of_n => "k-of-n", + :session_owner => "session-owner", } + + class << self + attr_accessor :parsed_auth_db + attr_accessor :rights + attr_accessor :rules + attr_accessor :comments # Not implemented yet. + + def prefetch(resources) + self.populate_rules_rights + end + + def instances + if self.parsed_auth_db == {} + self.prefetch(nil) + end + self.parsed_auth_db.collect do |k,v| + new(:name => k) + end + end + + def populate_rules_rights + auth_plist = Plist::parse_xml(AuthDB) + raise Puppet::Error.new("Cannot parse: #{AuthDB}") if not auth_plist + self.rights = auth_plist["rights"].dup + self.rules = auth_plist["rules"].dup + self.parsed_auth_db = self.rights.dup + self.parsed_auth_db.merge!(self.rules.dup) + end + + end + + # standard required provider instance methods + + def initialize(resource) + if self.class.parsed_auth_db == {} + self.class.prefetch(resource) + end + super + end + + + def create + # we just fill the @property_hash in here and let the flush method + # deal with it rather than repeating code. + new_values = {} + validprops = Puppet::Type.type(resource.class.name).validproperties + validprops.each do |prop| + next if prop == :ensure + if value = resource.should(prop) and value != "" + new_values[prop] = value + end + end + @property_hash = new_values.dup + end + + def destroy + # We explicitly delete here rather than in the flush method. + case resource[:auth_type] + when :right + destroy_right + when :rule + destroy_rule + else + raise Puppet::Error.new("Must specify auth_type when destroying.") + end + end + + def exists? + !!self.class.parsed_auth_db.has_key?(resource[:name]) + end + + + def flush + # deletion happens in the destroy methods + if resource[:ensure] != :absent + case resource[:auth_type] + when :right + flush_right + when :rule + flush_rule + else + raise Puppet::Error.new("flush requested for unknown type.") + end + @property_hash.clear + end + end + + + # utility methods below + + def destroy_right + security "authorizationdb", :remove, resource[:name] + end + + def destroy_rule + authdb = Plist::parse_xml(AuthDB) + authdb_rules = authdb["rules"].dup + if authdb_rules[resource[:name]] + begin + authdb["rules"].delete(resource[:name]) + Plist::Emit.save_plist(authdb, AuthDB) + rescue Errno::EACCES => e + raise Puppet::Error.new("Error saving #{AuthDB}: #{e}") + end + end + end + + def flush_right + # first we re-read the right just to make sure we're in sync for + # values that weren't specified in the manifest. As we're supplying + # the whole plist when specifying the right it seems safest to be + # paranoid given the low cost of quering the db once more. 
+ cmds = [] + cmds << :security << "authorizationdb" << "read" << resource[:name] + output = execute(cmds, :combine => false) + current_values = Plist::parse_xml(output) + current_values ||= {} + specified_values = convert_plist_to_native_attributes(@property_hash) + + # take the current values, merge the specified values to obtain a + # complete description of the new values. + new_values = current_values.merge(specified_values) + set_right(resource[:name], new_values) + end + + def flush_rule + authdb = Plist::parse_xml(AuthDB) + authdb_rules = authdb["rules"].dup + current_values = {} + current_values = authdb_rules[resource[:name]] if authdb_rules[resource[:name]] + specified_values = convert_plist_to_native_attributes(@property_hash) + new_values = current_values.merge(specified_values) + set_rule(resource[:name], new_values) + end + + def set_right(name, values) + # Both creates and modifies rights as it simply overwrites them. + # The security binary only allows for writes using stdin, so we + # dump the values to a tempfile. + values = convert_plist_to_native_attributes(values) + tmp = Tempfile.new('puppet_macauthorization') + begin + Plist::Emit.save_plist(values, tmp.path) + cmds = [] + cmds << :security << "authorizationdb" << "write" << name + + output = execute( + cmds, :combine => false, + + :stdinfile => tmp.path.to_s) + rescue Errno::EACCES => e + raise Puppet::Error.new("Cannot save right to #{tmp.path}: #{e}") + ensure + tmp.close + tmp.unlink + end + end + + def set_rule(name, values) + # Both creates and modifies rules as it overwrites the entry in the + # rules dictionary. Unfortunately the security binary doesn't + # support modifying rules at all so we have to twiddle the whole + # plist... :( See Apple Bug #6386000 + values = convert_plist_to_native_attributes(values) + authdb = Plist::parse_xml(AuthDB) + authdb["rules"][name] = values + + begin + Plist::Emit.save_plist(authdb, AuthDB) + rescue + raise Puppet::Error.new("Error writing to: #{AuthDB}") + end + end + + def convert_plist_to_native_attributes(propertylist) + # This mainly converts the keys from the puppet attributes to the + # 'native' ones, but also enforces that the keys are all Strings + # rather than Symbols so that any merges of the resultant Hash are + # sane. The exception is booleans, where we coerce to a proper bool + # if they come in as a symbol. + newplist = {} + propertylist.each_pair do |key, value| + next if key == :ensure # not part of the auth db schema. + next if key == :auth_type # not part of the auth db schema. 
+ case value + when true, :true + value = true + when false, :false + value = false + end + new_key = key + if PuppetToNativeAttributeMap.has_key?(key) + new_key = PuppetToNativeAttributeMap[key].to_s + elsif not key.is_a?(String) + new_key = key.to_s + end + newplist[new_key] = value + end + newplist + end + + def retrieve_value(resource_name, attribute) + # We set boolean values to symbols when retrieving values + raise Puppet::Error.new("Cannot find #{resource_name} in auth db") if not self.class.parsed_auth_db.has_key?(resource_name) + + if PuppetToNativeAttributeMap.has_key?(attribute) + native_attribute = PuppetToNativeAttributeMap[attribute] + else + native_attribute = attribute.to_s + end + + if self.class.parsed_auth_db[resource_name].has_key?(native_attribute) + value = self.class.parsed_auth_db[resource_name][native_attribute] + case value + when true, :true + value = :true + when false, :false + value = :false + end + + @property_hash[attribute] = value + return value + else + @property_hash.delete(attribute) + return "" # so ralsh doesn't display it. + end + end + + + # property methods below + # + # We define them all dynamically apart from auth_type which is a special + # case due to not being in the actual authorization db schema. + + properties = [ :allow_root, :authenticate_user, :auth_class, :comment, + :group, :k_of_n, :mechanisms, :rule, :session_owner, + :shared, :timeout, :tries ] + + properties.each do |field| + define_method(field.to_s) do + retrieve_value(resource[:name], field) + end + + define_method(field.to_s + "=") do |value| + @property_hash[field] = value + end + end + + def auth_type + if resource.should(:auth_type) != nil + return resource.should(:auth_type) + elsif self.exists? + # this is here just for ralsh, so it can work out what type it is. + if self.class.rights.has_key?(resource[:name]) + return :right + elsif self.class.rules.has_key?(resource[:name]) + return :rule + else + raise Puppet::Error.new("#{resource[:name]} is unknown type.") + end + else + raise Puppet::Error.new("auth_type required for new resources.") + end + end + + def auth_type=(value) + @property_hash[:auth_type] = value + end + +end diff --git a/mcollective/lib/puppet/provider/mailalias/aliases.rb b/mcollective/lib/puppet/provider/mailalias/aliases.rb new file mode 100755 index 000000000..35c2f98fe --- /dev/null +++ b/mcollective/lib/puppet/provider/mailalias/aliases.rb @@ -0,0 +1,44 @@ +require 'puppet/provider/parsedfile' + + + Puppet::Type.type(:mailalias).provide( + :aliases, + :parent => Puppet::Provider::ParsedFile, + :default_target => "/etc/aliases", + + :filetype => :flat +) do + text_line :comment, :match => /^#/ + text_line :blank, :match => /^\s*$/ + + record_line :aliases, :fields => %w{name recipient}, :separator => /\s*:\s*/, :block_eval => :instance do + def post_parse(record) + # LAK:NOTE See http://snurl.com/21zf8 [groups_google_com] + # It's not sufficient to assign to an existing hash. 
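To make the parsing hooks below concrete, this is how a typical /etc/aliases entry flows through process and post_parse; the addresses are invented.

# Illustrative parse of a single aliases entry: name before the first colon,
# recipients split on commas with surrounding quotes stripped.
line = 'webmaster: "|/usr/bin/procmail", root@example.com'
name, rest = line.split(":", 2).map { |s| s.strip }
recipients = rest.split(/\s*,\s*/).collect { |d| d.gsub(/^['"]|['"]$/, '') }
# name       => "webmaster"
# recipients => ["|/usr/bin/procmail", "root@example.com"]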
+ recipient = record[:recipient].split(/\s*,\s*/).collect { |d| d.gsub(/^['"]|['"]$/, '') } + record[:recipient] = recipient + record + end + + def process(line) + ret = {} + records = line.split(':',2) + ret[:name] = records[0].strip + ret[:recipient] = records[1].strip + ret + end + + def to_line(record) + dest = record[:recipient].collect do |d| + # Quote aliases that have non-alpha chars + if d =~ /[^-\w@.]/ + '"%s"' % d + else + d + end + end.join(",") + "#{record[:name]}: #{dest}" + end + end +end + diff --git a/mcollective/lib/puppet/provider/maillist/mailman.rb b/mcollective/lib/puppet/provider/maillist/mailman.rb new file mode 100755 index 000000000..e070a25dd --- /dev/null +++ b/mcollective/lib/puppet/provider/maillist/mailman.rb @@ -0,0 +1,108 @@ +require 'puppet/provider/parsedfile' + +Puppet::Type.type(:maillist).provide(:mailman) do + if [ "CentOS", "RedHat", "Fedora" ].any? { |os| Facter.value(:operatingsystem) == os } + commands :list_lists => "/usr/lib/mailman/bin/list_lists", :rmlist => "/usr/lib/mailman/bin/rmlist", :newlist => "/usr/lib/mailman/bin/newlist" + commands :mailman => "/usr/lib/mailman/mail/mailman" + else + # This probably won't work for non-Debian installs, but this path is sure not to be in the PATH. + commands :list_lists => "list_lists", :rmlist => "rmlist", :newlist => "newlist" + commands :mailman => "/var/lib/mailman/mail/mailman" + end + + mk_resource_methods + + # Return a list of existing mailman instances. + def self.instances + list_lists('--bare'). + split("\n"). + collect { |line| new(:ensure => :present, :name => line.strip) } + end + + # Prefetch our list list, yo. + def self.prefetch(lists) + instances.each do |prov| + if list = lists[prov.name] || lists[prov.name.downcase] + list.provider = prov + end + end + end + + def aliases + mailman = self.class.command(:mailman) + name = self.name.downcase + aliases = {name => "| #{mailman} post #{name}"} + %w{admin bounces confirm join leave owner request subscribe unsubscribe}.each do |address| + aliases["#{name}-#{address}"] = "| #{mailman} #{address} #{name}" + end + aliases + end + + # Create the list. + def create + args = [] + if val = @resource[:mailserver] + args << "--emailhost" << val + end + if val = @resource[:webserver] + args << "--urlhost" << val + end + + args << self.name + if val = @resource[:admin] + args << val + else + raise ArgumentError, "Mailman lists require an administrator email address" + end + if val = @resource[:password] + args << val + else + raise ArgumentError, "Mailman lists require an administrator password" + end + newlist(*args) + end + + # Delete the list. + def destroy(purge = false) + args = [] + args << "--archives" if purge + args << self.name + rmlist(*args) + end + + # Does our list exist already? + def exists? + properties[:ensure] != :absent + end + + # Clear out the cached values. + def flush + @property_hash.clear + end + + # Look up the current status. + def properties + if @property_hash.empty? + @property_hash = query || {:ensure => :absent} + @property_hash[:ensure] = :absent if @property_hash.empty? + end + @property_hash.dup + end + + # Remove the list and its archives. + def purge + destroy(true) + end + + # Pull the current state of the list from the full list. We're + # getting some double entendre here.... 
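For a list named "dev", the aliases method above yields a hash along these lines, assuming the Red Hat-style /usr/lib/mailman layout selected at the top of the file.

# Illustrative return value of the aliases method for a list named "dev".
{
  "dev"         => "| /usr/lib/mailman/mail/mailman post dev",
  "dev-admin"   => "| /usr/lib/mailman/mail/mailman admin dev",
  "dev-bounces" => "| /usr/lib/mailman/mail/mailman bounces dev",
  # ... plus one entry each for -confirm, -join, -leave, -owner, -request,
  #     -subscribe and -unsubscribe.
}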
+ def query + self.class.instances.each do |list| + if list.name == self.name or list.name.downcase == self.name + return list.properties + end + end + nil + end +end + diff --git a/mcollective/lib/puppet/provider/mcx/mcxcontent.rb b/mcollective/lib/puppet/provider/mcx/mcxcontent.rb new file mode 100644 index 000000000..cb5adc698 --- /dev/null +++ b/mcollective/lib/puppet/provider/mcx/mcxcontent.rb @@ -0,0 +1,205 @@ +#-- +# Copyright (C) 2008 Jeffrey J McCune. + +# This program and entire repository is free software; you can +# redistribute it and/or modify it under the terms of the GNU +# General Public License as published by the Free Software +# Foundation; either version 2 of the License, or any later version. + +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. + +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + +# Author: Jeff McCune + +require 'tempfile' + +Puppet::Type.type(:mcx).provide :mcxcontent, :parent => Puppet::Provider do + + desc "MCX Settings management using DirectoryService on OS X. + + This provider manages the entire MCXSettings attribute available + to some directory services nodes. This management is 'all or nothing' + in that discrete application domain key value pairs are not managed + by this provider. + + It is recommended to use WorkGroup Manager to configure Users, Groups, + Computers, or ComputerLists, then use 'ralsh mcx' to generate a puppet + manifest from the resulting configuration. + + Original Author: Jeff McCune (mccune.jeff@gmail.com) + +" + + # This provides a mapping of puppet types to DirectoryService + # type strings. + TypeMap = { + :user => "Users", + :group => "Groups", + :computer => "Computers", + :computerlist => "ComputerLists", + } + + class MCXContentProviderException < Exception + + end + + commands :dscl => "/usr/bin/dscl" + confine :operatingsystem => :darwin + defaultfor :operatingsystem => :darwin + + # self.instances is all important. + # This is the only class method, it returns + # an array of instances of this class. + def self.instances + mcx_list = [] + for ds_type in TypeMap.keys + ds_path = "/Local/Default/#{TypeMap[ds_type]}" + output = dscl 'localhost', '-list', ds_path + member_list = output.split + for ds_name in member_list + content = mcxexport(ds_type, ds_name) + if content.empty? + Puppet.debug "/#{TypeMap[ds_type]}/#{ds_name} has no MCX data." + else + # This node has MCX data. + + rsrc = self.new( + :name => "/#{TypeMap[ds_type]}/#{ds_name}", + :ds_type => ds_type, + :ds_name => ds_name, + + :content => content) + mcx_list << rsrc + end + end + end + mcx_list + end + + private + + # mcxexport is used by instances, and therefore + # a class method. 
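The ds_path strings used by mcxexport and mcximport below are always built the same way from TypeMap; a minimal standalone sketch of that construction, reusing the TypeMap values from above (the record name here is only an example).

# Path construction mirrored from the methods below; TYPE_MAP is a local
# copy of the provider's TypeMap.
TYPE_MAP = { :user => "Users", :group => "Groups",
             :computer => "Computers", :computerlist => "ComputerLists" }

def ds_path_for(ds_type, ds_name = nil)
  path = "/Local/Default/#{TYPE_MAP[ds_type]}"
  ds_name ? "#{path}/#{ds_name}" : path
end

# ds_path_for(:group)           # => "/Local/Default/Groups"
# ds_path_for(:group, "admin")  # => "/Local/Default/Groups/admin"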
+ def self.mcxexport(ds_type, ds_name) + ds_t = TypeMap[ds_type] + ds_n = ds_name.to_s + ds_path = "/Local/Default/#{ds_t}/#{ds_n}" + dscl 'localhost', '-mcxexport', ds_path + end + + def mcximport(ds_type, ds_name, val) + ds_t = TypeMap[ds_type] + ds_n = ds_name.to_s + ds_path = "/Local/Default/#{ds_t}/#{ds_name}" + + tmp = Tempfile.new('puppet_mcx') + begin + tmp << val + tmp.flush + dscl 'localhost', '-mcximport', ds_path, tmp.path + ensure + tmp.close + tmp.unlink + end + end + + # Given the resource name string, parse ds_type out. + def parse_type(name) + tmp = name.split('/')[1] + if ! tmp.is_a? String + raise MCXContentProviderException, + "Coult not parse ds_type from resource name '#{name}'. Specify with ds_type parameter." + end + # De-pluralize and downcase. + tmp = tmp.chop.downcase.to_sym + if not TypeMap.keys.member? tmp + raise MCXContentProviderException, + "Coult not parse ds_type from resource name '#{name}'. Specify with ds_type parameter." + end + tmp + end + + # Given the resource name string, parse ds_name out. + def parse_name(name) + ds_name = name.split('/')[2] + if ! ds_name.is_a? String + raise MCXContentProviderException, + "Could not parse ds_name from resource name '#{name}'. Specify with ds_name parameter." + end + ds_name + end + + # Gather ds_type and ds_name from resource or + # parse it out of the name. + # This is a private instance method, not a class method. + def get_dsparams + ds_type = resource[:ds_type] + ds_type ||= parse_type(resource[:name]) + raise MCXContentProviderException unless TypeMap.keys.include? ds_type.to_sym + + ds_name = resource[:ds_name] + ds_name ||= parse_name(resource[:name]) + + rval = { + :ds_type => ds_type.to_sym, + :ds_name => ds_name, + } + + return rval + + end + + public + + def create + self.content=(resource[:content]) + end + + def destroy + ds_parms = get_dsparams + ds_t = TypeMap[ds_parms[:ds_type]] + ds_n = ds_parms[:ds_name].to_s + ds_path = "/Local/Default/#{ds_t}/#{ds_n}" + + dscl 'localhost', '-mcxdelete', ds_path + end + + def exists? + # JJM Just re-use the content method and see if it's empty. + begin + mcx = content + rescue Puppet::ExecutionFailure => e + return false + end + has_mcx = ! mcx.empty? + end + + def content + ds_parms = get_dsparams + + mcx = self.class.mcxexport( + ds_parms[:ds_type], + + ds_parms[:ds_name]) + mcx + end + + def content=(value) + # dscl localhost -mcximport + ds_parms = get_dsparams + + mcx = mcximport( + ds_parms[:ds_type], + ds_parms[:ds_name], + + resource[:content]) + mcx + end + +end diff --git a/mcollective/lib/puppet/provider/mount.rb b/mcollective/lib/puppet/provider/mount.rb new file mode 100644 index 000000000..65296eed2 --- /dev/null +++ b/mcollective/lib/puppet/provider/mount.rb @@ -0,0 +1,49 @@ +# Created by Luke Kanies on 2006-11-12. +# Copyright (c) 2006. All rights reserved. + +require 'puppet' + +# A module just to store the mount/unmount methods. Individual providers +# still need to add the mount commands manually. +module Puppet::Provider::Mount + # This only works when the mount point is synced to the fstab. + def mount + # Manually pass the mount options in, since some OSes *cough*OS X*cough* don't + # read from /etc/fstab but still want to use this type. 
+ args = [] + args << "-o" << self.options if self.options and self.options != :absent + args << resource[:name] + + mountcmd(*args) + case get(:ensure) + when :absent; set(:ensure => :ghost) + when :unmounted; set(:ensure => :mounted) + end + end + + def remount + info "Remounting" + if resource[:remounts] == :true + mountcmd "-o", "remount", resource[:name] + else + unmount + mount + end + end + + # This only works when the mount point is synced to the fstab. + def unmount + umount(resource[:name]) + + # Update property hash for future queries (e.g. refresh is called) + case get(:ensure) + when :mounted; set(:ensure => :unmounted) + when :ghost; set(:ensure => :absent) + end + end + + # Is the mount currently mounted? + def mounted? + [:mounted, :ghost].include?(get(:ensure)) + end +end diff --git a/mcollective/lib/puppet/provider/mount/parsed.rb b/mcollective/lib/puppet/provider/mount/parsed.rb new file mode 100755 index 000000000..7c3f41bbd --- /dev/null +++ b/mcollective/lib/puppet/provider/mount/parsed.rb @@ -0,0 +1,106 @@ +require 'puppet/provider/parsedfile' +require 'puppet/provider/mount' + +fstab = nil +case Facter.value(:operatingsystem) +when "Solaris"; fstab = "/etc/vfstab" +else + fstab = "/etc/fstab" +end + +Puppet::Type.type(:mount).provide( + :parsed, + :parent => Puppet::Provider::ParsedFile, + :default_target => fstab, + :filetype => :flat +) do + include Puppet::Provider::Mount + + commands :mountcmd => "mount", :umount => "umount" + + case Facter.value(:operatingsystem) + when "Solaris" + @fields = [:device, :blockdevice, :name, :fstype, :pass, :atboot, :options] + else + @fields = [:device, :name, :fstype, :options, :dump, :pass] + @fielddefaults = [ nil ] * 4 + [ "0", "2" ] + end + + text_line :comment, :match => /^\s*#/ + text_line :blank, :match => /^\s*$/ + + optional_fields = @fields - [:device, :name, :blockdevice] + mandatory_fields = @fields - optional_fields + + # fstab will ignore lines that have fewer than the mandatory number of columns, + # so we should, too. + field_pattern = '(\s*(?>\S+))' + text_line :incomplete, :match => /^(?!#{field_pattern}{#{mandatory_fields.length}})/ + + record_line self.name, :fields => @fields, :separator => /\s+/, :joiner => "\t", :optional => optional_fields + + # Every entry in fstab is :unmounted until we can prove different + def self.prefetch_hook(target_records) + target_records.collect do |record| + record[:ensure] = :unmounted if record[:record_type] == :parsed + record + end + end + + def self.prefetch(resources = nil) + # Get providers for all resources the user defined and that match + # a record in /etc/fstab. + super + # We need to do two things now: + # - Update ensure from :unmounted to :mounted if the resource is mounted + # - Check for mounted devices that are not in fstab and + # set ensure to :ghost (if the user wants to add an entry + # to fstab we need to know if the device was mounted before) + mountinstances.each do |hash| + if mount = resources[hash[:name]] + case mount.provider.get(:ensure) + when :absent # Mount not in fstab + mount.provider.set(:ensure => :ghost) + when :unmounted # Mount in fstab + mount.provider.set(:ensure => :mounted) + end + end + end + end + + def self.mountinstances + # XXX: Will not work for mount points that have spaces in path (does fstab support this anyways?) 
+ regex = case Facter.value(:operatingsystem) + when "Darwin" + / on (?:\/private\/var\/automount)?(\S*)/ + when "Solaris", "HP-UX" + /^(\S*) on / + when "AIX" + /^(?:\S*\s+\S+\s+)(\S+)/ + else + / on (\S*)/ + end + instances = [] + mount_output = mountcmd.split("\n") + if mount_output.length >= 2 and mount_output[1] =~ /^[- \t]*$/ + # On some OSes (e.g. AIX) mount output begins with a header line + # followed by a line consisting of dashes and whitespace. + # Discard these two lines. + mount_output[0..1] = [] + end + mount_output.each do |line| + if match = regex.match(line) and name = match.captures.first + instances << {:name => name, :mounted => :yes} # Only :name is important here + else + raise Puppet::Error, "Could not understand line #{line} from mount output" + end + end + instances + end + + def flush + needs_mount = @property_hash.delete(:needs_mount) + super + mount if needs_mount + end +end diff --git a/mcollective/lib/puppet/provider/naginator.rb b/mcollective/lib/puppet/provider/naginator.rb new file mode 100644 index 000000000..17cc24086 --- /dev/null +++ b/mcollective/lib/puppet/provider/naginator.rb @@ -0,0 +1,66 @@ +# Created by Luke Kanies on 2007-11-27. +# Copyright (c) 2007. All rights reserved. + +require 'puppet' +require 'puppet/provider/parsedfile' +require 'puppet/external/nagios' + +# The base class for all Naginator providers. +class Puppet::Provider::Naginator < Puppet::Provider::ParsedFile + NAME_STRING = "## --PUPPET_NAME-- (called '_naginator_name' in the manifest)" + # Retrieve the associated class from Nagios::Base. + def self.nagios_type + unless @nagios_type + name = resource_type.name.to_s.sub(/^nagios_/, '') + unless @nagios_type = Nagios::Base.type(name.to_sym) + raise Puppet::DevError, "Could not find nagios type '#{name}'" + end + + # And add our 'ensure' settings, since they aren't a part of + # Naginator by default + @nagios_type.send(:attr_accessor, :ensure, :target, :on_disk) + end + @nagios_type + end + + def self.parse(text) + Nagios::Parser.new.parse(text.gsub(NAME_STRING, "_naginator_name")) + rescue => detail + raise Puppet::Error, "Could not parse configuration for #{resource_type.name}: #{detail}" + end + + def self.to_file(records) + header + records.collect { |record| + # Remap the TYPE_name or _naginator_name params to the + # name if the record is a template (register == 0) + if record.to_s =~ /register\s+0/ + record.to_s.sub("_naginator_name", "name").sub(record.type.to_s + "_name", "name") + else + record.to_s.sub("_naginator_name", NAME_STRING) + end + }.join("\n") + end + + def self.skip_record?(record) + false + end + + def self.valid_attr?(klass, attr_name) + nagios_type.parameters.include?(attr_name) + end + + def initialize(resource = nil) + if resource.is_a?(Nagios::Base) + # We don't use a duplicate here, because some providers (ParsedFile, at least) + # use the hash here for later events. + @property_hash = resource + elsif resource + @resource = resource if resource + # LAK 2007-05-09: Keep the model stuff around for backward compatibility + @model = resource + @property_hash = self.class.nagios_type.new + else + @property_hash = self.class.nagios_type.new + end + end +end diff --git a/mcollective/lib/puppet/provider/nameservice.rb b/mcollective/lib/puppet/provider/nameservice.rb new file mode 100644 index 000000000..d57052bd9 --- /dev/null +++ b/mcollective/lib/puppet/provider/nameservice.rb @@ -0,0 +1,276 @@ +require 'puppet' + +# This is the parent class of all NSS classes. 
They're very different in +# their backend, but they're pretty similar on the front-end. This class +# provides a way for them all to be as similar as possible. +class Puppet::Provider::NameService < Puppet::Provider + class << self + def autogen_default(param) + defined?(@autogen_defaults) ? @autogen_defaults[symbolize(param)] : nil + end + + def autogen_defaults(hash) + @autogen_defaults ||= {} + hash.each do |param, value| + @autogen_defaults[symbolize(param)] = value + end + end + + def initvars + @checks = {} + super + end + + def instances + objects = [] + listbyname do |name| + objects << new(:name => name, :ensure => :present) + end + + objects + end + + def option(name, option) + name = name.intern if name.is_a? String + (defined?(@options) and @options.include? name and @options[name].include? option) ? @options[name][option] : nil + end + + def options(name, hash) + raise Puppet::DevError, "#{name} is not a valid attribute for #{resource_type.name}" unless resource_type.valid_parameter?(name) + @options ||= {} + @options[name] ||= {} + + # Set options individually, so we can call the options method + # multiple times. + hash.each do |param, value| + @options[name][param] = value + end + end + + # List everything out by name. Abstracted a bit so that it works + # for both users and groups. + def listbyname + names = [] + Etc.send("set#{section()}ent") + begin + while ent = Etc.send("get#{section()}ent") + names << ent.name + yield ent.name if block_given? + end + ensure + Etc.send("end#{section()}ent") + end + + names + end + + def resource_type=(resource_type) + super + @resource_type.validproperties.each do |prop| + next if prop == :ensure + define_method(prop) { get(prop) || :absent} unless public_method_defined?(prop) + define_method(prop.to_s + "=") { |*vals| set(prop, *vals) } unless public_method_defined?(prop.to_s + "=") + end + end + + # This is annoying, but there really aren't that many options, + # and this *is* built into Ruby. + def section + unless defined?(@resource_type) + raise Puppet::DevError, + "Cannot determine Etc section without a resource type" + + end + + if @resource_type.name == :group + "gr" + else + "pw" + end + end + + def validate(name, value) + name = name.intern if name.is_a? String + if @checks.include? name + block = @checks[name][:block] + raise ArgumentError, "Invalid value #{value}: #{@checks[name][:error]}" unless block.call(value) + end + end + + def verify(name, error, &block) + name = name.intern if name.is_a? String + @checks[name] = {:error => error, :block => block} + end + + private + + def op(property) + @ops[property.name] || ("-#{property.name}") + end + end + + # Autogenerate a value. Mostly used for uid/gid, but also used heavily + # with DirectoryServices, because DirectoryServices is stupid. + def autogen(field) + field = symbolize(field) + id_generators = {:user => :uid, :group => :gid} + if id_generators[@resource.class.name] == field + return autogen_id(field) + else + if value = self.class.autogen_default(field) + return value + elsif respond_to?("autogen_#{field}") + return send("autogen_#{field}") + else + return nil + end + end + end + + # Autogenerate either a uid or a gid. This is hard-coded: we can only + # generate one field type per class. 
+ def autogen_id(field) + highest = 0 + + group = method = nil + case @resource.class.name + when :user; group = :passwd; method = :uid + when :group; group = :group; method = :gid + else + raise Puppet::DevError, "Invalid resource name #{resource}" + end + + # Make sure we don't use the same value multiple times + if defined?(@@prevauto) + @@prevauto += 1 + else + Etc.send(group) { |obj| + if obj.gid > highest + highest = obj.send(method) unless obj.send(method) > 65000 + end + } + + @@prevauto = highest + 1 + end + + @@prevauto + end + + def create + if exists? + info "already exists" + # The object already exists + return nil + end + + begin + execute(self.addcmd) + if feature?(:manages_password_age) && (cmd = passcmd) + execute(cmd) + end + rescue Puppet::ExecutionFailure => detail + raise Puppet::Error, "Could not create #{@resource.class.name} #{@resource.name}: #{detail}" + end + end + + def delete + unless exists? + info "already absent" + # the object already doesn't exist + return nil + end + + begin + execute(self.deletecmd) + rescue Puppet::ExecutionFailure => detail + raise Puppet::Error, "Could not delete #{@resource.class.name} #{@resource.name}: #{detail}" + end + end + + def ensure + if exists? + :present + else + :absent + end + end + + # Does our object exist? + def exists? + !!getinfo(true) + end + + # Retrieve a specific value by name. + def get(param) + (hash = getinfo(false)) ? hash[param] : nil + end + + # Retrieve what we can about our object + def getinfo(refresh) + if @objectinfo.nil? or refresh == true + @etcmethod ||= ("get" + self.class.section.to_s + "nam").intern + begin + @objectinfo = Etc.send(@etcmethod, @resource[:name]) + rescue ArgumentError => detail + @objectinfo = nil + end + end + + # Now convert our Etc struct into a hash. + @objectinfo ? info2hash(@objectinfo) : nil + end + + # The list of all groups the user is a member of. Different + # user mgmt systems will need to override this method. + def groups + groups = [] + + # Reset our group list + Etc.setgrent + + user = @resource[:name] + + # Now iterate across all of the groups, adding each one our + # user is a member of + while group = Etc.getgrent + members = group.mem + + groups << group.name if members.include? user + end + + # We have to close the file, so each listing is a separate + # reading of the file. + Etc.endgrent + + groups.join(",") + end + + # Convert the Etc struct into a hash. + def info2hash(info) + hash = {} + self.class.resource_type.validproperties.each do |param| + method = posixmethod(param) + hash[param] = info.send(posixmethod(param)) if info.respond_to? method + end + + hash + end + + def initialize(resource) + super + + @objectinfo = nil + end + + def set(param, value) + self.class.validate(param, value) + cmd = modifycmd(param, value) + raise Puppet::DevError, "Nameservice command must be an array" unless cmd.is_a?(Array) + begin + execute(cmd) + rescue Puppet::ExecutionFailure => detail + raise Puppet::Error, "Could not set #{param} on #{@resource.class.name}[#{@resource.name}]: #{detail}" + end + end +end + diff --git a/mcollective/lib/puppet/provider/nameservice/directoryservice.rb b/mcollective/lib/puppet/provider/nameservice/directoryservice.rb new file mode 100644 index 000000000..aab491122 --- /dev/null +++ b/mcollective/lib/puppet/provider/nameservice/directoryservice.rb @@ -0,0 +1,519 @@ +# Created by Jeff McCune on 2007-07-22 +# Copyright (c) 2007. All rights reserved. 
+# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation (version 2 of the License) +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston MA 02110-1301 USA + +require 'puppet' +require 'puppet/provider/nameservice' +require 'facter/util/plist' +require 'cgi' + + +class Puppet::Provider::NameService +class DirectoryService < Puppet::Provider::NameService + # JJM: Dive into the singleton_class + class << self + # JJM: This allows us to pass information when calling + # Puppet::Type.type + # e.g. Puppet::Type.type(:user).provide :directoryservice, :ds_path => "Users" + # This is referenced in the get_ds_path class method + attr_writer :ds_path + attr_writer :macosx_version_major + end + + initvars + + commands :dscl => "/usr/bin/dscl" + commands :dseditgroup => "/usr/sbin/dseditgroup" + commands :sw_vers => "/usr/bin/sw_vers" + confine :operatingsystem => :darwin + defaultfor :operatingsystem => :darwin + + + # JJM 2007-07-25: This map is used to map NameService attributes to their + # corresponding DirectoryService attribute names. + # See: http://images.apple.com/server/docs.Open_Directory_v10.4.pdf + # JJM: Note, this is de-coupled from the Puppet::Type, and must + # be actively maintained. There may also be collisions with different + # types (Users, Groups, Mounts, Hosts, etc...) + @@ds_to_ns_attribute_map = { + 'RecordName' => :name, + 'PrimaryGroupID' => :gid, + 'NFSHomeDirectory' => :home, + 'UserShell' => :shell, + 'UniqueID' => :uid, + 'RealName' => :comment, + 'Password' => :password, + 'GeneratedUID' => :guid, + 'IPAddress' => :ip_address, + 'ENetAddress' => :en_address, + 'GroupMembership' => :members, + } + # JJM The same table as above, inverted. + @@ns_to_ds_attribute_map = { + :name => 'RecordName', + :gid => 'PrimaryGroupID', + :home => 'NFSHomeDirectory', + :shell => 'UserShell', + :uid => 'UniqueID', + :comment => 'RealName', + :password => 'Password', + :guid => 'GeneratedUID', + :en_address => 'ENetAddress', + :ip_address => 'IPAddress', + :members => 'GroupMembership', + } + + @@password_hash_dir = "/var/db/shadow/hash" + + def self.instances + # JJM Class method that provides an array of instance objects of this + # type. + # JJM: Properties are dependent on the Puppet::Type we're managine. + type_property_array = [:name] + @resource_type.validproperties + + # Create a new instance of this Puppet::Type for each object present + # on the system. + list_all_present.collect do |name_string| + self.new(single_report(name_string, *type_property_array)) + end + end + + def self.get_ds_path + # JJM: 2007-07-24 This method dynamically returns the DS path we're concerned with. + # For example, if we're working with an user type, this will be /Users + # with a group type, this will be /Groups. + # @ds_path is an attribute of the class itself. + return @ds_path if defined?(@ds_path) + # JJM: "Users" or "Groups" etc ... 
(Based on the Puppet::Type) + # Remember this is a class method, so self.class is Class + # Also, @resource_type seems to be the reference to the + # Puppet::Type this class object is providing for. + @resource_type.name.to_s.capitalize + "s" + end + + def self.get_macosx_version_major + return @macosx_version_major if defined?(@macosx_version_major) + begin + # Make sure we've loaded all of the facts + Facter.loadfacts + + if Facter.value(:macosx_productversion_major) + product_version_major = Facter.value(:macosx_productversion_major) + else + # TODO: remove this code chunk once we require Facter 1.5.5 or higher. + Puppet.warning("DEPRECATION WARNING: Future versions of the directoryservice provider will require Facter 1.5.5 or newer.") + product_version = Facter.value(:macosx_productversion) + fail("Could not determine OS X version from Facter") if product_version.nil? + product_version_major = product_version.scan(/(\d+)\.(\d+)./).join(".") + end + fail("#{product_version_major} is not supported by the directoryservice provider") if %w{10.0 10.1 10.2 10.3}.include?(product_version_major) + @macosx_version_major = product_version_major + return @macosx_version_major + rescue Puppet::ExecutionFailure => detail + fail("Could not determine OS X version: #{detail}") + end + end + + + def self.list_all_present + # JJM: List all objects of this Puppet::Type already present on the system. + begin + dscl_output = execute(get_exec_preamble("-list")) + rescue Puppet::ExecutionFailure => detail + fail("Could not get #{@resource_type.name} list from DirectoryService") + end + dscl_output.split("\n") + end + + def self.parse_dscl_url_data(dscl_output) + # we need to construct a Hash from the dscl -url output to match + # that returned by the dscl -plist output for 10.5+ clients. + # + # Nasty assumptions: + # a) no values *end* in a colon ':', only keys + # b) if a line ends in a colon and the next line does start with + # a space, then the second line is a value of the first. + # c) (implied by (b)) keys don't start with spaces. + + dscl_plist = {} + dscl_output.split("\n").inject([]) do |array, line| + if line =~ /^\s+/ # it's a value + array[-1] << line # add the value to the previous key + else + array << line + end + array + end.compact + + dscl_output.each do |line| + # This should be a 'normal' entry. key and value on one line. + # We split on ': ' to deal with keys/values with a colon in them. + split_array = line.split(/:\s+/) + key = split_array.first + value = CGI::unescape(split_array.last.strip.chomp) + # We need to treat GroupMembership separately as it is currently + # the only attribute we care about multiple values for, and + # the values can never contain spaces (shortnames) + # We also make every value an array to be consistent with the + # output of dscl -plist under 10.5 + if key == "GroupMembership" + dscl_plist[key] = value.split(/\s/) + else + dscl_plist[key] = [value] + end + end + dscl_plist + end + + def self.parse_dscl_plist_data(dscl_output) + Plist.parse_xml(dscl_output) + end + + def self.generate_attribute_hash(input_hash, *type_properties) + attribute_hash = {} + input_hash.keys.each do |key| + ds_attribute = key.sub("dsAttrTypeStandard:", "") + next unless (@@ds_to_ns_attribute_map.keys.include?(ds_attribute) and type_properties.include? 
@@ds_to_ns_attribute_map[ds_attribute]) + ds_value = input_hash[key] + case @@ds_to_ns_attribute_map[ds_attribute] + when :members + ds_value = ds_value # only members uses arrays so far + when :gid, :uid + # OS X stores objects like uid/gid as strings. + # Try casting to an integer for these cases to be + # consistent with the other providers and the group type + # validation + begin + ds_value = Integer(ds_value[0]) + rescue ArgumentError + ds_value = ds_value[0] + end + else ds_value = ds_value[0] + end + attribute_hash[@@ds_to_ns_attribute_map[ds_attribute]] = ds_value + end + + # NBK: need to read the existing password here as it's not actually + # stored in the user record. It is stored at a path that involves the + # UUID of the user record for non-Mobile local acccounts. + # Mobile Accounts are out of scope for this provider for now + attribute_hash[:password] = self.get_password(attribute_hash[:guid]) if @resource_type.validproperties.include?(:password) and Puppet.features.root? + attribute_hash + end + + def self.single_report(resource_name, *type_properties) + # JJM 2007-07-24: + # Given a the name of an object and a list of properties of that + # object, return all property values in a hash. + # + # This class method returns nil if the object doesn't exist + # Otherwise, it returns a hash of the object properties. + + all_present_str_array = list_all_present + + # NBK: shortcut the process if the resource is missing + return nil unless all_present_str_array.include? resource_name + + dscl_vector = get_exec_preamble("-read", resource_name) + begin + dscl_output = execute(dscl_vector) + rescue Puppet::ExecutionFailure => detail + fail("Could not get report. command execution failed.") + end + + # Two code paths is ugly, but until we can drop 10.4 support we don't + # have a lot of choice. Ultimately this should all be done using Ruby + # to access the DirectoryService APIs directly, but that's simply not + # feasible for a while yet. + if self.get_macosx_version_major > "10.4" + dscl_plist = self.parse_dscl_plist_data(dscl_output) + elsif self.get_macosx_version_major == "10.4" + dscl_plist = self.parse_dscl_url_data(dscl_output) + else + fail("Puppet does not support OS X versions < 10.4") + end + + self.generate_attribute_hash(dscl_plist, *type_properties) + end + + def self.get_exec_preamble(ds_action, resource_name = nil) + # JJM 2007-07-24 + # DSCL commands are often repetitive and contain the same positional + # arguments over and over. See http://developer.apple.com/documentation/Porting/Conceptual/PortingUnix/additionalfeatures/chapter_10_section_9.html + # for an example of what I mean. + # This method spits out proper DSCL commands for us. + # We EXPECT name to be @resource[:name] when called from an instance object. + + # 10.4 doesn't support the -plist option for dscl, and 10.5 has a + # different format for the -url output with objects with spaces in + # their values. *sigh*. Use -url for 10.4 in the hope this can be + # deprecated one day, and use -plist for 10.5 and higher. + if self.get_macosx_version_major > "10.4" + command_vector = [ command(:dscl), "-plist", "." ] + elsif self.get_macosx_version_major == "10.4" + command_vector = [ command(:dscl), "-url", "." ] + else + fail("Puppet does not support OS X versions < 10.4") + end + + # JJM: The actual action to perform. See "man dscl" + # Common actiosn: -create, -delete, -merge, -append, -passwd + command_vector << ds_action + # JJM: get_ds_path will spit back "Users" or "Groups", + # etc... 
Depending on the Puppet::Type of our self. + if resource_name + command_vector << "/#{get_ds_path}/#{resource_name}" + else + command_vector << "/#{get_ds_path}" + end + # JJM: This returns most of the preamble of the command. + # e.g. 'dscl / -create /Users/mccune' + command_vector + end + + def self.set_password(resource_name, guid, password_hash) + password_hash_file = "#{@@password_hash_dir}/#{guid}" + begin + File.open(password_hash_file, 'w') { |f| f.write(password_hash)} + rescue Errno::EACCES => detail + fail("Could not write to password hash file: #{detail}") + end + + # NBK: For shadow hashes, the user AuthenticationAuthority must contain a value of + # ";ShadowHash;". The LKDC in 10.5 makes this more interesting though as it + # will dynamically generate ;Kerberosv5;;username@LKDC:SHA1 attributes if + # missing. Thus we make sure we only set ;ShadowHash; if it is missing, and + # we can do this with the merge command. This allows people to continue to + # use other custom AuthenticationAuthority attributes without stomping on them. + # + # There is a potential problem here in that we're only doing this when setting + # the password, and the attribute could get modified at other times while the + # hash doesn't change and so this doesn't get called at all... but + # without switching all the other attributes to merge instead of create I can't + # see a simple enough solution for this that doesn't modify the user record + # every single time. This should be a rather rare edge case. (famous last words) + + dscl_vector = self.get_exec_preamble("-merge", resource_name) + dscl_vector << "AuthenticationAuthority" << ";ShadowHash;" + begin + dscl_output = execute(dscl_vector) + rescue Puppet::ExecutionFailure => detail + fail("Could not set AuthenticationAuthority.") + end + end + + def self.get_password(guid) + password_hash = nil + password_hash_file = "#{@@password_hash_dir}/#{guid}" + if File.exists?(password_hash_file) and File.file?(password_hash_file) + fail("Could not read password hash file at #{password_hash_file}") if not File.readable?(password_hash_file) + f = File.new(password_hash_file) + password_hash = f.read + f.close + end + password_hash + end + + def ensure=(ensure_value) + super + # We need to loop over all valid properties for the type we're + # managing and call the method which sets that property value + # dscl can't create everything at once unfortunately. + if ensure_value == :present + @resource.class.validproperties.each do |name| + next if name == :ensure + # LAK: We use property.sync here rather than directly calling + # the settor method because the properties might do some kind + # of conversion. In particular, the user gid property might + # have a string and need to convert it to a number + if @resource.should(name) + @resource.property(name).sync + elsif value = autogen(name) + self.send(name.to_s + "=", value) + else + next + end + end + end + end + + def password=(passphrase) + exec_arg_vector = self.class.get_exec_preamble("-read", @resource.name) + exec_arg_vector << @@ns_to_ds_attribute_map[:guid] + begin + guid_output = execute(exec_arg_vector) + guid_plist = Plist.parse_xml(guid_output) + # Although GeneratedUID like all DirectoryService values can be multi-valued + # according to the schema, in practice user accounts cannot have multiple UUIDs + # otherwise Bad Things Happen, so we just deal with the first value. 
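For reference, the parsed dscl -plist output consumed in the next few lines has this general shape; the UUID is invented.

# Hypothetical parse result for a -read of GeneratedUID; dscl -plist values
# come back as arrays, so only the first element is used.
guid_plist = {
  "dsAttrTypeStandard:GeneratedUID" =>
    ["B59FE9E9-0A7B-4E32-9D2C-1F0123456789"]
}
guid = guid_plist["dsAttrTypeStandard:GeneratedUID"][0]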
+ guid = guid_plist["dsAttrTypeStandard:#{@@ns_to_ds_attribute_map[:guid]}"][0] + self.class.set_password(@resource.name, guid, passphrase) + rescue Puppet::ExecutionFailure => detail + fail("Could not set #{param} on #{@resource.class.name}[#{@resource.name}]: #{detail}") + end + end + + # NBK: we override @parent.set as we need to execute a series of commands + # to deal with array values, rather than the single command nameservice.rb + # expects to be returned by modifycmd. Thus we don't bother defining modifycmd. + + def set(param, value) + self.class.validate(param, value) + current_members = @property_value_cache_hash[:members] + if param == :members + # If we are meant to be authoritative for the group membership + # then remove all existing members who haven't been specified + # in the manifest. + remove_unwanted_members(current_members, value) if @resource[:auth_membership] and not current_members.nil? + + # if they're not a member, make them one. + add_members(current_members, value) + else + exec_arg_vector = self.class.get_exec_preamble("-create", @resource[:name]) + # JJM: The following line just maps the NS name to the DS name + # e.g. { :uid => 'UniqueID' } + exec_arg_vector << @@ns_to_ds_attribute_map[symbolize(param)] + # JJM: The following line sends the actual value to set the property to + exec_arg_vector << value.to_s + begin + execute(exec_arg_vector) + rescue Puppet::ExecutionFailure => detail + fail("Could not set #{param} on #{@resource.class.name}[#{@resource.name}]: #{detail}") + end + end + end + + # NBK: we override @parent.create as we need to execute a series of commands + # to create objects with dscl, rather than the single command nameservice.rb + # expects to be returned by addcmd. Thus we don't bother defining addcmd. + def create + if exists? + info "already exists" + return nil + end + + # NBK: First we create the object with a known guid so we can set the contents + # of the password hash if required + # Shelling out sucks, but for a single use case it doesn't seem worth + # requiring people install a UUID library that doesn't come with the system. + # This should be revisited if Puppet starts managing UUIDs for other platform + # user records. 
+ guid = %x{/usr/bin/uuidgen}.chomp + + exec_arg_vector = self.class.get_exec_preamble("-create", @resource[:name]) + exec_arg_vector << @@ns_to_ds_attribute_map[:guid] << guid + begin + execute(exec_arg_vector) + rescue Puppet::ExecutionFailure => detail + fail("Could not set GeneratedUID for #{@resource.class.name} #{@resource.name}: #{detail}") + end + + if value = @resource.should(:password) and value != "" + self.class.set_password(@resource[:name], guid, value) + end + + # Now we create all the standard properties + Puppet::Type.type(@resource.class.name).validproperties.each do |property| + next if property == :ensure + if value = @resource.should(property) and value != "" + if property == :members + add_members(nil, value) + else + exec_arg_vector = self.class.get_exec_preamble("-create", @resource[:name]) + exec_arg_vector << @@ns_to_ds_attribute_map[symbolize(property)] + next if property == :password # skip setting the password here + exec_arg_vector << value.to_s + begin + execute(exec_arg_vector) + rescue Puppet::ExecutionFailure => detail + fail("Could not create #{@resource.class.name} #{@resource.name}: #{detail}") + end + end + end + end + end + + def remove_unwanted_members(current_members, new_members) + current_members.each do |member| + if not new_members.flatten.include?(member) + cmd = [:dseditgroup, "-o", "edit", "-n", ".", "-d", member, @resource[:name]] + begin + execute(cmd) + rescue Puppet::ExecutionFailure => detail + fail("Could not remove #{member} from group: #{@resource.name}, #{detail}") + end + end + end + end + + def add_members(current_members, new_members) + new_members.flatten.each do |new_member| + if current_members.nil? or not current_members.include?(new_member) + cmd = [:dseditgroup, "-o", "edit", "-n", ".", "-a", new_member, @resource[:name]] + begin + execute(cmd) + rescue Puppet::ExecutionFailure => detail + fail("Could not add #{new_member} to group: #{@resource.name}, #{detail}") + end + end + end + end + + def deletecmd + # JJM: Like addcmd, only called when deleting the object itself + # Note, this isn't used to delete properties of the object, + # at least that's how I understand it... + self.class.get_exec_preamble("-delete", @resource[:name]) + end + + def getinfo(refresh = false) + # JJM 2007-07-24: + # Override the getinfo method, which is also defined in nameservice.rb + # This method returns and sets @infohash + # I'm not re-factoring the name "getinfo" because this method will be + # most likely called by nameservice.rb, which I didn't write. + if refresh or (! defined?(@property_value_cache_hash) or ! @property_value_cache_hash) + # JJM 2007-07-24: OK, there's a bit of magic that's about to + # happen... Let's see how strong my grip has become... =) + # + # self is a provider instance of some Puppet::Type, like + # Puppet::Type::User::ProviderDirectoryservice for the case of the + # user type and this provider. + # + # self.class looks like "user provider directoryservice", if that + # helps you ... + # + # self.class.resource_type is a reference to the Puppet::Type class, + # probably Puppet::Type::User or Puppet::Type::Group, etc... + # + # self.class.resource_type.validproperties is a class method, + # returning an Array of the valid properties of that specific + # Puppet::Type. + # + # So... something like [:comment, :home, :password, :shell, :uid, + # :groups, :ensure, :gid] + # + # Ultimately, we add :name to the list, delete :ensure from the + # list, then report on the remaining list. Pretty whacky, ehh? 
+ type_properties = [:name] + self.class.resource_type.validproperties + type_properties.delete(:ensure) if type_properties.include? :ensure + type_properties << :guid # append GeneratedUID so we just get the report here + @property_value_cache_hash = self.class.single_report(@resource[:name], *type_properties) + [:uid, :gid].each do |param| + @property_value_cache_hash[param] = @property_value_cache_hash[param].to_i if @property_value_cache_hash and @property_value_cache_hash.include?(param) + end + end + @property_value_cache_hash + end +end +end diff --git a/mcollective/lib/puppet/provider/nameservice/objectadd.rb b/mcollective/lib/puppet/provider/nameservice/objectadd.rb new file mode 100644 index 000000000..dbb9f306f --- /dev/null +++ b/mcollective/lib/puppet/provider/nameservice/objectadd.rb @@ -0,0 +1,33 @@ +require 'puppet/provider/nameservice' + +class Puppet::Provider::NameService +class ObjectAdd < Puppet::Provider::NameService + def deletecmd + [command(:delete), @resource[:name]] + end + + # Determine the flag to pass to our command. + def flag(name) + name = name.intern if name.is_a? String + self.class.option(name, :flag) || "-" + name.to_s[0,1] + end + + def modifycmd(param, value) + cmd = [command(param.to_s =~ /password_.+_age/ ? :password : :modify)] + cmd << flag(param) << value + if @resource.allowdupe? && ((param == :uid) || (param == :gid and self.class.name == :groupadd)) + cmd << "-o" + end + cmd << @resource[:name] + + cmd + end + + def posixmethod(name) + name = name.intern if name.is_a? String + method = self.class.option(name, :method) || name + + method + end +end +end diff --git a/mcollective/lib/puppet/provider/nameservice/pw.rb b/mcollective/lib/puppet/provider/nameservice/pw.rb new file mode 100644 index 000000000..74f1a9fbb --- /dev/null +++ b/mcollective/lib/puppet/provider/nameservice/pw.rb @@ -0,0 +1,21 @@ +require 'puppet/provider/nameservice/objectadd' + +class Puppet::Provider::NameService +class PW < ObjectAdd + def deletecmd + [command(:pw), "#{@resource.class.name.to_s}del", @resource[:name]] + end + + def modifycmd(param, value) + cmd = [ + command(:pw), + "#{@resource.class.name.to_s}mod", + @resource[:name], + flag(param), + value + ] + cmd + end +end +end + diff --git a/mcollective/lib/puppet/provider/package.rb b/mcollective/lib/puppet/provider/package.rb new file mode 100644 index 000000000..2f5f67547 --- /dev/null +++ b/mcollective/lib/puppet/provider/package.rb @@ -0,0 +1,27 @@ +# Created by Luke A. Kanies on 2007-06-05. +# Copyright (c) 2007. All rights reserved. + +class Puppet::Provider::Package < Puppet::Provider + # Prefetch our package list, yo. + def self.prefetch(packages) + instances.each do |prov| + if pkg = packages[prov.name] + pkg.provider = prov + end + end + end + + # Clear out the cached values. + def flush + @property_hash.clear + end + + # Look up the current status. + def properties + if @property_hash.empty? + @property_hash = query || {:ensure => :absent} + @property_hash[:ensure] = :absent if @property_hash.empty? 
+ end + @property_hash.dup + end +end diff --git a/mcollective/lib/puppet/provider/package/aix.rb b/mcollective/lib/puppet/provider/package/aix.rb new file mode 100644 index 000000000..134a84526 --- /dev/null +++ b/mcollective/lib/puppet/provider/package/aix.rb @@ -0,0 +1,128 @@ +require 'puppet/provider/package' +require 'puppet/util/package' + +Puppet::Type.type(:package).provide :aix, :parent => Puppet::Provider::Package do + desc "Installation from AIX Software directory" + + # The commands we are using on an AIX box are installed standard + # (except nimclient) nimclient needs the bos.sysmgt.nim.client fileset. + commands :lslpp => "/usr/bin/lslpp", + :installp => "/usr/sbin/installp" + + # AIX supports versionable packages with and without a NIM server + has_feature :versionable + + confine :operatingsystem => [ :aix ] + defaultfor :operatingsystem => :aix + + attr_accessor :latest_info + + def self.srclistcmd(source) + [ command(:installp), "-L", "-d", source ] + end + + def self.prefetch(packages) + raise Puppet::Error, "The aix provider can only be used by root" if Process.euid != 0 + + return unless packages.detect { |name, package| package.should(:ensure) == :latest } + + sources = packages.collect { |name, package| package[:source] }.uniq + + updates = {} + sources.each do |source| + execute(self.srclistcmd(source)).each do |line| + if line =~ /^[^#][^:]*:([^:]*):([^:]*)/ + current = {} + current[:name] = $1 + current[:version] = $2 + current[:source] = source + + if updates.key?(current[:name]) + previous = updates[current[:name]] + + updates[ current[:name] ] = current unless Puppet::Util::Package.versioncmp(previous[:version], current[:version]) == 1 + + else + updates[current[:name]] = current + end + end + end + end + + packages.each do |name, package| + if info = updates[package[:name]] + package.provider.latest_info = info[0] + end + end + end + + def uninstall + # Automatically process dependencies when installing/uninstalling + # with the -g option to installp. + installp "-gu", @resource[:name] + end + + def install(useversion = true) + unless source = @resource[:source] + self.fail "A directory is required which will be used to find packages" + end + + pkg = @resource[:name] + + pkg << " #{@resource.should(:ensure)}" if (! @resource.should(:ensure).is_a? Symbol) and useversion + + installp "-acgwXY", "-d", source, pkg + end + + def self.pkglist(hash = {}) + cmd = [command(:lslpp), "-qLc"] + + if name = hash[:pkgname] + cmd << name + end + + begin + list = execute(cmd).scan(/^[^#][^:]*:([^:]*):([^:]*)/).collect { |n,e| + { :name => n, :ensure => e, :provider => self.name } + } + rescue Puppet::ExecutionFailure => detail + if hash[:pkgname] + return nil + else + raise Puppet::Error, "Could not list installed Packages: #{detail}" + end + end + + if hash[:pkgname] + return list.shift + else + return list + end + end + + def self.instances + pkglist.collect do |hash| + new(hash) + end + end + + def latest + upd = latest_info + + unless upd.nil? 
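+      # latest_info was collected during prefetch from the installp source listing.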
+ return "#{upd[:version]}" + else + raise Puppet::DevError, "Tried to get latest on a missing package" if properties[:ensure] == :absent + + return properties[:ensure] + end + end + + def query + self.class.pkglist(:pkgname => @resource[:name]) + end + + def update + self.install(false) + end +end diff --git a/mcollective/lib/puppet/provider/package/appdmg.rb b/mcollective/lib/puppet/provider/package/appdmg.rb new file mode 100644 index 000000000..439043c91 --- /dev/null +++ b/mcollective/lib/puppet/provider/package/appdmg.rb @@ -0,0 +1,110 @@ +# Jeff McCune +# Changed to app.dmg by: Udo Waechter +# Mac OS X Package Installer which handles application (.app) +# bundles inside an Apple Disk Image. +# +# Motivation: DMG files provide a true HFS file system +# and are easier to manage. +# +# Note: the 'apple' Provider checks for the package name +# in /L/Receipts. Since we possibly install multiple apps's from +# a single source, we treat the source .app.dmg file as the package name. +# As a result, we store installed .app.dmg file names +# in /var/db/.puppet_appdmg_installed_ + +require 'puppet/provider/package' +Puppet::Type.type(:package).provide(:appdmg, :parent => Puppet::Provider::Package) do + desc "Package management which copies application bundles to a target." + + confine :operatingsystem => :darwin + + commands :hdiutil => "/usr/bin/hdiutil" + commands :curl => "/usr/bin/curl" + commands :ditto => "/usr/bin/ditto" + + # JJM We store a cookie for each installed .app.dmg in /var/db + def self.instances_by_name + Dir.entries("/var/db").find_all { |f| + f =~ /^\.puppet_appdmg_installed_/ + }.collect do |f| + name = f.sub(/^\.puppet_appdmg_installed_/, '') + yield name if block_given? + name + end + end + + def self.instances + instances_by_name.collect do |name| + new(:name => name, :provider => :appdmg, :ensure => :installed) + end + end + + def self.installapp(source, name, orig_source) + appname = File.basename(source); + ditto "--rsrc", source, "/Applications/#{appname}" + File.open("/var/db/.puppet_appdmg_installed_#{name}", "w") do |t| + t.print "name: '#{name}'\n" + t.print "source: '#{orig_source}'\n" + end + end + + def self.installpkgdmg(source, name) + unless source =~ /\.dmg$/i + self.fail "Mac OS X PKG DMG's must specify a source string ending in .dmg" + end + require 'open-uri' + require 'facter/util/plist' + cached_source = source + tmpdir = Dir.mktmpdir + begin + if %r{\A[A-Za-z][A-Za-z0-9+\-\.]*://} =~ cached_source + cached_source = File.join(tmpdir, name) + begin + curl "-o", cached_source, "-C", "-", "-k", "-L", "-s", "--url", source + Puppet.debug "Success: curl transfered [#{name}]" + rescue Puppet::ExecutionFailure + Puppet.debug "curl did not transfer [#{name}]. Falling back to slower open-uri transfer methods." + cached_source = source + end + end + + open(cached_source) do |dmg| + xml_str = hdiutil "mount", "-plist", "-nobrowse", "-readonly", "-mountrandom", "/tmp", dmg.path + ptable = Plist::parse_xml xml_str + # JJM Filter out all mount-paths into a single array, discard the rest. 
+ mounts = ptable['system-entities'].collect { |entity| + entity['mount-point'] + }.select { |mountloc|; mountloc } + begin + mounts.each do |fspath| + Dir.entries(fspath).select { |f| + f =~ /\.app$/i + }.each do |pkg| + installapp("#{fspath}/#{pkg}", name, source) + end + end + ensure + hdiutil "eject", mounts[0] + end + end + ensure + FileUtils.remove_entry_secure(tmpdir, force=true) + end + end + + def query + FileTest.exists?("/var/db/.puppet_appdmg_installed_#{@resource[:name]}") ? {:name => @resource[:name], :ensure => :present} : nil + end + + def install + source = nil + unless source = @resource[:source] + self.fail "Mac OS X PKG DMG's must specify a package source." + end + unless name = @resource[:name] + self.fail "Mac OS X PKG DMG's must specify a package name." + end + self.class.installpkgdmg(source,name) + end +end + diff --git a/mcollective/lib/puppet/provider/package/apple.rb b/mcollective/lib/puppet/provider/package/apple.rb new file mode 100755 index 000000000..b5bb9102f --- /dev/null +++ b/mcollective/lib/puppet/provider/package/apple.rb @@ -0,0 +1,51 @@ +require 'puppet/provider/package' + +# OS X Packaging sucks. We can install packages, but that's about it. +Puppet::Type.type(:package).provide :apple, :parent => Puppet::Provider::Package do + desc "Package management based on OS X's builtin packaging system. This is + essentially the simplest and least functional package system in existence -- + it only supports installation; no deletion or upgrades. The provider will + automatically add the `.pkg` extension, so leave that off when specifying + the package name." + + confine :operatingsystem => :darwin + commands :installer => "/usr/sbin/installer" + + def self.instances + instance_by_name.collect do |name| + + self.new( + + :name => name, + :provider => :apple, + + :ensure => :installed + ) + end + end + + def self.instance_by_name + Dir.entries("/Library/Receipts").find_all { |f| + f =~ /\.pkg$/ + }.collect { |f| + name = f.sub(/\.pkg/, '') + yield name if block_given? + + name + } + end + + def query + FileTest.exists?("/Library/Receipts/#{@resource[:name]}.pkg") ? {:name => @resource[:name], :ensure => :present} : nil + end + + def install + source = nil + unless source = @resource[:source] + self.fail "Mac OS X packages must specify a package source" + end + + installer "-pkg", source, "-target", "/" + end +end + diff --git a/mcollective/lib/puppet/provider/package/apt.rb b/mcollective/lib/puppet/provider/package/apt.rb new file mode 100755 index 000000000..2fc787419 --- /dev/null +++ b/mcollective/lib/puppet/provider/package/apt.rb @@ -0,0 +1,112 @@ +Puppet::Type.type(:package).provide :apt, :parent => :dpkg, :source => :dpkg do + # Provide sorting functionality + include Puppet::Util::Package + + desc "Package management via `apt-get`." + + has_feature :versionable + + commands :aptget => "/usr/bin/apt-get" + commands :aptcache => "/usr/bin/apt-cache" + commands :preseed => "/usr/bin/debconf-set-selections" + + defaultfor :operatingsystem => [:debian, :ubuntu] + + ENV['DEBIAN_FRONTEND'] = "noninteractive" + + # disable common apt helpers to allow non-interactive package installs + ENV['APT_LISTBUGS_FRONTEND'] = "none" + ENV['APT_LISTCHANGES_FRONTEND'] = "none" + + # A derivative of DPKG; this is how most people actually manage + # Debian boxes, and the only thing that differs is that it can + # install packages from remote sites. + + def checkforcdrom + unless defined?(@@checkedforcdrom) + if FileTest.exists? 
"/etc/apt/sources.list" + @@checkedforcdrom = !!(File.read("/etc/apt/sources.list") =~ /^[^#]*cdrom:/) + else + # This is basically a pathalogical case, but we'll just + # ignore it + @@checkedforcdrom = false + end + end + + if @@checkedforcdrom and @resource[:allowcdrom] != :true + raise Puppet::Error, + "/etc/apt/sources.list contains a cdrom source; not installing. Use 'allowcdrom' to override this failure." + end + end + + # Install a package using 'apt-get'. This function needs to support + # installing a specific version. + def install + self.run_preseed if @resource[:responsefile] + should = @resource[:ensure] + + checkforcdrom + cmd = %w{-q -y} + + keep = "" + if config = @resource[:configfiles] + if config == :keep + cmd << "-o" << 'DPkg::Options::=--force-confold' + else + cmd << "-o" << 'DPkg::Options::=--force-confnew' + end + end + + str = @resource[:name] + case should + when true, false, Symbol + # pass + else + # Add the package version and --force-yes option + str += "=#{should}" + cmd << "--force-yes" + end + + cmd << :install << str + + aptget(*cmd) + end + + # What's the latest package version available? + def latest + output = aptcache :policy, @resource[:name] + + if output =~ /Candidate:\s+(\S+)\s/ + return $1 + else + self.err "Could not find latest version" + return nil + end + end + + # + # preseeds answers to dpkg-set-selection from the "responsefile" + # + def run_preseed + if response = @resource[:responsefile] and FileTest.exist?(response) + self.info("Preseeding #{response} to debconf-set-selections") + + preseed response + else + self.info "No responsefile specified or non existant, not preseeding anything" + end + end + + def uninstall + self.run_preseed if @resource[:responsefile] + aptget "-y", "-q", :remove, @resource[:name] + end + + def purge + self.run_preseed if @resource[:responsefile] + aptget '-y', '-q', :remove, '--purge', @resource[:name] + # workaround a "bug" in apt, that already removed packages are not purged + super + end +end + diff --git a/mcollective/lib/puppet/provider/package/aptitude.rb b/mcollective/lib/puppet/provider/package/aptitude.rb new file mode 100755 index 000000000..2eafd3ef8 --- /dev/null +++ b/mcollective/lib/puppet/provider/package/aptitude.rb @@ -0,0 +1,30 @@ +Puppet::Type.type(:package).provide :aptitude, :parent => :apt, :source => :dpkg do + desc "Package management via `aptitude`." + + has_feature :versionable + + commands :aptitude => "/usr/bin/aptitude" + commands :aptcache => "/usr/bin/apt-cache" + + ENV['DEBIAN_FRONTEND'] = "noninteractive" + + def aptget(*args) + args.flatten! + # Apparently aptitude hasn't always supported a -q flag. + args.delete("-q") if args.include?("-q") + args.delete("--force-yes") if args.include?("--force-yes") + output = aptitude(*args) + + # Yay, stupid aptitude doesn't throw an error when the package is missing. + if args.include?(:install) and output =~ /Couldn't find any package/ + raise Puppet::Error.new( + "Could not find package #{self.name}" + ) + end + end + + def purge + aptitude '-y', 'purge', @resource[:name] + end +end + diff --git a/mcollective/lib/puppet/provider/package/aptrpm.rb b/mcollective/lib/puppet/provider/package/aptrpm.rb new file mode 100644 index 000000000..2eb33c72b --- /dev/null +++ b/mcollective/lib/puppet/provider/package/aptrpm.rb @@ -0,0 +1,84 @@ +Puppet::Type.type(:package).provide :aptrpm, :parent => :rpm, :source => :rpm do + # Provide sorting functionality + include Puppet::Util::Package + + desc "Package management via `apt-get` ported to `rpm`." 
+ + has_feature :versionable + + commands :aptget => "apt-get" + commands :aptcache => "apt-cache" + commands :rpm => "rpm" + + if command('rpm') + confine :true => begin + rpm('-ql', 'rpm') + rescue Puppet::ExecutionFailure + false + else + true + end + end + + # Install a package using 'apt-get'. This function needs to support + # installing a specific version. + def install + should = @resource.should(:ensure) + + str = @resource[:name] + case should + when true, false, Symbol + # pass + else + # Add the package version + str += "=#{should}" + end + cmd = %w{-q -y} + + cmd << 'install' << str + + aptget(*cmd) + end + + # What's the latest package version available? + def latest + output = aptcache :showpkg, @resource[:name] + + if output =~ /Versions:\s*\n((\n|.)+)^$/ + versions = $1 + available_versions = versions.split(/\n/).collect { |version| + if version =~ /^([^\(]+)\(/ + $1 + else + self.warning "Could not match version '#{version}'" + nil + end + }.reject { |vers| vers.nil? }.sort { |a,b| + versioncmp(a,b) + } + + if available_versions.length == 0 + self.debug "No latest version" + print output if Puppet[:debug] + end + + # Get the latest and greatest version number + return available_versions.pop + else + self.err "Could not match string" + end + end + + def update + self.install + end + + def uninstall + aptget "-y", "-q", 'remove', @resource[:name] + end + + def purge + aptget '-y', '-q', 'remove', '--purge', @resource[:name] + end +end + diff --git a/mcollective/lib/puppet/provider/package/blastwave.rb b/mcollective/lib/puppet/provider/package/blastwave.rb new file mode 100755 index 000000000..e9c84845f --- /dev/null +++ b/mcollective/lib/puppet/provider/package/blastwave.rb @@ -0,0 +1,112 @@ +# Packaging using Blastwave's pkg-get program. +Puppet::Type.type(:package).provide :blastwave, :parent => :sun, :source => :sun do + desc "Package management using Blastwave.org's `pkg-get` command on Solaris." + pkgget = "pkg-get" + pkgget = "/opt/csw/bin/pkg-get" if FileTest.executable?("/opt/csw/bin/pkg-get") + + confine :operatingsystem => :solaris + + commands :pkgget => pkgget + + def pkgget_with_cat(*args) + Puppet::Util::Execution::withenv(:PAGER => "/usr/bin/cat") { pkgget(*args) } + end + + def self.extended(mod) + unless command(:pkgget) != "pkg-get" + raise Puppet::Error, + "The pkg-get command is missing; blastwave packaging unavailable" + end + + unless FileTest.exists?("/var/pkg-get/admin") + Puppet.notice "It is highly recommended you create '/var/pkg-get/admin'." + Puppet.notice "See /var/pkg-get/admin-fullauto" + end + end + + def self.instances(hash = {}) + blastlist(hash).collect do |bhash| + bhash.delete(:avail) + new(bhash) + end + end + + # Turn our blastwave listing into a bunch of hashes. + def self.blastlist(hash) + command = ["-c"] + + command << hash[:justme] if hash[:justme] + + output = Puppet::Util::Execution::withenv(:PAGER => "/usr/bin/cat") { pkgget command } + + list = output.split("\n").collect do |line| + next if line =~ /^#/ + next if line =~ /^WARNING/ + next if line =~ /localrev\s+remoterev/ + + blastsplit(line) + end.reject { |h| h.nil? } + + if hash[:justme] + return list[0] + else + list.reject! { |h| + h[:ensure] == :absent + } + return list + end + + end + + # Split the different lines into hashes. 
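+  # Each `pkg-get -c` line carries the package name, the installed revision
+  # (or "[Not installed]") and the revision available from the mirror.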
+ def self.blastsplit(line) + if line =~ /\s*(\S+)\s+((\[Not installed\])|(\S+))\s+(\S+)/ + hash = {} + hash[:name] = $1 + hash[:ensure] = if $2 == "[Not installed]" + :absent + else + $2 + end + hash[:avail] = $5 + + hash[:avail] = hash[:ensure] if hash[:avail] == "SAME" + + # Use the name method, so it works with subclasses. + hash[:provider] = self.name + + return hash + else + Puppet.warning "Cannot match #{line}" + return nil + end + end + + def install + pkgget_with_cat "-f", :install, @resource[:name] + end + + # Retrieve the version from the current package file. + def latest + hash = self.class.blastlist(:justme => @resource[:name]) + hash[:avail] + end + + def query + if hash = self.class.blastlist(:justme => @resource[:name]) + hash + else + {:ensure => :absent} + end + end + + # Remove the old package, and install the new one + def update + pkgget_with_cat "-f", :upgrade, @resource[:name] + end + + def uninstall + pkgget_with_cat "-f", :remove, @resource[:name] + end +end + diff --git a/mcollective/lib/puppet/provider/package/dpkg.rb b/mcollective/lib/puppet/provider/package/dpkg.rb new file mode 100755 index 000000000..7b34b09af --- /dev/null +++ b/mcollective/lib/puppet/provider/package/dpkg.rb @@ -0,0 +1,156 @@ +require 'puppet/provider/package' + +Puppet::Type.type(:package).provide :dpkg, :parent => Puppet::Provider::Package do + desc "Package management via `dpkg`. Because this only uses `dpkg` + and not `apt`, you must specify the source of any packages you want + to manage." + + has_feature :holdable + + commands :dpkg => "/usr/bin/dpkg" + commands :dpkg_deb => "/usr/bin/dpkg-deb" + commands :dpkgquery => "/usr/bin/dpkg-query" + + def self.instances + packages = [] + + # list out all of the packages + cmd = "#{command(:dpkgquery)} -W --showformat '${Status} ${Package} ${Version}\\n'" + Puppet.debug "Executing '#{cmd}'" + execpipe(cmd) do |process| + # our regex for matching dpkg output + regex = %r{^(\S+) +(\S+) +(\S+) (\S+) (\S*)$} + fields = [:desired, :error, :status, :name, :ensure] + hash = {} + + # now turn each returned line into a package object + process.each { |line| + if hash = parse_line(line) + packages << new(hash) + end + } + end + + packages + end + + self::REGEX = %r{^(\S+) +(\S+) +(\S+) (\S+) (\S*)$} + self::FIELDS = [:desired, :error, :status, :name, :ensure] + + def self.parse_line(line) + if match = self::REGEX.match(line) + hash = {} + + self::FIELDS.zip(match.captures) { |field,value| + hash[field] = value + } + + hash[:provider] = self.name + + if hash[:status] == 'not-installed' + hash[:ensure] = :purged + elsif ['config-files', 'half-installed', 'unpacked', 'half-configured'].include?(hash[:status]) + hash[:ensure] = :absent + end + hash[:ensure] = :held if hash[:desired] == 'hold' + else + Puppet.warning "Failed to match dpkg-query line #{line.inspect}" + return nil + end + + hash + end + + def install + unless file = @resource[:source] + raise ArgumentError, "You cannot install dpkg packages without a source" + end + + args = [] + + # We always unhold when installing to remove any prior hold. + self.unhold + + if @resource[:configfiles] == :keep + args << '--force-confold' + else + args << '--force-confnew' + end + args << '-i' << file + + dpkg(*args) + end + + def update + self.install + end + + # Return the version from the package. 
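+  # Read with `dpkg-deb --show` from the local .deb file named in :source.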
+ def latest + output = dpkg_deb "--show", @resource[:source] + matches = /^(\S+)\t(\S+)$/.match(output).captures + warning "source doesn't contain named package, but #{matches[0]}" unless matches[0].match( Regexp.escape(@resource[:name]) ) + matches[1] + end + + def query + packages = [] + + fields = [:desired, :error, :status, :name, :ensure] + + hash = {} + + # list out our specific package + begin + + output = dpkgquery( + "-W", "--showformat", + + '${Status} ${Package} ${Version}\\n', @resource[:name] + ) + rescue Puppet::ExecutionFailure + # dpkg-query exits 1 if the package is not found. + return {:ensure => :purged, :status => 'missing', :name => @resource[:name], :error => 'ok'} + + end + + hash = self.class.parse_line(output) || {:ensure => :absent, :status => 'missing', :name => @resource[:name], :error => 'ok'} + + if hash[:error] != "ok" + raise Puppet::Error.new( + "Package #{hash[:name]}, version #{hash[:ensure]} is in error state: #{hash[:error]}" + ) + end + + hash + end + + def uninstall + dpkg "-r", @resource[:name] + end + + def purge + dpkg "--purge", @resource[:name] + end + + def hold + self.install + begin + Tempfile.open('puppet_dpkg_set_selection') { |tmpfile| + tmpfile.write("#{@resource[:name]} hold\n") + tmpfile.flush + execute([:dpkg, "--set-selections"], :stdinfile => tmpfile.path.to_s) + } + end + end + + def unhold + begin + Tempfile.open('puppet_dpkg_set_selection') { |tmpfile| + tmpfile.write("#{@resource[:name]} install\n") + tmpfile.flush + execute([:dpkg, "--set-selections"], :stdinfile => tmpfile.path.to_s) + } + end + end +end diff --git a/mcollective/lib/puppet/provider/package/fink.rb b/mcollective/lib/puppet/provider/package/fink.rb new file mode 100755 index 000000000..db991397a --- /dev/null +++ b/mcollective/lib/puppet/provider/package/fink.rb @@ -0,0 +1,82 @@ +Puppet::Type.type(:package).provide :fink, :parent => :dpkg, :source => :dpkg do + # Provide sorting functionality + include Puppet::Util::Package + + desc "Package management via `fink`." + + commands :fink => "/sw/bin/fink" + commands :aptget => "/sw/bin/apt-get" + commands :aptcache => "/sw/bin/apt-cache" + commands :dpkgquery => "/sw/bin/dpkg-query" + + has_feature :versionable + + # A derivative of DPKG; this is how most people actually manage + # Debian boxes, and the only thing that differs is that it can + # install packages from remote sites. + + def finkcmd(*args) + fink(*args) + end + + # Install a package using 'apt-get'. This function needs to support + # installing a specific version. + def install + self.run_preseed if @resource[:responsefile] + should = @resource.should(:ensure) + + str = @resource[:name] + case should + when true, false, Symbol + # pass + else + # Add the package version + str += "=#{should}" + end + cmd = %w{-b -q -y} + + keep = "" + + cmd << :install << str + + finkcmd(cmd) + end + + # What's the latest package version available? 
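+  # Taken from the Candidate line of `apt-cache policy` output.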
+ def latest + output = aptcache :policy, @resource[:name] + + if output =~ /Candidate:\s+(\S+)\s/ + return $1 + else + self.err "Could not find latest version" + return nil + end + end + + # + # preseeds answers to dpkg-set-selection from the "responsefile" + # + def run_preseed + if response = @resource[:responsefile] and FileTest.exists?(response) + self.info("Preseeding #{response} to debconf-set-selections") + + preseed response + else + self.info "No responsefile specified or non existant, not preseeding anything" + end + end + + def update + self.install + end + + def uninstall + finkcmd "-y", "-q", :remove, @model[:name] + end + + def purge + aptget '-y', '-q', 'remove', '--purge', @resource[:name] + end +end + diff --git a/mcollective/lib/puppet/provider/package/freebsd.rb b/mcollective/lib/puppet/provider/package/freebsd.rb new file mode 100755 index 000000000..e10a20b04 --- /dev/null +++ b/mcollective/lib/puppet/provider/package/freebsd.rb @@ -0,0 +1,50 @@ +Puppet::Type.type(:package).provide :freebsd, :parent => :openbsd do + desc "The specific form of package management on FreeBSD. This is an + extremely quirky packaging system, in that it freely mixes between + ports and packages. Apparently all of the tools are written in Ruby, + so there are plans to rewrite this support to directly use those + libraries." + + commands :pkginfo => "/usr/sbin/pkg_info", + :pkgadd => "/usr/sbin/pkg_add", + :pkgdelete => "/usr/sbin/pkg_delete" + + confine :operatingsystem => :freebsd + + def self.listcmd + command(:pkginfo) + end + + def install + should = @resource.should(:ensure) + + if @resource[:source] =~ /\/$/ + if @resource[:source] =~ /^(ftp|https?):/ + Puppet::Util::Execution::withenv :PACKAGESITE => @resource[:source] do + pkgadd "-r", @resource[:name] + end + else + Puppet::Util::Execution::withenv :PKG_PATH => @resource[:source] do + pkgadd @resource[:name] + end + end + else + Puppet.warning "source is defined but does not have trailing slash, ignoring #{@resource[:source]}" if @resource[:source] + pkgadd "-r", @resource[:name] + end + end + + def query + self.class.instances.each do |provider| + if provider.name == @resource.name + return provider.properties + end + end + nil + end + + def uninstall + pkgdelete "#{@resource[:name]}-#{@resource.should(:ensure)}" + end +end + diff --git a/mcollective/lib/puppet/provider/package/gem.rb b/mcollective/lib/puppet/provider/package/gem.rb new file mode 100755 index 000000000..28731c849 --- /dev/null +++ b/mcollective/lib/puppet/provider/package/gem.rb @@ -0,0 +1,124 @@ +require 'puppet/provider/package' +require 'uri' + +# Ruby gems support. +Puppet::Type.type(:package).provide :gem, :parent => Puppet::Provider::Package do + desc "Ruby Gem support. If a URL is passed via `source`, then that URL is used as the + remote gem repository; if a source is present but is not a valid URL, it will be + interpreted as the path to a local gem file. If source is not present at all, + the gem will be installed from the default gem repositories." 
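+  # Illustrative manifest usage (names and URL below are examples only):
+  #   package { 'rake':  ensure => installed, provider => gem }
+  #   package { 'rails': ensure => '2.3.5', provider => gem, source => 'http://gems.example.com' }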
+ + has_feature :versionable + + commands :gemcmd => "gem" + + def self.gemlist(hash) + command = [command(:gemcmd), "list"] + + if hash[:local] + command << "--local" + else + command << "--remote" + end + + if name = hash[:justme] + command << name + "$" + end + + begin + list = execute(command).split("\n").collect do |set| + if gemhash = gemsplit(set) + gemhash[:provider] = :gem + gemhash + else + nil + end + end.compact + rescue Puppet::ExecutionFailure => detail + raise Puppet::Error, "Could not list gems: #{detail}" + end + + if hash[:justme] + return list.shift + else + return list + end + end + + def self.gemsplit(desc) + case desc + when /^\*\*\*/, /^\s*$/, /^\s+/; return nil + when /^(\S+)\s+\((.+)\)/ + name = $1 + version = $2.split(/,\s*/)[0] + return { + :name => name, + :ensure => version + } + else + Puppet.warning "Could not match #{desc}" + nil + end + end + + def self.instances(justme = false) + gemlist(:local => true).collect do |hash| + new(hash) + end + end + + def install(useversion = true) + command = [command(:gemcmd), "install"] + command << "-v" << resource[:ensure] if (! resource[:ensure].is_a? Symbol) and useversion + # Always include dependencies + command << "--include-dependencies" + + if source = resource[:source] + begin + uri = URI.parse(source) + rescue => detail + fail "Invalid source '#{uri}': #{detail}" + end + + case uri.scheme + when nil + # no URI scheme => interpret the source as a local file + command << source + when /file/i + command << uri.path + when 'puppet' + # we don't support puppet:// URLs (yet) + raise Puppet::Error.new("puppet:// URLs are not supported as gem sources") + else + # interpret it as a gem repository + command << "--source" << "#{source}" << resource[:name] + end + else + command << "--no-rdoc" << "--no-ri" << resource[:name] + end + + output = execute(command) + # Apparently some stupid gem versions don't exit non-0 on failure + self.fail "Could not install: #{output.chomp}" if output.include?("ERROR") + end + + def latest + # This always gets the latest version available. + hash = self.class.gemlist(:justme => resource[:name]) + + hash[:ensure] + end + + def query + self.class.gemlist(:justme => resource[:name], :local => true) + end + + def uninstall + gemcmd "uninstall", "-x", "-a", resource[:name] + end + + def update + self.install(false) + end +end + diff --git a/mcollective/lib/puppet/provider/package/hpux.rb b/mcollective/lib/puppet/provider/package/hpux.rb new file mode 100644 index 000000000..8396edf6d --- /dev/null +++ b/mcollective/lib/puppet/provider/package/hpux.rb @@ -0,0 +1,44 @@ +# HP-UX packaging. + +require 'puppet/provider/package' + +Puppet::Type.type(:package).provide :hpux, :parent => Puppet::Provider::Package do + + desc "HP-UX's packaging system." + + commands :swinstall => "/usr/sbin/swinstall", + :swlist => "/usr/sbin/swlist", + :swremove => "/usr/sbin/swremove" + + confine :operatingsystem => "hp-ux" + + defaultfor :operatingsystem => "hp-ux" + + def self.instances + # TODO: This is very hard on HP-UX! 
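+    # Enumerating installed filesets reliably is not implemented, so report none.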
+ [] + end + + # source and name are required + def install + raise ArgumentError, "source must be provided to install HP-UX packages" unless resource[:source] + args = standard_args + ["-s", resource[:source], resource[:name]] + swinstall(*args) + end + + def query + swlist resource[:name] + {:ensure => :present} + rescue + {:ensure => :absent} + end + + def uninstall + args = standard_args + [resource[:name]] + swremove(*args) + end + + def standard_args + ["-x", "mount_all_filesystems=false"] + end +end diff --git a/mcollective/lib/puppet/provider/package/macports.rb b/mcollective/lib/puppet/provider/package/macports.rb new file mode 100755 index 000000000..c43eb72f3 --- /dev/null +++ b/mcollective/lib/puppet/provider/package/macports.rb @@ -0,0 +1,106 @@ +require 'puppet/provider/package' + +Puppet::Type.type(:package).provide :macports, :parent => Puppet::Provider::Package do + desc "Package management using MacPorts on OS X. + + Supports MacPorts versions and revisions, but not variants. + Variant preferences may be specified using the MacPorts variants.conf file + http://guide.macports.org/chunked/internals.configuration-files.html#internals.configuration-files.variants-conf + + When specifying a version in the Puppet DSL, only specify the version, not the revision + Revisions are only used internally for ensuring the latest version/revision of a port. + " + + confine :operatingsystem => :darwin + commands :port => "/opt/local/bin/port" + + has_feature :installable + has_feature :uninstallable + has_feature :upgradeable + has_feature :versionable + + + def self.parse_installed_query_line(line) + regex = /(\S+)\s+@(\S+)_(\S+)\s+\(active\)/ + fields = [:name, :ensure, :revision] + hash_from_line(line, regex, fields) + end + + def self.parse_info_query_line(line) + regex = /(\S+)\s+(\S+)/ + fields = [:version, :revision] + hash_from_line(line, regex, fields) + end + + def self.hash_from_line(line, regex, fields) + hash = {} + if match = regex.match(line) + fields.zip(match.captures) { |field, value| + hash[field] = value + } + hash[:provider] = self.name + return hash + end + nil + end + + def self.instances + packages = [] + port("-q", :installed).each do |line| + if hash = parse_installed_query_line(line) + packages << new(hash) + end + end + packages + end + + def install + should = @resource.should(:ensure) + if [:latest, :installed, :present].include?(should) + output = port("-q", :install, @resource[:name]) + else + output = port("-q", :install, @resource[:name], "@#{should}") + end + # MacPorts now correctly exits non-zero with appropriate errors in + # situations where a port cannot be found or installed. + end + + def query + return self.class.parse_installed_query_line(port("-q", :installed, @resource[:name])) + end + + def latest + # We need both the version and the revision to be confident + # we've got the latest revision of a specific version + # Note we're still not doing anything with variants here. + info_line = port("-q", :info, "--line", "--version", "--revision", @resource[:name]) + return nil if info_line == "" + + if newest = self.class.parse_info_query_line(info_line) + current = query + # We're doing some fiddling behind the scenes here to cope with updated revisions. + # If we're already at the latest version/revision, then just return the version + # so the current and desired values match. Otherwise return version and revision + # to trigger an upgrade to the latest revision. 
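+      # e.g. installed 1.2.3_0 vs. available 1.2.3_1 returns "1.2.3_1" so that
+      # an upgrade to the newer revision is triggered.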
+ if newest[:version] == current[:ensure] and newest[:revision] == current[:revision] + return current[:ensure] + else + return "#{newest[:version]}_#{newest[:revision]}" + end + end + nil + end + + def uninstall + port("-q", :uninstall, @resource[:name]) + end + + def update + if query[:name] == @resource[:name] # 'port upgrade' cannot install new ports + port("-q", :upgrade, @resource[:name]) + else + install + end + end +end + diff --git a/mcollective/lib/puppet/provider/package/nim.rb b/mcollective/lib/puppet/provider/package/nim.rb new file mode 100644 index 000000000..8f52016db --- /dev/null +++ b/mcollective/lib/puppet/provider/package/nim.rb @@ -0,0 +1,35 @@ +require 'puppet/provider/package' +require 'puppet/util/package' + +Puppet::Type.type(:package).provide :nim, :parent => :aix, :source => :aix do + desc "Installation from NIM LPP source" + + # The commands we are using on an AIX box are installed standard + # (except nimclient) nimclient needs the bos.sysmgt.nim.client fileset. + commands :nimclient => "/usr/sbin/nimclient" + + # If NIM has not been configured, /etc/niminfo will not be present. + # However, we have no way of knowing if the NIM server is not configured + # properly. + confine :exists => "/etc/niminfo" + + has_feature :versionable + + attr_accessor :latest_info + + def self.srclistcmd(source) + [ command(:nimclient), "-o", "showres", "-a", "installp_flags=L", "-a", "resource=#{source}" ] + end + + def install(useversion = true) + unless source = @resource[:source] + self.fail "An LPP source location is required in 'source'" + end + + pkg = @resource[:name] + + pkg << " " << @resource.should(:ensure) if (! @resource.should(:ensure).is_a? Symbol) and useversion + + nimclient "-o", "cust", "-a", "installp_flags=acgwXY", "-a", "lpp_source=#{source}", "-a", "filesets='#{pkg}'" + end +end diff --git a/mcollective/lib/puppet/provider/package/openbsd.rb b/mcollective/lib/puppet/provider/package/openbsd.rb new file mode 100755 index 000000000..bb07d894a --- /dev/null +++ b/mcollective/lib/puppet/provider/package/openbsd.rb @@ -0,0 +1,116 @@ +require 'puppet/provider/package' + +# Packaging on OpenBSD. Doesn't work anywhere else that I know of. +Puppet::Type.type(:package).provide :openbsd, :parent => Puppet::Provider::Package do + desc "OpenBSD's form of `pkg_add` support." 
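+  # Illustrative manifest usage (the source below is an example; this provider
+  # always requires one):
+  #   package { 'ruby': ensure => installed, source => 'ftp://ftp.example.org/pub/OpenBSD/packages/i386/ruby-1.8.7.tgz' }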
+ + commands :pkginfo => "pkg_info", :pkgadd => "pkg_add", :pkgdelete => "pkg_delete" + + defaultfor :operatingsystem => :openbsd + confine :operatingsystem => :openbsd + + has_feature :versionable + + def self.instances + packages = [] + + begin + execpipe(listcmd) do |process| + # our regex for matching pkg_info output + regex = /^(.*)-(\d[^-]*)[-]?(\D*)(.*)$/ + fields = [:name, :ensure, :flavor ] + hash = {} + + # now turn each returned line into a package object + process.each { |line| + if match = regex.match(line.split[0]) + fields.zip(match.captures) { |field,value| + hash[field] = value + } + yup = nil + name = hash[:name] + + hash[:provider] = self.name + + packages << new(hash) + hash = {} + else + # Print a warning on lines we can't match, but move + # on, since it should be non-fatal + warning("Failed to match line #{line}") + end + } + end + + return packages + rescue Puppet::ExecutionFailure + return nil + end + end + + def self.listcmd + [command(:pkginfo), " -a"] + end + + def install + should = @resource.should(:ensure) + + unless @resource[:source] + raise Puppet::Error, + "You must specify a package source for BSD packages" + end + + if @resource[:source][-1,1] == ::File::PATH_SEPARATOR + e_vars = { :PKG_PATH => @resource[:source] } + full_name = [ @resource[:name], get_version || @resource[:ensure], @resource[:flavor] ].join('-').chomp('-') + else + e_vars = {} + full_name = @resource[:source] + end + + Puppet::Util::Execution::withenv(e_vars) { pkgadd full_name } + end + + def get_version + execpipe([command(:pkginfo), " -I ", @resource[:name]]) do |process| + # our regex for matching pkg_info output + regex = /^(.*)-(\d[^-]*)[-]?(\D*)(.*)$/ + fields = [ :name, :version, :flavor ] + master_version = 0 + + process.each do |line| + if match = regex.match(line.split[0]) + # now we return the first version, unless ensure is latest + version = match.captures[1] + return version unless @resource[:ensure] == "latest" + + master_version = version unless master_version > version + end + end + + return master_version unless master_version == 0 + raise Puppet::Error, "#{version} is not available for this package" + end + rescue Puppet::ExecutionFailure + return nil + end + + def query + hash = {} + info = pkginfo @resource[:name] + + # Search for the version info + if info =~ /Information for (inst:)?#{@resource[:name]}-(\S+)/ + hash[:ensure] = $2 + else + return nil + end + + hash + end + + def uninstall + pkgdelete @resource[:name] + end +end + diff --git a/mcollective/lib/puppet/provider/package/pkg.rb b/mcollective/lib/puppet/provider/package/pkg.rb new file mode 100644 index 000000000..9a3733c95 --- /dev/null +++ b/mcollective/lib/puppet/provider/package/pkg.rb @@ -0,0 +1,108 @@ +require 'puppet/provider/package' + +Puppet::Type.type(:package).provide :pkg, :parent => Puppet::Provider::Package do + desc "OpenSolaris image packaging system. 
See pkg(5) for more information" + + commands :pkg => "/usr/bin/pkg" + + confine :operatingsystem => :solaris + + #defaultfor [:operatingsystem => :solaris, :kernelrelease => "5.11"] + + def self.instances + packages = [] + + cmd = "#{command(:pkg)} list -H" + execpipe(cmd) do |process| + hash = {} + + # now turn each returned line into a package object + process.each { |line| + if hash = parse_line(line) + packages << new(hash) + end + } + end + + packages + end + + self::REGEX = %r{^(\S+)\s+(\S+)\s+(\S+)\s+} + self::FIELDS = [:name, :version, :status] + + def self.parse_line(line) + hash = {} + if match = self::REGEX.match(line) + + self::FIELDS.zip(match.captures) { |field,value| + hash[field] = value + } + + hash[:provider] = self.name + hash[:error] = "ok" + + if hash[:status] == "installed" + hash[:ensure] = :present + else + hash[:ensure] = :absent + end + else + Puppet.warning "Failed to match 'pkg list' line #{line.inspect}" + return nil + end + + hash + end + + # return the version of the package + # TODO deal with multiple publishers + def latest + version = nil + pkg(:list, "-Ha", @resource[:name]).split("\n").each do |line| + v = line.split[2] + case v + when "known" + return v + when "installed" + version = v + else + Puppet.warn "unknown package state for #{@resource[:name]}: #{v}" + end + end + version + end + + # install the package + def install + pkg :install, @resource[:name] + end + + # uninstall the package + def uninstall + pkg :uninstall, '-r', @resource[:name] + end + + # update the package to the latest version available + def update + self.install + end + + # list a specific package + def query + begin + output = pkg(:list, "-H", @resource[:name]) + rescue Puppet::ExecutionFailure + # pkg returns 1 if the package is not found. + return {:ensure => :absent, :status => 'missing', + :name => @resource[:name], :error => 'ok'} + end + + hash = self.class.parse_line(output) || + {:ensure => :absent, :status => 'missing', :name => @resource[:name], :error => 'ok'} + + raise Puppet::Error.new( "Package #{hash[:name]}, version #{hash[:version]} is in error state: #{hash[:error]}") if hash[:error] != "ok" + + hash + end + +end diff --git a/mcollective/lib/puppet/provider/package/pkgdmg.rb b/mcollective/lib/puppet/provider/package/pkgdmg.rb new file mode 100644 index 000000000..8f6c39c21 --- /dev/null +++ b/mcollective/lib/puppet/provider/package/pkgdmg.rb @@ -0,0 +1,135 @@ +# +# pkgdmg.rb +# +# Install Installer.app packages wrapped up inside a DMG image file. +# +# Copyright (C) 2007 Jeff McCune Jeff McCune +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation (version 2 of the License) +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston MA 02110-1301 USA +# +# Motivation: DMG files provide a true HFS file system +# and are easier to manage and .pkg bundles. +# +# Note: the 'apple' Provider checks for the package name +# in /L/Receipts. Since we install multiple pkg's from a single +# source, we treat the source .pkg.dmg file as the package name. 
+# As a result, we store installed .pkg.dmg file names +# in /var/db/.puppet_pkgdmg_installed_ + +require 'puppet/provider/package' +require 'facter/util/plist' + +Puppet::Type.type(:package).provide :pkgdmg, :parent => Puppet::Provider::Package do + desc "Package management based on Apple's Installer.app and DiskUtility.app. This package works by checking the contents of a DMG image for Apple pkg or mpkg files. Any number of pkg or mpkg files may exist in the root directory of the DMG file system. Sub directories are not checked for packages. See `the wiki docs ` for more detail." + + confine :operatingsystem => :darwin + defaultfor :operatingsystem => :darwin + commands :installer => "/usr/sbin/installer" + commands :hdiutil => "/usr/bin/hdiutil" + commands :curl => "/usr/bin/curl" + + # JJM We store a cookie for each installed .pkg.dmg in /var/db + def self.instance_by_name + Dir.entries("/var/db").find_all { |f| + f =~ /^\.puppet_pkgdmg_installed_/ + }.collect do |f| + name = f.sub(/^\.puppet_pkgdmg_installed_/, '') + yield name if block_given? + name + end + end + + def self.instances + instance_by_name.collect do |name| + new(:name => name, :provider => :pkgdmg, :ensure => :installed) + end + end + + def self.installpkg(source, name, orig_source) + installer "-pkg", source, "-target", "/" + # Non-zero exit status will throw an exception. + File.open("/var/db/.puppet_pkgdmg_installed_#{name}", "w") do |t| + t.print "name: '#{name}'\n" + t.print "source: '#{orig_source}'\n" + end + end + + def self.installpkgdmg(source, name) + unless source =~ /\.dmg$/i || source =~ /\.pkg$/i + raise Puppet::Error.new("Mac OS X PKG DMG's must specify a source string ending in .dmg or flat .pkg file") + end + require 'open-uri' + cached_source = source + tmpdir = Dir.mktmpdir + begin + if %r{\A[A-Za-z][A-Za-z0-9+\-\.]*://} =~ cached_source + cached_source = File.join(tmpdir, name) + begin + curl "-o", cached_source, "-C", "-", "-k", "-L", "-s", "--url", source + Puppet.debug "Success: curl transfered [#{name}]" + rescue Puppet::ExecutionFailure + Puppet.debug "curl did not transfer [#{name}]. Falling back to slower open-uri transfer methods." 
+ cached_source = source + end + end + + if source =~ /\.dmg$/i + File.open(cached_source) do |dmg| + xml_str = hdiutil "mount", "-plist", "-nobrowse", "-readonly", "-noidme", "-mountrandom", "/tmp", dmg.path + hdiutil_info = Plist::parse_xml(xml_str) + raise Puppet::Error.new("No disk entities returned by mount at #{dmg.path}") unless hdiutil_info.has_key?("system-entities") + mounts = hdiutil_info["system-entities"].collect { |entity| + entity["mount-point"] + }.compact + begin + mounts.each do |mountpoint| + Dir.entries(mountpoint).select { |f| + f =~ /\.m{0,1}pkg$/i + }.each do |pkg| + installpkg("#{mountpoint}/#{pkg}", name, source) + end + end + ensure + mounts.each do |mountpoint| + hdiutil "eject", mountpoint + end + end + end + else + installpkg(cached_source, name, source) + end + ensure + FileUtils.remove_entry_secure(tmpdir, force=true) + end + end + + def query + if FileTest.exists?("/var/db/.puppet_pkgdmg_installed_#{@resource[:name]}") + Puppet.debug "/var/db/.puppet_pkgdmg_installed_#{@resource[:name]} found" + return {:name => @resource[:name], :ensure => :present} + else + return nil + end + end + + def install + source = nil + unless source = @resource[:source] + raise Puppet::Error.new("Mac OS X PKG DMG's must specify a package source.") + end + unless name = @resource[:name] + raise Puppet::Error.new("Mac OS X PKG DMG's must specify a package name.") + end + self.class.installpkgdmg(source,name) + end +end + diff --git a/mcollective/lib/puppet/provider/package/portage.rb b/mcollective/lib/puppet/provider/package/portage.rb new file mode 100644 index 000000000..30f0e4a25 --- /dev/null +++ b/mcollective/lib/puppet/provider/package/portage.rb @@ -0,0 +1,122 @@ +require 'puppet/provider/package' +require 'fileutils' + +Puppet::Type.type(:package).provide :portage, :parent => Puppet::Provider::Package do + desc "Provides packaging support for Gentoo's portage system." + + has_feature :versionable + + commands :emerge => "/usr/bin/emerge", :eix => "/usr/bin/eix", :update_eix => "/usr/bin/eix-update" + + confine :operatingsystem => :gentoo + + defaultfor :operatingsystem => :gentoo + + def self.instances + result_format = /^(\S+)\s+(\S+)\s+\[(\S+)\]\s+\[(\S+)\]\s+(\S+)\s+(.*)$/ + result_fields = [:category, :name, :ensure, :version_available, :vendor, :description] + + version_format = "{last}{}" + search_format = " [] [] \n" + + begin + update_eix if !FileUtils.uptodate?("/var/cache/eix", %w{/usr/bin/eix /usr/portage/metadata/timestamp}) + + search_output = nil + Puppet::Util::Execution.withenv :LASTVERSION => version_format do + search_output = eix "--nocolor", "--pure-packages", "--stable", "--installed", "--format", search_format + end + + packages = [] + search_output.each do |search_result| + match = result_format.match(search_result) + + if match + package = {} + result_fields.zip(match.captures) do |field, value| + package[field] = value unless !value or value.empty? + end + package[:provider] = :portage + packages << new(package) + end + end + + return packages + rescue Puppet::ExecutionFailure => detail + raise Puppet::Error.new(detail) + end + end + + def install + should = @resource.should(:ensure) + name = package_name + unless should == :present or should == :latest + # We must install a specific version + name = "=#{name}-#{should}" + end + emerge name + end + + # The common package name format. + def package_name + @resource[:category] ? 
"#{@resource[:category]}/#{@resource[:name]}" : @resource[:name] + end + + def uninstall + emerge "--unmerge", package_name + end + + def update + self.install + end + + def query + result_format = /^(\S+)\s+(\S+)\s+\[(\S*)\]\s+\[(\S+)\]\s+(\S+)\s+(.*)$/ + result_fields = [:category, :name, :ensure, :version_available, :vendor, :description] + + version_format = "{last}{}" + search_format = " [] [] \n" + + search_field = package_name.count('/') > 0 ? "--category-name" : "--name" + search_value = package_name + + begin + update_eix if !FileUtils.uptodate?("/var/cache/eix", %w{/usr/bin/eix /usr/portage/metadata/timestamp}) + + search_output = nil + Puppet::Util::Execution.withenv :LASTVERSION => version_format do + search_output = eix "--nocolor", "--pure-packages", "--stable", "--format", search_format, "--exact", search_field, search_value + end + + packages = [] + search_output.each do |search_result| + match = result_format.match(search_result) + + if match + package = {} + result_fields.zip(match.captures) do |field, value| + package[field] = value unless !value or value.empty? + end + package[:ensure] = package[:ensure] ? package[:ensure] : :absent + packages << package + end + end + + case packages.size + when 0 + not_found_value = "#{@resource[:category] ? @resource[:category] : ""}/#{@resource[:name]}" + raise Puppet::Error.new("No package found with the specified name [#{not_found_value}]") + when 1 + return packages[0] + else + raise Puppet::Error.new("More than one package with the specified name [#{search_value}], please use the category parameter to disambiguate") + end + rescue Puppet::ExecutionFailure => detail + raise Puppet::Error.new(detail) + end + end + + def latest + self.query[:version_available] + end +end diff --git a/mcollective/lib/puppet/provider/package/ports.rb b/mcollective/lib/puppet/provider/package/ports.rb new file mode 100755 index 000000000..c8020928e --- /dev/null +++ b/mcollective/lib/puppet/provider/package/ports.rb @@ -0,0 +1,95 @@ +Puppet::Type.type(:package).provide :ports, :parent => :freebsd, :source => :freebsd do + desc "Support for FreeBSD's ports. Again, this still mixes packages and ports." 
+ + commands :portupgrade => "/usr/local/sbin/portupgrade", + :portversion => "/usr/local/sbin/portversion", + :portuninstall => "/usr/local/sbin/pkg_deinstall", + :portinfo => "/usr/sbin/pkg_info" + + defaultfor :operatingsystem => :freebsd + + # I hate ports + %w{INTERACTIVE UNAME}.each do |var| + ENV.delete(var) if ENV.include?(var) + end + + def install + # -N: install if the package is missing, otherwise upgrade + # -M: yes, we're a batch, so don't ask any questions + cmd = %w{-N -M BATCH=yes} << @resource[:name] + + output = portupgrade(*cmd) + if output =~ /\*\* No such / + raise Puppet::ExecutionFailure, "Could not find package #{@resource[:name]}" + end + end + + # If there are multiple packages, we only use the last one + def latest + cmd = ["-v", @resource[:name]] + + begin + output = portversion(*cmd) + rescue Puppet::ExecutionFailure + raise Puppet::Error.new(output) + end + line = output.split("\n").pop + + unless line =~ /^(\S+)\s+(\S)\s+(.+)$/ + # There's no "latest" version, so just return a placeholder + return :latest + end + + pkgstuff = $1 + match = $2 + info = $3 + + unless pkgstuff =~ /^(\S+)-([^-\s]+)$/ + raise Puppet::Error, + "Could not match package info '#{pkgstuff}'" + end + + name, version = $1, $2 + + if match == "=" or match == ">" + # we're up to date or more recent + return version + end + + # Else, we need to be updated; we need to pull out the new version + + unless info =~ /\((\w+) has (.+)\)/ + raise Puppet::Error, + "Could not match version info '#{info}'" + end + + source, newversion = $1, $2 + + debug "Newer version in #{source}" + newversion + end + + def query + # support portorigin_glob such as "mail/postfix" + name = self.name + if name =~ /\// + name = self.name.split(/\//).slice(1) + end + self.class.instances.each do |instance| + if instance.name == name + return instance.properties + end + end + + nil + end + + def uninstall + portuninstall @resource[:name] + end + + def update + install + end +end + diff --git a/mcollective/lib/puppet/provider/package/portupgrade.rb b/mcollective/lib/puppet/provider/package/portupgrade.rb new file mode 100644 index 000000000..45e35bbca --- /dev/null +++ b/mcollective/lib/puppet/provider/package/portupgrade.rb @@ -0,0 +1,251 @@ + +# Whole new package, so include pack stuff +require 'puppet/provider/package' + +Puppet::Type.type(:package).provide :portupgrade, :parent => Puppet::Provider::Package do + include Puppet::Util::Execution + + desc "Support for FreeBSD's ports using the portupgrade ports management software. + Use the port's full origin as the resource name. eg (ports-mgmt/portupgrade) + for the portupgrade port." + + ## has_features is usually autodetected based on defs below. + # has_features :installable, :uninstallable, :upgradeable + + commands :portupgrade => "/usr/local/sbin/portupgrade", + :portinstall => "/usr/local/sbin/portinstall", + :portversion => "/usr/local/sbin/portversion", + :portuninstall => "/usr/local/sbin/pkg_deinstall", + :portinfo => "/usr/sbin/pkg_info" + + ## Activate this only once approved by someone important. + # defaultfor :operatingsystem => :freebsd + + # Remove unwanted environment variables. 
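+  # These can leak in from the calling environment and change how the ports
+  # tools behave, so clear them before running any commands.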
+ %w{INTERACTIVE UNAME}.each do |var| + if ENV.include?(var) + ENV.delete(var) + end + end + + ######## instances sub command (builds the installed packages list) + + def self.instances + Puppet.debug "portupgrade.rb Building packages list from installed ports" + + # regex to match output from pkg_info + regex = %r{^(\S+)-([^-\s]+):(\S+)$} + # Corresponding field names + fields = [:portname, :ensure, :portorigin] + # define Temporary hash used, packages array of hashes + hash = Hash.new + packages = [] + + # exec command + cmdline = ["-aoQ"] + begin + output = portinfo(*cmdline) + rescue Puppet::ExecutionFailure + raise Puppet::Error.new(output) + return nil + end + + # split output and match it and populate temp hash + output.split("\n").each { |data| + # reset hash to nil for each line + hash.clear + if match = regex.match(data) + # Output matched regex + fields.zip(match.captures) { |field, value| + hash[field] = value + } + + # populate the actual :name field from the :portorigin + # Set :provider to this object name + hash[:name] = hash[:portorigin] + hash[:provider] = self.name + + # Add to the full packages listing + packages << new(hash) + + else + # unrecognised output from pkg_info + Puppet.debug "portupgrade.Instances() - unable to match output: #{data}" + end + } + + # return the packages array of hashes + return packages + + end + + ######## Installation sub command + + def install + Puppet.debug "portupgrade.install() - Installation call on #{@resource[:name]}" + # -M: yes, we're a batch, so don't ask any questions + cmdline = ["-M BATCH=yes", @resource[:name]] + + # FIXME: it's possible that portinstall prompts for data so locks up. + begin + output = portinstall(*cmdline) + rescue Puppet::ExecutionFailure + raise Puppet::Error.new(output) + end + + if output =~ /\*\* No such / + raise Puppet::ExecutionFailure, "Could not find package #{@resource[:name]}" + end + + # No return code required, so do nil to be clean + return nil + end + + ######## Latest subcommand (returns the latest version available, or current version if installed is latest) + + def latest + Puppet.debug "portupgrade.latest() - Latest check called on #{@resource[:name]}" + # search for latest version available, or return current version. + # cmdline = "portversion -v ", returns " " + # or "** No matching package found: " + cmdline = ["-v", @resource[:name]] + + begin + output = portversion(*cmdline) + rescue Puppet::ExecutionFailure + raise Puppet::Error.new(output) + end + + # Check: output format. + if output =~ /^\S+-([^-\s]+)\s+(\S)\s+(.*)/ + # $1 = installed version, $2 = comparison, $3 other data + # latest installed + installedversion = $1 + comparison = $2 + otherdata = $3 + + # Only return a new version number when it's clear that there is a new version + # all others return the current version so no unexpected 'upgrades' occur. 
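+    # portversion status characters handled below: '=' up to date, '<' older
+    # than the port, '>' newer than the port, '?'/'!'/'#' comparison problems.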
+ case comparison + when "=", ">" + Puppet.debug "portupgrade.latest() - Installed package is latest (#{installedversion})" + return installedversion + when "<" + # "portpkg-1.7_5 < needs updating (port has 1.14)" + # "portpkg-1.7_5 < needs updating (port has 1.14) (=> 'newport/pkg') + if otherdata =~ /\(port has (\S+)\)/ + newversion = $1 + Puppet.debug "portupgrade.latest() - Installed version needs updating to (#{newversion})" + return newversion + else + Puppet.debug "portupgrade.latest() - Unable to determine new version from (#{otherdata})" + return installedversion + end + when "?", "!", "#" + Puppet.debug "portupgrade.latest() - Comparison Error reported from portversion (#{output})" + return installedversion + else + Puppet.debug "portupgrade.latest() - Unknown code from portversion output (#{output})" + return installedversion + end + + else + # error: output not parsed correctly, error out with nil. + # Seriously - this section should never be called in a perfect world. + # as verification that the port is installed has already happened in query. + if output =~ /^\*\* No matching package / + raise Puppet::ExecutionFailure, "Could not find package #{@resource[:name]}" + else + # Any other error (dump output to log) + raise Puppet::ExecutionFailure, "Unexpected output from portversion: #{output}" + end + + # Just in case we still are running, return nil + return nil + end + + # At this point normal operation has finished and we shouldn't have been called. + # Error out and let the admin deal with it. + raise Puppet::Error, "portversion.latest() - fatal error with portversion: #{output}" + return nil + + end + + ###### Query subcommand - return a hash of details if exists, or nil if it doesn't. + # Used to make sure the package is installed + + def query + Puppet.debug "portupgrade.query() - Called on #{@resource[:name]}" + + cmdline = ["-qO", @resource[:name]] + begin + output = portinfo(*cmdline) + rescue Puppet::ExecutionFailure + raise Puppet::Error.new(output) + end + + # Check: if output isn't in the right format, return nil + if output =~ /^(\S+)-([^-\s]+)/ + # Fill in the details + hash = Hash.new + hash[:portorigin] = self.name + hash[:portname] = $1 + hash[:ensure] = $2 + + # If more details are required, then we can do another pkg_info query here + # and parse out that output and add to the hash + + # return the hash to the caller + return hash + else + Puppet.debug "portupgrade.query() - package (#{@resource[:name]}) not installed" + return nil + end + + end + + ####### Uninstall command + + def uninstall + Puppet.debug "portupgrade.uninstall() - called on #{@resource[:name]}" + # Get full package name from port origin to uninstall with + cmdline = ["-qO", @resource[:name]] + begin + output = portinfo(*cmdline) + rescue Puppet::ExecutionFailure + raise Puppet::Error.new(output) + end + + if output =~ /^(\S+)/ + # output matches, so uninstall it + portuninstall $1 + end + + end + + ######## Update/upgrade command + + def update + Puppet.debug "portupgrade.update() - called on (#{@resource[:name]})" + + cmdline = ["-qO", @resource[:name]] + begin + output = portinfo(*cmdline) + rescue Puppet::ExecutionFailure + raise Puppet::Error.new(output) + end + + if output =~ /^(\S+)/ + # output matches, so upgrade the software + cmdline = ["-M BATCH=yes", $1] + begin + output = portupgrade(*cmdline) + rescue Puppet::ExecutionFailure + raise Puppet::Error.new(output) + end + end + end + +## EOF +end + diff --git a/mcollective/lib/puppet/provider/package/rpm.rb 
b/mcollective/lib/puppet/provider/package/rpm.rb new file mode 100755 index 000000000..72dc260a4 --- /dev/null +++ b/mcollective/lib/puppet/provider/package/rpm.rb @@ -0,0 +1,132 @@ +require 'puppet/provider/package' +# RPM packaging. Should work anywhere that has rpm installed. +Puppet::Type.type(:package).provide :rpm, :source => :rpm, :parent => Puppet::Provider::Package do + desc "RPM packaging support; should work anywhere with a working `rpm` + binary." + + has_feature :versionable + + # The query format by which we identify installed packages + NEVRAFORMAT = "%{NAME} %|EPOCH?{%{EPOCH}}:{0}| %{VERSION} %{RELEASE} %{ARCH}" + NEVRA_FIELDS = [:name, :epoch, :version, :release, :arch] + + commands :rpm => "rpm" + + if command('rpm') + confine :true => begin + rpm('--version') + rescue Puppet::ExecutionFailure + false + else + true + end + end + + def self.instances + packages = [] + + # rpm < 4.1 don't support --nosignature + output = rpm "--version" + sig = "--nosignature" + if output =~ /RPM version (([123].*)|(4\.0.*))/ + sig = "" + end + + # list out all of the packages + begin + execpipe("#{command(:rpm)} -qa #{sig} --nodigest --qf '#{NEVRAFORMAT}\n'") { |process| + # now turn each returned line into a package object + process.each { |line| + hash = nevra_to_hash(line) + packages << new(hash) + } + } + rescue Puppet::ExecutionFailure + raise Puppet::Error, "Failed to list packages" + end + + packages + end + + # Find the fully versioned package name and the version alone. Returns + # a hash with entries :instance => fully versioned package name, and + # :ensure => version-release + def query + #NOTE: Prior to a fix for issue 1243, this method potentially returned a cached value + #IF YOU CALL THIS METHOD, IT WILL CALL RPM + #Use get(:property) to check if cached values are available + cmd = ["-q", @resource[:name], "--nosignature", "--nodigest", "--qf", "#{NEVRAFORMAT}\n"] + + begin + output = rpm(*cmd) + rescue Puppet::ExecutionFailure + return nil + end + + # FIXME: We could actually be getting back multiple packages + # for multilib + @property_hash.update(self.class.nevra_to_hash(output)) + + @property_hash.dup + end + + # Here we just retrieve the version from the file specified in the source. + def latest + unless source = @resource[:source] + @resource.fail "RPMs must specify a package source" + end + + cmd = [command(:rpm), "-q", "--qf", "#{NEVRAFORMAT}\n", "-p", "#{@resource[:source]}"] + h = self.class.nevra_to_hash(execfail(cmd, Puppet::Error)) + h[:ensure] + end + + def install + source = nil + unless source = @resource[:source] + @resource.fail "RPMs must specify a package source" + end + # RPM gets pissy if you try to install an already + # installed package + if @resource.should(:ensure) == @property_hash[:ensure] or + @resource.should(:ensure) == :latest && @property_hash[:ensure] == latest + return + end + + flag = "-i" + flag = "-U" if @property_hash[:ensure] and @property_hash[:ensure] != :absent + + rpm flag, "--oldpackage", source + end + + def uninstall + query unless get(:arch) + nvr = "#{get(:name)}-#{get(:version)}-#{get(:release)}" + arch = ".#{get(:arch)}" + # If they specified an arch in the manifest, erase that Otherwise, + # erase the arch we got back from the query. 
If multiple arches are + # installed and only the package name is specified (without the + # arch), this will uninstall all of them on successive runs of the + # client, one after the other + if @resource[:name][-arch.size, arch.size] == arch + nvr += arch + else + nvr += ".#{get(:arch)}" + end + rpm "-e", nvr + end + + def update + self.install + end + + def self.nevra_to_hash(line) + line.chomp! + hash = {} + NEVRA_FIELDS.zip(line.split) { |f, v| hash[f] = v } + hash[:provider] = self.name + hash[:ensure] = "#{hash[:version]}-#{hash[:release]}" + hash + end +end + diff --git a/mcollective/lib/puppet/provider/package/rug.rb b/mcollective/lib/puppet/provider/package/rug.rb new file mode 100644 index 000000000..28729952d --- /dev/null +++ b/mcollective/lib/puppet/provider/package/rug.rb @@ -0,0 +1,52 @@ +Puppet::Type.type(:package).provide :rug, :parent => :rpm do + desc "Support for suse `rug` package manager." + + has_feature :versionable + + commands :rug => "/usr/bin/rug" + commands :rpm => "rpm" + defaultfor :operatingsystem => [:suse, :sles] + confine :operatingsystem => [:suse, :sles] + + # Install a package using 'rug'. + def install + should = @resource.should(:ensure) + self.debug "Ensuring => #{should}" + wanted = @resource[:name] + + # XXX: We don't actually deal with epochs here. + case should + when true, false, Symbol + # pass + else + # Add the package version + wanted += "-#{should}" + end + output = rug "--quiet", :install, "-y", wanted + + unless self.query + raise Puppet::ExecutionFailure.new( + "Could not find package #{self.name}" + ) + end + end + + # What's the latest package version available? + def latest + #rug can only get a list of *all* available packages? + output = rug "list-updates" + + if output =~ /#{Regexp.escape @resource[:name]}\s*\|\s*([^\s\|]+)/ + return $1 + else + # rug didn't find updates, pretend the current + # version is the latest + return @property_hash[:ensure] + end + end + + def update + # rug install can be used for update, too + self.install + end +end diff --git a/mcollective/lib/puppet/provider/package/sun.rb b/mcollective/lib/puppet/provider/package/sun.rb new file mode 100755 index 000000000..05bbe9726 --- /dev/null +++ b/mcollective/lib/puppet/provider/package/sun.rb @@ -0,0 +1,155 @@ +# Sun packaging. + +require 'puppet/provider/package' + +Puppet::Type.type(:package).provide :sun, :parent => Puppet::Provider::Package do + desc "Sun's packaging system. Requires that you specify the source for + the packages you're managing." + commands :pkginfo => "/usr/bin/pkginfo", + :pkgadd => "/usr/sbin/pkgadd", + :pkgrm => "/usr/sbin/pkgrm" + + confine :operatingsystem => :solaris + + defaultfor :operatingsystem => :solaris + + def self.instances + packages = [] + hash = {} + names = { + "PKGINST" => :name, + "NAME" => nil, + "CATEGORY" => :category, + "ARCH" => :platform, + "VERSION" => :ensure, + "BASEDIR" => :root, + "HOTLINE" => nil, + "EMAIL" => nil, + "VENDOR" => :vendor, + "DESC" => :description, + "PSTAMP" => nil, + "INSTDATE" => nil, + "STATUS" => nil, + "FILES" => nil + } + + cmd = "#{command(:pkginfo)} -l" + + # list out all of the packages + execpipe(cmd) { |process| + # we're using the long listing, so each line is a separate + # piece of information + process.each { |line| + case line + when /^$/ + hash[:provider] = :sun + + packages << new(hash) + hash = {} + when /\s*(\w+):\s+(.+)/ + name = $1 + value = $2 + if names.include?(name) + hash[names[name]] = value unless names[name].nil? 
+ end + when /\s+\d+.+/ + # nothing; we're ignoring the FILES info + end + } + } + packages + end + + # Get info on a package, optionally specifying a device. + def info2hash(device = nil) + names = { + "PKGINST" => :name, + "NAME" => nil, + "CATEGORY" => :category, + "ARCH" => :platform, + "VERSION" => :ensure, + "BASEDIR" => :root, + "HOTLINE" => nil, + "EMAIL" => nil, + "VSTOCK" => nil, + "VENDOR" => :vendor, + "DESC" => :description, + "PSTAMP" => nil, + "INSTDATE" => nil, + "STATUS" => nil, + "FILES" => nil + } + + hash = {} + cmd = "#{command(:pkginfo)} -l" + cmd += " -d #{device}" if device + cmd += " #{@resource[:name]}" + + begin + # list out all of the packages + execpipe(cmd) { |process| + # we're using the long listing, so each line is a separate + # piece of information + process.readlines.each { |line| + case line + when /^$/ # ignore + when /\s*([A-Z]+):\s+(.+)/ + name = $1 + value = $2 + if names.include?(name) + hash[names[name]] = value unless names[name].nil? + end + when /\s+\d+.+/ + # nothing; we're ignoring the FILES info + end + } + } + return hash + rescue Puppet::ExecutionFailure => detail + return {:ensure => :absent} if detail.message =~ /information for "#{Regexp.escape(@resource[:name])}" was not found/ + puts detail.backtrace if Puppet[:trace] + raise Puppet::Error, "Unable to get information about package #{@resource[:name]} because of: #{detail}" + end + end + + def install + raise Puppet::Error, "Sun packages must specify a package source" unless @resource[:source] + cmd = [] + + cmd << "-a" << @resource[:adminfile] if @resource[:adminfile] + + cmd << "-r" << @resource[:responsefile] if @resource[:responsefile] + + cmd << "-d" << @resource[:source] + cmd << "-n" << @resource[:name] + + pkgadd cmd + end + + # Retrieve the version from the current package file. + def latest + hash = info2hash(@resource[:source]) + hash[:ensure] + end + + def query + info2hash() + end + + def uninstall + command = ["-n"] + + command << "-a" << @resource[:adminfile] if @resource[:adminfile] + + command << @resource[:name] + pkgrm command + end + + # Remove the old package, and install the new one. This will probably + # often fail. + def update + self.uninstall if (@property_hash[:ensure] || info2hash()[:ensure]) != :absent + self.install + end +end + diff --git a/mcollective/lib/puppet/provider/package/sunfreeware.rb b/mcollective/lib/puppet/provider/package/sunfreeware.rb new file mode 100755 index 000000000..4745ea1eb --- /dev/null +++ b/mcollective/lib/puppet/provider/package/sunfreeware.rb @@ -0,0 +1,11 @@ +# At this point, it's an exact copy of the Blastwave stuff. +Puppet::Type.type(:package).provide :sunfreeware, :parent => :blastwave, :source => :sun do + desc "Package management using sunfreeware.com's `pkg-get` command on Solaris. + At this point, support is exactly the same as `blastwave` support and + has not actually been tested." + commands :pkgget => "pkg-get" + + confine :operatingsystem => :solaris + +end + diff --git a/mcollective/lib/puppet/provider/package/up2date.rb b/mcollective/lib/puppet/provider/package/up2date.rb new file mode 100644 index 000000000..243bc6c6b --- /dev/null +++ b/mcollective/lib/puppet/provider/package/up2date.rb @@ -0,0 +1,42 @@ +Puppet::Type.type(:package).provide :up2date, :parent => :rpm, :source => :rpm do + desc "Support for Red Hat's proprietary `up2date` package update + mechanism." 
+ + commands :up2date => "/usr/sbin/up2date-nox" + + defaultfor :operatingsystem => [:redhat, :oel, :ovm], + :lsbdistrelease => ["2.1", "3", "4"] + + confine :operatingsystem => [:redhat, :oel, :ovm] + + # Install a package using 'up2date'. + def install + up2date "-u", @resource[:name] + + unless self.query + raise Puppet::ExecutionFailure.new( + "Could not find package #{self.name}" + ) + end + end + + # What's the latest package version available? + def latest + #up2date can only get a list of *all* available packages? + output = up2date "--showall" + + if output =~ /^#{Regexp.escape @resource[:name]}-(\d+.*)\.\w+/ + return $1 + else + # up2date didn't find updates, pretend the current + # version is the latest + return @property_hash[:ensure] + end + end + + def update + # Install in up2date can be used for update, too + self.install + end +end + diff --git a/mcollective/lib/puppet/provider/package/urpmi.rb b/mcollective/lib/puppet/provider/package/urpmi.rb new file mode 100644 index 000000000..425d77849 --- /dev/null +++ b/mcollective/lib/puppet/provider/package/urpmi.rb @@ -0,0 +1,60 @@ +Puppet::Type.type(:package).provide :urpmi, :parent => :rpm, :source => :rpm do + desc "Support via `urpmi`." + commands :urpmi => "urpmi", :urpmq => "urpmq", :rpm => "rpm" + + if command('rpm') + confine :true => begin + rpm('-ql', 'rpm') + rescue Puppet::ExecutionFailure + false + else + true + end + end + + defaultfor :operatingsystem => [:mandriva, :mandrake] + + has_feature :versionable + + def install + should = @resource.should(:ensure) + self.debug "Ensuring => #{should}" + wanted = @resource[:name] + + # XXX: We don't actually deal with epochs here. + case should + when true, false, Symbol + # pass + else + # Add the package version + wanted += "-#{should}" + end + + output = urpmi "--auto", wanted + + unless self.query + raise Puppet::Error.new( + "Could not find package #{self.name}" + ) + end + end + + # What's the latest package version available? + def latest + output = urpmq "-S", @resource[:name] + + if output =~ /^#{Regexp.escape @resource[:name]}\s+:\s+.*\(\s+(\S+)\s+\)/ + return $1 + else + # urpmi didn't find updates, pretend the current + # version is the latest + return @resource[:ensure] + end + end + + def update + # Install in urpmi can be used for update, too + self.install + end +end + diff --git a/mcollective/lib/puppet/provider/package/yum.rb b/mcollective/lib/puppet/provider/package/yum.rb new file mode 100755 index 000000000..fcda5ba8c --- /dev/null +++ b/mcollective/lib/puppet/provider/package/yum.rb @@ -0,0 +1,100 @@ +Puppet::Type.type(:package).provide :yum, :parent => :rpm, :source => :rpm do + desc "Support via `yum`." + + has_feature :versionable + + commands :yum => "yum", :rpm => "rpm", :python => "python" + + YUMHELPER = File::join(File::dirname(__FILE__), "yumhelper.py") + + attr_accessor :latest_info + + if command('rpm') + confine :true => begin + rpm('--version') + rescue Puppet::ExecutionFailure + false + else + true + end + end + + defaultfor :operatingsystem => [:fedora, :centos, :redhat] + + def self.prefetch(packages) + raise Puppet::Error, "The yum provider can only be used as root" if Process.euid != 0 + super + return unless packages.detect { |name, package| package.should(:ensure) == :latest } + + # collect our 'latest' info + updates = {} + python(YUMHELPER).each_line do |l| + l.chomp! + next if l.empty? 
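+        # Each line emitted by yumhelper.py for a pending update looks like:
+        #   _pkg <name> <epoch> <version> <release> <arch>
+        # so everything after the "_pkg " prefix can be fed to nevra_to_hash.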
+ if l[0,4] == "_pkg" + hash = nevra_to_hash(l[5..-1]) + [hash[:name], "#{hash[:name]}.#{hash[:arch]}"].each do |n| + updates[n] ||= [] + updates[n] << hash + end + end + end + + # Add our 'latest' info to the providers. + packages.each do |name, package| + if info = updates[package[:name]] + package.provider.latest_info = info[0] + end + end + end + + def install + should = @resource.should(:ensure) + self.debug "Ensuring => #{should}" + wanted = @resource[:name] + + # XXX: We don't actually deal with epochs here. + case should + when true, false, Symbol + # pass + should = nil + else + # Add the package version + wanted += "-#{should}" + end + + output = yum "-d", "0", "-e", "0", "-y", :install, wanted + + is = self.query + raise Puppet::Error, "Could not find package #{self.name}" unless is + + # FIXME: Should we raise an exception even if should == :latest + # and yum updated us to a version other than @param_hash[:ensure] ? + raise Puppet::Error, "Failed to update to version #{should}, got version #{is[:ensure]} instead" if should && should != is[:ensure] + end + + # What's the latest package version available? + def latest + upd = latest_info + unless upd.nil? + # FIXME: there could be more than one update for a package + # because of multiarch + return "#{upd[:version]}-#{upd[:release]}" + else + # Yum didn't find updates, pretend the current + # version is the latest + raise Puppet::DevError, "Tried to get latest on a missing package" if properties[:ensure] == :absent + return properties[:ensure] + end + end + + def update + # Install in yum can be used for update, too + self.install + end + + def purge + yum "-y", :erase, @resource[:name] + end +end + diff --git a/mcollective/lib/puppet/provider/package/yumhelper.py b/mcollective/lib/puppet/provider/package/yumhelper.py new file mode 100644 index 000000000..e7b5fe13d --- /dev/null +++ b/mcollective/lib/puppet/provider/package/yumhelper.py @@ -0,0 +1,129 @@ +# Python helper script to query for the packages that have +# pending updates. Called by the yum package provider +# +# (C) 2007 Red Hat Inc. +# David Lutterkort + +import sys +import string +import re + +# this maintains compatibility with really old platforms with python 1.x +from os import popen, WEXITSTATUS + +# Try to use the yum libraries by default, but shell out to the yum executable +# if they are not present (i.e. yum <= 2.0). This is only required for RHEL3 +# and earlier that do not support later versions of Yum. Once RHEL3 is EOL, +# shell_out() and related code can be removed. +try: + import yum +except ImportError: + useyumlib = 0 +else: + useyumlib = 1 + +OVERRIDE_OPTS = { + 'debuglevel': 0, + 'errorlevel': 0, + 'logfile': '/dev/null' +} + +def pkg_lists(my): + my.doConfigSetup() + + for k in OVERRIDE_OPTS.keys(): + if hasattr(my.conf, k): + setattr(my.conf, k, OVERRIDE_OPTS[k]) + else: + my.conf.setConfigOption(k, OVERRIDE_OPTS[k]) + + my.doTsSetup() + my.doRpmDBSetup() + + # Yum 2.2/2.3 python libraries require a couple of extra function calls to setup package sacks. + # They also don't have a __version__ attribute + try: + yumver = yum.__version__ + except AttributeError: + my.doRepoSetup() + my.doSackSetup() + + return my.doPackageLists('updates') + +def shell_out(): + try: + p = popen("/usr/bin/env yum check-update 2>&1") + output = p.readlines() + rc = p.close() + + if rc is not None: + # None represents exit code of 0, otherwise the exit code is in the + # format returned by wait(). Exit code of 100 from yum represents + # updates available. 
+ if WEXITSTATUS(rc) != 100: + return WEXITSTATUS(rc) + else: + # Exit code is None (0), no updates waiting so don't both parsing output + return 0 + + # Yum prints a line of hyphens (old versions) or a blank line between + # headers and package data, so skip everything before them + skipheaders = 0 + for line in output: + if not skipheaders: + if re.compile("^((-){80}|)$").search(line): + skipheaders = 1 + continue + + # Skip any blank lines + if re.compile("^[ \t]*$").search(line): + continue + + # Format is: + # Yum 1.x: name arch (epoch:)?version + # Yum 2.0: name arch (epoch:)?version repo + # epoch is optional if 0 + + p = string.split(line) + pname = p[0] + parch = p[1] + pevr = p[2] + + # Separate out epoch:version-release + evr_re = re.compile("^(\d:)?(\S+)-(\S+)$") + evr = evr_re.match(pevr) + + pepoch = "" + if evr.group(1) is None: + pepoch = "0" + else: + pepoch = evr.group(1).replace(":", "") + pversion = evr.group(2) + prelease = evr.group(3) + + print "_pkg", pname, pepoch, pversion, prelease, parch + + return 0 + except: + print sys.exc_info()[0] + return 1 + +if useyumlib: + try: + try: + my = yum.YumBase() + ypl = pkg_lists(my) + for pkg in ypl.updates: + print "_pkg %s %s %s %s %s" % (pkg.name, pkg.epoch, pkg.version, pkg.release, pkg.arch) + finally: + my.closeRpmDB() + except IOError, e: + print "_err IOError %d %s" % (e.errno, e) + sys.exit(1) + except AttributeError, e: + # catch yumlib errors in buggy 2.x versions of yum + print "_err AttributeError %s" % e + sys.exit(1) +else: + rc = shell_out() + sys.exit(rc) diff --git a/mcollective/lib/puppet/provider/package/zypper.rb b/mcollective/lib/puppet/provider/package/zypper.rb new file mode 100644 index 000000000..f129ef6e6 --- /dev/null +++ b/mcollective/lib/puppet/provider/package/zypper.rb @@ -0,0 +1,52 @@ +Puppet::Type.type(:package).provide :zypper, :parent => :rpm do + desc "Support for SuSE `zypper` package manager. Found in SLES10sp2+ and SLES11" + + has_feature :versionable + + commands :zypper => "/usr/bin/zypper" + commands :rpm => "rpm" + + confine :operatingsystem => [:suse, :sles, :sled, :opensuse] + + # Install a package using 'zypper'. + def install + should = @resource.should(:ensure) + self.debug "Ensuring => #{should}" + wanted = @resource[:name] + + # XXX: We don't actually deal with epochs here. + case should + when true, false, Symbol + # pass + else + # Add the package version + wanted = "#{wanted}-#{should}" + end + output = zypper "--quiet", :install, "-l", "-y", wanted + + unless self.query + raise Puppet::ExecutionFailure.new( + "Could not find package #{self.name}" + ) + end + end + + # What's the latest package version available? + def latest + #zypper can only get a list of *all* available packages? + output = zypper "list-updates" + + if output =~ /#{Regexp.escape @resource[:name]}\s*\|\s*([^\s\|]+)/ + return $1 + else + # zypper didn't find updates, pretend the current + # version is the latest + return @property_hash[:ensure] + end + end + + def update + # zypper install can be used for update, too + self.install + end +end diff --git a/mcollective/lib/puppet/provider/parsedfile.rb b/mcollective/lib/puppet/provider/parsedfile.rb new file mode 100755 index 000000000..75a215f4b --- /dev/null +++ b/mcollective/lib/puppet/provider/parsedfile.rb @@ -0,0 +1,374 @@ +require 'puppet' +require 'puppet/util/filetype' +require 'puppet/util/fileparsing' + +# This provider can be used as the parent class for a provider that +# parses and generates files. 
Its content must be loaded via the +# 'prefetch' method, and the file will be written when 'flush' is called +# on the provider instance. At this point, the file is written once +# for every provider instance. +# +# Once the provider prefetches the data, it's the resource's job to copy +# that data over to the @is variables. +class Puppet::Provider::ParsedFile < Puppet::Provider + extend Puppet::Util::FileParsing + + class << self + attr_accessor :default_target, :target + end + + attr_accessor :property_hash + + def self.clean(hash) + newhash = hash.dup + [:record_type, :on_disk].each do |p| + newhash.delete(p) if newhash.include?(p) + end + + newhash + end + + def self.clear + @target_objects.clear + @records.clear + end + + def self.filetype + @filetype ||= Puppet::Util::FileType.filetype(:flat) + end + + def self.filetype=(type) + if type.is_a?(Class) + @filetype = type + elsif klass = Puppet::Util::FileType.filetype(type) + @filetype = klass + else + raise ArgumentError, "Invalid filetype #{type}" + end + end + + # Flush all of the targets for which there are modified records. The only + # reason we pass a record here is so that we can add it to the stack if + # necessary -- it's passed from the instance calling 'flush'. + def self.flush(record) + # Make sure this record is on the list to be flushed. + unless record[:on_disk] + record[:on_disk] = true + @records << record + + # If we've just added the record, then make sure our + # target will get flushed. + modified(record[:target] || default_target) + end + + return unless defined?(@modified) and ! @modified.empty? + + flushed = [] + @modified.sort { |a,b| a.to_s <=> b.to_s }.uniq.each do |target| + Puppet.debug "Flushing #{@resource_type.name} provider target #{target}" + flush_target(target) + flushed << target + end + + @modified.reject! { |t| flushed.include?(t) } + end + + # Make sure our file is backed up, but only back it up once per transaction. + # We cheat and rely on the fact that @records is created on each prefetch. + def self.backup_target(target) + return nil unless target_object(target).respond_to?(:backup) + + @backup_stats ||= {} + return nil if @backup_stats[target] == @records.object_id + + target_object(target).backup + @backup_stats[target] = @records.object_id + end + + # Flush all of the records relating to a specific target. + def self.flush_target(target) + backup_target(target) + + records = target_records(target).reject { |r| + r[:ensure] == :absent + } + target_object(target).write(to_file(records)) + end + + # Return the header placed at the top of each generated file, warning + # users that modifying this file manually is probably a bad idea. + def self.header +%{# HEADER: This file was autogenerated at #{Time.now} +# HEADER: by puppet. While it can still be managed manually, it +# HEADER: is definitely not recommended.\n} + end + + # Add another type var. + def self.initvars + @records = [] + @target_objects = {} + + @target = nil + + # Default to flat files + @filetype ||= Puppet::Util::FileType.filetype(:flat) + super + end + + # Return a list of all of the records we can find. + def self.instances + targets.collect do |target| + prefetch_target(target) + end.flatten.reject { |r| skip_record?(r) }.collect do |record| + new(record) + end + end + + # Override the default method with a lot more functionality. + def self.mk_resource_methods + [resource_type.validproperties, resource_type.parameters].flatten.each do |attr| + attr = symbolize(attr) + define_method(attr) do +# if @property_hash.empty? 
+# # Note that this swaps the provider out from under us. +# prefetch +# if @resource.provider == self +# return @property_hash[attr] +# else +# return @resource.provider.send(attr) +# end +# end + # If it's not a valid field for this record type (which can happen + # when different platforms support different fields), then just + # return the should value, so the resource shuts up. + if @property_hash[attr] or self.class.valid_attr?(self.class.name, attr) + @property_hash[attr] || :absent + else + if defined?(@resource) + @resource.should(attr) + else + nil + end + end + end + + define_method(attr.to_s + "=") do |val| + mark_target_modified + @property_hash[attr] = val + end + end + end + + # Always make the resource methods. + def self.resource_type=(resource) + super + mk_resource_methods + end + + # Mark a target as modified so we know to flush it. This only gets + # used within the attr= methods. + def self.modified(target) + @modified ||= [] + @modified << target unless @modified.include?(target) + end + + # Retrieve all of the data from disk. There are three ways to know + # which files to retrieve: We might have a list of file objects already + # set up, there might be instances of our associated resource and they + # will have a path parameter set, and we will have a default path + # set. We need to turn those three locations into a list of files, + # prefetch each one, and make sure they're associated with each appropriate + # resource instance. + def self.prefetch(resources = nil) + # Reset the record list. + @records = prefetch_all_targets(resources) + + match_providers_with_resources(resources) + end + + def self.match_providers_with_resources(resources) + return unless resources + matchers = resources.dup + @records.each do |record| + # Skip things like comments and blank lines + next if skip_record?(record) + + if name = record[:name] and resource = resources[name] + resource.provider = new(record) + elsif respond_to?(:match) + if resource = match(record, matchers) + # Remove this resource from circulation so we don't unnecessarily try to match + matchers.delete(resource.title) + record[:name] = resource[:name] + resource.provider = new(record) + end + end + end + end + + def self.prefetch_all_targets(resources) + records = [] + targets(resources).each do |target| + records += prefetch_target(target) + end + records + end + + # Prefetch an individual target. + def self.prefetch_target(target) + target_records = retrieve(target).each do |r| + r[:on_disk] = true + r[:target] = target + r[:ensure] = :present + end + + target_records = prefetch_hook(target_records) if respond_to?(:prefetch_hook) + + raise Puppet::DevError, "Prefetching #{target} for provider #{self.name} returned nil" unless target_records + + target_records + end + + # Is there an existing record with this name? + def self.record?(name) + return nil unless @records + @records.find { |r| r[:name] == name } + end + + # Retrieve the text for the file. Returns nil in the unlikely + # event that it doesn't exist. + def self.retrieve(path) + # XXX We need to be doing something special here in case of failure. + text = target_object(path).read + if text.nil? or text == "" + # there is no file + return [] + else + # Set the target, for logging. + old = @target + begin + @target = path + return self.parse(text) + rescue Puppet::Error => detail + detail.file = @target + raise detail + ensure + @target = old + end + end + end + + # Should we skip the record? Basically, we skip text records. 
+ # This is only here so subclasses can override it. + def self.skip_record?(record) + record_type(record[:record_type]).text? + end + + # Initialize the object if necessary. + def self.target_object(target) + @target_objects[target] ||= filetype.new(target) + + @target_objects[target] + end + + # Find all of the records for a given target + def self.target_records(target) + @records.find_all { |r| r[:target] == target } + end + + # Find a list of all of the targets that we should be reading. This is + # used to figure out what targets we need to prefetch. + def self.targets(resources = nil) + targets = [] + # First get the default target + raise Puppet::DevError, "Parsed Providers must define a default target" unless self.default_target + targets << self.default_target + + # Then get each of the file objects + targets += @target_objects.keys + + # Lastly, check the file from any resource instances + if resources + resources.each do |name, resource| + if value = resource.should(:target) + targets << value + end + end + end + + targets.uniq.compact + end + + def self.to_file(records) + text = super + header + text + end + + def create + @resource.class.validproperties.each do |property| + if value = @resource.should(property) + @property_hash[property] = value + end + end + mark_target_modified + (@resource.class.name.to_s + "_created").intern + end + + def destroy + # We use the method here so it marks the target as modified. + self.ensure = :absent + (@resource.class.name.to_s + "_deleted").intern + end + + def exists? + !(@property_hash[:ensure] == :absent or @property_hash[:ensure].nil?) + end + + # Write our data to disk. + def flush + # Make sure we've got a target and name set. + + # If the target isn't set, then this is our first modification, so + # mark it for flushing. + unless @property_hash[:target] + @property_hash[:target] = @resource.should(:target) || self.class.default_target + self.class.modified(@property_hash[:target]) + end + @resource.class.key_attributes.each do |attr| + @property_hash[attr] ||= @resource[attr] + end + + self.class.flush(@property_hash) + + #@property_hash = {} + end + + def initialize(record) + super + + # The 'record' could be a resource or a record, depending on how the provider + # is initialized. If we got an empty property hash (probably because the resource + # is just being initialized), then we want to set up some defualts. + @property_hash = self.class.record?(resource[:name]) || {:record_type => self.class.name, :ensure => :absent} if @property_hash.empty? + end + + # Retrieve the current state from disk. + def prefetch + raise Puppet::DevError, "Somehow got told to prefetch with no resource set" unless @resource + self.class.prefetch(@resource[:name] => @resource) + end + + def record_type + @property_hash[:record_type] + end + + private + + # Mark both the resource and provider target as modified. 
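+  # Targets marked as modified are collected in the class-level @modified
+  # list, which self.class.flush uses to decide which files to rewrite.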
+ def mark_target_modified + if defined?(@resource) and restarget = @resource.should(:target) and restarget != @property_hash[:target] + self.class.modified(restarget) + end + self.class.modified(@property_hash[:target]) if @property_hash[:target] != :absent and @property_hash[:target] + end +end diff --git a/mcollective/lib/puppet/provider/port/parsed.rb b/mcollective/lib/puppet/provider/port/parsed.rb new file mode 100755 index 000000000..5c973b6af --- /dev/null +++ b/mcollective/lib/puppet/provider/port/parsed.rb @@ -0,0 +1,173 @@ +require 'puppet/provider/parsedfile' + +#services = nil +#case Facter.value(:operatingsystem) +#when "Solaris"; services = "/etc/inet/services" +#else +# services = "/etc/services" +#end +# +#Puppet::Type.type(:port).provide(:parsed, +# :parent => Puppet::Provider::ParsedFile, +# :default_target => services, +# :filetype => :flat +#) do +# text_line :comment, :match => /^\s*#/ +# text_line :blank, :match => /^\s*$/ +# +# # We're cheating horribly here -- we don't support ddp, because it assigns +# # the same number to already-used names, and the same name to different +# # numbers. +# text_line :ddp, :match => /^\S+\s+\d+\/ddp/ +# +# # Also, just ignore the lines on OS X that don't have service names. +# text_line :funky_darwin, :match => /^\s+\d+\// +# +# # We have to manually parse the line, since it's so darn complicated. +# record_line :parsed, :fields => %w{name port protocols alias description}, +# :optional => %w{alias description} do |line| +# if line =~ /\/ddp/ +# raise "missed ddp in #{line}" +# end +# # The record might contain multiple port lines separated by \n. +# hashes = line.split("\n").collect { |l| parse_port(l) } +# +# # It's easy if there's just one hash. +# if hashes.length == 1 +# return hashes.shift +# end +# +# # Else, merge the two records into one. +# return port_merge(*hashes) +# end +# +# # Override how we split into lines, so that we always treat both protocol +# # lines as a single line. This drastically simplifies merging the two lines +# # into one record. +# def self.lines(text) +# names = {} +# lines = [] +# +# # We organize by number, because that's apparently how the ports work. +# # You'll never be able to use Puppet to manage multiple entries +# # with the same name but different numbers, though. +# text.split("\n").each do |line| +# if line =~ /^([-\w]+)\s+(\d+)\/[^d]/ # We want to skip ddp proto stuff +# names[$1] ||= [] +# names[$1] << line +# lines << [:special, $1] +# else +# lines << line +# end +# end +# +# # Now, return each line in order, but join the ones with the same name +# lines.collect do |line| +# if line.is_a?(Array) +# name = line[1] +# if names[name] +# t = names[name].join("\n") +# names.delete(name) +# t +# end +# else +# line +# end +# end.reject { |l| l.nil? } +# end +# +# # Parse a single port line, returning a hash. +# def self.parse_port(line) +# hash = {} +# if line.sub!(/^(\S+)\s+(\d+)\/(\w+)\s*/, '') +# hash[:name] = $1 +# hash[:number] = $2 +# hash[:protocols] = [$3] +# +# unless line == "" +# line.sub!(/^([^#]+)\s*/) do |value| +# aliases = $1 +# +# # Remove any trailing whitespace +# aliases.strip! 
+# unless aliases =~ /^\s*$/ +# hash[:alias] = aliases.split(/\s+/) +# end +# +# "" +# end +# +# line.sub!(/^\s*#\s*(.+)$/) do |value| +# desc = $1 +# unless desc =~ /^\s*$/ +# hash[:description] = desc.sub(/\s*$/, '') +# end +# +# "" +# end +# end +# else +# if line =~ /^\s+\d+/ and +# Facter["operatingsystem"].value == "Darwin" +# #Puppet.notice "Skipping wonky OS X port entry %s" % +# # line.inspect +# next +# end +# Puppet.notice "Ignoring unparseable line '#{line}' in #{self.target}" +# end +# +# if hash.empty? +# return nil +# else +# return hash +# end +# end +# +# # Merge two records into one. +# def self.port_merge(one, two) +# keys = [one.keys, two.keys].flatten.uniq +# +# # We'll be returning the 'one' hash. so make any necessary modifications +# # to it. +# keys.each do |key| +# # The easy case +# if one[key] == two[key] +# next +# elsif one[key] and ! two[key] +# next +# elsif ! one[key] and two[key] +# one[key] = two[key] +# elsif one[key].is_a?(Array) and two[key].is_a?(Array) +# one[key] = [one[key], two[key]].flatten.uniq +# else +# # Keep the info from the first hash, so don't do anything +# #Puppet.notice "Cannot merge %s in %s with %s" % +# # [key, one.inspect, two.inspect] +# end +# end +# +# return one +# end +# +# # Convert the current object into one or more services entry. +# def self.to_line(hash) +# unless hash[:record_type] == :parsed +# return super +# end +# +# # Strangely, most sites seem to use tabs as separators. +# hash[:protocols].collect { |proto| +# str = "#{hash[:name]}\t\t#{hash[:number]}/#{proto}" +# +# if value = hash[:alias] and value != :absent +# str += "\t\t#{value.join(" ")}" +# end +# +# if value = hash[:description] and value != :absent +# str += "\t# #{value}" +# end +# str +# }.join("\n") +# end +#end + diff --git a/mcollective/lib/puppet/provider/selboolean/getsetsebool.rb b/mcollective/lib/puppet/provider/selboolean/getsetsebool.rb new file mode 100644 index 000000000..cacc41386 --- /dev/null +++ b/mcollective/lib/puppet/provider/selboolean/getsetsebool.rb @@ -0,0 +1,47 @@ +Puppet::Type.type(:selboolean).provide(:getsetsebool) do + desc "Manage SELinux booleans using the getsebool and setsebool binaries." + + commands :getsebool => "/usr/sbin/getsebool" + commands :setsebool => "/usr/sbin/setsebool" + + def value + self.debug "Retrieving value of selboolean #{@resource[:name]}" + + status = getsebool(@resource[:name]) + + if status =~ / off$/ + return :off + elsif status =~ / on$/ then + return :on + else + status.chomp! + raise Puppet::Error, "Invalid response '#{status}' returned from getsebool" + end + end + + def value=(new) + persist = "" + if @resource[:persistent] == :true + self.debug "Enabling persistence" + persist = "-P" + end + execoutput("#{command(:setsebool)} #{persist} #{@resource[:name]} #{new}") + :file_changed + end + + # Required workaround, since SELinux policy prevents setsebool + # from writing to any files, even tmp, preventing the standard + # 'setsebool("...")' construct from working. + + def execoutput (cmd) + output = '' + begin + execpipe(cmd) do |out| + output = out.readlines.join('').chomp! 
+ end + rescue Puppet::ExecutionFailure + raise Puppet::ExecutionFailure, output.split("\n")[0] + end + output + end +end diff --git a/mcollective/lib/puppet/provider/selmodule/semodule.rb b/mcollective/lib/puppet/provider/selmodule/semodule.rb new file mode 100644 index 000000000..64197156f --- /dev/null +++ b/mcollective/lib/puppet/provider/selmodule/semodule.rb @@ -0,0 +1,135 @@ +Puppet::Type.type(:selmodule).provide(:semodule) do + desc "Manage SELinux policy modules using the semodule binary." + + commands :semodule => "/usr/sbin/semodule" + + def create + begin + execoutput("#{command(:semodule)} --install #{selmod_name_to_filename}") + rescue Puppet::ExecutionFailure => detail + raise Puppet::Error, "Could not load policy module: #{detail}"; + end + :true + end + + def destroy + execoutput("#{command(:semodule)} --remove #{@resource[:name]}") + rescue Puppet::ExecutionFailure => detail + raise Puppet::Error, "Could not remove policy module: #{detail}"; + end + + def exists? + self.debug "Checking for module #{@resource[:name]}" + execpipe("#{command(:semodule)} --list") do |out| + out.each do |line| + if line =~ /#{@resource[:name]}\b/ + return :true + end + end + end + nil + end + + def syncversion + self.debug "Checking syncversion on #{@resource[:name]}" + + loadver = selmodversion_loaded + + if(loadver) then + filever = selmodversion_file + if (filever == loadver) + return :true + end + end + :false + end + + def syncversion= (dosync) + execoutput("#{command(:semodule)} --upgrade #{selmod_name_to_filename}") + rescue Puppet::ExecutionFailure => detail + raise Puppet::Error, "Could not upgrade policy module: #{detail}"; + end + + # Helper functions + + def execoutput (cmd) + output = '' + begin + execpipe(cmd) do |out| + output = out.readlines.join('').chomp! + end + rescue Puppet::ExecutionFailure + raise Puppet::ExecutionFailure, output.split("\n")[0] + end + output + end + + def selmod_name_to_filename + if @resource[:selmodulepath] + return @resource[:selmodulepath] + else + return "#{@resource[:selmoduledir]}/#{@resource[:name]}.pp" + end + end + + def selmod_readnext (handle) + len = handle.read(4).unpack('L')[0] + handle.read(len) + end + + def selmodversion_file + magic = 0xF97CFF8F + + filename = selmod_name_to_filename + mod = File.new(filename, "r") + + (hdr, ver, numsec) = mod.read(12).unpack('LLL') + + raise Puppet::Error, "Found #{hdr} instead of magic #{magic} in #{filename}" if hdr != magic + + raise Puppet::Error, "Unknown policy file version #{ver} in #{filename}" if ver != 1 + + # Read through (and throw away) the file section offsets, and also + # the magic header for the first section. + + mod.read((numsec + 1) * 4) + + ## Section 1 should be "SE Linux Module" + + selmod_readnext(mod) + selmod_readnext(mod) + + # Skip past the section headers + mod.read(14) + + # Module name + selmod_readnext(mod) + + # At last! the version + + v = selmod_readnext(mod) + + self.debug "file version #{v}" + v + end + + def selmodversion_loaded + lines = () + begin + execpipe("#{command(:semodule)} --list") do |output| + lines = output.readlines + lines.each do |line| + line.chomp! 
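+          # Each line of the `semodule --list` output starts with the module
+          # name followed by its version, so we return the second field when
+          # the first field matches our resource name.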
+ bits = line.split + if bits[0] == @resource[:name] + self.debug "load version #{bits[1]}" + return bits[1] + end + end + end + rescue Puppet::ExecutionFailure + raise Puppet::ExecutionFailure, "Could not list policy modules: #{lines.join(' ').chomp!}" + end + nil + end +end diff --git a/mcollective/lib/puppet/provider/service/base.rb b/mcollective/lib/puppet/provider/service/base.rb new file mode 100755 index 000000000..211e7f964 --- /dev/null +++ b/mcollective/lib/puppet/provider/service/base.rb @@ -0,0 +1,144 @@ +Puppet::Type.type(:service).provide :base do + desc "The simplest form of service support. + + You have to specify enough about your service for this to work; the + minimum you can specify is a binary for starting the process, and this + same binary will be searched for in the process table to stop the + service. It is preferable to specify start, stop, and status commands, + akin to how you would do so using `init`. + + " + + commands :kill => "kill" + + def self.instances + [] + end + + # Get the process ID for a running process. Requires the 'pattern' + # parameter. + def getpid + @resource.fail "Either stop/status commands or a pattern must be specified" unless @resource[:pattern] + ps = Facter["ps"].value + @resource.fail "You must upgrade Facter to a version that includes 'ps'" unless ps and ps != "" + regex = Regexp.new(@resource[:pattern]) + self.debug "Executing '#{ps}'" + IO.popen(ps) { |table| + table.each { |line| + if regex.match(line) + ary = line.sub(/^\s+/, '').split(/\s+/) + return ary[1] + end + } + } + + nil + end + + # How to restart the process. + def restart + if @resource[:restart] or restartcmd + ucommand(:restart) + else + self.stop + self.start + end + end + + # There is no default command, which causes other methods to be used + def restartcmd + end + + # Check if the process is running. Prefer the 'status' parameter, + # then 'statuscmd' method, then look in the process table. We give + # the object the option to not return a status command, which might + # happen if, for instance, it has an init script (and thus responds to + # 'statuscmd') but does not have 'hasstatus' enabled. + def status + if @resource[:status] or statuscmd + # Don't fail when the exit status is not 0. + ucommand(:status, false) + + # Expicitly calling exitstatus to facilitate testing + if $CHILD_STATUS.exitstatus == 0 + return :running + else + return :stopped + end + elsif pid = self.getpid + self.debug "PID is #{pid}" + return :running + else + return :stopped + end + end + + # There is no default command, which causes other methods to be used + def statuscmd + end + + # Run the 'start' parameter command, or the specified 'startcmd'. + def start + ucommand(:start) + end + + # The command used to start. Generated if the 'binary' argument + # is passed. + def startcmd + if @resource[:binary] + return @resource[:binary] + else + raise Puppet::Error, + "Services must specify a start command or a binary" + end + end + + # Stop the service. If a 'stop' parameter is specified, it + # takes precedence; otherwise checks if the object responds to + # a 'stopcmd' method, and if so runs that; otherwise, looks + # for the process in the process table. + # This method will generally not be overridden by submodules. 
+ def stop + if @resource[:stop] or stopcmd + ucommand(:stop) + else + pid = getpid + unless pid + self.info "#{self.name} is not running" + return false + end + begin + output = kill pid + rescue Puppet::ExecutionFailure => detail + @resource.fail "Could not kill #{self.name}, PID #{pid}: #{output}" + end + return true + end + end + + # There is no default command, which causes other methods to be used + def stopcmd + end + + # A simple wrapper so execution failures are a bit more informative. + def texecute(type, command, fof = true) + begin + # #565: Services generally produce no output, so squelch them. + execute(command, :failonfail => fof, :squelch => true) + rescue Puppet::ExecutionFailure => detail + @resource.fail "Could not #{type} #{@resource.ref}: #{detail}" + end + nil + end + + # Use either a specified command or the default for our provider. + def ucommand(type, fof = true) + if c = @resource[type] + cmd = [c] + else + cmd = [send("#{type}cmd")].flatten + end + texecute(type, cmd, fof) + end +end + diff --git a/mcollective/lib/puppet/provider/service/bsd.rb b/mcollective/lib/puppet/provider/service/bsd.rb new file mode 100644 index 000000000..e2a0e35f7 --- /dev/null +++ b/mcollective/lib/puppet/provider/service/bsd.rb @@ -0,0 +1,48 @@ +# Manage FreeBSD services. +Puppet::Type.type(:service).provide :bsd, :parent => :init do + desc "FreeBSD's (and probably NetBSD?) form of `init`-style service management. + + Uses `rc.conf.d` for service enabling and disabling. + +" + + confine :operatingsystem => [:freebsd, :netbsd, :openbsd] + + @@rcconf_dir = '/etc/rc.conf.d' + + def self.defpath + superclass.defpath + end + + # remove service file from rc.conf.d to disable it + def disable + rcfile = File.join(@@rcconf_dir, @model[:name]) + File.delete(rcfile) if File.exists?(rcfile) + end + + # if the service file exists in rc.conf.d then it's already enabled + def enabled? + rcfile = File.join(@@rcconf_dir, @model[:name]) + return :true if File.exists?(rcfile) + + :false + end + + # enable service by creating a service file under rc.conf.d with the + # proper contents + def enable + Dir.mkdir(@@rcconf_dir) if not File.exists?(@@rcconf_dir) + rcfile = File.join(@@rcconf_dir, @model[:name]) + open(rcfile, 'w') { |f| f << "%s_enable=\"YES\"\n" % @model[:name] } + end + + # Override stop/start commands to use one's and the avoid race condition + # where provider trys to stop/start the service before it is enabled + def startcmd + [self.initscript, :onestart] + end + + def stopcmd + [self.initscript, :onestop] + end +end diff --git a/mcollective/lib/puppet/provider/service/daemontools.rb b/mcollective/lib/puppet/provider/service/daemontools.rb new file mode 100644 index 000000000..bbb962a71 --- /dev/null +++ b/mcollective/lib/puppet/provider/service/daemontools.rb @@ -0,0 +1,194 @@ +# Daemontools service management +# +# author Brice Figureau +Puppet::Type.type(:service).provide :daemontools, :parent => :base do + desc "Daemontools service management. + + This provider manages daemons running supervised by D.J.Bernstein daemontools. 
+ It tries to detect the service directory, with by order of preference: + + * /service + * /etc/service + * /var/lib/svscan + + The daemon directory should be placed in a directory that can be + by default in: + + * /var/lib/service + * /etc + + or this can be overriden in the service resource parameters:: + + service { \"myservice\": + provider => \"daemontools\", + path => \"/path/to/daemons\", + } + + This provider supports out of the box: + + * start/stop (mapped to enable/disable) + * enable/disable + * restart + * status + + If a service has `ensure => \"running\"`, it will link /path/to/daemon to + /path/to/service, which will automatically enable the service. + + If a service has `ensure => \"stopped\"`, it will only down the service, not + remove the /path/to/service link. + + " + + commands :svc => "/usr/bin/svc", :svstat => "/usr/bin/svstat" + + class << self + attr_writer :defpath + + # Determine the daemon path. + def defpath(dummy_argument=:work_arround_for_ruby_GC_bug) + unless @defpath + ["/var/lib/service", "/etc"].each do |path| + if FileTest.exist?(path) + @defpath = path + break + end + end + raise "Could not find the daemon directory (tested [/var/lib/service,/etc])" unless @defpath + end + @defpath + end + end + + attr_writer :servicedir + + # returns all providers for all existing services in @defpath + # ie enabled or not + def self.instances + path = self.defpath + unless FileTest.directory?(path) + Puppet.notice "Service path #{path} does not exist" + next + end + + # reject entries that aren't either a directory + # or don't contain a run file + Dir.entries(path).reject { |e| + fullpath = File.join(path, e) + e =~ /^\./ or ! FileTest.directory?(fullpath) or ! FileTest.exist?(File.join(fullpath,"run")) + }.collect do |name| + new(:name => name, :path => path) + end + end + + # returns the daemon dir on this node + def self.daemondir + self.defpath + end + + # find the service dir on this node + def servicedir + unless @servicedir + ["/service", "/etc/service","/var/lib/svscan"].each do |path| + if FileTest.exist?(path) + @servicedir = path + break + end + end + raise "Could not find service directory" unless @servicedir + end + @servicedir + end + + # returns the full path of this service when enabled + # (ie in the service directory) + def service + File.join(self.servicedir, resource[:name]) + end + + # returns the full path to the current daemon directory + # note that this path can be overriden in the resource + # definition + def daemon + File.join(resource[:path], resource[:name]) + end + + def status + begin + output = svstat self.service + if output =~ /:\s+up \(/ + return :running + end + rescue Puppet::ExecutionFailure => detail + raise Puppet::Error.new( "Could not get status for service #{resource.ref}: #{detail}" ) + end + :stopped + end + + def setupservice + if resource[:manifest] + Puppet.notice "Configuring #{resource[:name]}" + command = [ resource[:manifest], resource[:name] ] + #texecute("setupservice", command) + rv = system("#{command}") + end + rescue Puppet::ExecutionFailure => detail + raise Puppet::Error.new( "Cannot config #{self.service} to enable it: #{detail}" ) + end + + def enabled? + case self.status + when :running + # obviously if the daemon is running then it is enabled + return :true + else + # the service is enabled if it is linked + return FileTest.symlink?(self.service) ? :true : :false + end + end + + def enable + if ! 
FileTest.directory?(self.daemon) + Puppet.notice "No daemon dir, calling setupservice for #{resource[:name]}" + self.setupservice + end + if self.daemon + if ! FileTest.symlink?(self.service) + Puppet.notice "Enabling #{self.service}: linking #{self.daemon} -> #{self.service}" + File.symlink(self.daemon, self.service) + end + end + rescue Puppet::ExecutionFailure => detail + raise Puppet::Error.new( "No daemon directory found for #{self.service}") + end + + def disable + begin + if ! FileTest.directory?(self.daemon) + Puppet.notice "No daemon dir, calling setupservice for #{resource[:name]}" + self.setupservice + end + if self.daemon + if FileTest.symlink?(self.service) + Puppet.notice "Disabling #{self.service}: removing link #{self.daemon} -> #{self.service}" + File.unlink(self.service) + end + end + rescue Puppet::ExecutionFailure => detail + raise Puppet::Error.new( "No daemon directory found for #{self.service}") + end + self.stop + end + + def restart + svc "-t", self.service + end + + def start + enable unless enabled? == :true + svc "-u", self.service + end + + def stop + svc "-d", self.service + end +end diff --git a/mcollective/lib/puppet/provider/service/debian.rb b/mcollective/lib/puppet/provider/service/debian.rb new file mode 100755 index 000000000..58b808a8e --- /dev/null +++ b/mcollective/lib/puppet/provider/service/debian.rb @@ -0,0 +1,52 @@ +# Manage debian services. Start/stop is the same as InitSvc, but enable/disable +# is special. +Puppet::Type.type(:service).provide :debian, :parent => :init do + desc "Debian's form of `init`-style management. + + The only difference is that this supports service enabling and disabling + via `update-rc.d` and determines enabled status via `invoke-rc.d`. + + " + + commands :update_rc => "/usr/sbin/update-rc.d" + # note this isn't being used as a command until + # http://projects.reductivelabs.com/issues/2538 + # is resolved. + commands :invoke_rc => "/usr/sbin/invoke-rc.d" + + defaultfor :operatingsystem => [:debian, :ubuntu] + + def self.defpath + superclass.defpath + end + + # Remove the symlinks + def disable + if `dpkg --compare-versions $(dpkg-query -W --showformat '${Version}' sysv-rc) ge 2.88 ; echo $?`.to_i == 0 + update_rc @resource[:name], "disable" + else + update_rc "-f", @resource[:name], "remove" + update_rc @resource[:name], "stop", "00", "1", "2", "3", "4", "5", "6", "." + end + end + + def enabled? + # TODO: Replace system call when Puppet::Util.execute gives us a way + # to determine exit status. http://projects.reductivelabs.com/issues/2538 + system("/usr/sbin/invoke-rc.d", "--quiet", "--query", @resource[:name], "start") + + # 104 is the exit status when you query start an enabled service. + # 106 is the exit status when the policy layer supplies a fallback action + # See x-man-page://invoke-rc.d + if [104, 106].include?($CHILD_STATUS.exitstatus) + return :true + else + return :false + end + end + + def enable + update_rc "-f", @resource[:name], "remove" + update_rc @resource[:name], "defaults" + end +end diff --git a/mcollective/lib/puppet/provider/service/freebsd.rb b/mcollective/lib/puppet/provider/service/freebsd.rb new file mode 100644 index 000000000..f8c7134f0 --- /dev/null +++ b/mcollective/lib/puppet/provider/service/freebsd.rb @@ -0,0 +1,139 @@ +Puppet::Type.type(:service).provide :freebsd, :parent => :init do + + desc "Provider for FreeBSD. Makes use of rcvar argument of init scripts and parses/edits rc files." 
+ + confine :operatingsystem => [:freebsd] + defaultfor :operatingsystem => [:freebsd] + + @@rcconf = '/etc/rc.conf' + @@rcconf_local = '/etc/rc.conf.local' + @@rcconf_dir = '/etc/rc.conf.d' + + def self.defpath + superclass.defpath + end + + # Executing an init script with the 'rcvar' argument returns + # the service name, rcvar name and whether it's enabled/disabled + def rcvar + rcvar = execute([self.initscript, :rcvar], :failonfail => true, :squelch => false) + rcvar = rcvar.split("\n") + rcvar.delete_if {|str| str =~ /^#\s*$/} + rcvar[1] = rcvar[1].gsub(/^\$/, '') + rcvar + end + + # Extract service name + def service_name + name = self.rcvar[0] + self.error("No service name found in rcvar") if name.nil? + name = name.gsub!(/# (.*)/, '\1') + self.error("Service name is empty") if name.nil? + self.debug("Service name is #{name}") + name + end + + # Extract rcvar name + def rcvar_name + name = self.rcvar[1] + self.error("No rcvar name found in rcvar") if name.nil? + name = name.gsub!(/(.*)_enable=(.*)/, '\1') + self.error("rcvar name is empty") if name.nil? + self.debug("rcvar name is #{name}") + name + end + + # Extract rcvar value + def rcvar_value + value = self.rcvar[1] + self.error("No rcvar value found in rcvar") if value.nil? + value = value.gsub!(/(.*)_enable="?(\w+)"?/, '\2') + self.error("rcvar value is empty") if value.nil? + self.debug("rcvar value is #{value}") + value + end + + # Edit rc files and set the service to yes/no + def rc_edit(yesno) + service = self.service_name + rcvar = self.rcvar_name + self.debug("Editing rc files: setting #{rcvar} to #{yesno} for #{service}") + self.rc_add(service, rcvar, yesno) if not self.rc_replace(service, rcvar, yesno) + end + + # Try to find an existing setting in the rc files + # and replace the value + def rc_replace(service, rcvar, yesno) + success = false + # Replace in all files, not just in the first found with a match + [@@rcconf, @@rcconf_local, @@rcconf_dir + "/#{service}"].each do |filename| + if File.exists?(filename) + s = File.read(filename) + if s.gsub!(/(#{rcvar}_enable)=\"?(YES|NO)\"?/, "\\1=\"#{yesno}\"") + File.open(filename, File::WRONLY) { |f| f << s } + self.debug("Replaced in #{filename}") + success = true + end + end + end + success + end + + # Add a new setting to the rc files + def rc_add(service, rcvar, yesno) + append = "\# Added by Puppet\n#{rcvar}_enable=\"#{yesno}\"\n" + # First, try the one-file-per-service style + if File.exists?(@@rcconf_dir) + File.open(@@rcconf_dir + "/#{service}", File::WRONLY | File::APPEND | File::CREAT, 0644) { + |f| f << append + self.debug("Appended to #{f.path}") + } + else + # Else, check the local rc file first, but don't create it + if File.exists?(@@rcconf_local) + File.open(@@rcconf_local, File::WRONLY | File::APPEND) { + |f| f << append + self.debug("Appended to #{f.path}") + } + else + # At last use the standard rc.conf file + File.open(@@rcconf, File::WRONLY | File::APPEND | File::CREAT, 0644) { + |f| f << append + self.debug("Appended to #{f.path}") + } + end + end + end + + def enabled? 
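+    # The service counts as enabled when its <name>_enable rc variable,
+    # as extracted by rcvar_value, is set to "YES".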
+ if /YES$/ =~ self.rcvar_value + self.debug("Is enabled") + return :true + end + self.debug("Is disabled") + :false + end + + def enable + self.debug("Enabling") + self.rc_edit("YES") + end + + def disable + self.debug("Disabling") + self.rc_edit("NO") + end + + def startcmd + [self.initscript, :onestart] + end + + def stopcmd + [self.initscript, :onestop] + end + + def statuscmd + [self.initscript, :onestatus] + end + +end diff --git a/mcollective/lib/puppet/provider/service/gentoo.rb b/mcollective/lib/puppet/provider/service/gentoo.rb new file mode 100644 index 000000000..20f5d77e6 --- /dev/null +++ b/mcollective/lib/puppet/provider/service/gentoo.rb @@ -0,0 +1,50 @@ +# Manage gentoo services. Start/stop is the same as InitSvc, but enable/disable +# is special. +Puppet::Type.type(:service).provide :gentoo, :parent => :init do + desc "Gentoo's form of `init`-style service management. + + Uses `rc-update` for service enabling and disabling. + + " + + commands :update => "/sbin/rc-update" + + confine :operatingsystem => :gentoo + + defaultfor :operatingsystem => :gentoo + + def self.defpath + superclass.defpath + end + + def disable + output = update :del, @resource[:name], :default + rescue Puppet::ExecutionFailure + raise Puppet::Error, "Could not disable #{self.name}: #{output}" + end + + def enabled? + begin + output = update :show + rescue Puppet::ExecutionFailure + return :false + end + + line = output.split(/\n/).find { |l| l.include?(@resource[:name]) } + + return :false unless line + + # If it's enabled then it will print output showing service | runlevel + if output =~ /^\s*#{@resource[:name]}\s*\|\s*(boot|default)/ + return :true + else + return :false + end + end + + def enable + output = update :add, @resource[:name], :default + rescue Puppet::ExecutionFailure + raise Puppet::Error, "Could not enable #{self.name}: #{output}" + end +end diff --git a/mcollective/lib/puppet/provider/service/init.rb b/mcollective/lib/puppet/provider/service/init.rb new file mode 100755 index 000000000..447c01aa5 --- /dev/null +++ b/mcollective/lib/puppet/provider/service/init.rb @@ -0,0 +1,141 @@ +# The standard init-based service type. Many other service types are +# customizations of this module. +Puppet::Type.type(:service).provide :init, :parent => :base do + desc "Standard init service management. + + This provider assumes that the init script has no `status` command, + because so few scripts do, so you need to either provide a status + command or specify via `hasstatus` that one already exists in the + init script. + +" + + class << self + attr_accessor :defpath + end + + case Facter["operatingsystem"].value + when "FreeBSD" + @defpath = ["/etc/rc.d", "/usr/local/etc/rc.d"] + when "HP-UX" + @defpath = "/sbin/init.d" + else + @defpath = "/etc/init.d" + end + + # We can't confine this here, because the init path can be overridden. + #confine :exists => @defpath + + # List all services of this type. + def self.instances + get_services(self.defpath) + end + + def self.get_services(defpath, exclude=[]) + defpath = [defpath] unless defpath.is_a? Array + instances = [] + defpath.each do |path| + unless FileTest.directory?(path) + Puppet.debug "Service path #{path} does not exist" + next + end + + check = [:ensure] + + check << :enable if public_method_defined? :enabled? + + Dir.entries(path).each do |name| + fullpath = File.join(path, name) + next if name =~ /^\./ + next if exclude.include? 
name + next if not FileTest.executable?(fullpath) + instances << new(:name => name, :path => path, :hasstatus => true) + end + end + instances + end + + # Mark that our init script supports 'status' commands. + def hasstatus=(value) + case value + when true, "true"; @parameters[:hasstatus] = true + when false, "false"; @parameters[:hasstatus] = false + else + raise Puppet::Error, "Invalid 'hasstatus' value #{value.inspect}" + end + end + + # Where is our init script? + def initscript + @initscript ||= self.search(@resource[:name]) + end + + def paths + @paths ||= @resource[:path].find_all do |path| + if File.directory?(path) + true + else + if File.exist?(path) and ! File.directory?(path) + self.debug "Search path #{path} is not a directory" + else + self.debug "Search path #{path} does not exist" + end + false + end + end + end + + def search(name) + paths.each { |path| + fqname = File.join(path,name) + begin + stat = File.stat(fqname) + rescue + # should probably rescue specific errors... + self.debug("Could not find #{name} in #{path}") + next + end + + # if we've gotten this far, we found a valid script + return fqname + } + + paths.each { |path| + fqname_sh = File.join(path,"#{name}.sh") + begin + stat = File.stat(fqname_sh) + rescue + # should probably rescue specific errors... + self.debug("Could not find #{name}.sh in #{path}") + next + end + + # if we've gotten this far, we found a valid script + return fqname_sh + } + raise Puppet::Error, "Could not find init script for '#{name}'" + end + + # The start command is just the init scriptwith 'start'. + def startcmd + [initscript, :start] + end + + # The stop command is just the init script with 'stop'. + def stopcmd + [initscript, :stop] + end + + def restartcmd + (@resource[:hasrestart] == :true) && [initscript, :restart] + end + + # If it was specified that the init script has a 'status' command, then + # we just return that; otherwise, we return false, which causes it to + # fallback to other mechanisms. + def statuscmd + (@resource[:hasstatus] == :true) && [initscript, :status] + end + +end + diff --git a/mcollective/lib/puppet/provider/service/launchd.rb b/mcollective/lib/puppet/provider/service/launchd.rb new file mode 100644 index 000000000..07c549a8b --- /dev/null +++ b/mcollective/lib/puppet/provider/service/launchd.rb @@ -0,0 +1,266 @@ +require 'facter/util/plist' + +Puppet::Type.type(:service).provide :launchd, :parent => :base do + desc "launchd service management framework. + + This provider manages jobs with launchd, which is the default service framework for + Mac OS X and is potentially available for use on other platforms. + + See: + + * http://developer.apple.com/macosx/launchd.html + * http://launchd.macosforge.org/ + + This provider reads plists out of the following directories: + + * /System/Library/LaunchDaemons + * /System/Library/LaunchAgents + * /Library/LaunchDaemons + * /Library/LaunchAgents + + ...and builds up a list of services based upon each plist's \"Label\" entry. + + This provider supports: + + * ensure => running/stopped, + * enable => true/false + * status + * restart + + Here is how the Puppet states correspond to launchd states: + + * stopped --- job unloaded + * started --- job loaded + * enabled --- 'Disable' removed from job plist file + * disabled --- 'Disable' added to job plist file + + Note that this allows you to do something launchctl can't do, which is to + be in a state of \"stopped/enabled\ or \"running/disabled\". 
+ + " + + commands :launchctl => "/bin/launchctl" + commands :sw_vers => "/usr/bin/sw_vers" + commands :plutil => "/usr/bin/plutil" + + defaultfor :operatingsystem => :darwin + confine :operatingsystem => :darwin + + has_feature :enableable + + Launchd_Paths = ["/Library/LaunchAgents", + "/Library/LaunchDaemons", + "/System/Library/LaunchAgents", + "/System/Library/LaunchDaemons",] + + Launchd_Overrides = "/var/db/launchd.db/com.apple.launchd/overrides.plist" + + + # Read a plist, whether its format is XML or in Apple's "binary1" + # format. + def self.read_plist(path) + Plist::parse_xml(plutil('-convert', 'xml1', '-o', '/dev/stdout', path)) + end + + # returns a label => path map for either all jobs, or just a single + # job if the label is specified + def self.jobsearch(label=nil) + label_to_path_map = {} + Launchd_Paths.each do |path| + if FileTest.exists?(path) + Dir.entries(path).each do |f| + next if f =~ /^\..*$/ + next if FileTest.directory?(f) + fullpath = File.join(path, f) + if FileTest.file?(fullpath) and job = read_plist(fullpath) and job.has_key?("Label") + if job["Label"] == label + return { label => fullpath } + else + label_to_path_map[job["Label"]] = fullpath + end + end + end + end + end + + # if we didn't find the job above and we should have, error. + raise Puppet::Error.new("Unable to find launchd plist for job: #{label}") if label + # if returning all jobs + label_to_path_map + end + + + def self.instances + jobs = self.jobsearch + jobs.keys.collect do |job| + new(:name => job, :provider => :launchd, :path => jobs[job]) + end + end + + + def self.get_macosx_version_major + return @macosx_version_major if defined?(@macosx_version_major) + begin + # Make sure we've loaded all of the facts + Facter.loadfacts + + if Facter.value(:macosx_productversion_major) + product_version_major = Facter.value(:macosx_productversion_major) + else + # TODO: remove this code chunk once we require Facter 1.5.5 or higher. + Puppet.warning("DEPRECATION WARNING: Future versions of the launchd provider will require Facter 1.5.5 or newer.") + product_version = Facter.value(:macosx_productversion) + fail("Could not determine OS X version from Facter") if product_version.nil? + product_version_major = product_version.scan(/(\d+)\.(\d+)./).join(".") + end + fail("#{product_version_major} is not supported by the launchd provider") if %w{10.0 10.1 10.2 10.3}.include?(product_version_major) + @macosx_version_major = product_version_major + return @macosx_version_major + rescue Puppet::ExecutionFailure => detail + fail("Could not determine OS X version: #{detail}") + end + end + + + # finds the path for a given label and returns the path and parsed plist + # as an array of [path, plist]. Note plist is really a Hash here. + def plist_from_label(label) + job = self.class.jobsearch(label) + job_path = job[label] + if FileTest.file?(job_path) + job_plist = self.class.read_plist(job_path) + else + raise Puppet::Error.new("Unable to parse launchd plist at path: #{job_path}") + end + [job_path, job_plist] + end + + + def status + # launchctl list exits zero if the job is loaded + # and non-zero if it isn't. Simple way to check... but is only + # available on OS X 10.5 unfortunately, so we grab the whole list + # and check if our resource is included. The output formats differ + # between 10.4 and 10.5, thus the necessity for splitting + begin + output = launchctl :list + raise Puppet::Error.new("launchctl list failed to return any data.") if output.nil? 
+ output.split("\n").each do |j| + return :running if j.split(/\s/).last == resource[:name] + end + return :stopped + rescue Puppet::ExecutionFailure + raise Puppet::Error.new("Unable to determine status of #{resource[:name]}") + end + end + + + # start the service. To get to a state of running/enabled, we need to + # conditionally enable at load, then disable by modifying the plist file + # directly. + def start + job_path, job_plist = plist_from_label(resource[:name]) + did_enable_job = false + cmds = [] + cmds << :launchctl << :load + if self.enabled? == :false # launchctl won't load disabled jobs + cmds << "-w" + did_enable_job = true + end + cmds << job_path + begin + execute(cmds) + rescue Puppet::ExecutionFailure + raise Puppet::Error.new("Unable to start service: #{resource[:name]} at path: #{job_path}") + end + # As load -w clears the Disabled flag, we need to add it in after + self.disable if did_enable_job and resource[:enable] == :false + end + + + def stop + job_path, job_plist = plist_from_label(resource[:name]) + did_disable_job = false + cmds = [] + cmds << :launchctl << :unload + if self.enabled? == :true # keepalive jobs can't be stopped without disabling + cmds << "-w" + did_disable_job = true + end + cmds << job_path + begin + execute(cmds) + rescue Puppet::ExecutionFailure + raise Puppet::Error.new("Unable to stop service: #{resource[:name]} at path: #{job_path}") + end + # As unload -w sets the Disabled flag, we need to add it in after + self.enable if did_disable_job and resource[:enable] == :true + end + + + # launchd jobs are enabled by default. They are only disabled if the key + # "Disabled" is set to true, but it can also be set to false to enable it. + # In 10.6, the Disabled key in the job plist is consulted, but only if there + # is no entry in the global overrides plist. + # We need to draw a distinction between undefined, true and false for both + # locations where the Disabled flag can be defined. + def enabled? + job_plist_disabled = nil + overrides_disabled = nil + + job_path, job_plist = plist_from_label(resource[:name]) + job_plist_disabled = job_plist["Disabled"] if job_plist.has_key?("Disabled") + + if self.class.get_macosx_version_major == "10.6": + if FileTest.file?(Launchd_Overrides) and overrides = self.class.read_plist(Launchd_Overrides) + if overrides.has_key?(resource[:name]) + overrides_disabled = overrides[resource[:name]]["Disabled"] if overrides[resource[:name]].has_key?("Disabled") + end + end + end + + if overrides_disabled.nil? + if job_plist_disabled.nil? or job_plist_disabled == false + return :true + end + elsif overrides_disabled == false + return :true + end + :false + end + + + # enable and disable are a bit hacky. We write out the plist with the appropriate value + # rather than dealing with launchctl as it is unable to change the Disabled flag + # without actually loading/unloading the job. + # In 10.6 we need to write out a disabled key to the global overrides plist, in earlier + # versions this is stored in the job plist itself. + def enable + if self.class.get_macosx_version_major == "10.6" + overrides = self.class.read_plist(Launchd_Overrides) + overrides[resource[:name]] = { "Disabled" => false } + Plist::Emit.save_plist(overrides, Launchd_Overrides) + else + job_path, job_plist = plist_from_label(resource[:name]) + if self.enabled? 
== :false + job_plist.delete("Disabled") + Plist::Emit.save_plist(job_plist, job_path) + end + end + end + + + def disable + if self.class.get_macosx_version_major == "10.6" + overrides = self.class.read_plist(Launchd_Overrides) + overrides[resource[:name]] = { "Disabled" => true } + Plist::Emit.save_plist(overrides, Launchd_Overrides) + else + job_path, job_plist = plist_from_label(resource[:name]) + job_plist["Disabled"] = true + Plist::Emit.save_plist(job_plist, job_path) + end + end + + +end diff --git a/mcollective/lib/puppet/provider/service/redhat.rb b/mcollective/lib/puppet/provider/service/redhat.rb new file mode 100755 index 000000000..e851a488d --- /dev/null +++ b/mcollective/lib/puppet/provider/service/redhat.rb @@ -0,0 +1,76 @@ +# Manage Red Hat services. Start/stop uses /sbin/service and enable/disable uses chkconfig + +Puppet::Type.type(:service).provide :redhat, :parent => :init, :source => :init do + desc "Red Hat's (and probably many others) form of `init`-style service management: + + Uses `chkconfig` for service enabling and disabling. + + " + + commands :chkconfig => "/sbin/chkconfig", :service => "/sbin/service" + + defaultfor :operatingsystem => [:redhat, :fedora, :suse, :centos, :sles, :oel, :ovm] + + def self.instances + # this exclude list is all from /sbin/service (5.x), but I did not exclude kudzu + self.get_services(['/etc/init.d'], ['functions', 'halt', 'killall', 'single', 'linuxconf']) + end + + def self.defpath + superclass.defpath + end + + # Remove the symlinks + def disable + output = chkconfig(@resource[:name], :off) + rescue Puppet::ExecutionFailure + raise Puppet::Error, "Could not disable #{self.name}: #{output}" + end + + def enabled? + begin + output = chkconfig(@resource[:name]) + rescue Puppet::ExecutionFailure + return :false + end + + # If it's disabled on SuSE, then it will print output showing "off" + # at the end + if output =~ /.* off$/ + return :false + end + + :true + end + + # Don't support them specifying runlevels; always use the runlevels + # in the init scripts. + def enable + output = chkconfig(@resource[:name], :on) + rescue Puppet::ExecutionFailure => detail + raise Puppet::Error, "Could not enable #{self.name}: #{detail}" + end + + def initscript + raise Puppet::Error, "Do not directly call the init script for '#{@resource[:name]}'; use 'service' instead" + end + + # use hasstatus=>true when its set for the provider. + def statuscmd + ((@resource.provider.get(:hasstatus) == true) || (@resource[:hasstatus] == :true)) && [command(:service), @resource[:name], "status"] + end + + def restartcmd + (@resource[:hasrestart] == :true) && [command(:service), @resource[:name], "restart"] + end + + def startcmd + [command(:service), @resource[:name], "start"] + end + + def stopcmd + [command(:service), @resource[:name], "stop"] + end + +end + diff --git a/mcollective/lib/puppet/provider/service/runit.rb b/mcollective/lib/puppet/provider/service/runit.rb new file mode 100644 index 000000000..736e3db71 --- /dev/null +++ b/mcollective/lib/puppet/provider/service/runit.rb @@ -0,0 +1,103 @@ +# Daemontools service management +# +# author Brice Figureau +Puppet::Type.type(:service).provide :runit, :parent => :daemontools do + desc "Runit service management. + + This provider manages daemons running supervised by Runit. 
+ It tries to detect the service directory, with by order of preference: + + * /service + * /var/service + * /etc/service + + The daemon directory should be placed in a directory that can be + by default in: + + * /etc/sv + + or this can be overriden in the service resource parameters:: + + service { \"myservice\": + provider => \"runit\", + path => \"/path/to/daemons\", + } + + This provider supports out of the box: + + * start/stop + * enable/disable + * restart + * status + + +" + + commands :sv => "/usr/bin/sv" + + class << self + # this is necessary to autodetect a valid resource + # default path, since there is no standard for such directory. + def defpath(dummy_argument=:work_arround_for_ruby_GC_bug) + unless @defpath + ["/etc/sv", "/var/lib/service"].each do |path| + if FileTest.exist?(path) + @defpath = path + break + end + end + raise "Could not find the daemon directory (tested [/var/lib/service,/etc])" unless @defpath + end + @defpath + end + end + + # find the service dir on this node + def servicedir + unless @servicedir + ["/service", "/etc/service","/var/service"].each do |path| + if FileTest.exist?(path) + @servicedir = path + break + end + end + raise "Could not find service directory" unless @servicedir + end + @servicedir + end + + def status + begin + output = sv "status", self.daemon + return :running if output =~ /^run: / + rescue Puppet::ExecutionFailure => detail + unless detail.message =~ /(warning: |runsv not running$)/ + raise Puppet::Error.new( "Could not get status for service #{resource.ref}: #{detail}" ) + end + end + :stopped + end + + def stop + sv "stop", self.service + end + + def start + enable unless enabled? == :true + sv "start", self.service + end + + def restart + sv "restart", self.service + end + + # disable by removing the symlink so that runit + # doesn't restart our service behind our back + # note that runit doesn't need to perform a stop + # before a disable + def disable + # unlink the daemon symlink to disable it + File.unlink(self.service) if FileTest.symlink?(self.service) + end +end + diff --git a/mcollective/lib/puppet/provider/service/smf.rb b/mcollective/lib/puppet/provider/service/smf.rb new file mode 100755 index 000000000..3efb2eb37 --- /dev/null +++ b/mcollective/lib/puppet/provider/service/smf.rb @@ -0,0 +1,103 @@ +# Solaris 10 SMF-style services. +Puppet::Type.type(:service).provide :smf, :parent => :base do + desc "Support for Sun's new Service Management Framework. + + Starting a service is effectively equivalent to enabling it, so there is + only support for starting and stopping services, which also enables and + disables them, respectively. + + By specifying manifest => \"/path/to/service.xml\", the SMF manifest will + be imported if it does not exist. + + " + + defaultfor :operatingsystem => :solaris + + confine :operatingsystem => :solaris + + commands :adm => "/usr/sbin/svcadm", :svcs => "/usr/bin/svcs" + commands :svccfg => "/usr/sbin/svccfg" + + def setupservice + if resource[:manifest] + [command(:svcs), "-l", @resource[:name]] + if $CHILD_STATUS.exitstatus == 1 + Puppet.notice "Importing #{@resource[:manifest]} for #{@resource[:name]}" + svccfg :import, resource[:manifest] + end + end + rescue Puppet::ExecutionFailure => detail + raise Puppet::Error.new( "Cannot config #{self.service} to enable it: #{detail}" ) + end + + def enable + self.start + end + + def enabled? 
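+    # SMF ties enabling to starting (see the desc above), so a running
+    # service reports :true and anything else reports :false.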
+ case self.status + when :running + return :true + else + return :false + end + end + + def disable + self.stop + end + + def restartcmd + [command(:adm), :restart, @resource[:name]] + end + + def startcmd + self.setupservice + case self.status + when :stopped + [command(:adm), :enable, @resource[:name]] + when :maintenance + [command(:adm), :clear, @resource[:name]] + end + end + + def status + if @resource[:status] + super + return + end + + begin + # get the current state and the next state, and if the next + # state is set (i.e. not "-") use it for state comparison + states = svcs("-H", "-o", "state,nstate", @resource[:name]).chomp.split + state = states[1] == "-" ? states[0] : states[1] + rescue Puppet::ExecutionFailure + info "Could not get status on service #{self.name}" + return :stopped + end + + case state + when "online" + #self.warning "matched running #{line.inspect}" + return :running + when "offline", "disabled", "uninitialized" + #self.warning "matched stopped #{line.inspect}" + return :stopped + when "maintenance" + return :maintenance + when "legacy_run" + raise Puppet::Error, + "Cannot manage legacy services through SMF" + else + raise Puppet::Error, + "Unmanageable state '#{state}' on service #{self.name}" + end + + end + + def stopcmd + [command(:adm), :disable, @resource[:name]] + end +end + diff --git a/mcollective/lib/puppet/provider/service/src.rb b/mcollective/lib/puppet/provider/service/src.rb new file mode 100755 index 000000000..2bd643c0b --- /dev/null +++ b/mcollective/lib/puppet/provider/service/src.rb @@ -0,0 +1,87 @@ +# AIX System Resource controller (SRC) +Puppet::Type.type(:service).provide :src, :parent => :base do + + desc "Support for AIX's System Resource controller. + + Services are started/stopped based on the stopsrc and startsrc + commands, and some services can be refreshed with refresh command. + + * Enabling and disableing services is not supported, as it requires + modifications to /etc/inittab. 
+ + * Starting and stopping groups of subsystems is not yet supported + " + + defaultfor :operatingsystem => :aix + confine :operatingsystem => :aix + + commands :stopsrc => "/usr/bin/stopsrc" + commands :startsrc => "/usr/bin/startsrc" + commands :refresh => "/usr/bin/refresh" + commands :lssrc => "/usr/bin/lssrc" + + has_feature :refreshable + + def startcmd + [command(:startsrc), "-s", @resource[:name]] + end + + def stopcmd + [command(:stopsrc), "-s", @resource[:name]] + end + + def restart + execute([command(:lssrc), "-Ss", @resource[:name]]).each do |line| + args = line.split(":") + + next unless args[0] == @resource[:name] + + # Subsystems with the -K flag can get refreshed (HUPed) + # While subsystems with -S (signals) must be stopped/started + method = args[11] + do_refresh = case method + when "-K" then :true + when "-S" then :false + else self.fail("Unknown service communication method #{method}") + end + + begin + if do_refresh == :true + execute([command(:refresh), "-s", @resource[:name]]) + else + self.stop + self.start + end + return :true + rescue Puppet::ExecutionFailure => detail + raise Puppet::Error.new("Unable to restart service #{@resource[:name]}, error was: #{detail}" ) + end + end + self.fail("No such service found") + rescue Puppet::ExecutionFailure => detail + raise Puppet::Error.new("Cannot get status of #{@resource[:name]}, error was: #{detail}" ) + end + + def status + execute([command(:lssrc), "-s", @resource[:name]]).each do |line| + args = line.split + + # This is the header line + next unless args[0] == @resource[:name] + + # PID is the 3rd field, but inoperative subsystems + # skip this so split doesn't work right + state = case args[-1] + when "active" then :running + when "inoperative" then :stopped + end + Puppet.debug("Service #{@resource[:name]} is #{args[-1]}") + return state + end + self.fail("No such service found") + rescue Puppet::ExecutionFailure => detail + raise Puppet::Error.new("Cannot get status of #{@resource[:name]}, error was: #{detail}" ) + end + +end + diff --git a/mcollective/lib/puppet/provider/ssh_authorized_key/parsed.rb b/mcollective/lib/puppet/provider/ssh_authorized_key/parsed.rb new file mode 100644 index 000000000..5243477ff --- /dev/null +++ b/mcollective/lib/puppet/provider/ssh_authorized_key/parsed.rb @@ -0,0 +1,97 @@ +require 'puppet/provider/parsedfile' + + + Puppet::Type.type(:ssh_authorized_key).provide( + :parsed, + :parent => Puppet::Provider::ParsedFile, + :filetype => :flat, + + :default_target => '' +) do + desc "Parse and generate authorized_keys files for SSH." + + text_line :comment, :match => /^#/ + text_line :blank, :match => /^\s+/ + + record_line :parsed, + :fields => %w{options type key name}, + :optional => %w{options}, + :rts => /^\s+/, + :match => /^(?:(.+) )?(ssh-dss|ssh-rsa) ([^ ]+) ?(.*)$/, + :post_parse => proc { |h| + h[:name] = "" if h[:name] == :absent + h[:options] ||= [:absent] + h[:options] = Puppet::Type::Ssh_authorized_key::ProviderParsed.parse_options(h[:options]) if h[:options].is_a? 
String + }, + :pre_gen => proc { |h| + h[:options] = [] if h[:options].include?(:absent) + h[:options] = h[:options].join(',') + } + + record_line :key_v1, + :fields => %w{options bits exponent modulus name}, + :optional => %w{options}, + :rts => /^\s+/, + :match => /^(?:(.+) )?(\d+) (\d+) (\d+)(?: (.+))?$/ + + def dir_perm + 0700 + end + + def file_perm + 0600 + end + + def target + @resource.should(:target) || File.expand_path("~#{@resource.should(:user)}/.ssh/authorized_keys") + rescue + raise Puppet::Error, "Target not defined and/or specified user does not exist yet" + end + + def user + uid = File.stat(target).uid + Etc.getpwuid(uid).name + end + + def flush + raise Puppet::Error, "Cannot write SSH authorized keys without user" unless @resource.should(:user) + raise Puppet::Error, "User '#{@resource.should(:user)}' does not exist" unless uid = Puppet::Util.uid(@resource.should(:user)) + # ParsedFile usually calls backup_target much later in the flush process, + # but our SUID makes that fail to open filebucket files for writing. + # Fortunately, there's already logic to make sure it only ever happens once, + # so calling it here supresses the later attempt by our superclass's flush method. + self.class.backup_target(target) + + Puppet::Util::SUIDManager.asuser(@resource.should(:user)) do + unless File.exist?(dir = File.dirname(target)) + Puppet.debug "Creating #{dir}" + Dir.mkdir(dir, dir_perm) + end + + super + + File.chmod(file_perm, target) + end + end + + # parse sshv2 option strings, wich is a comma separated list of + # either key="values" elements or bare-word elements + def self.parse_options(options) + result = [] + scanner = StringScanner.new(options) + while !scanner.eos? + scanner.skip(/[ \t]*/) + # scan a long option + if out = scanner.scan(/[-a-z0-9A-Z_]+=\".*?\"/) or out = scanner.scan(/[-a-z0-9A-Z_]+/) + result << out + else + # found an unscannable token, let's abort + break + end + # eat a comma + scanner.skip(/[ \t]*,[ \t]*/) + end + result + end +end + diff --git a/mcollective/lib/puppet/provider/sshkey/parsed.rb b/mcollective/lib/puppet/provider/sshkey/parsed.rb new file mode 100755 index 000000000..dcc55b403 --- /dev/null +++ b/mcollective/lib/puppet/provider/sshkey/parsed.rb @@ -0,0 +1,38 @@ +require 'puppet/provider/parsedfile' + +known = nil +case Facter.value(:operatingsystem) +when "Darwin"; known = "/etc/ssh_known_hosts" +else + known = "/etc/ssh/ssh_known_hosts" +end + + + Puppet::Type.type(:sshkey).provide( + :parsed, + :parent => Puppet::Provider::ParsedFile, + :default_target => known, + + :filetype => :flat +) do + desc "Parse and generate host-wide known hosts files for SSH." + + text_line :comment, :match => /^#/ + text_line :blank, :match => /^\s+/ + + record_line :parsed, :fields => %w{name type key}, + :post_parse => proc { |hash| + names = hash[:name].split(",", -1) + hash[:name] = names.shift + hash[:host_aliases] = names + }, + :pre_gen => proc { |hash| + if hash[:host_aliases] + names = [hash[:name], hash[:host_aliases]].flatten + + hash[:name] = [hash[:name], hash[:host_aliases]].flatten.join(",") + hash.delete(:host_aliases) + end + } +end + diff --git a/mcollective/lib/puppet/provider/user/directoryservice.rb b/mcollective/lib/puppet/provider/user/directoryservice.rb new file mode 100644 index 000000000..4b62a6ae7 --- /dev/null +++ b/mcollective/lib/puppet/provider/user/directoryservice.rb @@ -0,0 +1,100 @@ +# Created by Jeff McCune on 2007-07-22 +# Copyright (c) 2007. All rights reserved. 
+# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation (version 2 of the License) +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston MA 02110-1301 USA + +require 'puppet/provider/nameservice/directoryservice' + +Puppet::Type.type(:user).provide :directoryservice, :parent => Puppet::Provider::NameService::DirectoryService do + desc "User management using DirectoryService on OS X." + + commands :dscl => "/usr/bin/dscl" + confine :operatingsystem => :darwin + defaultfor :operatingsystem => :darwin + + # JJM: DirectoryService can manage passwords. + # This needs to be a special option to dscl though (-passwd) + has_feature :manages_passwords + + # JJM: comment matches up with the /etc/passwd concept of an user + options :comment, :key => "realname" + options :password, :key => "passwd" + + autogen_defaults :home => "/var/empty", :shell => "/usr/bin/false" + + verify :gid, "GID must be an integer" do |value| + value.is_a? Integer + end + + verify :uid, "UID must be an integer" do |value| + value.is_a? Integer + end + + def autogen_comment + @resource[:name].capitalize + end + + # The list of all groups the user is a member of. + # JJM: FIXME: Override this method... + def groups + groups = [] + groups.join(",") + end + + # This is really lame. We have to iterate over each + # of the groups and add us to them. + def groups=(groups) + # case groups + # when Fixnum + # groups = [groups.to_s] + # when String + # groups = groups.split(/\s*,\s*/) + # else + # raise Puppet::DevError, "got invalid groups value #{groups.class} of type #{groups}" + # end + # # Get just the groups we need to modify + # diff = groups - (@is || []) + # + # data = {} + # open("| #{command(:nireport)} / /groups name users") do |file| + # file.each do |line| + # name, members = line.split(/\s+/) + # + # if members.nil? or members =~ /NoValue/ + # data[name] = [] + # else + # # Add each diff group's current members + # data[name] = members.split(/,/) + # end + # end + # end + # + # user = @resource[:name] + # data.each do |name, members| + # if members.include? user and groups.include? name + # # I'm in the group and should be + # next + # elsif members.include? user + # # I'm in the group and shouldn't be + # setuserlist(name, members - [user]) + # elsif groups.include? name + # # I'm not in the group and should be + # setuserlist(name, members + [user]) + # else + # # I'm not in the group and shouldn't be + # next + # end + # end + end + + +end diff --git a/mcollective/lib/puppet/provider/user/hpux.rb b/mcollective/lib/puppet/provider/user/hpux.rb new file mode 100644 index 000000000..983970935 --- /dev/null +++ b/mcollective/lib/puppet/provider/user/hpux.rb @@ -0,0 +1,29 @@ +Puppet::Type.type(:user).provide :hpuxuseradd, :parent => :useradd do + desc "User management for hp-ux! Undocumented switch to special usermod because HP-UX regular usermod is TOO STUPID to change stuff while the user is logged in." 
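+
+  # Illustrative usage only -- the resource title and attribute values below
+  # are assumptions added for documentation, not part of this changeset:
+  #
+  #   user { "appuser":
+  #     ensure     => present,
+  #     provider   => "hpuxuseradd",
+  #     home       => "/home/appuser",
+  #     groups     => "users",
+  #     managehome => true,
+  #   }
+  #
+  # Such a resource is handled through /usr/sam/lbin/usermod.sam and friends,
+  # as wired up by the commands/options declarations below.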
+ + defaultfor :operatingsystem => "hp-ux" + confine :operatingsystem => "hp-ux" + + commands :modify => "/usr/sam/lbin/usermod.sam", :delete => "/usr/sam/lbin/userdel.sam", :add => "/usr/sbin/useradd" + options :comment, :method => :gecos + options :groups, :flag => "-G" + options :home, :flag => "-d", :method => :dir + + verify :gid, "GID must be an integer" do |value| + value.is_a? Integer + end + + verify :groups, "Groups must be comma-separated" do |value| + value !~ /\s/ + end + + has_features :manages_homedir, :allows_duplicates + + def deletecmd + super.insert(1,"-F") + end + + def modifycmd(param,value) + super.insert(1,"-F") + end +end diff --git a/mcollective/lib/puppet/provider/user/ldap.rb b/mcollective/lib/puppet/provider/user/ldap.rb new file mode 100644 index 000000000..75a9667b3 --- /dev/null +++ b/mcollective/lib/puppet/provider/user/ldap.rb @@ -0,0 +1,129 @@ +require 'puppet/provider/ldap' + +Puppet::Type.type(:user).provide :ldap, :parent => Puppet::Provider::Ldap do + desc "User management via `ldap`. This provider requires that you + have valid values for all of the ldap-related settings, + including `ldapbase`. You will also almost definitely need settings + for `ldapuser` and `ldappassword`, so that your clients can write + to ldap. + + Note that this provider will automatically generate a UID for you if + you do not specify one, but it is a potentially expensive operation, + as it iterates across all existing users to pick the appropriate next + one." + + confine :feature => :ldap, :false => (Puppet[:ldapuser] == "") + + has_feature :manages_passwords + + manages(:posixAccount, :person).at("ou=People").named_by(:uid).and.maps :name => :uid, + :password => :userPassword, + :comment => :cn, + :uid => :uidNumber, + :gid => :gidNumber, + :home => :homeDirectory, + :shell => :loginShell + + # Use the last field of a space-separated array as + # the sn. LDAP requires a surname, for some stupid reason. + manager.generates(:sn).from(:cn).with do |cn| + x = 1 + cn[0].split(/\s+/)[-1] + end + + # Find the next uid after the current largest uid. + provider = self + manager.generates(:uidNumber).with do + largest = 500 + if existing = provider.manager.search + existing.each do |hash| + next unless value = hash[:uid] + num = value[0].to_i + largest = num if num > largest + end + end + largest + 1 + end + + # Convert our gid to a group name, if necessary. + def gid=(value) + value = group2id(value) unless [Fixnum, Bignum].include?(value.class) + + @property_hash[:gid] = value + end + + # Find all groups this user is a member of in ldap. + def groups + # We want to cache the current result, so we know if we + # have to remove old values. + unless @property_hash[:groups] + unless result = group_manager.search("memberUid=#{name}") + return @property_hash[:groups] = :absent + end + + return @property_hash[:groups] = result.collect { |r| r[:name] }.sort.join(",") + end + @property_hash[:groups] + end + + # Manage the list of groups this user is a member of. + def groups=(values) + should = values.split(",") + + if groups == :absent + is = [] + else + is = groups.split(",") + end + + modes = {} + [is, should].flatten.uniq.each do |group| + # Skip it when they're in both + next if is.include?(group) and should.include?(group) + + # We're adding a group. + modes[group] = :add and next unless is.include?(group) + + # We're removing a group. 
+ modes[group] = :remove and next unless should.include?(group) + end + + modes.each do |group, form| + self.fail "Could not find ldap group #{group}" unless ldap_group = group_manager.find(group) + + current = ldap_group[:members] + + if form == :add + if current.is_a?(Array) and ! current.empty? + new = current + [name] + else + new = [name] + end + else + new = current - [name] + new = :absent if new.empty? + end + + group_manager.update(group, {:ensure => :present, :members => current}, {:ensure => :present, :members => new}) + end + end + + # Convert a gropu name to an id. + def group2id(group) + Puppet::Type.type(:group).provider(:ldap).name2id(group) + end + + private + + def group_manager + Puppet::Type.type(:group).provider(:ldap).manager + end + + def group_properties(values) + if values.empty? or values == :absent + {:ensure => :present} + else + {:ensure => :present, :members => values} + end + end +end diff --git a/mcollective/lib/puppet/provider/user/pw.rb b/mcollective/lib/puppet/provider/user/pw.rb new file mode 100644 index 000000000..a5988cad1 --- /dev/null +++ b/mcollective/lib/puppet/provider/user/pw.rb @@ -0,0 +1,41 @@ +require 'puppet/provider/nameservice/pw' + +Puppet::Type.type(:user).provide :pw, :parent => Puppet::Provider::NameService::PW do + desc "User management via `pw` on FreeBSD." + + commands :pw => "pw" + has_features :manages_homedir, :allows_duplicates + + defaultfor :operatingsystem => :freebsd + + options :home, :flag => "-d", :method => :dir + options :comment, :method => :gecos + options :groups, :flag => "-G" + + verify :gid, "GID must be an integer" do |value| + value.is_a? Integer + end + + verify :groups, "Groups must be comma-separated" do |value| + value !~ /\s/ + end + + def addcmd + cmd = [command(:pw), "useradd", @resource[:name]] + @resource.class.validproperties.each do |property| + next if property == :ensure + # the value needs to be quoted, mostly because -c might + # have spaces in it + if value = @resource.should(property) and value != "" + cmd << flag(property) << value + end + end + + cmd << "-o" if @resource.allowdupe? + + cmd << "-m" if @resource.managehome? + + cmd + end +end + diff --git a/mcollective/lib/puppet/provider/user/user_role_add.rb b/mcollective/lib/puppet/provider/user/user_role_add.rb new file mode 100644 index 000000000..e562d68b2 --- /dev/null +++ b/mcollective/lib/puppet/provider/user/user_role_add.rb @@ -0,0 +1,207 @@ +require 'puppet/util' +require 'puppet/util/user_attr' + +Puppet::Type.type(:user).provide :user_role_add, :parent => :useradd, :source => :useradd do + + desc "User management inherits `useradd` and adds logic to manage roles on Solaris using roleadd." + + defaultfor :operatingsystem => :solaris + + commands :add => "useradd", :delete => "userdel", :modify => "usermod", :password => "passwd", :role_add => "roleadd", :role_delete => "roledel", :role_modify => "rolemod" + options :home, :flag => "-d", :method => :dir + options :comment, :method => :gecos + options :groups, :flag => "-G" + options :roles, :flag => "-R" + options :auths, :flag => "-A" + options :profiles, :flag => "-P" + options :password_min_age, :flag => "-n" + options :password_max_age, :flag => "-x" + + verify :gid, "GID must be an integer" do |value| + value.is_a? 
Integer + end + + verify :groups, "Groups must be comma-separated" do |value| + value !~ /\s/ + end + + has_features :manages_homedir, :allows_duplicates, :manages_solaris_rbac, :manages_passwords, :manages_password_age + + #must override this to hand the keyvalue pairs + def add_properties + cmd = [] + Puppet::Type.type(:user).validproperties.each do |property| + #skip the password because we can't create it with the solaris useradd + next if [:ensure, :password, :password_min_age, :password_max_age].include?(property) + # 1680 Now you can set the hashed passwords on solaris:lib/puppet/provider/user/user_role_add.rb + # the value needs to be quoted, mostly because -c might + # have spaces in it + if value = @resource.should(property) and value != "" + if property == :keys + cmd += build_keys_cmd(value) + else + cmd << flag(property) << value + end + end + end + cmd + end + + def user_attributes + @user_attributes ||= UserAttr.get_attributes_by_name(@resource[:name]) + end + + def flush + @user_attributes = nil + end + + def command(cmd) + cmd = ("role_#{cmd}").intern if is_role? or (!exists? and @resource[:ensure] == :role) + super(cmd) + end + + def is_role? + user_attributes and user_attributes[:type] == "role" + end + + def run(cmd, msg) + execute(cmd) + rescue Puppet::ExecutionFailure => detail + raise Puppet::Error, "Could not #{msg} #{@resource.class.name} #{@resource.name}: #{detail}" + end + + def transition(type) + cmd = [command(:modify)] + cmd << "-K" << "type=#{type}" + cmd += add_properties + cmd << @resource[:name] + end + + def create + if is_role? + run(transition("normal"), "transition role to") + else + run(addcmd, "create") + if cmd = passcmd + run(cmd, "change password policy for") + end + end + # added to handle case when password is specified + self.password = @resource[:password] if @resource[:password] + end + + def destroy + run(deletecmd, "delete "+ (is_role? ? "role" : "user")) + end + + def create_role + if exists? and !is_role? + run(transition("role"), "transition user to") + else + run(addcmd, "create role") + end + end + + def roles + user_attributes[:roles] if user_attributes + end + + def auths + user_attributes[:auths] if user_attributes + end + + def profiles + user_attributes[:profiles] if user_attributes + end + + def project + user_attributes[:project] if user_attributes + end + + def managed_attributes + [:name, :type, :roles, :auths, :profiles, :project] + end + + def remove_managed_attributes + managed = managed_attributes + user_attributes.select { |k,v| !managed.include?(k) }.inject({}) { |hash, array| hash[array[0]] = array[1]; hash } + end + + def keys + if user_attributes + #we have to get rid of all the keys we are managing another way + remove_managed_attributes + end + end + + def build_keys_cmd(keys_hash) + cmd = [] + keys_hash.each do |k,v| + cmd << "-K" << "#{k}=#{v}" + end + cmd + end + + def keys=(keys_hash) + run([command(:modify)] + build_keys_cmd(keys_hash) << @resource[:name], "modify attribute key pairs") + end + + + # This helper makes it possible to test this on stub data without having to + # do too many crazy things! + def target_file_path + "/etc/shadow" + end + private :target_file_path + + #Read in /etc/shadow, find the line for this user (skipping comments, because who knows) and return it + #No abstraction, all esoteric knowledge of file formats, yay + def shadow_entry + return @shadow_entry if defined? @shadow_entry + @shadow_entry = File.readlines(target_file_path). + reject { |r| r =~ /^[^\w]/ }. 
+ collect { |l| l.chomp.split(':') }. + find { |user, _| user == @resource[:name] } + end + + def password + shadow_entry[1] if shadow_entry + end + + def password_min_age + shadow_entry ? shadow_entry[3] : :absent + end + + def password_max_age + shadow_entry ? shadow_entry[4] : :absent + end + + # Read in /etc/shadow, find the line for our used and rewrite it with the + # new pw. Smooth like 80 grit sandpaper. + # + # Now uses the `replace_file` mechanism to minimize the chance that we lose + # data, but it is still terrible. We still skip platform locking, so a + # concurrent `vipw -s` session will have no idea we risk data loss. + def password=(cryptopw) + begin + shadow = File.read(target_file_path) + + # Go Mifune loves the race here where we can lose data because + # /etc/shadow changed between reading it and writing it. + # --daniel 2012-02-05 + Puppet::Util.replace_file(target_file_path, 0640) do |fh| + shadow.each_line do |line| + line_arr = line.split(':') + if line_arr[0] == @resource[:name] + line_arr[1] = cryptopw + line = line_arr.join(':') + end + fh.print line + end + end + rescue => detail + fail "Could not write replace #{target_file_path}: #{detail}" + end + end +end + diff --git a/mcollective/lib/puppet/provider/user/useradd.rb b/mcollective/lib/puppet/provider/user/useradd.rb new file mode 100644 index 000000000..b87971738 --- /dev/null +++ b/mcollective/lib/puppet/provider/user/useradd.rb @@ -0,0 +1,114 @@ +require 'puppet/provider/nameservice/objectadd' + +Puppet::Type.type(:user).provide :useradd, :parent => Puppet::Provider::NameService::ObjectAdd do + desc "User management via `useradd` and its ilk. Note that you will need to install the `Shadow Password` Ruby library often known as ruby-libshadow to manage user passwords." + + commands :add => "useradd", :delete => "userdel", :modify => "usermod", :password => "chage" + + options :home, :flag => "-d", :method => :dir + options :comment, :method => :gecos + options :groups, :flag => "-G" + options :password_min_age, :flag => "-m" + options :password_max_age, :flag => "-M" + + verify :gid, "GID must be an integer" do |value| + value.is_a? Integer + end + + verify :groups, "Groups must be comma-separated" do |value| + value !~ /\s/ + end + + has_features :manages_homedir, :allows_duplicates, :manages_expiry, :system_users + + has_features :manages_passwords, :manages_password_age if Puppet.features.libshadow? + + def check_allow_dup + @resource.allowdupe? ? ["-o"] : [] + end + + def check_manage_home + cmd = [] + if @resource.managehome? + cmd << "-m" + elsif %w{Fedora RedHat CentOS OEL OVS}.include?(Facter.value("operatingsystem")) + cmd << "-M" + end + cmd + end + + def check_manage_expiry + cmd = [] + if @resource[:expiry] + cmd << "-e #{@resource[:expiry]}" + end + + cmd + end + + def check_system_users + @resource.system? ? 
["-r"] : [] + end + + def add_properties + cmd = [] + Puppet::Type.type(:user).validproperties.each do |property| + next if property == :ensure + next if property.to_s =~ /password_.+_age/ + # the value needs to be quoted, mostly because -c might + # have spaces in it + if value = @resource.should(property) and value != "" + cmd << flag(property) << value + end + end + cmd + end + + def addcmd + cmd = [command(:add)] + cmd += add_properties + cmd += check_allow_dup + cmd += check_manage_home + cmd += check_manage_expiry + cmd += check_system_users + cmd << @resource[:name] + end + + def passcmd + age_limits = [:password_min_age, :password_max_age].select { |property| @resource.should(property) } + if age_limits.empty? + nil + else + [command(:password),age_limits.collect { |property| [flag(property), @resource.should(property)]}, @resource[:name]].flatten + end + end + + def password_min_age + if Puppet.features.libshadow? + if ent = Shadow::Passwd.getspnam(@resource.name) + return ent.sp_min + end + end + :absent + end + + def password_max_age + if Puppet.features.libshadow? + if ent = Shadow::Passwd.getspnam(@resource.name) + return ent.sp_max + end + end + :absent + end + + # Retrieve the password using the Shadow Password library + def password + if Puppet.features.libshadow? + if ent = Shadow::Passwd.getspnam(@resource.name) + return ent.sp_pwdp + end + end + :absent + end +end + diff --git a/mcollective/lib/puppet/provider/zfs/solaris.rb b/mcollective/lib/puppet/provider/zfs/solaris.rb new file mode 100644 index 000000000..85d054f86 --- /dev/null +++ b/mcollective/lib/puppet/provider/zfs/solaris.rb @@ -0,0 +1,45 @@ +Puppet::Type.type(:zfs).provide(:solaris) do + desc "Provider for Solaris zfs." + + commands :zfs => "/usr/sbin/zfs" + defaultfor :operatingsystem => :solaris + + def add_properties + properties = [] + Puppet::Type.type(:zfs).validproperties.each do |property| + next if property == :ensure + if value = @resource[property] and value != "" + properties << "-o" << "#{property}=#{value}" + end + end + properties + end + + def create + zfs *([:create] + add_properties + [@resource[:name]]) + end + + def destroy + zfs(:destroy, @resource[:name]) + end + + def exists? + if zfs(:list).split("\n").detect { |line| line.split("\s")[0] == @resource[:name] } + true + else + false + end + end + + [:mountpoint, :compression, :copies, :quota, :reservation, :sharenfs, :snapdir].each do |field| + define_method(field) do + zfs(:get, "-H", "-o", "value", field, @resource[:name]).strip + end + + define_method(field.to_s + "=") do |should| + zfs(:set, "#{field}=#{should}", @resource[:name]) + end + end + +end + diff --git a/mcollective/lib/puppet/provider/zone/solaris.rb b/mcollective/lib/puppet/provider/zone/solaris.rb new file mode 100644 index 000000000..f46337b14 --- /dev/null +++ b/mcollective/lib/puppet/provider/zone/solaris.rb @@ -0,0 +1,257 @@ +Puppet::Type.type(:zone).provide(:solaris) do + desc "Provider for Solaris Zones." 
+ + commands :adm => "/usr/sbin/zoneadm", :cfg => "/usr/sbin/zonecfg" + defaultfor :operatingsystem => :solaris + + mk_resource_methods + + # Convert the output of a list into a hash + def self.line2hash(line) + fields = [:id, :name, :ensure, :path] + + properties = {} + line.split(":").each_with_index { |value, index| + next unless fields[index] + properties[fields[index]] = value + } + + # Configured but not installed zones do not have IDs + properties.delete(:id) if properties[:id] == "-" + + properties[:ensure] = symbolize(properties[:ensure]) + + properties + end + + def self.instances + # LAK:NOTE See http://snurl.com/21zf8 [groups_google_com] + x = adm(:list, "-cp").split("\n").collect do |line| + new(line2hash(line)) + end + end + + # Perform all of our configuration steps. + def configure + # If the thing is entirely absent, then we need to create the config. + # Is there someway to get this on one line? + str = "create -b #{@resource[:create_args]}\nset zonepath=#{@resource[:path]}\n" + + # Then perform all of our configuration steps. It's annoying + # that we need this much internal info on the resource. + @resource.send(:properties).each do |property| + str += property.configtext + "\n" if property.is_a? ZoneConfigProperty and ! property.safe_insync?(properties[property.name]) + end + + str += "commit\n" + setconfig(str) + end + + def destroy + zonecfg :delete, "-F" + end + + def exists? + properties[:ensure] != :absent + end + + # Clear out the cached values. + def flush + @property_hash.clear + end + + def install(dummy_argument=:work_arround_for_ruby_GC_bug) + if @resource[:clone] # TODO: add support for "-s snapshot" + zoneadm :clone, @resource[:clone] + elsif @resource[:install_args] + zoneadm :install, @resource[:install_args].split(" ") + else + zoneadm :install + end + end + + # Look up the current status. + def properties + if @property_hash.empty? + @property_hash = status || {} + if @property_hash.empty? + @property_hash[:ensure] = :absent + else + @resource.class.validproperties.each do |name| + @property_hash[name] ||= :absent + end + end + + end + @property_hash.dup + end + + # We need a way to test whether a zone is in process. Our 'ensure' + # property models the static states, but we need to handle the temporary ones. + def processing? + if hash = status + case hash[:ensure] + when "incomplete", "ready", "shutting_down" + true + else + false + end + else + false + end + end + + # Collect the configuration of the zone. + def getconfig + output = zonecfg :info + + name = nil + current = nil + hash = {} + output.split("\n").each do |line| + case line + when /^(\S+):\s*$/ + name = $1 + current = nil # reset it + when /^(\S+):\s*(.+)$/ + hash[$1.intern] = $2 + when /^\s+(\S+):\s*(.+)$/ + if name + hash[name] = [] unless hash.include? name + + unless current + current = {} + hash[name] << current + end + current[$1.intern] = $2 + else + err "Ignoring '#{line}'" + end + else + debug "Ignoring zone output '#{line}'" + end + end + + hash + end + + # Execute a configuration string. Can't be private because it's called + # by the properties. 
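+  # The configuration text is piped to "zonecfg -z <zone> -f -"; a typical
+  # string built by #configure looks like (values illustrative only):
+  #
+  #   create -b
+  #   set zonepath=/zones/build
+  #   commit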
+ def setconfig(str) + command = "#{command(:cfg)} -z #{@resource[:name]} -f -" + debug "Executing '#{command}' in zone #{@resource[:name]} with '#{str}'" + IO.popen(command, "w") do |pipe| + pipe.puts str + end + + unless $CHILD_STATUS == 0 + raise ArgumentError, "Failed to apply configuration" + end + end + + def start + # Check the sysidcfg stuff + if cfg = @resource[:sysidcfg] + zoneetc = File.join(@resource[:path], "root", "etc") + sysidcfg = File.join(zoneetc, "sysidcfg") + + # if the zone root isn't present "ready" the zone + # which makes zoneadmd mount the zone root + zoneadm :ready unless File.directory?(zoneetc) + + unless File.exists?(sysidcfg) + begin + File.open(sysidcfg, "w", 0600) do |f| + f.puts cfg + end + rescue => detail + puts detail.stacktrace if Puppet[:debug] + raise Puppet::Error, "Could not create sysidcfg: #{detail}" + end + end + end + + zoneadm :boot + end + + # Return a hash of the current status of this zone. + def status + begin + output = adm "-z", @resource[:name], :list, "-p" + rescue Puppet::ExecutionFailure + return nil + end + + main = self.class.line2hash(output.chomp) + + # Now add in the configuration information + config_status.each do |name, value| + main[name] = value + end + + main + end + + def ready + zoneadm :ready + end + + def stop + zoneadm :halt + end + + def unconfigure + zonecfg :delete, "-F" + end + + def uninstall + zoneadm :uninstall, "-F" + end + + private + + # Turn the results of getconfig into status information. + def config_status + config = getconfig + result = {} + + result[:autoboot] = config[:autoboot] ? config[:autoboot].intern : :absent + result[:pool] = config[:pool] + result[:shares] = config[:shares] + if dir = config["inherit-pkg-dir"] + result[:inherit] = dir.collect { |dirs| dirs[:dir] } + end + result[:iptype] = config[:"ip-type"] + if net = config["net"] + result[:ip] = net.collect do |params| + if params[:defrouter] + "#{params[:physical]}:#{params[:address]}:#{params[:defrouter]}" + elsif params[:address] + "#{params[:physical]}:#{params[:address]}" + else + params[:physical] + end + end + end + + result + end + + def zoneadm(*cmd) + adm("-z", @resource[:name], *cmd) + rescue Puppet::ExecutionFailure => detail + self.fail "Could not #{cmd[0]} zone: #{detail}" + end + + def zonecfg(*cmd) + # You apparently can't get the configuration of the global zone + return "" if self.name == "global" + + begin + cfg("-z", self.name, *cmd) + rescue Puppet::ExecutionFailure => detail + self.fail "Could not #{cmd[0]} zone: #{detail}" + end + end +end + diff --git a/mcollective/lib/puppet/provider/zpool/solaris.rb b/mcollective/lib/puppet/provider/zpool/solaris.rb new file mode 100644 index 000000000..e597c2ae1 --- /dev/null +++ b/mcollective/lib/puppet/provider/zpool/solaris.rb @@ -0,0 +1,114 @@ +Puppet::Type.type(:zpool).provide(:solaris) do + desc "Provider for Solaris zpool." + + commands :zpool => "/usr/sbin/zpool" + defaultfor :operatingsystem => :solaris + + def process_zpool_data(pool_array) + if pool_array == [] + return Hash.new(:absent) + end + #get the name and get rid of it + pool = Hash.new + pool[:pool] = pool_array[0] + pool_array.shift + + tmp = [] + + #order matters here :( + pool_array.reverse.each do |value| + sym = nil + case value + when "spares"; sym = :spare + when "logs"; sym = :log + when "mirror", "raidz1", "raidz2" + sym = value == "mirror" ? 
:mirror : :raidz + pool[:raid_parity] = "raidz2" if value == "raidz2" + else + tmp << value + sym = :disk if value == pool_array.first + end + + if sym + pool[sym] = pool[sym] ? pool[sym].unshift(tmp.reverse.join(' ')) : [tmp.reverse.join(' ')] + tmp.clear + end + end + + pool + end + + def get_pool_data + #this is all voodoo dependent on the output from zpool + zpool_data = %x{ zpool status #{@resource[:pool]}}.split("\n").select { |line| line.index("\t") == 0 }.collect { |l| l.strip.split("\s")[0] } + zpool_data.shift + zpool_data + end + + def current_pool + @current_pool = process_zpool_data(get_pool_data) unless (defined?(@current_pool) and @current_pool) + @current_pool + end + + def flush + @current_pool= nil + end + + #Adds log and spare + def build_named(name) + if prop = @resource[name.intern] + [name] + prop.collect { |p| p.split(' ') }.flatten + else + [] + end + end + + #query for parity and set the right string + def raidzarity + @resource[:raid_parity] ? @resource[:raid_parity] : "raidz1" + end + + #handle mirror or raid + def handle_multi_arrays(prefix, array) + array.collect{ |a| [prefix] + a.split(' ') }.flatten + end + + #builds up the vdevs for create command + def build_vdevs + if disk = @resource[:disk] + disk.collect { |d| d.split(' ') }.flatten + elsif mirror = @resource[:mirror] + handle_multi_arrays("mirror", mirror) + elsif raidz = @resource[:raidz] + handle_multi_arrays(raidzarity, raidz) + end + end + + def create + zpool(*([:create, @resource[:pool]] + build_vdevs + build_named("spare") + build_named("log"))) + end + + def delete + zpool :destroy, @resource[:pool] + end + + def exists? + if current_pool[:pool] == :absent + false + else + true + end + end + + [:disk, :mirror, :raidz, :log, :spare].each do |field| + define_method(field) do + current_pool[field] + end + + define_method(field.to_s + "=") do |should| + Puppet.warning "NO CHANGES BEING MADE: zpool #{field} does not match, should be '#{should}' currently is '#{current_pool[field]}'" + end + end + +end + diff --git a/mcollective/lib/puppet/rails.rb b/mcollective/lib/puppet/rails.rb new file mode 100644 index 000000000..74805bb6f --- /dev/null +++ b/mcollective/lib/puppet/rails.rb @@ -0,0 +1,132 @@ +# Load the appropriate libraries, or set a class indicating they aren't available + +require 'facter' +require 'puppet' +require 'logger' + +module Puppet::Rails + TIME_DEBUG = true + + def self.connect + # This global init does not work for testing, because we remove + # the state dir on every test. + return if ActiveRecord::Base.connected? + + Puppet.settings.use(:main, :rails, :master) + + ActiveRecord::Base.logger = Logger.new(Puppet[:railslog]) + begin + loglevel = Logger.const_get(Puppet[:rails_loglevel].upcase) + ActiveRecord::Base.logger.level = loglevel + rescue => detail + Puppet.warning "'#{Puppet[:rails_loglevel]}' is not a valid Rails log level; using debug" + ActiveRecord::Base.logger.level = Logger::DEBUG + end + + # As of ActiveRecord 2.2 allow_concurrency has been deprecated and no longer has any effect. + ActiveRecord::Base.allow_concurrency = true if Puppet::Util.activerecord_version < 2.2 + + ActiveRecord::Base.verify_active_connections! 
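+    # verify_active_connections! recycles any stale connections held by
+    # ActiveRecord before a fresh connection is established below.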
+ + begin + args = database_arguments + Puppet.info "Connecting to #{args[:adapter]} database: #{args[:database]}" + ActiveRecord::Base.establish_connection(args) + rescue => detail + puts detail.backtrace if Puppet[:trace] + raise Puppet::Error, "Could not connect to database: #{detail}" + end + end + + # The arguments for initializing the database connection. + def self.database_arguments + adapter = Puppet[:dbadapter] + + args = {:adapter => adapter, :log_level => Puppet[:rails_loglevel]} + + case adapter + when "sqlite3" + args[:database] = Puppet[:dblocation] + when "mysql", "postgresql" + args[:host] = Puppet[:dbserver] unless Puppet[:dbserver].to_s.empty? + args[:port] = Puppet[:dbport] unless Puppet[:dbport].to_s.empty? + args[:username] = Puppet[:dbuser] unless Puppet[:dbuser].to_s.empty? + args[:password] = Puppet[:dbpassword] unless Puppet[:dbpassword].to_s.empty? + args[:pool] = Puppet[:dbconnections].to_i unless Puppet[:dbconnections].to_i <= 0 + args[:database] = Puppet[:dbname] + args[:reconnect]= true + + socket = Puppet[:dbsocket] + args[:socket] = socket unless socket.to_s.empty? + when "oracle_enhanced": + args[:database] = Puppet[:dbname] unless Puppet[:dbname].to_s.empty? + args[:username] = Puppet[:dbuser] unless Puppet[:dbuser].to_s.empty? + args[:password] = Puppet[:dbpassword] unless Puppet[:dbpassword].to_s.empty? + args[:pool] = Puppet[:dbconnections].to_i unless Puppet[:dbconnections].to_i <= 0 + else + raise ArgumentError, "Invalid db adapter #{adapter}" + end + args + end + + # Set up our database connection. It'd be nice to have a "use" system + # that could make callbacks. + def self.init + raise Puppet::DevError, "No activerecord, cannot init Puppet::Rails" unless Puppet.features.rails? + + connect + + unless ActiveRecord::Base.connection.tables.include?("resources") + require 'puppet/rails/database/schema' + Puppet::Rails::Schema.init + end + + migrate if Puppet[:dbmigrate] + end + + # Migrate to the latest db schema. + def self.migrate + dbdir = nil + $LOAD_PATH.each { |d| + tmp = File.join(d, "puppet/rails/database") + if FileTest.directory?(tmp) + dbdir = tmp + break + end + } + + raise Puppet::Error, "Could not find Puppet::Rails database dir" unless dbdir + + raise Puppet::Error, "Database has problems, can't migrate." unless ActiveRecord::Base.connection.tables.include?("resources") + + Puppet.notice "Migrating" + + begin + ActiveRecord::Migrator.migrate(dbdir) + rescue => detail + puts detail.backtrace if Puppet[:trace] + raise Puppet::Error, "Could not migrate database: #{detail}" + end + end + + # Tear down the database. Mostly only used during testing. + def self.teardown + raise Puppet::DevError, "No activerecord, cannot init Puppet::Rails" unless Puppet.features.rails? + + Puppet.settings.use(:master, :rails) + + begin + ActiveRecord::Base.establish_connection(database_arguments) + rescue => detail + puts detail.backtrace if Puppet[:trace] + raise Puppet::Error, "Could not connect to database: #{detail}" + end + + ActiveRecord::Base.connection.tables.each do |t| + ActiveRecord::Base.connection.drop_table t + end + end +end + +require 'puppet/rails/host' if Puppet.features.rails? + diff --git a/mcollective/lib/puppet/rails/benchmark.rb b/mcollective/lib/puppet/rails/benchmark.rb new file mode 100644 index 000000000..e1e92bb79 --- /dev/null +++ b/mcollective/lib/puppet/rails/benchmark.rb @@ -0,0 +1,63 @@ +require 'benchmark' +module Puppet::Rails::Benchmark + $benchmarks = {:accumulated => {}} + + def time_debug? 
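+    # Benchmarks are only collected (and written out) while TIME_DEBUG is true.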
+ Puppet::Rails::TIME_DEBUG + end + + def railsmark(message) + result = nil + seconds = Benchmark.realtime { result = yield } + Puppet.debug(message + " in %0.2f seconds" % seconds) + + $benchmarks[message] = seconds if time_debug? + result + end + + def debug_benchmark(message) + return yield unless Puppet::Rails::TIME_DEBUG + + railsmark(message) { yield } + end + + # Collect partial benchmarks to be logged when they're + # all done. + # These are always low-level debugging so we only + # print them if time_debug is enabled. + def accumulate_benchmark(message, label) + return yield unless time_debug? + + $benchmarks[:accumulated][message] ||= Hash.new(0) + $benchmarks[:accumulated][message][label] += Benchmark.realtime { yield } + end + + # Log the accumulated marks. + def log_accumulated_marks(message) + return unless time_debug? + + return if $benchmarks[:accumulated].empty? or $benchmarks[:accumulated][message].nil? or $benchmarks[:accumulated][message].empty? + + $benchmarks[:accumulated][message].each do |label, value| + Puppet.debug(message + ("(#{label})") + (" in %0.2f seconds" % value)) + end + end + + def write_benchmarks + return unless time_debug? + + branch = %x{git branch}.split("\n").find { |l| l =~ /^\*/ }.sub("* ", '') + + file = "/tmp/time_debugging.yaml" + + require 'yaml' + + if FileTest.exist?(file) + data = YAML.load_file(file) + else + data = {} + end + data[branch] = $benchmarks + Puppet::Util.replace_file(file, 0644) { |f| f.print YAML.dump(data) } + end +end diff --git a/mcollective/lib/puppet/rails/database/001_add_created_at_to_all_tables.rb b/mcollective/lib/puppet/rails/database/001_add_created_at_to_all_tables.rb new file mode 100644 index 000000000..22dffb854 --- /dev/null +++ b/mcollective/lib/puppet/rails/database/001_add_created_at_to_all_tables.rb @@ -0,0 +1,13 @@ +class AddCreatedAtToAllTables < ActiveRecord::Migration + def self.up + ActiveRecord::Base.connection.tables.each do |t| + add_column t.to_s, :created_at, :datetime unless ActiveRecord::Base.connection.columns(t).collect {|c| c.name}.include?("created_at") + end + end + + def self.down + ActiveRecord::Base.connection.tables.each do |t| + remove_column t.to_s, :created_at unless ActiveRecord::Base.connection.columns(t).collect {|c| c.name}.include?("created_at") + end + end +end diff --git a/mcollective/lib/puppet/rails/database/002_remove_duplicated_index_on_all_tables.rb b/mcollective/lib/puppet/rails/database/002_remove_duplicated_index_on_all_tables.rb new file mode 100644 index 000000000..021889df7 --- /dev/null +++ b/mcollective/lib/puppet/rails/database/002_remove_duplicated_index_on_all_tables.rb @@ -0,0 +1,13 @@ +class RemoveDuplicatedIndexOnAllTables < ActiveRecord::Migration + def self.up + ActiveRecord::Base.connection.tables.each do |t| + remove_index t.to_s, :id if ActiveRecord::Base.connection.indexes(t).collect {|c| c.columns}.include?("id") + end + end + + def self.down + ActiveRecord::Base.connection.tables.each do |t| + add_index t.to_s, :id, :integer => true unless ActiveRecord::Base.connection.indexes(t).collect {|c| c.columns}.include?("id") + end + end +end diff --git a/mcollective/lib/puppet/rails/database/003_add_environment_to_host.rb b/mcollective/lib/puppet/rails/database/003_add_environment_to_host.rb new file mode 100644 index 000000000..5c06bb8c0 --- /dev/null +++ b/mcollective/lib/puppet/rails/database/003_add_environment_to_host.rb @@ -0,0 +1,9 @@ +class AddEnvironmentToHost < ActiveRecord::Migration + def self.up + add_column :hosts, :environment, :string 
unless ActiveRecord::Base.connection.columns(:hosts).collect {|c| c.name}.include?("environment") + end + + def self.down + remove_column :hosts, :environment if ActiveRecord::Base.connection.columns(:hosts).collect {|c| c.name}.include?("environment") + end +end diff --git a/mcollective/lib/puppet/rails/database/004_add_inventory_service_tables.rb b/mcollective/lib/puppet/rails/database/004_add_inventory_service_tables.rb new file mode 100644 index 000000000..6e6b28c0c --- /dev/null +++ b/mcollective/lib/puppet/rails/database/004_add_inventory_service_tables.rb @@ -0,0 +1,36 @@ +class AddInventoryServiceTables < ActiveRecord::Migration + def self.up + unless ActiveRecord::Base.connection.tables.include?("inventory_nodes") + create_table :inventory_nodes do |t| + t.column :name, :string, :null => false + t.column :timestamp, :datetime, :null => false + t.column :updated_at, :datetime + t.column :created_at, :datetime + end + + add_index :inventory_nodes, :name, :unique => true + end + + unless ActiveRecord::Base.connection.tables.include?("inventory_facts") + create_table :inventory_facts, :id => false do |t| + t.column :node_id, :integer, :null => false + t.column :name, :string, :null => false + t.column :value, :text, :null => false + end + + add_index :inventory_facts, [:node_id, :name], :unique => true + end + end + + def self.down + unless ActiveRecord::Base.connection.tables.include?("inventory_nodes") + remove_index :inventory_nodes, :name + drop_table :inventory_nodes + end + + if ActiveRecord::Base.connection.tables.include?("inventory_facts") + remove_index :inventory_facts, [:node_id, :name] + drop_table :inventory_facts + end + end +end diff --git a/mcollective/lib/puppet/rails/database/schema.rb b/mcollective/lib/puppet/rails/database/schema.rb new file mode 100644 index 000000000..7b75f4216 --- /dev/null +++ b/mcollective/lib/puppet/rails/database/schema.rb @@ -0,0 +1,131 @@ +class Puppet::Rails::Schema + def self.init + oldout = nil + Puppet::Util.benchmark(Puppet, :notice, "Initialized database") do + # We want to rewrite stdout, so we don't get migration messages. + oldout = $stdout + $stdout = File.open("/dev/null", "w") + ActiveRecord::Schema.define do + create_table :resources do |t| + t.column :title, :text, :null => false + t.column :restype, :string, :null => false + t.column :host_id, :integer + t.column :source_file_id, :integer + t.column :exported, :boolean + t.column :line, :integer + t.column :updated_at, :datetime + t.column :created_at, :datetime + end + add_index :resources, :host_id, :integer => true + add_index :resources, :source_file_id, :integer => true + + # Thanks, mysql! MySQL requires a length on indexes in text fields. + # So, we provide them for mysql and handle everything else specially. 
+ # Oracle doesn't index on CLOB fields, so we skip it + if Puppet[:dbadapter] == "mysql" + execute "CREATE INDEX typentitle ON resources (restype,title(50));" + elsif Puppet[:dbadapter] != "oracle_enhanced" + add_index :resources, [:title, :restype] + end + + create_table :source_files do |t| + t.column :filename, :string + t.column :path, :string + t.column :updated_at, :datetime + t.column :created_at, :datetime + end + add_index :source_files, :filename + + create_table :resource_tags do |t| + t.column :resource_id, :integer + t.column :puppet_tag_id, :integer + t.column :updated_at, :datetime + t.column :created_at, :datetime + end + add_index :resource_tags, :resource_id, :integer => true + add_index :resource_tags, :puppet_tag_id, :integer => true + + create_table :puppet_tags do |t| + t.column :name, :string + t.column :updated_at, :datetime + t.column :created_at, :datetime + end + + # Oracle automatically creates a primary key index + add_index :puppet_tags, :id, :integer => true if Puppet[:dbadapter] != "oracle_enhanced" + + create_table :hosts do |t| + t.column :name, :string, :null => false + t.column :ip, :string + t.column :environment, :text + t.column :last_compile, :datetime + t.column :last_freshcheck, :datetime + t.column :last_report, :datetime + #Use updated_at to automatically add timestamp on save. + t.column :updated_at, :datetime + t.column :source_file_id, :integer + t.column :created_at, :datetime + end + add_index :hosts, :source_file_id, :integer => true + add_index :hosts, :name + + create_table :fact_names do |t| + t.column :name, :string, :null => false + t.column :updated_at, :datetime + t.column :created_at, :datetime + end + add_index :fact_names, :name + + create_table :fact_values do |t| + t.column :value, :text, :null => false + t.column :fact_name_id, :integer, :null => false + t.column :host_id, :integer, :null => false + t.column :updated_at, :datetime + t.column :created_at, :datetime + end + add_index :fact_values, :fact_name_id, :integer => true + add_index :fact_values, :host_id, :integer => true + + create_table :param_values do |t| + t.column :value, :text, :null => false + t.column :param_name_id, :integer, :null => false + t.column :line, :integer + t.column :resource_id, :integer + t.column :updated_at, :datetime + t.column :created_at, :datetime + end + add_index :param_values, :param_name_id, :integer => true + add_index :param_values, :resource_id, :integer => true + + create_table :param_names do |t| + t.column :name, :string, :null => false + t.column :updated_at, :datetime + t.column :created_at, :datetime + end + add_index :param_names, :name + + create_table :inventory_nodes do |t| + t.column :name, :string, :null => false + t.column :timestamp, :datetime, :null => false + t.column :updated_at, :datetime + t.column :created_at, :datetime + end + + add_index :inventory_nodes, :name, :unique => true + + create_table :inventory_facts, :id => false do |t| + t.column :node_id, :integer, :null => false + t.column :name, :string, :null => false + t.column :value, :text, :null => false + end + + add_index :inventory_facts, [:node_id, :name], :unique => true + end + end + ensure + $stdout.close + $stdout = oldout if oldout + oldout = nil + end +end + diff --git a/mcollective/lib/puppet/rails/fact_name.rb b/mcollective/lib/puppet/rails/fact_name.rb new file mode 100644 index 000000000..4273399e5 --- /dev/null +++ b/mcollective/lib/puppet/rails/fact_name.rb @@ -0,0 +1,5 @@ +require 'puppet/rails/fact_value' + +class Puppet::Rails::FactName < 
ActiveRecord::Base + has_many :fact_values, :dependent => :destroy +end diff --git a/mcollective/lib/puppet/rails/fact_value.rb b/mcollective/lib/puppet/rails/fact_value.rb new file mode 100644 index 000000000..9fd81ae1c --- /dev/null +++ b/mcollective/lib/puppet/rails/fact_value.rb @@ -0,0 +1,8 @@ +class Puppet::Rails::FactValue < ActiveRecord::Base + belongs_to :fact_name + belongs_to :host + + def to_label + "#{self.fact_name.name}" + end +end diff --git a/mcollective/lib/puppet/rails/host.rb b/mcollective/lib/puppet/rails/host.rb new file mode 100644 index 000000000..e5360217c --- /dev/null +++ b/mcollective/lib/puppet/rails/host.rb @@ -0,0 +1,258 @@ +require 'puppet/node/environment' +require 'puppet/rails' +require 'puppet/rails/resource' +require 'puppet/rails/fact_name' +require 'puppet/rails/source_file' +require 'puppet/rails/benchmark' +require 'puppet/util/rails/collection_merger' + +class Puppet::Rails::Host < ActiveRecord::Base + include Puppet::Rails::Benchmark + extend Puppet::Rails::Benchmark + include Puppet::Util + include Puppet::Util::CollectionMerger + + has_many :fact_values, :dependent => :destroy, :class_name => "Puppet::Rails::FactValue" + has_many :fact_names, :through => :fact_values, :class_name => "Puppet::Rails::FactName" + belongs_to :source_file + has_many :resources, :dependent => :destroy, :class_name => "Puppet::Rails::Resource" + + def self.from_puppet(node) + host = find_by_name(node.name) || new(:name => node.name) + + {"ipaddress" => "ip", "environment" => "environment"}.each do |myparam, itsparam| + if value = node.send(myparam) + host.send(itsparam + "=", value) + end + end + + host + end + + # Override the setter for environment to force it to be a string, lest it + # be YAML encoded. See #4487. + def environment=(value) + super value.to_s + end + + # returns a hash of fact_names.name => [ fact_values ] for this host. + # Note that 'fact_values' is actually a list of the value instances, not + # just actual values. + def get_facts_hash + fact_values = self.fact_values.find(:all, :include => :fact_name) + return fact_values.inject({}) do | hash, value | + hash[value.fact_name.name] ||= [] + hash[value.fact_name.name] << value + hash + end + end + + + # This is *very* similar to the merge_parameters method + # of Puppet::Rails::Resource. + def merge_facts(facts) + db_facts = {} + + deletions = [] + self.fact_values.find(:all, :include => :fact_name).each do |value| + deletions << value['id'] and next unless facts.include?(value['name']) + # Now store them for later testing. + db_facts[value['name']] ||= [] + db_facts[value['name']] << value + end + + # Now get rid of any parameters whose value list is different. + # This might be extra work in cases where an array has added or lost + # a single value, but in the most common case (a single value has changed) + # this makes sense. + db_facts.each do |name, value_hashes| + values = value_hashes.collect { |v| v['value'] } + + unless values == facts[name] + value_hashes.each { |v| deletions << v['id'] } + end + end + + # Perform our deletions. + Puppet::Rails::FactValue.delete(deletions) unless deletions.empty? + + # Lastly, add any new parameters. + facts.each do |name, value| + next if db_facts.include?(name) + values = value.is_a?(Array) ? value : [value] + + values.each do |v| + fact_values.build(:value => v, :fact_name => Puppet::Rails::FactName.find_or_create_by_name(name)) + end + end + end + + # Set our resources. 
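  # merge_resources (below) loads the resources already stored for this host
  # along with their parameters and tags, then reconciles them with the freshly
  # compiled catalog in compare_to_catalog: stale database resources are
  # destroyed, resources present in both places are merged, and resources that
  # only exist in the catalog are built and saved.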
+ def merge_resources(list) + # keep only exported resources in thin_storeconfig mode + list = list.select { |r| r.exported? } if Puppet.settings[:thin_storeconfigs] + + resources_by_id = nil + debug_benchmark("Searched for resources") { + resources_by_id = find_resources + } + + debug_benchmark("Searched for resource params and tags") { + find_resources_parameters_tags(resources_by_id) + } if id + + debug_benchmark("Performed resource comparison") { + compare_to_catalog(resources_by_id, list) + } + end + + def find_resources + condition = { :exported => true } if Puppet.settings[:thin_storeconfigs] + + resources.find(:all, :include => :source_file, :conditions => condition || {}).inject({}) do | hash, resource | + hash[resource.id] = resource + hash + end + end + + def find_resources_parameters_tags(resources) + find_resources_parameters(resources) + find_resources_tags(resources) + end + + def compare_to_catalog(existing, list) + compiled = list.inject({}) do |hash, resource| + hash[resource.ref] = resource + hash + end + + resources = nil + debug_benchmark("Resource removal") { + resources = remove_unneeded_resources(compiled, existing) + } + + # Now for all resources in the catalog but not in the db, we're pretty easy. + additions = nil + debug_benchmark("Resource merger") { + additions = perform_resource_merger(compiled, resources) + } + + debug_benchmark("Resource addition") { + additions.each do |resource| + build_rails_resource_from_parser_resource(resource) + end + + log_accumulated_marks "Added resources" + } + end + + def add_new_resources(additions) + additions.each do |resource| + Puppet::Rails::Resource.from_parser_resource(self, resource) + end + end + + # Turn a parser resource into a Rails resource. + def build_rails_resource_from_parser_resource(resource) + db_resource = nil + accumulate_benchmark("Added resources", :initialization) { + args = Puppet::Rails::Resource.rails_resource_initial_args(resource) + + db_resource = self.resources.build(args) + + # Our file= method does the name to id conversion. + db_resource.file = resource.file + } + + + accumulate_benchmark("Added resources", :parameters) { + resource.each do |param, value| + Puppet::Rails::ParamValue.from_parser_param(param, value).each do |value_hash| + db_resource.param_values.build(value_hash) + end + end + } + + accumulate_benchmark("Added resources", :tags) { + resource.tags.each { |tag| db_resource.add_resource_tag(tag) } + } + + db_resource.save + + db_resource + end + + + def perform_resource_merger(compiled, resources) + return compiled.values if resources.empty? + + # Now for all resources in the catalog but not in the db, we're pretty easy. + additions = [] + compiled.each do |ref, resource| + if db_resource = resources[ref] + db_resource.merge_parser_resource(resource) + else + additions << resource + end + end + log_accumulated_marks "Resource merger" + + additions + end + + def remove_unneeded_resources(compiled, existing) + deletions = [] + resources = {} + existing.each do |id, resource| + # it seems that it can happen (see bug #2010) some resources are duplicated in the + # database (ie logically corrupted database), in which case we remove the extraneous + # entries. + if resources.include?(resource.ref) + deletions << id + next + end + + # If the resource is in the db but not in the catalog, mark it + # for removal. 
+ unless compiled.include?(resource.ref) + deletions << id + next + end + + resources[resource.ref] = resource + end + # We need to use 'destroy' here, not 'delete', so that all + # dependent objects get removed, too. + Puppet::Rails::Resource.destroy(deletions) unless deletions.empty? + + resources + end + + def find_resources_parameters(resources) + params = Puppet::Rails::ParamValue.find_all_params_from_host(self) + + # assign each loaded parameters/tags to the resource it belongs to + params.each do |param| + resources[param['resource_id']].add_param_to_list(param) if resources.include?(param['resource_id']) + end + end + + def find_resources_tags(resources) + tags = Puppet::Rails::ResourceTag.find_all_tags_from_host(self) + + tags.each do |tag| + resources[tag['resource_id']].add_tag_to_list(tag) if resources.include?(tag['resource_id']) + end + end + + def to_puppet + node = Puppet::Node.new(self.name) + {"ip" => "ipaddress", "environment" => "environment"}.each do |myparam, itsparam| + if value = send(myparam) + node.send(itsparam + "=", value) + end + end + + node + end +end diff --git a/mcollective/lib/puppet/rails/inventory_fact.rb b/mcollective/lib/puppet/rails/inventory_fact.rb new file mode 100644 index 000000000..aa6334eef --- /dev/null +++ b/mcollective/lib/puppet/rails/inventory_fact.rb @@ -0,0 +1,5 @@ +require 'puppet/rails/inventory_node' + +class Puppet::Rails::InventoryFact < ::ActiveRecord::Base + belongs_to :node, :class_name => "Puppet::Rails::InventoryNode" +end diff --git a/mcollective/lib/puppet/rails/inventory_node.rb b/mcollective/lib/puppet/rails/inventory_node.rb new file mode 100644 index 000000000..da7e61040 --- /dev/null +++ b/mcollective/lib/puppet/rails/inventory_node.rb @@ -0,0 +1,30 @@ +require 'puppet/rails/inventory_fact' + +class Puppet::Rails::InventoryNode < ::ActiveRecord::Base + has_many :facts, :class_name => "Puppet::Rails::InventoryFact", :foreign_key => :node_id, :dependent => :delete_all + + if Puppet::Util.activerecord_version >= 3.0 + # Prevents "DEPRECATION WARNING: Base.named_scope has been deprecated, please use Base.scope instead" + ActiveRecord::NamedScope::ClassMethods.module_eval { alias :named_scope :scope } + end + + named_scope :has_fact_with_value, lambda { |name,value| + { + :conditions => ["inventory_facts.name = ? AND inventory_facts.value = ?", name, value], + :joins => :facts + } + } + + named_scope :has_fact_without_value, lambda { |name,value| + { + :conditions => ["inventory_facts.name = ? 
AND inventory_facts.value != ?", name, value], + :joins => :facts + } + } + + def facts_to_hash + facts.inject({}) do |fact_hash,fact| + fact_hash.merge(fact.name => fact.value) + end + end +end diff --git a/mcollective/lib/puppet/rails/param_name.rb b/mcollective/lib/puppet/rails/param_name.rb new file mode 100644 index 000000000..91cf45628 --- /dev/null +++ b/mcollective/lib/puppet/rails/param_name.rb @@ -0,0 +1,25 @@ +require 'puppet/util/rails/collection_merger' +require 'puppet/rails/param_value' +require 'puppet/util/rails/cache_accumulator' + +class Puppet::Rails::ParamName < ActiveRecord::Base + include Puppet::Util::CollectionMerger + has_many :param_values, :dependent => :destroy + + include Puppet::Util::CacheAccumulator + accumulates :name + + def to_resourceparam(resource, source) + hash = {} + hash[:name] = self.name.to_sym + hash[:source] = source + hash[:value] = resource.param_values.find(:all, :conditions => [ "param_name_id = ?", self.id]).collect { |v| v.value } + if hash[:value].length == 1 + hash[:value] = hash[:value].shift + elsif hash[:value].empty? + hash[:value] = nil + end + Puppet::Parser::Resource::Param.new hash + end +end + diff --git a/mcollective/lib/puppet/rails/param_value.rb b/mcollective/lib/puppet/rails/param_value.rb new file mode 100644 index 000000000..d7c88f844 --- /dev/null +++ b/mcollective/lib/puppet/rails/param_value.rb @@ -0,0 +1,74 @@ +require 'puppet/util/rails/reference_serializer' + +class Puppet::Rails::ParamValue < ActiveRecord::Base + include Puppet::Util::ReferenceSerializer + extend Puppet::Util::ReferenceSerializer + + belongs_to :param_name + belongs_to :resource + + # Store a new parameter in a Rails db. + def self.from_parser_param(param, values) + values = munge_parser_values(values) + + param_name = Puppet::Rails::ParamName.find_or_create_by_name(param.to_s) + return values.collect do |v| + {:value => v, :param_name => param_name} + end + end + + # Make sure an array (or possibly not an array) of values is correctly + # set up for Rails. The main thing is that Resource::Reference objects + # should stay objects, so they just get serialized. + def self.munge_parser_values(value) + values = value.is_a?(Array) ? value : [value] + values.map do |v| + if v.is_a?(Puppet::Resource) + v + else + v.to_s + end + end + end + + + def value + unserialize_value(self[:value]) + end + + # I could not find a cleaner way to handle making sure that resource references + # were consistently serialized and deserialized. + def value=(val) + self[:value] = serialize_value(val) + end + + def to_label + "#{self.param_name.name}" + end + + # returns an array of hash containing all the parameters of a given resource + def self.find_all_params_from_resource(db_resource) + params = db_resource.connection.select_all("SELECT v.id, v.value, v.line, v.resource_id, v.param_name_id, n.name FROM param_values v INNER JOIN param_names n ON v.param_name_id=n.id WHERE v.resource_id=#{db_resource.id}") + params.each do |val| + val['value'] = unserialize_value(val['value']) + val['line'] = val['line'] ? 
Integer(val['line']) : nil + val['resource_id'] = Integer(val['resource_id']) + end + params + end + + # returns an array of hash containing all the parameters of a given host + def self.find_all_params_from_host(db_host) + params = db_host.connection.select_all("SELECT v.id, v.value, v.line, v.resource_id, v.param_name_id, n.name FROM param_values v INNER JOIN resources r ON v.resource_id=r.id INNER JOIN param_names n ON v.param_name_id=n.id WHERE r.host_id=#{db_host.id}") + params.each do |val| + val['value'] = unserialize_value(val['value']) + val['line'] = val['line'] ? Integer(val['line']) : nil + val['resource_id'] = Integer(val['resource_id']) + end + params + end + + def to_s + "#{self.name} => #{self.value}" + end +end diff --git a/mcollective/lib/puppet/rails/puppet_tag.rb b/mcollective/lib/puppet/rails/puppet_tag.rb new file mode 100644 index 000000000..fadbfa6f6 --- /dev/null +++ b/mcollective/lib/puppet/rails/puppet_tag.rb @@ -0,0 +1,10 @@ +require 'puppet/rails/resource_tag' +require 'puppet/util/rails/cache_accumulator' + +class Puppet::Rails::PuppetTag < ActiveRecord::Base + has_many :resource_tags, :dependent => :destroy + has_many :resources, :through => :resource_tags + + include Puppet::Util::CacheAccumulator + accumulates :name +end diff --git a/mcollective/lib/puppet/rails/resource.rb b/mcollective/lib/puppet/rails/resource.rb new file mode 100644 index 000000000..582cdd41a --- /dev/null +++ b/mcollective/lib/puppet/rails/resource.rb @@ -0,0 +1,231 @@ +require 'puppet' +require 'puppet/rails/param_name' +require 'puppet/rails/param_value' +require 'puppet/rails/puppet_tag' +require 'puppet/rails/benchmark' +require 'puppet/util/rails/collection_merger' + +class Puppet::Rails::Resource < ActiveRecord::Base + include Puppet::Util::CollectionMerger + include Puppet::Util::ReferenceSerializer + include Puppet::Rails::Benchmark + + has_many :param_values, :dependent => :destroy, :class_name => "Puppet::Rails::ParamValue" + has_many :param_names, :through => :param_values, :class_name => "Puppet::Rails::ParamName" + + has_many :resource_tags, :dependent => :destroy, :class_name => "Puppet::Rails::ResourceTag" + has_many :puppet_tags, :through => :resource_tags, :class_name => "Puppet::Rails::PuppetTag" + + belongs_to :source_file + belongs_to :host + + @tags = {} + def self.tags + @tags + end + + # Determine the basic details on the resource. + def self.rails_resource_initial_args(resource) + result = [:type, :title, :line].inject({}) do |hash, param| + # 'type' isn't a valid column name, so we have to use another name. + to = (param == :type) ? :restype : param + if value = resource.send(param) + hash[to] = value + end + hash + end + + # We always want a value here, regardless of what the resource has, + # so we break it out separately. + result[:exported] = resource.exported || false + + result + end + + def add_resource_tag(tag) + pt = Puppet::Rails::PuppetTag.accumulate_by_name(tag) + resource_tags.build(:puppet_tag => pt) + end + + def file + (f = self.source_file) ? 
f.filename : nil + end + + def file=(file) + self.source_file = Puppet::Rails::SourceFile.find_or_create_by_filename(file) + end + + def title + unserialize_value(self[:title]) + end + + def params_list + @params_list ||= [] + end + + def params_list=(params) + @params_list = params + end + + def add_param_to_list(param) + params_list << param + end + + def tags_list + @tags_list ||= [] + end + + def tags_list=(tags) + @tags_list = tags + end + + def add_tag_to_list(tag) + tags_list << tag + end + + def [](param) + super || parameter(param) + end + + # Make sure this resource is equivalent to the provided Parser resource. + def merge_parser_resource(resource) + accumulate_benchmark("Individual resource merger", :attributes) { merge_attributes(resource) } + accumulate_benchmark("Individual resource merger", :parameters) { merge_parameters(resource) } + accumulate_benchmark("Individual resource merger", :tags) { merge_tags(resource) } + save + end + + def merge_attributes(resource) + args = self.class.rails_resource_initial_args(resource) + args.each do |param, value| + self[param] = value unless resource[param] == value + end + + # Handle file specially + self.file = resource.file if (resource.file and (!resource.file or self.file != resource.file)) + end + + def merge_parameters(resource) + catalog_params = {} + resource.each do |param, value| + catalog_params[param.to_s] = value + end + + db_params = {} + + deletions = [] + params_list.each do |value| + # First remove any parameters our catalog resource doesn't have at all. + deletions << value['id'] and next unless catalog_params.include?(value['name']) + + # Now store them for later testing. + db_params[value['name']] ||= [] + db_params[value['name']] << value + end + + # Now get rid of any parameters whose value list is different. + # This might be extra work in cases where an array has added or lost + # a single value, but in the most common case (a single value has changed) + # this makes sense. + db_params.each do |name, value_hashes| + values = value_hashes.collect { |v| v['value'] } + + value_hashes.each { |v| deletions << v['id'] } unless value_compare(catalog_params[name], values) + end + + # Perform our deletions. + Puppet::Rails::ParamValue.delete(deletions) unless deletions.empty? + + # Lastly, add any new parameters. + catalog_params.each do |name, value| + next if db_params.include?(name) && ! db_params[name].find{ |val| deletions.include?( val["id"] ) } + values = value.is_a?(Array) ? value : [value] + + values.each do |v| + param_values.build(:value => serialize_value(v), :line => resource.line, :param_name => Puppet::Rails::ParamName.accumulate_by_name(name)) + end + end + end + + # Make sure the tag list is correct. + def merge_tags(resource) + in_db = [] + deletions = [] + resource_tags = resource.tags + tags_list.each do |tag| + deletions << tag['id'] and next unless resource_tags.include?(tag['name']) + in_db << tag['name'] + end + Puppet::Rails::ResourceTag.delete(deletions) unless deletions.empty? + + (resource_tags - in_db).each do |tag| + add_resource_tag(tag) + end + end + + def value_compare(v,db_value) + v = [v] unless v.is_a?(Array) + + v == db_value + end + + def name + ref + end + + def parameter(param) + if pn = param_names.find_by_name(param) + return (pv = param_values.find(:first, :conditions => [ 'param_name_id = ?', pn])) ? 
pv.value : nil + end + end + + def ref(dummy_argument=:work_arround_for_ruby_GC_bug) + "#{self[:restype].split("::").collect { |s| s.capitalize }.join("::")}[#{self.title}]" + end + + # Returns a hash of parameter names and values, no ActiveRecord instances. + def to_hash + Puppet::Rails::ParamValue.find_all_params_from_resource(self).inject({}) do |hash, value| + hash[value['name']] ||= [] + hash[value['name']] << value.value + hash + end + end + + # Convert our object to a resource. Do not retain whether the object + # is exported, though, since that would cause it to get stripped + # from the configuration. + def to_resource(scope) + hash = self.attributes + hash["type"] = hash["restype"] + hash.delete("restype") + + # FIXME At some point, we're going to want to retain this information + # for logging and auditing. + hash.delete("host_id") + hash.delete("updated_at") + hash.delete("source_file_id") + hash.delete("created_at") + hash.delete("id") + hash.each do |p, v| + hash.delete(p) if v.nil? + end + hash[:scope] = scope + hash[:source] = scope.source + hash[:parameters] = [] + names = [] + self.param_names.each do |pname| + # We can get the same name multiple times because of how the + # db layout works. + next if names.include?(pname.name) + names << pname.name + hash[:parameters] << pname.to_resourceparam(self, scope.source) + end + obj = Puppet::Parser::Resource.new(hash.delete("type"), hash.delete("title"), hash) + + # Store the ID, so we can check if we're re-collecting the same resource. + obj.rails_id = self.id + + obj + end +end diff --git a/mcollective/lib/puppet/rails/resource_tag.rb b/mcollective/lib/puppet/rails/resource_tag.rb new file mode 100644 index 000000000..1c1aa4595 --- /dev/null +++ b/mcollective/lib/puppet/rails/resource_tag.rb @@ -0,0 +1,26 @@ +class Puppet::Rails::ResourceTag < ActiveRecord::Base + belongs_to :puppet_tag + belongs_to :resource + + def to_label + "#{self.puppet_tag.name}" + end + + # returns an array of hash containing tags of resource + def self.find_all_tags_from_resource(db_resource) + tags = db_resource.connection.select_all("SELECT t.id, t.resource_id, p.name FROM resource_tags t INNER JOIN puppet_tags p ON t.puppet_tag_id=p.id WHERE t.resource_id=#{db_resource.id}") + tags.each do |val| + val['resource_id'] = Integer(val['resource_id']) + end + tags + end + + # returns an array of hash containing tags of a host + def self.find_all_tags_from_host(db_host) + tags = db_host.connection.select_all("SELECT t.id, t.resource_id, p.name FROM resource_tags t INNER JOIN resources r ON t.resource_id=r.id INNER JOIN puppet_tags p ON t.puppet_tag_id=p.id WHERE r.host_id=#{db_host.id}") + tags.each do |val| + val['resource_id'] = Integer(val['resource_id']) + end + tags + end +end diff --git a/mcollective/lib/puppet/rails/source_file.rb b/mcollective/lib/puppet/rails/source_file.rb new file mode 100644 index 000000000..02557ffca --- /dev/null +++ b/mcollective/lib/puppet/rails/source_file.rb @@ -0,0 +1,8 @@ +class Puppet::Rails::SourceFile < ActiveRecord::Base + has_one :host + has_one :resource + + def to_label + "#{self.filename}" + end +end diff --git a/mcollective/lib/puppet/reference/configuration.rb b/mcollective/lib/puppet/reference/configuration.rb new file mode 100644 index 000000000..6581427ff --- /dev/null +++ b/mcollective/lib/puppet/reference/configuration.rb @@ -0,0 +1,144 @@ +config = Puppet::Util::Reference.newreference(:configuration, :depth => 1, :doc => "A reference for all configuration parameters") do + docs = {} + 
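  # Gather every known setting object first; they are sorted by name and
  # documented one at a time below.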
Puppet.settings.each do |name, object| + docs[name] = object + end + + str = "" + docs.sort { |a, b| + a[0].to_s <=> b[0].to_s + }.each do |name, object| + # Make each name an anchor + header = name.to_s + str += h(header, 3) + + # Print the doc string itself + begin + str += object.desc.gsub(/\n/, " ") + rescue => detail + puts detail.backtrace + puts detail + end + str += "\n\n" + + # Now print the data about the item. + str += "" + val = object.default + if name.to_s == "vardir" + val = "/var/lib/puppet" + elsif name.to_s == "confdir" + val = "/etc/puppet" + end + + # Leave out the section information; it was apparently confusing people. + #str += "- **Section**: #{object.section}\n" + unless val == "" + str += "- *Default*: #{val}\n" + end + str += "\n" + end + + return str +end + +config.header = " +## Specifying Configuration Parameters + +### On The Command-Line + +Every Puppet executable (with the exception of `puppetdoc`) accepts all of +the parameters below, but not all of the arguments make sense for every executable. + +I have tried to be as thorough as possible in the descriptions of the +arguments, so it should be obvious whether an argument is appropriate or not. + +These parameters can be supplied to the executables either as command-line +options or in the configuration file. For instance, the command-line +invocation below would set the configuration directory to `/private/puppet`: + + $ puppet agent --confdir=/private/puppet + +Note that boolean options are turned on and off with a slightly different +syntax on the command line: + + $ puppet agent --storeconfigs + + $ puppet agent --no-storeconfigs + +The invocations above will enable and disable, respectively, the storage of +the client configuration. + +### Configuration Files + +As mentioned above, the configuration parameters can also be stored in a +configuration file, located in the configuration directory. As root, the +default configuration directory is `/etc/puppet`, and as a regular user, the +default configuration directory is `~user/.puppet`. As of 0.23.0, all +executables look for `puppet.conf` in their configuration directory +(although they previously looked for separate files). For example, +`puppet.conf` is located at `/etc/puppet/puppet.conf` as `root` and +`~user/.puppet/puppet.conf` as a regular user by default. + +All executables will set any parameters set within the `[main]` section, +and each executable will also use one of the `[master]`, `[agent]`, or `[user]` sections. + +#### File Format + +The file follows INI-style formatting. Here is an example of a very simple +`puppet.conf` file: + + [main] + confdir = /private/puppet + storeconfigs = true + +Note that boolean parameters must be explicitly specified as `true` or +`false` as seen above. + +If you need to change file or directory parameters (e.g., reset the mode or owner), do +so within curly braces on the same line: + + [main] + vardir = /new/vardir {owner = root, mode = 644} + +If you're starting out with a fresh configuration, you may wish to let +the executable generate a template configuration file for you by invoking +the executable in question with the `--genconfig` command. The executable +will print a template configuration to standard output, which can be +redirected to a file like so: + + $ puppet agent --genconfig > /etc/puppet/puppet.conf + +Note that this invocation will replace the contents of any pre-existing +`puppet.conf` file, so make a backup of your present config if it contains +valuable information.
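A generated configuration is exhaustive, so a hand-maintained `puppet.conf` is
usually much smaller. As a minimal sketch (only common setting names are shown,
and the hostname is a placeholder), a global value in `[main]` can be combined
with a per-executable override:

    [main]
    server = puppet.example.com
    logdir = /var/log/puppet

    [agent]
    report = true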
+ +Like the `--genconfig` argument, the executables also accept a `--genmanifest` +argument, which will generate a manifest that can be used to manage all of +Puppet's directories and files and print it to standard output. This can +likewise be redirected to a file: + + $ puppet agent --genmanifest > /etc/puppet/manifests/site.pp + +Puppet can also create user and group accounts for itself (one `puppet` group +and one `puppet` user) if it is invoked as `root` with the `--mkusers` argument: + + $ puppet master --mkusers + +## Signals + +The `puppet agent` and `puppet master` executables catch some signals for special +handling. Both daemons catch the hangup signal (`SIGHUP`), which forces the server to restart +itself. Predictably, interrupt and terminate (`SIGINT` and `SIGTERM`) will shut +down the server, whether it be an instance of `puppet agent` or `puppet master`. + +Sending the `SIGUSR1` signal to an instance of `puppet agent` will cause it to +immediately begin a new configuration transaction with the server. This +signal has no effect on `puppet master`. + +## Configuration Parameter Reference + +Below is a list of all documented parameters. Not all of them are valid with all +Puppet executables, but the executables will ignore any inappropriate values. + +" + diff --git a/mcollective/lib/puppet/reference/function.rb b/mcollective/lib/puppet/reference/function.rb new file mode 100644 index 000000000..7d39bebd5 --- /dev/null +++ b/mcollective/lib/puppet/reference/function.rb @@ -0,0 +1,17 @@ +function = Puppet::Util::Reference.newreference :function, :doc => "All functions available in the parser" do + Puppet::Parser::Functions.functiondocs +end +function.header = " +There are two types of functions in Puppet: Statements and rvalues. +Statements stand on their own and do not return arguments; they are used for +performing stand-alone work like importing. Rvalues return values and can +only be used in a statement requiring a value, such as an assignment or a case +statement. + +Functions execute on the Puppet master. They do not execute on the Puppet agent. +Hence they only have access to the commands and data available on the Puppet master +host. + +Here are the functions available in Puppet: + +" diff --git a/mcollective/lib/puppet/reference/indirection.rb b/mcollective/lib/puppet/reference/indirection.rb new file mode 100644 index 000000000..e5b076508 --- /dev/null +++ b/mcollective/lib/puppet/reference/indirection.rb @@ -0,0 +1,33 @@ +require 'puppet/indirector/indirection' +require 'puppet/util/checksums' +require 'puppet/file_serving/content' +require 'puppet/file_serving/metadata' + +reference = Puppet::Util::Reference.newreference :indirection, :doc => "Indirection types and their terminus classes" do + text = "" + Puppet::Indirector::Indirection.instances.sort { |a,b| a.to_s <=> b.to_s }.each do |indirection| + ind = Puppet::Indirector::Indirection.instance(indirection) + name = indirection.to_s.capitalize + text += "## " + indirection.to_s + "\n\n" + + text += ind.doc + "\n\n" + + Puppet::Indirector::Terminus.terminus_classes(ind.name).sort { |a,b| a.to_s <=> b.to_s }.each do |terminus| + text += "### " + terminus.to_s + "\n\n" + + term_class = Puppet::Indirector::Terminus.terminus_class(ind.name, terminus) + + text += Puppet::Util::Docs.scrub(term_class.doc) + "\n\n" + end + end + + text +end + +reference.header = "This is the list of all indirections, their associated terminus classes, and how you select between them.
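For example, the node indirection's terminus can be switched with its
`node_terminus` setting; a minimal sketch for an external node classifier
(the script path is a placeholder) would be:

    [master]
    node_terminus = exec
    external_nodes = /usr/local/bin/puppet_node_classifier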
+ +In general, the appropriate terminus class is selected by the application for you (e.g., `puppet agent` would always use the `rest` +terminus for most of its indirected classes), but some classes are tunable via normal settings. These will have `terminus setting` documentation listed with them. + + +" diff --git a/mcollective/lib/puppet/reference/metaparameter.rb b/mcollective/lib/puppet/reference/metaparameter.rb new file mode 100644 index 000000000..3c4c08701 --- /dev/null +++ b/mcollective/lib/puppet/reference/metaparameter.rb @@ -0,0 +1,41 @@ +metaparameter = Puppet::Util::Reference.newreference :metaparameter, :doc => "All Puppet metaparameters and all their details" do + types = {} + Puppet::Type.loadall + + Puppet::Type.eachtype { |type| + next if type.name == :puppet + next if type.name == :component + types[type.name] = type + } + + str = %{ + +# Metaparameters + +Metaparameters are parameters that work with any resource type; they are part of the +Puppet framework itself rather than being part of the implementation of any +given instance. Thus, any defined metaparameter can be used with any instance +in your manifest, including defined components. + +## Available Metaparameters + +} + begin + params = [] + Puppet::Type.eachmetaparam { |param| + params << param + } + + params.sort { |a,b| + a.to_s <=> b.to_s + }.each { |param| + str += paramwrap(param.to_s, scrub(Puppet::Type.metaparamdoc(param)), :level => 3) + } + rescue => detail + puts detail.backtrace + puts "incorrect metaparams: #{detail}" + exit(1) + end + + str +end diff --git a/mcollective/lib/puppet/reference/network.rb b/mcollective/lib/puppet/reference/network.rb new file mode 100644 index 000000000..fda7931fb --- /dev/null +++ b/mcollective/lib/puppet/reference/network.rb @@ -0,0 +1,39 @@ +require 'puppet/network/handler' + +network = Puppet::Util::Reference.newreference :network, :depth => 2, :doc => "Available network handlers and clients" do + ret = "" + Puppet::Network::Handler.subclasses.sort { |a,b| a.to_s <=> b.to_s }.each do |name| + handler = Puppet::Network::Handler.handler(name) + + next if ! handler.doc or handler.doc == "" + + interface = handler.interface + + ret += h(name, 2) + + ret += scrub(handler.doc) + ret += "\n\n" + ret += option(:prefix, interface.prefix) + ret += option(:side, handler.side.to_s.capitalize) + ret += option(:methods, interface.methods.collect { |ary| ary[0] }.join(", ") ) + ret += "\n\n" + end + + ret +end + +network.header = " +This is a list of all Puppet network interfaces. Each interface is +implemented in the form of a client and a handler; the handler is loaded +on the server, and the client knows how to call the handler's methods +appropriately. + +Most handlers are meant to be started on the server, usually within +`puppet master`, and the clients are mostly started on the client, +usually within `puppet agent`. + +You can find the server-side handler for each interface at +`puppet/network/handler/.rb` and the client class at +`puppet/network/client/.rb`. + +" diff --git a/mcollective/lib/puppet/reference/providers.rb b/mcollective/lib/puppet/reference/providers.rb new file mode 100644 index 000000000..c85ad23ab --- /dev/null +++ b/mcollective/lib/puppet/reference/providers.rb @@ -0,0 +1,123 @@ +# This doesn't get stored in trac, since it changes every time. 
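# The reference built below loads every resource type, asks each of the type's
# providers whether it is suitable on the local host, and renders one table per
# type; unsuitable providers get a numbered footnote listing the missing
# commands, facts, or features, or the confine tests that failed.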
+providers = Puppet::Util::Reference.newreference :providers, :title => "Provider Suitability Report", :depth => 1, :dynamic => true, :doc => "Which providers are valid for this machine" do + types = [] + Puppet::Type.loadall + Puppet::Type.eachtype do |klass| + next unless klass.providers.length > 0 + types << klass + end + types.sort! { |a,b| a.name.to_s <=> b.name.to_s } + + command_line = Puppet::Util::CommandLine.new + types.reject! { |type| ! command_line.args.include?(type.name.to_s) } unless command_line.args.empty? + + ret = "Details about this host:\n\n" + + # Throw some facts in there, so we know where the report is from. + ["Ruby Version", "Puppet Version", "Operating System", "Operating System Release"].each do |label| + name = label.gsub(/\s+/, '') + value = Facter.value(name) + ret += option(label, value) + end + ret += "\n" + + count = 1 + + # Produce output for each type. + types.each do |type| + features = type.features + ret += "\n" # add a trailing newline + + # Now build up a table of provider suitability. + headers = %w{Provider Suitable?} + features.collect { |f| f.to_s }.sort + + table_data = {} + + functional = false + notes = [] + begin + default = type.defaultprovider.name + rescue Puppet::DevError + default = "none" + end + type.providers.sort { |a,b| a.to_s <=> b.to_s }.each do |pname| + data = [] + table_data[pname] = data + provider = type.provider(pname) + + # Add the suitability note + if missing = provider.suitable?(false) and missing.empty? + data << "*X*" + suit = true + functional = true + else + data << "[#{count}]_" # A pointer to the appropriate footnote + suit = false + end + + # Add a footnote with the details about why this provider is unsuitable, if that's the case + unless suit + details = ".. [#{count}]\n" + missing.each do |test, values| + case test + when :exists + details += " - Missing files #{values.join(", ")}\n" + when :variable + values.each do |name, facts| + if Puppet.settings.valid?(name) + details += " - Setting #{name} (currently #{Puppet.settings.value(name).inspect}) not in list #{facts.join(", ")}\n" + else + details += " - Fact #{name} (currently #{Facter.value(name).inspect}) not in list #{facts.join(", ")}\n" + end + end + when :true + details += " - Got #{values} true tests that should have been false\n" + when :false + details += " - Got #{values} false tests that should have been true\n" + when :feature + details += " - Missing features #{values.collect { |f| f.to_s }.join(",")}\n" + end + end + notes << details + + count += 1 + end + + # Add a note for every feature + features.each do |feature| + if provider.features.include?(feature) + data << "*X*" + else + data << "" + end + end + end + + ret += h(type.name.to_s + "_", 2) + + ret += "[#{type.name}](#{"http://docs.puppetlabs.com/references/stable/type.html##{type.name}"})\n\n" + ret += option("Default provider", default) + ret += doctable(headers, table_data) + + notes.each do |note| + ret += note + "\n" + end + + ret += "\n" + end + + ret += "\n" + + ret +end +providers.header = " +Puppet resource types are usually backed by multiple implementations called `providers`, +which handle variance between platforms and tools. + +Different providers are suitable or unsuitable on different platforms based on things +like the presence of a given tool. + +Here are all of the provider-backed types and their different providers. Any unmentioned +types do not use providers yet. 
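Because suitability depends on the machine the report runs on, this reference is
generated on demand rather than shipped as static text. Something along the
lines of the following should produce it (assuming the standard `puppet doc`
reference interface); the code above also narrows the output to any resource
type names it finds on the command line:

    $ puppet doc --reference providers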
+ +" diff --git a/mcollective/lib/puppet/reference/report.rb b/mcollective/lib/puppet/reference/report.rb new file mode 100644 index 000000000..47fc779ab --- /dev/null +++ b/mcollective/lib/puppet/reference/report.rb @@ -0,0 +1,23 @@ +require 'puppet/reports' + +report = Puppet::Util::Reference.newreference :report, :doc => "All available transaction reports" do + Puppet::Reports.reportdocs +end + +report.header = " +Puppet clients can report back to the server after each transaction. This +transaction report is sent as a YAML dump of the +`Puppet::Transaction::Report` class and includes every log message that was +generated during the transaction along with as many metrics as Puppet knows how +to collect. See [Reports and Reporting](http://projects.puppetlabs.com/projects/puppet/wiki/Reports_And_Reporting) for more information on how to use reports. + +Currently, clients default to not sending in reports; you can enable reporting +by setting the `report` parameter to true. + +To use a report, set the `reports` parameter on the server; multiple +reports must be comma-separated. You can also specify `none` to disable +reports entirely. + +Puppet provides multiple report handlers that will process client reports: + +" diff --git a/mcollective/lib/puppet/reference/type.rb b/mcollective/lib/puppet/reference/type.rb new file mode 100644 index 000000000..b423387e9 --- /dev/null +++ b/mcollective/lib/puppet/reference/type.rb @@ -0,0 +1,113 @@ +type = Puppet::Util::Reference.newreference :type, :doc => "All Puppet resource types and all their details" do + types = {} + Puppet::Type.loadall + + Puppet::Type.eachtype { |type| + next if type.name == :puppet + next if type.name == :component + next if type.name == :whit + types[type.name] = type + } + + str = %{ + +## Resource Types + +- The *namevar* is the parameter used to uniquely identify a type instance. + This is the parameter that gets assigned when a string is provided before + the colon in a type declaration. In general, only developers will need to + worry about which parameter is the `namevar`. + + In the following code: + + file { "/etc/passwd": + owner => root, + group => root, + mode => 644 + } + + `/etc/passwd` is considered the title of the file object (used for things like + dependency handling), and because `path` is the namevar for `file`, that + string is assigned to the `path` parameter. + +- *Parameters* determine the specific configuration of the instance. They either + directly modify the system (internally, these are called properties) or they affect + how the instance behaves (e.g., adding a search path for `exec` instances or determining recursion on `file` instances). + +- *Providers* provide low-level functionality for a given resource type. This is + usually in the form of calling out to external commands. + + When required binaries are specified for providers, fully qualifed paths + indicate that the binary must exist at that specific path and unqualified + binaries indicate that Puppet will search for the binary using the shell + path. + +- *Features* are abilities that some providers might not support. You can use the list + of supported features to determine how a given provider can be used. + + Resource types define features they can use, and providers can be tested to see + which features they provide. + + } + + types.sort { |a,b| + a.to_s <=> b.to_s + }.each { |name,type| + + str += " + +---------------- + +" + + str += h(name, 3) + str += scrub(type.doc) + "\n\n" + + # Handle the feature docs. 
+ if featuredocs = type.featuredocs + str += h("Features", 4) + str += featuredocs + end + + docs = {} + type.validproperties.sort { |a,b| + a.to_s <=> b.to_s + }.reject { |sname| + property = type.propertybyname(sname) + property.nodoc + }.each { |sname| + property = type.propertybyname(sname) + + raise "Could not retrieve property #{sname} on type #{type.name}" unless property + + doc = nil + unless doc = property.doc + $stderr.puts "No docs for #{type}[#{sname}]" + next + end + doc = doc.dup + tmp = doc + tmp = scrub(tmp) + + docs[sname] = tmp + } + + str += h("Parameters", 4) + "\n" + type.parameters.sort { |a,b| + a.to_s <=> b.to_s + }.each { |name,param| + #docs[name] = indent(scrub(type.paramdoc(name)), $tab) + docs[name] = scrub(type.paramdoc(name)) + } + + additional_key_attributes = type.key_attributes - [:name] + docs.sort { |a, b| + a[0].to_s <=> b[0].to_s + }.each { |name, doc| + str += paramwrap(name, doc, :namevar => additional_key_attributes.include?(name)) + } + str += "\n" + } + + str +end diff --git a/mcollective/lib/puppet/relationship.rb b/mcollective/lib/puppet/relationship.rb new file mode 100644 index 000000000..08d7d042b --- /dev/null +++ b/mcollective/lib/puppet/relationship.rb @@ -0,0 +1,98 @@ +#!/usr/bin/env ruby +# +# Created by Luke A. Kanies on 2006-11-24. +# Copyright (c) 2006. All rights reserved. + +# subscriptions are permanent associations determining how different +# objects react to an event + +require 'puppet/util/pson' + +# This is Puppet's class for modeling edges in its configuration graph. +# It used to be a subclass of GRATR::Edge, but that class has weird hash +# overrides that dramatically slow down the graphing. +class Puppet::Relationship + extend Puppet::Util::Pson + attr_accessor :source, :target, :callback + + attr_reader :event + + def self.from_pson(pson) + source = pson["source"] + target = pson["target"] + + args = {} + if event = pson["event"] + args[:event] = event + end + if callback = pson["callback"] + args[:callback] = callback + end + + new(source, target, args) + end + + def event=(event) + raise ArgumentError, "You must pass a callback for non-NONE events" if event != :NONE and ! callback + @event = event + end + + def initialize(source, target, options = {}) + @source, @target = source, target + + options = (options || {}).inject({}) { |h,a| h[a[0].to_sym] = a[1]; h } + [:callback, :event].each do |option| + if value = options[option] + send(option.to_s + "=", value) + end + end + end + + # Does the passed event match our event? This is where the meaning + # of :NONE comes from. + def match?(event) + if self.event.nil? 
or event == :NONE or self.event == :NONE + return false + elsif self.event == :ALL_EVENTS or event == self.event + return true + else + return false + end + end + + def label + result = {} + result[:callback] = callback if callback + result[:event] = event if event + result + end + + def ref + "#{source} => #{target}" + end + + def inspect + "{ #{source} => #{target} }" + end + + def to_pson_data_hash + data = { + 'source' => source.to_s, + 'target' => target.to_s + } + + ["event", "callback"].each do |attr| + next unless value = send(attr) + data[attr] = value + end + data + end + + def to_pson(*args) + to_pson_data_hash.to_pson(*args) + end + + def to_s + ref + end +end diff --git a/mcollective/lib/puppet/reports.rb b/mcollective/lib/puppet/reports.rb new file mode 100755 index 000000000..3ebd16e30 --- /dev/null +++ b/mcollective/lib/puppet/reports.rb @@ -0,0 +1,49 @@ +require 'puppet/util/instance_loader' + +# A simple mechanism for loading and returning reports. +class Puppet::Reports + extend Puppet::Util::ClassGen + extend Puppet::Util::InstanceLoader + + # Set up autoloading and retrieving of reports. + instance_load :report, 'puppet/reports' + + class << self + attr_reader :hooks + end + + # Add a new report type. + def self.register_report(name, options = {}, &block) + name = symbolize(name) + + mod = genmodule(name, :extend => Puppet::Util::Docs, :hash => instance_hash(:report), :block => block) + + mod.useyaml = true if options[:useyaml] + + mod.send(:define_method, :report_name) do + name + end + end + + # Collect the docs for all of our reports. + def self.reportdocs + docs = "" + + # Use this method so they all get loaded + instance_loader(:report).loadall + loaded_instances(:report).sort { |a,b| a.to_s <=> b.to_s }.each do |name| + mod = self.report(name) + docs += "#{name}\n#{"-" * name.to_s.length}\n" + + docs += Puppet::Util::Docs.scrub(mod.doc) + "\n\n" + end + + docs + end + + # List each of the reports. + def self.reports + instance_loader(:report).loadall + loaded_instances(:report) + end +end diff --git a/mcollective/lib/puppet/reports/http.rb b/mcollective/lib/puppet/reports/http.rb new file mode 100644 index 000000000..7ac54dfbd --- /dev/null +++ b/mcollective/lib/puppet/reports/http.rb @@ -0,0 +1,22 @@ +require 'puppet' +require 'net/http' +require 'uri' + +Puppet::Reports.register_report(:http) do + + desc <<-DESC + Send report information via HTTP to the `reporturl`. Each host sends + its report as a YAML dump and this sends this YAML to a client via HTTP POST. + The YAML is the `report` parameter of the request." + DESC + + def process + url = URI.parse(Puppet[:reporturl]) + req = Net::HTTP::Post.new(url.path) + req.body = self.to_yaml + req.content_type = "application/x-yaml" + Net::HTTP.new(url.host, url.port).start {|http| + http.request(req) + } + end +end diff --git a/mcollective/lib/puppet/reports/log.rb b/mcollective/lib/puppet/reports/log.rb new file mode 100644 index 000000000..1fc6876a6 --- /dev/null +++ b/mcollective/lib/puppet/reports/log.rb @@ -0,0 +1,14 @@ +require 'puppet/reports' + +Puppet::Reports.register_report(:log) do + desc "Send all received logs to the local log destinations. Usually + the log destination is syslog." 
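  # A minimal sketch of enabling this handler on the master (setting names
  # assumed from the standard set; the reports list is comma-separated):
  #
  #   [master]
  #   reports = store,log
  #
  # Agents additionally need report = true before anything arrives here.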
+ + def process + self.logs.each do |log| + log.source = "//#{self.host}/#{log.source}" + Puppet::Util::Log.newmessage(log) + end + end +end + diff --git a/mcollective/lib/puppet/reports/rrdgraph.rb b/mcollective/lib/puppet/reports/rrdgraph.rb new file mode 100644 index 000000000..517fa8f03 --- /dev/null +++ b/mcollective/lib/puppet/reports/rrdgraph.rb @@ -0,0 +1,128 @@ +Puppet::Reports.register_report(:rrdgraph) do + desc "Graph all available data about hosts using the RRD library. You + must have the Ruby RRDtool library installed to use this report, which + you can get from + [the RubyRRDTool RubyForge page](http://rubyforge.org/projects/rubyrrdtool/). + This package may also be available as `ruby-rrd` or `rrdtool-ruby` in your + distribution's package management system. The library and/or package will both + require the binary `rrdtool` package from your distribution to be installed. + + This report will create, manage, and graph RRD database files for each + of the metrics generated during transactions, and it will create a + few simple html files to display the reporting host's graphs. At this + point, it will not create a common index file to display links to + all hosts. + + All RRD files and graphs get created in the `rrddir` directory. If + you want to serve these publicly, you should be able to just alias that + directory in a web server. + + If you really know what you're doing, you can tune the `rrdinterval`, + which defaults to the `runinterval`." + + def hostdir + @hostdir ||= File.join(Puppet[:rrddir], self.host) + end + + def htmlfile(type, graphs, field) + file = File.join(hostdir, "#{type}.html") + File.open(file, "w") do |of| + of.puts "#{type.capitalize} graphs for #{host}" + + graphs.each do |graph| + if field == :first + name = graph.sub(/-\w+.png/, '').capitalize + else + name = graph.sub(/\w+-/, '').sub(".png", '').capitalize + end + of.puts "
" + end + of.puts "" + end + + file + end + + def mkhtml + images = Dir.entries(hostdir).find_all { |d| d =~ /\.png/ } + + periodorder = %w{daily weekly monthly yearly} + + periods = {} + types = {} + images.each do |n| + type, period = n.sub(".png", '').split("-") + periods[period] ||= [] + types[type] ||= [] + periods[period] << n + types[type] << n + end + + files = [] + # Make the period html files + periodorder.each do |period| + unless ary = periods[period] + raise Puppet::Error, "Could not find graphs for #{period}" + end + files << htmlfile(period, ary, :first) + end + + # make the type html files + types.sort { |a,b| a[0] <=> b[0] }.each do |type, ary| + newary = [] + periodorder.each do |period| + if graph = ary.find { |g| g.include?("-#{period}.png") } + newary << graph + else + raise "Could not find #{type}-#{period} graph" + end + end + + files << htmlfile(type, newary, :second) + end + + File.open(File.join(hostdir, "index.html"), "w") do |of| + of.puts "Report graphs for #{host}" + files.each do |file| + of.puts "#{File.basename(file).sub(".html",'').capitalize}
" + end + of.puts "" + end + end + + def process(time = nil) + time ||= Time.now.to_i + + unless File.directory?(hostdir) and FileTest.writable?(hostdir) + # Some hackishness to create the dir with all of the right modes and ownership + config = Puppet::Util::Settings.new + config.setdefaults(:reports, :hostdir => {:default => hostdir, :owner => 'service', :mode => 0755, :group => 'service', :desc => "eh"}) + + # This creates the dir. + config.use(:reports) + end + + self.metrics.each do |name, metric| + metric.basedir = hostdir + + if name == "time" + timeclean(metric) + end + + metric.store(time) + + metric.graph + end + + mkhtml unless FileTest.exists?(File.join(hostdir, "index.html")) + end + + # Unfortunately, RRD does not deal well with changing lists of values, + # so we have to pick a list of values and stick with it. In this case, + # that means we record the total time, the config time, and that's about + # it. We should probably send each type's time as a separate metric. + def timeclean(metric) + metric.values = metric.values.find_all { |name, label, value| ['total', 'config_retrieval'].include?(name.to_s) } + end +end + diff --git a/mcollective/lib/puppet/reports/store.rb b/mcollective/lib/puppet/reports/store.rb new file mode 100644 index 000000000..dcce17ad0 --- /dev/null +++ b/mcollective/lib/puppet/reports/store.rb @@ -0,0 +1,49 @@ +require 'puppet' + +SEPARATOR = [Regexp.escape(File::SEPARATOR.to_s), Regexp.escape(File::ALT_SEPARATOR.to_s)].join + +Puppet::Reports.register_report(:store) do + desc "Store the yaml report on disk. Each host sends its report as a YAML dump + and this just stores the file on disk, in the `reportdir` directory. + + These files collect quickly -- one every half hour -- so it is a good idea + to perform some maintenance on them if you use this report (it's the only + default report)." + + def process + # We don't want any tracking back in the fs. Unlikely, but there + # you go. + if host =~ Regexp.union(/[#{SEPARATOR}]/, /\A\.\.?\Z/) + raise ArgumentError, "Invalid node name #{host.inspect}" + end + + dir = File.join(Puppet[:reportdir], host) + + if ! FileTest.exists?(dir) + FileUtils.mkdir_p(dir) + FileUtils.chmod_R(0750, dir) + end + + # Now store the report. + now = Time.now.gmtime + name = %w{year month day hour min}.collect do |method| + # Make sure we're at least two digits everywhere + "%02d" % now.send(method).to_s + end.join("") + ".yaml" + + file = File.join(dir, name) + + begin + File.open(file, "w", 0640) do |f| + f.print to_yaml + end + rescue => detail + puts detail.backtrace if Puppet[:trace] + Puppet.warning "Could not write report for #{host} at #{file}: #{detail}" + end + + # Only testing cares about the return value + file + end +end + diff --git a/mcollective/lib/puppet/reports/tagmail.rb b/mcollective/lib/puppet/reports/tagmail.rb new file mode 100644 index 000000000..e17143e2f --- /dev/null +++ b/mcollective/lib/puppet/reports/tagmail.rb @@ -0,0 +1,168 @@ +require 'puppet' +require 'pp' + +require 'net/smtp' +require 'time' + +Puppet::Reports.register_report(:tagmail) do + desc "This report sends specific log messages to specific email addresses + based on the tags in the log messages. + + See the [UsingTags tag documentation](http://projects.puppetlabs.com/projects/puppet/wiki/Using_Tags) for more information on tags. + + To use this report, you must create a `tagmail.conf` (in the location + specified by `tagmap`). 
This is a simple file that maps tags to + email addresses: Any log messages in the report that match the specified + tags will be sent to the specified email addresses. + + Tags must be comma-separated, and they can be negated so that messages + only match when they do not have that tag. The tags are separated from + the email addresses by a colon, and the email addresses should also + be comma-separated. + + Lastly, there is an `all` tag that will always match all log messages. + + Here is an example `tagmail.conf`: + + all: me@domain.com + webserver, !mailserver: httpadmins@domain.com + + This will send all messages to `me@domain.com`, and all messages from + webservers that are not also from mailservers to `httpadmins@domain.com`. + + If you are using anti-spam controls, such as grey-listing, on your mail + server you should whitelist the sending email (controlled by `reportform` configuration option) to ensure your email is not discarded as spam. + " + + # Find all matching messages. + def match(taglists) + matching_logs = [] + taglists.each do |emails, pos, neg| + # First find all of the messages matched by our positive tags + messages = nil + if pos.include?("all") + messages = self.logs + else + # Find all of the messages that are tagged with any of our + # tags. + messages = self.logs.find_all do |log| + pos.detect { |tag| log.tagged?(tag) } + end + end + + # Now go through and remove any messages that match our negative tags + messages = messages.reject do |log| + true if neg.detect do |tag| log.tagged?(tag) end + end + + if messages.empty? + Puppet.info "No messages to report to #{emails.join(",")}" + next + else + matching_logs << [emails, messages.collect { |m| m.to_report }.join("\n")] + end + end + + matching_logs + end + + # Load the config file + def parse(text) + taglists = [] + text.split("\n").each do |line| + taglist = emails = nil + case line.chomp + when /^\s*#/; next + when /^\s*$/; next + when /^\s*(.+)\s*:\s*(.+)\s*$/ + taglist = $1 + emails = $2.sub(/#.*$/,'') + else + raise ArgumentError, "Invalid tagmail config file" + end + + pos = [] + neg = [] + taglist.sub(/\s+$/,'').split(/\s*,\s*/).each do |tag| + unless tag =~ /^!?[-\w\.]+$/ + raise ArgumentError, "Invalid tag #{tag.inspect}" + end + case tag + when /^\w+/; pos << tag + when /^!\w+/; neg << tag.sub("!", '') + else + raise Puppet::Error, "Invalid tag '#{tag}'" + end + end + + # Now split the emails + emails = emails.sub(/\s+$/,'').split(/\s*,\s*/) + taglists << [emails, pos, neg] + end + taglists + end + + # Process the report. This just calls the other associated messages. + def process + unless FileTest.exists?(Puppet[:tagmap]) + Puppet.notice "Cannot send tagmail report; no tagmap file #{Puppet[:tagmap]}" + return + end + + taglists = parse(File.read(Puppet[:tagmap])) + + # Now find any appropriately tagged messages. + reports = match(taglists) + + send(reports) + end + + # Send the email reports. 
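# ==========================================================================
# Reviewer sketch (illustrative, not part of the patch): given the example
# tagmail.conf from the desc above, parse builds one [emails, positive_tags,
# negative_tags] triple per line, which match then applies to the report's
# log messages. The addresses are the hypothetical ones from the desc.
#
#   conf = <<-CONF
#     all: me@domain.com
#     webserver, !mailserver: httpadmins@domain.com
#   CONF
#
#   parse(conf)
#   # => [ [["me@domain.com"],         ["all"],       []             ],
#   #      [["httpadmins@domain.com"], ["webserver"], ["mailserver"]] ]
#
# The send method below forks and delivers each [emails, messages] pair via
# Net::SMTP when the smtpserver setting is not "none", otherwise through the
# configured sendmail binary.
# ==========================================================================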
+ def send(reports) + pid = fork do + if Puppet[:smtpserver] != "none" + begin + Net::SMTP.start(Puppet[:smtpserver]) do |smtp| + reports.each do |emails, messages| + smtp.open_message_stream(Puppet[:reportfrom], *emails) do |p| + p.puts "From: #{Puppet[:reportfrom]}" + p.puts "Subject: Puppet Report for #{self.host}" + p.puts "To: " + emails.join(", ") + p.puts "Date: #{Time.now.rfc2822}" + p.puts + p.puts messages + end + end + end + rescue => detail + puts detail.backtrace if Puppet[:debug] + raise Puppet::Error, + "Could not send report emails through smtp: #{detail}" + end + elsif Puppet[:sendmail] != "" + begin + reports.each do |emails, messages| + # We need to open a separate process for every set of email addresses + IO.popen(Puppet[:sendmail] + " " + emails.join(" "), "w") do |p| + p.puts "From: #{Puppet[:reportfrom]}" + p.puts "Subject: Puppet Report for #{self.host}" + p.puts "To: " + emails.join(", ") + + p.puts messages + end + end + rescue => detail + puts detail.backtrace if Puppet[:debug] + raise Puppet::Error, + "Could not send report emails via sendmail: #{detail}" + end + else + raise Puppet::Error, "SMTP server is unset and could not find sendmail" + end + end + + # Don't bother waiting for the pid to return. + Process.detach(pid) + end +end + diff --git a/mcollective/lib/puppet/resource.rb b/mcollective/lib/puppet/resource.rb new file mode 100644 index 000000000..214516908 --- /dev/null +++ b/mcollective/lib/puppet/resource.rb @@ -0,0 +1,439 @@ +require 'puppet' +require 'puppet/util/tagging' +require 'puppet/util/pson' + +# The simplest resource class. Eventually it will function as the +# base class for all resource-like behaviour. +class Puppet::Resource + # This stub class is only needed for serialization compatibility with 0.25.x. + # Specifically, it exists to provide a compatibility API when using YAML + # serialized objects loaded from StoreConfigs. + Reference = Puppet::Resource + + include Puppet::Util::Tagging + + require 'puppet/resource/type_collection_helper' + include Puppet::Resource::TypeCollectionHelper + + extend Puppet::Util::Pson + include Enumerable + attr_accessor :file, :line, :catalog, :exported, :virtual, :validate_parameters, :strict + attr_reader :type, :title + + require 'puppet/indirector' + extend Puppet::Indirector + indirects :resource, :terminus_class => :ral + + ATTRIBUTES = [:file, :line, :exported] + + def self.from_pson(pson) + raise ArgumentError, "No resource type provided in pson data" unless type = pson['type'] + raise ArgumentError, "No resource title provided in pson data" unless title = pson['title'] + + resource = new(type, title) + + if params = pson['parameters'] + params.each { |param, value| resource[param] = value } + end + + if tags = pson['tags'] + tags.each { |tag| resource.tag(tag) } + end + + ATTRIBUTES.each do |a| + if value = pson[a.to_s] + resource.send(a.to_s + "=", value) + end + end + + resource.exported ||= false + + resource + end + + def inspect + "#{@type}[#{@title}]#{to_hash.inspect}" + end + + def to_pson_data_hash + data = ([:type, :title, :tags] + ATTRIBUTES).inject({}) do |hash, param| + next hash unless value = self.send(param) + hash[param.to_s] = value + hash + end + + data["exported"] ||= false + + params = self.to_hash.inject({}) do |hash, ary| + param, value = ary + + # Don't duplicate the title as the namevar + next hash if param == namevar and value == title + + hash[param] = Puppet::Resource.value_to_pson_data(value) + hash + end + + data["parameters"] = params unless params.empty? 
+ + data + end + + def self.value_to_pson_data(value) + if value.is_a? Array + value.map{|v| value_to_pson_data(v) } + elsif value.is_a? Puppet::Resource + value.to_s + else + value + end + end + + def yaml_property_munge(x) + case x + when Hash + x.inject({}) { |h,kv| + k,v = kv + h[k] = self.class.value_to_pson_data(v) + h + } + else self.class.value_to_pson_data(x) + end + end + + def to_pson(*args) + to_pson_data_hash.to_pson(*args) + end + + # Proxy these methods to the parameters hash. It's likely they'll + # be overridden at some point, but this works for now. + %w{has_key? keys length delete empty? <<}.each do |method| + define_method(method) do |*args| + parameters.send(method, *args) + end + end + + # Set a given parameter. Converts all passed names + # to lower-case symbols. + def []=(param, value) + validate_parameter(param) if validate_parameters + parameters[parameter_name(param)] = value + end + + # Return a given parameter's value. Converts all passed names + # to lower-case symbols. + def [](param) + parameters[parameter_name(param)] + end + + def ==(other) + return false unless other.respond_to?(:title) and self.type == other.type and self.title == other.title + + return false unless to_hash == other.to_hash + true + end + + # Compatibility method. + def builtin? + builtin_type? + end + + # Is this a builtin resource type? + def builtin_type? + resource_type.is_a?(Class) + end + + # Iterate over each param/value pair, as required for Enumerable. + def each + parameters.each { |p,v| yield p, v } + end + + def include?(parameter) + super || parameters.keys.include?( parameter_name(parameter) ) + end + + # These two methods are extracted into a Helper + # module, but file load order prevents me + # from including them in the class, and I had weird + # behaviour (i.e., sometimes it didn't work) when + # I directly extended each resource with the helper. + def environment + Puppet::Node::Environment.new(@environment) + end + + def environment=(env) + if env.is_a?(String) or env.is_a?(Symbol) + @environment = env + else + @environment = env.name + end + end + + %w{exported virtual strict}.each do |m| + define_method(m+"?") do + self.send(m) + end + end + + # Create our resource. + def initialize(type, title = nil, attributes = {}) + @parameters = {} + + # Set things like strictness first. + attributes.each do |attr, value| + next if attr == :parameters + send(attr.to_s + "=", value) + end + + @type, @title = extract_type_and_title(type, title) + + @type = munge_type_name(@type) + + if @type == "Class" + @title = :main if @title == "" + @title = munge_type_name(@title) + end + + if params = attributes[:parameters] + extract_parameters(params) + end + + tag(self.type) + tag(self.title) if valid_tag?(self.title) + + @reference = self # for serialization compatibility with 0.25.x + if strict? and ! resource_type + if @type == 'Class' + raise ArgumentError, "Could not find declared class #{title}" + else + raise ArgumentError, "Invalid resource type #{type}" + end + end + end + + def ref + to_s + end + + # Find our resource. + def resolve + return(catalog ? catalog.resource(to_s) : nil) + end + + def resource_type + case type + when "Class"; known_resource_types.hostclass(title == :main ? "" : title) + when "Node"; known_resource_types.node(title) + else + Puppet::Type.type(type.to_s.downcase.to_sym) || known_resource_types.definition(type) + end + end + + # Produce a simple hash of our parameters. 
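# ==========================================================================
# Reviewer sketch (illustrative, not part of the patch): parameter access on
# Puppet::Resource goes through parameter_name, so names are down-cased to
# symbols, and a "Type[title]" string is split apart by
# extract_type_and_title. The /etc/motd path and mode value are hypothetical.
#
#   res = Puppet::Resource.new("file", "/etc/motd")
#   res["Mode"] = "0644"          # stored under the :mode key
#   res[:mode]                    # => "0644"
#   res.ref                       # => "File[/etc/motd]"
#
#   Puppet::Resource.new("File[/etc/motd]").title   # => "/etc/motd"
#
# to_hash below merges whatever the title implies (via parse_title) with the
# explicitly set parameters.
# ==========================================================================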
+ def to_hash + parse_title.merge parameters + end + + def to_s + "#{type}[#{title}]" + end + + def uniqueness_key + # Temporary kludge to deal with inconsistant use patters + h = self.to_hash + h[namevar] ||= h[:name] + h[:name] ||= h[namevar] + h.values_at(*key_attributes.sort_by { |k| k.to_s }) + end + + def key_attributes + return(resource_type.respond_to? :key_attributes) ? resource_type.key_attributes : [:name] + end + + # Convert our resource to Puppet code. + def to_manifest + # Collect list of attributes to align => and move ensure first + attr = parameters.keys + attr_max = attr.inject(0) { |max,k| k.to_s.length > max ? k.to_s.length : max } + + attr.sort! + if attr.first != :ensure && attr.include?(:ensure) + attr.delete(:ensure) + attr.unshift(:ensure) + end + + attributes = attr.collect { |k| + v = parameters[k] + if v.is_a? Array + " %-#{attr_max}s => %s,\n" % [ k, "[\'#{v.join("', '")}\']" ] + else + " %-#{attr_max}s => %s,\n" % [ k, "\'#{v}\'" ] + end + } + + "%s { '%s':\n%s}" % [self.type.to_s.downcase, self.title, attributes] + end + + def to_ref + ref + end + + # Convert our resource to a RAL resource instance. Creates component + # instances for resource types that don't exist. + def to_ral + if typeklass = Puppet::Type.type(self.type) + return typeklass.new(self) + else + return Puppet::Type::Component.new(self) + end + end + + # Translate our object to a backward-compatible transportable object. + def to_trans + if builtin_type? and type.downcase.to_s != "stage" + result = to_transobject + else + result = to_transbucket + end + + result.file = self.file + result.line = self.line + + result + end + + def to_trans_ref + [type.to_s, title.to_s] + end + + # Create an old-style TransObject instance, for builtin resource types. + def to_transobject + # Now convert to a transobject + result = Puppet::TransObject.new(title, type) + to_hash.each do |p, v| + if v.is_a?(Puppet::Resource) + v = v.to_trans_ref + elsif v.is_a?(Array) + v = v.collect { |av| + av = av.to_trans_ref if av.is_a?(Puppet::Resource) + av + } + end + + # If the value is an array with only one value, then + # convert it to a single value. This is largely so that + # the database interaction doesn't have to worry about + # whether it returns an array or a string. + result[p.to_s] = if v.is_a?(Array) and v.length == 1 + v[0] + else + v + end + end + + result.tags = self.tags + + result + end + + def name + # this is potential namespace conflict + # between the notion of an "indirector name" + # and a "resource name" + [ type, title ].join('/') + end + + def to_resource + self + end + + def valid_parameter?(name) + resource_type.valid_parameter?(name) + end + + def validate_parameter(name) + raise ArgumentError, "Invalid parameter #{name}" unless valid_parameter?(name) + end + + private + + # Produce a canonical method name. + def parameter_name(param) + param = param.to_s.downcase.to_sym + if param == :name and n = namevar + param = namevar + end + param + end + + # The namevar for our resource type. If the type doesn't exist, + # always use :name. + def namevar + if builtin_type? and t = resource_type and t.key_attributes.length == 1 + t.key_attributes.first + else + :name + end + end + + # Create an old-style TransBucket instance, for non-builtin resource types. + def to_transbucket + bucket = Puppet::TransBucket.new([]) + + bucket.type = self.type + bucket.name = self.title + + # TransBuckets don't support parameters, which is why they're being deprecated. 
+ bucket + end + + def extract_parameters(params) + params.each do |param, value| + validate_parameter(param) if strict? + self[param] = value + end + end + + def extract_type_and_title(argtype, argtitle) + if (argtitle || argtype) =~ /^([^\[\]]+)\[(.+)\]$/m then [ $1, $2 ] + elsif argtitle then [ argtype, argtitle ] + elsif argtype.is_a?(Puppet::Type) then [ argtype.class.name, argtype.title ] + elsif argtype.is_a?(Hash) then + raise ArgumentError, "Puppet::Resource.new does not take a hash as the first argument. "+ + "Did you mean (#{(argtype[:type] || argtype["type"]).inspect}, #{(argtype[:title] || argtype["title"]).inspect }) ?" + else raise ArgumentError, "No title provided and #{argtype.inspect} is not a valid resource reference" + end + end + + def munge_type_name(value) + return :main if value == :main + return "Class" if value == "" or value.nil? or value.to_s.downcase == "component" + + value.to_s.split("::").collect { |s| s.capitalize }.join("::") + end + + def parse_title + h = {} + type = resource_type + if type.respond_to? :title_patterns + type.title_patterns.each { |regexp, symbols_and_lambdas| + if captures = regexp.match(title.to_s) + symbols_and_lambdas.zip(captures[1..-1]).each { |symbol_and_lambda,capture| + sym, lam = symbol_and_lambda + #self[sym] = lam.call(capture) + h[sym] = lam.call(capture) + } + return h + end + } + else + return { :name => title.to_s } + end + end + + def parameters + # @parameters could have been loaded from YAML, causing it to be nil (by + # bypassing initialize). + @parameters ||= {} + end +end diff --git a/mcollective/lib/puppet/resource/catalog.rb b/mcollective/lib/puppet/resource/catalog.rb new file mode 100644 index 000000000..2fdd19b0c --- /dev/null +++ b/mcollective/lib/puppet/resource/catalog.rb @@ -0,0 +1,604 @@ +require 'puppet/node' +require 'puppet/indirector' +require 'puppet/simple_graph' +require 'puppet/transaction' + +require 'puppet/util/cacher' +require 'puppet/util/pson' + +require 'puppet/util/tagging' + +# This class models a node catalog. It is the thing +# meant to be passed from server to client, and it contains all +# of the information in the catalog, including the resources +# and the relationships between them. +class Puppet::Resource::Catalog < Puppet::SimpleGraph + class DuplicateResourceError < Puppet::Error; end + + extend Puppet::Indirector + indirects :catalog, :terminus_setting => :catalog_terminus + + include Puppet::Util::Tagging + extend Puppet::Util::Pson + include Puppet::Util::Cacher::Expirer + + # The host name this is a catalog for. + attr_accessor :name + + # The catalog version. Used for testing whether a catalog + # is up to date. + attr_accessor :version + + # How long this catalog took to retrieve. Used for reporting stats. + attr_accessor :retrieval_duration + + # Whether this is a host catalog, which behaves very differently. + # In particular, reports are sent, graphs are made, and state is + # stored in the state database. If this is set incorrectly, then you often + # end up in infinite loops, because catalogs are used to make things + # that the host catalog needs. + attr_accessor :host_config + + # Whether this catalog was retrieved from the cache, which affects + # whether it is written back out again. + attr_accessor :from_cache + + # Some metadata to help us compile and generally respond to the current state. + attr_accessor :client_version, :server_version + + # Add classes to our class list. 
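# ==========================================================================
# Reviewer sketch (illustrative, not part of the patch): a catalog is built
# by adding Puppet::Resource objects and class names; resources can then be
# looked up by their canonical reference. The node name, class names and
# file path are hypothetical.
#
#   catalog = Puppet::Resource::Catalog.new("db01.example.com")
#   catalog.add_class("base", "postgres")
#   catalog.add_resource(Puppet::Resource.new("file", "/etc/motd"))
#
#   catalog.resource("File[/etc/motd]")   # => the resource added above
#   catalog.classes                       # => ["base", "postgres"]
#
# add_class below appends the names to @classes and also tags the catalog
# with them.
# ==========================================================================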
+ def add_class(*classes) + classes.each do |klass| + @classes << klass + end + + # Add the class names as tags, too. + tag(*classes) + end + + def title_key_for_ref( ref ) + ref =~ /^([-\w:]+)\[(.*)\]$/m + [$1, $2] + end + + # Add one or more resources to our graph and to our resource table. + # This is actually a relatively complicated method, because it handles multiple + # aspects of Catalog behaviour: + # * Add the resource to the resource table + # * Add the resource to the resource graph + # * Add the resource to the relationship graph + # * Add any aliases that make sense for the resource (e.g., name != title) + def add_resource(*resources) + resources.each do |resource| + raise ArgumentError, "Can only add objects that respond to :ref, not instances of #{resource.class}" unless resource.respond_to?(:ref) + end.each { |resource| fail_on_duplicate_type_and_title(resource) }.each do |resource| + title_key = title_key_for_ref(resource.ref) + + @transient_resources << resource if applying? + @resource_table[title_key] = resource + + # If the name and title differ, set up an alias + + if resource.respond_to?(:name) and resource.respond_to?(:title) and resource.respond_to?(:isomorphic?) and resource.name != resource.title + self.alias(resource, resource.uniqueness_key) if resource.isomorphic? + end + + resource.catalog = self if resource.respond_to?(:catalog=) + + add_vertex(resource) + + @relationship_graph.add_vertex(resource) if @relationship_graph + + yield(resource) if block_given? + end + end + + # Create an alias for a resource. + def alias(resource, key) + resource.ref =~ /^(.+)\[/ + class_name = $1 || resource.class.name + + newref = [class_name, key].flatten + + if key.is_a? String + ref_string = "#{class_name}[#{key}]" + return if ref_string == resource.ref + end + + # LAK:NOTE It's important that we directly compare the references, + # because sometimes an alias is created before the resource is + # added to the catalog, so comparing inside the below if block + # isn't sufficient. + if existing = @resource_table[newref] + return if existing == resource + resource_definition = " at #{resource.file}:#{resource.line}" if resource.file and resource.line + existing_definition = " at #{existing.file}:#{existing.line}" if existing.file and existing.line + msg = "Cannot alias #{resource.ref} to #{key.inspect}#{resource_definition}; resource #{newref.inspect} already defined#{existing_definition}" + raise ArgumentError, msg + end + @resource_table[newref] = resource + @aliases[resource.ref] ||= [] + @aliases[resource.ref] << newref + end + + # Apply our catalog to the local host. Valid options + # are: + # :tags - set the tags that restrict what resources run + # during the transaction + # :ignoreschedules - tell the transaction to ignore schedules + # when determining the resources to run + def apply(options = {}) + @applying = true + + # Expire all of the resource data -- this ensures that all + # data we're operating against is entirely current. + expire + + Puppet::Util::Storage.load if host_config? + + transaction = Puppet::Transaction.new(self, options[:report]) + register_report = options[:report].nil? 
+ + transaction.tags = options[:tags] if options[:tags] + transaction.ignoreschedules = true if options[:ignoreschedules] + + transaction.add_times :config_retrieval => self.retrieval_duration || 0 + + begin + Puppet::Util::Log.newdestination(transaction.report) if register_report + begin + transaction.evaluate + ensure + Puppet::Util::Log.close(transaction.report) if register_report + end + rescue Puppet::Error => detail + puts detail.backtrace if Puppet[:trace] + Puppet.err "Could not apply complete catalog: #{detail}" + rescue => detail + puts detail.backtrace if Puppet[:trace] + Puppet.err "Got an uncaught exception of type #{detail.class}: #{detail}" + ensure + # Don't try to store state unless we're a host config + # too recursive. + Puppet::Util::Storage.store if host_config? + end + + yield transaction if block_given? + + return transaction + ensure + @applying = false + cleanup + end + + # Are we in the middle of applying the catalog? + def applying? + @applying + end + + def clear(remove_resources = true) + super() + # We have to do this so that the resources clean themselves up. + @resource_table.values.each { |resource| resource.remove } if remove_resources + @resource_table.clear + + if @relationship_graph + @relationship_graph.clear + @relationship_graph = nil + end + end + + def classes + @classes.dup + end + + # Create a new resource and register it in the catalog. + def create_resource(type, options) + unless klass = Puppet::Type.type(type) + raise ArgumentError, "Unknown resource type #{type}" + end + return unless resource = klass.new(options) + + add_resource(resource) + resource + end + + def dependent_data_expired?(ts) + if applying? + return super + else + return true + end + end + + # Turn our catalog graph into an old-style tree of TransObjects and TransBuckets. + # LAK:NOTE(20081211): This is a pre-0.25 backward compatibility method. + # It can be removed as soon as xmlrpc is killed. + def extract + top = nil + current = nil + buckets = {} + + unless main = resource(:stage, "main") + raise Puppet::DevError, "Could not find 'main' stage; cannot generate catalog" + end + + if stages = vertices.find_all { |v| v.type == "Stage" and v.title != "main" } and ! stages.empty? + Puppet.warning "Stages are not supported by 0.24.x client; stage(s) #{stages.collect { |s| s.to_s }.join(', ') } will be ignored" + end + + bucket = nil + walk(main, :out) do |source, target| + # The sources are always non-builtins. + unless tmp = buckets[source.to_s] + if tmp = buckets[source.to_s] = source.to_trans + bucket = tmp + else + # This is because virtual resources return nil. If a virtual + # container resource contains realized resources, we still need to get + # to them. So, we keep a reference to the last valid bucket + # we returned and use that if the container resource is virtual. + end + end + bucket = tmp || bucket + if child = target.to_trans + raise "No bucket created for #{source}" unless bucket + bucket.push child + + # It's important that we keep a reference to any TransBuckets we've created, so + # we don't create multiple buckets for children. + buckets[target.to_s] = child unless target.builtin? + end + end + + # Retrieve the bucket for the top-level scope and set the appropriate metadata. + unless result = buckets[main.to_s] + # This only happens when the catalog is entirely empty. 
+ result = buckets[main.to_s] = main.to_trans + end + + result.classes = classes + + # Clear the cache to encourage the GC + buckets.clear + result + end + + # Make sure all of our resources are "finished". + def finalize + make_default_resources + + @resource_table.values.each { |resource| resource.finish } + + write_graph(:resources) + end + + def host_config? + host_config + end + + def initialize(name = nil) + super() + @name = name if name + @classes = [] + @resource_table = {} + @transient_resources = [] + @applying = false + @relationship_graph = nil + + @host_config = true + + @aliases = {} + + if block_given? + yield(self) + finalize + end + end + + # Make the default objects necessary for function. + def make_default_resources + # We have to add the resources to the catalog, or else they won't get cleaned up after + # the transaction. + + # First create the default scheduling objects + Puppet::Type.type(:schedule).mkdefaultschedules.each { |res| add_resource(res) unless resource(res.ref) } + + # And filebuckets + if bucket = Puppet::Type.type(:filebucket).mkdefaultbucket + add_resource(bucket) unless resource(bucket.ref) + end + end + + # Create a graph of all of the relationships in our catalog. + def relationship_graph + unless @relationship_graph + # It's important that we assign the graph immediately, because + # the debug messages below use the relationships in the + # relationship graph to determine the path to the resources + # spitting out the messages. If this is not set, + # then we get into an infinite loop. + @relationship_graph = Puppet::SimpleGraph.new + + # First create the dependency graph + self.vertices.each do |vertex| + @relationship_graph.add_vertex vertex + vertex.builddepends.each do |edge| + @relationship_graph.add_edge(edge) + end + end + + # Lastly, add in any autorequires + @relationship_graph.vertices.each do |vertex| + vertex.autorequire(self).each do |edge| + unless @relationship_graph.edge?(edge.source, edge.target) # don't let automatic relationships conflict with manual ones. + unless @relationship_graph.edge?(edge.target, edge.source) + vertex.debug "Autorequiring #{edge.source}" + @relationship_graph.add_edge(edge) + else + vertex.debug "Skipping automatic relationship with #{(edge.source == vertex ? edge.target : edge.source)}" + end + end + end + end + @relationship_graph.write_graph(:relationships) if host_config? + + # Then splice in the container information + @relationship_graph.splice!(self, Puppet::Type::Component) + + @relationship_graph.write_graph(:expanded_relationships) if host_config? + end + @relationship_graph + end + + # Remove the resource from our catalog. Notice that we also call + # 'remove' on the resource, at least until resource classes no longer maintain + # references to the resource instances. + def remove_resource(*resources) + resources.each do |resource| + @resource_table.delete(resource.ref) + if aliases = @aliases[resource.ref] + aliases.each { |res_alias| @resource_table.delete(res_alias) } + @aliases.delete(resource.ref) + end + remove_vertex!(resource) if vertex?(resource) + @relationship_graph.remove_vertex!(resource) if @relationship_graph and @relationship_graph.vertex?(resource) + resource.remove + end + end + + # Look a resource up by its reference (e.g., File[/etc/passwd]). + def resource(type, title = nil) + # Always create a resource reference, so that it always canonizes how we + # are referring to them. 
+ if title + res = Puppet::Resource.new(type, title) + else + # If they didn't provide a title, then we expect the first + # argument to be of the form 'Class[name]', which our + # Reference class canonizes for us. + res = Puppet::Resource.new(nil, type) + end + title_key = [res.type, res.title.to_s] + uniqueness_key = [res.type, res.uniqueness_key].flatten + @resource_table[title_key] || @resource_table[uniqueness_key] + end + + def resource_refs + resource_keys.collect{ |type, name| name.is_a?( String ) ? "#{type}[#{name}]" : nil}.compact + end + + def resource_keys + @resource_table.keys + end + + def resources + @resource_table.values.uniq + end + + def self.from_pson(data) + result = new(data['name']) + + if tags = data['tags'] + result.tag(*tags) + end + + if version = data['version'] + result.version = version + end + + if resources = data['resources'] + resources = PSON.parse(resources) if resources.is_a?(String) + resources.each do |res| + resource_from_pson(result, res) + end + end + + if edges = data['edges'] + edges = PSON.parse(edges) if edges.is_a?(String) + edges.each do |edge| + edge_from_pson(result, edge) + end + end + + if classes = data['classes'] + result.add_class(*classes) + end + + result + end + + def self.edge_from_pson(result, edge) + # If no type information was presented, we manually find + # the class. + edge = Puppet::Relationship.from_pson(edge) if edge.is_a?(Hash) + unless source = result.resource(edge.source) + raise ArgumentError, "Could not convert from pson: Could not find relationship source #{edge.source.inspect}" + end + edge.source = source + + unless target = result.resource(edge.target) + raise ArgumentError, "Could not convert from pson: Could not find relationship target #{edge.target.inspect}" + end + edge.target = target + + result.add_edge(edge) + end + + def self.resource_from_pson(result, res) + res = Puppet::Resource.from_pson(res) if res.is_a? Hash + result.add_resource(res) + end + + PSON.register_document_type('Catalog',self) + def to_pson_data_hash + { + 'document_type' => 'Catalog', + 'data' => { + 'tags' => tags, + 'name' => name, + 'version' => version, + 'resources' => vertices.collect { |v| v.to_pson_data_hash }, + 'edges' => edges. collect { |e| e.to_pson_data_hash }, + 'classes' => classes + }, + 'metadata' => { + 'api_version' => 1 + } + } + end + + def to_pson(*args) + to_pson_data_hash.to_pson(*args) + end + + # Convert our catalog into a RAL catalog. + def to_ral + to_catalog :to_ral + end + + # Convert our catalog into a catalog of Puppet::Resource instances. + def to_resource + to_catalog :to_resource + end + + # filter out the catalog, applying +block+ to each resource. + # If the block result is false, the resource will + # be kept otherwise it will be skipped + def filter(&block) + to_catalog :to_resource, &block + end + + # Store the classes in the classfile. + def write_class_file + ::File.open(Puppet[:classfile], "w") do |f| + f.puts classes.join("\n") + end + rescue => detail + Puppet.err "Could not create class file #{Puppet[:classfile]}: #{detail}" + end + + # Produce the graph files if requested. + def write_graph(name) + # We only want to graph the main host catalog. + return unless host_config? + + super + end + + private + + def cleanup + # Expire any cached data the resources are keeping. + expire + end + + # Verify that the given resource isn't defined elsewhere. 
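# ==========================================================================
# Reviewer sketch (illustrative, not part of the patch): type/title pairs
# must be unique within a catalog, so adding the same resource twice raises
# a DuplicateResourceError. The node and file names are hypothetical.
#
#   catalog = Puppet::Resource::Catalog.new("web01")
#   catalog.add_resource(Puppet::Resource.new("file", "/etc/motd"))
#   catalog.add_resource(Puppet::Resource.new("file", "/etc/motd"))
#   # => Puppet::Resource::Catalog::DuplicateResourceError:
#   #    "Duplicate definition: File[/etc/motd] is already defined"
#
# fail_on_duplicate_type_and_title below performs that check, including the
# file and line of the existing definition when they are known.
# ==========================================================================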
+ def fail_on_duplicate_type_and_title(resource) + # Short-curcuit the common case, + return unless existing_resource = @resource_table[title_key_for_ref(resource.ref)] + + # If we've gotten this far, it's a real conflict + msg = "Duplicate definition: #{resource.ref} is already defined" + + msg << " in file #{existing_resource.file} at line #{existing_resource.line}" if existing_resource.file and existing_resource.line + + msg << "; cannot redefine" if resource.line or resource.file + + raise DuplicateResourceError.new(msg) + end + + # An abstracted method for converting one catalog into another type of catalog. + # This pretty much just converts all of the resources from one class to another, using + # a conversion method. + def to_catalog(convert) + result = self.class.new(self.name) + + result.version = self.version + + map = {} + vertices.each do |resource| + next if virtual_not_exported?(resource) + next if block_given? and yield resource + + #This is hackity hack for 1094 + #Aliases aren't working in the ral catalog because the current instance of the resource + #has a reference to the catalog being converted. . . So, give it a reference to the new one + #problem solved. . . + if resource.class == Puppet::Resource + resource = resource.dup + resource.catalog = result + elsif resource.is_a?(Puppet::TransObject) + resource = resource.dup + resource.catalog = result + elsif resource.is_a?(Puppet::Parser::Resource) + resource = resource.to_resource + resource.catalog = result + end + + if resource.is_a?(Puppet::Resource) and convert.to_s == "to_resource" + newres = resource + else + newres = resource.send(convert) + end + + # We can't guarantee that resources don't munge their names + # (like files do with trailing slashes), so we have to keep track + # of what a resource got converted to. + map[resource.ref] = newres + + result.add_resource newres + end + + message = convert.to_s.gsub "_", " " + edges.each do |edge| + # Skip edges between virtual resources. + next if virtual_not_exported?(edge.source) + next if block_given? and yield edge.source + + next if virtual_not_exported?(edge.target) + next if block_given? and yield edge.target + + unless source = map[edge.source.ref] + raise Puppet::DevError, "Could not find resource #{edge.source.ref} when converting #{message} resources" + end + + unless target = map[edge.target.ref] + raise Puppet::DevError, "Could not find resource #{edge.target.ref} when converting #{message} resources" + end + + result.add_edge(source, target, edge.label) + end + + map.clear + + result.add_class(*self.classes) + result.tag(*self.tags) + + result + end + + def virtual_not_exported?(resource) + resource.respond_to?(:virtual?) and resource.virtual? and (resource.respond_to?(:exported?) and not resource.exported?) 
+ end +end diff --git a/mcollective/lib/puppet/resource/status.rb b/mcollective/lib/puppet/resource/status.rb new file mode 100644 index 000000000..dea8c105d --- /dev/null +++ b/mcollective/lib/puppet/resource/status.rb @@ -0,0 +1,79 @@ +module Puppet + class Resource + class Status + include Puppet::Util::Tagging + include Puppet::Util::Logging + + attr_accessor :resource, :node, :file, :line, :current_values, :status, :evaluation_time + + STATES = [:skipped, :failed, :failed_to_restart, :restarted, :changed, :out_of_sync, :scheduled] + attr_accessor *STATES + + attr_reader :source_description, :default_log_level, :time, :resource + attr_reader :change_count, :out_of_sync_count, :resource_type, :title + + YAML_ATTRIBUTES = %w{@resource @file @line @evaluation_time @change_count @out_of_sync_count @tags @time @events @out_of_sync @changed @resource_type @title @skipped @failed} + + # Provide a boolean method for each of the states. + STATES.each do |attr| + define_method("#{attr}?") do + !! send(attr) + end + end + + def <<(event) + add_event(event) + self + end + + def add_event(event) + @events << event + if event.status == 'failure' + self.failed = true + elsif event.status == 'success' + @change_count += 1 + @changed = true + end + if event.status != 'audit' + @out_of_sync_count += 1 + @out_of_sync = true + end + end + + def events + @events + end + + def initialize(resource) + @source_description = resource.path + @resource = resource.to_s + @change_count = 0 + @out_of_sync_count = 0 + @changed = false + @out_of_sync = false + @skipped = false + @failed = false + + [:file, :line].each do |attr| + send(attr.to_s + "=", resource.send(attr)) + end + + tag(*resource.tags) + @time = Time.now + @events = [] + @resource_type = resource.type.to_s.capitalize + @title = resource.title + end + + def to_yaml_properties + (YAML_ATTRIBUTES & instance_variables).sort + end + + private + + def log_source + source_description + end + end + end +end diff --git a/mcollective/lib/puppet/resource/type.rb b/mcollective/lib/puppet/resource/type.rb new file mode 100644 index 000000000..34fddf135 --- /dev/null +++ b/mcollective/lib/puppet/resource/type.rb @@ -0,0 +1,347 @@ +require 'puppet/parser/parser' +require 'puppet/util/warnings' +require 'puppet/util/errors' +require 'puppet/util/inline_docs' +require 'puppet/parser/ast/leaf' +require 'puppet/dsl' + +class Puppet::Resource::Type + Puppet::ResourceType = self + include Puppet::Util::InlineDocs + include Puppet::Util::Warnings + include Puppet::Util::Errors + + RESOURCE_SUPERTYPES = [:hostclass, :node, :definition] + + attr_accessor :file, :line, :doc, :code, :ruby_code, :parent, :resource_type_collection, :module_name + attr_reader :type, :namespace, :arguments, :behaves_like + + RESOURCE_SUPERTYPES.each do |t| + define_method("#{t}?") { self.type == t } + end + + require 'puppet/indirector' + extend Puppet::Indirector + indirects :resource_type, :terminus_class => :parser + + def self.from_pson(data) + name = data.delete('name') or raise ArgumentError, "Resource Type names must be specified" + type = data.delete('type') || "definition" + + data = data.inject({}) { |result, ary| result[ary[0].intern] = ary[1]; result } + + new(type, name, data) + end + + def to_pson_data_hash + data = [:code, :doc, :line, :file, :parent].inject({}) do |hash, param| + next hash unless value = self.send(param) + hash[param.to_s] = value + hash + end + + data['arguments'] = arguments.dup + + data['name'] = name + data['type'] = type + + data + end + + def to_pson(*args) + 
to_pson_data_hash.to_pson(*args) + end + + # Are we a child of the passed class? Do a recursive search up our + # parentage tree to figure it out. + def child_of?(klass) + return false unless parent + + return(klass == parent_type ? true : parent_type.child_of?(klass)) + end + + # Now evaluate the code associated with this class or definition. + def evaluate_code(resource) + scope = resource.scope + + if tmp = evaluate_parent_type(resource) + scope = tmp + end + + scope = subscope(scope, resource) unless resource.title == :main + scope.compiler.add_class(name) unless definition? + + set_resource_parameters(resource, scope) + + code.safeevaluate(scope) if code + + evaluate_ruby_code(resource, scope) if ruby_code + end + + def initialize(type, name, options = {}) + @type = type.to_s.downcase.to_sym + raise ArgumentError, "Invalid resource supertype '#{type}'" unless RESOURCE_SUPERTYPES.include?(@type) + + name = convert_from_ast(name) if name.is_a?(Puppet::Parser::AST::HostName) + + set_name_and_namespace(name) + + [:code, :doc, :line, :file, :parent].each do |param| + next unless value = options[param] + send(param.to_s + "=", value) + end + + set_arguments(options[:arguments]) + end + + # This is only used for node names, and really only when the node name + # is a regexp. + def match(string) + return string.to_s.downcase == name unless name_is_regex? + + @name =~ string + end + + # Add code from a new instance to our code. + def merge(other) + fail "#{name} is not a class; cannot add code to it" unless type == :hostclass + fail "#{other.name} is not a class; cannot add code from it" unless other.type == :hostclass + fail "Cannot have code outside of a class/node/define because 'freeze_main' is enabled" if name == "" and Puppet.settings[:freeze_main] + + if parent and other.parent and parent != other.parent + fail "Cannot merge classes with different parent classes (#{name} => #{parent} vs. #{other.name} => #{other.parent})" + end + + # We know they're either equal or only one is set, so keep whichever parent is specified. + self.parent ||= other.parent + + if other.doc + self.doc ||= "" + self.doc += other.doc + end + + # This might just be an empty, stub class. + return unless other.code + + unless self.code + self.code = other.code + return + end + + array_class = Puppet::Parser::AST::ASTArray + self.code = array_class.new(:children => [self.code]) unless self.code.is_a?(array_class) + + if other.code.is_a?(array_class) + code.children += other.code.children + else + code.children << other.code + end + end + + # Make an instance of the resource type, and place it in the catalog + # if it isn't in the catalog already. This is only possible for + # classes and nodes. No parameters are be supplied--if this is a + # parameterized class, then all parameters take on their default + # values. + def ensure_in_catalog(scope, parameters=nil) + type == :definition and raise ArgumentError, "Cannot create resources for defined resource types" + resource_type = type == :hostclass ? :class : :node + + # Do nothing if the resource already exists; this makes sure we don't + # get multiple copies of the class resource, which helps provide the + # singleton nature of classes. 
+ # we should not do this for classes with parameters + # if parameters are passed, we should still try to create the resource + # even if it exists so that we can fail + # this prevents us from being able to combine param classes with include + if resource = scope.catalog.resource(resource_type, name) and !parameters + return resource + end + resource = Puppet::Parser::Resource.new(resource_type, name, :scope => scope, :source => self) + if parameters + parameters.each do |k,v| + resource.set_parameter(k,v) + end + end + instantiate_resource(scope, resource) + scope.compiler.add_resource(scope, resource) + resource + end + + def instantiate_resource(scope, resource) + # Make sure our parent class has been evaluated, if we have one. + if parent && !scope.catalog.resource(resource.type, parent) + parent_type(scope).ensure_in_catalog(scope) + end + + if ['Class', 'Node'].include? resource.type + scope.catalog.tag(*resource.tags) + end + end + + def name + return @name unless @name.is_a?(Regexp) + @name.source.downcase.gsub(/[^-\w:.]/,'').sub(/^\.+/,'') + end + + def name_is_regex? + @name.is_a?(Regexp) + end + + # MQR TODO: + # + # The change(s) introduced by the fix for #4270 are mostly silly & should be + # removed, though we didn't realize it at the time. If it can be established/ + # ensured that nodes never call parent_type and that resource_types are always + # (as they should be) members of exactly one resource_type_collection the + # following method could / should be replaced with: + # + # def parent_type + # @parent_type ||= parent && ( + # resource_type_collection.find_or_load([name],parent,type.to_sym) || + # fail Puppet::ParseError, "Could not find parent resource type '#{parent}' of type #{type} in #{resource_type_collection.environment}" + # ) + # end + # + # ...and then the rest of the changes around passing in scope reverted. + # + def parent_type(scope = nil) + return nil unless parent + + unless @parent_type + raise "Must pass scope to parent_type when called first time" unless scope + unless @parent_type = scope.environment.known_resource_types.send("find_#{type}", [name], parent) + fail Puppet::ParseError, "Could not find parent resource type '#{parent}' of type #{type} in #{scope.environment}" + end + end + + @parent_type + end + + # Set any arguments passed by the resource as variables in the scope. + def set_resource_parameters(resource, scope) + set = {} + resource.to_hash.each do |param, value| + param = param.to_sym + fail Puppet::ParseError, "#{resource.ref} does not accept attribute #{param}" unless valid_parameter?(param) + + exceptwrap { scope.setvar(param.to_s, value) } + + set[param] = true + end + + if @type == :hostclass + scope.setvar("title", resource.title.to_s.downcase) unless set.include? :title + scope.setvar("name", resource.name.to_s.downcase ) unless set.include? :name + else + scope.setvar("title", resource.title ) unless set.include? :title + scope.setvar("name", resource.name ) unless set.include? :name + end + scope.setvar("module_name", module_name) if module_name and ! set.include? :module_name + + if caller_name = scope.parent_module_name and ! set.include?(:caller_module_name) + scope.setvar("caller_module_name", caller_name) + end + scope.class_set(self.name,scope) if hostclass? or node? + # Verify that all required arguments are either present or + # have been provided with defaults. 
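# ==========================================================================
# Reviewer sketch (illustrative, not part of the patch): the arguments
# declared on a class or definition drive both valid_parameter? and the
# default-filling loop that follows. The "myapp::vhost" definition and its
# port/docroot arguments are hypothetical; in real use the defaults are AST
# expressions that the loop evaluates with safeevaluate.
#
#   vhost = Puppet::Resource::Type.new(:definition, "myapp::vhost",
#                                      :arguments => { "port" => nil, "docroot" => nil })
#   vhost.valid_parameter?(:port)      # => true
#   vhost.valid_parameter?("docroot")  # => true
#   vhost.valid_parameter?("owner")    # => false (not declared, not a metaparameter)
#
# The loop below fails if a declared argument was neither passed nor given a
# default, and otherwise evaluates the default and stores the value in both
# the scope and the resource.
# ==========================================================================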
+ arguments.each do |param, default| + param = param.to_sym + next if set.include?(param) + + # Even if 'default' is a false value, it's an AST value, so this works fine + fail Puppet::ParseError, "Must pass #{param} to #{resource.ref}" unless default + + value = default.safeevaluate(scope) + scope.setvar(param.to_s, value) + + # Set it in the resource, too, so the value makes it to the client. + resource[param] = value + end + + end + + # Create a new subscope in which to evaluate our code. + def subscope(scope, resource) + scope.newscope :resource => resource, :namespace => self.namespace, :source => self + end + + # Check whether a given argument is valid. + def valid_parameter?(param) + param = param.to_s + + return true if param == "name" + return true if Puppet::Type.metaparam?(param) + return false unless defined?(@arguments) + return(arguments.include?(param) ? true : false) + end + + def set_arguments(arguments) + @arguments = {} + return if arguments.nil? + + arguments.each do |arg, default| + arg = arg.to_s + warn_if_metaparam(arg, default) + @arguments[arg] = default + end + end + + private + + def convert_from_ast(name) + value = name.value + if value.is_a?(Puppet::Parser::AST::Regex) + name = value.value + else + name = value + end + end + + def evaluate_parent_type(resource) + return unless klass = parent_type(resource.scope) and parent_resource = resource.scope.compiler.catalog.resource(:class, klass.name) || resource.scope.compiler.catalog.resource(:node, klass.name) + parent_resource.evaluate unless parent_resource.evaluated? + parent_scope(resource.scope, klass) + end + + def evaluate_ruby_code(resource, scope) + Puppet::DSL::ResourceAPI.new(resource, scope, ruby_code).evaluate + end + + # Split an fq name into a namespace and name + def namesplit(fullname) + ary = fullname.split("::") + n = ary.pop || "" + ns = ary.join("::") + return ns, n + end + + def parent_scope(scope, klass) + scope.class_scope(klass) || raise(Puppet::DevError, "Could not find scope for #{klass.name}") + end + + def set_name_and_namespace(name) + if name.is_a?(Regexp) + @name = name + @namespace = "" + else + @name = name.to_s.downcase + + # Note we're doing something somewhat weird here -- we're setting + # the class's namespace to its fully qualified name. This means + # anything inside that class starts looking in that namespace first. + @namespace, ignored_shortname = @type == :hostclass ? [@name, ''] : namesplit(@name) + end + end + + def warn_if_metaparam(param, default) + return unless Puppet::Type.metaparamclass(param) + + if default + warnonce "#{param} is a metaparam; this value will inherit to all contained resources" + else + raise Puppet::ParseError, "#{param} is a metaparameter; please choose another parameter name in the #{self.name} definition" + end + end +end + diff --git a/mcollective/lib/puppet/resource/type_collection.rb b/mcollective/lib/puppet/resource/type_collection.rb new file mode 100644 index 000000000..4210475ad --- /dev/null +++ b/mcollective/lib/puppet/resource/type_collection.rb @@ -0,0 +1,222 @@ +class Puppet::Resource::TypeCollection + attr_reader :environment + + def clear + @hostclasses.clear + @definitions.clear + @nodes.clear + end + + def initialize(env) + @environment = env.is_a?(String) ? 
Puppet::Node::Environment.new(env) : env + @hostclasses = {} + @definitions = {} + @nodes = {} + + # So we can keep a list and match the first-defined regex + @node_list = [] + + @watched_files = {} + end + + def inspect + "TypeCollection" + { :hostclasses => @hostclasses.keys, :definitions => @definitions.keys, :nodes => @nodes.keys }.inspect + end + + def <<(thing) + add(thing) + self + end + + def add(instance) + if instance.type == :hostclass and other = @hostclasses[instance.name] and other.type == :hostclass + other.merge(instance) + return other + end + method = "add_#{instance.type}" + send(method, instance) + instance.resource_type_collection = self + instance + end + + def add_hostclass(instance) + dupe_check(instance, @hostclasses) { |dupe| "Class '#{instance.name}' is already defined#{dupe.error_context}; cannot redefine" } + dupe_check(instance, @definitions) { |dupe| "Definition '#{instance.name}' is already defined#{dupe.error_context}; cannot be redefined as a class" } + + @hostclasses[instance.name] = instance + instance + end + + def hostclass(name) + @hostclasses[munge_name(name)] + end + + def add_node(instance) + dupe_check(instance, @nodes) { |dupe| "Node '#{instance.name}' is already defined#{dupe.error_context}; cannot redefine" } + + @node_list << instance + @nodes[instance.name] = instance + instance + end + + def loader + require 'puppet/parser/type_loader' + @loader ||= Puppet::Parser::TypeLoader.new(environment) + end + + def node(name) + name = munge_name(name) + + if node = @nodes[name] + return node + end + + @node_list.each do |node| + next unless node.name_is_regex? + return node if node.match(name) + end + nil + end + + def node_exists?(name) + @nodes[munge_name(name)] + end + + def nodes? + @nodes.length > 0 + end + + def add_definition(instance) + dupe_check(instance, @hostclasses) { |dupe| "'#{instance.name}' is already defined#{dupe.error_context} as a class; cannot redefine as a definition" } + dupe_check(instance, @definitions) { |dupe| "Definition '#{instance.name}' is already defined#{dupe.error_context}; cannot be redefined" } + @definitions[instance.name] = instance + end + + def definition(name) + @definitions[munge_name(name)] + end + + def find(namespaces, name, type) + #Array("") == [] for some reason + namespaces = [namespaces] unless namespaces.is_a?(Array) + + if name =~ /^::/ + return send(type, name.sub(/^::/, '')) + end + + namespaces.each do |namespace| + ary = namespace.split("::") + + while ary.length > 0 + tmp_namespace = ary.join("::") + if r = find_partially_qualified(tmp_namespace, name, type) + return r + end + + # Delete the second to last object, which reduces our namespace by one. + ary.pop + end + + if result = send(type, name) + return result + end + end + nil + end + + def find_or_load(namespaces, name, type) + name = name.downcase + namespaces = [namespaces] unless namespaces.is_a?(Array) + namespaces = namespaces.collect { |ns| ns.downcase } + + # This could be done in the load_until, but the knowledge seems to + # belong here. 
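# ==========================================================================
# Reviewer sketch (illustrative, not part of the patch): find walks the
# calling namespace outwards before falling back to the bare name, and a
# leading "::" skips the walk entirely. "foo::bar" and "db" are hypothetical
# names; `collection` is an instance of this class.
#
#   collection.find("foo::bar", "db", :hostclass)
#   # probes the hostclass table in this order and returns the first match:
#   #   "foo::bar::db", "foo::db", "db"
#
#   collection.find("foo::bar", "::db", :hostclass)
#   # only looks up "db"
#
# The code below first consults the already-loaded tables via find, then
# asks the type loader to import manifests until the name resolves.
# ==========================================================================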
+ if r = find(namespaces, name, type) + return r + end + + loader.load_until(namespaces, name) { find(namespaces, name, type) } + end + + def find_node(namespaces, name) + find("", name, :node) + end + + def find_hostclass(namespaces, name) + find_or_load(namespaces, name, :hostclass) + end + + def find_definition(namespaces, name) + find_or_load(namespaces, name, :definition) + end + + [:hostclasses, :nodes, :definitions].each do |m| + define_method(m) do + instance_variable_get("@#{m}").dup + end + end + + def perform_initial_import + parser = Puppet::Parser::Parser.new(environment) + if code = Puppet.settings.uninterpolated_value(:code, environment.to_s) and code != "" + parser.string = code + else + file = Puppet.settings.value(:manifest, environment.to_s) + parser.file = file + end + parser.parse + rescue => detail + @parse_failed = true + + msg = "Could not parse for environment #{environment}: #{detail}" + error = Puppet::Error.new(msg) + error.set_backtrace(detail.backtrace) + raise error + end + + def require_reparse? + @parse_failed || stale? + end + + def stale? + @watched_files.values.detect { |file| file.changed? } + end + + def version + return @version if defined?(@version) + + if environment[:config_version] == "" + @version = Time.now.to_i + return @version + end + + @version = Puppet::Util.execute([environment[:config_version]]).strip + + rescue Puppet::ExecutionFailure => e + raise Puppet::ParseError, "Unable to set config_version: #{e.message}" + end + + def watch_file(file) + @watched_files[file] = Puppet::Util::LoadedFile.new(file) + end + + def watching_file?(file) + @watched_files.include?(file) + end + + private + + def find_partially_qualified(namespace, name, type) + send(type, [namespace, name].join("::")) + end + + def munge_name(name) + name.to_s.downcase + end + + def dupe_check(instance, hash) + return unless dupe = hash[instance.name] + message = yield dupe + instance.fail Puppet::ParseError, message + end +end diff --git a/mcollective/lib/puppet/resource/type_collection_helper.rb b/mcollective/lib/puppet/resource/type_collection_helper.rb new file mode 100644 index 000000000..9dc3cce00 --- /dev/null +++ b/mcollective/lib/puppet/resource/type_collection_helper.rb @@ -0,0 +1,7 @@ +require 'puppet/resource/type_collection' + +module Puppet::Resource::TypeCollectionHelper + def known_resource_types + environment.known_resource_types + end +end diff --git a/mcollective/lib/puppet/run.rb b/mcollective/lib/puppet/run.rb new file mode 100644 index 000000000..150e644a0 --- /dev/null +++ b/mcollective/lib/puppet/run.rb @@ -0,0 +1,77 @@ +require 'puppet/agent' +require 'puppet/configurer' +require 'puppet/indirector' + +# A basic class for running the agent. Used by +# puppetrun to kick off agents remotely. +class Puppet::Run + extend Puppet::Indirector + indirects :run, :terminus_class => :local + + attr_reader :status, :background, :options + + def agent + Puppet::Agent.new(Puppet::Configurer) + end + + def background? 
+ background + end + + def initialize(options = {}) + if options.include?(:background) + @background = options[:background] + options.delete(:background) + end + + valid_options = [:tags, :ignoreschedules] + options.each do |key, value| + raise ArgumentError, "Run does not accept #{key}" unless valid_options.include?(key) + end + + @options = options + end + + def log_run + msg = "" + msg += "triggered run" % if options[:tags] + msg += " with tags #{options[:tags].inspect}" + end + + msg += " ignoring schedules" if options[:ignoreschedules] + + Puppet.notice msg + end + + def run + if agent.running? + @status = "running" + return self + end + + log_run + + if background? + Thread.new { agent.run(options) } + else + agent.run(options) + end + + @status = "success" + + self + end + + def self.from_pson( pson ) + options = {} + pson.each do |key, value| + options[key.to_sym] = value + end + + new(options) + end + + def to_pson + @options.merge(:background => @background).to_pson + end +end diff --git a/mcollective/lib/puppet/simple_graph.rb b/mcollective/lib/puppet/simple_graph.rb new file mode 100644 index 000000000..c658b3b92 --- /dev/null +++ b/mcollective/lib/puppet/simple_graph.rb @@ -0,0 +1,452 @@ +# Created by Luke A. Kanies on 2007-11-07. +# Copyright (c) 2007. All rights reserved. + +require 'puppet/external/dot' +require 'puppet/relationship' +require 'set' + +# A hopefully-faster graph class to replace the use of GRATR. +class Puppet::SimpleGraph + # An internal class for handling a vertex's edges. + class VertexWrapper + attr_accessor :in, :out, :vertex + + # Remove all references to everything. + def clear + @adjacencies[:in].clear + @adjacencies[:out].clear + @vertex = nil + end + + def initialize(vertex) + @vertex = vertex + @adjacencies = {:in => {}, :out => {}} + end + + # Find adjacent vertices or edges. + def adjacent(options) + direction = options[:direction] || :out + options[:type] ||= :vertices + + return send(direction.to_s + "_edges") if options[:type] == :edges + + @adjacencies[direction].keys.reject { |vertex| @adjacencies[direction][vertex].empty? } + end + + # Add an edge to our list. + def add_edge(direction, edge) + opposite_adjacencies(direction, edge) << edge + end + + # Return all known edges. + def edges + in_edges + out_edges + end + + # Test whether we share an edge with a given vertex. + def has_edge?(direction, vertex) + return(vertex_adjacencies(direction, vertex).length > 0 ? true : false) + end + + # Create methods for returning the degree and edges. + [:in, :out].each do |direction| + # LAK:NOTE If you decide to create methods for directly + # testing the degree, you'll have to get the values and flatten + # the results -- you might have duplicate edges, which can give + # a false impression of what the degree is. That's just + # as expensive as just getting the edge list, so I've decided + # to only add this method. + define_method("#{direction}_edges") do + @adjacencies[direction].values.inject([]) { |total, adjacent| total += adjacent.to_a; total } + end + end + + # The other vertex in the edge. + def other_vertex(direction, edge) + case direction + when :in; edge.source + else + edge.target + end + end + + # Remove an edge from our list. Assumes that we've already checked + # that the edge is valid. 
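+ # Both directions keep a vertex => Set-of-edges map: @adjacencies[:in]
+ # is keyed by an edge's source and @adjacencies[:out] by its target,
+ # so removing an edge only needs to delete it from the Set keyed by
+ # the vertex at the other end.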
+ def remove_edge(direction, edge) + opposite_adjacencies(direction, edge).delete(edge) + end + + def to_s + vertex.to_s + end + + def inspect + { :@adjacencies => @adjacencies, :@vertex => @vertex.to_s }.inspect + end + + private + + # These methods exist so we don't need a Hash with a default proc. + + # Look up the adjacencies for a vertex at the other end of an + # edge. + def opposite_adjacencies(direction, edge) + opposite_vertex = other_vertex(direction, edge) + vertex_adjacencies(direction, opposite_vertex) + end + + # Look up the adjacencies for a given vertex. + def vertex_adjacencies(direction, vertex) + @adjacencies[direction][vertex] ||= Set.new + @adjacencies[direction][vertex] + end + end + + def initialize + @vertices = {} + @edges = [] + end + + # Clear our graph. + def clear + @vertices.each { |vertex, wrapper| wrapper.clear } + @vertices.clear + @edges.clear + end + + # Which resources a given resource depends upon. + def dependents(resource) + tree_from_vertex(resource).keys + end + + # Which resources depend upon the given resource. + def dependencies(resource) + # Cache the reversal graph, because it's somewhat expensive + # to create. + @reversal ||= reversal + # Strangely, it's significantly faster to search a reversed + # tree in the :out direction than to search a normal tree + # in the :in direction. + @reversal.tree_from_vertex(resource, :out).keys + end + + # Whether our graph is directed. Always true. Used to produce dot files. + def directed? + true + end + + # Determine all of the leaf nodes below a given vertex. + def leaves(vertex, direction = :out) + tree = tree_from_vertex(vertex, direction) + l = tree.keys.find_all { |c| adjacent(c, :direction => direction).empty? } + end + + # Collect all of the edges that the passed events match. Returns + # an array of edges. + def matching_edges(event, base = nil) + source = base || event.resource + + unless vertex?(source) + Puppet.warning "Got an event from invalid vertex #{source.ref}" + return [] + end + # Get all of the edges that this vertex should forward events + # to, which is the same thing as saying all edges directly below + # This vertex in the graph. + adjacent(source, :direction => :out, :type => :edges).find_all do |edge| + edge.match?(event.name) + end + end + + # Return a reversed version of this graph. + def reversal + result = self.class.new + vertices.each { |vertex| result.add_vertex(vertex) } + edges.each do |edge| + newedge = edge.class.new(edge.target, edge.source, edge.label) + result.add_edge(newedge) + end + result + end + + # Return the size of the graph. + def size + @vertices.length + end + + # Return the graph as an array. + def to_a + @vertices.keys + end + + # Provide a topological sort. + def topsort + degree = {} + zeros = [] + result = [] + + # Collect each of our vertices, with the number of in-edges each has. + @vertices.each do |name, wrapper| + edges = wrapper.in_edges + zeros << wrapper if edges.length == 0 + degree[wrapper.vertex] = edges + end + + # Iterate over each 0-degree vertex, decrementing the degree of + # each of its out-edges. + while wrapper = zeros.pop + result << wrapper.vertex + wrapper.out_edges.each do |edge| + degree[edge.target].delete(edge) + zeros << @vertices[edge.target] if degree[edge.target].length == 0 + end + end + + # If we have any vertices left with non-zero in-degrees, then we've found a cycle. 
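+ # For example, with edges a->b and b->c the loop above pops "a"
+ # (zero in-degree), then "b", then "c", producing [a, b, c]. In a
+ # cycle such as a->b plus b->a neither vertex ever reaches zero
+ # in-degree, so both are left behind and reported below.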
+ if cycles = degree.find_all { |vertex, edges| edges.length > 0 } and cycles.length > 0 + message = cycles.collect { |vertex, edges| edges.collect { |e| e.to_s }.join(", ") }.join(", ") + raise Puppet::Error, "Found dependency cycles in the following relationships: #{message}; try using the '--graph' option and open the '.dot' files in OmniGraffle or GraphViz" + end + + result + end + + # Add a new vertex to the graph. + def add_vertex(vertex) + @reversal = nil + return false if vertex?(vertex) + setup_vertex(vertex) + true # don't return the VertexWrapper instance. + end + + # Remove a vertex from the graph. + def remove_vertex!(vertex) + return nil unless vertex?(vertex) + @vertices[vertex].edges.each { |edge| remove_edge!(edge) } + @edges -= @vertices[vertex].edges + @vertices[vertex].clear + @vertices.delete(vertex) + end + + # Test whether a given vertex is in the graph. + def vertex?(vertex) + @vertices.include?(vertex) + end + + # Return a list of all vertices. + def vertices + @vertices.keys + end + + # Add a new edge. The graph user has to create the edge instance, + # since they have to specify what kind of edge it is. + def add_edge(source, target = nil, label = nil) + @reversal = nil + if target + edge = Puppet::Relationship.new(source, target, label) + else + edge = source + end + [edge.source, edge.target].each { |vertex| setup_vertex(vertex) unless vertex?(vertex) } + @vertices[edge.source].add_edge :out, edge + @vertices[edge.target].add_edge :in, edge + @edges << edge + true + end + + # Find a matching edge. Note that this only finds the first edge, + # not all of them or whatever. + def edge(source, target) + @edges.each_with_index { |test_edge, index| return test_edge if test_edge.source == source and test_edge.target == target } + end + + def edge_label(source, target) + return nil unless edge = edge(source, target) + edge.label + end + + # Is there an edge between the two vertices? + def edge?(source, target) + return false unless vertex?(source) and vertex?(target) + + @vertices[source].has_edge?(:out, target) + end + + def edges + @edges.dup + end + + # Remove an edge from our graph. + def remove_edge!(edge) + @vertices[edge.source].remove_edge(:out, edge) + @vertices[edge.target].remove_edge(:in, edge) + + @edges.delete(edge) + nil + end + + # Find adjacent edges. + def adjacent(vertex, options = {}) + return [] unless wrapper = @vertices[vertex] + wrapper.adjacent(options) + end + + private + + # An internal method that skips the validation, so we don't have + # duplicate validation calls. + def setup_vertex(vertex) + @vertices[vertex] = VertexWrapper.new(vertex) + end + + public + +# # For some reason, unconnected vertices do not show up in +# # this graph. +# def to_jpg(path, name) +# gv = vertices +# Dir.chdir(path) do +# induced_subgraph(gv).write_to_graphic_file('jpg', name) +# end +# end + + # Take container information from another graph and use it + # to replace any container vertices with their respective leaves. + # This creates direct relationships where there were previously + # indirect relationships through the containers. + def splice!(other, type) + # We have to get the container list via a topological sort on the + # configuration graph, because otherwise containers that contain + # other containers will add those containers back into the + # graph. We could get a similar affect by only setting relationships + # to container leaves, but that would result in many more + # relationships. 
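+ # Rough sketch of the effect, with illustrative resources: if this
+ # graph has File[a] -> Class[c] -> File[b] and, in the other graph,
+ # Class[c] contains File[x], splicing adds File[a] -> File[x] and
+ # File[x] -> File[b], then removes Class[c] entirely.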
+ stage_class = Puppet::Type.type(:stage) + whit_class = Puppet::Type.type(:whit) + containers = other.topsort.find_all { |v| (v.is_a?(type) or v.is_a?(stage_class)) and vertex?(v) } + containers.each do |container| + # Get the list of children from the other graph. + children = other.adjacent(container, :direction => :out) + + # MQR TODO: Luke suggests that it should be possible to refactor the system so that + # container nodes are retained, thus obviating the need for the whit. + children = [whit_class.new(:name => container.name, :catalog => other)] if children.empty? + + # First create new edges for each of the :in edges + [:in, :out].each do |dir| + edges = adjacent(container, :direction => dir, :type => :edges) + edges.each do |edge| + children.each do |child| + if dir == :in + s = edge.source + t = child + else + s = child + t = edge.target + end + + add_edge(s, t, edge.label) + end + + # Now get rid of the edge, so remove_vertex! works correctly. + remove_edge!(edge) + end + end + remove_vertex!(container) + end + end + + # Just walk the tree and pass each edge. + def walk(source, direction) + # Use an iterative, breadth-first traversal of the graph. One could do + # this recursively, but Ruby's slow function calls and even slower + # recursion make the shorter, recursive algorithm cost-prohibitive. + stack = [source] + seen = Set.new + until stack.empty? + node = stack.shift + next if seen.member? node + connected = adjacent(node, :direction => direction) + connected.each do |target| + yield node, target + end + stack.concat(connected) + seen << node + end + end + + # A different way of walking a tree, and a much faster way than the + # one that comes with GRATR. + def tree_from_vertex(start, direction = :out) + predecessor={} + walk(start, direction) do |parent, child| + predecessor[child] = parent + end + predecessor + end + + # LAK:FIXME This is just a paste of the GRATR code with slight modifications. + + # Return a DOT::DOTDigraph for directed graphs or a DOT::DOTSubgraph for an + # undirected Graph. _params_ can contain any graph property specified in + # rdot.rb. If an edge or vertex label is a kind of Hash then the keys + # which match +dot+ properties will be used as well. + def to_dot_graph (params = {}) + params['name'] ||= self.class.name.gsub(/:/,'_') + fontsize = params['fontsize'] ? params['fontsize'] : '8' + graph = (directed? ? DOT::DOTDigraph : DOT::DOTSubgraph).new(params) + edge_klass = directed? ? DOT::DOTDirectedEdge : DOT::DOTEdge + vertices.each do |v| + name = v.to_s + params = {'name' => '"'+name+'"', + 'fontsize' => fontsize, + 'label' => name} + v_label = v.to_s + params.merge!(v_label) if v_label and v_label.kind_of? Hash + graph << DOT::DOTNode.new(params) + end + edges.each do |e| + params = {'from' => '"'+ e.source.to_s + '"', + 'to' => '"'+ e.target.to_s + '"', + 'fontsize' => fontsize } + e_label = e.to_s + params.merge!(e_label) if e_label and e_label.kind_of? Hash + graph << edge_klass.new(params) + end + graph + end + + # Output the dot format as a string + def to_dot (params={}) to_dot_graph(params).to_s; end + + # Call +dotty+ for the graph which is written to the file 'graph.dot' + # in the # current directory. + def dotty (params = {}, dotfile = 'graph.dot') + File.open(dotfile, 'w') {|f| f << to_dot(params) } + system('dotty', dotfile) + end + + # Use +dot+ to create a graphical representation of the graph. Returns the + # filename of the graphics file. 
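+ # For example:
+ #   graph.write_to_graphic_file('png', 'relationships')
+ #   # writes relationships.dot, runs `dot -Tpng`, and returns
+ #   # "relationships.png"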
+ def write_to_graphic_file (fmt='png', dotfile='graph') + src = dotfile + '.dot' + dot = dotfile + '.' + fmt + + File.open(src, 'w') {|f| f << self.to_dot << "\n"} + + system( "dot -T#{fmt} #{src} -o #{dot}" ) + dot + end + + # Produce the graph files if requested. + def write_graph(name) + return unless Puppet[:graph] + + Puppet.settings.use(:graphing) + + file = File.join(Puppet[:graphdir], "#{name}.dot") + File.open(file, "w") { |f| + f.puts to_dot("name" => name.to_s.capitalize) + } + end +end diff --git a/mcollective/lib/puppet/ssl.rb b/mcollective/lib/puppet/ssl.rb new file mode 100644 index 000000000..8f71ba8a4 --- /dev/null +++ b/mcollective/lib/puppet/ssl.rb @@ -0,0 +1,8 @@ +# Just to make the constants work out. +require 'puppet' +require 'openssl' + +module Puppet::SSL # :nodoc: + CA_NAME = "ca" + require 'puppet/ssl/host' +end diff --git a/mcollective/lib/puppet/ssl/base.rb b/mcollective/lib/puppet/ssl/base.rb new file mode 100644 index 000000000..25fd70ca2 --- /dev/null +++ b/mcollective/lib/puppet/ssl/base.rb @@ -0,0 +1,87 @@ +require 'puppet/ssl' + +# The base class for wrapping SSL instances. +class Puppet::SSL::Base + # For now, use the YAML separator. + SEPARATOR = "\n---\n" + + # Only allow printing ascii characters, excluding / + VALID_CERTNAME = /\A[ -.0-~]+\Z/ + + def self.from_multiple_s(text) + text.split(SEPARATOR).collect { |inst| from_s(inst) } + end + + def self.to_multiple_s(instances) + instances.collect { |inst| inst.to_s }.join(SEPARATOR) + end + + def self.wraps(klass) + @wrapped_class = klass + end + + def self.wrapped_class + raise(Puppet::DevError, "#{self} has not declared what class it wraps") unless defined?(@wrapped_class) + @wrapped_class + end + + def self.validate_certname(name) + raise "Certname #{name.inspect} must not contain unprintable or non-ASCII characters" unless name =~ VALID_CERTNAME + end + + attr_accessor :name, :content + + # Is this file for the CA? + def ca? + name == Puppet::SSL::Host.ca_name + end + + def generate + raise Puppet::DevError, "#{self.class} did not override 'generate'" + end + + def initialize(name) + @name = name.to_s.downcase + self.class.validate_certname(@name) + end + + # Read content from disk appropriately. + def read(path) + @content = wrapped_class.new(File.read(path)) + end + + # Convert our thing to pem. + def to_s + return "" unless content + content.to_pem + end + + # Provide the full text of the thing we're dealing with. + def to_text + return "" unless content + content.to_text + end + + def fingerprint(md = :MD5) + require 'openssl/digest' + + # ruby 1.8.x openssl digest constants are string + # but in 1.9.x they are symbols + mds = md.to_s.upcase + if OpenSSL::Digest.constants.include?(mds) + md = mds + elsif OpenSSL::Digest.constants.include?(mds.to_sym) + md = mds.to_sym + else + raise ArgumentError, "#{md} is not a valid digest algorithm for fingerprinting certificate #{name}" + end + + OpenSSL::Digest.const_get(md).hexdigest(content.to_der).scan(/../).join(':').upcase + end + + private + + def wrapped_class + self.class.wrapped_class + end +end diff --git a/mcollective/lib/puppet/ssl/certificate.rb b/mcollective/lib/puppet/ssl/certificate.rb new file mode 100644 index 000000000..719a741a7 --- /dev/null +++ b/mcollective/lib/puppet/ssl/certificate.rb @@ -0,0 +1,40 @@ +require 'puppet/ssl/base' + +# Manage certificates themselves. 
This class has no +# 'generate' method because the CA is responsible +# for turning CSRs into certificates; we can only +# retrieve them from the CA (or not, as is often +# the case). +class Puppet::SSL::Certificate < Puppet::SSL::Base + # This is defined from the base class + wraps OpenSSL::X509::Certificate + + extend Puppet::Indirector + indirects :certificate, :terminus_class => :file + + # Convert a string into an instance. + def self.from_s(string) + instance = wrapped_class.new(string) + name = instance.subject.to_s.sub(/\/CN=/i, '').downcase + result = new(name) + result.content = instance + result + end + + # Because of how the format handler class is included, this + # can't be in the base class. + def self.supported_formats + [:s] + end + + def subject_alt_names + alts = content.extensions.find{|ext| ext.oid == "subjectAltName"} + return [] unless alts + alts.value.split(/\s*,\s*/) + end + + def expiration + return nil unless content + content.not_after + end +end diff --git a/mcollective/lib/puppet/ssl/certificate_authority.rb b/mcollective/lib/puppet/ssl/certificate_authority.rb new file mode 100644 index 000000000..752261c30 --- /dev/null +++ b/mcollective/lib/puppet/ssl/certificate_authority.rb @@ -0,0 +1,369 @@ +require 'puppet/ssl/host' +require 'puppet/ssl/certificate_request' +require 'puppet/util/cacher' + +# The class that knows how to sign certificates. It creates +# a 'special' SSL::Host whose name is 'ca', thus indicating +# that, well, it's the CA. There's some magic in the +# indirector/ssl_file terminus base class that does that +# for us. +# This class mostly just signs certs for us, but +# it can also be seen as a general interface into all of the +# SSL stuff. +class Puppet::SSL::CertificateAuthority + # We will only sign extensions on this whitelist, ever. Any CSR with a + # requested extension that we don't recognize is rejected, against the risk + # that it will introduce some security issue through our ignorance of it. + # + # Adding an extension to this whitelist simply means we will consider it + # further, not that we will always accept a certificate with an extension + # requested on this list. + RequestExtensionWhitelist = %w{subjectAltName} + + require 'puppet/ssl/certificate_factory' + require 'puppet/ssl/inventory' + require 'puppet/ssl/certificate_revocation_list' + require 'puppet/ssl/certificate_authority/interface' + require 'puppet/network/authstore' + + class CertificateVerificationError < RuntimeError + attr_accessor :error_code + + def initialize(code) + @error_code = code + end + end + + class CertificateSigningError < RuntimeError + attr_accessor :host + + def initialize(host) + @host = host + end + end + + class << self + include Puppet::Util::Cacher + + cached_attr(:singleton_instance) { new } + end + + def self.ca? + return false unless Puppet[:ca] + return false unless Puppet.run_mode.master? + true + end + + # If this process can function as a CA, then return a singleton + # instance. + def self.instance + return nil unless ca? + + singleton_instance + end + + attr_reader :name, :host + + # Create and run an applicator. I wanted to build an interface where you could do + # something like 'ca.apply(:generate).to(:all) but I don't think it's really possible. 
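+ # In practice a call looks like ca.apply(:sign, :to => %w{node1 node2})
+ # or ca.apply(:list, :to => :all); the :to option is required and is
+ # handed to the Interface applier (hostnames here are illustrative).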
+ def apply(method, options) + raise ArgumentError, "You must specify the hosts to apply to; valid values are an array or the symbol :all" unless options[:to] + applier = Interface.new(method, options) + applier.apply(self) + end + + # If autosign is configured, then autosign all CSRs that match our configuration. + def autosign + return unless auto = autosign? + + store = nil + store = autosign_store(auto) if auto != true + + Puppet::SSL::CertificateRequest.search("*").each do |csr| + sign(csr.name) if auto == true or store.allowed?(csr.name, "127.1.1.1") + end + end + + # Do we autosign? This returns true, false, or a filename. + def autosign? + auto = Puppet[:autosign] + return false if ['false', false].include?(auto) + return true if ['true', true].include?(auto) + + raise ArgumentError, "The autosign configuration '#{auto}' must be a fully qualified file" unless auto =~ /^\// + FileTest.exist?(auto) && auto + end + + # Create an AuthStore for autosigning. + def autosign_store(file) + auth = Puppet::Network::AuthStore.new + File.readlines(file).each do |line| + next if line =~ /^\s*#/ + next if line =~ /^\s*$/ + auth.allow(line.chomp) + end + + auth + end + + # Retrieve (or create, if necessary) the certificate revocation list. + def crl + unless defined?(@crl) + unless @crl = Puppet::SSL::CertificateRevocationList.find(Puppet::SSL::CA_NAME) + @crl = Puppet::SSL::CertificateRevocationList.new(Puppet::SSL::CA_NAME) + @crl.generate(host.certificate.content, host.key.content) + @crl.save + end + end + @crl + end + + # Delegate this to our Host class. + def destroy(name) + Puppet::SSL::Host.destroy(name) + end + + # Generate a new certificate. + def generate(name, options = {}) + raise ArgumentError, "A Certificate already exists for #{name}" if Puppet::SSL::Certificate.find(name) + + # Pass on any requested subjectAltName field. + san = options[:dns_alt_names] + + host = Puppet::SSL::Host.new(name) + host.generate_certificate_request(:dns_alt_names => san) + sign(name, !!san) + end + + # Generate our CA certificate. + def generate_ca_certificate + generate_password unless password? + + host.generate_key unless host.key + + # Create a new cert request. We do this specially, because we don't want + # to actually save the request anywhere. + request = Puppet::SSL::CertificateRequest.new(host.name) + + # We deliberately do not put any subjectAltName in here: the CA + # certificate absolutely does not need them. --daniel 2011-10-13 + request.generate(host.key) + + # Create a self-signed certificate. + @certificate = sign(host.name, false, request) + + # And make sure we initialize our CRL. + crl + end + + def initialize + Puppet.settings.use :main, :ssl, :ca + + @name = Puppet[:certname] + + @host = Puppet::SSL::Host.new(Puppet::SSL::Host.ca_name) + + setup + end + + # Retrieve (or create, if necessary) our inventory manager. + def inventory + @inventory ||= Puppet::SSL::Inventory.new + end + + # Generate a new password for the CA. + def generate_password + pass = "" + 20.times { pass += (rand(74) + 48).chr } + + begin + Puppet.settings.write(:capass) { |f| f.print pass } + rescue Errno::EACCES => detail + raise Puppet::Error, "Could not write CA password: #{detail}" + end + + @password = pass + + pass + end + + # List all signed certificates. + def list + Puppet::SSL::Certificate.search("*").collect { |c| c.name } + end + + # Read the next serial from the serial file, and increment the + # file so this one is considered used. + def next_serial + serial = nil + + # This is slightly odd. 
If the file doesn't exist, our readwritelock creates + # it, but with a mode we can't actually read in some cases. So, use + # a default before the lock. + serial = 0x1 unless FileTest.exist?(Puppet[:serial]) + + Puppet.settings.readwritelock(:serial) { |f| + serial ||= File.read(Puppet.settings[:serial]).chomp.hex if FileTest.exist?(Puppet[:serial]) + + # We store the next valid serial, not the one we just used. + f << "%04X" % (serial + 1) + } + + serial + end + + # Does the password file exist? + def password? + FileTest.exist? Puppet[:capass] + end + + # Print a given host's certificate as text. + def print(name) + (cert = Puppet::SSL::Certificate.find(name)) ? cert.to_text : nil + end + + # Revoke a given certificate. + def revoke(name) + raise ArgumentError, "Cannot revoke certificates when the CRL is disabled" unless crl + + if cert = Puppet::SSL::Certificate.find(name) + serial = cert.content.serial + elsif ! serial = inventory.serial(name) + raise ArgumentError, "Could not find a serial number for #{name}" + end + crl.revoke(serial, host.key.content) + end + + # This initializes our CA so it actually works. This should be a private + # method, except that you can't any-instance stub private methods, which is + # *awesome*. This method only really exists to provide a stub-point during + # testing. + def setup + generate_ca_certificate unless @host.certificate + end + + # Sign a given certificate request. + def sign(hostname, allow_dns_alt_names = false, self_signing_csr = nil) + # This is a self-signed certificate + if self_signing_csr + # # This is a self-signed certificate, which is for the CA. Since this + # # forces the certificate to be self-signed, anyone who manages to trick + # # the system into going through this path gets a certificate they could + # # generate anyway. There should be no security risk from that. + csr = self_signing_csr + cert_type = :ca + issuer = csr.content + else + allow_dns_alt_names = true if hostname == Puppet[:certname].downcase + unless csr = Puppet::SSL::CertificateRequest.find(hostname) + raise ArgumentError, "Could not find certificate request for #{hostname}" + end + + cert_type = :server + issuer = host.certificate.content + + # Make sure that the CSR conforms to our internal signing policies. + # This will raise if the CSR doesn't conform, but just in case... + check_internal_signing_policies(hostname, csr, allow_dns_alt_names) or + raise CertificateSigningError.new(hostname), "CSR had an unknown failure checking internal signing policies, will not sign!" + end + + cert = Puppet::SSL::Certificate.new(hostname) + cert.content = Puppet::SSL::CertificateFactory. + build(cert_type, csr, issuer, next_serial) + cert.content.sign(host.key.content, OpenSSL::Digest::SHA1.new) + + Puppet.notice "Signed certificate request for #{hostname}" + + # Add the cert to the inventory before we save it, since + # otherwise we could end up with it being duplicated, if + # this is the first time we build the inventory file. + inventory.add(cert) + + # Save the now-signed cert. This should get routed correctly depending + # on the certificate type. + cert.save + + # And remove the CSR if this wasn't self signed. + Puppet::SSL::CertificateRequest.destroy(csr.name) unless self_signing_csr + + cert + end + + def check_internal_signing_policies(hostname, csr, allow_dns_alt_names) + # Reject unknown request extensions. + unknown_req = csr.request_extensions. + reject {|x| RequestExtensionWhitelist.include? x["oid"] } + + if unknown_req and not unknown_req.empty? 
+ names = unknown_req.map {|x| x["oid"] }.sort.uniq.join(", ") + raise CertificateSigningError.new(hostname), "CSR has request extensions that are not permitted: #{names}" + end + + # Do not sign misleading CSRs + cn = csr.content.subject.to_a.assoc("CN")[1] + if hostname != cn + raise CertificateSigningError.new(hostname), "CSR subject common name #{cn.inspect} does not match expected certname #{hostname.inspect}" + end + + if hostname !~ Puppet::SSL::Base::VALID_CERTNAME + raise CertificateSigningError.new(hostname), "CSR #{hostname.inspect} subject contains unprintable or non-ASCII characters" + end + + # Wildcards: we don't allow 'em at any point. + # + # The stringification here makes the content visible, and saves us having + # to scrobble through the content of the CSR subject field to make sure it + # is what we expect where we expect it. + if csr.content.subject.to_s.include? '*' + raise CertificateSigningError.new(hostname), "CSR subject contains a wildcard, which is not allowed: #{csr.content.subject.to_s}" + end + + unless csr.subject_alt_names.empty? + # If you alt names are allowed, they are required. Otherwise they are + # disallowed. Self-signed certs are implicitly trusted, however. + unless allow_dns_alt_names + raise CertificateSigningError.new(hostname), "CSR '#{csr.name}' contains subject alternative names (#{csr.subject_alt_names.join(', ')}), which are disallowed. Use `puppet cert --allow-dns-alt-names sign #{csr.name}` to sign this request." + end + + # If subjectAltNames are present, validate that they are only for DNS + # labels, not any other kind. + unless csr.subject_alt_names.all? {|x| x =~ /^DNS:/ } + raise CertificateSigningError.new(hostname), "CSR '#{csr.name}' contains a subjectAltName outside the DNS label space: #{csr.subject_alt_names.join(', ')}. To continue, this CSR needs to be cleaned." + end + + # Check for wildcards in the subjectAltName fields too. + if csr.subject_alt_names.any? {|x| x.include? '*' } + raise CertificateSigningError.new(hostname), "CSR '#{csr.name}' subjectAltName contains a wildcard, which is not allowed: #{csr.subject_alt_names.join(', ')} To continue, this CSR needs to be cleaned." + end + end + + return true # good enough for us! + end + + # Verify a given host's certificate. + def verify(name) + unless cert = Puppet::SSL::Certificate.find(name) + raise ArgumentError, "Could not find a certificate for #{name}" + end + store = OpenSSL::X509::Store.new + store.add_file Puppet[:cacert] + store.add_crl crl.content if self.crl + store.purpose = OpenSSL::X509::PURPOSE_SSL_CLIENT + store.flags = OpenSSL::X509::V_FLAG_CRL_CHECK_ALL|OpenSSL::X509::V_FLAG_CRL_CHECK if Puppet.settings[:certificate_revocation] + + raise CertificateVerificationError.new(store.error), store.error_string unless store.verify(cert.content) + end + + def fingerprint(name, md = :MD5) + unless cert = Puppet::SSL::Certificate.find(name) || Puppet::SSL::CertificateRequest.find(name) + raise ArgumentError, "Could not find a certificate or csr for #{name}" + end + cert.fingerprint(md) + end + + # List the waiting certificate requests. + def waiting? 
+ Puppet::SSL::CertificateRequest.search("*").collect { |r| r.name } + end +end diff --git a/mcollective/lib/puppet/ssl/certificate_authority/interface.rb b/mcollective/lib/puppet/ssl/certificate_authority/interface.rb new file mode 100644 index 000000000..775b7b0c2 --- /dev/null +++ b/mcollective/lib/puppet/ssl/certificate_authority/interface.rb @@ -0,0 +1,181 @@ +# This class is basically a hidden class that knows how to act +# on the CA. It's only used by the 'puppetca' executable, and its +# job is to provide a CLI-like interface to the CA class. +module Puppet + module SSL + class CertificateAuthority + class Interface + INTERFACE_METHODS = [:destroy, :list, :revoke, :generate, :sign, :print, :verify, :fingerprint] + + class InterfaceError < ArgumentError; end + + attr_reader :method, :subjects, :digest, :options + + # Actually perform the work. + def apply(ca) + unless subjects or method == :list + raise ArgumentError, "You must provide hosts or :all when using #{method}" + end + + begin + return send(method, ca) if respond_to?(method) + + (subjects == :all ? ca.list : subjects).each do |host| + ca.send(method, host) + end + rescue InterfaceError + raise + rescue => detail + puts detail.backtrace if Puppet[:trace] + Puppet.err "Could not call #{method}: #{detail}" + end + end + + def generate(ca) + raise InterfaceError, "It makes no sense to generate all hosts; you must specify a list" if subjects == :all + + subjects.each do |host| + ca.generate(host, options) + end + end + + def initialize(method, options) + self.method = method + self.subjects = options.delete(:to) + @digest = options.delete(:digest) || :MD5 + @options = options + end + + # List the hosts. + def list(ca) + signed = ca.list + requests = ca.waiting? + + case subjects + when :all + hosts = [signed, requests].flatten + when :signed + hosts = signed.flatten + when nil + hosts = requests + else + hosts = subjects + end + + certs = {:signed => {}, :invalid => {}, :request => {}} + + return if hosts.empty? + + hosts.uniq.sort.each do |host| + begin + ca.verify(host) unless requests.include?(host) + rescue Puppet::SSL::CertificateAuthority::CertificateVerificationError => details + verify_error = details.to_s + end + + if verify_error + cert = Puppet::SSL::Certificate.indirection.find(host) + certs[:invalid][host] = [cert, verify_error] + elsif signed.include?(host) + cert = Puppet::SSL::Certificate.indirection.find(host) + certs[:signed][host] = cert + else + req = Puppet::SSL::CertificateRequest.indirection.find(host) + certs[:request][host] = req + end + end + + names = certs.values.map(&:keys).flatten + + name_width = names.sort_by(&:length).last.length rescue 0 + # We quote these names, so account for those characters + name_width += 2 + + output = [:request, :signed, :invalid].map do |type| + next if certs[type].empty? + + certs[type].map do |host,info| + format_host(ca, host, type, info, name_width) + end + end.flatten.compact.sort.join("\n") + + puts output + end + + def format_host(ca, host, type, info, width) + certish, verify_error = info + alt_names = case type + when :signed + certish.subject_alt_names + when :request + certish.subject_alt_names + else + [] + end + + alt_names.delete(host) + + alt_str = "(alt names: #{alt_names.map(&:inspect).join(', ')})" unless alt_names.empty? 
+ + glyph = {:signed => '+', :request => ' ', :invalid => '-'}[type] + + name = host.inspect.ljust(width) + fingerprint = "(#{ca.fingerprint(host, @digest)})" + + explanation = "(#{verify_error})" if verify_error + + [glyph, name, fingerprint, alt_str, explanation].compact.join(' ') + end + + # Set the method to apply. + def method=(method) + raise ArgumentError, "Invalid method #{method} to apply" unless INTERFACE_METHODS.include?(method) + @method = method + end + + # Print certificate information. + def print(ca) + (subjects == :all ? ca.list : subjects).each do |host| + if value = ca.print(host) + puts value + else + Puppet.err "Could not find certificate for #{host}" + end + end + end + + # Print certificate information. + def fingerprint(ca) + (subjects == :all ? ca.list + ca.waiting?: subjects).each do |host| + if value = ca.fingerprint(host, @digest) + puts "#{host} #{value}" + else + Puppet.err "Could not find certificate for #{host}" + end + end + end + + # Sign a given certificate. + def sign(ca) + list = subjects == :all ? ca.waiting? : subjects + raise InterfaceError, "No waiting certificate requests to sign" if list.empty? + list.each do |host| + ca.sign(host, options[:allow_dns_alt_names]) + end + end + + # Set the list of hosts we're operating on. Also supports keywords. + def subjects=(value) + unless value == :all or value == :signed or value.is_a?(Array) + raise ArgumentError, "Subjects must be an array or :all; not #{value}" + end + + value = nil if value.is_a?(Array) and value.empty? + + @subjects = value + end + end + end + end +end + diff --git a/mcollective/lib/puppet/ssl/certificate_factory.rb b/mcollective/lib/puppet/ssl/certificate_factory.rb new file mode 100644 index 000000000..cd51ff5d7 --- /dev/null +++ b/mcollective/lib/puppet/ssl/certificate_factory.rb @@ -0,0 +1,166 @@ +require 'puppet/ssl' + +# The tedious class that does all the manipulations to the +# certificate to correctly sign it. Yay. +module Puppet::SSL::CertificateFactory + # How we convert from various units to the required seconds. + UNITMAP = { + "y" => 365 * 24 * 60 * 60, + "d" => 24 * 60 * 60, + "h" => 60 * 60, + "s" => 1 + } + + def self.build(cert_type, csr, issuer, serial) + # Work out if we can even build the requested type of certificate. + build_extensions = "build_#{cert_type.to_s}_extensions" + respond_to?(build_extensions) or + raise ArgumentError, "#{cert_type.to_s} is an invalid certificate type!" + + # set up the certificate, and start building the content. + cert = OpenSSL::X509::Certificate.new + + cert.version = 2 # X509v3 + cert.subject = csr.content.subject + cert.issuer = issuer.subject + cert.public_key = csr.content.public_key + cert.serial = serial + + # Make the certificate valid as of yesterday, because so many people's + # clocks are out of sync. This gives one more day of validity than people + # might expect, but is better than making every person who has a messed up + # clock fail, and better than having every cert we generate expire a day + # before the user expected it to when they asked for "one year". + cert.not_before = Time.now - (60*60*24) + cert.not_after = Time.now + ttl + + add_extensions_to(cert, csr, issuer, send(build_extensions)) + + return cert + end + + private + + def self.add_extensions_to(cert, csr, issuer, extensions) + ef = OpenSSL::X509::ExtensionFactory. + new(cert, issuer.is_a?(OpenSSL::X509::Request) ? cert : issuer) + + # Extract the requested extensions from the CSR. 
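+ # The result is a hash keyed by OID, mapping to [value, critical],
+ # e.g. (illustrative values only)
+ #   { "subjectAltName" => ["DNS:foo, DNS:bar", false] }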
+ requested_exts = csr.request_extensions.inject({}) do |hash, re| + hash[re["oid"]] = [re["value"], re["critical"]] + hash + end + + # Produce our final set of extensions. We deliberately order these to + # build the way we want: + # 1. "safe" default values, like the comment, that no one cares about. + # 2. request extensions, from the CSR + # 3. extensions based on the type we are generating + # 4. overrides, which we always want to have in their form + # + # This ordering *is* security-critical, but we want to allow the user + # enough rope to shoot themselves in the foot, if they want to ignore our + # advice and externally approve a CSR that sets the basicConstraints. + # + # Swapping the order of 2 and 3 would ensure that you couldn't slip a + # certificate through where the CA constraint was true, though, if + # something went wrong up there. --daniel 2011-10-11 + defaults = { "nsComment" => "Puppet Ruby/OpenSSL Internal Certificate" } + override = { "subjectKeyIdentifier" => "hash" } + + exts = [defaults, requested_exts, extensions, override]. + inject({}) {|ret, val| ret.merge(val) } + + cert.extensions = exts.map do |oid, val| + val, crit = *val + val = val.join(', ') unless val.is_a? String + + # Enforce the X509v3 rules about subjectAltName being critical: + # specifically, it SHOULD NOT be critical if we have a subject, which we + # always do. --daniel 2011-10-18 + crit = false if oid == "subjectAltName" + + # val can be either a string, or [string, critical], and this does the + # right thing regardless of what we get passed. + ef.create_ext(oid, val, crit) + end + end + + # TTL for new certificates in seconds. If config param :ca_ttl is set, + # use that, otherwise use :ca_days for backwards compatibility + def self.ttl + ttl = Puppet.settings[:ca_ttl] + + return ttl unless ttl.is_a?(String) + + raise ArgumentError, "Invalid ca_ttl #{ttl}" unless ttl =~ /^(\d+)(y|d|h|s)$/ + + $1.to_i * UNITMAP[$2] + end + + # Woot! We're a CA. + def self.build_ca_extensions + { + # This was accidentally omitted in the previous version of this code: an + # effort was made to add it last, but that actually managed to avoid + # adding it to the certificate at all. + # + # We have some sort of bug, which means that when we add it we get a + # complaint that the issuer keyid can't be fetched, which breaks all + # sorts of things in our test suite and, e.g., bootstrapping the CA. + # + # http://tools.ietf.org/html/rfc5280#section-4.2.1.1 says that, to be a + # conforming CA we MAY omit the field if we are self-signed, which I + # think gives us a pass in the specific case. + # + # It also notes that we MAY derive the ID from the subject and serial + # number of the issuer, or from the key ID, and we definitely have the + # former data, should we want to restore this... + # + # Anyway, preserving this bug means we don't risk breaking anything in + # the field, even though it would be nice to have. --daniel 2011-10-11 + # + # "authorityKeyIdentifier" => "keyid:always,issuer:always", + "keyUsage" => [%w{cRLSign keyCertSign}, true], + "basicConstraints" => ["CA:TRUE", true], + } + end + + # We're a terminal CA, probably not self-signed. + def self.build_terminalsubca_extensions + { + "keyUsage" => [%w{cRLSign keyCertSign}, true], + "basicConstraints" => ["CA:TRUE,pathlen:0", true], + } + end + + # We're a normal server. 
+ def self.build_server_extensions + { + "keyUsage" => [%w{digitalSignature keyEncipherment}, true], + "extendedKeyUsage" => [%w{serverAuth clientAuth}, true], + "basicConstraints" => ["CA:FALSE", true], + } + end + + # Um, no idea. + def self.build_ocsp_extensions + { + "keyUsage" => [%w{nonRepudiation digitalSignature}, true], + "extendedKeyUsage" => [%w{serverAuth OCSPSigning}, true], + "basicConstraints" => ["CA:FALSE", true], + } + end + + # Normal client. + def self.build_client_extensions + { + "keyUsage" => [%w{nonRepudiation digitalSignature keyEncipherment}, true], + # We don't seem to use this, but that seems much more reasonable here... + "extendedKeyUsage" => [%w{clientAuth emailProtection}, true], + "basicConstraints" => ["CA:FALSE", true], + "nsCertType" => "client,email", + } + end +end + diff --git a/mcollective/lib/puppet/ssl/certificate_request.rb b/mcollective/lib/puppet/ssl/certificate_request.rb new file mode 100644 index 000000000..d879b6fbd --- /dev/null +++ b/mcollective/lib/puppet/ssl/certificate_request.rb @@ -0,0 +1,145 @@ +require 'puppet/ssl/base' + +# Manage certificate requests. +class Puppet::SSL::CertificateRequest < Puppet::SSL::Base + wraps OpenSSL::X509::Request + + extend Puppet::Indirector + indirects :certificate_request, :terminus_class => :file + + # Convert a string into an instance. + def self.from_s(string) + instance = wrapped_class.new(string) + name = instance.subject.to_s.sub(/\/CN=/i, '').downcase + result = new(name) + result.content = instance + result + end + + # Because of how the format handler class is included, this + # can't be in the base class. + def self.supported_formats + [:s] + end + + def extension_factory + @ef ||= OpenSSL::X509::ExtensionFactory.new + end + + # How to create a certificate request with our system defaults. + def generate(key, options = {}) + Puppet.info "Creating a new SSL certificate request for #{name}" + + # Support either an actual SSL key, or a Puppet key. + key = key.content if key.is_a?(Puppet::SSL::Key) + + # If we're a CSR for the CA, then use the real ca_name, rather than the + # fake 'ca' name. This is mostly for backward compatibility with 0.24.x, + # but it's also just a good idea. + common_name = name == Puppet::SSL::CA_NAME ? Puppet.settings[:ca_name] : name + + csr = OpenSSL::X509::Request.new + csr.version = 0 + csr.subject = OpenSSL::X509::Name.new([["CN", common_name]]) + csr.public_key = key.public_key + + if options[:dns_alt_names] then + names = options[:dns_alt_names].split(/\s*,\s*/).map(&:strip) + [name] + names = names.sort.uniq.map {|name| "DNS:#{name}" }.join(", ") + names = extension_factory.create_extension("subjectAltName", names, false) + + extReq = OpenSSL::ASN1::Set([OpenSSL::ASN1::Sequence([names])]) + + # We only support the standard request extensions. If you really need + # msExtReq support, let us know and we can restore them. --daniel 2011-10-10 + csr.add_attribute(OpenSSL::X509::Attribute.new("extReq", extReq)) + end + + csr.sign(key, OpenSSL::Digest::MD5.new) + + raise Puppet::Error, "CSR sign verification failed; you need to clean the certificate request for #{name} on the server" unless csr.verify(key.public_key) + + @content = csr + Puppet.info "Certificate Request fingerprint (md5): #{fingerprint}" + @content + end + + def save(args = {}) + super() + + # Try to autosign the CSR. 
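+ # CertificateAuthority.instance returns nil unless this process is
+ # the CA, so on an agent this is a no-op; on the CA the request may
+ # be signed immediately, depending on the autosign setting.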
+ if ca = Puppet::SSL::CertificateAuthority.instance + ca.autosign + end + end + + # Return the set of extensions requested on this CSR, in a form designed to + # be useful to Ruby: a hash. Which, not coincidentally, you can pass + # successfully to the OpenSSL constructor later, if you want. + def request_extensions + raise Puppet::Error, "CSR needs content to extract fields" unless @content + + # Prefer the standard extReq, but accept the Microsoft specific version as + # a fallback, if the standard version isn't found. + ext = @content.attributes.find {|x| x.oid == "extReq" } or + @content.attributes.find {|x| x.oid == "msExtReq" } + return [] unless ext + + # Assert the structure and extract the names into an array of arrays. + unless ext.value.is_a? OpenSSL::ASN1::Set + raise Puppet::Error, "In #{ext.oid}, expected Set but found #{ext.value.class}" + end + + unless ext.value.value.is_a? Array + raise Puppet::Error, "In #{ext.oid}, expected Set[Array] but found #{ext.value.value.class}" + end + + unless ext.value.value.length == 1 + raise Puppet::Error, "In #{ext.oid}, expected Set[Array[...]], but found #{ext.value.value.length} items in the array" + end + + san = ext.value.value.first + unless san.is_a? OpenSSL::ASN1::Sequence + raise Puppet::Error, "In #{ext.oid}, expected Set[Array[Sequence[...]]], but found #{san.class}" + end + san = san.value + + # OK, now san should be the array of items, validate that... + index = -1 + san.map do |name| + index += 1 + + unless name.is_a? OpenSSL::ASN1::Sequence + raise Puppet::Error, "In #{ext.oid}, expected request extension record #{index} to be a Sequence, but found #{name.class}" + end + name = name.value + + # OK, turn that into an extension, to unpack the content. Lovely that + # we have to swap the order of arguments to the underlying method, or + # perhaps that the ASN.1 representation chose to pack them in a + # strange order where the optional component comes *earlier* than the + # fixed component in the sequence. + case name.length + when 2 + ev = OpenSSL::X509::Extension.new(name[0].value, name[1].value) + { "oid" => ev.oid, "value" => ev.value } + + when 3 + ev = OpenSSL::X509::Extension.new(name[0].value, name[2].value, name[1].value) + { "oid" => ev.oid, "value" => ev.value, "critical" => ev.critical? } + + else + raise Puppet::Error, "In #{ext.oid}, expected extension record #{index} to have two or three items, but found #{name.length}" + end + end.flatten + end + + def subject_alt_names + @subject_alt_names ||= request_extensions. + select {|x| x["oid"] = "subjectAltName" }. + map {|x| x["value"].split(/\s*,\s*/) }. + flatten. + sort. + uniq + end +end diff --git a/mcollective/lib/puppet/ssl/certificate_revocation_list.rb b/mcollective/lib/puppet/ssl/certificate_revocation_list.rb new file mode 100644 index 000000000..44e0a9e22 --- /dev/null +++ b/mcollective/lib/puppet/ssl/certificate_revocation_list.rb @@ -0,0 +1,84 @@ +require 'puppet/ssl/base' +require 'puppet/indirector' + +# Manage the CRL. +class Puppet::SSL::CertificateRevocationList < Puppet::SSL::Base + wraps OpenSSL::X509::CRL + + extend Puppet::Indirector + indirects :certificate_revocation_list, :terminus_class => :file + + # Convert a string into an instance. + def self.from_s(string) + instance = wrapped_class.new(string) + result = new('foo') # The name doesn't matter + result.content = instance + result + end + + # Because of how the format handler class is included, this + # can't be in the base class. 
+ def self.supported_formats + [:s] + end + + # Knows how to create a CRL with our system defaults. + def generate(cert, cakey) + Puppet.info "Creating a new certificate revocation list" + @content = wrapped_class.new + @content.issuer = cert.subject + @content.version = 1 + + # Init the CRL number. + crlNum = OpenSSL::ASN1::Integer(0) + @content.extensions = [OpenSSL::X509::Extension.new("crlNumber", crlNum)] + + # Set last/next update + @content.last_update = Time.now + # Keep CRL valid for 5 years + @content.next_update = Time.now + 5 * 365*24*60*60 + + @content.sign(cakey, OpenSSL::Digest::SHA1.new) + + @content + end + + # The name doesn't actually matter; there's only one CRL. + # We just need the name so our Indirector stuff all works more easily. + def initialize(fakename) + @name = "crl" + end + + # Revoke the certificate with serial number SERIAL issued by this + # CA, then write the CRL back to disk. The REASON must be one of the + # OpenSSL::OCSP::REVOKED_* reasons + def revoke(serial, cakey, reason = OpenSSL::OCSP::REVOKED_STATUS_KEYCOMPROMISE) + Puppet.notice "Revoked certificate with serial #{serial}" + time = Time.now + + # Add our revocation to the CRL. + revoked = OpenSSL::X509::Revoked.new + revoked.serial = serial + revoked.time = time + enum = OpenSSL::ASN1::Enumerated(reason) + ext = OpenSSL::X509::Extension.new("CRLReason", enum) + revoked.add_extension(ext) + @content.add_revoked(revoked) + + # Increment the crlNumber + e = @content.extensions.find { |e| e.oid == 'crlNumber' } + ext = @content.extensions.reject { |e| e.oid == 'crlNumber' } + crlNum = OpenSSL::ASN1::Integer(e ? e.value.to_i + 1 : 0) + ext << OpenSSL::X509::Extension.new("crlNumber", crlNum) + @content.extensions = ext + + # Set last/next update + @content.last_update = time + # Keep CRL valid for 5 years + @content.next_update = time + 5 * 365*24*60*60 + + @content.sign(cakey, OpenSSL::Digest::SHA1.new) + + save + end +end diff --git a/mcollective/lib/puppet/ssl/host.rb b/mcollective/lib/puppet/ssl/host.rb new file mode 100644 index 000000000..2aa7b3123 --- /dev/null +++ b/mcollective/lib/puppet/ssl/host.rb @@ -0,0 +1,278 @@ +require 'puppet/ssl' +require 'puppet/ssl/key' +require 'puppet/ssl/certificate' +require 'puppet/ssl/certificate_request' +require 'puppet/ssl/certificate_revocation_list' +require 'puppet/util/cacher' + +# The class that manages all aspects of our SSL certificates -- +# private keys, public keys, requests, etc. +class Puppet::SSL::Host + # Yay, ruby's strange constant lookups. + Key = Puppet::SSL::Key + CA_NAME = Puppet::SSL::CA_NAME + Certificate = Puppet::SSL::Certificate + CertificateRequest = Puppet::SSL::CertificateRequest + CertificateRevocationList = Puppet::SSL::CertificateRevocationList + + attr_reader :name + attr_accessor :ca + + attr_writer :key, :certificate, :certificate_request + + class << self + include Puppet::Util::Cacher + + cached_attr(:localhost) do + result = new + result.generate unless result.certificate + result.key # Make sure it's read in + result + end + end + + # This is the constant that people will use to mark that a given host is + # a certificate authority. + def self.ca_name + CA_NAME + end + + class << self + attr_reader :ca_location + end + + # Configure how our various classes interact with their various terminuses. 
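+ # For example, setting ca_location = :remote (see CA_MODES below)
+ # calls this with (:rest, :file): certificates, requests and CRLs are
+ # fetched over REST and cached in local files, while the private key
+ # uses the :file terminus directly and is never cached.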
+ def self.configure_indirection(terminus, cache = nil) + Certificate.terminus_class = terminus + CertificateRequest.terminus_class = terminus + CertificateRevocationList.terminus_class = terminus + + if cache + # This is weird; we don't actually cache our keys, we + # use what would otherwise be the cache as our normal + # terminus. + Key.terminus_class = cache + else + Key.terminus_class = terminus + end + + if cache + Certificate.cache_class = cache + CertificateRequest.cache_class = cache + CertificateRevocationList.cache_class = cache + else + # Make sure we have no cache configured. puppet master + # switches the configurations around a bit, so it's important + # that we specify the configs for absolutely everything, every + # time. + Certificate.cache_class = nil + CertificateRequest.cache_class = nil + CertificateRevocationList.cache_class = nil + end + end + + CA_MODES = { + # Our ca is local, so we use it as the ultimate source of information + # And we cache files locally. + :local => [:ca, :file], + # We're a remote CA client. + :remote => [:rest, :file], + # We are the CA, so we don't have read/write access to the normal certificates. + :only => [:ca], + # We have no CA, so we just look in the local file store. + :none => [:file] + } + + # Specify how we expect to interact with our certificate authority. + def self.ca_location=(mode) + raise ArgumentError, "CA Mode can only be #{CA_MODES.collect { |m| m.to_s }.join(", ")}" unless CA_MODES.include?(mode) + + @ca_location = mode + + configure_indirection(*CA_MODES[@ca_location]) + end + + # Remove all traces of a given host + def self.destroy(name) + [Key, Certificate, CertificateRequest].collect { |part| part.destroy(name) }.any? { |x| x } + end + + # Search for more than one host, optionally only specifying + # an interest in hosts with a given file type. + # This just allows our non-indirected class to have one of + # indirection methods. + def self.search(options = {}) + classlist = [options[:for] || [Key, CertificateRequest, Certificate]].flatten + + # Collect the results from each class, flatten them, collect all of the names, make the name list unique, + # then create a Host instance for each one. + classlist.collect { |klass| klass.search }.flatten.collect { |r| r.name }.uniq.collect do |name| + new(name) + end + end + + # Is this a ca host, meaning that all of its files go in the CA location? + def ca? + ca + end + + def key + @key ||= Key.find(name) + end + + # This is the private key; we can create it from scratch + # with no inputs. + def generate_key + @key = Key.new(name) + @key.generate + begin + @key.save + rescue + @key = nil + raise + end + true + end + + def certificate_request + @certificate_request ||= CertificateRequest.find(name) + end + + def this_csr_is_for_the_current_host + name == Puppet[:certname].downcase + end + + # Our certificate request requires the key but that's all. + def generate_certificate_request(options = {}) + generate_key unless key + + # If this is for the current machine... + if this_csr_is_for_the_current_host + # ...add our configured dns_alt_names + if Puppet[:dns_alt_names] and Puppet[:dns_alt_names] != '' + options[:dns_alt_names] ||= Puppet[:dns_alt_names] + elsif Puppet::SSL::CertificateAuthority.ca? 
and fqdn = Facter.value(:fqdn) and domain = Facter.value(:domain) + options[:dns_alt_names] = "puppet, #{fqdn}, puppet.#{domain}" + end + end + + @certificate_request = CertificateRequest.new(name) + @certificate_request.generate(key.content, options) + begin + @certificate_request.save + rescue + @certificate_request = nil + raise + end + + true + end + + def certificate + unless @certificate + generate_key unless key + + # get the CA cert first, since it's required for the normal cert + # to be of any use. + return nil unless Certificate.find("ca") unless ca? + return nil unless @certificate = Certificate.find(name) + + unless certificate_matches_key? + raise Puppet::Error, "Retrieved certificate does not match private key; please remove certificate from server and regenerate it with the current key" + end + end + @certificate + end + + def certificate_matches_key? + return false unless key + return false unless certificate + + certificate.content.check_private_key(key.content) + end + + # Generate all necessary parts of our ssl host. + def generate + generate_key unless key + generate_certificate_request unless certificate_request + + # If we can get a CA instance, then we're a valid CA, and we + # should use it to sign our request; else, just try to read + # the cert. + if ! certificate and ca = Puppet::SSL::CertificateAuthority.instance + ca.sign(self.name, true) + end + end + + def initialize(name = nil) + @name = (name || Puppet[:certname]).downcase + Puppet::SSL::Base.validate_certname(@name) + @key = @certificate = @certificate_request = nil + @ca = (name == self.class.ca_name) + end + + # Extract the public key from the private key. + def public_key + key.content.public_key + end + + # Create/return a store that uses our SSL info to validate + # connections. + def ssl_store(purpose = OpenSSL::X509::PURPOSE_ANY) + unless @ssl_store + @ssl_store = OpenSSL::X509::Store.new + @ssl_store.purpose = purpose + + # Use the file path here, because we don't want to cause + # a lookup in the middle of setting our ssl connection. + @ssl_store.add_file(Puppet[:localcacert]) + + # If there's a CRL, add it to our store. + if crl = Puppet::SSL::CertificateRevocationList.find(CA_NAME) + @ssl_store.flags = OpenSSL::X509::V_FLAG_CRL_CHECK_ALL|OpenSSL::X509::V_FLAG_CRL_CHECK if Puppet.settings[:certificate_revocation] + @ssl_store.add_crl(crl.content) + end + return @ssl_store + end + @ssl_store + end + + # Attempt to retrieve a cert, if we don't already have one. 
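+ # With waitforcert disabled (time < 1) this gives up after a single
+ # attempt; otherwise it retries, sleeping `time` seconds between
+ # attempts, until a signed certificate can be retrieved.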
+ def wait_for_cert(time) + begin + return if certificate + generate + return if certificate + rescue SystemExit,NoMemoryError + raise + rescue Exception => detail + puts detail.backtrace if Puppet[:trace] + Puppet.err "Could not request certificate: #{detail}" + if time < 1 + puts "Exiting; failed to retrieve certificate and waitforcert is disabled" + exit(1) + else + sleep(time) + end + retry + end + + if time < 1 + puts "Exiting; no certificate found and waitforcert is disabled" + exit(1) + end + + while true + sleep time + begin + break if certificate + Puppet.notice "Did not receive certificate" + rescue StandardError => detail + puts detail.backtrace if Puppet[:trace] + Puppet.err "Could not request certificate: #{detail}" + end + end + end +end + +require 'puppet/ssl/certificate_authority' diff --git a/mcollective/lib/puppet/ssl/inventory.rb b/mcollective/lib/puppet/ssl/inventory.rb new file mode 100644 index 000000000..b2b402a53 --- /dev/null +++ b/mcollective/lib/puppet/ssl/inventory.rb @@ -0,0 +1,52 @@ +require 'puppet/ssl' +require 'puppet/ssl/certificate' + +# Keep track of all of our known certificates. +class Puppet::SSL::Inventory + attr_reader :path + + # Add a certificate to our inventory. + def add(cert) + cert = cert.content if cert.is_a?(Puppet::SSL::Certificate) + + # Create our file, if one does not already exist. + rebuild unless FileTest.exist?(@path) + + Puppet.settings.write(:cert_inventory, "a") do |f| + f.print format(cert) + end + end + + # Format our certificate for output. + def format(cert) + iso = '%Y-%m-%dT%H:%M:%S%Z' + "0x%04x %s %s %s\n" % [cert.serial, cert.not_before.strftime(iso), cert.not_after.strftime(iso), cert.subject] + end + + def initialize + @path = Puppet[:cert_inventory] + end + + # Rebuild the inventory from scratch. This should happen if + # the file is entirely missing or if it's somehow corrupted. + def rebuild + Puppet.notice "Rebuilding inventory file" + + Puppet.settings.write(:cert_inventory) do |f| + f.print "# Inventory of signed certificates\n# SERIAL NOT_BEFORE NOT_AFTER SUBJECT\n" + end + + Puppet::SSL::Certificate.search("*").each { |cert| add(cert) } + end + + # Find the serial number for a given certificate. + def serial(name) + return nil unless FileTest.exist?(@path) + + File.readlines(@path).each do |line| + next unless line =~ /^(\S+).+\/CN=#{name}$/ + + return Integer($1) + end + end +end diff --git a/mcollective/lib/puppet/ssl/key.rb b/mcollective/lib/puppet/ssl/key.rb new file mode 100644 index 000000000..0ddc9623c --- /dev/null +++ b/mcollective/lib/puppet/ssl/key.rb @@ -0,0 +1,56 @@ +require 'puppet/ssl/base' +require 'puppet/indirector' + +# Manage private and public keys as a pair. +class Puppet::SSL::Key < Puppet::SSL::Base + wraps OpenSSL::PKey::RSA + + extend Puppet::Indirector + indirects :key, :terminus_class => :file + + # Because of how the format handler class is included, this + # can't be in the base class. + def self.supported_formats + [:s] + end + + attr_accessor :password_file + + # Knows how to create keys with our system defaults. + def generate + Puppet.info "Creating a new SSL key for #{name}" + @content = OpenSSL::PKey::RSA.new(Puppet[:keylength].to_i) + end + + def initialize(name) + super + + if ca? + @password_file = Puppet[:capass] + else + @password_file = Puppet[:passfile] + end + end + + def password + return nil unless password_file and FileTest.exist?(password_file) + + ::File.read(password_file) + end + + # Optionally support specifying a password file. 
+ def read(path) + return super unless password_file + + #@content = wrapped_class.new(::File.read(path), password) + @content = wrapped_class.new(::File.read(path), password) + end + + def to_s + if pass = password + @content.export(OpenSSL::Cipher::DES.new(:EDE3, :CBC), pass) + else + return super + end + end +end diff --git a/mcollective/lib/puppet/status.rb b/mcollective/lib/puppet/status.rb new file mode 100644 index 000000000..eecd0e18c --- /dev/null +++ b/mcollective/lib/puppet/status.rb @@ -0,0 +1,28 @@ +require 'puppet/indirector' + +class Puppet::Status + extend Puppet::Indirector + indirects :status, :terminus_class => :local + + attr :status, true + + def initialize( status = nil ) + @status = status || {"is_alive" => true} + end + + def to_pson + @status.to_pson + end + + def self.from_pson( pson ) + self.new( pson ) + end + + def name + "status" + end + + def name=(name) + # NOOP + end +end diff --git a/mcollective/lib/puppet/transaction.rb b/mcollective/lib/puppet/transaction.rb new file mode 100644 index 000000000..d6d50d410 --- /dev/null +++ b/mcollective/lib/puppet/transaction.rb @@ -0,0 +1,323 @@ +# the class that actually walks our resource/property tree, collects the changes, +# and performs them + +require 'puppet' +require 'puppet/util/tagging' +require 'puppet/application' + +class Puppet::Transaction + require 'puppet/transaction/event' + require 'puppet/transaction/event_manager' + require 'puppet/transaction/resource_harness' + require 'puppet/resource/status' + + attr_accessor :component, :catalog, :ignoreschedules + attr_accessor :sorted_resources, :configurator + + # The report, once generated. + attr_reader :report + + # Routes and stores any events and subscriptions. + attr_reader :event_manager + + # Handles most of the actual interacting with resources + attr_reader :resource_harness + + include Puppet::Util + include Puppet::Util::Tagging + + # Wraps application run state check to flag need to interrupt processing + def stop_processing? + Puppet::Application.stop_requested? + end + + # Add some additional times for reporting + def add_times(hash) + hash.each do |name, num| + report.add_times(name, num) + end + end + + # Are there any failed resources in this transaction? + def any_failed? + report.resource_statuses.values.detect { |status| status.failed? } + end + + # Apply all changes for a resource + def apply(resource, ancestor = nil) + status = resource_harness.evaluate(resource) + add_resource_status(status) + event_manager.queue_events(ancestor || resource, status.events) unless status.failed? + rescue => detail + resource.err "Could not evaluate: #{detail}" + end + + # Find all of the changed resources. + def changed? + report.resource_statuses.values.find_all { |status| status.changed }.collect { |status| catalog.resource(status.resource) } + end + + # Copy an important relationships from the parent to the newly-generated + # child resource. + def make_parent_child_relationship(resource, children) + depthfirst = resource.depthfirst? + + children.each do |gen_child| + if depthfirst + edge = [gen_child, resource] + else + edge = [resource, gen_child] + end + relationship_graph.add_vertex(gen_child) + + unless relationship_graph.edge?(edge[1], edge[0]) + relationship_graph.add_edge(*edge) + else + resource.debug "Skipping automatic relationship to #{gen_child}" + end + end + end + + # See if the resource generates new resources at evaluation time. 
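+  # (Editorial sketch.) A recursive File resource is the classic user of this
+  # hook: something like
+  #
+  #   file { "/tmp/dir": ensure => directory, recurse => true }   # hypothetical resource
+  #
+  # causes eval_generate to return one child resource per file found under the
+  # directory, and those children are then applied by eval_resource below.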
+ def eval_generate(resource) + generate_additional_resources(resource, :eval_generate) + end + + # Evaluate a single resource. + def eval_resource(resource, ancestor = nil) + if skip?(resource) + resource_status(resource).skipped = true + else + eval_children_and_apply_resource(resource, ancestor) + end + + # Check to see if there are any events queued for this resource + event_manager.process_events(resource) + end + + def eval_children_and_apply_resource(resource, ancestor = nil) + resource_status(resource).scheduled = true + + # We need to generate first regardless, because the recursive + # actions sometimes change how the top resource is applied. + children = eval_generate(resource) + + if ! children.empty? and resource.depthfirst? + children.each do |child| + # The child will never be skipped when the parent isn't + eval_resource(child, ancestor || resource) + end + end + + # Perform the actual changes + apply(resource, ancestor) + + if ! children.empty? and ! resource.depthfirst? + children.each do |child| + eval_resource(child, ancestor || resource) + end + end + end + + # This method does all the actual work of running a transaction. It + # collects all of the changes, executes them, and responds to any + # necessary events. + def evaluate + prepare + + Puppet.info "Applying configuration version '#{catalog.version}'" if catalog.version + + @sorted_resources.each do |resource| + next if stop_processing? + if resource.is_a?(Puppet::Type::Component) + Puppet.warning "Somehow left a component in the relationship graph" + next + end + ret = nil + seconds = thinmark do + ret = eval_resource(resource) + end + + resource.info "valuated in %0.2f seconds" % seconds if Puppet[:evaltrace] and @catalog.host_config? + ret + end + + Puppet.debug "Finishing transaction #{object_id}" + end + + def events + event_manager.events + end + + def failed?(resource) + s = resource_status(resource) and s.failed? + end + + # Does this resource have any failed dependencies? + def failed_dependencies?(resource) + # First make sure there are no failed dependencies. To do this, + # we check for failures in any of the vertexes above us. It's not + # enough to check the immediate dependencies, which is why we use + # a tree from the reversed graph. + found_failed = false + relationship_graph.dependencies(resource).each do |dep| + next unless failed?(dep) + resource.notice "Dependency #{dep} has failures: #{resource_status(dep).failed}" + found_failed = true + end + + found_failed + end + + # A general method for recursively generating new resources from a + # resource. + def generate_additional_resources(resource, method) + return [] unless resource.respond_to?(method) + begin + made = resource.send(method) + rescue => detail + puts detail.backtrace if Puppet[:trace] + resource.err "Failed to generate additional resources using '#{method}': #{detail}" + end + return [] unless made + made = [made] unless made.is_a?(Array) + made.uniq.find_all do |res| + begin + res.tag(*resource.tags) + @catalog.add_resource(res) do |r| + r.finish + make_parent_child_relationship(resource, [r]) + + # Call 'generate' recursively + generate_additional_resources(r, method) + end + true + rescue Puppet::Resource::Catalog::DuplicateResourceError + res.info "Duplicate generated resource; skipping" + false + end + end + end + + # Collect any dynamically generated resources. This method is called + # before the transaction starts. + def generate + list = @catalog.vertices + newlist = [] + while ! list.empty? 
+ list.each do |resource| + newlist += generate_additional_resources(resource, :generate) + end + list = newlist + newlist = [] + end + end + + # Should we ignore tags? + def ignore_tags? + ! (@catalog.host_config? or Puppet[:name] == "puppet") + end + + # this should only be called by a Puppet::Type::Component resource now + # and it should only receive an array + def initialize(catalog, report = nil) + @catalog = catalog + @report = report || Report.new("apply", catalog.version) + @event_manager = Puppet::Transaction::EventManager.new(self) + @resource_harness = Puppet::Transaction::ResourceHarness.new(self) + end + + # Prefetch any providers that support it. We don't support prefetching + # types, just providers. + def prefetch + prefetchers = {} + @catalog.vertices.each do |resource| + if provider = resource.provider and provider.class.respond_to?(:prefetch) + prefetchers[provider.class] ||= {} + prefetchers[provider.class][resource.name] = resource + end + end + + # Now call prefetch, passing in the resources so that the provider instances can be replaced. + prefetchers.each do |provider, resources| + Puppet.debug "Prefetching #{provider.name} resources for #{provider.resource_type.name}" + begin + provider.prefetch(resources) + rescue => detail + puts detail.backtrace if Puppet[:trace] + Puppet.err "Could not prefetch #{provider.resource_type.name} provider '#{provider.name}': #{detail}" + end + end + end + + # Prepare to evaluate the resources in a transaction. + def prepare + # Now add any dynamically generated resources + generate + + # Then prefetch. It's important that we generate and then prefetch, + # so that any generated resources also get prefetched. + prefetch + + # This will throw an error if there are cycles in the graph. + @sorted_resources = relationship_graph.topsort + end + + def relationship_graph + catalog.relationship_graph + end + + def add_resource_status(status) + report.add_resource_status status + end + + def resource_status(resource) + report.resource_statuses[resource.to_s] || add_resource_status(Puppet::Resource::Status.new(resource)) + end + + # Is the resource currently scheduled? + def scheduled?(resource) + self.ignoreschedules or resource_harness.scheduled?(resource_status(resource), resource) + end + + # Should this resource be skipped? + def skip?(resource) + if missing_tags?(resource) + resource.debug "Not tagged with #{tags.join(", ")}" + elsif ! scheduled?(resource) + resource.debug "Not scheduled" + elsif failed_dependencies?(resource) + resource.warning "Skipping because of failed dependencies" + elsif resource.virtual? + resource.debug "Skipping because virtual" + else + return false + end + true + end + + # The tags we should be checking. + def tags + self.tags = Puppet[:tags] unless defined?(@tags) + + super + end + + def handle_qualified_tags( qualified ) + # The default behavior of Puppet::Util::Tagging is + # to split qualified tags into parts. That would cause + # qualified tags to match too broadly here. + return + end + + # Is this resource tagged appropriately? + def missing_tags?(resource) + return false if ignore_tags? + return false if tags.empty? 
+ + not resource.tagged?(*tags) + end +end + +require 'puppet/transaction/report' + diff --git a/mcollective/lib/puppet/transaction/event.rb b/mcollective/lib/puppet/transaction/event.rb new file mode 100644 index 000000000..cd695cff8 --- /dev/null +++ b/mcollective/lib/puppet/transaction/event.rb @@ -0,0 +1,66 @@ +require 'puppet/transaction' +require 'puppet/util/tagging' +require 'puppet/util/logging' + +# A simple struct for storing what happens on the system. +class Puppet::Transaction::Event + include Puppet::Util::Tagging + include Puppet::Util::Logging + + ATTRIBUTES = [:name, :resource, :property, :previous_value, :desired_value, :historical_value, :status, :message, :file, :line, :source_description, :audited] + YAML_ATTRIBUTES = %w{@audited @property @previous_value @desired_value @historical_value @message @name @status @time} + attr_accessor *ATTRIBUTES + attr_writer :tags + attr_accessor :time + attr_reader :default_log_level + + EVENT_STATUSES = %w{noop success failure audit} + + def initialize(options = {}) + @audited = false + options.each { |attr, value| send(attr.to_s + "=", value) } + + @time = Time.now + end + + def property=(prop) + @property = prop.to_s + end + + def resource=(res) + if res.respond_to?(:[]) and level = res[:loglevel] + @default_log_level = level + end + @resource = res.to_s + end + + def send_log + super(log_level, message) + end + + def status=(value) + raise ArgumentError, "Event status can only be #{EVENT_STATUSES.join(', ')}" unless EVENT_STATUSES.include?(value) + @status = value + end + + def to_s + message + end + + def to_yaml_properties + (YAML_ATTRIBUTES & instance_variables).sort + end + + private + + # If it's a failure, use 'err', else use either the resource's log level (if available) + # or 'notice'. + def log_level + status == "failure" ? :err : (@default_log_level || :notice) + end + + # Used by the Logging module + def log_source + source_description || property || resource + end +end diff --git a/mcollective/lib/puppet/transaction/event_manager.rb b/mcollective/lib/puppet/transaction/event_manager.rb new file mode 100644 index 000000000..3ebb0a9d3 --- /dev/null +++ b/mcollective/lib/puppet/transaction/event_manager.rb @@ -0,0 +1,99 @@ +require 'puppet/transaction' + +class Puppet::Transaction::EventManager + attr_reader :transaction, :events + + def initialize(transaction) + @transaction = transaction + @event_queues = {} + @events = [] + end + + def relationship_graph + transaction.relationship_graph + end + + # Respond to any queued events for this resource. + def process_events(resource) + restarted = false + queued_events(resource) do |callback, events| + r = process_callback(resource, callback, events) + restarted ||= r + end + + if restarted + queue_events(resource, [resource.event(:name => :restarted, :status => "success")]) + + transaction.resource_status(resource).restarted = true + end + end + + # Queue events for other resources to respond to. All of these events have + # to be from the same resource. + def queue_events(resource, events) + @events += events + + # Do some basic normalization so we're not doing so many + # graph queries for large sets of events. + events.inject({}) do |collection, event| + collection[event.name] ||= [] + collection[event.name] << event + collection + end.collect do |name, list| + # It doesn't matter which event we use - they all have the same source + # and name here. + event = list[0] + + # Collect the targets of any subscriptions to those events. 
We pass + # the parent resource in so it will override the source in the events, + # since eval_generated children can't have direct relationships. + relationship_graph.matching_edges(event, resource).each do |edge| + next unless method = edge.callback + next unless edge.target.respond_to?(method) + + queue_events_for_resource(resource, edge.target, method, list) + end + + queue_events_for_resource(resource, resource, :refresh, [event]) if resource.self_refresh? and ! resource.deleting? + end + end + + def queue_events_for_resource(source, target, callback, events) + source.info "Scheduling #{callback} of #{target}" + + @event_queues[target] ||= {} + @event_queues[target][callback] ||= [] + @event_queues[target][callback] += events + end + + def queued_events(resource) + return unless callbacks = @event_queues[resource] + callbacks.each do |callback, events| + yield callback, events + end + end + + private + + def process_callback(resource, callback, events) + process_noop_events(resource, callback, events) and return false unless events.detect { |e| e.status != "noop" } + resource.send(callback) + + resource.notice "Triggered '#{callback}' from #{events.length} events" + return true + rescue => detail + resource.err "Failed to call #{callback}: #{detail}" + + transaction.resource_status(resource).failed_to_restart = true + puts detail.backtrace if Puppet[:trace] + return false + end + + def process_noop_events(resource, callback, events) + resource.notice "Would have triggered '#{callback}' from #{events.length} events" + + # And then add an event for it. + queue_events(resource, [resource.event(:status => "noop", :name => :noop_restart)]) + true # so the 'and if' works + end +end diff --git a/mcollective/lib/puppet/transaction/report.rb b/mcollective/lib/puppet/transaction/report.rb new file mode 100644 index 000000000..77b9da833 --- /dev/null +++ b/mcollective/lib/puppet/transaction/report.rb @@ -0,0 +1,187 @@ +require 'puppet' +require 'puppet/indirector' + +# A class for reporting what happens on each client. Reports consist of +# two types of data: Logs and Metrics. Logs are the output that each +# change produces, and Metrics are all of the numerical data involved +# in the transaction. 
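+#
+# A rough usage sketch (editorial, not upstream documentation; names are
+# placeholders):
+#
+#   report = Puppet::Transaction::Report.new("apply", catalog.version)
+#   report << log_message                 # accumulate log entries
+#   report.add_resource_status(status)    # one Puppet::Resource::Status per resource
+#   report.finalize_report                # derive metrics and the overall status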
+class Puppet::Transaction::Report + extend Puppet::Indirector + + indirects :report, :terminus_class => :processor + + attr_accessor :configuration_version, :host + attr_reader :resource_statuses, :logs, :metrics, :time, :kind, :status + + # This is necessary since Marshall doesn't know how to + # dump hash with default proc (see below @records) + def self.default_format + :yaml + end + + def <<(msg) + @logs << msg + self + end + + def add_times(name, value) + @external_times[name] = value + end + + def add_metric(name, hash) + metric = Puppet::Util::Metric.new(name) + + hash.each do |name, value| + metric.newvalue(name, value) + end + + @metrics[metric.name] = metric + metric + end + + def add_resource_status(status) + @resource_statuses[status.resource] = status + end + + def compute_status(resource_metrics, change_metric) + if (resource_metrics["failed"] || 0) > 0 + 'failed' + elsif change_metric > 0 + 'changed' + else + 'unchanged' + end + end + + def finalize_report + resource_metrics = add_metric(:resources, calculate_resource_metrics) + add_metric(:time, calculate_time_metrics) + change_metric = calculate_change_metric + add_metric(:changes, {"total" => change_metric}) + add_metric(:events, calculate_event_metrics) + @status = compute_status(resource_metrics, change_metric) + end + + def initialize(kind, configuration_version=nil) + @metrics = {} + @logs = [] + @resource_statuses = {} + @external_times ||= {} + @host = Puppet[:node_name_value] + @time = Time.now + @kind = kind + @report_format = 2 + @puppet_version = Puppet.version + @configuration_version = configuration_version + @status = 'failed' # assume failed until the report is finalized + end + + def name + host + end + + # Provide a human readable textual summary of this report. + def summary + report = raw_summary + + ret = "" + report.keys.sort { |a,b| a.to_s <=> b.to_s }.each do |key| + ret += "#{Puppet::Util::Metric.labelize(key)}:\n" + + report[key].keys.sort { |a,b| + # sort by label + if a == :total + 1 + elsif b == :total + -1 + else + report[key][a].to_s <=> report[key][b].to_s + end + }.each do |label| + value = report[key][label] + next if value == 0 + value = "%0.2f" % value if value.is_a?(Float) + ret += " %15s %s\n" % [Puppet::Util::Metric.labelize(label) + ":", value] + end + end + ret + end + + # Provide a raw hash summary of this report. + def raw_summary + report = {} + + @metrics.each do |name, metric| + key = metric.name.to_s + report[key] = {} + metric.values.each do |name, label, value| + report[key][name.to_s] = value + end + report[key]["total"] = 0 unless key == "time" or report[key].include?("total") + end + (report["time"] ||= {})["last_run"] = Time.now.tv_sec + report + end + + # Based on the contents of this report's metrics, compute a single number + # that represents the report. The resulting number is a bitmask where + # individual bits represent the presence of different metrics. 
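+  # Worked example (editorial): with the bit values used below, 0 means no
+  # changes and no failures, 2 means changes only, 4 means failures only, and
+  # 6 means both changes and failures occurred.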
+ def exit_status + status = 0 + status |= 2 if @metrics["changes"]["total"] > 0 + status |= 4 if @metrics["resources"]["failed"] > 0 + status + end + + def to_yaml_properties + (instance_variables - ["@external_times"]).sort + end + + private + + def calculate_change_metric + resource_statuses.map { |name, status| status.change_count || 0 }.inject(0) { |a,b| a+b } + end + + def calculate_event_metrics + metrics = Hash.new(0) + metrics["total"] = 0 + resource_statuses.each do |name, status| + metrics["total"] += status.events.length + status.events.each do |event| + metrics[event.status] += 1 + end + end + + metrics + end + + def calculate_resource_metrics + metrics = Hash.new(0) + metrics["total"] = resource_statuses.length + + resource_statuses.each do |name, status| + Puppet::Resource::Status::STATES.each do |state| + metrics[state.to_s] += 1 if status.send(state) + end + end + + metrics + end + + def calculate_time_metrics + metrics = Hash.new(0) + resource_statuses.each do |name, status| + type = Puppet::Resource.new(name).type + metrics[type.to_s.downcase] += status.evaluation_time if status.evaluation_time + end + + @external_times.each do |name, value| + metrics[name.to_s.downcase] = value + end + + metrics["total"] = metrics.values.inject(0) { |a,b| a+b } + + metrics + end +end diff --git a/mcollective/lib/puppet/transaction/resource_harness.rb b/mcollective/lib/puppet/transaction/resource_harness.rb new file mode 100644 index 000000000..4a3d35e0d --- /dev/null +++ b/mcollective/lib/puppet/transaction/resource_harness.rb @@ -0,0 +1,178 @@ +require 'puppet/resource/status' + +class Puppet::Transaction::ResourceHarness + extend Forwardable + def_delegators :@transaction, :relationship_graph + + attr_reader :transaction + + def allow_changes?(resource) + if resource.purging? and resource.deleting? and deps = relationship_graph.dependents(resource) \ + and ! deps.empty? and deps.detect { |d| ! d.deleting? } + deplabel = deps.collect { |r| r.ref }.join(",") + plurality = deps.length > 1 ? "":"s" + resource.warning "#{deplabel} still depend#{plurality} on me -- not purging" + false + else + true + end + end + + # Used mostly for scheduling and auditing at this point. + def cached(resource, name) + Puppet::Util::Storage.cache(resource)[name] + end + + # Used mostly for scheduling and auditing at this point. + def cache(resource, name, value) + Puppet::Util::Storage.cache(resource)[name] = value + end + + def perform_changes(resource) + current = resource.retrieve_resource + + cache resource, :checked, Time.now + + return [] if ! allow_changes?(resource) + + current_values = current.to_hash + historical_values = Puppet::Util::Storage.cache(resource).dup + desired_values = {} + resource.properties.each do |property| + desired_values[property.name] = property.should + end + audited_params = (resource[:audit] || []).map { |p| p.to_sym } + synced_params = [] + + # Record the current state in state.yml. 
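+    # (Sketch.) For a resource declared with, say, `audit => [:owner]` this
+    # stores the live value so the next run can detect drift, roughly:
+    #   cache(resource, :owner, current_values[:owner])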
+ audited_params.each do |param| + cache(resource, param, current_values[param]) + end + + # Update the machine state & create logs/events + events = [] + ensure_param = resource.parameter(:ensure) + if desired_values[:ensure] && !ensure_param.safe_insync?(current_values[:ensure]) + events << apply_parameter(ensure_param, current_values[:ensure], audited_params.include?(:ensure), historical_values[:ensure]) + synced_params << :ensure + elsif current_values[:ensure] != :absent + work_order = resource.properties # Note: only the resource knows what order to apply changes in + work_order.each do |param| + if desired_values[param.name] && !param.safe_insync?(current_values[param.name]) + events << apply_parameter(param, current_values[param.name], audited_params.include?(param.name), historical_values[param.name]) + synced_params << param.name + end + end + end + + # Add more events to capture audit results + audited_params.each do |param_name| + if historical_values.include?(param_name) + if historical_values[param_name] != current_values[param_name] && !synced_params.include?(param_name) + event = create_change_event(resource.parameter(param_name), current_values[param_name], true, historical_values[param_name]) + event.send_log + events << event + end + else + resource.property(param_name).notice "audit change: newly-recorded value #{current_values[param_name]}" + end + end + + events + end + + def create_change_event(property, current_value, do_audit, historical_value) + event = property.event + event.previous_value = current_value + event.desired_value = property.should + event.historical_value = historical_value + + if do_audit + event.audited = true + event.status = "audit" + if historical_value != current_value + event.message = "audit change: previously recorded value #{property.is_to_s(historical_value)} has been changed to #{property.is_to_s(current_value)}" + end + end + + event + end + + def apply_parameter(property, current_value, do_audit, historical_value) + event = create_change_event(property, current_value, do_audit, historical_value) + + if do_audit && historical_value && historical_value != current_value + brief_audit_message = " (previously recorded value was #{property.is_to_s(historical_value)})" + else + brief_audit_message = "" + end + + if property.noop + event.message = "current_value #{property.is_to_s(current_value)}, should be #{property.should_to_s(property.should)} (noop)#{brief_audit_message}" + event.status = "noop" + else + property.sync + event.message = [ property.change_to_s(current_value, property.should), brief_audit_message ].join + event.status = "success" + end + event + rescue => detail + puts detail.backtrace if Puppet[:trace] + event.status = "failure" + + event.message = "change from #{property.is_to_s(current_value)} to #{property.should_to_s(property.should)} failed: #{detail}" + event + ensure + event.send_log + end + + def evaluate(resource) + start = Time.now + status = Puppet::Resource::Status.new(resource) + + perform_changes(resource).each do |event| + status << event + end + + if status.changed? && ! resource.noop? 
+ cache(resource, :synced, Time.now) + resource.flush if resource.respond_to?(:flush) + end + + return status + rescue => detail + resource.fail "Could not create resource status: #{detail}" unless status + puts detail.backtrace if Puppet[:trace] + resource.err "Could not evaluate: #{detail}" + status.failed = true + return status + ensure + (status.evaluation_time = Time.now - start) if status + end + + def initialize(transaction) + @transaction = transaction + end + + def scheduled?(status, resource) + return true if Puppet[:ignoreschedules] + return true unless schedule = schedule(resource) + + # We use 'checked' here instead of 'synced' because otherwise we'll + # end up checking most resources most times, because they will generally + # have been synced a long time ago (e.g., a file only gets updated + # once a month on the server and its schedule is daily; the last sync time + # will have been a month ago, so we'd end up checking every run). + schedule.match?(cached(resource, :checked).to_i) + end + + def schedule(resource) + unless resource.catalog + resource.warning "Cannot schedule without a schedule-containing catalog" + return nil + end + + return nil unless name = resource[:schedule] + resource.catalog.resource(:schedule, name) || resource.fail("Could not find schedule #{name}") + end +end diff --git a/mcollective/lib/puppet/transportable.rb b/mcollective/lib/puppet/transportable.rb new file mode 100644 index 000000000..f7b839c14 --- /dev/null +++ b/mcollective/lib/puppet/transportable.rb @@ -0,0 +1,248 @@ +require 'puppet' +require 'yaml' + +module Puppet + # The transportable objects themselves. Basically just a hash with some + # metadata and a few extra methods. I used to have the object actually + # be a subclass of Hash, but I could never correctly dump them using + # YAML. + class TransObject + include Enumerable + attr_accessor :type, :name, :file, :line, :catalog + + attr_writer :tags + + %w{has_key? include? length delete empty? << [] []=}.each { |method| + define_method(method) do |*args| + @params.send(method, *args) + end + } + + def each + @params.each { |p,v| yield p, v } + end + + def initialize(name,type) + @type = type.to_s.downcase + @name = name + @params = {} + @tags = [] + end + + def longname + [@type,@name].join('--') + end + + def ref + @ref ||= Puppet::Resource.new(@type, @name) + @ref.to_s + end + + def tags + @tags + end + + # Convert a defined type into a component. + def to_component + trans = TransObject.new(ref, :component) + @params.each { |param,value| + next unless Puppet::Type::Component.valid_parameter?(param) + Puppet.debug "Defining #{param} on #{ref}" + trans[param] = value + } + trans.catalog = self.catalog + Puppet::Type::Component.create(trans) + end + + def to_hash + @params.dup + end + + def to_s + "#{@type}(#{@name}) => #{super}" + end + + def to_manifest + "%s { '%s':\n%s\n}" % [self.type.to_s, self.name, + @params.collect { |p, v| + if v.is_a? Array + " #{p} => [\'#{v.join("','")}\']" + else + " #{p} => \'#{v}\'" + end + }.join(",\n") + ] + end + + # Create a normalized resource from our TransObject. + def to_resource + result = Puppet::Resource.new(type, name, :parameters => @params.dup) + result.tag(*tags) + + result + end + + def to_yaml_properties + instance_variables.reject { |v| %w{@ref}.include?(v) } + end + + def to_ref + ref + end + + def to_ral + to_resource.to_ral + end + end + + # Just a linear container for objects. 
Behaves mostly like an array, except + # that YAML will correctly dump them even with their instance variables. + class TransBucket + include Enumerable + + attr_accessor :name, :type, :file, :line, :classes, :keyword, :top, :catalog + + %w{delete shift include? length empty? << []}.each { |method| + define_method(method) do |*args| + #Puppet.warning "Calling #{method} with #{args.inspect}" + @children.send(method, *args) + #Puppet.warning @params.inspect + end + } + + # Recursively yield everything. + def delve(&block) + @children.each do |obj| + block.call(obj) + if obj.is_a? self.class + obj.delve(&block) + else + obj + end + end + end + + def each + @children.each { |c| yield c } + end + + # Turn our heirarchy into a flat list + def flatten + @children.collect do |obj| + if obj.is_a? Puppet::TransBucket + obj.flatten + else + obj + end + end.flatten + end + + def initialize(children = []) + @children = children + end + + def push(*args) + args.each { |arg| + case arg + when Puppet::TransBucket, Puppet::TransObject + # nada + else + raise Puppet::DevError, + "TransBuckets cannot handle objects of type #{arg.class}" + end + } + @children += args + end + + # Convert to a parseable manifest + def to_manifest + unless self.top + raise Puppet::DevError, "No keyword; cannot convert to manifest" unless @keyword + end + + str = "#{@keyword} #{@name} {\n%s\n}" + str % @children.collect { |child| + child.to_manifest + }.collect { |str| + if self.top + str + else + str.gsub(/^/, " ") # indent everything once + end + }.join("\n\n") # and throw in a blank line + end + + def to_yaml_properties + instance_variables + end + + # Create a resource graph from our structure. + def to_catalog(clear_on_failure = true) + catalog = Puppet::Resource::Catalog.new(Facter.value("hostname")) + + # This should really use the 'delve' method, but this + # whole class is going away relatively soon, hopefully, + # so it's not worth it. + delver = proc do |obj| + obj.catalog = catalog + unless container = catalog.resource(obj.to_ref) + container = obj.to_ral + catalog.add_resource container + end + obj.each do |child| + child.catalog = catalog + unless resource = catalog.resource(child.to_ref) + resource = child.to_ral + catalog.add_resource resource + end + + catalog.add_edge(container, resource) + delver.call(child) if child.is_a?(self.class) + end + end + + begin + delver.call(self) + catalog.finalize + rescue => detail + # This is important until we lose the global resource references. + catalog.clear if (clear_on_failure) + raise + end + + catalog + end + + def to_ref + unless defined?(@ref) + if self.type and self.name + @ref = Puppet::Resource.new(self.type, self.name) + elsif self.type and ! self.name # This is old-school node types + @ref = Puppet::Resource.new("node", self.type) + elsif ! self.type and self.name + @ref = Puppet::Resource.new("component", self.name) + else + @ref = nil + end + end + @ref.to_s if @ref + end + + def to_ral + to_resource.to_ral + end + + # Create a normalized resource from our TransObject. + def to_resource + params = defined?(@parameters) ? 
@parameters.dup : {} + Puppet::Resource.new(type, name, :parameters => params) + end + + def param(param,value) + @parameters ||= {} + @parameters[param] = value + end + + end +end + diff --git a/mcollective/lib/puppet/type.rb b/mcollective/lib/puppet/type.rb new file mode 100644 index 000000000..1933097da --- /dev/null +++ b/mcollective/lib/puppet/type.rb @@ -0,0 +1,1899 @@ +require 'puppet' +require 'puppet/util/log' +require 'puppet/util/metric' +require 'puppet/property' +require 'puppet/parameter' +require 'puppet/util' +require 'puppet/util/autoload' +require 'puppet/metatype/manager' +require 'puppet/util/errors' +require 'puppet/util/log_paths' +require 'puppet/util/logging' +require 'puppet/util/cacher' +require 'puppet/file_collection/lookup' +require 'puppet/util/tagging' + +# see the bottom of the file for the rest of the inclusions + +module Puppet +class Type + include Puppet::Util + include Puppet::Util::Errors + include Puppet::Util::LogPaths + include Puppet::Util::Logging + include Puppet::Util::Cacher + include Puppet::FileCollection::Lookup + include Puppet::Util::Tagging + + ############################### + # Code related to resource type attributes. + class << self + include Puppet::Util::ClassGen + include Puppet::Util::Warnings + attr_reader :properties + end + + def self.states + warnonce "The states method is deprecated; use properties" + properties + end + + # All parameters, in the appropriate order. The key_attributes come first, then + # the provider, then the properties, and finally the params and metaparams + # in the order they were specified in the files. + def self.allattrs + key_attributes | (parameters & [:provider]) | properties.collect { |property| property.name } | parameters | metaparams + end + + # Retrieve an attribute alias, if there is one. + def self.attr_alias(param) + @attr_aliases[symbolize(param)] + end + + # Create an alias to an existing attribute. This will cause the aliased + # attribute to be valid when setting and retrieving values on the instance. + def self.set_attr_alias(hash) + hash.each do |new, old| + @attr_aliases[symbolize(new)] = symbolize(old) + end + end + + # Find the class associated with any given attribute. + def self.attrclass(name) + @attrclasses ||= {} + + # We cache the value, since this method gets called such a huge number + # of times (as in, hundreds of thousands in a given run). + unless @attrclasses.include?(name) + @attrclasses[name] = case self.attrtype(name) + when :property; @validproperties[name] + when :meta; @@metaparamhash[name] + when :param; @paramhash[name] + end + end + @attrclasses[name] + end + + # What type of parameter are we dealing with? Cache the results, because + # this method gets called so many times. + def self.attrtype(attr) + @attrtypes ||= {} + unless @attrtypes.include?(attr) + @attrtypes[attr] = case + when @validproperties.include?(attr); :property + when @paramhash.include?(attr); :param + when @@metaparamhash.include?(attr); :meta + end + end + + @attrtypes[attr] + end + + def self.eachmetaparam + @@metaparams.each { |p| yield p.name } + end + + # Create the 'ensure' class. This is a separate method so other types + # can easily call it and create their own 'ensure' values. + def self.ensurable(&block) + if block_given? + self.newproperty(:ensure, :parent => Puppet::Property::Ensure, &block) + else + self.newproperty(:ensure, :parent => Puppet::Property::Ensure) do + self.defaultvalues + end + end + end + + # Should we add the 'ensure' property to this class? 
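+  # (Editorial sketch.) A type opts in with something like
+  #
+  #   Puppet::Type.newtype(:thing) do   # hypothetical type
+  #     ensurable                       # adds the standard :ensure property
+  #   end
+  #
+  # while ensurable? below keys off the presence of the exists?/create/destroy
+  # trio of public methods.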
+ def self.ensurable? + # If the class has all three of these methods defined, then it's + # ensurable. + ens = [:exists?, :create, :destroy].inject { |set, method| + set &&= self.public_method_defined?(method) + } + + ens + end + + # Deal with any options passed into parameters. + def self.handle_param_options(name, options) + # If it's a boolean parameter, create a method to test the value easily + if options[:boolean] + define_method(name.to_s + "?") do + val = self[name] + if val == :true or val == true + return true + end + end + end + end + + # Is the parameter in question a meta-parameter? + def self.metaparam?(param) + @@metaparamhash.include?(symbolize(param)) + end + + # Find the metaparameter class associated with a given metaparameter name. + def self.metaparamclass(name) + @@metaparamhash[symbolize(name)] + end + + def self.metaparams + @@metaparams.collect { |param| param.name } + end + + def self.metaparamdoc(metaparam) + @@metaparamhash[metaparam].doc + end + + # Create a new metaparam. Requires a block and a name, stores it in the + # @parameters array, and does some basic checking on it. + def self.newmetaparam(name, options = {}, &block) + @@metaparams ||= [] + @@metaparamhash ||= {} + name = symbolize(name) + + + param = genclass( + name, + :parent => options[:parent] || Puppet::Parameter, + :prefix => "MetaParam", + :hash => @@metaparamhash, + :array => @@metaparams, + :attributes => options[:attributes], + + &block + ) + + # Grr. + param.required_features = options[:required_features] if options[:required_features] + + handle_param_options(name, options) + + param.metaparam = true + + param + end + + def self.key_attribute_parameters + @key_attribute_parameters ||= ( + params = @parameters.find_all { |param| + param.isnamevar? or param.name == :name + } + ) + end + + def self.key_attributes + key_attribute_parameters.collect { |p| p.name } + end + + def self.title_patterns + case key_attributes.length + when 0; [] + when 1; + identity = lambda {|x| x} + [ [ /(.*)/m, [ [key_attributes.first, identity ] ] ] ] + else + raise Puppet::DevError,"you must specify title patterns when there are two or more key attributes" + end + end + + def uniqueness_key + self.class.key_attributes.sort_by { |attribute_name| attribute_name.to_s }.map{ |attribute_name| self[attribute_name] } + end + + # Create a new parameter. Requires a block and a name, stores it in the + # @parameters array, and does some basic checking on it. + def self.newparam(name, options = {}, &block) + options[:attributes] ||= {} + + param = genclass( + name, + :parent => options[:parent] || Puppet::Parameter, + :attributes => options[:attributes], + :block => block, + :prefix => "Parameter", + :array => @parameters, + + :hash => @paramhash + ) + + handle_param_options(name, options) + + # Grr. + param.required_features = options[:required_features] if options[:required_features] + + param.isnamevar if options[:namevar] + + param + end + + def self.newstate(name, options = {}, &block) + Puppet.warning "newstate() has been deprecrated; use newproperty(#{name})" + newproperty(name, options, &block) + end + + # Create a new property. The first parameter must be the name of the property; + # this is how users will refer to the property when creating new instances. + # The second parameter is a hash of options; the options are: + # * :parent: The parent class for the property. Defaults to Puppet::Property. 
+ # * :retrieve: The method to call on the provider or @parent object (if + # the provider is not set) to retrieve the current value. + def self.newproperty(name, options = {}, &block) + name = symbolize(name) + + # This is here for types that might still have the old method of defining + # a parent class. + unless options.is_a? Hash + raise Puppet::DevError, + "Options must be a hash, not #{options.inspect}" + end + + raise Puppet::DevError, "Class #{self.name} already has a property named #{name}" if @validproperties.include?(name) + + if parent = options[:parent] + options.delete(:parent) + else + parent = Puppet::Property + end + + # We have to create our own, new block here because we want to define + # an initial :retrieve method, if told to, and then eval the passed + # block if available. + prop = genclass(name, :parent => parent, :hash => @validproperties, :attributes => options) do + # If they've passed a retrieve method, then override the retrieve + # method on the class. + if options[:retrieve] + define_method(:retrieve) do + provider.send(options[:retrieve]) + end + end + + class_eval(&block) if block + end + + # If it's the 'ensure' property, always put it first. + if name == :ensure + @properties.unshift prop + else + @properties << prop + end + + prop + end + + def self.paramdoc(param) + @paramhash[param].doc + end + + # Return the parameter names + def self.parameters + return [] unless defined?(@parameters) + @parameters.collect { |klass| klass.name } + end + + # Find the parameter class associated with a given parameter name. + def self.paramclass(name) + @paramhash[name] + end + + # Return the property class associated with a name + def self.propertybyname(name) + @validproperties[name] + end + + def self.validattr?(name) + name = symbolize(name) + return true if name == :name + @validattrs ||= {} + + unless @validattrs.include?(name) + @validattrs[name] = !!(self.validproperty?(name) or self.validparameter?(name) or self.metaparam?(name)) + end + + @validattrs[name] + end + + # does the name reflect a valid property? + def self.validproperty?(name) + name = symbolize(name) + @validproperties.include?(name) && @validproperties[name] + end + + # Return the list of validproperties + def self.validproperties + return {} unless defined?(@parameters) + + @validproperties.keys + end + + # does the name reflect a valid parameter? + def self.validparameter?(name) + raise Puppet::DevError, "Class #{self} has not defined parameters" unless defined?(@parameters) + !!(@paramhash.include?(name) or @@metaparamhash.include?(name)) + end + + # This is a forward-compatibility method - it's the validity interface we'll use in Puppet::Resource. + def self.valid_parameter?(name) + validattr?(name) + end + + # Return either the attribute alias or the attribute. + def attr_alias(name) + name = symbolize(name) + if synonym = self.class.attr_alias(name) + return synonym + else + return name + end + end + + # Are we deleting this resource? + def deleting? + obj = @parameters[:ensure] and obj.should == :absent + end + + # Create a new property if it is valid but doesn't exist + # Returns: true if a new parameter was added, false otherwise + def add_property_parameter(prop_name) + if self.class.validproperty?(prop_name) && !@parameters[prop_name] + self.newattr(prop_name) + return true + end + false + end + + # + # The name_var is the key_attribute in the case that there is only one. 
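+  # For example (editorial note), a type whose single key attribute is :path
+  # has name_var == :path, which is what [] and []= below use when translating
+  # lookups of :name.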
+ # + def name_var + key_attributes = self.class.key_attributes + (key_attributes.length == 1) && key_attributes.first + end + + # abstract accessing parameters and properties, and normalize + # access to always be symbols, not strings + # This returns a value, not an object. It returns the 'is' + # value, but you can also specifically return 'is' and 'should' + # values using 'object.is(:property)' or 'object.should(:property)'. + def [](name) + name = attr_alias(name) + + fail("Invalid parameter #{name}(#{name.inspect})") unless self.class.validattr?(name) + + if name == :name && nv = name_var + name = nv + end + + if obj = @parameters[name] + # Note that if this is a property, then the value is the "should" value, + # not the current value. + obj.value + else + return nil + end + end + + # Abstract setting parameters and properties, and normalize + # access to always be symbols, not strings. This sets the 'should' + # value on properties, and otherwise just sets the appropriate parameter. + def []=(name,value) + name = attr_alias(name) + + fail("Invalid parameter #{name}") unless self.class.validattr?(name) + + if name == :name && nv = name_var + name = nv + end + raise Puppet::Error.new("Got nil value for #{name}") if value.nil? + + property = self.newattr(name) + + if property + begin + # make sure the parameter doesn't have any errors + property.value = value + rescue => detail + error = Puppet::Error.new("Parameter #{name} failed: #{detail}") + error.set_backtrace(detail.backtrace) + raise error + end + end + + nil + end + + # remove a property from the object; useful in testing or in cleanup + # when an error has been encountered + def delete(attr) + attr = symbolize(attr) + if @parameters.has_key?(attr) + @parameters.delete(attr) + else + raise Puppet::DevError.new("Undefined attribute '#{attr}' in #{self}") + end + end + + # iterate across the existing properties + def eachproperty + # properties is a private method + properties.each { |property| + yield property + } + end + + # Create a transaction event. Called by Transaction or by + # a property. + def event(options = {}) + Puppet::Transaction::Event.new({:resource => self, :file => file, :line => line, :tags => tags}.merge(options)) + end + + # Let the catalog determine whether a given cached value is + # still valid or has expired. + def expirer + catalog + end + + # retrieve the 'should' value for a specified property + def should(name) + name = attr_alias(name) + (prop = @parameters[name] and prop.is_a?(Puppet::Property)) ? prop.should : nil + end + + # Create the actual attribute instance. Requires either the attribute + # name or class as the first argument, then an optional hash of + # attributes to set during initialization. + def newattr(name) + if name.is_a?(Class) + klass = name + name = klass.name + end + + unless klass = self.class.attrclass(name) + raise Puppet::Error, "Resource type #{self.class.name} does not support parameter #{name}" + end + + if provider and ! provider.class.supports_parameter?(klass) + missing = klass.required_features.find_all { |f| ! 
provider.class.feature?(f) } + info "Provider %s does not support features %s; not managing attribute %s" % [provider.class.name, missing.join(", "), name] + return nil + end + + return @parameters[name] if @parameters.include?(name) + + @parameters[name] = klass.new(:resource => self) + end + + # return the value of a parameter + def parameter(name) + @parameters[name.to_sym] + end + + def parameters + @parameters.dup + end + + # Is the named property defined? + def propertydefined?(name) + name = name.intern unless name.is_a? Symbol + @parameters.include?(name) + end + + # Return an actual property instance by name; to return the value, use 'resource[param]' + # LAK:NOTE(20081028) Since the 'parameter' method is now a superset of this method, + # this one should probably go away at some point. + def property(name) + (obj = @parameters[symbolize(name)] and obj.is_a?(Puppet::Property)) ? obj : nil + end + + # For any parameters or properties that have defaults and have not yet been + # set, set them now. This method can be handed a list of attributes, + # and if so it will only set defaults for those attributes. + def set_default(attr) + return unless klass = self.class.attrclass(attr) + return unless klass.method_defined?(:default) + return if @parameters.include?(klass.name) + + return unless parameter = newattr(klass.name) + + if value = parameter.default and ! value.nil? + parameter.value = value + else + @parameters.delete(parameter.name) + end + end + + # Convert our object to a hash. This just includes properties. + def to_hash + rethash = {} + + @parameters.each do |name, obj| + rethash[name] = obj.value + end + + rethash + end + + def type + self.class.name + end + + # Return a specific value for an attribute. + def value(name) + name = attr_alias(name) + + (obj = @parameters[name] and obj.respond_to?(:value)) ? obj.value : nil + end + + def version + return 0 unless catalog + catalog.version + end + + # Return all of the property objects, in the order specified in the + # class. + def properties + self.class.properties.collect { |prop| @parameters[prop.name] }.compact + end + + # Is this type's name isomorphic with the object? That is, if the + # name conflicts, does it necessarily mean that the objects conflict? + # Defaults to true. + def self.isomorphic? + if defined?(@isomorphic) + return @isomorphic + else + return true + end + end + + def isomorphic? + self.class.isomorphic? + end + + # is the instance a managed instance? A 'yes' here means that + # the instance was created from the language, vs. being created + # in order resolve other questions, such as finding a package + # in a list + def managed? + # Once an object is managed, it always stays managed; but an object + # that is listed as unmanaged might become managed later in the process, + # so we have to check that every time + if @managed + return @managed + else + @managed = false + properties.each { |property| + s = property.should + if s and ! property.class.unmanaged + @managed = true + break + end + } + return @managed + end + end + + ############################### + # Code related to the container behaviour. + + # this is a retarded hack method to get around the difference between + # component children and file children + def self.depthfirst? + @depthfirst + end + + def depthfirst? + self.class.depthfirst? + end + + # Remove an object. The argument determines whether the object's + # subscriptions get eliminated, too. 
+ def remove(rmdeps = true) + # This is hackish (mmm, cut and paste), but it works for now, and it's + # better than warnings. + @parameters.each do |name, obj| + obj.remove + end + @parameters.clear + + @parent = nil + + # Remove the reference to the provider. + if self.provider + @provider.clear + @provider = nil + end + end + + ############################### + # Code related to evaluating the resources. + + # Flush the provider, if it supports it. This is called by the + # transaction. + def flush + self.provider.flush if self.provider and self.provider.respond_to?(:flush) + end + + # if all contained objects are in sync, then we're in sync + # FIXME I don't think this is used on the type instances any more, + # it's really only used for testing + def insync?(is) + insync = true + + if property = @parameters[:ensure] + unless is.include? property + raise Puppet::DevError, + "The is value is not in the is array for '#{property.name}'" + end + ensureis = is[property] + if property.safe_insync?(ensureis) and property.should == :absent + return true + end + end + + properties.each { |property| + unless is.include? property + raise Puppet::DevError, + "The is value is not in the is array for '#{property.name}'" + end + + propis = is[property] + unless property.safe_insync?(propis) + property.debug("Not in sync: #{propis.inspect} vs #{property.should.inspect}") + insync = false + #else + # property.debug("In sync") + end + } + + #self.debug("#{self} sync status is #{insync}") + insync + end + + # retrieve the current value of all contained properties + def retrieve + fail "Provider #{provider.class.name} is not functional on this host" if self.provider.is_a?(Puppet::Provider) and ! provider.class.suitable? + + result = Puppet::Resource.new(type, title) + + # Provide the name, so we know we'll always refer to a real thing + result[:name] = self[:name] unless self[:name] == title + + if ensure_prop = property(:ensure) or (self.class.validattr?(:ensure) and ensure_prop = newattr(:ensure)) + result[:ensure] = ensure_state = ensure_prop.retrieve + else + ensure_state = nil + end + + properties.each do |property| + next if property.name == :ensure + if ensure_state == :absent + result[property] = :absent + else + result[property] = property.retrieve + end + end + + result + end + + def retrieve_resource + resource = retrieve + resource = Resource.new(type, title, :parameters => resource) if resource.is_a? Hash + resource + end + + # Get a hash of the current properties. Returns a hash with + # the actual property instance as the key and the current value + # as the, um, value. + def currentpropvalues + # It's important to use the 'properties' method here, as it follows the order + # in which they're defined in the class. It also guarantees that 'ensure' + # is the first property, which is important for skipping 'retrieve' on + # all the properties if the resource is absent. + ensure_state = false + return properties.inject({}) do | prophash, property| + if property.name == :ensure + ensure_state = property.retrieve + prophash[property] = ensure_state + else + if ensure_state == :absent + prophash[property] = :absent + else + prophash[property] = property.retrieve + end + end + prophash + end + end + + # Are we running in noop mode? + def noop? + # If we're not a host_config, we're almost certainly part of + # Settings, and we want to ignore 'noop' + return false if catalog and ! catalog.host_config? + + if defined?(@noop) + @noop + else + Puppet[:noop] + end + end + + def noop + noop? 
+ end + + ############################### + # Code related to managing resource instances. + require 'puppet/transportable' + + # retrieve a named instance of the current type + def self.[](name) + raise "Global resource access is deprecated" + @objects[name] || @aliases[name] + end + + # add an instance by name to the class list of instances + def self.[]=(name,object) + raise "Global resource storage is deprecated" + newobj = nil + if object.is_a?(Puppet::Type) + newobj = object + else + raise Puppet::DevError, "must pass a Puppet::Type object" + end + + if exobj = @objects[name] and self.isomorphic? + msg = "Object '#{newobj.class.name}[#{name}]' already exists" + + msg += ("in file #{object.file} at line #{object.line}") if exobj.file and exobj.line + msg += ("and cannot be redefined in file #{object.file} at line #{object.line}") if object.file and object.line + error = Puppet::Error.new(msg) + raise error + else + #Puppet.info("adding %s of type %s to class list" % + # [name,object.class]) + @objects[name] = newobj + end + end + + # Create an alias. We keep these in a separate hash so that we don't encounter + # the objects multiple times when iterating over them. + def self.alias(name, obj) + raise "Global resource aliasing is deprecated" + if @objects.include?(name) + unless @objects[name] == obj + raise Puppet::Error.new( + "Cannot create alias #{name}: object already exists" + ) + end + end + + if @aliases.include?(name) + unless @aliases[name] == obj + raise Puppet::Error.new( + "Object #{@aliases[name].name} already has alias #{name}" + ) + end + end + + @aliases[name] = obj + end + + # remove all of the instances of a single type + def self.clear + raise "Global resource removal is deprecated" + if defined?(@objects) + @objects.each do |name, obj| + obj.remove(true) + end + @objects.clear + end + @aliases.clear if defined?(@aliases) + end + + # Force users to call this, so that we can merge objects if + # necessary. + def self.create(args) + # LAK:DEP Deprecation notice added 12/17/2008 + Puppet.warning "Puppet::Type.create is deprecated; use Puppet::Type.new" + new(args) + end + + # remove a specified object + def self.delete(resource) + raise "Global resource removal is deprecated" + return unless defined?(@objects) + @objects.delete(resource.title) if @objects.include?(resource.title) + @aliases.delete(resource.title) if @aliases.include?(resource.title) + if @aliases.has_value?(resource) + names = [] + @aliases.each do |name, otherres| + if otherres == resource + names << name + end + end + names.each { |name| @aliases.delete(name) } + end + end + + # iterate across each of the type's instances + def self.each + raise "Global resource iteration is deprecated" + return unless defined?(@objects) + @objects.each { |name,instance| + yield instance + } + end + + # does the type have an object with the given name? + def self.has_key?(name) + raise "Global resource access is deprecated" + @objects.has_key?(name) + end + + # Retrieve all known instances. Either requires providers or must be overridden. + def self.instances + raise Puppet::DevError, "#{self.name} has no providers and has not overridden 'instances'" if provider_hash.empty? + + # Put the default provider first, then the rest of the suitable providers. 
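+    # (Sketch.) Callers typically use this as, e.g.,
+    #   Puppet::Type.type(:package).instances
+    # to get one pre-initialized resource per instance the suitable providers
+    # can discover on the system.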
+ provider_instances = {} + providers_by_source.collect do |provider| + provider.instances.collect do |instance| + # We always want to use the "first" provider instance we find, unless the resource + # is already managed and has a different provider set + if other = provider_instances[instance.name] + Puppet.warning "%s %s found in both %s and %s; skipping the %s version" % + [self.name.to_s.capitalize, instance.name, other.class.name, instance.class.name, instance.class.name] + next + end + provider_instances[instance.name] = instance + + new(:name => instance.name, :provider => instance, :audit => :all) + end + end.flatten.compact + end + + # Return a list of one suitable provider per source, with the default provider first. + def self.providers_by_source + # Put the default provider first, then the rest of the suitable providers. + sources = [] + [defaultprovider, suitableprovider].flatten.uniq.collect do |provider| + next if sources.include?(provider.source) + + sources << provider.source + provider + end.compact + end + + # Convert a simple hash into a Resource instance. + def self.hash2resource(hash) + hash = hash.inject({}) { |result, ary| result[ary[0].to_sym] = ary[1]; result } + + title = hash.delete(:title) + title ||= hash[:name] + title ||= hash[key_attributes.first] if key_attributes.length == 1 + + raise Puppet::Error, "Title or name must be provided" unless title + + # Now create our resource. + resource = Puppet::Resource.new(self.name, title) + [:catalog].each do |attribute| + if value = hash[attribute] + hash.delete(attribute) + resource.send(attribute.to_s + "=", value) + end + end + + hash.each do |param, value| + resource[param] = value + end + resource + end + + # Create the path for logging and such. + def pathbuilder + if p = parent + [p.pathbuilder, self.ref].flatten + else + [self.ref] + end + end + + ############################### + # Add all of the meta parameters. + newmetaparam(:noop) do + desc "Boolean flag indicating whether work should actually + be done." + + newvalues(:true, :false) + munge do |value| + case value + when true, :true, "true"; @resource.noop = true + when false, :false, "false"; @resource.noop = false + end + end + end + + newmetaparam(:schedule) do + desc "On what schedule the object should be managed. You must create a + schedule object, and then reference the name of that object to use + that for your schedule: + + schedule { daily: + period => daily, + range => \"2-4\" + } + + exec { \"/usr/bin/apt-get update\": + schedule => daily + } + + The creation of the schedule object does not need to appear in the + configuration before objects that use it." + end + + newmetaparam(:audit) do + desc "Marks a subset of this resource's unmanaged attributes for auditing. Accepts an + attribute name or a list of attribute names. + + Auditing a resource attribute has two effects: First, whenever a catalog + is applied with puppet apply or puppet agent, Puppet will check whether + that attribute of the resource has been modified, comparing its current + value to the previous run; any change will be logged alongside any actions + performed by Puppet while applying the catalog. + + Secondly, marking a resource attribute for auditing will include that + attribute in inspection reports generated by puppet inspect; see the + puppet inspect documentation for more details. + + Managed attributes for a resource can also be audited, but note that + changes made by Puppet will be logged as additional modifications. (I.e. 
+ if a user manually edits a file whose contents are audited and managed, + puppet agent's next two runs will both log an audit notice: the first run + will log the user's edit and then revert the file to the desired state, + and the second run will log the edit made by Puppet.)" + + validate do |list| + list = Array(list).collect {|p| p.to_sym} + unless list == [:all] + list.each do |param| + next if @resource.class.validattr?(param) + fail "Cannot audit #{param}: not a valid attribute for #{resource}" + end + end + end + + munge do |args| + properties_to_audit(args).each do |param| + next unless resource.class.validproperty?(param) + resource.newattr(param) + end + end + + def all_properties + resource.class.properties.find_all do |property| + resource.provider.nil? or resource.provider.class.supports_parameter?(property) + end.collect do |property| + property.name + end + end + + def properties_to_audit(list) + if !list.kind_of?(Array) && list.to_sym == :all + list = all_properties + else + list = Array(list).collect { |p| p.to_sym } + end + end + end + + newmetaparam(:check) do + desc "Audit specified attributes of resources over time, and report if any have changed. + This parameter has been deprecated in favor of 'audit'." + + munge do |args| + resource.warning "'check' attribute is deprecated; use 'audit' instead" + resource[:audit] = args + end + end + + newmetaparam(:loglevel) do + desc "Sets the level that information will be logged. + The log levels have the biggest impact when logs are sent to + syslog (which is currently the default)." + defaultto :notice + + newvalues(*Puppet::Util::Log.levels) + newvalues(:verbose) + + munge do |loglevel| + val = super(loglevel) + if val == :verbose + val = :info + end + val + end + end + + newmetaparam(:alias) do + desc "Creates an alias for the object. Puppet uses this internally when you + provide a symbolic name: + + file { sshdconfig: + path => $operatingsystem ? { + solaris => \"/usr/local/etc/ssh/sshd_config\", + default => \"/etc/ssh/sshd_config\" + }, + source => \"...\" + } + + service { sshd: + subscribe => File[sshdconfig] + } + + When you use this feature, the parser sets `sshdconfig` as the name, + and the library sets that as an alias for the file so the dependency + lookup for `sshd` works. You can use this parameter yourself, + but note that only the library can use these aliases; for instance, + the following code will not work: + + file { \"/etc/ssh/sshd_config\": + owner => root, + group => root, + alias => sshdconfig + } + + file { sshdconfig: + mode => 644 + } + + There's no way here for the Puppet parser to know that these two stanzas + should be affecting the same file. + + See the [Language Tutorial](http://docs.puppetlabs.com/guides/language_tutorial.html) for more information. + + " + + munge do |aliases| + aliases = [aliases] unless aliases.is_a?(Array) + + raise(ArgumentError, "Cannot add aliases without a catalog") unless @resource.catalog + + aliases.each do |other| + if obj = @resource.catalog.resource(@resource.class.name, other) + unless obj.object_id == @resource.object_id + self.fail("#{@resource.title} can not create alias #{other}: object already exists") + end + next + end + + # Newschool, add it to the catalog. + @resource.catalog.alias(@resource, other) + end + end + end + + newmetaparam(:tag) do + desc "Add the specified tags to the associated resource. 
While all resources + are automatically tagged with as much information as possible + (e.g., each class and definition containing the resource), it can + be useful to add your own tags to a given resource. + + Tags are currently useful for things like applying a subset of a + host's configuration: + + puppet agent --test --tags mytag + + This way, when you're testing a configuration you can run just the + portion you're testing." + + munge do |tags| + tags = [tags] unless tags.is_a? Array + + tags.each do |tag| + @resource.tag(tag) + end + end + end + + class RelationshipMetaparam < Puppet::Parameter + class << self + attr_accessor :direction, :events, :callback, :subclasses + end + + @subclasses = [] + + def self.inherited(sub) + @subclasses << sub + end + + def munge(references) + references = [references] unless references.is_a?(Array) + references.collect do |ref| + if ref.is_a?(Puppet::Resource) + ref + else + Puppet::Resource.new(ref) + end + end + end + + def validate_relationship + @value.each do |ref| + unless @resource.catalog.resource(ref.to_s) + description = self.class.direction == :in ? "dependency" : "dependent" + fail "Could not find #{description} #{ref} for #{resource.ref}" + end + end + end + + # Create edges from each of our relationships. :in + # relationships are specified by the event-receivers, and :out + # relationships are specified by the event generator. This + # way 'source' and 'target' are consistent terms in both edges + # and events -- that is, an event targets edges whose source matches + # the event's source. The direction of the relationship determines + # which resource is applied first and which resource is considered + # to be the event generator. + def to_edges + @value.collect do |reference| + reference.catalog = resource.catalog + + # Either of the two retrieval attempts could have returned + # nil. + unless related_resource = reference.resolve + self.fail "Could not retrieve dependency '#{reference}' of #{@resource.ref}" + end + + # Are we requiring them, or vice versa? See the method docs + # for futher info on this. + if self.class.direction == :in + source = related_resource + target = @resource + else + source = @resource + target = related_resource + end + + if method = self.class.callback + subargs = { + :event => self.class.events, + :callback => method + } + self.debug("subscribes to #{related_resource.ref}") + else + # If there's no callback, there's no point in even adding + # a label. + subargs = nil + self.debug("requires #{related_resource.ref}") + end + + rel = Puppet::Relationship.new(source, target, subargs) + end + end + end + + def self.relationship_params + RelationshipMetaparam.subclasses + end + + + # Note that the order in which the relationships params is defined + # matters. The labelled params (notify and subcribe) must be later, + # so that if both params are used, those ones win. It's a hackish + # solution, but it works. + + newmetaparam(:require, :parent => RelationshipMetaparam, :attributes => {:direction => :in, :events => :NONE}) do + desc "One or more objects that this object depends on. + This is used purely for guaranteeing that changes to required objects + happen before the dependent object. 
For instance: + + # Create the destination directory before you copy things down + file { \"/usr/local/scripts\": + ensure => directory + } + + file { \"/usr/local/scripts/myscript\": + source => \"puppet://server/module/myscript\", + mode => 755, + require => File[\"/usr/local/scripts\"] + } + + Multiple dependencies can be specified by providing a comma-seperated list + of resources, enclosed in square brackets: + + require => [ File[\"/usr/local\"], File[\"/usr/local/scripts\"] ] + + Note that Puppet will autorequire everything that it can, and + there are hooks in place so that it's easy for resources to add new + ways to autorequire objects, so if you think Puppet could be + smarter here, let us know. + + In fact, the above code was redundant -- Puppet will autorequire + any parent directories that are being managed; it will + automatically realize that the parent directory should be created + before the script is pulled down. + + Currently, exec resources will autorequire their CWD (if it is + specified) plus any fully qualified paths that appear in the + command. For instance, if you had an `exec` command that ran + the `myscript` mentioned above, the above code that pulls the + file down would be automatically listed as a requirement to the + `exec` code, so that you would always be running againts the + most recent version. + " + end + + newmetaparam(:subscribe, :parent => RelationshipMetaparam, :attributes => {:direction => :in, :events => :ALL_EVENTS, :callback => :refresh}) do + desc "One or more objects that this object depends on. Changes in the + subscribed to objects result in the dependent objects being + refreshed (e.g., a service will get restarted). For instance: + + class nagios { + file { \"/etc/nagios/nagios.conf\": + source => \"puppet://server/module/nagios.conf\", + alias => nagconf # just to make things easier for me + } + service { nagios: + ensure => running, + subscribe => File[nagconf] + } + } + + Currently the `exec`, `mount` and `service` type support + refreshing. + " + end + + newmetaparam(:before, :parent => RelationshipMetaparam, :attributes => {:direction => :out, :events => :NONE}) do + desc %{This parameter is the opposite of **require** -- it guarantees + that the specified object is applied later than the specifying + object: + + file { "/var/nagios/configuration": + source => "...", + recurse => true, + before => Exec["nagios-rebuid"] + } + + exec { "nagios-rebuild": + command => "/usr/bin/make", + cwd => "/var/nagios/configuration" + } + + This will make sure all of the files are up to date before the + make command is run.} + end + + newmetaparam(:notify, :parent => RelationshipMetaparam, :attributes => {:direction => :out, :events => :ALL_EVENTS, :callback => :refresh}) do + desc %{This parameter is the opposite of **subscribe** -- it sends events + to the specified object: + + file { "/etc/sshd_config": + source => "....", + notify => Service[sshd] + } + + service { sshd: + ensure => running + } + + This will restart the sshd service if the sshd config file changes.} + end + + newmetaparam(:stage) do + desc %{Which run stage a given resource should reside in. This just creates + a dependency on or from the named milestone. For instance, saying that + this is in the 'bootstrap' stage creates a dependency on the 'bootstrap' + milestone. + + By default, all classes get directly added to the + 'main' stage. 
You can create new stages as resources: + + stage { [pre, post]: } + + To order stages, use standard relationships: + + stage { pre: before => Stage[main] } + + Or use the new relationship syntax: + + Stage[pre] -> Stage[main] -> Stage[post] + + Then use the new class parameters to specify a stage: + + class { foo: stage => pre } + + Stages can only be set on classes, not individual resources. This will + fail: + + file { '/foo': stage => pre, ensure => file } + } + end + + ############################### + # All of the provider plumbing for the resource types. + require 'puppet/provider' + require 'puppet/util/provider_features' + + # Add the feature handling module. + extend Puppet::Util::ProviderFeatures + + attr_reader :provider + + # the Type class attribute accessors + class << self + attr_accessor :providerloader + attr_writer :defaultprovider + end + + # Find the default provider. + def self.defaultprovider + unless @defaultprovider + suitable = suitableprovider + + # Find which providers are a default for this system. + defaults = suitable.find_all { |provider| provider.default? } + + # If we don't have any default we use suitable providers + defaults = suitable if defaults.empty? + max = defaults.collect { |provider| provider.specificity }.max + defaults = defaults.find_all { |provider| provider.specificity == max } + + retval = nil + if defaults.length > 1 + Puppet.warning( + "Found multiple default providers for #{self.name}: #{defaults.collect { |i| i.name.to_s }.join(", ")}; using #{defaults[0].name}" + ) + retval = defaults.shift + elsif defaults.length == 1 + retval = defaults.shift + else + raise Puppet::DevError, "Could not find a default provider for #{self.name}" + end + + @defaultprovider = retval + end + + @defaultprovider + end + + def self.provider_hash_by_type(type) + @provider_hashes ||= {} + @provider_hashes[type] ||= {} + end + + def self.provider_hash + Puppet::Type.provider_hash_by_type(self.name) + end + + # Retrieve a provider by name. + def self.provider(name) + name = Puppet::Util.symbolize(name) + + # If we don't have it yet, try loading it. + @providerloader.load(name) unless provider_hash.has_key?(name) + provider_hash[name] + end + + # Just list all of the providers. + def self.providers + provider_hash.keys + end + + def self.validprovider?(name) + name = Puppet::Util.symbolize(name) + + (provider_hash.has_key?(name) && provider_hash[name].suitable?) + end + + # Create a new provider of a type. This method must be called + # directly on the type that it's implementing. + def self.provide(name, options = {}, &block) + name = Puppet::Util.symbolize(name) + + if unprovide(name) + Puppet.debug "Reloading #{name} #{self.name} provider" + end + + parent = if pname = options[:parent] + options.delete(:parent) + if pname.is_a? Class + pname + else + if provider = self.provider(pname) + provider + else + raise Puppet::DevError, + "Could not find parent provider #{pname} of #{name}" + end + end + else + Puppet::Provider + end + + options[:resource_type] ||= self + + self.providify + + provider = genclass( + name, + :parent => parent, + :hash => provider_hash, + :prefix => "Provider", + :block => block, + :include => feature_module, + :extend => feature_module, + :attributes => options + ) + + provider + end + + # Make sure we have a :provider parameter defined. Only gets called if there + # are providers. + def self.providify + return if @paramhash.has_key? :provider + + newparam(:provider) do + desc "The specific backend for #{self.name.to_s} to use. 
You will + seldom need to specify this -- Puppet will usually discover the + appropriate provider for your platform." + + # This is so we can refer back to the type to get a list of + # providers for documentation. + class << self + attr_accessor :parenttype + end + + # We need to add documentation for each provider. + def self.doc + @doc + " Available providers are:\n\n" + parenttype.providers.sort { |a,b| + a.to_s <=> b.to_s + }.collect { |i| + "* **#{i}**: #{parenttype().provider(i).doc}" + }.join("\n") + end + + defaultto { + @resource.class.defaultprovider.name + } + + validate do |provider_class| + provider_class = provider_class[0] if provider_class.is_a? Array + provider_class = provider_class.class.name if provider_class.is_a?(Puppet::Provider) + + unless provider = @resource.class.provider(provider_class) + raise ArgumentError, "Invalid #{@resource.class.name} provider '#{provider_class}'" + end + end + + munge do |provider| + provider = provider[0] if provider.is_a? Array + provider = provider.intern if provider.is_a? String + @resource.provider = provider + + if provider.is_a?(Puppet::Provider) + provider.class.name + else + provider + end + end + end.parenttype = self + end + + def self.unprovide(name) + if @defaultprovider and @defaultprovider.name == name + @defaultprovider = nil + end + + rmclass(name, :hash => provider_hash, :prefix => "Provider") + end + + # Return an array of all of the suitable providers. + def self.suitableprovider + providerloader.loadall if provider_hash.empty? + provider_hash.find_all { |name, provider| + provider.suitable? + }.collect { |name, provider| + provider + }.reject { |p| p.name == :fake } # For testing + end + + def provider=(name) + if name.is_a?(Puppet::Provider) + @provider = name + @provider.resource = self + elsif klass = self.class.provider(name) + @provider = klass.new(self) + else + raise ArgumentError, "Could not find #{name} provider of #{self.class.name}" + end + end + + ############################### + # All of the relationship code. + + # Specify a block for generating a list of objects to autorequire. This + # makes it so that you don't have to manually specify things that you clearly + # require. + def self.autorequire(name, &block) + @autorequires ||= {} + @autorequires[name] = block + end + + # Yield each of those autorequires in turn, yo. + def self.eachautorequire + @autorequires ||= {} + @autorequires.each { |type, block| + yield(type, block) + } + end + + # Figure out of there are any objects we can automatically add as + # dependencies. + def autorequire(rel_catalog = nil) + rel_catalog ||= catalog + raise(Puppet::DevError, "You cannot add relationships without a catalog") unless rel_catalog + + reqs = [] + self.class.eachautorequire { |type, block| + # Ignore any types we can't find, although that would be a bit odd. + next unless typeobj = Puppet::Type.type(type) + + # Retrieve the list of names from the block. + next unless list = self.instance_eval(&block) + list = [list] unless list.is_a?(Array) + + # Collect the current prereqs + list.each { |dep| + # Support them passing objects directly, to save some effort. + unless dep.is_a? Puppet::Type + # Skip autorequires that we aren't managing + unless dep = rel_catalog.resource(type, dep) + next + end + end + + reqs << Puppet::Relationship.new(dep, self) + } + } + + reqs + end + + # Build the dependencies associated with an individual object. 
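+ # Each relationship metaparameter (require, subscribe, before, notify)
+ # turns its references into Puppet::Relationship edges via to_edges; the
+ # method below only collects and flattens those edges. A rough sketch,
+ # assuming both resources end up in the same catalog (names illustrative):
+ #
+ #   res = Puppet::Type.type(:file).new(:path    => "/tmp/demo",
+ #                                      :require => "Exec[setup]")
+ #   # res.builddepends  # => one Puppet::Relationship edge per reference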
+ def builddepends + # Handle the requires + self.class.relationship_params.collect do |klass| + if param = @parameters[klass.name] + param.to_edges + end + end.flatten.reject { |r| r.nil? } + end + + # Define the initial list of tags. + def tags=(list) + tag(self.class.name) + tag(*list) + end + + # Types (which map to resources in the languages) are entirely composed of + # attribute value pairs. Generally, Puppet calls any of these things an + # 'attribute', but these attributes always take one of three specific + # forms: parameters, metaparams, or properties. + + # In naming methods, I have tried to consistently name the method so + # that it is clear whether it operates on all attributes (thus has 'attr' in + # the method name, or whether it operates on a specific type of attributes. + attr_writer :title + attr_writer :noop + + include Enumerable + + # class methods dealing with Type management + + public + + # the Type class attribute accessors + class << self + attr_reader :name + attr_accessor :self_refresh + include Enumerable, Puppet::Util::ClassGen + include Puppet::MetaType::Manager + + include Puppet::Util + include Puppet::Util::Logging + end + + # all of the variables that must be initialized for each subclass + def self.initvars + # all of the instances of this class + @objects = Hash.new + @aliases = Hash.new + + @defaults = {} + + @parameters ||= [] + + @validproperties = {} + @properties = [] + @parameters = [] + @paramhash = {} + + @attr_aliases = {} + + @paramdoc = Hash.new { |hash,key| + key = key.intern if key.is_a?(String) + if hash.include?(key) + hash[key] + else + "Param Documentation for #{key} not found" + end + } + + @doc ||= "" + + end + + def self.to_s + if defined?(@name) + "Puppet::Type::#{@name.to_s.capitalize}" + else + super + end + end + + # Create a block to validate that our object is set up entirely. This will + # be run before the object is operated on. + def self.validate(&block) + define_method(:validate, &block) + #@validate = block + end + + # The catalog that this resource is stored in. + attr_accessor :catalog + + # is the resource exported + attr_accessor :exported + + # is the resource virtual (it should not :-)) + attr_accessor :virtual + + # create a log at specified level + def log(msg) + + Puppet::Util::Log.create( + + :level => @parameters[:loglevel].value, + :message => msg, + + :source => self + ) + end + + + # instance methods related to instance intrinsics + # e.g., initialize and name + + public + + attr_reader :original_parameters + + # initialize the type instance + def initialize(resource) + raise Puppet::DevError, "Got TransObject instead of Resource or hash" if resource.is_a?(Puppet::TransObject) + resource = self.class.hash2resource(resource) unless resource.is_a?(Puppet::Resource) + + # The list of parameter/property instances. + @parameters = {} + + # Set the title first, so any failures print correctly. + if resource.type.to_s.downcase.to_sym == self.class.name + self.title = resource.title + else + # This should only ever happen for components + self.title = resource.ref + end + + [:file, :line, :catalog, :exported, :virtual].each do |getter| + setter = getter.to_s + "=" + if val = resource.send(getter) + self.send(setter, val) + end + end + + @tags = resource.tags + + @original_parameters = resource.to_hash + + set_name(@original_parameters) + + set_default(:provider) + + set_parameters(@original_parameters) + + self.validate if self.respond_to?(:validate) + end + + private + + # Set our resource's name. 
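+ # The namevar (name_var, usually :name, or :path for file-like types) is
+ # assigned before any other attribute so that later validation and default
+ # lookups can already refer to it; that is why, in the initializer above,
+ # set_name runs ahead of set_default(:provider) and set_parameters.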
+ def set_name(hash) + self[name_var] = hash.delete(name_var) if name_var + end + + # Set all of the parameters from a hash, in the appropriate order. + def set_parameters(hash) + # Use the order provided by allattrs, but add in any + # extra attributes from the resource so we get failures + # on invalid attributes. + no_values = [] + (self.class.allattrs + hash.keys).uniq.each do |attr| + begin + # Set any defaults immediately. This is mostly done so + # that the default provider is available for any other + # property validation. + if hash.has_key?(attr) + self[attr] = hash[attr] + else + no_values << attr + end + rescue ArgumentError, Puppet::Error, TypeError + raise + rescue => detail + error = Puppet::DevError.new( "Could not set #{attr} on #{self.class.name}: #{detail}") + error.set_backtrace(detail.backtrace) + raise error + end + end + no_values.each do |attr| + set_default(attr) + end + end + + public + + # Set up all of our autorequires. + def finish + # Make sure all of our relationships are valid. Again, must be done + # when the entire catalog is instantiated. + self.class.relationship_params.collect do |klass| + if param = @parameters[klass.name] + param.validate_relationship + end + end.flatten.reject { |r| r.nil? } + end + + # For now, leave the 'name' method functioning like it used to. Once 'title' + # works everywhere, I'll switch it. + def name + self[:name] + end + + # Look up our parent in the catalog, if we have one. + def parent + return nil unless catalog + + unless defined?(@parent) + if parents = catalog.adjacent(self, :direction => :in) + # We should never have more than one parent, so let's just ignore + # it if we happen to. + @parent = parents.shift + else + @parent = nil + end + end + @parent + end + + # Return the "type[name]" style reference. + def ref + "#{self.class.name.to_s.capitalize}[#{self.title}]" + end + + def self_refresh? + self.class.self_refresh + end + + # Mark that we're purging. + def purging + @purging = true + end + + # Is this resource being purged? Used by transactions to forbid + # deletion when there are dependencies. + def purging? + if defined?(@purging) + @purging + else + false + end + end + + # Retrieve the title of an object. If no title was set separately, + # then use the object's name. + def title + unless @title + if self.class.validparameter?(name_var) + @title = self[:name] + elsif self.class.validproperty?(name_var) + @title = self.should(name_var) + else + self.devfail "Could not find namevar #{name_var} for #{self.class.name}" + end + end + + @title + end + + # convert to a string + def to_s + self.ref + end + + # Convert to a transportable object + def to_trans(ret = true) + trans = TransObject.new(self.title, self.class.name) + + values = retrieve_resource + values.each do |name, value| + name = name.name if name.respond_to? :name + trans[name] = value + end + + @parameters.each do |name, param| + # Avoid adding each instance name twice + next if param.class.isnamevar? and param.value == self.title + + # We've already got property values + next if param.is_a?(Puppet::Property) + trans[name] = param.value + end + + trans.tags = self.tags + + # FIXME I'm currently ignoring 'parent' and 'path' + + trans + end + + def to_resource + # this 'type instance' versus 'resource' distinction seems artificial + # I'd like to see it collapsed someday ~JW + self.to_trans.to_resource + end + + def virtual?; !!@virtual; end + def exported?; !!@exported; end +end +end + +require 'puppet/provider' + +# Always load these types. 
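+# A minimal sketch of how the bundled types below hook into this class
+# (illustrative only; the real types define many more attributes):
+#
+#   Puppet::Type.newtype(:example) do
+#     newparam(:name) do
+#       desc "The identity of the example resource."
+#       isnamevar
+#     end
+#
+#     newproperty(:message) do
+#       desc "A single demo property."
+#     end
+#   end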
+require 'puppet/type/component' diff --git a/mcollective/lib/puppet/type/augeas.rb b/mcollective/lib/puppet/type/augeas.rb new file mode 100644 index 000000000..a8fb1f15f --- /dev/null +++ b/mcollective/lib/puppet/type/augeas.rb @@ -0,0 +1,182 @@ +#-- +# Copyright (C) 2008 Red Hat Inc. +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public +# License as published by the Free Software Foundation; either +# version 2 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# General Public License for more details. +# +# You should have received a copy of the GNU General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +# +# Author: Bryan Kearney + +Puppet::Type.newtype(:augeas) do + include Puppet::Util + + feature :parse_commands, "Parse the command string" + feature :need_to_run?, "If the command should run" + feature :execute_changes, "Actually make the changes" + + @doc = "Apply the changes (single or array of changes) to the filesystem + via the augeas tool. + + Requires: + + - augeas to be installed (http://www.augeas.net) + - ruby-augeas bindings + + Sample usage with a string: + + augeas{\"test1\" : + context => \"/files/etc/sysconfig/firstboot\", + changes => \"set RUN_FIRSTBOOT YES\", + onlyif => \"match other_value size > 0\", + } + + Sample usage with an array and custom lenses: + + augeas{\"jboss_conf\": + context => \"/files\", + changes => [ + \"set /etc/jbossas/jbossas.conf/JBOSS_IP $ipaddress\", + \"set /etc/jbossas/jbossas.conf/JAVA_HOME /usr\" + ], + load_path => \"$/usr/share/jbossas/lenses\", + } + + " + + newparam (:name) do + desc "The name of this task. Used for uniqueness" + isnamevar + end + + newparam (:context) do + desc "Optional context path. This value is prepended to the paths of all changes if the path is relative. If INCL is set, defaults to '/files' + INCL, otherwise the empty string" + defaultto "" + munge do |value| + if value.empty? and resource[:incl] + "/files" + resource[:incl] + else + value + end + end + end + + newparam (:onlyif) do + desc "Optional augeas command and comparisons to control the execution of this type. + Supported onlyif syntax: + + get [AUGEAS_PATH] [COMPARATOR] [STRING] + match [MATCH_PATH] size [COMPARATOR] [INT] + match [MATCH_PATH] include [STRING] + match [MATCH_PATH] not_include [STRING] + match [MATCH_PATH] == [AN_ARRAY] + match [MATCH_PATH] != [AN_ARRAY] + + where: + + AUGEAS_PATH is a valid path scoped by the context + MATCH_PATH is a valid match synatx scoped by the context + COMPARATOR is in the set [> >= != == <= <] + STRING is a string + INT is a number + AN_ARRAY is in the form ['a string', 'another']" + defaultto "" + end + + + newparam(:changes) do + desc "The changes which should be applied to the filesystem. This + can be either a string which contains a command or an array of commands. + Commands supported are: + + set [PATH] [VALUE] Sets the value VALUE at loction PATH + rm [PATH] Removes the node at location PATH + remove [PATH] Synonym for rm + clear [PATH] Keeps the node at PATH, but removes the value. + ins [LABEL] [WHERE] [PATH] Inserts an empty node LABEL either [WHERE={before|after}] PATH. 
+ insert [LABEL] [WHERE] [PATH] Synonym for ins + + If the parameter 'context' is set that value is prepended to PATH" + end + + + newparam(:root) do + desc "A file system path; all files loaded by Augeas are loaded underneath ROOT" + defaultto "/" + end + + newparam(:load_path) do + desc "Optional colon separated list of directories; these directories are searched for schema definitions" + defaultto "" + end + + newparam(:force) do + desc "Optional command to force the augeas type to execute even if it thinks changes + will not be made. This does not overide the only setting. If onlyif is set, then the + foce setting will not override that result" + + defaultto false + end + + newparam(:type_check) do + desc "Set to true if augeas should perform typechecking. Optional, defaults to false" + newvalues(:true, :false) + + defaultto :false + end + + newparam(:lens) do + desc "Use a specific lens, e.g. `Hosts.lns`. When this parameter is set, you must also set the incl parameter to indicate which file to load. Only that file will be loaded, which greatly speeds up execution of the type" + end + + newparam(:incl) do + desc "Load only a specific file, e.g. `/etc/hosts`. When this parameter is set, you must also set the lens parameter to indicate which lens to use." + end + + validate do + has_lens = !self[:lens].nil? + has_incl = !self[:incl].nil? + self.fail "You must specify both the lens and incl parameters, or neither" if has_lens != has_incl + end + + # This is the acutal meat of the code. It forces + # augeas to be run and fails or not based on the augeas return + # code. + newproperty(:returns) do |property| + include Puppet::Util + desc "The expected return code from the augeas command. Should not be set" + + defaultto 0 + + # Make output a bit prettier + def change_to_s(currentvalue, newvalue) + "executed successfully" + end + + # if the onlyif resource is provided, then the value is parsed. + # a return value of 0 will stop exection because it matches the + # default value. + def retrieve + if @resource.provider.need_to_run?() + :need_to_run + else + 0 + end + end + + # Actually execute the command. + def sync + @resource.provider.execute_changes + end + end + +end diff --git a/mcollective/lib/puppet/type/component.rb b/mcollective/lib/puppet/type/component.rb new file mode 100644 index 000000000..c8c09409b --- /dev/null +++ b/mcollective/lib/puppet/type/component.rb @@ -0,0 +1,85 @@ + +require 'puppet' +require 'puppet/type' +require 'puppet/transaction' + +Puppet::Type.newtype(:component) do + include Enumerable + + newparam(:name) do + desc "The name of the component. Generally optional." + isnamevar + end + + # Override how parameters are handled so that we support the extra + # parameters that are used with defined resource types. + def [](param) + return super if self.class.valid_parameter?(param) + @extra_parameters[param.to_sym] + end + + # Override how parameters are handled so that we support the extra + # parameters that are used with defined resource types. + def []=(param, value) + return super if self.class.valid_parameter?(param) + @extra_parameters[param.to_sym] = value + end + + # Initialize a new component + def initialize(*args) + @extra_parameters = {} + super + + catalog.alias(self, ref) if catalog and ! catalog.resource(ref) + end + + # Component paths are special because they function as containers. + def pathbuilder + if reference.type == "Class" + # 'main' is the top class, so we want to see '//' instead of + # its name. 
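+ # e.g. a resource contained directly in Class[main] contributes an empty
+ # segment here, so its rendered path begins with "//" rather than
+ # "/Main/..." (illustrative rendering).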
+ if reference.title.to_s.downcase == "main" + myname = "" + else + myname = reference.title + end + else + myname = reference.to_s + end + if p = self.parent + return [p.pathbuilder, myname] + else + return [myname] + end + end + + def ref + reference.to_s + end + + # We want our title to just be the whole reference, rather than @title. + def title + ref + end + + def title=(str) + @reference = Puppet::Resource.new(str) + end + + def refresh + catalog.adjacent(self).each do |child| + if child.respond_to?(:refresh) + child.refresh + child.log "triggering #{:refresh}" + end + end + end + + def to_s + reference.to_s + end + + private + + attr_reader :reference +end diff --git a/mcollective/lib/puppet/type/computer.rb b/mcollective/lib/puppet/type/computer.rb new file mode 100644 index 000000000..7a2c52d53 --- /dev/null +++ b/mcollective/lib/puppet/type/computer.rb @@ -0,0 +1,66 @@ +Puppet::Type.newtype(:computer) do + + @doc = "Computer object management using DirectoryService + on OS X. + + Note that these are distinctly different kinds of objects to 'hosts', + as they require a MAC address and can have all sorts of policy attached to + them. + + This provider only manages Computer objects in the local directory service + domain, not in remote directories. + + If you wish to manage `/etc/hosts` file on Mac OS X, then simply use the host + type as per other platforms. + + This type primarily exists to create localhost Computer objects that MCX + policy can then be attached to. + + **Autorequires:** If Puppet is managing the plist file representing a + Computer object (located at `/var/db/dslocal/nodes/Default/computers/{name}.plist`), + the Computer resource will autorequire it." + + # ensurable + + # We autorequire the computer object in case it is being managed at the + # file level by Puppet. + + autorequire(:file) do + if self[:name] + "/var/db/dslocal/nodes/Default/computers/#{self[:name]}.plist" + else + nil + end + end + + newproperty(:ensure, :parent => Puppet::Property::Ensure) do + desc "Control the existences of this computer record. Set this attribute to + `present` to ensure the computer record exists. Set it to `absent` + to delete any computer records with this name" + newvalue(:present) do + provider.create + end + + newvalue(:absent) do + provider.delete + end + end + + newparam(:name) do + desc "The authoritative 'short' name of the computer record." + isnamevar + end + + newparam(:realname) do + desc "The 'long' name of the computer record." + end + + newproperty(:en_address) do + desc "The MAC address of the primary network interface. Must match en0." + end + + newproperty(:ip_address) do + desc "The IP Address of the Computer object." + end + +end diff --git a/mcollective/lib/puppet/type/cron.rb b/mcollective/lib/puppet/type/cron.rb new file mode 100755 index 000000000..5367517b2 --- /dev/null +++ b/mcollective/lib/puppet/type/cron.rb @@ -0,0 +1,413 @@ +require 'etc' +require 'facter' +require 'puppet/util/filetype' + +Puppet::Type.newtype(:cron) do + @doc = <<-EOT + Installs and manages cron jobs. Every cron resource requires a command + and user attribute, as well as at least one periodic attribute (hour, + minute, month, monthday, weekday, or special). While the name of the cron + job is not part of the actual job, it is used by Puppet to store and + retrieve it. + + If you specify a cron job that matches an existing job in every way + except name, then the jobs will be considered equivalent and the + new name will be permanently associated with that job. 
Once this + association is made and synced to disk, you can then manage the job + normally (e.g., change the schedule of the job). + + Example: + + cron { logrotate: + command => "/usr/sbin/logrotate", + user => root, + hour => 2, + minute => 0 + } + + Note that all periodic attributes can be specified as an array of values: + + cron { logrotate: + command => "/usr/sbin/logrotate", + user => root, + hour => [2, 4] + } + + ...or using ranges or the step syntax `*/2` (although there's no guarantee + that your `cron` daemon supports these): + + cron { logrotate: + command => "/usr/sbin/logrotate", + user => root, + hour => ['2-4'], + minute => '*/10' + } + EOT + ensurable + + # A base class for all of the Cron parameters, since they all have + # similar argument checking going on. + class CronParam < Puppet::Property + class << self + attr_accessor :boundaries, :default + end + + # We have to override the parent method, because we consume the entire + # "should" array + def insync?(is) + self.is_to_s(is) == self.should_to_s + end + + # A method used to do parameter input handling. Converts integers + # in string form to actual integers, and returns the value if it's + # an integer or false if it's just a normal string. + def numfix(num) + if num =~ /^\d+$/ + return num.to_i + elsif num.is_a?(Integer) + return num + else + return false + end + end + + # Verify that a number is within the specified limits. Return the + # number if it is, or false if it is not. + def limitcheck(num, lower, upper) + (num >= lower and num <= upper) && num + end + + # Verify that a value falls within the specified array. Does case + # insensitive matching, and supports matching either the entire word + # or the first three letters of the word. + def alphacheck(value, ary) + tmp = value.downcase + + # If they specified a shortened version of the name, then see + # if we can lengthen it (e.g., mon => monday). + if tmp.length == 3 + ary.each_with_index { |name, index| + if name =~ /#{tmp}/i + return index + end + } + else + return ary.index(tmp) if ary.include?(tmp) + end + + false + end + + def should_to_s(newvalue = @should) + if newvalue + newvalue = [newvalue] unless newvalue.is_a?(Array) + if self.name == :command or newvalue[0].is_a? Symbol + newvalue[0] + else + newvalue.join(",") + end + else + nil + end + end + + def is_to_s(currentvalue = @is) + if currentvalue + return currentvalue unless currentvalue.is_a?(Array) + + if self.name == :command or currentvalue[0].is_a? Symbol + currentvalue[0] + else + currentvalue.join(",") + end + else + nil + end + end + + def should + if @should and @should[0] == :absent + :absent + else + @should + end + end + + def should=(ary) + super + @should.flatten! + end + + # The method that does all of the actual parameter value + # checking; called by all of the +param=+ methods. + # Requires the value, type, and bounds, and optionally supports + # a boolean of whether to do alpha checking, and if so requires + # the ary against which to do the checking. 
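+ # A few hedged examples of what the munge below yields for the bounded
+ # properties (minute, hour, weekday, month, monthday):
+ #
+ #   "*/10"   -> "*/10"    (step syntax passed through)
+ #   "2-4"    -> "2-4"     (ranges passed through)
+ #   "*"      -> :absent
+ #   "absent" -> :absent
+ #   "mon"    -> "1"       (weekday only, via alphacheck)
+ #   "61"     -> fails validation for minute (outside 0-59)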
+ munge do |value| + # Support 'absent' as a value, so that they can remove + # a value + if value == "absent" or value == :absent + return :absent + end + + # Allow the */2 syntax + if value =~ /^\*\/[0-9]+$/ + return value + end + + # Allow ranges + if value =~ /^[0-9]+-[0-9]+$/ + return value + end + + # Allow ranges + */2 + if value =~ /^[0-9]+-[0-9]+\/[0-9]+$/ + return value + end + + if value == "*" + return :absent + end + + return value unless self.class.boundaries + lower, upper = self.class.boundaries + retval = nil + if num = numfix(value) + retval = limitcheck(num, lower, upper) + elsif respond_to?(:alpha) + # If it has an alpha method defined, then we check + # to see if our value is in that list and if so we turn + # it into a number + retval = alphacheck(value, alpha) + end + + if retval + return retval.to_s + else + self.fail "#{value} is not a valid #{self.class.name}" + end + end + end + + # Somewhat uniquely, this property does not actually change anything -- it + # just calls +@resource.sync+, which writes out the whole cron tab for + # the user in question. There is no real way to change individual cron + # jobs without rewriting the entire cron file. + # + # Note that this means that managing many cron jobs for a given user + # could currently result in multiple write sessions for that user. + newproperty(:command, :parent => CronParam) do + desc "The command to execute in the cron job. The environment + provided to the command varies by local system rules, and it is + best to always provide a fully qualified command. The user's + profile is not sourced when the command is run, so if the + user's environment is desired it should be sourced manually. + + All cron parameters support `absent` as a value; this will + remove any existing values for that field." + + def retrieve + return_value = super + return_value = return_value[0] if return_value && return_value.is_a?(Array) + + return_value + end + + def should + if @should + if @should.is_a? Array + @should[0] + else + devfail "command is not an array" + end + else + nil + end + end + end + + newproperty(:special) do + desc "A special value such as 'reboot' or 'annually'. + Only available on supported systems such as Vixie Cron. + Overrides more specific time of day/week settings." + + def specials + %w{reboot yearly annually monthly weekly daily midnight hourly} + end + + validate do |value| + raise ArgumentError, "Invalid special schedule #{value.inspect}" unless specials.include?(value) + end + end + + newproperty(:minute, :parent => CronParam) do + self.boundaries = [0, 59] + desc "The minute at which to run the cron job. + Optional; if specified, must be between 0 and 59, inclusive." + end + + newproperty(:hour, :parent => CronParam) do + self.boundaries = [0, 23] + desc "The hour at which to run the cron job. Optional; + if specified, must be between 0 and 23, inclusive." + end + + newproperty(:weekday, :parent => CronParam) do + def alpha + %w{sunday monday tuesday wednesday thursday friday saturday} + end + self.boundaries = [0, 7] + desc "The weekday on which to run the command. + Optional; if specified, must be between 0 and 7, inclusive, with + 0 (or 7) being Sunday, or must be the name of the day (e.g., Tuesday)." + end + + newproperty(:month, :parent => CronParam) do + def alpha + %w{january february march april may june july + august september october november december} + end + self.boundaries = [1, 12] + desc "The month of the year. 
Optional; if specified + must be between 1 and 12 or the month name (e.g., December)." + end + + newproperty(:monthday, :parent => CronParam) do + self.boundaries = [1, 31] + desc "The day of the month on which to run the + command. Optional; if specified, must be between 1 and 31." + end + + newproperty(:environment) do + desc "Any environment settings associated with this cron job. They + will be stored between the header and the job in the crontab. There + can be no guarantees that other, earlier settings will not also + affect a given cron job. + + + Also, Puppet cannot automatically determine whether an existing, + unmanaged environment setting is associated with a given cron + job. If you already have cron jobs with environment settings, + then Puppet will keep those settings in the same place in the file, + but will not associate them with a specific job. + + Settings should be specified exactly as they should appear in + the crontab, e.g., `PATH=/bin:/usr/bin:/usr/sbin`." + + validate do |value| + unless value =~ /^\s*(\w+)\s*=\s*(.*)\s*$/ or value == :absent or value == "absent" + raise ArgumentError, "Invalid environment setting #{value.inspect}" + end + end + + def insync?(is) + if is.is_a? Array + return is.sort == @should.sort + else + return is == @should + end + end + + def is_to_s(newvalue) + if newvalue + if newvalue.is_a?(Array) + newvalue.join(",") + else + newvalue + end + else + nil + end + end + + def should + @should + end + + def should_to_s(newvalue = @should) + if newvalue + newvalue.join(",") + else + nil + end + end + end + + newparam(:name) do + desc "The symbolic name of the cron job. This name + is used for human reference only and is generated automatically + for cron jobs found on the system. This generally won't + matter, as Puppet will do its best to match existing cron jobs + against specified jobs (and Puppet adds a comment to cron jobs it adds), but it is at least possible that converting from + unmanaged jobs to managed jobs might require manual + intervention." + + isnamevar + end + + newproperty(:user) do + desc "The user to run the command as. This user must + be allowed to run cron jobs, which is not currently checked by + Puppet. + + The user defaults to whomever Puppet is running as." + + defaultto { Etc.getpwuid(Process.uid).name || "root" } + end + + newproperty(:target) do + desc "Where the cron job should be stored. For crontab-style + entries this is the same as the user and defaults that way. + Other providers default accordingly." 
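+ # The default below keys off the chosen provider: crontab-style providers
+ # store jobs per user, so :target falls back to the job's :user (raising if
+ # no user was given); ParsedFile-based providers fall back to their
+ # class-level default_target; anything else gets no default.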
+ + defaultto { + if provider.is_a?(@resource.class.provider(:crontab)) + if val = @resource.should(:user) + val + else + raise ArgumentError, + "You must provide a user with crontab entries" + end + elsif provider.class.ancestors.include?(Puppet::Provider::ParsedFile) + provider.class.default_target + else + nil + end + } + end + + # We have to reorder things so that :provide is before :target + + attr_accessor :uid + + def value(name) + name = symbolize(name) + ret = nil + if obj = @parameters[name] + ret = obj.should + + ret ||= obj.retrieve + + if ret == :absent + ret = nil + end + end + + unless ret + case name + when :command + devfail "No command, somehow" + when :special + # nothing + else + #ret = (self.class.validproperty?(name).default || "*").to_s + ret = "*" + end + end + + ret + end +end + + + diff --git a/mcollective/lib/puppet/type/exec.rb b/mcollective/lib/puppet/type/exec.rb new file mode 100755 index 000000000..be0ece023 --- /dev/null +++ b/mcollective/lib/puppet/type/exec.rb @@ -0,0 +1,514 @@ +module Puppet + newtype(:exec) do + include Puppet::Util::Execution + require 'timeout' + + @doc = "Executes external commands. It is critical that all commands + executed using this mechanism can be run multiple times without + harm, i.e., they are *idempotent*. One useful way to create idempotent + commands is to use the checks like `creates` to avoid running the + command unless some condition is met. + + Note that you can restrict an `exec` to only run when it receives + events by using the `refreshonly` parameter; this is a useful way to + have your configuration respond to events with arbitrary commands. + + Note also that if an `exec` receives an event from another resource, + it will get executed again (or execute the command specified in `refresh`, if there is one). + + There is a strong tendency to use `exec` to do whatever work Puppet + can't already do; while this is obviously acceptable (and unavoidable) + in the short term, it is highly recommended to migrate work from `exec` + to native Puppet types as quickly as possible. If you find that + you are doing a lot of work with `exec`, please at least notify + us at Puppet Labs what you are doing, and hopefully we can work with + you to get a native resource type for the work you are doing. + + **Autorequires:** If Puppet is managing an exec's cwd or the executable file used in an exec's command, the exec resource will autorequire those files. If Puppet is managing the user that an exec should run as, the exec resource will autorequire that user." + + # Create a new check mechanism. It's basically just a parameter that + # provides one extra 'check' method. + def self.newcheck(name, options = {}, &block) + @checks ||= {} + + check = newparam(name, options, &block) + @checks[name] = check + end + + def self.checks + @checks.keys + end + + newproperty(:returns, :array_matching => :all, :event => :executed_command) do |property| + include Puppet::Util::Execution + munge do |value| + value.to_s + end + + def event_name + :executed_command + end + + defaultto "0" + + attr_reader :output + desc "The expected return code(s). An error will be returned if the + executed command returns something else. Defaults to 0. Can be + specified as an array of acceptable return codes or a single value." + + # Make output a bit prettier + def change_to_s(currentvalue, newvalue) + "executed successfully" + end + + # First verify that all of our checks pass. 
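+ # In practice: retrieve below returns :notrun whenever every check
+ # parameter (creates, onlyif, unless, refreshonly, ...) agrees the command
+ # should run; otherwise it returns the desired value so the property looks
+ # in sync and sync() is never invoked.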
+ def retrieve + # We need to return :notrun to trigger evaluation; when that isn't + # true, we *LIE* about what happened and return a "success" for the + # value, which causes us to be treated as in_sync?, which means we + # don't actually execute anything. I think. --daniel 2011-03-10 + if @resource.check_all_attributes + return :notrun + else + return self.should + end + end + + # Actually execute the command. + def sync + olddir = nil + + # We need a dir to change to, even if it's just the cwd + dir = self.resource[:cwd] || Dir.pwd + + event = :executed_command + tries = self.resource[:tries] + try_sleep = self.resource[:try_sleep] + + begin + tries.times do |try| + # Only add debug messages for tries > 1 to reduce log spam. + debug("Exec try #{try+1}/#{tries}") if tries > 1 + @output, @status = provider.run(self.resource[:command]) + break if self.should.include?(@status.exitstatus.to_s) + if try_sleep > 0 and tries > 1 + debug("Sleeping for #{try_sleep} seconds between tries") + sleep try_sleep + end + end + rescue Timeout::Error + self.fail "Command exceeded timeout" % value.inspect + end + + if log = @resource[:logoutput] + case log + when :true + log = @resource[:loglevel] + when :on_failure + unless self.should.include?(@status.exitstatus.to_s) + log = @resource[:loglevel] + else + log = :false + end + end + unless log == :false + @output.split(/\n/).each { |line| + self.send(log, line) + } + end + end + + unless self.should.include?(@status.exitstatus.to_s) + self.fail("#{self.resource[:command]} returned #{@status.exitstatus} instead of one of [#{self.should.join(",")}]") + end + + event + end + end + + newparam(:command) do + isnamevar + desc "The actual command to execute. Must either be fully qualified + or a search path for the command must be provided. If the command + succeeds, any output produced will be logged at the instance's + normal log level (usually `notice`), but if the command fails + (meaning its return code does not match the specified code) then + any output is logged at the `err` log level." + end + + newparam(:path) do + desc "The search path used for command execution. + Commands must be fully qualified if no path is specified. Paths + can be specified as an array or as a colon separated list." + + # Support both arrays and colon-separated fields. + def value=(*values) + @value = values.flatten.collect { |val| + if val =~ /;/ # recognize semi-colon separated paths + val.split(";") + elsif val =~ /^\w:[^:]*$/ # heuristic to avoid splitting a driveletter away + val + else + val.split(":") + end + }.flatten + end + end + + newparam(:user) do + desc "The user to run the command as. Note that if you + use this then any error output is not currently captured. This + is because of a bug within Ruby. If you are using Puppet to + create this user, the exec will automatically require the user, + as long as it is specified by name." + + # Most validation is handled by the SUIDManager class. + validate do |user| + self.fail "Only root can execute commands as other users" unless Puppet.features.root? + end + end + + newparam(:group) do + desc "The group to run the command as. This seems to work quite + haphazardly on different platforms -- it is a platform issue + not a Ruby or Puppet one, since the same variety exists when + running commnands as different users in the shell." + # Validation is handled by the SUIDManager class. + end + + newparam(:cwd, :parent => Puppet::Parameter::Path) do + desc "The directory from which to run the command. 
If + this directory does not exist, the command will fail." + end + + newparam(:logoutput) do + desc "Whether to log output. Defaults to logging output at the + loglevel for the `exec` resource. Use *on_failure* to only + log the output when the command reports an error. Values are + **true**, *false*, *on_failure*, and any legal log level." + + newvalues(:true, :false, :on_failure) + end + + newparam(:refresh) do + desc "How to refresh this command. By default, the exec is just + called again when it receives an event from another resource, + but this parameter allows you to define a different command + for refreshing." + + validate do |command| + provider.validatecmd(command) + end + end + + newparam(:env) do + desc "This parameter is deprecated. Use 'environment' instead." + + munge do |value| + warning "'env' is deprecated on exec; use 'environment' instead." + resource[:environment] = value + end + end + + newparam(:environment) do + desc "Any additional environment variables you want to set for a + command. Note that if you use this to set PATH, it will override + the `path` attribute. Multiple environment variables should be + specified as an array." + + validate do |values| + values = [values] unless values.is_a? Array + values.each do |value| + unless value =~ /\w+=/ + raise ArgumentError, "Invalid environment setting '#{value}'" + end + end + end + end + + newparam(:timeout) do + desc "The maximum time the command should take. If the command takes + longer than the timeout, the command is considered to have failed + and will be stopped. Use 0 to disable the timeout. + The time is specified in seconds." + + munge do |value| + value = value.shift if value.is_a?(Array) + begin + value = Float(value) + rescue ArgumentError => e + raise ArgumentError, "The timeout must be a number." + end + [value, 0.0].max + end + + defaultto 300 + end + + newparam(:tries) do + desc "The number of times execution of the command should be tried. + Defaults to '1'. This many attempts will be made to execute + the command until an acceptable return code is returned. + Note that the timeout paramater applies to each try rather than + to the complete set of tries." + + munge do |value| + if value.is_a?(String) + unless value =~ /^[\d]+$/ + raise ArgumentError, "Tries must be an integer" + end + value = Integer(value) + end + raise ArgumentError, "Tries must be an integer >= 1" if value < 1 + value + end + + defaultto 1 + end + + newparam(:try_sleep) do + desc "The time to sleep in seconds between 'tries'." + + munge do |value| + if value.is_a?(String) + unless value =~ /^[-\d.]+$/ + raise ArgumentError, "try_sleep must be a number" + end + value = Float(value) + end + raise ArgumentError, "try_sleep cannot be a negative number" if value < 0 + value + end + + defaultto 0 + end + + + newcheck(:refreshonly) do + desc "The command should only be run as a + refresh mechanism for when a dependent object is changed. It only + makes sense to use this option when this command depends on some + other object; it is useful for triggering an action: + + # Pull down the main aliases file + file { \"/etc/aliases\": + source => \"puppet://server/module/aliases\" + } + + # Rebuild the database, but only when the file changes + exec { newaliases: + path => [\"/usr/bin\", \"/usr/sbin\"], + subscribe => File[\"/etc/aliases\"], + refreshonly => true + } + + Note that only `subscribe` and `notify` can trigger actions, not `require`, + so it only makes sense to use `refreshonly` with `subscribe` or `notify`." 
+ + newvalues(:true, :false) + + # We always fail this test, because we're only supposed to run + # on refresh. + def check(value) + # We have to invert the values. + if value == :true + false + else + true + end + end + end + + newcheck(:creates, :parent => Puppet::Parameter::Path) do + desc "A file that this command creates. If this + parameter is provided, then the command will only be run + if the specified file does not exist: + + exec { \"tar xf /my/tar/file.tar\": + cwd => \"/var/tmp\", + creates => \"/var/tmp/myfile\", + path => [\"/usr/bin\", \"/usr/sbin\"] + } + + " + + accept_arrays + + # If the file exists, return false (i.e., don't run the command), + # else return true + def check(value) + ! FileTest.exists?(value) + end + end + + newcheck(:unless) do + desc "If this parameter is set, then this `exec` will run unless + the command returns 0. For example: + + exec { \"/bin/echo root >> /usr/lib/cron/cron.allow\": + path => \"/usr/bin:/usr/sbin:/bin\", + unless => \"grep root /usr/lib/cron/cron.allow 2>/dev/null\" + } + + This would add `root` to the cron.allow file (on Solaris) unless + `grep` determines it's already there. + + Note that this command follows the same rules as the main command, + which is to say that it must be fully qualified if the path is not set. + " + + validate do |cmds| + cmds = [cmds] unless cmds.is_a? Array + + cmds.each do |command| + provider.validatecmd(command) + end + end + + # Return true if the command does not return 0. + def check(value) + begin + output, status = provider.run(value, true) + rescue Timeout::Error + err "Check #{value.inspect} exceeded timeout" + return false + end + + status.exitstatus != 0 + end + end + + newcheck(:onlyif) do + desc "If this parameter is set, then this `exec` will only run if + the command returns 0. For example: + + exec { \"logrotate\": + path => \"/usr/bin:/usr/sbin:/bin\", + onlyif => \"test `du /var/log/messages | cut -f1` -gt 100000\" + } + + This would run `logrotate` only if that test returned true. + + Note that this command follows the same rules as the main command, + which is to say that it must be fully qualified if the path is not set. + + Also note that onlyif can take an array as its value, e.g.: + + onlyif => [\"test -f /tmp/file1\", \"test -f /tmp/file2\"] + + This will only run the exec if /all/ conditions in the array return true. + " + + validate do |cmds| + cmds = [cmds] unless cmds.is_a? Array + + cmds.each do |command| + provider.validatecmd(command) + end + end + + # Return true if the command returns 0. + def check(value) + begin + output, status = provider.run(value, true) + rescue Timeout::Error + err "Check #{value.inspect} exceeded timeout" + return false + end + + status.exitstatus == 0 + end + end + + # Exec names are not isomorphic with the objects. + @isomorphic = false + + validate do + provider.validatecmd(self[:command]) + end + + # FIXME exec should autorequire any exec that 'creates' our cwd + autorequire(:file) do + reqs = [] + + # Stick the cwd in there if we have it + reqs << self[:cwd] if self[:cwd] + + self[:command].scan(/^(#{File::SEPARATOR}\S+)/) { |str| + reqs << str + } + + self[:command].scan(/^"([^"]+)"/) { |str| + reqs << str + } + + [:onlyif, :unless].each { |param| + next unless tmp = self[param] + + tmp = [tmp] unless tmp.is_a? Array + + tmp.each do |line| + # And search the command line for files, adding any we + # find. This will also catch the command itself if it's + # fully qualified. 
It might not be a bad idea to add + # unqualified files, but, well, that's a bit more annoying + # to do. + reqs += line.scan(%r{(#{File::SEPARATOR}\S+)}) + end + } + + # For some reason, the += isn't causing a flattening + reqs.flatten! + + reqs + end + + autorequire(:user) do + # Autorequire users if they are specified by name + if user = self[:user] and user !~ /^\d+$/ + user + end + end + + def self.instances + [] + end + + # Verify that we pass all of the checks. The argument determines whether + # we skip the :refreshonly check, which is necessary because we now check + # within refresh + def check_all_attributes(refreshing = false) + self.class.checks.each { |check| + next if refreshing and check == :refreshonly + if @parameters.include?(check) + val = @parameters[check].value + val = [val] unless val.is_a? Array + val.each do |value| + return false unless @parameters[check].check(value) + end + end + } + + true + end + + def output + if self.property(:returns).nil? + return nil + else + return self.property(:returns).output + end + end + + # Run the command, or optionally run a separately-specified command. + def refresh + if self.check_all_attributes(true) + if cmd = self[:refresh] + provider.run(cmd) + else + self.property(:returns).sync + end + end + end + end +end diff --git a/mcollective/lib/puppet/type/file.rb b/mcollective/lib/puppet/type/file.rb new file mode 100644 index 000000000..4ac436ca0 --- /dev/null +++ b/mcollective/lib/puppet/type/file.rb @@ -0,0 +1,803 @@ +require 'digest/md5' +require 'cgi' +require 'etc' +require 'uri' +require 'fileutils' +require 'puppet/network/handler' +require 'puppet/util/diff' +require 'puppet/util/checksums' +require 'puppet/util/backups' + +Puppet::Type.newtype(:file) do + include Puppet::Util::MethodHelper + include Puppet::Util::Checksums + include Puppet::Util::Backups + @doc = "Manages local files, including setting ownership and + permissions, creation of both files and directories, and + retrieving entire files from remote servers. As Puppet matures, it + expected that the `file` resource will be used less and less to + manage content, and instead native resources will be used to do so. + + If you find that you are often copying files in from a central + location, rather than using native resources, please contact + Puppet Labs and we can hopefully work with you to develop a + native resource to support what you are doing. + + **Autorequires:** If Puppet is managing the user or group that owns a file, the file resource will autorequire them. If Puppet is managing any parent directories of a file, the file resource will autorequire them." + + def self.title_patterns + [ [ /^(.*?)\/*\Z/m, [ [ :path, lambda{|x| x} ] ] ] ] + end + + newparam(:path) do + desc "The path to the file to manage. Must be fully qualified." + isnamevar + + validate do |value| + # accept various path syntaxes: lone slash, posix, win32, unc + unless (Puppet.features.posix? and value =~ /^\//) or (Puppet.features.microsoft_windows? and (value =~ /^.:\// or value =~ /^\/\/[^\/]+\/[^\/]+/)) + fail Puppet::Error, "File paths must be fully qualified, not '#{value}'" + end + end + + # convert the current path in an index into the collection and the last + # path name. 
The aim is to use less storage for all common paths in a hierarchy + munge do |value| + path, name = File.split(value.gsub(/\/+/,'/')) + { :index => Puppet::FileCollection.collection.index(path), :name => name } + end + + # and the reverse + unmunge do |value| + basedir = Puppet::FileCollection.collection.path(value[:index]) + # a lone slash as :name indicates a root dir on windows + if value[:name] == '/' + basedir + else + File.join( basedir, value[:name] ) + end + end + end + + newparam(:backup) do + desc "Whether files should be backed up before + being replaced. The preferred method of backing files up is via + a `filebucket`, which stores files by their MD5 sums and allows + easy retrieval without littering directories with backups. You + can specify a local filebucket or a network-accessible + server-based filebucket by setting `backup => bucket-name`. + Alternatively, if you specify any value that begins with a `.` + (e.g., `.puppet-bak`), then Puppet will use copy the file in + the same directory with that value as the extension of the + backup. Setting `backup => false` disables all backups of the + file in question. + + Puppet automatically creates a local filebucket named `puppet` and + defaults to backing up there. To use a server-based filebucket, + you must specify one in your configuration + + filebucket { main: + server => puppet + } + + The `puppet master` daemon creates a filebucket by default, + so you can usually back up to your main server with this + configuration. Once you've described the bucket in your + configuration, you can use it in any file + + file { \"/my/file\": + source => \"/path/in/nfs/or/something\", + backup => main + } + + This will back the file up to the central server. + + At this point, the benefits of using a filebucket are that you do not + have backup files lying around on each of your machines, a given + version of a file is only backed up once, and you can restore + any given file manually, no matter how old. Eventually, + transactional support will be able to automatically restore + filebucketed files. + " + + defaultto "puppet" + + munge do |value| + # I don't really know how this is happening. + value = value.shift if value.is_a?(Array) + + case value + when false, "false", :false + false + when true, "true", ".puppet-bak", :true + ".puppet-bak" + when String + value + else + self.fail "Invalid backup type #{value.inspect}" + end + end + end + + newparam(:recurse) do + desc "Whether and how deeply to do recursive + management. Options are: + + * `inf,true` --- Regular style recursion on both remote and local + directory structure. + * `remote` --- Descends recursively into the remote directory + but not the local directory. Allows copying of + a few files into a directory containing many + unmanaged files without scanning all the local files. + * `false` --- Default of no recursion. + * `[0-9]+` --- Same as true, but limit recursion. Warning: this syntax + has been deprecated in favor of the `recurselimit` attribute. + " + + newvalues(:true, :false, :inf, :remote, /^[0-9]+$/) + + # Replace the validation so that we allow numbers in + # addition to string representations of them. 
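Because numeric values for recurse are deprecated in favour of recurselimit (the munge below emits the deprecation warning), recursive management is now written roughly as in this sketch; the directory and module names are placeholders:

    file { '/etc/nginx/conf.d':
      ensure       => directory,
      source       => 'puppet:///modules/nginx/conf.d',
      recurse      => remote,   # copy the remote tree without managing unrelated local files
      recurselimit => 2,        # descend at most two directory levels
    }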
+ validate { |arg| } + munge do |value| + newval = super(value) + case newval + when :true, :inf; true + when :false; false + when :remote; :remote + when Integer, Fixnum, Bignum + self.warning "Setting recursion depth with the recurse parameter is now deprecated, please use recurselimit" + + # recurse == 0 means no recursion + return false if value == 0 + + resource[:recurselimit] = value + true + when /^\d+$/ + self.warning "Setting recursion depth with the recurse parameter is now deprecated, please use recurselimit" + value = Integer(value) + + # recurse == 0 means no recursion + return false if value == 0 + + resource[:recurselimit] = value + true + else + self.fail "Invalid recurse value #{value.inspect}" + end + end + end + + newparam(:recurselimit) do + desc "How deeply to do recursive management." + + newvalues(/^[0-9]+$/) + + munge do |value| + newval = super(value) + case newval + when Integer, Fixnum, Bignum; value + when /^\d+$/; Integer(value) + else + self.fail "Invalid recurselimit value #{value.inspect}" + end + end + end + + newparam(:replace, :boolean => true) do + desc "Whether or not to replace a file that is + sourced but exists. This is useful for using file sources + purely for initialization." + newvalues(:true, :false) + aliasvalue(:yes, :true) + aliasvalue(:no, :false) + defaultto :true + end + + newparam(:force, :boolean => true) do + desc "Force the file operation. Currently only used when replacing + directories with links." + newvalues(:true, :false) + defaultto false + end + + newparam(:ignore) do + desc "A parameter which omits action on files matching + specified patterns during recursion. Uses Ruby's builtin globbing + engine, so shell metacharacters are fully supported, e.g. `[a-z]*`. + Matches that would descend into the directory structure are ignored, + e.g., `*/*`." + + validate do |value| + unless value.is_a?(Array) or value.is_a?(String) or value == false + self.devfail "Ignore must be a string or an Array" + end + end + end + + newparam(:links) do + desc "How to handle links during file actions. During file copying, + `follow` will copy the target file instead of the link, `manage` + will copy the link itself, and `ignore` will just pass it by. + When not copying, `manage` and `ignore` behave equivalently + (because you cannot really ignore links entirely during local recursion), and `follow` will manage the file to which the + link points." + + newvalues(:follow, :manage) + + defaultto :manage + end + + newparam(:purge, :boolean => true) do + desc "Whether unmanaged files should be purged. If you have a filebucket + configured the purged files will be uploaded, but if you do not, + this will destroy data. Only use this option for generated + files unless you really know what you are doing. This option only + makes sense when recursively managing directories. + + Note that when using `purge` with `source`, Puppet will purge any files + that are not on the remote system." + + defaultto :false + + newvalues(:true, :false) + end + + newparam(:sourceselect) do + desc "Whether to copy all valid sources, or just the first one. This parameter + is only used in recursive copies; by default, the first valid source is the + only one used as a recursive source, but if this parameter is set to `all`, + then all valid sources will have all of their contents copied to the local host, + and for sources that have the same file, the source earlier in the list will + be used." 
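To illustrate the sourceselect behaviour described above: with `all`, every valid source tree is copied, and where two sources provide the same file the one listed first wins. A hedged sketch with invented module paths:

    file { '/etc/profile.d':
      ensure       => directory,
      recurse      => true,
      sourceselect => all,
      source       => [
        'puppet:///modules/site/profile.d',   # entries here win on conflicts
        'puppet:///modules/base/profile.d',
      ],
    }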
+ + defaultto :first + + newvalues(:first, :all) + end + + # Autorequire any parent directories. + autorequire(:file) do + basedir = File.dirname(self[:path]) + if basedir != self[:path] + basedir + else + nil + end + end + + # Autorequire the owner and group of the file. + {:user => :owner, :group => :group}.each do |type, property| + autorequire(type) do + if @parameters.include?(property) + # The user/group property automatically converts to IDs + next unless should = @parameters[property].shouldorig + val = should[0] + if val.is_a?(Integer) or val =~ /^\d+$/ + nil + else + val + end + end + end + end + + CREATORS = [:content, :source, :target] + SOURCE_ONLY_CHECKSUMS = [:none, :ctime, :mtime] + + validate do + creator_count = 0 + CREATORS.each do |param| + creator_count += 1 if self.should(param) + end + creator_count += 1 if @parameters.include?(:source) + self.fail "You cannot specify more than one of #{CREATORS.collect { |p| p.to_s}.join(", ")}" if creator_count > 1 + + self.fail "You cannot specify a remote recursion without a source" if !self[:source] and self[:recurse] == :remote + + self.fail "You cannot specify source when using checksum 'none'" if self[:checksum] == :none && !self[:source].nil? + + SOURCE_ONLY_CHECKSUMS.each do |checksum_type| + self.fail "You cannot specify content when using checksum '#{checksum_type}'" if self[:checksum] == checksum_type && !self[:content].nil? + end + + self.warning "Possible error: recurselimit is set but not recurse, no recursion will happen" if !self[:recurse] and self[:recurselimit] + end + + def self.[](path) + return nil unless path + super(path.gsub(/\/+/, '/').sub(/\/$/, '')) + end + + def self.instances + return [] + end + + @depthfirst = false + + # Determine the user to write files as. + def asuser + if self.should(:owner) and ! self.should(:owner).is_a?(Symbol) + writeable = Puppet::Util::SUIDManager.asuser(self.should(:owner)) { + FileTest.writable?(File.dirname(self[:path])) + } + + # If the parent directory is writeable, then we execute + # as the user in question. Otherwise we'll rely on + # the 'owner' property to do things. + asuser = self.should(:owner) if writeable + end + + asuser + end + + def bucket + return @bucket if @bucket + + backup = self[:backup] + return nil unless backup + return nil if backup =~ /^\./ + + unless catalog or backup == "puppet" + fail "Can not find filebucket for backups without a catalog" + end + + unless catalog and filebucket = catalog.resource(:filebucket, backup) or backup == "puppet" + fail "Could not find filebucket #{backup} specified in backup" + end + + return default_bucket unless filebucket + + @bucket = filebucket.bucket + + @bucket + end + + def default_bucket + Puppet::Type.type(:filebucket).mkdefaultbucket.bucket + end + + # Does the file currently exist? Just checks for whether + # we have a stat + def exist? + stat ? true : false + end + + # We have to do some extra finishing, to retrieve our bucket if + # there is one. + def finish + # Look up our bucket, if there is one + bucket + super + end + + # Create any children via recursion or whatever. + def eval_generate + return [] unless self.recurse? + + recurse + #recurse.reject do |resource| + # catalog.resource(:file, resource[:path]) + #end.each do |child| + # catalog.add_resource child + # catalog.relationship_graph.add_edge self, child + #end + end + + def flush + # We want to make sure we retrieve metadata anew on each transaction. 
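The validate block above rejects resources that set more than one of content, source and target, so a manifest must pick a single content-creating attribute; for example (path and text are placeholders):

    file { '/etc/motd':
      ensure  => file,
      content => "Managed by Puppet\n",   # use either content or source, never both
    }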
+ @parameters.each do |name, param| + param.flush if param.respond_to?(:flush) + end + @stat = nil + end + + def initialize(hash) + # Used for caching clients + @clients = {} + + super + + # If they've specified a source, we get our 'should' values + # from it. + unless self[:ensure] + if self[:target] + self[:ensure] = :symlink + elsif self[:content] + self[:ensure] = :file + end + end + + @stat = nil + end + + # Configure discovered resources to be purged. + def mark_children_for_purging(children) + children.each do |name, child| + next if child[:source] + child[:ensure] = :absent + end + end + + # Create a new file or directory object as a child to the current + # object. + def newchild(path) + full_path = File.join(self[:path], path) + + # Add some new values to our original arguments -- these are the ones + # set at initialization. We specifically want to exclude any param + # values set by the :source property or any default values. + # LAK:NOTE This is kind of silly, because the whole point here is that + # the values set at initialization should live as long as the resource + # but values set by default or by :source should only live for the transaction + # or so. Unfortunately, we don't have a straightforward way to manage + # the different lifetimes of this data, so we kludge it like this. + # The right-side hash wins in the merge. + options = @original_parameters.merge(:path => full_path).reject { |param, value| value.nil? } + + # These should never be passed to our children. + [:parent, :ensure, :recurse, :recurselimit, :target, :alias, :source].each do |param| + options.delete(param) if options.include?(param) + end + + self.class.new(options) + end + + # Files handle paths specially, because they just lengthen their + # path names, rather than including the full parent's title each + # time. + def pathbuilder + # We specifically need to call the method here, so it looks + # up our parent in the catalog graph. + if parent = parent() + # We only need to behave specially when our parent is also + # a file + if parent.is_a?(self.class) + # Remove the parent file name + list = parent.pathbuilder + list.pop # remove the parent's path info + return list << self.ref + else + return super + end + else + return [self.ref] + end + end + + # Should we be purging? + def purge? + @parameters.include?(:purge) and (self[:purge] == :true or self[:purge] == "true") + end + + # Recursively generate a list of file resources, which will + # be used to copy remote files, manage local files, and/or make links + # to map to another directory. + def recurse + children = {} + children = recurse_local if self[:recurse] != :remote + + if self[:target] + recurse_link(children) + elsif self[:source] + recurse_remote(children) + end + + # If we're purging resources, then delete any resource that isn't on the + # remote system. + mark_children_for_purging(children) if self.purge? + + result = children.values.sort { |a, b| a[:path] <=> b[:path] } + remove_less_specific_files(result) + end + + # This is to fix bug #2296, where two files recurse over the same + # set of files. It's a rare case, and when it does happen you're + # not likely to have many actual conflicts, which is good, because + # this is a pretty inefficient implementation. + def remove_less_specific_files(files) + mypath = self[:path].split(File::Separator) + other_paths = catalog.vertices. + select { |r| r.is_a?(self.class) and r[:path] != self[:path] }. + collect { |r| r[:path].split(File::Separator) }. 
+ select { |p| p[0,mypath.length] == mypath } + + return files if other_paths.empty? + + files.reject { |file| + path = file[:path].split(File::Separator) + other_paths.any? { |p| path[0,p.length] == p } + } + end + + # A simple method for determining whether we should be recursing. + def recurse? + return false unless @parameters.include?(:recurse) + + val = @parameters[:recurse].value + + !!(val and (val == true or val == :remote)) + end + + # Recurse the target of the link. + def recurse_link(children) + perform_recursion(self[:target]).each do |meta| + if meta.relative_path == "." + self[:ensure] = :directory + next + end + + children[meta.relative_path] ||= newchild(meta.relative_path) + if meta.ftype == "directory" + children[meta.relative_path][:ensure] = :directory + else + children[meta.relative_path][:ensure] = :link + children[meta.relative_path][:target] = meta.full_path + end + end + children + end + + # Recurse the file itself, returning a Metadata instance for every found file. + def recurse_local + result = perform_recursion(self[:path]) + return {} unless result + result.inject({}) do |hash, meta| + next hash if meta.relative_path == "." + + hash[meta.relative_path] = newchild(meta.relative_path) + hash + end + end + + # Recurse against our remote file. + def recurse_remote(children) + sourceselect = self[:sourceselect] + + total = self[:source].collect do |source| + next unless result = perform_recursion(source) + return if top = result.find { |r| r.relative_path == "." } and top.ftype != "directory" + result.each { |data| data.source = "#{source}/#{data.relative_path}" } + break result if result and ! result.empty? and sourceselect == :first + result + end.flatten + + # This only happens if we have sourceselect == :all + unless sourceselect == :first + found = [] + total.reject! do |data| + result = found.include?(data.relative_path) + found << data.relative_path unless found.include?(data.relative_path) + result + end + end + + total.each do |meta| + if meta.relative_path == "." + parameter(:source).metadata = meta + next + end + children[meta.relative_path] ||= newchild(meta.relative_path) + children[meta.relative_path][:source] = meta.source + children[meta.relative_path][:checksum] = :md5 if meta.ftype == "file" + + children[meta.relative_path].parameter(:source).metadata = meta + end + + children + end + + def perform_recursion(path) + + Puppet::FileServing::Metadata.search( + + path, + :links => self[:links], + :recurse => (self[:recurse] == :remote ? true : self[:recurse]), + + :recurselimit => self[:recurselimit], + :ignore => self[:ignore], + :checksum_type => (self[:source] || self[:content]) ? self[:checksum] : :none + ) + end + + # Remove any existing data. This is only used when dealing with + # links or directories. 
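recurse_remote and mark_children_for_purging above are what make the combination of source, recurse and purge work: children found locally but absent from the source are marked absent. A hedged usage sketch with placeholder paths:

    file { '/etc/yum.repos.d':
      ensure  => directory,
      source  => 'puppet:///modules/repos/yum.repos.d',
      recurse => true,
      purge   => true,   # remove local files that the source does not provide
    }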
+ def remove_existing(should) + return unless s = stat + + self.fail "Could not back up; will not replace" unless perform_backup + + unless should.to_s == "link" + return if s.ftype.to_s == should.to_s + end + + case s.ftype + when "directory" + if self[:force] == :true + debug "Removing existing directory for replacement with #{should}" + FileUtils.rmtree(self[:path]) + else + notice "Not removing directory; use 'force' to override" + end + when "link", "file" + debug "Removing existing #{s.ftype} for replacement with #{should}" + File.unlink(self[:path]) + else + self.fail "Could not back up files of type #{s.ftype}" + end + expire + end + + def retrieve + if source = parameter(:source) + source.copy_source_values + end + super + end + + # Set the checksum, from another property. There are multiple + # properties that modify the contents of a file, and they need the + # ability to make sure that the checksum value is in sync. + def setchecksum(sum = nil) + if @parameters.include? :checksum + if sum + @parameters[:checksum].checksum = sum + else + # If they didn't pass in a sum, then tell checksum to + # figure it out. + currentvalue = @parameters[:checksum].retrieve + @parameters[:checksum].checksum = currentvalue + end + end + end + + # Should this thing be a normal file? This is a relatively complex + # way of determining whether we're trying to create a normal file, + # and it's here so that the logic isn't visible in the content property. + def should_be_file? + return true if self[:ensure] == :file + + # I.e., it's set to something like "directory" + return false if e = self[:ensure] and e != :present + + # The user doesn't really care, apparently + if self[:ensure] == :present + return true unless s = stat + return(s.ftype == "file" ? true : false) + end + + # If we've gotten here, then :ensure isn't set + return true if self[:content] + return true if stat and stat.ftype == "file" + false + end + + # Stat our file. Depending on the value of the 'links' attribute, we + # use either 'stat' or 'lstat', and we expect the properties to use the + # resulting stat object accordingly (mostly by testing the 'ftype' + # value). + cached_attr(:stat) do + method = :stat + + # Files are the only types that support links + if (self.class.name == :file and self[:links] != :follow) or self.class.name == :tidy + method = :lstat + end + path = self[:path] + + begin + File.send(method, self[:path]) + rescue Errno::ENOENT => error + return nil + rescue Errno::EACCES => error + warning "Could not stat; permission denied" + return nil + end + end + + # We have to hack this just a little bit, because otherwise we'll get + # an error when the target and the contents are created as properties on + # the far side. + def to_trans(retrieve = true) + obj = super + obj.delete(:target) if obj[:target] == :notlink + obj + end + + # Write out the file. Requires the property name for logging. + # Write will be done by the content property, along with checksum computation + def write(property) + remove_existing(:file) + + use_temporary_file = write_temporary_file? + if use_temporary_file + path = "#{self[:path]}.puppettmp_#{rand(10000)}" + path = "#{self[:path]}.puppettmp_#{rand(10000)}" while File.exists?(path) or File.symlink?(path) + else + path = self[:path] + end + + mode = self.should(:mode) # might be nil + umask = mode ? 000 : 022 + mode_int = mode ? 
mode.to_i(8) : nil + + content_checksum = Puppet::Util.withumask(umask) { File.open(path, 'w', mode_int ) { |f| write_content(f) } } + + # And put our new file in place + if use_temporary_file # This is only not true when our file is empty. + begin + fail_if_checksum_is_wrong(path, content_checksum) if validate_checksum? + File.rename(path, self[:path]) + rescue => detail + fail "Could not rename temporary file #{path} to #{self[:path]}: #{detail}" + ensure + # Make sure the created file gets removed + File.unlink(path) if FileTest.exists?(path) + end + end + + # make sure all of the modes are actually correct + property_fix + + end + + private + + # Should we validate the checksum of the file we're writing? + def validate_checksum? + self[:checksum] !~ /time/ + end + + # Make sure the file we wrote out is what we think it is. + def fail_if_checksum_is_wrong(path, content_checksum) + newsum = parameter(:checksum).sum_file(path) + return if [:absent, nil, content_checksum].include?(newsum) + + self.fail "File written to disk did not match checksum; discarding changes (#{content_checksum} vs #{newsum})" + end + + # write the current content. Note that if there is no content property + # simply opening the file with 'w' as done in write is enough to truncate + # or write an empty length file. + def write_content(file) + (content = property(:content)) && content.write(file) + end + + private + + def write_temporary_file? + # unfortunately we don't know the source file size before fetching it + # so let's assume the file won't be empty + (c = property(:content) and c.length) || (s = @parameters[:source] and 1) + end + + # There are some cases where all of the work does not get done on + # file creation/modification, so we have to do some extra checking. + def property_fix + properties.each do |thing| + next unless [:mode, :owner, :group, :seluser, :selrole, :seltype, :selrange].include?(thing.name) + + # Make sure we get a new stat objct + expire + currentvalue = thing.retrieve + thing.sync unless thing.safe_insync?(currentvalue) + end + end +end + +# We put all of the properties in separate files, because there are so many +# of them. The order these are loaded is important, because it determines +# the order they are in the property lit. +require 'puppet/type/file/checksum' +require 'puppet/type/file/content' # can create the file +require 'puppet/type/file/source' # can create the file +require 'puppet/type/file/target' # creates a different type of file +require 'puppet/type/file/ensure' # can create the file +require 'puppet/type/file/owner' +require 'puppet/type/file/group' +require 'puppet/type/file/mode' +require 'puppet/type/file/type' +require 'puppet/type/file/selcontext' # SELinux file context +require 'puppet/type/file/ctime' +require 'puppet/type/file/mtime' diff --git a/mcollective/lib/puppet/type/file/checksum.rb b/mcollective/lib/puppet/type/file/checksum.rb new file mode 100755 index 000000000..5586b1383 --- /dev/null +++ b/mcollective/lib/puppet/type/file/checksum.rb @@ -0,0 +1,33 @@ +require 'puppet/util/checksums' + +# Specify which checksum algorithm to use when checksumming +# files. +Puppet::Type.type(:file).newparam(:checksum) do + include Puppet::Util::Checksums + + desc "The checksum type to use when checksumming a file. + + The default checksum parameter, if checksums are enabled, is md5." 
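Since the SOURCE_ONLY_CHECKSUMS validation in the file type forbids combining content with the ctime, mtime or none checksums, the cheaper time-based checksums are only useful together with source; a sketch with an invented path:

    file { '/opt/data/huge.img':
      source   => 'puppet:///modules/data/huge.img',
      checksum => mtime,   # skip md5-hashing a very large file on every run
    }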
+ + newvalues "md5", "md5lite", "mtime", "ctime", "none" + + defaultto :md5 + + def sum(content) + type = value || :md5 # because this might be called before defaults are set + "{#{type}}" + send(type, content) + end + + def sum_file(path) + type = value || :md5 # because this might be called before defaults are set + method = type.to_s + "_file" + "{#{type}}" + send(method, path).to_s + end + + def sum_stream(&block) + type = value || :md5 # same comment as above + method = type.to_s + "_stream" + checksum = send(method, &block) + "{#{type}}#{checksum}" + end +end diff --git a/mcollective/lib/puppet/type/file/content.rb b/mcollective/lib/puppet/type/file/content.rb new file mode 100755 index 000000000..827183213 --- /dev/null +++ b/mcollective/lib/puppet/type/file/content.rb @@ -0,0 +1,221 @@ +require 'net/http' +require 'uri' +require 'tempfile' + +require 'puppet/util/checksums' +require 'puppet/network/http/api/v1' +require 'puppet/network/http/compression' + +module Puppet + Puppet::Type.type(:file).newproperty(:content) do + include Puppet::Util::Diff + include Puppet::Util::Checksums + include Puppet::Network::HTTP::API::V1 + include Puppet::Network::HTTP::Compression.module + + attr_reader :actual_content + + desc "Specify the contents of a file as a string. Newlines, tabs, and + spaces can be specified using the escaped syntax (e.g., \\n for a newline). The primary purpose of this parameter is to provide a + kind of limited templating: + + define resolve(nameserver1, nameserver2, domain, search) { + $str = \"search $search + domain $domain + nameserver $nameserver1 + nameserver $nameserver2 + \" + + file { \"/etc/resolv.conf\": + content => $str + } + } + + This attribute is especially useful when used with templating." + + # Store a checksum as the value, rather than the actual content. + # Simplifies everything. + munge do |value| + if value == :absent + value + elsif checksum?(value) + # XXX This is potentially dangerous because it means users can't write a file whose + # entire contents are a plain checksum + value + else + @actual_content = value + resource.parameter(:checksum).sum(value) + end + end + + # Checksums need to invert how changes are printed. + def change_to_s(currentvalue, newvalue) + # Our "new" checksum value is provided by the source. + if source = resource.parameter(:source) and tmp = source.checksum + newvalue = tmp + end + if currentvalue == :absent + return "defined content as '#{newvalue}'" + elsif newvalue == :absent + return "undefined content from '#{currentvalue}'" + else + return "content changed '#{currentvalue}' to '#{newvalue}'" + end + end + + def checksum_type + if source = resource.parameter(:source) + result = source.checksum + else checksum = resource.parameter(:checksum) + result = resource[:checksum] + end + if result =~ /^\{(\w+)\}.+/ + return $1.to_sym + else + return result + end + end + + def length + (actual_content and actual_content.length) || 0 + end + + def content + self.should + end + + # Override this method to provide diffs if asked for. + # Also, fix #872: when content is used, and replace is true, the file + # should be insync when it exists + def insync?(is) + if resource.should_be_file? + return false if is == :absent + else + return true + end + + return true if ! @resource.replace? + + result = super + + if ! 
result and Puppet[:show_diff] + write_temporarily do |path| + print diff(@resource[:path], path) + end + end + result + end + + def retrieve + return :absent unless stat = @resource.stat + ftype = stat.ftype + # Don't even try to manage the content on directories or links + return nil if ["directory","link"].include?(ftype) + + begin + resource.parameter(:checksum).sum_file(resource[:path]) + rescue => detail + raise Puppet::Error, "Could not read #{ftype} #{@resource.title}: #{detail}" + end + end + + # Make sure we're also managing the checksum property. + def should=(value) + @resource.newattr(:checksum) unless @resource.parameter(:checksum) + super + end + + # Just write our content out to disk. + def sync + return_event = @resource.stat ? :file_changed : :file_created + + # We're safe not testing for the 'source' if there's no 'should' + # because we wouldn't have gotten this far if there weren't at least + # one valid value somewhere. + @resource.write(:content) + + return_event + end + + def write_temporarily + tempfile = Tempfile.new("puppet-file") + tempfile.open + + write(tempfile) + + tempfile.close + + yield tempfile.path + + tempfile.delete + end + + def write(file) + resource.parameter(:checksum).sum_stream { |sum| + each_chunk_from(actual_content || resource.parameter(:source)) { |chunk| + sum << chunk + file.print chunk + } + } + end + + def self.standalone? + Puppet.settings[:name] == "apply" + end + + # the content is munged so if it's a checksum source_or_content is nil + # unless the checksum indirectly comes from source + def each_chunk_from(source_or_content) + if source_or_content.is_a?(String) + yield source_or_content + elsif content_is_really_a_checksum? && source_or_content.nil? + yield read_file_from_filebucket + elsif source_or_content.nil? + yield '' + elsif self.class.standalone? + yield source_or_content.content + elsif source_or_content.local? + chunk_file_from_disk(source_or_content) { |chunk| yield chunk } + else + chunk_file_from_source(source_or_content) { |chunk| yield chunk } + end + end + + private + + def content_is_really_a_checksum? + checksum?(should) + end + + def chunk_file_from_disk(source_or_content) + File.open(source_or_content.full_path, "r") do |src| + while chunk = src.read(8192) + yield chunk + end + end + end + + def chunk_file_from_source(source_or_content) + request = Puppet::Indirector::Request.new(:file_content, :find, source_or_content.full_path.sub(/^\//,'')) + connection = Puppet::Network::HttpPool.http_instance(source_or_content.server, source_or_content.port) + connection.request_get(indirection2uri(request), add_accept_encoding({"Accept" => "raw"})) do |response| + case response.code + when /^2/; uncompress(response) { |uncompressor| response.read_body { |chunk| yield uncompressor.uncompress(chunk) } } + else + # Raise the http error if we didn't get a 'success' of some kind. + message = "Error #{response.code} on SERVER: #{(response.body||'').empty? ? 
response.message : uncompress_body(response)}" + raise Net::HTTPError.new(message, response) + end + end + end + + def read_file_from_filebucket + raise "Could not get filebucket from file" unless dipper = resource.bucket + sum = should.sub(/\{\w+\}/, '') + + dipper.getfile(sum) + rescue => detail + fail "Could not retrieve content for #{should} from filebucket: #{detail}" + end + end +end diff --git a/mcollective/lib/puppet/type/file/ctime.rb b/mcollective/lib/puppet/type/file/ctime.rb new file mode 100644 index 000000000..24b098703 --- /dev/null +++ b/mcollective/lib/puppet/type/file/ctime.rb @@ -0,0 +1,18 @@ +module Puppet + Puppet::Type.type(:file).newproperty(:ctime) do + desc "A read-only state to check the file ctime." + + def retrieve + current_value = :absent + if stat = @resource.stat(false) + current_value = stat.ctime + end + current_value + end + + validate do + fail "ctime is read-only" + end + end +end + diff --git a/mcollective/lib/puppet/type/file/ensure.rb b/mcollective/lib/puppet/type/file/ensure.rb new file mode 100755 index 000000000..99652ecc6 --- /dev/null +++ b/mcollective/lib/puppet/type/file/ensure.rb @@ -0,0 +1,164 @@ +module Puppet + Puppet::Type.type(:file).ensurable do + require 'etc' + desc "Whether to create files that don't currently exist. + Possible values are *absent*, *present*, *file*, and *directory*. + Specifying `present` will match any form of file existence, and + if the file is missing will create an empty file. Specifying + `absent` will delete the file (and directory if recurse => true). + + Anything other than those values will create a symlink. In the interest of readability and clarity, you should use `ensure => link` and explicitly specify a + target; however, if a `target` attribute isn't provided, the value of the `ensure` + attribute will be used as the symlink target: + + # (Useful on Solaris) + # Less maintainable: + file { \"/etc/inetd.conf\": + ensure => \"/etc/inet/inetd.conf\", + } + + # More maintainable: + file { \"/etc/inetd.conf\": + ensure => link, + target => \"/etc/inet/inetd.conf\", + } + + These two declarations are equivalent." + + # Most 'ensure' properties have a default, but with files we, um, don't. + nodefault + + newvalue(:absent) do + File.unlink(@resource[:path]) + end + + aliasvalue(:false, :absent) + + newvalue(:file, :event => :file_created) do + # Make sure we're not managing the content some other way + if property = @resource.property(:content) + property.sync + else + @resource.write(:ensure) + mode = @resource.should(:mode) + end + end + + #aliasvalue(:present, :file) + newvalue(:present, :event => :file_created) do + # Make a file if they want something, but this will match almost + # anything. + set_file + end + + newvalue(:directory, :event => :directory_created) do + mode = @resource.should(:mode) + parent = File.dirname(@resource[:path]) + unless FileTest.exists? parent + raise Puppet::Error, + "Cannot create #{@resource[:path]}; parent directory #{parent} does not exist" + end + if mode + Puppet::Util.withumask(000) do + Dir.mkdir(@resource[:path], mode.to_i(8)) + end + else + Dir.mkdir(@resource[:path]) + end + @resource.send(:property_fix) + return :directory_created + end + + + newvalue(:link, :event => :link_created) do + fail "Cannot create a symlink without a target" unless property = resource.property(:target) + property.retrieve + property.mklink + end + + # Symlinks. + newvalue(/./) do + # This code never gets executed. 
We need the regex to support + # specifying it, but the work is done in the 'symlink' code block. + end + + munge do |value| + value = super(value) + value,resource[:target] = :link,value unless value.is_a? Symbol + resource[:links] = :manage if value == :link and resource[:links] != :follow + value + end + + def change_to_s(currentvalue, newvalue) + return super unless newvalue.to_s == "file" + + return super unless property = @resource.property(:content) + + # We know that content is out of sync if we're here, because + # it's essentially equivalent to 'ensure' in the transaction. + if source = @resource.parameter(:source) + should = source.checksum + else + should = property.should + end + if should == :absent + is = property.retrieve + else + is = :absent + end + + property.change_to_s(is, should) + end + + # Check that we can actually create anything + def check + basedir = File.dirname(@resource[:path]) + + if ! FileTest.exists?(basedir) + raise Puppet::Error, + "Can not create #{@resource.title}; parent directory does not exist" + elsif ! FileTest.directory?(basedir) + raise Puppet::Error, + "Can not create #{@resource.title}; #{dirname} is not a directory" + end + end + + # We have to treat :present specially, because it works with any + # type of file. + def insync?(currentvalue) + unless currentvalue == :absent or resource.replace? + return true + end + + if self.should == :present + return !(currentvalue.nil? or currentvalue == :absent) + else + return super(currentvalue) + end + end + + def retrieve + if stat = @resource.stat(false) + return stat.ftype.intern + else + if self.should == :false + return :false + else + return :absent + end + end + end + + def sync + @resource.remove_existing(self.should) + if self.should == :absent + return :file_removed + end + + event = super + + event + end + end +end + diff --git a/mcollective/lib/puppet/type/file/group.rb b/mcollective/lib/puppet/type/file/group.rb new file mode 100755 index 000000000..5ed5166bc --- /dev/null +++ b/mcollective/lib/puppet/type/file/group.rb @@ -0,0 +1,113 @@ +require 'puppet/util/posix' + +# Manage file group ownership. +module Puppet + Puppet::Type.type(:file).newproperty(:group) do + include Puppet::Util::POSIX + + require 'etc' + desc "Which group should own the file. Argument can be either group + name or group ID." + @event = :file_changed + + validate do |group| + raise(Puppet::Error, "Invalid group name '#{group.inspect}'") unless group and group != "" + end + + def id2name(id) + return id.to_s if id.is_a?(Symbol) + return nil if id > Puppet[:maximum_uid].to_i + begin + group = Etc.getgrgid(id) + rescue ArgumentError + return nil + end + if group.gid == "" + return nil + else + return group.name + end + end + + # We want to print names, not numbers + def is_to_s(currentvalue) + if currentvalue.is_a? Integer + id2name(currentvalue) || currentvalue + else + return currentvalue.to_s + end + end + + def should_to_s(newvalue = @should) + if newvalue.is_a? Integer + id2name(newvalue) || newvalue + else + return newvalue.to_s + end + end + + def insync?(current) + @should.each do |value| + if value =~ /^\d+$/ + gid = Integer(value) + elsif value.is_a?(String) + fail "Could not find group #{value}" unless gid = gid(value) + else + gid = value + end + + return true if gid == current + end + false + end + + def retrieve + return :absent unless stat = resource.stat(false) + + currentvalue = stat.gid + + # On OS X, files that are owned by -2 get returned as really + # large GIDs instead of negative ones. 
This isn't a Ruby bug, + # it's an OS X bug, since it shows up in perl, too. + if currentvalue > Puppet[:maximum_uid].to_i + self.warning "Apparently using negative GID (#{currentvalue}) on a platform that does not consistently handle them" + currentvalue = :silly + end + + currentvalue + end + + # Determine if the group is valid, and if so, return the GID + def validgroup?(value) + Integer(value) rescue gid(value) || false + end + + # Normal users will only be able to manage certain groups. Right now, + # we'll just let it fail, but we should probably set things up so + # that users get warned if they try to change to an unacceptable group. + def sync + # Set our method appropriately, depending on links. + if resource[:links] == :manage + method = :lchown + else + method = :chown + end + + gid = nil + @should.each do |group| + break if gid = validgroup?(group) + end + + raise Puppet::Error, "Could not find group(s) #{@should.join(",")}" unless gid + + begin + # set owner to nil so it's ignored + File.send(method, nil, gid, resource[:path]) + rescue => detail + error = Puppet::Error.new( "failed to chgrp #{resource[:path]} to #{gid}: #{detail.message}") + raise error + end + :file_changed + end + end +end diff --git a/mcollective/lib/puppet/type/file/mode.rb b/mcollective/lib/puppet/type/file/mode.rb new file mode 100755 index 000000000..2acd8b359 --- /dev/null +++ b/mcollective/lib/puppet/type/file/mode.rb @@ -0,0 +1,90 @@ +# Manage file modes. This state should support different formats +# for specification (e.g., u+rwx, or -0011), but for now only supports +# specifying the full mode. +module Puppet + Puppet::Type.type(:file).newproperty(:mode) do + require 'etc' + desc "Mode the file should be. Currently relatively limited: + you must specify the exact mode the file should be. + + Note that when you set the mode of a directory, Puppet always + sets the search/traverse (1) bit anywhere the read (4) bit is set. + This is almost always what you want: read allows you to list the + entries in a directory, and search/traverse allows you to access + (read/write/execute) those entries.) Because of this feature, you + can recursively make a directory and all of the files in it + world-readable by setting e.g.: + + file { '/some/dir': + mode => 644, + recurse => true, + } + + In this case all of the files underneath `/some/dir` will have + mode 644, and all of the directories will have mode 755." + + @event = :file_changed + + munge do |should| + if should.is_a?(String) + unless should =~ /^[0-7]+$/ + raise Puppet::Error, "File modes can only be octal numbers, not #{should.inspect}" + end + should.to_i(8).to_s(8) + else + should.to_s(8) + end + end + + # If we're a directory, we need to be executable for all cases + # that are readable. This should probably be selectable, but eh. + def dirmask(value) + if FileTest.directory?(@resource[:path]) + value = value.to_i(8) + value |= 0100 if value & 0400 != 0 + value |= 010 if value & 040 != 0 + value |= 01 if value & 04 != 0 + value = value.to_s(8) + end + + value + end + + def insync?(currentvalue) + if stat = @resource.stat and stat.ftype == "link" and @resource[:links] != :follow + self.debug "Not managing symlink mode" + return true + else + return super(currentvalue) + end + end + + def retrieve + # If we're not following links and we're a link, then we just turn + # off mode management entirely. 
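The dirmask helper above implements the behaviour promised in the desc: wherever a read bit is set on a directory, the matching search/traverse bit is set as well, so a recursively managed tree ends up with 0644 files inside 0755 directories. A sketch with a placeholder path:

    file { '/srv/www':
      ensure  => directory,
      recurse => true,
      mode    => 644,   # files become 0644; directories gain the traverse bits and become 0755
    }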
+ + if stat = @resource.stat(false) + unless defined?(@fixed) + @should &&= @should.collect { |s| self.dirmask(s) } + end + return (stat.mode & 007777).to_s(8) + else + return :absent + end + end + + def sync + mode = self.should + + begin + File.chmod(mode.to_i(8), @resource[:path]) + rescue => detail + error = Puppet::Error.new("failed to chmod #{@resource[:path]}: #{detail.message}") + error.set_backtrace detail.backtrace + raise error + end + :file_changed + end + end +end + diff --git a/mcollective/lib/puppet/type/file/mtime.rb b/mcollective/lib/puppet/type/file/mtime.rb new file mode 100644 index 000000000..8ca7ed0d6 --- /dev/null +++ b/mcollective/lib/puppet/type/file/mtime.rb @@ -0,0 +1,17 @@ +module Puppet + Puppet::Type.type(:file).newproperty(:mtime) do + desc "A read-only state to check the file mtime." + + def retrieve + current_value = :absent + if stat = @resource.stat(false) + current_value = stat.mtime + end + current_value + end + + validate do + fail "mtime is read-only" + end + end +end diff --git a/mcollective/lib/puppet/type/file/owner.rb b/mcollective/lib/puppet/type/file/owner.rb new file mode 100755 index 000000000..483cc7fce --- /dev/null +++ b/mcollective/lib/puppet/type/file/owner.rb @@ -0,0 +1,52 @@ +module Puppet + Puppet::Type.type(:file).newproperty(:owner) do + + desc "To whom the file should belong. Argument can be user name or + user ID." + @event = :file_changed + + def insync?(current) + provider.is_owner_insync?(current, @should) + end + + # We want to print names, not numbers + def is_to_s(currentvalue) + provider.id2name(currentvalue) || currentvalue + end + + def should_to_s(newvalue = @should) + case newvalue + when Symbol + newvalue.to_s + when Integer + provider.id2name(newvalue) || newvalue + when String + newvalue + else + raise Puppet::DevError, "Invalid uid type #{newvalue.class}(#{newvalue})" + end + end + + def retrieve + if self.should + @should = @should.collect do |val| + unless val.is_a?(Integer) + if tmp = provider.validuser?(val) + val = tmp + else + raise "Could not find user #{val}" + end + else + val + end + end + end + provider.retrieve(@resource) + end + + def sync + provider.sync(resource[:path], resource[:links], @should) + end + end +end + diff --git a/mcollective/lib/puppet/type/file/selcontext.rb b/mcollective/lib/puppet/type/file/selcontext.rb new file mode 100644 index 000000000..ea385eec0 --- /dev/null +++ b/mcollective/lib/puppet/type/file/selcontext.rb @@ -0,0 +1,119 @@ +# Manage SELinux context of files. +# +# This code actually manages three pieces of data in the context. +# +# [root@delenn files]# ls -dZ / +# drwxr-xr-x root root system_u:object_r:root_t / +# +# The context of '/' here is 'system_u:object_r:root_t'. This is +# three seperate fields: +# +# system_u is the user context +# object_r is the role context +# root_t is the type context +# +# All three of these fields are returned in a single string by the +# output of the stat command, but set individually with the chcon +# command. This allows the user to specify a subset of the three +# values while leaving the others alone. +# +# See http://www.nsa.gov/selinux/ for complete docs on SELinux. 
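The three context fields described in the comment above map onto the seluser, selrole and seltype properties defined below, with selrange covering the optional MCS component. A hedged sketch using a conventional httpd label; the path and type are illustrative:

    file { '/var/www/html/index.html':
      ensure  => file,
      seluser => 'system_u',
      selrole => 'object_r',
      seltype => 'httpd_sys_content_t',
    }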
+ +module Puppet + require 'puppet/util/selinux' + + class SELFileContext < Puppet::Property + include Puppet::Util::SELinux + + def retrieve + return :absent unless @resource.stat(false) + context = self.get_selinux_current_context(@resource[:path]) + parse_selinux_context(name, context) + end + + def retrieve_default_context(property) + if @resource[:selinux_ignore_defaults] == :true + return nil + end + + unless context = self.get_selinux_default_context(@resource[:path]) + return nil + end + + property_default = self.parse_selinux_context(property, context) + self.debug "Found #{property} default '#{property_default}' for #{@resource[:path]}" if not property_default.nil? + property_default + end + + def insync?(value) + if not selinux_support? + debug("SELinux bindings not found. Ignoring parameter.") + return true + end + super + end + + def sync + self.set_selinux_context(@resource[:path], @should, name) + :file_changed + end + end + + Puppet::Type.type(:file).newparam(:selinux_ignore_defaults) do + desc "If this is set then Puppet will not ask SELinux (via matchpathcon) to + supply defaults for the SELinux attributes (seluser, selrole, + seltype, and selrange). In general, you should leave this set at its + default and only set it to true when you need Puppet to not try to fix + SELinux labels automatically." + newvalues(:true, :false) + + defaultto :false + end + + Puppet::Type.type(:file).newproperty(:seluser, :parent => Puppet::SELFileContext) do + desc "What the SELinux user component of the context of the file should be. + Any valid SELinux user component is accepted. For example `user_u`. + If not specified it defaults to the value returned by matchpathcon for + the file, if any exists. Only valid on systems with SELinux support + enabled." + + @event = :file_changed + defaultto { self.retrieve_default_context(:seluser) } + end + + Puppet::Type.type(:file).newproperty(:selrole, :parent => Puppet::SELFileContext) do + desc "What the SELinux role component of the context of the file should be. + Any valid SELinux role component is accepted. For example `role_r`. + If not specified it defaults to the value returned by matchpathcon for + the file, if any exists. Only valid on systems with SELinux support + enabled." + + @event = :file_changed + defaultto { self.retrieve_default_context(:selrole) } + end + + Puppet::Type.type(:file).newproperty(:seltype, :parent => Puppet::SELFileContext) do + desc "What the SELinux type component of the context of the file should be. + Any valid SELinux type component is accepted. For example `tmp_t`. + If not specified it defaults to the value returned by matchpathcon for + the file, if any exists. Only valid on systems with SELinux support + enabled." + + @event = :file_changed + defaultto { self.retrieve_default_context(:seltype) } + end + + Puppet::Type.type(:file).newproperty(:selrange, :parent => Puppet::SELFileContext) do + desc "What the SELinux range component of the context of the file should be. + Any valid SELinux range component is accepted. For example `s0` or + `SystemHigh`. If not specified it defaults to the value returned by + matchpathcon for the file, if any exists. Only valid on systems with + SELinux support enabled and that have support for MCS (Multi-Category + Security)." 
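When only part of the context is specified, the remaining components default to whatever matchpathcon reports unless selinux_ignore_defaults is set, as the parameter above explains. A sketch with placeholder values:

    file { '/srv/secret':
      ensure                  => file,
      selrange                => 's0',
      selinux_ignore_defaults => true,   # do not fill seluser/selrole/seltype from matchpathcon
    }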
+ + @event = :file_changed + defaultto { self.retrieve_default_context(:selrange) } + end + +end + diff --git a/mcollective/lib/puppet/type/file/source.rb b/mcollective/lib/puppet/type/file/source.rb new file mode 100755 index 000000000..2fb65bbad --- /dev/null +++ b/mcollective/lib/puppet/type/file/source.rb @@ -0,0 +1,183 @@ + +require 'puppet/file_serving/content' +require 'puppet/file_serving/metadata' + +module Puppet + # Copy files from a local or remote source. This state *only* does any work + # when the remote file is an actual file; in that case, this state copies + # the file down. If the remote file is a dir or a link or whatever, then + # this state, during retrieval, modifies the appropriate other states + # so that things get taken care of appropriately. + Puppet::Type.type(:file).newparam(:source) do + include Puppet::Util::Diff + + attr_accessor :source, :local + desc "Copy a file over the current file. Uses `checksum` to + determine when a file should be copied. Valid values are either + fully qualified paths to files, or URIs. Currently supported URI + types are *puppet* and *file*. + + This is one of the primary mechanisms for getting content into + applications that Puppet does not directly support and is very + useful for those configuration files that don't change much across + sytems. For instance: + + class sendmail { + file { \"/etc/mail/sendmail.cf\": + source => \"puppet://server/modules/module_name/sendmail.cf\" + } + } + + You can also leave out the server name, in which case `puppet agent` + will fill in the name of its configuration server and `puppet apply` + will use the local filesystem. This makes it easy to use the same + configuration in both local and centralized forms. + + Currently, only the `puppet` scheme is supported for source + URL's. Puppet will connect to the file server running on + `server` to retrieve the contents of the file. If the + `server` part is empty, the behavior of the command-line + interpreter (`puppet apply`) and the client demon (`puppet agent`) differs + slightly: `apply` will look such a file up on the module path + on the local host, whereas `agent` will connect to the + puppet server that it received the manifest from. + + See the [fileserver configuration documentation](http://projects.puppetlabs.com/projects/puppet/wiki/File_Serving_Configuration) for information on how to configure + and use file services within Puppet. + + If you specify multiple file sources for a file, then the first + source that exists will be used. This allows you to specify + what amount to search paths for files: + + file { \"/path/to/my/file\": + source => [ + \"/modules/nfs/files/file.$host\", + \"/modules/nfs/files/file.$operatingsystem\", + \"/modules/nfs/files/file\" + ] + } + + This will use the first found file as the source. + + You cannot currently copy links using this mechanism; set `links` + to `follow` if any remote sources are links. + " + + validate do |sources| + sources = [sources] unless sources.is_a?(Array) + sources.each do |source| + begin + uri = URI.parse(URI.escape(source)) + rescue => detail + self.fail "Could not understand source #{source}: #{detail}" + end + + self.fail "Cannot use URLs of type '#{uri.scheme}' as source for fileserving" unless uri.scheme.nil? 
or %w{file puppet}.include?(uri.scheme) + end + end + + munge do |sources| + sources = [sources] unless sources.is_a?(Array) + sources.collect { |source| source.sub(/\/$/, '') } + end + + def change_to_s(currentvalue, newvalue) + # newvalue = "{md5}#{@metadata.checksum}" + if @resource.property(:ensure).retrieve == :absent + return "creating from source #{metadata.source} with contents #{metadata.checksum}" + else + return "replacing from source #{metadata.source} with contents #{metadata.checksum}" + end + end + + def checksum + metadata && metadata.checksum + end + + # Look up (if necessary) and return remote content. + cached_attr(:content) do + raise Puppet::DevError, "No source for content was stored with the metadata" unless metadata.source + + unless tmp = Puppet::FileServing::Content.find(metadata.source) + fail "Could not find any content at %s" % metadata.source + end + tmp.content + end + + # Copy the values from the source to the resource. Yay. + def copy_source_values + devfail "Somehow got asked to copy source values without any metadata" unless metadata + + # Take each of the stats and set them as states on the local file + # if a value has not already been provided. + [:owner, :mode, :group, :checksum].each do |metadata_method| + param_name = (metadata_method == :checksum) ? :content : metadata_method + next if metadata_method == :owner and !Puppet.features.root? + next if metadata_method == :checksum and metadata.ftype == "directory" + next if metadata_method == :checksum and metadata.ftype == "link" and metadata.links == :manage + + if resource[param_name].nil? or resource[param_name] == :absent + resource[param_name] = metadata.send(metadata_method) + end + end + + if resource[:ensure] == :absent + # We know all we need to + elsif metadata.ftype != "link" + resource[:ensure] = metadata.ftype + elsif @resource[:links] == :follow + resource[:ensure] = :present + else + resource[:ensure] = "link" + resource[:target] = metadata.destination + end + end + + def found? + ! (metadata.nil? or metadata.ftype.nil?) + end + + # Provide, and retrieve if necessary, the metadata for this file. Fail + # if we can't find data about this host, and fail if there are any + # problems in our query. + cached_attr(:metadata) do + return nil unless value + result = nil + value.each do |source| + begin + if data = Puppet::FileServing::Metadata.find(source) + result = data + result.source = source + break + end + rescue => detail + fail detail, "Could not retrieve file metadata for #{source}: #{detail}" + end + end + fail "Could not retrieve information from environment #{Puppet[:environment]} source(s) #{value.join(", ")}" unless result + result + end + + def local? + found? and uri and (uri.scheme || "file") == "file" + end + + def full_path + URI.unescape(uri.path) if found? and uri + end + + def server + (uri and uri.host) or Puppet.settings[:server] + end + + def port + (uri and uri.port) or Puppet.settings[:masterport] + end + + private + + def uri + @uri ||= URI.parse(URI.escape(metadata.source)) + end + end +end diff --git a/mcollective/lib/puppet/type/file/target.rb b/mcollective/lib/puppet/type/file/target.rb new file mode 100644 index 000000000..7d391e672 --- /dev/null +++ b/mcollective/lib/puppet/type/file/target.rb @@ -0,0 +1,87 @@ +module Puppet + Puppet::Type.type(:file).newproperty(:target) do + desc "The target for creating a link. Currently, symlinks are the + only type supported. 
+ + You can make relative links: + + # (Useful on Solaris) + file { \"/etc/inetd.conf\": + ensure => link, + target => \"inet/inetd.conf\", + } + + You can also make recursive symlinks, which will create a + directory structure that maps to the target directory, + with directories corresponding to each directory + and links corresponding to each file." + + newvalue(:notlink) do + # We do nothing if the value is absent + return :nochange + end + + # Anything else, basically + newvalue(/./) do + @resource[:ensure] = :link if ! @resource.should(:ensure) + + # Only call mklink if ensure didn't call us in the first place. + currentensure = @resource.property(:ensure).retrieve + mklink if @resource.property(:ensure).safe_insync?(currentensure) + end + + # Create our link. + def mklink + raise Puppet::Error, "Cannot symlink on Microsoft Windows" if Puppet.features.microsoft_windows? + + target = self.should + + # Clean up any existing objects. The argument is just for logging, + # it doesn't determine what's removed. + @resource.remove_existing(target) + + raise Puppet::Error, "Could not remove existing file" if FileTest.exists?(@resource[:path]) + + Dir.chdir(File.dirname(@resource[:path])) do + Puppet::Util::SUIDManager.asuser(@resource.asuser) do + mode = @resource.should(:mode) + if mode + Puppet::Util.withumask(000) do + File.symlink(target, @resource[:path]) + end + else + File.symlink(target, @resource[:path]) + end + end + + @resource.send(:property_fix) + + :link_created + end + end + + def insync?(currentvalue) + if [:nochange, :notlink].include?(self.should) or @resource.recurse? + return true + elsif ! @resource.replace? and File.exists?(@resource[:path]) + return true + else + return super(currentvalue) + end + end + + + def retrieve + if stat = @resource.stat + if stat.ftype == "link" + return File.readlink(@resource[:path]) + else + return :notlink + end + else + return :absent + end + end + end +end + diff --git a/mcollective/lib/puppet/type/file/type.rb b/mcollective/lib/puppet/type/file/type.rb new file mode 100755 index 000000000..4da54e2cb --- /dev/null +++ b/mcollective/lib/puppet/type/file/type.rb @@ -0,0 +1,19 @@ +module Puppet + Puppet::Type.type(:file).newproperty(:type) do + require 'etc' + desc "A read-only state to check the file type." + + def retrieve + current_value = :absent + if stat = @resource.stat(false) + current_value = stat.ftype + end + current_value + end + + validate do + fail "type is read-only" + end + end +end + diff --git a/mcollective/lib/puppet/type/filebucket.rb b/mcollective/lib/puppet/type/filebucket.rb new file mode 100755 index 000000000..7fd2ef46b --- /dev/null +++ b/mcollective/lib/puppet/type/filebucket.rb @@ -0,0 +1,94 @@ +module Puppet + require 'puppet/file_bucket/dipper' + + newtype(:filebucket) do + @doc = "A repository for backing up files. If no filebucket is + defined, then files will be backed up in their current directory, + but the filebucket can be either a host- or site-global repository + for backing up. It stores files and returns the MD5 sum, which + can later be used to retrieve the file if restoration becomes + necessary. A filebucket does not do any work itself; instead, + it can be specified as the value of *backup* in a **file** object. + + Currently, filebuckets are only useful for manual retrieval of + accidentally removed files (e.g., you look in the log for the md5 sum and retrieve the file with that sum from the filebucket), but + when transactions are fully supported filebuckets will be used to + undo transactions. 
+ + You will normally want to define a single filebucket for your + whole network and then use that as the default backup location: + + # Define the bucket + filebucket { main: server => puppet } + + # Specify it as the default target + File { backup => main } + + Puppetmaster servers create a filebucket by default, so this will + work in a default configuration." + + newparam(:name) do + desc "The name of the filebucket." + isnamevar + end + + newparam(:server) do + desc "The server providing the remote filebucket. If this is not + specified then *path* is checked. If it is set, then the + bucket is local. Otherwise the puppetmaster server specified + in the config or at the commandline is used." + defaultto { Puppet[:server] } + end + + newparam(:port) do + desc "The port on which the remote server is listening. + Defaults to the normal Puppet port, %s." % Puppet[:masterport] + + defaultto { Puppet[:masterport] } + end + + newparam(:path) do + desc "The path to the local filebucket. If this is + unset, then the bucket is remote. The parameter *server* must + can be specified to set the remote server." + + defaultto { Puppet[:clientbucketdir] } + end + + # Create a default filebucket. + def self.mkdefaultbucket + new(:name => "puppet", :path => Puppet[:clientbucketdir]) + end + + def bucket + mkbucket unless defined?(@bucket) + @bucket + end + + private + + def mkbucket + # Default is a local filebucket, if no server is given. + # If the default path has been removed, too, then + # the puppetmaster is used as default server + + type = "local" + args = {} + if self[:path] + args[:Path] = self[:path] + else + args[:Server] = self[:server] + args[:Port] = self[:port] + end + + begin + @bucket = Puppet::FileBucket::Dipper.new(args) + rescue => detail + puts detail.backtrace if Puppet[:trace] + self.fail("Could not create #{type} filebucket: #{detail}") + end + + @bucket.name = self.name + end + end +end diff --git a/mcollective/lib/puppet/type/group.rb b/mcollective/lib/puppet/type/group.rb new file mode 100755 index 000000000..066bd49df --- /dev/null +++ b/mcollective/lib/puppet/type/group.rb @@ -0,0 +1,109 @@ +require 'etc' +require 'facter' + +module Puppet + newtype(:group) do + @doc = "Manage groups. On most platforms this can only create groups. + Group membership must be managed on individual users. + + On some platforms such as OS X, group membership is managed as an + attribute of the group, not the user record. Providers must have + the feature 'manages_members' to manage the 'members' property of + a group record." + + feature :manages_members, + "For directories where membership is an attribute of groups not users." + + feature :system_groups, + "The provider allows you to create system groups with lower GIDs." + + ensurable do + desc "Create or remove the group." + + newvalue(:present) do + provider.create + end + + newvalue(:absent) do + provider.delete + end + end + + newproperty(:gid) do + desc "The group ID. Must be specified numerically. If not + specified, a number will be picked, which can result in ID + differences across systems and thus is not recommended. The + GID is picked according to local system standards." 
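# A minimal sketch of a manifest using this type (the group name and GID
# below are illustrative assumptions, not recommendations):
#
#   group { mcollective:
#     ensure => present,
#     gid    => 10001,
#   }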
+ + def retrieve + provider.gid + end + + def sync + if self.should == :absent + raise Puppet::DevError, "GID cannot be deleted" + else + provider.gid = self.should + end + end + + munge do |gid| + case gid + when String + if gid =~ /^[-0-9]+$/ + gid = Integer(gid) + else + self.fail "Invalid GID #{gid}" + end + when Symbol + unless gid == :absent + self.devfail "Invalid GID #{gid}" + end + end + + return gid + end + end + + newproperty(:members, :array_matching => :all, :required_features => :manages_members) do + desc "The members of the group. For directory services where group + membership is stored in the group objects, not the users." + + def change_to_s(currentvalue, newvalue) + currentvalue = currentvalue.join(",") if currentvalue != :absent + newvalue = newvalue.join(",") + super(currentvalue, newvalue) + end + end + + newparam(:auth_membership) do + desc "whether the provider is authoritative for group membership." + defaultto true + end + + newparam(:name) do + desc "The group name. While naming limitations vary by + system, it is advisable to keep the name to the degenerate + limitations, which is a maximum of 8 characters beginning with + a letter." + isnamevar + end + + newparam(:allowdupe, :boolean => true) do + desc "Whether to allow duplicate GIDs. This option does not work on + FreeBSD (contract to the `pw` man page)." + + newvalues(:true, :false) + + defaultto false + end + + newparam(:system, :boolean => true) do + desc "Whether the group is a system group with lower GID." + + newvalues(:true, :false) + + defaultto false + end + end +end diff --git a/mcollective/lib/puppet/type/host.rb b/mcollective/lib/puppet/type/host.rb new file mode 100755 index 000000000..2666e50ae --- /dev/null +++ b/mcollective/lib/puppet/type/host.rb @@ -0,0 +1,98 @@ +module Puppet + newtype(:host) do + ensurable + + newproperty(:ip) do + desc "The host's IP address, IPv4 or IPv6." + + validate do |value| + unless value =~ /((([0-9a-fA-F]+:){7}[0-9a-fA-F]+)|(([0-9a-fA-F]+:)*[0-9a-fA-F]+)?::(([0-9a-fA-F]+:)*[0-9a-fA-F]+)?)|((25[0-5]|2[0-4][\d]|[0-1]?\d?\d)(\.(25[0-5]|2[0-4]\d|[0-1]?\d?\d)){3})/ + raise Puppet::Error, "Invalid IP address" + end + end + + end + + newproperty(:host_aliases) do + desc "Any aliases the host might have. Multiple values must be + specified as an array." + + def insync?(is) + is == @should + end + + def is_to_s(currentvalue = @is) + currentvalue = [currentvalue] unless currentvalue.is_a? Array + currentvalue.join(" ") + end + + def retrieve + is = super + case is + when String + is = is.split(/\s*,\s*/) + when Symbol + is = [is] + when Array + # nothing + else + raise Puppet::DevError, "Invalid @is type #{is.class}" + end + is + end + + # We actually want to return the whole array here, not just the first + # value. + def should + if defined?(@should) + if @should == [:absent] + return :absent + else + return @should + end + else + return nil + end + end + + def should_to_s(newvalue = @should) + newvalue.join(" ") + end + + validate do |value| + raise Puppet::Error, "Host aliases cannot include whitespace" if value =~ /\s/ + end + end + + newproperty(:target) do + desc "The file in which to store service information. Only used by + those providers that write to disk. On most systems this defaults to `/etc/hosts`." + + defaultto { if @resource.class.defaultprovider.ancestors.include?(Puppet::Provider::ParsedFile) + @resource.class.defaultprovider.default_target + else + nil + end + } + end + + newparam(:name) do + desc "The host name." 
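# For reference, a typical host entry looks like this (all values are
# illustrative):
#
#   host { puppet:
#     ip           => "10.0.0.2",
#     host_aliases => ["puppet.example.com"],
#   }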
+ + isnamevar + + validate do |value| + # LAK:NOTE See http://snurl.com/21zf8 [groups_google_com] + x = value.split('.').each do |hostpart| + unless hostpart =~ /^([\d\w]+|[\d\w][\d\w\-]+[\d\w])$/ + raise Puppet::Error, "Invalid host name" + end + end + end + end + + @doc = "Installs and manages host entries. For most systems, these + entries will just be in `/etc/hosts`, but some systems (notably OS X) + will have different solutions." + end +end diff --git a/mcollective/lib/puppet/type/k5login.rb b/mcollective/lib/puppet/type/k5login.rb new file mode 100644 index 000000000..4bc35df3f --- /dev/null +++ b/mcollective/lib/puppet/type/k5login.rb @@ -0,0 +1,88 @@ +# Plug-in type for handling k5login files +require 'puppet/util' + +Puppet::Type.newtype(:k5login) do + @doc = "Manage the `.k5login` file for a user. Specify the full path to + the `.k5login` file as the name and an array of principals as the + property principals." + + ensurable + + # Principals that should exist in the file + newproperty(:principals, :array_matching => :all) do + desc "The principals present in the `.k5login` file." + end + + # The path/name of the k5login file + newparam(:path) do + isnamevar + desc "The path to the file to manage. Must be fully qualified." + + validate do |value| + unless value =~ /^#{File::SEPARATOR}/ + raise Puppet::Error, "File paths must be fully qualified" + end + end + end + + # To manage the mode of the file + newproperty(:mode) do + desc "Manage the k5login file's mode" + defaultto { "644" } + end + + provide(:k5login) do + desc "The k5login provider is the only provider for the k5login + type." + + # Does this file exist? + def exists? + File.exists?(@resource[:name]) + end + + # create the file + def create + write(@resource.should(:principals)) + should_mode = @resource.should(:mode) + unless self.mode == should_mode + self.mode = should_mode + end + end + + # remove the file + def destroy + File.unlink(@resource[:name]) + end + + # Return the principals + def principals(dummy_argument=:work_arround_for_ruby_GC_bug) + if File.exists?(@resource[:name]) + File.readlines(@resource[:name]).collect { |line| line.chomp } + else + :absent + end + end + + # Write the principals out to the k5login file + def principals=(value) + write(value) + end + + # Return the mode as an octal string, not as an integer + def mode + "%o" % (File.stat(@resource[:name]).mode & 007777) + end + + # Set the file mode, converting from a string to an integer. + def mode=(value) + File.chmod(Integer("0#{value}"), @resource[:name]) + end + + private + def write(value) + Puppet::Util.replace_file(@resource[:name], 0644) do |f| + f.puts value + end + end + end +end diff --git a/mcollective/lib/puppet/type/macauthorization.rb b/mcollective/lib/puppet/type/macauthorization.rb new file mode 100644 index 000000000..e89aa7c89 --- /dev/null +++ b/mcollective/lib/puppet/type/macauthorization.rb @@ -0,0 +1,166 @@ +Puppet::Type.newtype(:macauthorization) do + + @doc = "Manage the Mac OS X authorization database. + See the [Apple developer site](http://developer.apple.com/documentation/Security/Conceptual/Security_Overview/Security_Services/chapter_4_section_5.html) for more information. + + **Autorequires:** If Puppet is managing the `/etc/authorization` file, each + macauthorization resource will autorequire it." 
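# A sketch of how a right might be managed with this type (the right name
# and attribute values are illustrative, not a recommended policy):
#
#   macauthorization { "system.preferences":
#     ensure            => present,
#     auth_type         => right,
#     auth_class        => user,
#     group             => admin,
#     allow_root        => true,
#     authenticate_user => true,
#   }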
+ + ensurable + + autorequire(:file) do + ["/etc/authorization"] + end + + def munge_boolean(value) + case value + when true, "true", :true + :true + when false, "false", :false + :false + else + fail("munge_boolean only takes booleans") + end + end + + def munge_integer(value) + Integer(value) + rescue ArgumentError + fail("munge_integer only takes integers") + end + + newparam(:name) do + desc "The name of the right or rule to be managed. + Corresponds to 'key' in Authorization Services. The key is the name + of a rule. A key uses the same naming conventions as a right. The + Security Server uses a rule’s key to match the rule with a right. + Wildcard keys end with a ‘.’. The generic rule has an empty key value. + Any rights that do not match a specific rule use the generic rule." + + isnamevar + end + + newproperty(:auth_type) do + desc "type - can be a 'right' or a 'rule'. 'comment' has not yet been + implemented." + + newvalue(:right) + newvalue(:rule) + # newvalue(:comment) # not yet implemented. + end + + newproperty(:allow_root, :boolean => true) do + desc "Corresponds to 'allow-root' in the authorization store, renamed + due to hyphens being problematic. Specifies whether a right should be + allowed automatically if the requesting process is running with + uid == 0. AuthorizationServices defaults this attribute to false if + not specified" + + newvalue(:true) + newvalue(:false) + + munge do |value| + @resource.munge_boolean(value) + end + end + + newproperty(:authenticate_user, :boolean => true) do + desc "Corresponds to 'authenticate-user' in the authorization store, + renamed due to hyphens being problematic." + + newvalue(:true) + newvalue(:false) + + munge do |value| + @resource.munge_boolean(value) + end + end + + newproperty(:auth_class) do + desc "Corresponds to 'class' in the authorization store, renamed due + to 'class' being a reserved word." + + newvalue(:user) + newvalue(:'evaluate-mechanisms') + newvalue(:allow) + newvalue(:deny) + newvalue(:rule) + end + + newproperty(:comment) do + desc "The 'comment' attribute for authorization resources." + end + + newproperty(:group) do + desc "The user must authenticate as a member of this group. This + attribute can be set to any one group." + end + + newproperty(:k_of_n) do + desc "k-of-n describes how large a subset of rule mechanisms must + succeed for successful authentication. If there are 'n' mechanisms, + then 'k' (the integer value of this parameter) mechanisms must succeed. + The most common setting for this parameter is '1'. If k-of-n is not + set, then 'n-of-n' mechanisms must succeed." + + munge do |value| + @resource.munge_integer(value) + end + end + + newproperty(:mechanisms, :array_matching => :all) do + desc "an array of suitable mechanisms." + end + + newproperty(:rule, :array_matching => :all) do + desc "The rule(s) that this right refers to." + end + + newproperty(:session_owner, :boolean => true) do + desc "Corresponds to 'session-owner' in the authorization store, + renamed due to hyphens being problematic. Whether the session owner + automatically matches this rule or right." + + newvalue(:true) + newvalue(:false) + + munge do |value| + @resource.munge_boolean(value) + end + end + + newproperty(:shared, :boolean => true) do + desc "If this is set to true, then the Security Server marks the + credentials used to gain this right as shared. The Security Server + may use any shared credentials to authorize this right. 
For maximum + security, set sharing to false so credentials stored by the Security + Server for one application may not be used by another application." + + newvalue(:true) + newvalue(:false) + + munge do |value| + @resource.munge_boolean(value) + end + end + + newproperty(:timeout) do + desc "The credential used by this rule expires in the specified + number of seconds. For maximum security where the user must + authenticate every time, set the timeout to 0. For minimum security, + remove the timeout attribute so the user authenticates only once per + session." + + munge do |value| + @resource.munge_integer(value) + end + end + + newproperty(:tries) do + desc "The number of tries allowed." + munge do |value| + @resource.munge_integer(value) + end + end + +end diff --git a/mcollective/lib/puppet/type/mailalias.rb b/mcollective/lib/puppet/type/mailalias.rb new file mode 100755 index 000000000..ce7ca790b --- /dev/null +++ b/mcollective/lib/puppet/type/mailalias.rb @@ -0,0 +1,48 @@ +module Puppet + newtype(:mailalias) do + @doc = "Creates an email alias in the local alias database." + + ensurable + + newparam(:name, :namevar => true) do + desc "The alias name." + end + + newproperty(:recipient, :array_matching => :all) do + desc "Where email should be sent. Multiple values + should be specified as an array." + + def is_to_s(value) + if value.include?(:absent) + super + else + value.join(",") + end + end + + def should + @should + end + + def should_to_s(value) + if value.include?(:absent) + super + else + value.join(",") + end + end + end + + newproperty(:target) do + desc "The file in which to store the aliases. Only used by + those providers that write to disk." + + defaultto { if @resource.class.defaultprovider.ancestors.include?(Puppet::Provider::ParsedFile) + @resource.class.defaultprovider.default_target + else + nil + end + } + end + end +end diff --git a/mcollective/lib/puppet/type/maillist.rb b/mcollective/lib/puppet/type/maillist.rb new file mode 100755 index 000000000..732fbf09f --- /dev/null +++ b/mcollective/lib/puppet/type/maillist.rb @@ -0,0 +1,62 @@ +module Puppet + newtype(:maillist) do + @doc = "Manage email lists. This resource type currently can only create + and remove lists, it cannot reconfigure them." + + ensurable do + defaultvalues + + newvalue(:purged) do + provider.purge + end + + def change_to_s(current_value, newvalue) + return "Purged #{resource}" if newvalue == :purged + super + end + + def insync?(is) + return true if is == :absent && should == :purged + super + end + end + + newparam(:name, :namevar => true) do + desc "The name of the email list." + end + + newparam(:description) do + desc "The description of the mailing list." + end + + newparam(:password) do + desc "The admin password." + end + + newparam(:webserver) do + desc "The name of the host providing web archives and the administrative interface." + end + + newparam(:mailserver) do + desc "The name of the host handling email for the list." + end + + newparam(:admin) do + desc "The email address of the administrator." + end + + def generate + if provider.respond_to?(:aliases) + should = self.should(:ensure) || :present + if should == :purged + should = :absent + end + atype = Puppet::Type.type(:mailalias) + + provider.aliases. + reject { |name,recipient| catalog.resource(:mailalias, name) }. 
+ collect { |name,recipient| atype.new(:name => name, :recipient => recipient, :ensure => should) } + end + end + end +end diff --git a/mcollective/lib/puppet/type/mcx.rb b/mcollective/lib/puppet/type/mcx.rb new file mode 100644 index 000000000..07c9348dd --- /dev/null +++ b/mcollective/lib/puppet/type/mcx.rb @@ -0,0 +1,118 @@ +#-- +# Copyright (C) 2008 Jeffrey J McCune. + +# This program and entire repository is free software; you can +# redistribute it and/or modify it under the terms of the GNU +# General Public License as published by the Free Software +# Foundation; either version 2 of the License, or any later version. + +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. + +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + +# Author: Jeff McCune + +Puppet::Type.newtype(:mcx) do + + @doc = "MCX object management using DirectoryService on OS X. + +The default provider of this type merely manages the XML plist as +reported by the dscl -mcxexport command. This is similar to the +content property of the file type in Puppet. + +The recommended method of using this type is to use Work Group Manager +to manage users and groups on the local computer, record the resulting +puppet manifest using the command `puppet resource mcx`, then deploy it +to other machines. + +**Autorequires:** If Puppet is managing the user, group, or computer that these +MCX settings refer to, the MCX resource will autorequire that user, group, or computer. +" + feature :manages_content, \ + "The provider can manage MCXSettings as a string.", + :methods => [:content, :content=] + + ensurable do + desc "Create or remove the MCX setting." + + newvalue(:present) do + provider.create + end + + newvalue(:absent) do + provider.destroy + end + + end + + newparam(:name) do + desc "The name of the resource being managed. + The default naming convention follows Directory Service paths: + + /Computers/localhost + /Groups/admin + /Users/localadmin + + The `ds_type` and `ds_name` type parameters are not necessary if the + default naming convention is followed." + isnamevar + end + + newparam(:ds_type) do + + desc "The DirectoryService type this MCX setting attaches to." + + newvalues(:user, :group, :computer, :computerlist) + + end + + newparam(:ds_name) do + desc "The name to attach the MCX Setting to. + e.g. 'localhost' when ds_type => computer. This setting is not + required, as it may be parsed so long as the resource name is + parseable. e.g. /Groups/admin where 'group' is the dstype." + end + + newproperty(:content, :required_features => :manages_content) do + desc "The XML Plist. The value of MCXSettings in DirectoryService. + This is the standard output from the system command: + + dscl localhost -mcxexport /Local/Default//ds_name + + Note that `ds_type` is capitalized and plural in the dscl command." + end + + # JJM Yes, this is not DRY at all. Because of the code blocks + # autorequire must be done this way. I think. 
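# An illustrative declaration of this type; the plist content would
# normally come from `puppet resource mcx` output or a template (the
# template path here is a made-up example):
#
#   mcx { '/Groups/admin':
#     ensure  => present,
#     content => template('mymodule/admin_mcx.plist.erb'),
#   }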
+ + def setup_autorequire(type) + # value returns a Symbol + name = value(:name) + ds_type = value(:ds_type) + ds_name = value(:ds_name) + if ds_type == type + rval = [ ds_name.to_s ] + else + rval = [ ] + end + rval + end + + autorequire(:user) do + setup_autorequire(:user) + end + + autorequire(:group) do + setup_autorequire(:group) + end + + autorequire(:computer) do + setup_autorequire(:computer) + end + +end diff --git a/mcollective/lib/puppet/type/mount.rb b/mcollective/lib/puppet/type/mount.rb new file mode 100755 index 000000000..5b8c5ca58 --- /dev/null +++ b/mcollective/lib/puppet/type/mount.rb @@ -0,0 +1,240 @@ +module Puppet + # We want the mount to refresh when it changes. + newtype(:mount, :self_refresh => true) do + @doc = "Manages mounted filesystems, including putting mount + information into the mount table. The actual behavior depends + on the value of the 'ensure' parameter. + + Note that if a `mount` receives an event from another resource, + it will try to remount the filesystems if `ensure` is set to `mounted`." + + feature :refreshable, "The provider can remount the filesystem.", + :methods => [:remount] + + # Use the normal parent class, because we actually want to + # call code when sync is called. + newproperty(:ensure) do + desc "Control what to do with this mount. Set this attribute to + `umounted` to make sure the filesystem is in the filesystem table + but not mounted (if the filesystem is currently mounted, it will be unmounted). Set it to `absent` to unmount (if necessary) and remove + the filesystem from the fstab. Set to `mounted` to add it to the + fstab and mount it. Set to `present` to add to fstab but not change + mount/unmount status" + + # IS -> SHOULD In Sync Action + # ghost -> present NO create + # absent -> present NO create + # (mounted -> present YES) + # (unmounted -> present YES) + newvalue(:defined) do + provider.create + return :mount_created + end + + aliasvalue :present, :defined + + # IS -> SHOULD In Sync Action + # ghost -> unmounted NO create, unmount + # absent -> unmounted NO create + # mounted -> unmounted NO unmount + newvalue(:unmounted) do + case self.retrieve + when :ghost # (not in fstab but mounted) + provider.create + @resource.flush + provider.unmount + return :mount_unmounted + when nil, :absent # (not in fstab and not mounted) + provider.create + return :mount_created + when :mounted # (in fstab and mounted) + provider.unmount + syncothers # I guess it's more likely that the mount was originally mounted with + # the wrong attributes so I sync AFTER the umount + return :mount_unmounted + else + raise Puppet::Error, "Unexpected change from #{current_value} to unmounted}" + end + end + + # IS -> SHOULD In Sync Action + # ghost -> absent NO unmount + # mounted -> absent NO provider.destroy AND unmount + # unmounted -> absent NO provider.destroy + newvalue(:absent, :event => :mount_deleted) do + current_value = self.retrieve + provider.unmount if provider.mounted? + provider.destroy unless current_value == :ghost + end + + # IS -> SHOULD In Sync Action + # ghost -> mounted NO provider.create + # absent -> mounted NO provider.create AND mount + # unmounted -> mounted NO mount + newvalue(:mounted, :event => :mount_mounted) do + # Create the mount point if it does not already exist. + current_value = self.retrieve + currently_mounted = provider.mounted? 
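# If there is no fstab entry yet (absent or ghost), create one first; then
# bring the remaining properties in line and, unless the filesystem is
# already mounted, flag it so the provider mounts it when changes are
# flushed.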
+ provider.create if [nil, :absent, :ghost].include?(current_value) + + syncothers + + # The fs can be already mounted if it was absent but mounted + provider.property_hash[:needs_mount] = true unless currently_mounted + end + + # insync: mounted -> present + # unmounted -> present + def insync?(is) + if should == :defined and [:mounted,:unmounted].include?(is) + true + else + super + end + end + + def syncothers + # We have to flush any changes to disk. + currentvalues = @resource.retrieve_resource + + # Determine if there are any out-of-sync properties. + oos = @resource.send(:properties).find_all do |prop| + unless currentvalues.include?(prop) + raise Puppet::DevError, "Parent has property %s but it doesn't appear in the current values", [prop.name] + end + if prop.name == :ensure + false + else + ! prop.safe_insync?(currentvalues[prop]) + end + end.each { |prop| prop.sync }.length + @resource.flush if oos > 0 + end + end + + newproperty(:device) do + desc "The device providing the mount. This can be whatever + device is supporting by the mount, including network + devices or devices specified by UUID rather than device + path, depending on the operating system." + end + + # Solaris specifies two devices, not just one. + newproperty(:blockdevice) do + desc "The device to fsck. This is property is only valid + on Solaris, and in most cases will default to the correct + value." + + # Default to the device but with "dsk" replaced with "rdsk". + defaultto do + if Facter["operatingsystem"].value == "Solaris" + device = @resource.value(:device) + if device =~ %r{/dsk/} + device.sub(%r{/dsk/}, "/rdsk/") + else + nil + end + else + nil + end + end + end + + newproperty(:fstype) do + desc "The mount type. Valid values depend on the + operating system. This is a required option." + end + + newproperty(:options) do + desc "Mount options for the mounts, as they would + appear in the fstab." + end + + newproperty(:pass) do + desc "The pass in which the mount is checked." + + defaultto { + 0 if @resource.managed? + } + end + + newproperty(:atboot) do + desc "Whether to mount the mount at boot. Not all platforms + support this." + end + + newproperty(:dump) do + desc "Whether to dump the mount. Not all platform support this. + Valid values are `1` or `0`. or `2` on FreeBSD, Default is `0`." + + if Facter["operatingsystem"].value == "FreeBSD" + newvalue(%r{(0|1|2)}) + else + newvalue(%r{(0|1)}) + end + + newvalue(%r{(0|1)}) + + defaultto { + 0 if @resource.managed? + } + end + + newproperty(:target) do + desc "The file in which to store the mount table. Only used by + those providers that write to disk." + + defaultto { if @resource.class.defaultprovider.ancestors.include?(Puppet::Provider::ParsedFile) + @resource.class.defaultprovider.default_target + else + nil + end + } + end + + newparam(:name) do + desc "The mount path for the mount." + + isnamevar + end + + newparam(:path) do + desc "The deprecated name for the mount point. Please use `name` now." + + def value=(value) + warning "'path' is deprecated for mounts. Please use 'name'." + @resource[:name] = value + super + end + end + + newparam(:remounts) do + desc "Whether the mount can be remounted `mount -o remount`. If + this is false, then the filesystem will be unmounted and remounted + manually, which is prone to failure." + + newvalues(:true, :false) + defaultto do + case Facter.value(:operatingsystem) + when "FreeBSD", "Darwin", "AIX" + false + else + true + end + end + end + + def refresh + # Only remount if we're supposed to be mounted. 
+ provider.remount if self.should(:fstype) != "swap" and provider.mounted? + end + + def value(name) + name = symbolize(name) + ret = nil + if property = @parameters[name] + return property.value + end + end + end +end diff --git a/mcollective/lib/puppet/type/nagios_command.rb b/mcollective/lib/puppet/type/nagios_command.rb new file mode 100644 index 000000000..0d0e11b17 --- /dev/null +++ b/mcollective/lib/puppet/type/nagios_command.rb @@ -0,0 +1,3 @@ +require 'puppet/util/nagios_maker' + +Puppet::Util::NagiosMaker.create_nagios_type :command diff --git a/mcollective/lib/puppet/type/nagios_contact.rb b/mcollective/lib/puppet/type/nagios_contact.rb new file mode 100644 index 000000000..d5a1f3cba --- /dev/null +++ b/mcollective/lib/puppet/type/nagios_contact.rb @@ -0,0 +1,3 @@ +require 'puppet/util/nagios_maker' + +Puppet::Util::NagiosMaker.create_nagios_type :contact diff --git a/mcollective/lib/puppet/type/nagios_contactgroup.rb b/mcollective/lib/puppet/type/nagios_contactgroup.rb new file mode 100644 index 000000000..b8f14c07b --- /dev/null +++ b/mcollective/lib/puppet/type/nagios_contactgroup.rb @@ -0,0 +1,3 @@ +require 'puppet/util/nagios_maker' + +Puppet::Util::NagiosMaker.create_nagios_type :contactgroup diff --git a/mcollective/lib/puppet/type/nagios_host.rb b/mcollective/lib/puppet/type/nagios_host.rb new file mode 100644 index 000000000..f2e03f6fb --- /dev/null +++ b/mcollective/lib/puppet/type/nagios_host.rb @@ -0,0 +1,3 @@ +require 'puppet/util/nagios_maker' + +Puppet::Util::NagiosMaker.create_nagios_type :host diff --git a/mcollective/lib/puppet/type/nagios_hostdependency.rb b/mcollective/lib/puppet/type/nagios_hostdependency.rb new file mode 100644 index 000000000..fea71a619 --- /dev/null +++ b/mcollective/lib/puppet/type/nagios_hostdependency.rb @@ -0,0 +1,3 @@ +require 'puppet/util/nagios_maker' + +Puppet::Util::NagiosMaker.create_nagios_type :hostdependency diff --git a/mcollective/lib/puppet/type/nagios_hostescalation.rb b/mcollective/lib/puppet/type/nagios_hostescalation.rb new file mode 100644 index 000000000..5d18af2a6 --- /dev/null +++ b/mcollective/lib/puppet/type/nagios_hostescalation.rb @@ -0,0 +1,3 @@ +require 'puppet/util/nagios_maker' + +Puppet::Util::NagiosMaker.create_nagios_type :hostescalation diff --git a/mcollective/lib/puppet/type/nagios_hostextinfo.rb b/mcollective/lib/puppet/type/nagios_hostextinfo.rb new file mode 100644 index 000000000..da8e08dd8 --- /dev/null +++ b/mcollective/lib/puppet/type/nagios_hostextinfo.rb @@ -0,0 +1,3 @@ +require 'puppet/util/nagios_maker' + +Puppet::Util::NagiosMaker.create_nagios_type :hostextinfo diff --git a/mcollective/lib/puppet/type/nagios_hostgroup.rb b/mcollective/lib/puppet/type/nagios_hostgroup.rb new file mode 100644 index 000000000..e1943beec --- /dev/null +++ b/mcollective/lib/puppet/type/nagios_hostgroup.rb @@ -0,0 +1,3 @@ +require 'puppet/util/nagios_maker' + +Puppet::Util::NagiosMaker.create_nagios_type :hostgroup diff --git a/mcollective/lib/puppet/type/nagios_service.rb b/mcollective/lib/puppet/type/nagios_service.rb new file mode 100644 index 000000000..22b987f56 --- /dev/null +++ b/mcollective/lib/puppet/type/nagios_service.rb @@ -0,0 +1,3 @@ +require 'puppet/util/nagios_maker' + +Puppet::Util::NagiosMaker.create_nagios_type :service diff --git a/mcollective/lib/puppet/type/nagios_servicedependency.rb b/mcollective/lib/puppet/type/nagios_servicedependency.rb new file mode 100644 index 000000000..0e3340c6e --- /dev/null +++ b/mcollective/lib/puppet/type/nagios_servicedependency.rb @@ -0,0 +1,3 @@ +require 
'puppet/util/nagios_maker' + +Puppet::Util::NagiosMaker.create_nagios_type :servicedependency diff --git a/mcollective/lib/puppet/type/nagios_serviceescalation.rb b/mcollective/lib/puppet/type/nagios_serviceescalation.rb new file mode 100644 index 000000000..cb2af1545 --- /dev/null +++ b/mcollective/lib/puppet/type/nagios_serviceescalation.rb @@ -0,0 +1,3 @@ +require 'puppet/util/nagios_maker' + +Puppet::Util::NagiosMaker.create_nagios_type :serviceescalation diff --git a/mcollective/lib/puppet/type/nagios_serviceextinfo.rb b/mcollective/lib/puppet/type/nagios_serviceextinfo.rb new file mode 100644 index 000000000..6bdc70900 --- /dev/null +++ b/mcollective/lib/puppet/type/nagios_serviceextinfo.rb @@ -0,0 +1,3 @@ +require 'puppet/util/nagios_maker' + +Puppet::Util::NagiosMaker.create_nagios_type :serviceextinfo diff --git a/mcollective/lib/puppet/type/nagios_servicegroup.rb b/mcollective/lib/puppet/type/nagios_servicegroup.rb new file mode 100644 index 000000000..fef669639 --- /dev/null +++ b/mcollective/lib/puppet/type/nagios_servicegroup.rb @@ -0,0 +1,3 @@ +require 'puppet/util/nagios_maker' + +Puppet::Util::NagiosMaker.create_nagios_type :servicegroup diff --git a/mcollective/lib/puppet/type/nagios_timeperiod.rb b/mcollective/lib/puppet/type/nagios_timeperiod.rb new file mode 100644 index 000000000..25a06d3ed --- /dev/null +++ b/mcollective/lib/puppet/type/nagios_timeperiod.rb @@ -0,0 +1,3 @@ +require 'puppet/util/nagios_maker' + +Puppet::Util::NagiosMaker.create_nagios_type :timeperiod diff --git a/mcollective/lib/puppet/type/notify.rb b/mcollective/lib/puppet/type/notify.rb new file mode 100644 index 000000000..a6ec1dc8b --- /dev/null +++ b/mcollective/lib/puppet/type/notify.rb @@ -0,0 +1,44 @@ +# +# Simple module for logging messages on the client-side +# + +module Puppet + newtype(:notify) do + @doc = "Sends an arbitrary message to the agent run-time log." + + newproperty(:message) do + desc "The message to be sent to the log." + def sync + case @resource["withpath"] + when :true + send(@resource[:loglevel], self.should) + else + Puppet.send(@resource[:loglevel], self.should) + end + return + end + + def retrieve + :absent + end + + def insync?(is) + false + end + + defaultto { @resource[:name] } + end + + newparam(:withpath) do + desc "Whether to not to show the full object path." + defaultto :false + + newvalues(:true, :false) + end + + newparam(:name) do + desc "An arbitrary tag for your own reference; the name of the message." + isnamevar + end + end +end diff --git a/mcollective/lib/puppet/type/package.rb b/mcollective/lib/puppet/type/package.rb new file mode 100644 index 000000000..1222a5319 --- /dev/null +++ b/mcollective/lib/puppet/type/package.rb @@ -0,0 +1,323 @@ +# Define the different packaging systems. Each package system is implemented +# in a module, which then gets used to individually extend each package object. +# This allows packages to exist on the same machine using different packaging +# systems. + +module Puppet + newtype(:package) do + @doc = "Manage packages. There is a basic dichotomy in package + support right now: Some package types (e.g., yum and apt) can + retrieve their own package files, while others (e.g., rpm and sun) cannot. For those package formats that cannot retrieve + their own files, you can use the `source` parameter to point to + the correct file. 
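    For example, a package supplied as a local file might be declared
    like this (the package name and path are purely illustrative):

        package { mcollective:
          ensure => installed,
          source => \"/tmp/mcollective-1.0-1.noarch.rpm\",
        }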
+ + Puppet will automatically guess the packaging format that you are + using based on the platform you are on, but you can override it + using the `provider` parameter; each provider defines what it + requires in order to function, and you must meet those requirements + to use a given provider. + + **Autorequires:** If Puppet is managing the files specified as a package's + `adminfile`, `responsefile`, or `source`, the package resource will autorequire + those files." + + feature :installable, "The provider can install packages.", + :methods => [:install] + feature :uninstallable, "The provider can uninstall packages.", + :methods => [:uninstall] + feature :upgradeable, "The provider can upgrade to the latest version of a + package. This feature is used by specifying `latest` as the + desired value for the package.", + :methods => [:update, :latest] + feature :purgeable, "The provider can purge packages. This generally means + that all traces of the package are removed, including + existing configuration files. This feature is thus destructive + and should be used with the utmost care.", + :methods => [:purge] + feature :versionable, "The provider is capable of interrogating the + package database for installed version(s), and can select + which out of a set of available versions of a package to + install if asked." + feature :holdable, "The provider is capable of placing packages on hold + such that they are not automatically upgraded as a result of + other package dependencies unless explicit action is taken by + a user or another package. Held is considered a superset of + installed.", + :methods => [:hold] + + ensurable do + desc "What state the package should be in. + *latest* only makes sense for those packaging formats that can + retrieve new packages on their own and will throw an error on + those that cannot. For those packaging systems that allow you + to specify package versions, specify them here. Similarly, + *purged* is only useful for packaging systems that support + the notion of managing configuration files separately from + 'normal' system files." + + attr_accessor :latest + + newvalue(:present, :event => :package_installed) do + provider.install + end + + newvalue(:absent, :event => :package_removed) do + provider.uninstall + end + + newvalue(:purged, :event => :package_purged, :required_features => :purgeable) do + provider.purge + end + + newvalue(:held, :event => :package_held, :required_features => :holdable) do + provider.hold + end + + # Alias the 'present' value. + aliasvalue(:installed, :present) + + newvalue(:latest, :required_features => :upgradeable) do + # Because yum always exits with a 0 exit code, there's a retrieve + # in the "install" method. So, check the current state now, + # to compare against later. + current = self.retrieve + begin + provider.update + rescue => detail + self.fail "Could not update: #{detail}" + end + + if current == :absent + :package_installed + else + :package_changed + end + end + + newvalue(/./, :required_features => :versionable) do + begin + provider.install + rescue => detail + self.fail "Could not update: #{detail}" + end + + if self.retrieve == :absent + :package_installed + else + :package_changed + end + end + + defaultto :installed + + # Override the parent method, because we've got all kinds of + # funky definitions of 'in sync'. + def insync?(is) + @latest ||= nil + @lateststamp ||= (Time.now.to_i - 1000) + # Iterate across all of the should values, and see how they + # turn out. 
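# Roughly: :present is in sync unless the package is absent, purged, or
# held; :latest asks the provider for the newest available version
# (cached for about five minutes) and compares it with what is installed;
# :absent also accepts purged; :purged accepts only purged; and an
# explicit version string must match the installed version exactly.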
+ + @should.each { |should| + case should + when :present + return true unless [:absent, :purged, :held].include?(is) + when :latest + # Short-circuit packages that are not present + return false if is == :absent or is == :purged + + # Don't run 'latest' more than about every 5 minutes + if @latest and ((Time.now.to_i - @lateststamp) / 60) < 5 + #self.debug "Skipping latest check" + else + begin + @latest = provider.latest + @lateststamp = Time.now.to_i + rescue => detail + error = Puppet::Error.new("Could not get latest version: #{detail}") + error.set_backtrace(detail.backtrace) + raise error + end + end + + case is + when @latest + return true + when :present + # This will only happen on retarded packaging systems + # that can't query versions. + return true + else + self.debug "#{@resource.name} #{is.inspect} is installed, latest is #{@latest.inspect}" + end + when :absent + return true if is == :absent or is == :purged + when :purged + return true if is == :purged + when is + return true + end + } + + false + end + + # This retrieves the current state. LAK: I think this method is unused. + def retrieve + provider.properties[:ensure] + end + + # Provide a bit more information when logging upgrades. + def should_to_s(newvalue = @should) + if @latest + @latest.to_s + else + super(newvalue) + end + end + end + + newparam(:name) do + desc "The package name. This is the name that the packaging + system uses internally, which is sometimes (especially on Solaris) + a name that is basically useless to humans. If you want to + abstract package installation, then you can use aliases to provide + a common name to packages: + + # In the 'openssl' class + $ssl = $operatingsystem ? { + solaris => SMCossl, + default => openssl + } + + # It is not an error to set an alias to the same value as the + # object name. + package { $ssl: + ensure => installed, + alias => openssl + } + + . etc. . + + $ssh = $operatingsystem ? { + solaris => SMCossh, + default => openssh + } + + # Use the alias to specify a dependency, rather than + # having another selector to figure it out again. + package { $ssh: + ensure => installed, + alias => openssh, + require => Package[openssl] + } + + " + isnamevar + end + + newparam(:source) do + desc "Where to find the actual package. This must be a local file + (or on a network file system) or a URL that your specific + packaging type understands; Puppet will not retrieve files for you." + end + newparam(:instance) do + desc "A read-only parameter set by the package." + end + newparam(:status) do + desc "A read-only parameter set by the package." + end + + newparam(:type) do + desc "Deprecated form of `provider`." + + munge do |value| + warning "'type' is deprecated; use 'provider' instead" + @resource[:provider] = value + + @resource[:provider] + end + end + + newparam(:adminfile) do + desc "A file containing package defaults for installing packages. + This is currently only used on Solaris. The value will be + validated according to system rules, which in the case of + Solaris means that it should either be a fully qualified path + or it should be in `/var/sadm/install/admin`." + end + + newparam(:responsefile) do + desc "A file containing any necessary answers to questions asked by + the package. This is currently used on Solaris and Debian. The + value will be validated according to system rules, but it should + generally be a fully qualified path." + end + + newparam(:configfiles) do + desc "Whether configfiles should be kept or replaced. 
Most packages + types do not support this parameter." + + defaultto :keep + + newvalues(:keep, :replace) + end + + newparam(:category) do + desc "A read-only parameter set by the package." + end + newparam(:platform) do + desc "A read-only parameter set by the package." + end + newparam(:root) do + desc "A read-only parameter set by the package." + end + newparam(:vendor) do + desc "A read-only parameter set by the package." + end + newparam(:description) do + desc "A read-only parameter set by the package." + end + + newparam(:allowcdrom) do + desc "Tells apt to allow cdrom sources in the sources.list file. + Normally apt will bail if you try this." + + newvalues(:true, :false) + end + + newparam(:flavor) do + desc "Newer versions of OpenBSD support 'flavors', which are + further specifications for which type of package you want." + end + + autorequire(:file) do + autos = [] + [:responsefile, :adminfile].each { |param| + if val = self[param] + autos << val + end + } + + if source = self[:source] + if source =~ /^#{File::SEPARATOR}/ + autos << source + end + end + autos + end + + # This only exists for testing. + def clear + if obj = @parameters[:ensure] + obj.latest = nil + end + end + + # The 'query' method returns a hash of info if the package + # exists and returns nil if it does not. + def exists? + @provider.get(:ensure) != :absent + end + end +end diff --git a/mcollective/lib/puppet/type/port.rb b/mcollective/lib/puppet/type/port.rb new file mode 100755 index 000000000..e19988515 --- /dev/null +++ b/mcollective/lib/puppet/type/port.rb @@ -0,0 +1,119 @@ +#module Puppet +# newtype(:port) do +# @doc = "Installs and manages port entries. For most systems, these +# entries will just be in /etc/services, but some systems (notably OS X) +# will have different solutions." +# +# ensurable +# +# newproperty(:protocols) do +# desc "The protocols the port uses. Valid values are *udp* and *tcp*. +# Most services have both protocols, but not all. If you want +# both protocols, you must specify that; Puppet replaces the +# current values, it does not merge with them. If you specify +# multiple protocols they must be as an array." +# +# def is=(value) +# case value +# when String +# @is = value.split(/\s+/) +# else +# @is = value +# end +# end +# +# def is +# @is +# end +# +# # We actually want to return the whole array here, not just the first +# # value. +# def should +# if defined?(@should) +# if @should[0] == :absent +# return :absent +# else +# return @should +# end +# else +# return nil +# end +# end +# +# validate do |value| +# valids = ["udp", "tcp", "ddp", :absent] +# unless valids.include? value +# raise Puppet::Error, +# "Protocols can be either 'udp' or 'tcp', not #{value}" +# end +# end +# end +# +# newproperty(:number) do +# desc "The port number." +# end +# +# newproperty(:description) do +# desc "The port description." +# end +# +# newproperty(:port_aliases) do +# desc 'Any aliases the port might have. Multiple values must be +# specified as an array. Note that this property is not the same as +# the "alias" metaparam; use this property to add aliases to a port +# in the services file, and "alias" to aliases for use in your Puppet +# scripts.' +# +# # We actually want to return the whole array here, not just the first +# # value. +# def should +# if defined?(@should) +# if @should[0] == :absent +# return :absent +# else +# return @should +# end +# else +# return nil +# end +# end +# +# validate do |value| +# if value.is_a? 
String and value =~ /\s/ +# raise Puppet::Error, +# "Aliases cannot have whitespace in them: %s" % +# value.inspect +# end +# end +# +# munge do |value| +# unless value == "absent" or value == :absent +# # Add the :alias metaparam in addition to the property +# @resource.newmetaparam( +# @resource.class.metaparamclass(:alias), value +# ) +# end +# value +# end +# end +# +# newproperty(:target) do +# desc "The file in which to store service information. Only used by +# those providers that write to disk." +# +# defaultto { if @resource.class.defaultprovider.ancestors.include?(Puppet::Provider::ParsedFile) +# @resource.class.defaultprovider.default_target +# else +# nil +# end +# } +# end +# +# newparam(:name) do +# desc "The port name." +# +# isnamevar +# end +# end +#end + diff --git a/mcollective/lib/puppet/type/resources.rb b/mcollective/lib/puppet/type/resources.rb new file mode 100644 index 000000000..ae41b883b --- /dev/null +++ b/mcollective/lib/puppet/type/resources.rb @@ -0,0 +1,131 @@ +require 'puppet' + +Puppet::Type.newtype(:resources) do + @doc = "This is a metatype that can manage other resource types. Any + metaparams specified here will be passed on to any generated resources, + so you can purge umanaged resources but set `noop` to true so the + purging is only logged and does not actually happen." + + + newparam(:name) do + desc "The name of the type to be managed." + + validate do |name| + raise ArgumentError, "Could not find resource type '#{name}'" unless Puppet::Type.type(name) + end + + munge { |v| v.to_s } + end + + newparam(:purge, :boolean => true) do + desc "Purge unmanaged resources. This will delete any resource + that is not specified in your configuration + and is not required by any specified resources." + + newvalues(:true, :false) + + validate do |value| + if [:true, true, "true"].include?(value) + unless @resource.resource_type.respond_to?(:instances) + raise ArgumentError, "Purging resources of type #{@resource[:name]} is not supported, since they cannot be queried from the system" + end + raise ArgumentError, "Purging is only supported on types that accept 'ensure'" unless @resource.resource_type.validproperty?(:ensure) + end + end + end + + newparam(:unless_system_user) do + desc "This keeps system users from being purged. By default, it + does not purge users whose UIDs are less than or equal to 500, but you can specify + a different UID as the inclusive limit." + + newvalues(:true, :false, /^\d+$/) + + munge do |value| + case value + when /^\d+/ + Integer(value) + when :true, true + 500 + when :false, false + false + when Integer; value + else + raise ArgumentError, "Invalid value #{value.inspect}" + end + end + + defaultto { + if @resource[:name] == "user" + 500 + else + nil + end + } + end + + def check(resource) + @checkmethod ||= "#{self[:name]}_check" + @hascheck ||= respond_to?(@checkmethod) + if @hascheck + return send(@checkmethod, resource) + else + return true + end + end + + def able_to_ensure_absent?(resource) + resource[:ensure] = :absent + rescue ArgumentError, Puppet::Error => detail + err "The 'ensure' attribute on #{self[:name]} resources does not accept 'absent' as a value" + false + end + + # Generate any new resources we need to manage. This is pretty hackish + # right now, because it only supports purging. + def generate + return [] unless self.purge? + resource_type.instances. + reject { |r| catalog.resource_refs.include? r.ref }. + select { |r| check(r) }. + select { |r| r.class.validproperty?(:ensure) }. 
+ select { |r| able_to_ensure_absent?(r) }. + each { |resource| + @parameters.each do |name, param| + resource[name] = param.value if param.metaparam? + end + + # Mark that we're purging, so transactions can handle relationships + # correctly + resource.purging + } + end + + def resource_type + unless defined?(@resource_type) + unless type = Puppet::Type.type(self[:name]) + raise Puppet::DevError, "Could not find resource type" + end + @resource_type = type + end + @resource_type + end + + # Make sure we don't purge users below a certain uid, if the check + # is enabled. + def user_check(resource) + return true unless self[:name] == "user" + return true unless self[:unless_system_user] + + resource[:audit] = :uid + current_values = resource.retrieve_resource + + return false if system_users.include?(resource[:name]) + + current_values[resource.property(:uid)] > self[:unless_system_user] + end + + def system_users + %w{root nobody bin noaccess daemon sys} + end +end diff --git a/mcollective/lib/puppet/type/schedule.rb b/mcollective/lib/puppet/type/schedule.rb new file mode 100755 index 000000000..5fb008f6f --- /dev/null +++ b/mcollective/lib/puppet/type/schedule.rb @@ -0,0 +1,349 @@ +module Puppet + newtype(:schedule) do + @doc = "Defined schedules for Puppet. The important thing to understand + about how schedules are currently implemented in Puppet is that they + can only be used to stop a resource from being applied, they never + guarantee that it is applied. + + Every time Puppet applies its configuration, it will collect the + list of resources whose schedule does not eliminate them from + running right then, but there is currently no system in place to + guarantee that a given resource runs at a given time. If you + specify a very restrictive schedule and Puppet happens to run at a + time within that schedule, then the resources will get applied; + otherwise, that work may never get done. + + Thus, it behooves you to use wider scheduling (e.g., over a couple of + hours) combined with periods and repetitions. For instance, if you + wanted to restrict certain resources to only running once, between + the hours of two and 4 AM, then you would use this schedule: + + schedule { maint: + range => \"2 - 4\", + period => daily, + repeat => 1 + } + + With this schedule, the first time that Puppet runs between 2 and 4 AM, + all resources with this schedule will get applied, but they won't + get applied again between 2 and 4 because they will have already + run once that day, and they won't get applied outside that schedule + because they will be outside the scheduled range. + + Puppet automatically creates a schedule for each valid period with the + same name as that period (e.g., hourly and daily). Additionally, + a schedule named *puppet* is created and used as the default, + with the following attributes: + + schedule { puppet: + period => hourly, + repeat => 2 + } + + This will cause resources to be applied every 30 minutes by default. + " + + newparam(:name) do + desc "The name of the schedule. This name is used to retrieve the + schedule when assigning it to an object: + + schedule { daily: + period => daily, + range => \"2 - 4\", + } + + exec { \"/usr/bin/apt-get update\": + schedule => daily + } + + " + isnamevar + end + + newparam(:range) do + desc "The earliest and latest that a resource can be applied. This + is always a range within a 24 hour period, and hours must be + specified in numbers between 0 and 23, inclusive. 
Minutes and + seconds can be provided, using the normal colon as a separator. + For instance: + + schedule { maintenance: + range => \"1:30 - 4:30\" + } + + This is mostly useful for restricting certain resources to being + applied in maintenance windows or during off-peak hours." + + # This is lame; properties all use arrays as values, but parameters don't. + # That's going to hurt eventually. + validate do |values| + values = [values] unless values.is_a?(Array) + values.each { |value| + unless value.is_a?(String) and + value =~ /\d+(:\d+){0,2}\s*-\s*\d+(:\d+){0,2}/ + self.fail "Invalid range value '#{value}'" + end + } + end + + munge do |values| + values = [values] unless values.is_a?(Array) + ret = [] + + values.each { |value| + range = [] + # Split each range value into a hour, minute, second triad + value.split(/\s*-\s*/).each { |val| + # Add the values as an array. + range << val.split(":").collect { |n| n.to_i } + } + + self.fail "Invalid range #{value}" if range.length != 2 + + # Make sure the hours are valid + [range[0][0], range[1][0]].each do |n| + raise ArgumentError, "Invalid hour '#{n}'" if n < 0 or n > 23 + end + + [range[0][1], range[1][1]].each do |n| + raise ArgumentError, "Invalid minute '#{n}'" if n and (n < 0 or n > 59) + end + if range[0][0] > range[1][0] + self.fail(("Invalid range #{value}; ") + + "ranges cannot span days." + ) + end + ret << range + } + + # Now our array of arrays + ret + end + + def match?(previous, now) + # The lowest-level array is of the hour, minute, second triad + # then it's an array of two of those, to present the limits + # then it's array of those ranges + @value = [@value] unless @value[0][0].is_a?(Array) + + @value.each do |value| + limits = value.collect do |range| + ary = [now.year, now.month, now.day, range[0]] + if range[1] + ary << range[1] + else + ary << now.min + end + + if range[2] + ary << range[2] + else + ary << now.sec + end + + time = Time.local(*ary) + + unless time.hour == range[0] + self.devfail( + "Incorrectly converted time: #{time}: #{time.hour} vs #{range[0]}" + ) + end + + time + end + + unless limits[0] < limits[1] + self.info( + "Assuming upper limit should be that time the next day" + ) + + ary = limits[1].to_a + ary[3] += 1 + limits[1] = Time.local(*ary) + + #self.devfail("Lower limit is above higher limit: %s" % + # limits.inspect + #) + end + + #self.info limits.inspect + #self.notice now + return now.between?(*limits) + end + + # Else, return false, since our current time isn't between + # any valid times + false + end + end + + newparam(:periodmatch) do + desc "Whether periods should be matched by number (e.g., the two times + are in the same hour) or by distance (e.g., the two times are + 60 minutes apart)." + + newvalues(:number, :distance) + + defaultto :distance + end + + newparam(:period) do + desc "The period of repetition for a resource. Choose from among + a fixed list of *hourly*, *daily*, *weekly*, and *monthly*. + The default is for a resource to get applied every time that + Puppet runs, whatever that period is. + + Note that the period defines how often a given resource will get + applied but not when; if you would like to restrict the hours + that a given resource can be applied (e.g., only at night during + a maintenance window) then use the `range` attribute. + + If the provided periods are not sufficient, you can provide a + value to the *repeat* attribute, which will cause Puppet to + schedule the affected resources evenly in the period the + specified number of times. 
Take this schedule: + + schedule { veryoften: + period => hourly, + repeat => 6 + } + + This can cause Puppet to apply that resource up to every 10 minutes. + + At the moment, Puppet cannot guarantee that level of + repetition; that is, it can run up to every 10 minutes, but + internal factors might prevent it from actually running that + often (e.g., long-running Puppet runs will squash conflictingly scheduled runs). + + See the `periodmatch` attribute for tuning whether to match + times by their distance apart or by their specific value." + + newvalues(:hourly, :daily, :weekly, :monthly, :never) + + @@scale = { + :hourly => 3600, + :daily => 86400, + :weekly => 604800, + :monthly => 2592000 + } + @@methods = { + :hourly => :hour, + :daily => :day, + :monthly => :month, + :weekly => proc do |prev, now| + # Run the resource if the previous day was after this weekday (e.g., prev is wed, current is tue) + # or if it's been more than a week since we ran + prev.wday > now.wday or (now - prev) > (24 * 3600 * 7) + end + } + + def match?(previous, now) + return false if value == :never + + value = self.value + case @resource[:periodmatch] + when :number + method = @@methods[value] + if method.is_a?(Proc) + return method.call(previous, now) + else + # We negate it, because if they're equal we don't run + return now.send(method) != previous.send(method) + end + when :distance + scale = @@scale[value] + + # If the number of seconds between the two times is greater + # than the unit of time, we match. We divide the scale + # by the repeat, so that we'll repeat that often within + # the scale. + diff = (now.to_i - previous.to_i) + comparison = (scale / @resource[:repeat]) + + return (now.to_i - previous.to_i) >= (scale / @resource[:repeat]) + end + end + end + + newparam(:repeat) do + desc "How often the application gets repeated in a given period. + Defaults to 1. Must be an integer." + + defaultto 1 + + validate do |value| + unless value.is_a?(Integer) or value =~ /^\d+$/ + raise Puppet::Error, + "Repeat must be a number" + end + + # This implicitly assumes that 'periodmatch' is distance -- that + # is, if there's no value, we assume it's a valid value. + return unless @resource[:periodmatch] + + if value != 1 and @resource[:periodmatch] != :distance + raise Puppet::Error, + "Repeat must be 1 unless periodmatch is 'distance', not '#{@resource[:periodmatch]}'" + end + end + + munge do |value| + value = Integer(value) unless value.is_a?(Integer) + + value + end + + def match?(previous, now) + true + end + end + + def self.instances + [] + end + + def self.mkdefaultschedules + result = [] + Puppet.debug "Creating default schedules" + + result << self.new( + + :name => "puppet", + :period => :hourly, + + :repeat => "2" + ) + + # And then one for every period + @parameters.find { |p| p.name == :period }.value_collection.values.each { |value| + + result << self.new( + :name => value.to_s, + :period => value + ) + } + + result + end + + def match?(previous = nil, now = nil) + + # If we've got a value, then convert it to a Time instance + previous &&= Time.at(previous) + + now ||= Time.now + + # Pull them in order + self.class.allattrs.each { |param| + if @parameters.include?(param) and + @parameters[param].respond_to?(:match?) 
+ return false unless @parameters[param].match?(previous, now) + end + } + + # If we haven't returned false, then return true; in other words, + # any provided schedules need to all match + true + end + end +end diff --git a/mcollective/lib/puppet/type/selboolean.rb b/mcollective/lib/puppet/type/selboolean.rb new file mode 100644 index 000000000..204b89056 --- /dev/null +++ b/mcollective/lib/puppet/type/selboolean.rb @@ -0,0 +1,26 @@ +module Puppet + newtype(:selboolean) do + @doc = "Manages SELinux booleans on systems with SELinux support. The supported booleans + are any of the ones found in `/selinux/booleans/`." + + newparam(:name) do + desc "The name of the SELinux boolean to be managed." + isnamevar + end + + newproperty(:value) do + desc "Whether the the SELinux boolean should be enabled or disabled." + newvalue(:on) + newvalue(:off) + end + + newparam(:persistent) do + desc "If set true, SELinux booleans will be written to disk and persist accross reboots. + The default is `false`." + + defaultto :false + newvalues(:true, :false) + end + + end +end diff --git a/mcollective/lib/puppet/type/selmodule.rb b/mcollective/lib/puppet/type/selmodule.rb new file mode 100644 index 000000000..e76c18cc0 --- /dev/null +++ b/mcollective/lib/puppet/type/selmodule.rb @@ -0,0 +1,55 @@ +# +# Simple module for manageing SELinux policy modules +# + +Puppet::Type.newtype(:selmodule) do + @doc = "Manages loading and unloading of SELinux policy modules + on the system. Requires SELinux support. See man semodule(8) + for more information on SELinux policy modules. + + **Autorequires:** If Puppet is managing the file containing this SELinux policy module (which is either explicitly specified in the `selmodulepath` attribute or will be found at {`selmoduledir`}/{`name`}.pp), the selmodule resource will autorequire that file." + + ensurable + + newparam(:name) do + desc "The name of the SELinux policy to be managed. You should not + include the customary trailing .pp extension." + isnamevar + end + + newparam(:selmoduledir) do + + desc "The directory to look for the compiled pp module file in. + Currently defaults to `/usr/share/selinux/targeted`. If selmodulepath + is not specified the module will be looked for in this directory in a + in a file called NAME.pp, where NAME is the value of the name parameter." + + defaultto "/usr/share/selinux/targeted" + end + + newparam(:selmodulepath) do + + desc "The full path to the compiled .pp policy module. You only need to use + this if the module file is not in the directory pointed at by selmoduledir." + + end + + newproperty(:syncversion) do + + desc "If set to `true`, the policy will be reloaded if the + version found in the on-disk file differs from the loaded + version. If set to `false` (the default) the the only check + that will be made is if the policy is loaded at all or not." + + newvalue(:true) + newvalue(:false) + end + + autorequire(:file) do + if self[:selmodulepath] + [self[:selmodulepath]] + else + ["#{self[:selmoduledir]}/#{self[:name]}.pp"] + end + end +end diff --git a/mcollective/lib/puppet/type/service.rb b/mcollective/lib/puppet/type/service.rb new file mode 100644 index 000000000..3658e2837 --- /dev/null +++ b/mcollective/lib/puppet/type/service.rb @@ -0,0 +1,199 @@ +# This is our main way of managing processes right now. 
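+# For example (illustrative only; the service name and values are
+# hypothetical, but the attributes are the ones defined further down in
+# this type):
+#
+#   service { "sshd":
+#     ensure    => running,
+#     enable    => true,
+#     hasstatus => true,
+#   }
+#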
+# +# a service is distinct from a process in that services +# can only be managed through the interface of an init script +# which is why they have a search path for initscripts and such + +module Puppet + + newtype(:service) do + @doc = "Manage running services. Service support unfortunately varies + widely by platform --- some platforms have very little if any + concept of a running service, and some have a very codified and + powerful concept. Puppet's service support will generally be able + to do the right thing regardless (e.g., if there is no + 'status' command, then Puppet will look in the process table for a + command matching the service name), but the more information you + can provide, the better behaviour you will get. In particular, any + virtual services that don't have a predictable entry in the process table + (for example, `network` on Red Hat/CentOS systems) will manifest odd + behavior on restarts if you don't specify `hasstatus` or a `status` + command. + + Note that if a `service` receives an event from another resource, + the service will get restarted. The actual command to restart the + service depends on the platform. You can provide an explicit command + for restarting with the `restart` attribute, or use the init script's + restart command with the `hasrestart` attribute; if you do neither, + the service's stop and start commands will be used." + + feature :refreshable, "The provider can restart the service.", + :methods => [:restart] + + feature :enableable, "The provider can enable and disable the service", + :methods => [:disable, :enable, :enabled?] + + feature :controllable, "The provider uses a control variable." + + newproperty(:enable, :required_features => :enableable) do + desc "Whether a service should be enabled to start at boot. + This property behaves quite differently depending on the platform; + wherever possible, it relies on local tools to enable or disable + a given service." + + newvalue(:true, :event => :service_enabled) do + provider.enable + end + + newvalue(:false, :event => :service_disabled) do + provider.disable + end + + def retrieve + provider.enabled? + end + end + + # Handle whether the service should actually be running right now. + newproperty(:ensure) do + desc "Whether a service should be running." + + newvalue(:stopped, :event => :service_stopped) do + provider.stop + end + + newvalue(:running, :event => :service_started) do + provider.start + end + + aliasvalue(:false, :stopped) + aliasvalue(:true, :running) + + def retrieve + provider.status + end + + def sync + event = super() + + if property = @resource.property(:enable) + val = property.retrieve + property.sync unless property.safe_insync?(val) + end + + event + end + end + + newparam(:binary) do + desc "The path to the daemon. This is only used for + systems that do not support init scripts. This binary will be + used to start the service if no `start` parameter is + provided." + end + + newparam(:hasstatus) do + desc "Declare the the service's init script has a + functional status command. Based on testing, it was found + that a large number of init scripts on different platforms do + not support any kind of status command; thus, you must specify + manually whether the service you are running has such a + command. Alternately, you can provide a specific command using the + `status` attribute. + + If you specify neither of these, then Puppet will look for the + service name in the process table. 
Be aware that 'virtual' init + scripts such as networking will respond poorly to refresh events + (via notify and subscribe relationships) if you don't override + this default behavior." + + newvalues(:true, :false) + end + newparam(:name) do + desc "The name of the service to run. This name is used to find + the service in whatever service subsystem it is in." + isnamevar + end + + newparam(:path) do + desc "The search path for finding init scripts. Multiple values should + be separated by colons or provided as an array." + + munge do |value| + value = [value] unless value.is_a?(Array) + # LAK:NOTE See http://snurl.com/21zf8 [groups_google_com] + # It affects stand-alone blocks, too. + paths = value.flatten.collect { |p| x = p.split(":") }.flatten + end + + defaultto { provider.class.defpath if provider.class.respond_to?(:defpath) } + end + newparam(:pattern) do + desc "The pattern to search for in the process table. + This is used for stopping services on platforms that do not + support init scripts, and is also used for determining service + status on those service whose init scripts do not include a status + command. + + If this is left unspecified and is needed to check the status + of a service, then the service name will be used instead. + + The pattern can be a simple string or any legal Ruby pattern." + + defaultto { @resource[:binary] || @resource[:name] } + end + newparam(:restart) do + desc "Specify a *restart* command manually. If left + unspecified, the service will be stopped and then started." + end + newparam(:start) do + desc "Specify a *start* command manually. Most service subsystems + support a `start` command, so this will not need to be + specified." + end + newparam(:status) do + desc "Specify a *status* command manually. This command must + return 0 if the service is running and a nonzero value otherwise. + Ideally, these return codes should conform to + [the LSB's specification for init script status actions](http://refspecs.freestandards.org/LSB_3.1.1/LSB-Core-generic/LSB-Core-generic/iniscrptact.html), + but puppet only considers the difference between 0 and nonzero + to be relevant. + + If left unspecified, the status method will be determined + automatically, usually by looking for the service in the process + table." + end + + newparam(:stop) do + desc "Specify a *stop* command manually." + end + + newparam(:control) do + desc "The control variable used to manage services (originally for HP-UX). + Defaults to the upcased service name plus `START` replacing dots with + underscores, for those providers that support the `controllable` feature." + defaultto { resource.name.gsub(".","_").upcase + "_START" if resource.provider.controllable? } + end + + newparam :hasrestart do + desc "Specify that an init script has a `restart` option. Otherwise, + the init script's `stop` and `start` methods are used." + newvalues(:true, :false) + end + + newparam(:manifest) do + desc "Specify a command to config a service, or a path to a manifest to do so." + end + + # Basically just a synonym for restarting. Used to respond + # to events. 
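+ # Illustrative example of that behaviour (the resource names and file
+ # source are hypothetical): a change to the file sends an event to the
+ # service, which lands in this refresh method and restarts it.
+ #
+ #   file { "/etc/ntp.conf":
+ #     source => "puppet:///modules/ntp/ntp.conf",
+ #     notify => Service["ntpd"],
+ #   }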
+ def refresh + # Only restart if we're actually running + if (@parameters[:ensure] || newattr(:ensure)).retrieve == :running + provider.restart + else + debug "Skipping restart; service is not running" + end + end + end +end diff --git a/mcollective/lib/puppet/type/ssh_authorized_key.rb b/mcollective/lib/puppet/type/ssh_authorized_key.rb new file mode 100644 index 000000000..8338e2d64 --- /dev/null +++ b/mcollective/lib/puppet/type/ssh_authorized_key.rb @@ -0,0 +1,102 @@ +module Puppet + newtype(:ssh_authorized_key) do + @doc = "Manages SSH authorized keys. Currently only type 2 keys are + supported. + + **Autorequires:** If Puppet is managing the user account in which this + SSH key should be installed, the `ssh_authorized_key` resource will autorequire + that user." + + ensurable + + newparam(:name) do + desc "The SSH key comment. This attribute is currently used as a + system-wide primary key and therefore has to be unique." + + isnamevar + end + + newproperty(:type) do + desc "The encryption type used: ssh-dss or ssh-rsa." + + newvalue("ssh-dss") + newvalue("ssh-rsa") + + aliasvalue(:dsa, "ssh-dss") + aliasvalue(:rsa, "ssh-rsa") + end + + newproperty(:key) do + desc "The key itself; generally a long string of hex digits." + end + + newproperty(:user) do + desc "The user account in which the SSH key should be installed. + The resource will automatically depend on this user." + end + + newproperty(:target) do + desc "The absolute filename in which to store the SSH key. This + property is optional and should only be used in cases where keys + are stored in a non-standard location (i.e.` not in + `~user/.ssh/authorized_keys`)." + + defaultto :absent + + def should + return super if defined?(@should) and @should[0] != :absent + + return nil unless user = resource[:user] + + begin + return File.expand_path("~#{user}/.ssh/authorized_keys") + rescue + Puppet.debug "The required user is not yet present on the system" + return nil + end + end + + def insync?(is) + is == should + end + end + + newproperty(:options, :array_matching => :all) do + desc "Key options, see sshd(8) for possible values. Multiple values + should be specified as an array." + + defaultto do :absent end + + def is_to_s(value) + if value == :absent or value.include?(:absent) + super + else + value.join(",") + end + end + + def should_to_s(value) + if value == :absent or value.include?(:absent) + super + else + value.join(",") + end + end + end + + autorequire(:user) do + should(:user) if should(:user) + end + + validate do + # Go ahead if target attribute is defined + return if @parameters[:target].shouldorig[0] != :absent + + # Go ahead if user attribute is defined + return if @parameters.include?(:user) + + # If neither target nor user is defined, this is an error + raise Puppet::Error, "Attribute 'user' or 'target' is mandatory" + end + end +end diff --git a/mcollective/lib/puppet/type/sshkey.rb b/mcollective/lib/puppet/type/sshkey.rb new file mode 100755 index 000000000..b7a1b8a8d --- /dev/null +++ b/mcollective/lib/puppet/type/sshkey.rb @@ -0,0 +1,67 @@ +module Puppet + newtype(:sshkey) do + @doc = "Installs and manages ssh host keys. At this point, this type + only knows how to install keys into `/etc/ssh/ssh_known_hosts`. See + the `ssh_authorized_key` type to manage authorized keys." + + ensurable + + newproperty(:type) do + desc "The encryption type used. Probably ssh-dss or ssh-rsa." 
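+ # Illustrative usage of this type (host name and key material are
+ # hypothetical); the aliasvalue calls below let manifests use the short
+ # forms dsa and rsa:
+ #
+ #   sshkey { "gateway.example.com":
+ #     ensure => present,
+ #     type   => rsa,
+ #     key    => "AAAAB3NzaC1yc2EAAAADAQAB...",
+ #   }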
+ + newvalue("ssh-dss") + newvalue("ssh-rsa") + aliasvalue(:dsa, "ssh-dss") + aliasvalue(:rsa, "ssh-rsa") + end + + newproperty(:key) do + desc "The key itself; generally a long string of hex digits." + end + + # FIXME This should automagically check for aliases to the hosts, just + # to see if we can automatically glean any aliases. + newproperty(:host_aliases) do + desc 'Any aliases the host might have. Multiple values must be + specified as an array.' + + attr_accessor :meta + + def insync?(is) + is == @should + end + # We actually want to return the whole array here, not just the first + # value. + def should + defined?(@should) ? @should : nil + end + + validate do |value| + if value =~ /\s/ + raise Puppet::Error, "Aliases cannot include whitespace" + end + if value =~ /,/ + raise Puppet::Error, "Aliases cannot include whitespace" + end + end + end + + newparam(:name) do + desc "The host name that the key is associated with." + + isnamevar + end + + newproperty(:target) do + desc "The file in which to store the ssh key. Only used by + the `parsed` provider." + + defaultto { if @resource.class.defaultprovider.ancestors.include?(Puppet::Provider::ParsedFile) + @resource.class.defaultprovider.default_target + else + nil + end + } + end + end +end diff --git a/mcollective/lib/puppet/type/stage.rb b/mcollective/lib/puppet/type/stage.rb new file mode 100644 index 000000000..0736dc4b9 --- /dev/null +++ b/mcollective/lib/puppet/type/stage.rb @@ -0,0 +1,19 @@ +Puppet::Type.newtype(:stage) do + desc "A resource type for specifying run stages. The actual stage should + be specified on resources: + + class { foo: stage => pre } + + And you must manually control stage order: + + stage { pre: before => Stage[main] } + + You automatically get a 'main' stage created, and by default all resources + get inserted into that stage. + + You can only set stages on class resources, not normal builtin resources." + + newparam :name do + desc "The name of the stage. This will be used as the 'stage' for each resource." + end +end diff --git a/mcollective/lib/puppet/type/tidy.rb b/mcollective/lib/puppet/type/tidy.rb new file mode 100755 index 000000000..65cc077cf --- /dev/null +++ b/mcollective/lib/puppet/type/tidy.rb @@ -0,0 +1,331 @@ +Puppet::Type.newtype(:tidy) do + require 'puppet/file_serving/fileset' + require 'puppet/file_bucket/dipper' + + @doc = "Remove unwanted files based on specific criteria. Multiple + criteria are OR'd together, so a file that is too large but is not + old enough will still get tidied. + + If you don't specify either `age` or `size`, then all files will + be removed. + + This resource type works by generating a file resource for every file + that should be deleted and then letting that resource perform the + actual deletion. + " + + newparam(:path) do + desc "The path to the file or directory to manage. Must be fully + qualified." + isnamevar + end + + newparam(:recurse) do + desc "If target is a directory, recursively descend + into the directory looking for files to tidy." + + newvalues(:true, :false, :inf, /^[0-9]+$/) + + # Replace the validation so that we allow numbers in + # addition to string representations of them. 
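+ # The munge below normalizes the accepted forms, roughly (illustrative):
+ #   :true or :inf -> true   (recurse without limit)
+ #   :false        -> false  (do not recurse)
+ #   3 or "3"      -> 3      (recurse at most three levels)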
+ validate { |arg| } + munge do |value| + newval = super(value) + case newval + when :true, :inf; true + when :false; false + when Integer, Fixnum, Bignum; value + when /^\d+$/; Integer(value) + else + raise ArgumentError, "Invalid recurse value #{value.inspect}" + end + end + end + + newparam(:matches) do + desc "One or more (shell type) file glob patterns, which restrict + the list of files to be tidied to those whose basenames match + at least one of the patterns specified. Multiple patterns can + be specified using an array. + + Example: + + tidy { \"/tmp\": + age => \"1w\", + recurse => 1, + matches => [ \"[0-9]pub*.tmp\", \"*.temp\", \"tmpfile?\" ] + } + + This removes files from `/tmp` if they are one week old or older, + are not in a subdirectory and match one of the shell globs given. + + Note that the patterns are matched against the basename of each + file -- that is, your glob patterns should not have any '/' + characters in them, since you are only specifying against the last + bit of the file. + + Finally, note that you must now specify a non-zero/non-false value + for recurse if matches is used, as matches only apply to files found + by recursion (there's no reason to use static patterns match against + a statically determined path). Requiering explicit recursion clears + up a common source of confusion." + + # Make sure we convert to an array. + munge do |value| + fail "Tidy can't use matches with recurse 0, false, or undef" if "#{@resource[:recurse]}" =~ /^(0|false|)$/ + [value].flatten + end + + # Does a given path match our glob patterns, if any? Return true + # if no patterns have been provided. + def tidy?(path, stat) + basename = File.basename(path) + flags = File::FNM_DOTMATCH | File::FNM_PATHNAME + return(value.find {|pattern| File.fnmatch(pattern, basename, flags) } ? true : false) + end + end + + newparam(:backup) do + desc "Whether tidied files should be backed up. Any values are passed + directly to the file resources used for actual file deletion, so use + its backup documentation to determine valid values." + end + + newparam(:age) do + desc "Tidy files whose age is equal to or greater than + the specified time. You can choose seconds, minutes, + hours, days, or weeks by specifying the first letter of any + of those words (e.g., '1w'). + + Specifying 0 will remove all files." + + @@ageconvertors = { + :s => 1, + :m => 60 + } + + @@ageconvertors[:h] = @@ageconvertors[:m] * 60 + @@ageconvertors[:d] = @@ageconvertors[:h] * 24 + @@ageconvertors[:w] = @@ageconvertors[:d] * 7 + + def convert(unit, multi) + if num = @@ageconvertors[unit] + return num * multi + else + self.fail "Invalid age unit '#{unit}'" + end + end + + def tidy?(path, stat) + # If the file's older than we allow, we should get rid of it. + (Time.now.to_i - stat.send(resource[:type]).to_i) > value + end + + munge do |age| + unit = multi = nil + case age + when /^([0-9]+)(\w)\w*$/ + multi = Integer($1) + unit = $2.downcase.intern + when /^([0-9]+)$/ + multi = Integer($1) + unit = :d + else + self.fail "Invalid tidy age #{age}" + end + + convert(unit, multi) + end + end + + newparam(:size) do + desc "Tidy files whose size is equal to or greater than + the specified size. Unqualified values are in kilobytes, but + *b*, *k*, and *m* can be appended to specify *bytes*, *kilobytes*, + and *megabytes*, respectively. Only the first character is + significant, so the full word can also be used." 
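+ # For example (illustrative): "10m" munges to 10 * 1024 * 1024 bytes,
+ # while a bare "10" is treated as 10 kilobytes, per the table and
+ # convert method below.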
+ + @@sizeconvertors = { + :b => 0, + :k => 1, + :m => 2, + :g => 3 + } + + def convert(unit, multi) + if num = @@sizeconvertors[unit] + result = multi + num.times do result *= 1024 end + return result + else + self.fail "Invalid size unit '#{unit}'" + end + end + + def tidy?(path, stat) + stat.size >= value + end + + munge do |size| + case size + when /^([0-9]+)(\w)\w*$/ + multi = Integer($1) + unit = $2.downcase.intern + when /^([0-9]+)$/ + multi = Integer($1) + unit = :k + else + self.fail "Invalid tidy size #{age}" + end + + convert(unit, multi) + end + end + + newparam(:type) do + desc "Set the mechanism for determining age." + + newvalues(:atime, :mtime, :ctime) + + defaultto :atime + end + + newparam(:rmdirs, :boolean => true) do + desc "Tidy directories in addition to files; that is, remove + directories whose age is older than the specified criteria. + This will only remove empty directories, so all contained + files must also be tidied before a directory gets removed." + + newvalues :true, :false + end + + # Erase PFile's validate method + validate do + end + + def self.instances + [] + end + + @depthfirst = true + + def initialize(hash) + super + + # only allow backing up into filebuckets + self[:backup] = false unless self[:backup].is_a? Puppet::FileBucket::Dipper + end + + # Make a file resource to remove a given file. + def mkfile(path) + # Force deletion, so directories actually get deleted. + Puppet::Type.type(:file).new :path => path, :backup => self[:backup], :ensure => :absent, :force => true + end + + def retrieve + # Our ensure property knows how to retrieve everything for us. + if obj = @parameters[:ensure] + return obj.retrieve + else + return {} + end + end + + # Hack things a bit so we only ever check the ensure property. + def properties + [] + end + + def eval_generate + [] + end + + def generate + return [] unless stat(self[:path]) + + case self[:recurse] + when Integer, Fixnum, Bignum, /^\d+$/ + parameter = { :recurse => true, :recurselimit => self[:recurse] } + when true, :true, :inf + parameter = { :recurse => true } + end + + if parameter + files = Puppet::FileServing::Fileset.new(self[:path], parameter).files.collect do |f| + f == "." ? self[:path] : File.join(self[:path], f) + end + else + files = [self[:path]] + end + result = files.find_all { |path| tidy?(path) }.collect { |path| mkfile(path) }.each { |file| notice "Tidying #{file.ref}" }.sort { |a,b| b[:path] <=> a[:path] } + + # No need to worry about relationships if we don't have rmdirs; there won't be + # any directories. + return result unless rmdirs? + + # Now make sure that all directories require the files they contain, if all are available, + # so that a directory is emptied before we try to remove it. + files_by_name = result.inject({}) { |hash, file| hash[file[:path]] = file; hash } + + files_by_name.keys.sort { |a,b| b <=> b }.each do |path| + dir = File.dirname(path) + next unless resource = files_by_name[dir] + if resource[:require] + resource[:require] << Puppet::Resource.new(:file, path) + else + resource[:require] = [Puppet::Resource.new(:file, path)] + end + end + + result + end + + # Does a given path match our glob patterns, if any? Return true + # if no patterns have been provided. 
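+ # Example (illustrative): with matches => ["*.log"], a file named
+ # "old.log" is considered for tidying, while "old.log.gz" is not,
+ # because the glob is applied to the basename only.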
+ def matches?(path) + return true unless self[:matches] + + basename = File.basename(path) + flags = File::FNM_DOTMATCH | File::FNM_PATHNAME + if self[:matches].find {|pattern| File.fnmatch(pattern, basename, flags) } + return true + else + debug "No specified patterns match #{path}, not tidying" + return false + end + end + + # Should we remove the specified file? + def tidy?(path) + return false unless stat = self.stat(path) + + return false if stat.ftype == "directory" and ! rmdirs? + + # The 'matches' parameter isn't OR'ed with the other tests -- + # it's just used to reduce the list of files we can match. + return false if param = parameter(:matches) and ! param.tidy?(path, stat) + + tested = false + [:age, :size].each do |name| + next unless param = parameter(name) + tested = true + return true if param.tidy?(path, stat) + end + + # If they don't specify either, then the file should always be removed. + return true unless tested + false + end + + def stat(path) + begin + File.lstat(path) + rescue Errno::ENOENT => error + info "File does not exist" + return nil + rescue Errno::EACCES => error + warning "Could not stat; permission denied" + return nil + end + end +end diff --git a/mcollective/lib/puppet/type/user.rb b/mcollective/lib/puppet/type/user.rb new file mode 100755 index 000000000..8d04fdc30 --- /dev/null +++ b/mcollective/lib/puppet/type/user.rb @@ -0,0 +1,457 @@ +require 'etc' +require 'facter' +require 'puppet/property/list' +require 'puppet/property/ordered_list' +require 'puppet/property/keyvalue' + +module Puppet + newtype(:user) do + @doc = "Manage users. This type is mostly built to manage system + users, so it is lacking some features useful for managing normal + users. + + This resource type uses the prescribed native tools for creating + groups and generally uses POSIX APIs for retrieving information + about them. It does not directly modify `/etc/passwd` or anything. + + **Autorequires:** If Puppet is managing the user's primary group (as provided in the `gid` attribute), the user resource will autorequire that group. If Puppet is managing any role accounts corresponding to the user's roles, the user resource will autorequire those role accounts." + + feature :allows_duplicates, + "The provider supports duplicate users with the same UID." + + feature :manages_homedir, + "The provider can create and remove home directories." + + feature :manages_passwords, + "The provider can modify user passwords, by accepting a password + hash." + + feature :manages_password_age, + "The provider can set age requirements and restrictions for + passwords." + + feature :manages_solaris_rbac, + "The provider can manage roles and normal users" + + feature :manages_expiry, + "The provider can manage the expiry date for a user." + + feature :system_users, + "The provider allows you to create system users with lower UIDs." + + newproperty(:ensure, :parent => Puppet::Property::Ensure) do + newvalue(:present, :event => :user_created) do + provider.create + end + + newvalue(:absent, :event => :user_removed) do + provider.delete + end + + newvalue(:role, :event => :role_created, :required_features => :manages_solaris_rbac) do + provider.create_role + end + + desc "The basic state that the object should be in." + + # If they're talking about the thing at all, they generally want to + # say it should exist. + defaultto do + if @resource.managed? + :present + else + nil + end + end + + def retrieve + if provider.exists? + if provider.respond_to?(:is_role?) and provider.is_role? 
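+ # Providers that understand roles (see the manages_solaris_rbac
+ # feature above) report such accounts here, so retrieve returns
+ # :role rather than :present for them.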
+ return :role + else + return :present + end + else + return :absent + end + end + end + + newproperty(:home) do + desc "The home directory of the user. The directory must be created + separately and is not currently checked for existence." + end + + newproperty(:uid) do + desc "The user ID. Must be specified numerically. For new users + being created, if no user ID is specified then one will be + chosen automatically, which will likely result in the same user + having different IDs on different systems, which is not + recommended. This is especially noteworthy if you use Puppet + to manage the same user on both Darwin and other platforms, + since Puppet does the ID generation for you on Darwin, but the + tools do so on other platforms." + + munge do |value| + case value + when String + if value =~ /^[-0-9]+$/ + value = Integer(value) + end + end + + return value + end + end + + newproperty(:gid) do + desc "The user's primary group. Can be specified numerically or + by name." + + munge do |value| + if value.is_a?(String) and value =~ /^[-0-9]+$/ + Integer(value) + else + value + end + end + + def insync?(is) + # We know the 'is' is a number, so we need to convert the 'should' to a number, + # too. + @should.each do |value| + return true if number = Puppet::Util.gid(value) and is == number + end + + false + end + + def sync + found = false + @should.each do |value| + if number = Puppet::Util.gid(value) + provider.gid = number + found = true + break + end + end + + fail "Could not find group(s) #{@should.join(",")}" unless found + + # Use the default event. + end + end + + newproperty(:comment) do + desc "A description of the user. Generally is a user's full name." + end + + newproperty(:shell) do + desc "The user's login shell. The shell must exist and be + executable." + end + + newproperty(:password, :required_features => :manages_passwords) do + desc "The user's password, in whatever encrypted format the local machine requires. Be sure to enclose any value that includes a dollar sign ($) in single quotes (\')." + + validate do |value| + raise ArgumentError, "Passwords cannot include ':'" if value.is_a?(String) and value.include?(":") + end + + def change_to_s(currentvalue, newvalue) + if currentvalue == :absent + return "created password" + else + return "changed password" + end + end + + def is_to_s( currentvalue ) + return '[old password hash redacted]' + end + def should_to_s( newvalue ) + return '[new password hash redacted]' + end + + end + + newproperty(:password_min_age, :required_features => :manages_password_age) do + desc "The minimum amount of time in days a password must be used before it may be changed" + + munge do |value| + case value + when String + Integer(value) + else + value + end + end + + validate do |value| + if value.to_s !~ /^-?\d+$/ + raise ArgumentError, "Password minimum age must be provided as a number" + end + end + end + + newproperty(:password_max_age, :required_features => :manages_password_age) do + desc "The maximum amount of time in days a password may be used before it must be changed" + + munge do |value| + case value + when String + Integer(value) + else + value + end + end + + validate do |value| + if value.to_s !~ /^-?\d+$/ + raise ArgumentError, "Password maximum age must be provided as a number" + end + end + end + + newproperty(:groups, :parent => Puppet::Property::List) do + desc "The groups of which the user is a member. The primary + group should not be listed. Multiple groups should be + specified as an array." 
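+ # Illustrative usage (the user and group names are hypothetical):
+ #
+ #   user { "deploy":
+ #     ensure     => present,
+ #     gid        => "deploy",
+ #     groups     => ["wheel", "docker"],
+ #     membership => minimum,
+ #   }
+ #
+ # The validate block below rejects a comma-separated string such as
+ # "wheel,docker"; multiple groups must be given as an array.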
+ + validate do |value| + if value =~ /^\d+$/ + raise ArgumentError, "Group names must be provided, not numbers" + end + raise ArgumentError, "Group names must be provided as an array, not a comma-separated list" if value.include?(",") + end + end + + newparam(:name) do + desc "User name. While limitations are determined for + each operating system, it is generally a good idea to keep to + the degenerate 8 characters, beginning with a letter." + isnamevar + end + + newparam(:membership) do + desc "Whether specified groups should be treated as the only groups + of which the user is a member or whether they should merely + be treated as the minimum membership list." + + newvalues(:inclusive, :minimum) + + defaultto :minimum + end + + newparam(:system, :boolean => true) do + desc "Whether the user is a system user with lower UID." + + newvalues(:true, :false) + + defaultto false + end + + newparam(:allowdupe, :boolean => true) do + desc "Whether to allow duplicate UIDs." + + newvalues(:true, :false) + + defaultto false + end + + newparam(:managehome, :boolean => true) do + desc "Whether to manage the home directory when managing the user." + + newvalues(:true, :false) + + defaultto false + + validate do |val| + if val.to_s == "true" + raise ArgumentError, "User provider #{provider.class.name} can not manage home directories" unless provider.class.manages_homedir? + end + end + end + + newproperty(:expiry, :required_features => :manages_expiry) do + desc "The expiry date for this user. Must be provided in + a zero padded YYYY-MM-DD format - e.g 2010-02-19." + + validate do |value| + if value !~ /^\d{4}-\d{2}-\d{2}$/ + raise ArgumentError, "Expiry dates must be YYYY-MM-DD" + end + end + end + + # Autorequire the group, if it's around + autorequire(:group) do + autos = [] + + if obj = @parameters[:gid] and groups = obj.shouldorig + groups = groups.collect { |group| + if group =~ /^\d+$/ + Integer(group) + else + group + end + } + groups.each { |group| + case group + when Integer + if resource = catalog.resources.find { |r| r.is_a?(Puppet::Type.type(:group)) and r.should(:gid) == group } + autos << resource + end + else + autos << group + end + } + end + + if obj = @parameters[:groups] and groups = obj.should + autos += groups.split(",") + end + + autos + end + + # Provide an external hook. Yay breaking out of APIs. + def exists? + provider.exists? + end + + def retrieve + absent = false + properties.inject({}) { |prophash, property| + current_value = :absent + + if absent + prophash[property] = :absent + else + current_value = property.retrieve + prophash[property] = current_value + end + + if property.name == :ensure and current_value == :absent + absent = true + end + prophash + } + end + + newproperty(:roles, :parent => Puppet::Property::List, :required_features => :manages_solaris_rbac) do + desc "The roles the user has. Multiple roles should be + specified as an array." 
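+ # Illustrative usage (Solaris RBAC; the role names are hypothetical):
+ #
+ #   user { "operator":
+ #     ensure => present,
+ #     roles  => ["backupadm", "zfssnap"],
+ #   }
+ #
+ # As with groups, the validate block below requires an array rather
+ # than a comma-separated string.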
+ + def membership + :role_membership + end + + validate do |value| + if value =~ /^\d+$/ + raise ArgumentError, "Role names must be provided, not numbers" + end + raise ArgumentError, "Role names must be provided as an array, not a comma-separated list" if value.include?(",") + end + end + + #autorequire the roles that the user has + autorequire(:user) do + reqs = [] + + if roles_property = @parameters[:roles] and roles = roles_property.should + reqs += roles.split(',') + end + + reqs + end + + newparam(:role_membership) do + desc "Whether specified roles should be treated as the only roles + of which the user is a member or whether they should merely + be treated as the minimum membership list." + + newvalues(:inclusive, :minimum) + + defaultto :minimum + end + + newproperty(:auths, :parent => Puppet::Property::List, :required_features => :manages_solaris_rbac) do + desc "The auths the user has. Multiple auths should be + specified as an array." + + def membership + :auth_membership + end + + validate do |value| + if value =~ /^\d+$/ + raise ArgumentError, "Auth names must be provided, not numbers" + end + raise ArgumentError, "Auth names must be provided as an array, not a comma-separated list" if value.include?(",") + end + end + + newparam(:auth_membership) do + desc "Whether specified auths should be treated as the only auths + of which the user is a member or whether they should merely + be treated as the minimum membership list." + + newvalues(:inclusive, :minimum) + + defaultto :minimum + end + + newproperty(:profiles, :parent => Puppet::Property::OrderedList, :required_features => :manages_solaris_rbac) do + desc "The profiles the user has. Multiple profiles should be + specified as an array." + + def membership + :profile_membership + end + + validate do |value| + if value =~ /^\d+$/ + raise ArgumentError, "Profile names must be provided, not numbers" + end + raise ArgumentError, "Profile names must be provided as an array, not a comma-separated list" if value.include?(",") + end + end + + newparam(:profile_membership) do + desc "Whether specified roles should be treated as the only roles + of which the user is a member or whether they should merely + be treated as the minimum membership list." + + newvalues(:inclusive, :minimum) + + defaultto :minimum + end + + newproperty(:keys, :parent => Puppet::Property::KeyValue, :required_features => :manages_solaris_rbac) do + desc "Specify user attributes in an array of keyvalue pairs" + + def membership + :key_membership + end + + validate do |value| + raise ArgumentError, "key value pairs must be seperated by an =" unless value.include?("=") + end + end + + newparam(:key_membership) do + desc "Whether specified key value pairs should be treated as the only attributes + of the user or whether they should merely + be treated as the minimum list." + + newvalues(:inclusive, :minimum) + + defaultto :minimum + end + + newproperty(:project, :required_features => :manages_solaris_rbac) do + desc "The name of the project associated with a user" + end + end +end diff --git a/mcollective/lib/puppet/type/whit.rb b/mcollective/lib/puppet/type/whit.rb new file mode 100644 index 000000000..55bfcfb46 --- /dev/null +++ b/mcollective/lib/puppet/type/whit.rb @@ -0,0 +1,11 @@ +Puppet::Type.newtype(:whit) do + desc "The smallest possible resource type, for when you need a resource and naught else." + + newparam :name do + desc "The name of the whit, because it must have one." 
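+ # (The to_s override below renders a whit as Class[<name>], which is
+ # how it shows up when the resource is printed.)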
+ end + + def to_s + "Class[#{name}]" + end +end diff --git a/mcollective/lib/puppet/type/yumrepo.rb b/mcollective/lib/puppet/type/yumrepo.rb new file mode 100644 index 000000000..9b4c79428 --- /dev/null +++ b/mcollective/lib/puppet/type/yumrepo.rb @@ -0,0 +1,359 @@ +# Description of yum repositories + +require 'puppet/util/inifile' + +module Puppet + # A property for one entry in a .ini-style file + class IniProperty < Puppet::Property + def insync?(is) + # A should property of :absent is the same as nil + if is.nil? && should == :absent + return true + end + super(is) + end + + def sync + if safe_insync?(retrieve) + result = nil + else + result = set(self.should) + if should == :absent + resource.section[inikey] = nil + else + resource.section[inikey] = should + end + end + result + end + + def retrieve + resource.section[inikey] + end + + def inikey + name.to_s + end + + # Set the key associated with this property to KEY, instead + # of using the property's NAME + def self.inikey(key) + # Override the inikey instance method + # Is there a way to do this without resorting to strings ? + # Using a block fails because the block can't access + # the variable 'key' in the outer scope + self.class_eval("def inikey ; \"#{key.to_s}\" ; end") + end + + end + + # Doc string for properties that can be made 'absent' + ABSENT_DOC="Set this to 'absent' to remove it from the file completely" + + newtype(:yumrepo) do + @doc = "The client-side description of a yum repository. Repository + configurations are found by parsing `/etc/yum.conf` and + the files indicated by the `reposdir` option in that file + (see yum.conf(5) for details) + + Most parameters are identical to the ones documented + in yum.conf(5) + + Continuation lines that yum supports for example for the + baseurl are not supported. No attempt is made to access + files included with the **include** directive" + + class << self + attr_accessor :filetype + # The writer is only used for testing, there should be no need + # to change yumconf or inifile in any other context + attr_accessor :yumconf + attr_writer :inifile + end + + self.filetype = Puppet::Util::FileType.filetype(:flat) + + @inifile = nil + + @yumconf = "/etc/yum.conf" + + # Where to put files for brand new sections + @defaultrepodir = nil + + def self.instances + l = [] + check = validproperties + clear + inifile.each_section do |s| + next if s.name == "main" + obj = new(:name => s.name, :audit => check) + current_values = obj.retrieve + obj.eachproperty do |property| + if current_values[property].nil? + obj.delete(property.name) + else + property.should = current_values[property] + end + end + obj.delete(:audit) + l << obj + end + l + end + + # Return the Puppet::Util::IniConfig::File for the whole yum config + def self.inifile + if @inifile.nil? + @inifile = read + main = @inifile['main'] + raise Puppet::Error, "File #{yumconf} does not contain a main section" if main.nil? + reposdir = main['reposdir'] + reposdir ||= "/etc/yum.repos.d, /etc/yum/repos.d" + reposdir.gsub!(/[\n,]/, " ") + reposdir.split.each do |dir| + Dir::glob("#{dir}/*.repo").each do |file| + @inifile.read(file) if File.file?(file) + end + end + reposdir.split.each do |dir| + if File::directory?(dir) && File::writable?(dir) + @defaultrepodir = dir + break + end + end + end + @inifile + end + + # Parse the yum config files. 
Only exposed for the tests + # Non-test code should use self.inifile to get at the + # underlying file + def self.read + result = Puppet::Util::IniConfig::File.new + result.read(yumconf) + main = result['main'] + raise Puppet::Error, "File #{yumconf} does not contain a main section" if main.nil? + reposdir = main['reposdir'] + reposdir ||= "/etc/yum.repos.d, /etc/yum/repos.d" + reposdir.gsub!(/[\n,]/, " ") + reposdir.split.each do |dir| + Dir::glob("#{dir}/*.repo").each do |file| + result.read(file) if File.file?(file) + end + end + if @defaultrepodir.nil? + reposdir.split.each do |dir| + if File::directory?(dir) && File::writable?(dir) + @defaultrepodir = dir + break + end + end + end + result + end + + # Return the Puppet::Util::IniConfig::Section with name NAME + # from the yum config + def self.section(name) + result = inifile[name] + if result.nil? + # Brand new section + path = yumconf + path = File::join(@defaultrepodir, "#{name}.repo") unless @defaultrepodir.nil? + Puppet::info "create new repo #{name} in file #{path}" + result = inifile.add_section(name, path) + end + result + end + + # Store all modifications back to disk + def self.store + inifile.store + unless Puppet[:noop] + target_mode = 0644 # FIXME: should be configurable + inifile.each_file do |file| + current_mode = File.stat(file).mode & 0777 + unless current_mode == target_mode + Puppet::info "changing mode of #{file} from %03o to %03o" % [current_mode, target_mode] + File.chmod(target_mode, file) + end + end + end + end + + # This is only used during testing. + def self.clear + @inifile = nil + @yumconf = "/etc/yum.conf" + @defaultrepodir = nil + end + + # Return the Puppet::Util::IniConfig::Section for this yumrepo resource + def section + self.class.section(self[:name]) + end + + # Store modifications to this yumrepo resource back to disk + def flush + self.class.store + end + + newparam(:name) do + desc "The name of the repository. This corresponds to the + repositoryid parameter in yum.conf(5)." + isnamevar + end + + newproperty(:descr, :parent => Puppet::IniProperty) do + desc "A human readable description of the repository. + This corresponds to the name parameter in yum.conf(5). + #{ABSENT_DOC}" + newvalue(:absent) { self.should = :absent } + newvalue(/.*/) { } + inikey "name" + end + + newproperty(:mirrorlist, :parent => Puppet::IniProperty) do + desc "The URL that holds the list of mirrors for this repository. + #{ABSENT_DOC}" + newvalue(:absent) { self.should = :absent } + # Should really check that it's a valid URL + newvalue(/.*/) { } + end + + newproperty(:baseurl, :parent => Puppet::IniProperty) do + desc "The URL for this repository.\n#{ABSENT_DOC}" + newvalue(:absent) { self.should = :absent } + # Should really check that it's a valid URL + newvalue(/.*/) { } + end + + newproperty(:enabled, :parent => Puppet::IniProperty) do + desc "Whether this repository is enabled or disabled. Possible + values are '0', and '1'.\n#{ABSENT_DOC}" + newvalue(:absent) { self.should = :absent } + newvalue(%r{(0|1)}) { } + end + + newproperty(:gpgcheck, :parent => Puppet::IniProperty) do + desc "Whether to check the GPG signature on packages installed + from this repository. Possible values are '0', and '1'. 
+ \n#{ABSENT_DOC}" + newvalue(:absent) { self.should = :absent } + newvalue(%r{(0|1)}) { } + end + + newproperty(:gpgkey, :parent => Puppet::IniProperty) do + desc "The URL for the GPG key with which packages from this + repository are signed.\n#{ABSENT_DOC}" + newvalue(:absent) { self.should = :absent } + # Should really check that it's a valid URL + newvalue(/.*/) { } + end + + newproperty(:include, :parent => Puppet::IniProperty) do + desc "A URL from which to include the config.\n#{ABSENT_DOC}" + newvalue(:absent) { self.should = :absent } + # Should really check that it's a valid URL + newvalue(/.*/) { } + end + + newproperty(:exclude, :parent => Puppet::IniProperty) do + desc "List of shell globs. Matching packages will never be + considered in updates or installs for this repo. + #{ABSENT_DOC}" + newvalue(:absent) { self.should = :absent } + newvalue(/.*/) { } + end + + newproperty(:includepkgs, :parent => Puppet::IniProperty) do + desc "List of shell globs. If this is set, only packages + matching one of the globs will be considered for + update or install.\n#{ABSENT_DOC}" + newvalue(:absent) { self.should = :absent } + newvalue(/.*/) { } + end + + newproperty(:enablegroups, :parent => Puppet::IniProperty) do + desc "Determines whether yum will allow the use of + package groups for this repository. Possible + values are '0', and '1'.\n#{ABSENT_DOC}" + newvalue(:absent) { self.should = :absent } + newvalue(%r{(0|1)}) { } + end + + newproperty(:failovermethod, :parent => Puppet::IniProperty) do + desc "Either 'roundrobin' or 'priority'.\n#{ABSENT_DOC}" + newvalue(:absent) { self.should = :absent } + newvalue(%r{roundrobin|priority}) { } + end + + newproperty(:keepalive, :parent => Puppet::IniProperty) do + desc "Either '1' or '0'. This tells yum whether or not HTTP/1.1 + keepalive should be used with this repository.\n#{ABSENT_DOC}" + newvalue(:absent) { self.should = :absent } + newvalue(%r{(0|1)}) { } + end + + newproperty(:http_caching, :parent => Puppet::IniProperty) do + desc "Either 'packages' or 'all' or 'none'.\n#{ABSENT_DOC}" + newvalue(:absent) { self.should = :absent } + newvalue(%r(packages|all|none)) { } + end + + newproperty(:timeout, :parent => Puppet::IniProperty) do + desc "Number of seconds to wait for a connection before timing + out.\n#{ABSENT_DOC}" + newvalue(:absent) { self.should = :absent } + newvalue(%r{[0-9]+}) { } + end + + newproperty(:metadata_expire, :parent => Puppet::IniProperty) do + desc "Number of seconds after which the metadata will expire. + #{ABSENT_DOC}" + newvalue(:absent) { self.should = :absent } + newvalue(%r{[0-9]+}) { } + end + + newproperty(:protect, :parent => Puppet::IniProperty) do + desc "Enable or disable protection for this repository. Requires + that the protectbase plugin is installed and enabled. + #{ABSENT_DOC}" + newvalue(:absent) { self.should = :absent } + newvalue(%r{(0|1)}) { } + end + + newproperty(:priority, :parent => Puppet::IniProperty) do + desc "Priority of this repository from 1-99. Requires that + the priorities plugin is installed and enabled. 
+ #{ABSENT_DOC}" + newvalue(:absent) { self.should = :absent } + newvalue(%r{[1-9][0-9]?}) { } + end + + newproperty(:cost, :parent => Puppet::IniProperty) do + desc "Cost of this repository.\n#{ABSENT_DOC}" + newvalue(:absent) { self.should = :absent } + newvalue(%r{\d+}) { } + end + + newproperty(:proxy, :parent => Puppet::IniProperty) do + desc "URL to the proxy server for this repository.\n#{ABSENT_DOC}" + newvalue(:absent) { self.should = :absent } + # Should really check that it's a valid URL + newvalue(/.*/) { } + end + + newproperty(:proxy_username, :parent => Puppet::IniProperty) do + desc "Username for this proxy.\n#{ABSENT_DOC}" + newvalue(:absent) { self.should = :absent } + newvalue(/.*/) { } + end + + newproperty(:proxy_password, :parent => Puppet::IniProperty) do + desc "Password for this proxy.\n#{ABSENT_DOC}" + newvalue(:absent) { self.should = :absent } + newvalue(/.*/) { } + end + end +end diff --git a/mcollective/lib/puppet/type/zfs.rb b/mcollective/lib/puppet/type/zfs.rb new file mode 100755 index 000000000..6f04bddd8 --- /dev/null +++ b/mcollective/lib/puppet/type/zfs.rb @@ -0,0 +1,52 @@ +module Puppet + newtype(:zfs) do + @doc = "Manage zfs. Create destroy and set properties on zfs instances. + +**Autorequires:** If Puppet is managing the zpool at the root of this zfs instance, the zfs resource will autorequire it. If Puppet is managing any parent zfs instances, the zfs resource will autorequire them." + + ensurable + + newparam(:name) do + desc "The full name for this filesystem. (including the zpool)" + end + + newproperty(:mountpoint) do + desc "The mountpoint property." + end + + newproperty(:compression) do + desc "The compression property." + end + + newproperty(:copies) do + desc "The copies property." + end + + newproperty(:quota) do + desc "The quota property." + end + + newproperty(:reservation) do + desc "The reservation property." + end + + newproperty(:sharenfs) do + desc "The sharenfs property." + end + + newproperty(:snapdir) do + desc "The snapdir property." + end + + autorequire(:zpool) do + #strip the zpool off the zfs name and autorequire it + [@parameters[:name].value.split('/')[0]] + end + + autorequire(:zfs) do + #slice and dice, we want all the zfs before this one + names = @parameters[:name].value.split('/') + names.slice(1..-2).inject([]) { |a,v| a << "#{a.last}/#{v}" }.collect { |fs| names[0] + fs } + end + end +end diff --git a/mcollective/lib/puppet/type/zone.rb b/mcollective/lib/puppet/type/zone.rb new file mode 100644 index 000000000..471619c98 --- /dev/null +++ b/mcollective/lib/puppet/type/zone.rb @@ -0,0 +1,446 @@ +Puppet::Type.newtype(:zone) do + @doc = "Solaris zones. + +**Autorequires:** If Puppet is managing the directory specified as the root of the zone's filesystem (with the `path` attribute), the zone resource will autorequire that directory." + + # These properties modify the zone configuration, and they need to provide + # the text separately from syncing it, so all config statements can be rolled + # into a single creation statement. + class ZoneConfigProperty < Puppet::Property + # Perform the config operation. + def sync + provider.setconfig self.configtext + end + end + + # Those properties that can have multiple instances. + class ZoneMultiConfigProperty < ZoneConfigProperty + def configtext + list = @should + + current_value = self.retrieve + + unless current_value.is_a? Symbol + if current_value.is_a? 
Array + list += current_value + else + list << current_value if current_value + end + end + + # Some hackery so we can test whether current_value is an array or a symbol + if current_value.is_a? Array + tmpis = current_value + else + if current_value + tmpis = [current_value] + else + tmpis = [] + end + end + + rms = [] + adds = [] + + # Collect the modifications to make + list.sort.uniq.collect do |obj| + # Skip objectories that are configured and should be + next if tmpis.include?(obj) and @should.include?(obj) + + if tmpis.include?(obj) + rms << obj + else + adds << obj + end + end + + + # And then perform all of the removals before any of the adds. + (rms.collect { |o| rm(o) } + adds.collect { |o| add(o) }).join("\n") + end + + # We want all specified directories to be included. + def insync?(current_value) + if current_value.is_a? Array and @should.is_a? Array + current_value.sort == @should.sort + else + current_value == @should + end + end + end + + ensurable do + desc "The running state of the zone. The valid states directly reflect + the states that `zoneadm` provides. The states are linear, + in that a zone must be `configured` then `installed`, and + only then can be `running`. Note also that `halt` is currently + used to stop zones." + + @states = {} + @parametervalues = [] + + def self.alias_state(values) + @state_aliases ||= {} + values.each do |nick, name| + @state_aliases[nick] = name + end + end + + def self.newvalue(name, hash) + @parametervalues = [] if @parametervalues.is_a? Hash + + @parametervalues << name + + @states[name] = hash + hash[:name] = name + end + + def self.state_name(name) + if other = @state_aliases[name] + other + else + name + end + end + + newvalue :absent, :down => :destroy + newvalue :configured, :up => :configure, :down => :uninstall + newvalue :installed, :up => :install, :down => :stop + newvalue :running, :up => :start + + alias_state :incomplete => :installed, :ready => :installed, :shutting_down => :running + + defaultto :running + + def self.state_index(value) + @parametervalues.index(state_name(value)) + end + + # Return all of the states between two listed values, exclusive + # of the first item. + def self.state_sequence(first, second) + findex = sindex = nil + unless findex = @parametervalues.index(state_name(first)) + raise ArgumentError, "'#{first}' is not a valid zone state" + end + unless sindex = @parametervalues.index(state_name(second)) + raise ArgumentError, "'#{first}' is not a valid zone state" + end + list = nil + + # Apparently ranges are unidirectional, so we have to reverse + # the range op twice. + if findex > sindex + list = @parametervalues[sindex..findex].collect do |name| + @states[name] + end.reverse + else + list = @parametervalues[findex..sindex].collect do |name| + @states[name] + end + end + + # The first result is the current state, so don't return it. + list[1..-1] + end + + def retrieve + provider.properties[:ensure] + end + + def sync + method = nil + if up? + direction = :up + else + direction = :down + end + + # We need to get the state we're currently in and just call + # everything between it and us. + self.class.state_sequence(self.retrieve, self.should).each do |state| + if method = state[direction] + warned = false + while provider.processing? 
+ unless warned + info "Waiting for zone to finish processing" + warned = true + end + sleep 1 + end + provider.send(method) + else + raise Puppet::DevError, "Cannot move #{direction} from #{st[:name]}" + end + end + + ("zone_#{self.should}").intern + end + + # Are we moving up the property tree? + def up? + current_value = self.retrieve + self.class.state_index(current_value) < self.class.state_index(self.should) + end + end + + newparam(:name) do + desc "The name of the zone." + + isnamevar + end + + newparam(:id) do + desc "The numerical ID of the zone. This number is autogenerated + and cannot be changed." + end + + newparam(:clone) do + desc "Instead of installing the zone, clone it from another zone. + If the zone root resides on a zfs file system, a snapshot will be + used to create the clone, is it redisides on ufs, a copy of the zone + will be used. The zone you clone from must not be running." + end + + newproperty(:ip, :parent => ZoneMultiConfigProperty) do + require 'ipaddr' + + desc "The IP address of the zone. IP addresses must be specified + with the interface, separated by a colon, e.g.: bge0:192.168.0.1. + For multiple interfaces, specify them in an array." + + # Add an interface. + def add(str) + interface, ip, defrouter = ipsplit(str) + cmd = "add net\n" + cmd += "set physical=#{interface}\n" if interface + cmd += "set address=#{ip}\n" if ip + cmd += "set defrouter=#{defrouter}\n" if defrouter + #if @resource[:iptype] == :shared + cmd += "end\n" + end + + # Convert a string into the component interface, address and defrouter + def ipsplit(str) + interface, address, defrouter = str.split(':') + return interface, address, defrouter + end + + # Remove an interface. + def rm(str) + interface, ip, defrouter = ipsplit(str) + # Reality seems to disagree with the documentation here; the docs + # specify that braces are required, but they're apparently only + # required if you're specifying multiple values. + if ip + "remove net address=#{ip}" + elsif interface + "remove net interface=#{interface}" + else + raise ArgumentError, "can not remove network based on default router" + end + end + end + + newproperty(:iptype, :parent => ZoneConfigProperty) do + desc "The IP stack type of the zone. Can either be 'shared' or 'exclusive'." + + defaultto :shared + + newvalue :shared + newvalue :exclusive + + def configtext + "set ip-type=#{self.should}" + end + end + + newproperty(:autoboot, :parent => ZoneConfigProperty) do + desc "Whether the zone should automatically boot." + + defaultto true + + newvalue(:true) {} + newvalue(:false) {} + + def configtext + "set autoboot=#{self.should}" + end + end + + newproperty(:pool, :parent => ZoneConfigProperty) do + desc "The resource pool for this zone." + + def configtext + "set pool=#{self.should}" + end + end + + newproperty(:shares, :parent => ZoneConfigProperty) do + desc "Number of FSS CPU shares allocated to the zone." + + def configtext + "add rctl\nset name=zone.cpu-shares\nadd value (priv=privileged,limit=#{self.should},action=none)\nend" + end + end + + newproperty(:inherit, :parent => ZoneMultiConfigProperty) do + desc "The list of directories that the zone inherits from the global + zone. All directories must be fully qualified." + + validate do |value| + unless value =~ /^\// + raise ArgumentError, "Inherited filesystems must be fully qualified" + end + end + + # Add a directory to our list of inherited directories. 
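+ # For a directory such as "/usr" (illustrative), add below emits the
+ # zonecfg fragment
+ #
+ #   add inherit-pkg-dir
+ #   set dir=/usr
+ #   end
+ #
+ # and rm emits "remove inherit-pkg-dir dir=/usr".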
+ def add(dir) + "add inherit-pkg-dir\nset dir=#{dir}\nend" + end + + def rm(dir) + # Reality seems to disagree with the documentation here; the docs + # specify that braces are required, but they're apparently only + # required if you're specifying multiple values. + "remove inherit-pkg-dir dir=#{dir}" + end + + def should + @should + end + end + + # Specify the sysidcfg file. This is pretty hackish, because it's + # only used to boot the zone the very first time. + newparam(:sysidcfg) do + desc %{The text to go into the sysidcfg file when the zone is first + booted. The best way is to use a template: + + # $templatedir/sysidcfg + system_locale=en_US + timezone=GMT + terminal=xterms + security_policy=NONE + root_password=<%= password %> + timeserver=localhost + name_service=DNS {domain_name=<%= domain %> name_server=<%= nameserver %>} + network_interface=primary {hostname=<%= realhostname %> + ip_address=<%= ip %> + netmask=<%= netmask %> + protocol_ipv6=no + default_route=<%= defaultroute %>} + nfs4_domain=dynamic + + And then call that: + + zone { myzone: + ip => "bge0:192.168.0.23", + sysidcfg => template(sysidcfg), + path => "/opt/zones/myzone", + realhostname => "fully.qualified.domain.name" + } + + The sysidcfg only matters on the first booting of the zone, + so Puppet only checks for it at that time.} + end + + newparam(:path) do + desc "The root of the zone's filesystem. Must be a fully qualified + file name. If you include '%s' in the path, then it will be + replaced with the zone's name. At this point, you cannot use + Puppet to move a zone." + + validate do |value| + unless value =~ /^\// + raise ArgumentError, "The zone base must be fully qualified" + end + end + + munge do |value| + if value =~ /%s/ + value % @resource[:name] + else + value + end + end + end + + newparam(:create_args) do + desc "Arguments to the zonecfg create command. This can be used to create branded zones." + end + + newparam(:install_args) do + desc "Arguments to the zoneadm install command. This can be used to create branded zones." + end + + newparam(:realhostname) do + desc "The actual hostname of the zone." + end + + # If Puppet is also managing the base dir or its parent dir, list them + # both as prerequisites. + autorequire(:file) do + if @parameters.include? :path + [@parameters[:path].value, File.dirname(@parameters[:path].value)] + else + nil + end + end + + def validate_ip(ip, name) + IPAddr.new(ip) if ip + rescue ArgumentError + self.fail "'#{ip}' is an invalid #{name}" + end + + validate do + value = self[:ip] + interface, address, defrouter = value.split(':') + if self[:iptype] == :shared + if (interface && address && defrouter.nil?) || + (interface && address && defrouter) + validate_ip(address, "IP address") + validate_ip(defrouter, "default router") + else + self.fail "ip must contain interface name and ip address separated by a \":\"" + end + else + self.fail "only interface may be specified when using exclusive IP stack: #{value}" unless interface && address.nil? && defrouter.nil? + end + + self.fail "zone path is required" unless self[:path] + end + + def retrieve + provider.flush + if hash = provider.properties and hash[:ensure] != :absent + result = setstatus(hash) + result + else + # Return all properties as absent. + return properties.inject({}) do | prophash, property| + prophash[property] = :absent + prophash + end + end + end + + # Take the results of a listing and set everything appropriately. 
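+ # Example (illustrative): given provider output such as
+ #   { :ensure => :running, :autoboot => :true, :id => "3" }
+ # the properties managed on the resource (:ensure and :autoboot here)
+ # are returned keyed by their property objects, while plain parameters
+ # such as :id are simply assigned on the resource.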
+ def setstatus(hash) + prophash = {} + hash.each do |param, value| + next if param == :name + case self.class.attrtype(param) + when :property + # Only try to provide values for the properties we're managing + if prop = self.property(param) + prophash[prop] = value + end + else + self[param] = value + end + end + prophash + end +end diff --git a/mcollective/lib/puppet/type/zpool.rb b/mcollective/lib/puppet/type/zpool.rb new file mode 100755 index 000000000..40ee8f286 --- /dev/null +++ b/mcollective/lib/puppet/type/zpool.rb @@ -0,0 +1,90 @@ +module Puppet + class Property + + class VDev < Property + + def flatten_and_sort(array) + array.collect { |a| a.split(' ') }.flatten.sort + end + + def insync?(is) + return @should == [:absent] if is == :absent + + flatten_and_sort(is) == flatten_and_sort(@should) + end + end + + class MultiVDev < VDev + def insync?(is) + return @should == [:absent] if is == :absent + + return false unless is.length == @should.length + + is.each_with_index { |list, i| return false unless flatten_and_sort(list) == flatten_and_sort(@should[i]) } + + #if we made it this far we are in sync + true + end + end + end + + newtype(:zpool) do + @doc = "Manage zpools. Create and delete zpools. The provider WILL NOT SYNC, only report differences. + + Supports vdevs with mirrors, raidz, logs and spares." + + ensurable + + newproperty(:disk, :array_matching => :all, :parent => Puppet::Property::VDev) do + desc "The disk(s) for this pool. Can be an array or space separated string" + end + + newproperty(:mirror, :array_matching => :all, :parent => Puppet::Property::MultiVDev) do + desc "List of all the devices to mirror for this pool. Each mirror should be a + space separated string: + + mirror => [\"disk1 disk2\", \"disk3 disk4\"], + + " + + validate do |value| + raise ArgumentError, "mirror names must be provided as string separated, not a comma-separated list" if value.include?(",") + end + end + + newproperty(:raidz, :array_matching => :all, :parent => Puppet::Property::MultiVDev) do + desc "List of all the devices to raid for this pool. Should be an array of + space separated strings: + + raidz => [\"disk1 disk2\", \"disk3 disk4\"], + + " + + validate do |value| + raise ArgumentError, "raid names must be provided as string separated, not a comma-separated list" if value.include?(",") + end + end + + newproperty(:spare, :array_matching => :all, :parent => Puppet::Property::VDev) do + desc "Spare disk(s) for this pool." + end + + newproperty(:log, :array_matching => :all, :parent => Puppet::Property::VDev) do + desc "Log disks for this pool. (doesn't support mirroring yet)" + end + + newparam(:pool) do + desc "The name for this pool." + isnamevar + end + + newparam(:raid_parity) do + desc "Determines parity when using raidz property." + end + + validate do + has_should = [:disk, :mirror, :raidz].select { |prop| self.should(prop) } + self.fail "You cannot specify #{has_should.join(" and ")} on this type (only one)" if has_should.length > 1 + end + end +end diff --git a/mcollective/lib/puppet/util.rb b/mcollective/lib/puppet/util.rb new file mode 100644 index 000000000..58cdc1794 --- /dev/null +++ b/mcollective/lib/puppet/util.rb @@ -0,0 +1,498 @@ +# A module to collect utility functions. +require 'puppet/util/monkey_patches' +require 'puppet/external/lock' +require 'puppet/util/execution_stub' +require 'sync' +require 'monitor' +require 'tempfile' +require 'pathname' + +module Puppet + # A command failed to execute. 
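+ # Raised, for example, by Util.execute and Util.execpipe below when
+ # :failonfail is in effect and the child exits with a non-zero status.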
+ require 'puppet/error' + class ExecutionFailure < Puppet::Error + end +module Util + require 'benchmark' + + # These are all for backward compatibility -- these are methods that used + # to be in Puppet::Util but have been moved into external modules. + require 'puppet/util/posix' + extend Puppet::Util::POSIX + + @@sync_objects = {}.extend MonitorMixin + + def self.activerecord_version + if (defined?(::ActiveRecord) and defined?(::ActiveRecord::VERSION) and defined?(::ActiveRecord::VERSION::MAJOR) and defined?(::ActiveRecord::VERSION::MINOR)) + ([::ActiveRecord::VERSION::MAJOR, ::ActiveRecord::VERSION::MINOR].join('.').to_f) + else + 0 + end + end + + def self.synchronize_on(x,type) + sync_object,users = 0,1 + begin + @@sync_objects.synchronize { + (@@sync_objects[x] ||= [Sync.new,0])[users] += 1 + } + @@sync_objects[x][sync_object].synchronize(type) { yield } + ensure + @@sync_objects.synchronize { + @@sync_objects.delete(x) unless (@@sync_objects[x][users] -= 1) > 0 + } + end + end + + # Change the process to a different user + def self.chuser + if group = Puppet[:group] + begin + Puppet::Util::SUIDManager.change_group(group, true) + rescue => detail + Puppet.warning "could not change to group #{group.inspect}: #{detail}" + $stderr.puts "could not change to group #{group.inspect}" + + # Don't exit on failed group changes, since it's + # not fatal + #exit(74) + end + end + + if user = Puppet[:user] + begin + Puppet::Util::SUIDManager.change_user(user, true) + rescue => detail + $stderr.puts "Could not change to user #{user}: #{detail}" + exit(74) + end + end + end + + # Create instance methods for each of the log levels. This allows + # the messages to be a little richer. Most classes will be calling this + # method. + def self.logmethods(klass, useself = true) + Puppet::Util::Log.eachlevel { |level| + klass.send(:define_method, level, proc { |args| + args = args.join(" ") if args.is_a?(Array) + if useself + + Puppet::Util::Log.create( + :level => level, + :source => self, + :message => args + ) + else + + Puppet::Util::Log.create( + :level => level, + :message => args + ) + end + }) + } + end + + # Proxy a bunch of methods to another object. + def self.classproxy(klass, objmethod, *methods) + classobj = class << klass; self; end + methods.each do |method| + classobj.send(:define_method, method) do |*args| + obj = self.send(objmethod) + + obj.send(method, *args) + end + end + end + + # Proxy a bunch of methods to another object. + def self.proxy(klass, objmethod, *methods) + methods.each do |method| + klass.send(:define_method, method) do |*args| + obj = self.send(objmethod) + + obj.send(method, *args) + end + end + end + + # XXX this should all be done using puppet objects, not using + # normal mkdir + def self.recmkdir(dir,mode = 0755) + if FileTest.exist?(dir) + return false + else + tmp = dir.sub(/^\//,'') + path = [File::SEPARATOR] + tmp.split(File::SEPARATOR).each { |dir| + path.push dir + if ! FileTest.exist?(File.join(path)) + Dir.mkdir(File.join(path), mode) + elsif FileTest.directory?(File.join(path)) + next + else FileTest.exist?(File.join(path)) + raise "Cannot create #{dir}: basedir #{File.join(path)} is a file" + end + } + return true + end + end + + # Execute a given chunk of code with a new umask. + def self.withumask(mask) + cur = File.umask(mask) + + begin + yield + ensure + File.umask(cur) + end + end + + def benchmark(*args) + msg = args.pop + level = args.pop + object = nil + + if args.empty? 
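+ # No object was passed in: log against ourselves if we respond to the
+ # given level, otherwise fall back to the Puppet module.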
+ if respond_to?(level) + object = self + else + object = Puppet + end + else + object = args.pop + end + + raise Puppet::DevError, "Failed to provide level to :benchmark" unless level + + unless level == :none or object.respond_to? level + raise Puppet::DevError, "Benchmarked object does not respond to #{level}" + end + + # Only benchmark if our log level is high enough + if level != :none and Puppet::Util::Log.sendlevel?(level) + result = nil + seconds = Benchmark.realtime { + yield + } + object.send(level, msg + (" in %0.2f seconds" % seconds)) + return seconds + else + yield + end + end + + def which(bin) + if bin =~ /^\// + return bin if FileTest.file? bin and FileTest.executable? bin + else + ENV['PATH'].split(File::PATH_SEPARATOR).each do |dir| + dest=File.join(dir, bin) + return dest if FileTest.file? dest and FileTest.executable? dest + end + end + nil + end + module_function :which + + # Execute the provided command in a pipe, yielding the pipe object. + def execpipe(command, failonfail = true) + if respond_to? :debug + debug "Executing '#{command}'" + else + Puppet.debug "Executing '#{command}'" + end + + output = open("| #{command} 2>&1") do |pipe| + yield pipe + end + + if failonfail + unless $CHILD_STATUS == 0 + raise ExecutionFailure, output + end + end + + output + end + + def execfail(command, exception) + output = execute(command) + return output + rescue ExecutionFailure + raise exception, output + end + + # Execute the desired command, and return the status and output. + # def execute(command, failonfail = true, uid = nil, gid = nil) + # :combine sets whether or not to combine stdout/stderr in the output + # :stdinfile sets a file that can be used for stdin. Passing a string + # for stdin is not currently supported. + def execute(command, arguments = {:failonfail => true, :combine => true}) + if command.is_a?(Array) + command = command.flatten.collect { |i| i.to_s } + str = command.join(" ") + else + # We require an array here so we know where we're incorrectly + # using a string instead of an array. Once everything is + # switched to an array, we might relax this requirement. + raise ArgumentError, "Must pass an array to execute()" + end + + if respond_to? :debug + debug "Executing '#{str}'" + else + Puppet.debug "Executing '#{str}'" + end + + if execution_stub = Puppet::Util::ExecutionStub.current_value + return execution_stub.call(command, arguments) + end + + @@os ||= Facter.value(:operatingsystem) + output = nil + child_pid, child_status = nil + # There are problems with read blocking with badly behaved children + # read.partialread doesn't seem to capture either stdout or stderr + # We hack around this using a temporary file + + # The idea here is to avoid IO#read whenever possible. + output_file="/dev/null" + error_file="/dev/null" + if ! arguments[:squelch] + output_file = Tempfile.new("puppet") + error_file=output_file if arguments[:combine] + end + + if Puppet.features.posix? 
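+ # On POSIX platforms we fork: the child redirects its stdio to the
+ # temporary files set up above and execs the command, while the parent
+ # waits and shifts the exit code out of the wait status.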
+ oldverb = $VERBOSE + $VERBOSE = nil + child_pid = Kernel.fork + $VERBOSE = oldverb + if child_pid + # Parent process executes this + child_status = (Process.waitpid2(child_pid)[1]).to_i >> 8 + else + # Child process executes this + Process.setsid + begin + if arguments[:stdinfile] + $stdin.reopen(arguments[:stdinfile]) + else + $stdin.reopen("/dev/null") + end + $stdout.reopen(output_file) + $stderr.reopen(error_file) + + 3.upto(256){|fd| IO::new(fd).close rescue nil} + Puppet::Util::SUIDManager.change_privileges(arguments[:uid], arguments[:gid], true) + ENV['LANG'] = ENV['LC_ALL'] = ENV['LC_MESSAGES'] = ENV['LANGUAGE'] = 'C' + if command.is_a?(Array) + Kernel.exec(*command) + else + Kernel.exec(command) + end + rescue => detail + puts detail.to_s + exit!(1) + end + end + elsif Puppet.features.microsoft_windows? + command = command.collect {|part| '"' + part.gsub(/"/, '\\"') + '"'}.join(" ") if command.is_a?(Array) + Puppet.debug "Creating process '#{command}'" + processinfo = Process.create( :command_line => command ) + child_status = (Process.waitpid2(child_pid)[1]).to_i >> 8 + end + + # read output in if required + if ! arguments[:squelch] + + # Make sure the file's actually there. This is + # basically a race condition, and is probably a horrible + # way to handle it, but, well, oh well. + unless FileTest.exists?(output_file.path) + Puppet.warning "sleeping" + sleep 0.5 + unless FileTest.exists?(output_file.path) + Puppet.warning "sleeping 2" + sleep 1 + unless FileTest.exists?(output_file.path) + Puppet.warning "Could not get output" + output = "" + end + end + end + unless output + # We have to explicitly open here, so that it reopens + # after the child writes. + output = output_file.open.read + + # The 'true' causes the file to get unlinked right away. + output_file.close(true) + end + end + + if arguments[:failonfail] + unless child_status == 0 + raise ExecutionFailure, "Execution of '#{str}' returned #{child_status}: #{output}" + end + end + + output + end + + module_function :execute + + # Create an exclusive lock. + def threadlock(resource, type = Sync::EX) + Puppet::Util.synchronize_on(resource,type) { yield } + end + + # Because some modules provide their own version of this method. + alias util_execute execute + + module_function :benchmark + + def memory + unless defined?(@pmap) + @pmap = which('pmap') + end + if @pmap + %x{#{@pmap} #{Process.pid}| grep total}.chomp.sub(/^\s*total\s+/, '').sub(/K$/, '').to_i + else + 0 + end + end + + def symbolize(value) + if value.respond_to? :intern + value.intern + else + value + end + end + + def symbolizehash(hash) + newhash = {} + hash.each do |name, val| + if name.is_a? String + newhash[name.intern] = val + else + newhash[name] = val + end + end + end + + def symbolizehash!(hash) + hash.each do |name, val| + if name.is_a? String + hash[name.intern] = val + hash.delete(name) + end + end + + hash + end + module_function :symbolize, :symbolizehash, :symbolizehash! + + # Just benchmark, with no logging. + def thinmark + seconds = Benchmark.realtime { + yield + } + + seconds + end + + module_function :memory, :thinmark + + # Replace a file, securely. This takes a block, and passes it the file + # handle of a file open for writing. Write the replacement content inside + # the block and it will safely replace the target file. + # + # This method will make no changes to the target file until the content is + # successfully written and the block returns without raising an error. 
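+ #
+ # A minimal usage sketch (illustrative, not from the original source):
+ #
+ #   Puppet::Util.replace_file("/etc/motd", 0644) do |tempfile|
+ #     tempfile.puts "managed by puppet"
+ #   end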
+ # + # As far as possible the state of the existing file, such as mode, is + # preserved. This works hard to avoid loss of any metadata, but will result + # in an inode change for the file. + # + # Arguments: `filename`, `default_mode` + # + # The filename is the file we are going to replace. + # + # The default_mode is the mode to use when the target file doesn't already + # exist; if the file is present we copy the existing mode/owner/group values + # across. + def replace_file(file, default_mode, &block) + raise Puppet::DevError, "replace_file requires a block" unless block_given? + + file = Pathname(file) + tempfile = Tempfile.new(file.basename.to_s, file.dirname.to_s) + + file_exists = file.exist? + + # If the file exists, use its current mode/owner/group. If it doesn't, use + # the supplied mode, and default to current user/group. + if file_exists + stat = file.lstat + + # We only care about the four lowest-order octets. Higher octets are + # filesystem-specific. + mode = stat.mode & 07777 + uid = stat.uid + gid = stat.gid + else + mode = default_mode + uid = Process.euid + gid = Process.egid + end + + # Set properties of the temporary file before we write the content, because + # Tempfile doesn't promise to be safe from reading by other people, just + # that it avoids races around creating the file. + tempfile.chmod(mode) + tempfile.chown(uid, gid) + + # OK, now allow the caller to write the content of the file. + yield tempfile + + # Now, make sure the data (which includes the mode) is safe on disk. + tempfile.flush + begin + tempfile.fsync + rescue NotImplementedError + # fsync may not be implemented by Ruby on all platforms, but + # there is absolutely no recovery path if we detect that. So, we just + # ignore the return code. + # + # However, don't be fooled: that is accepting that we are running in + # an unsafe fashion. If you are porting to a new platform don't stub + # that out. + end + + tempfile.close + + File.rename(tempfile.path, file) + + # Ideally, we would now fsync the directory as well, but Ruby doesn't + # have support for that, and it doesn't matter /that/ much... + + # Return something true, and possibly useful. + file + end + module_function :replace_file +end +end + +require 'puppet/util/errors' +require 'puppet/util/methodhelper' +require 'puppet/util/metaid' +require 'puppet/util/classgen' +require 'puppet/util/docs' +require 'puppet/util/execution' +require 'puppet/util/logging' +require 'puppet/util/package' +require 'puppet/util/warnings' diff --git a/mcollective/lib/puppet/util/autoload.rb b/mcollective/lib/puppet/util/autoload.rb new file mode 100644 index 000000000..f0dd0a5c5 --- /dev/null +++ b/mcollective/lib/puppet/util/autoload.rb @@ -0,0 +1,156 @@ +require 'puppet/util/warnings' +require 'puppet/util/cacher' + +# Autoload paths, either based on names or all at once. +class Puppet::Util::Autoload + require 'puppet/util/autoload/file_cache' + + include Puppet::Util + include Puppet::Util::Warnings + include Puppet::Util::Cacher + include Puppet::Util::Autoload::FileCache + + @autoloaders = {} + @loaded = [] + + class << self + attr_reader :autoloaders + private :autoloaders + end + + # Send [], []=, and :clear to the @autloaders hash + Puppet::Util.classproxy self, :autoloaders, "[]", "[]=" + + # List all loaded files. + def self.list_loaded + @loaded.sort { |a,b| a[0] <=> b[0] }.collect do |path, hash| + "#{path}: #{hash[:file]}" + end + end + + # Has a given path been loaded? 
This is used for testing whether a + # changed file should be loaded or just ignored. This is only + # used in network/client/master, when downloading plugins, to + # see if a given plugin is currently loaded and thus should be + # reloaded. + def self.loaded?(path) + path = path.to_s.sub(/\.rb$/, '') + @loaded.include?(path) + end + + # Save the fact that a given path has been loaded. This is so + # we can load downloaded plugins if they've already been loaded + # into memory. + def self.loaded(file) + $" << file + ".rb" unless $".include?(file) + @loaded << file unless @loaded.include?(file) + end + + attr_accessor :object, :path, :objwarn, :wrap + + def initialize(obj, path, options = {}) + @path = path.to_s + raise ArgumentError, "Autoload paths cannot be fully qualified" if @path !~ /^\w/ + @object = obj + + self.class[obj] = self + + options.each do |opt, value| + opt = opt.intern if opt.is_a? String + begin + self.send(opt.to_s + "=", value) + rescue NoMethodError + raise ArgumentError, "#{opt} is not a valid option" + end + end + + @wrap = true unless defined?(@wrap) + end + + # Load a single plugin by name. We use 'load' here so we can reload a + # given plugin. + def load(name,env=nil) + path = name.to_s + ".rb" + + searchpath(env).each do |dir| + file = File.join(dir, path) + next unless file_exist?(file) + begin + Kernel.load file, @wrap + name = symbolize(name) + loaded name, file + return true + rescue SystemExit,NoMemoryError + raise + rescue Exception => detail + puts detail.backtrace if Puppet[:trace] + raise Puppet::Error, "Could not autoload #{name}: #{detail}" + end + end + false + end + + # Mark the named object as loaded. Note that this supports unqualified + # queries, while we store the result as a qualified query in the class. + def loaded(name, file) + self.class.loaded(File.join(@path, name.to_s)) + end + + # Indicate whether the specfied plugin has been loaded. + def loaded?(name) + self.class.loaded?(File.join(@path, name.to_s)) + end + + # Load all instances that we can. This uses require, rather than load, + # so that already-loaded files don't get reloaded unnecessarily. + def loadall + # Load every instance of everything we can find. + searchpath.each do |dir| + Dir.glob("#{dir}/*.rb").each do |file| + name = File.basename(file).sub(".rb", '').intern + next if loaded?(name) + begin + Kernel.require file + loaded(name, file) + rescue SystemExit,NoMemoryError + raise + rescue Exception => detail + puts detail.backtrace if Puppet[:trace] + raise Puppet::Error, "Could not autoload #{file}: #{detail}" + end + end + end + end + + # The list of directories to search through for loadable plugins. + def searchpath(env=nil) + search_directories(env).collect { |d| File.join(d, @path) }.find_all { |d| FileTest.directory?(d) } + end + + def module_directories(env=nil) + # We have to require this late in the process because otherwise we might have + # load order issues. + require 'puppet/node/environment' + + real_env = Puppet::Node::Environment.new(env) + + # We're using a per-thread cache of said module directories, so that + # we don't scan the filesystem each time we try to load something with + # this autoload instance. But since we don't want to cache for the eternity + # this env_module_directories gets reset after the compilation on the master. + # This is also reset after an agent ran. + # One of the side effect of this change is that this module directories list will be + # shared among all autoload that we have running at a time. 
But that won't be an issue + # as by definition those directories are shared by all autoload. + Thread.current[:env_module_directories] ||= {} + Thread.current[:env_module_directories][real_env] ||= real_env.modulepath.collect do |dir| + Dir.entries(dir).reject { |f| f =~ /^\./ }.collect { |f| File.join(dir, f) } + end.flatten.collect { |d| [File.join(d, "plugins"), File.join(d, "lib")] }.flatten.find_all do |d| + FileTest.directory?(d) + end + end + + def search_directories(env=nil) + [module_directories(env), Puppet[:libdir].split(File::PATH_SEPARATOR), $LOAD_PATH].flatten + end +end diff --git a/mcollective/lib/puppet/util/autoload/file_cache.rb b/mcollective/lib/puppet/util/autoload/file_cache.rb new file mode 100644 index 000000000..b55547362 --- /dev/null +++ b/mcollective/lib/puppet/util/autoload/file_cache.rb @@ -0,0 +1,92 @@ +module Puppet::Util::Autoload::FileCache + @found_files = {} + @missing_files = {} + class << self + attr_reader :found_files, :missing_files + end + + # Only used for testing. + def self.clear + @found_files.clear + @missing_files.clear + end + + def found_files + Puppet::Util::Autoload::FileCache.found_files + end + + def missing_files + Puppet::Util::Autoload::FileCache.missing_files + end + + def directory_exist?(path) + cache = cached_data?(path, :directory?) + return cache unless cache.nil? + + protect(path) do + stat = File.lstat(path) + if stat.directory? + found_file(path, stat) + return true + else + missing_file(path) + return false + end + end + end + + def file_exist?(path) + cache = cached_data?(path) + return cache unless cache.nil? + + protect(path) do + stat = File.lstat(path) + found_file(path, stat) + return true + end + end + + def found_file?(path, type = nil) + if data = found_files[path] and ! data_expired?(data[:time]) + return(type and ! data[:stat].send(type)) ? false : true + else + return false + end + end + + def found_file(path, stat) + found_files[path] = {:stat => stat, :time => Time.now} + end + + def missing_file?(path) + !!(time = missing_files[path] and ! data_expired?(time)) + end + + def missing_file(path) + missing_files[path] = Time.now + end + + private + + def cached_data?(path, type = nil) + if found_file?(path, type) + return true + elsif missing_file?(path) + return false + else + return nil + end + end + + def data_expired?(time) + Time.now - time > 15 + end + + def protect(path) + yield + rescue => detail + raise unless detail.class.to_s.include?("Errno") + missing_file(path) + return false + end +end diff --git a/mcollective/lib/puppet/util/backups.rb b/mcollective/lib/puppet/util/backups.rb new file mode 100644 index 000000000..c01bdd400 --- /dev/null +++ b/mcollective/lib/puppet/util/backups.rb @@ -0,0 +1,87 @@ +require 'find' +require 'fileutils' +module Puppet::Util::Backups + + # Deal with backups. + def perform_backup(file = nil) + # if they specifically don't want a backup, then just say + # we're good + return true unless self[:backup] + + # let the path be specified + file ||= self[:path] + return true unless FileTest.exists?(file) + + return(self.bucket ? perform_backup_with_bucket(file) : perform_backup_with_backuplocal(file, self[:backup])) + end + + private + + def perform_backup_with_bucket(fileobj) + file = (fileobj.class == String) ? 
fileobj : fileobj.name + case File.stat(file).ftype + when "directory" + # we don't need to backup directories when recurse is on + return true if self[:recurse] + info "Recursively backing up to filebucket" + Find.find(self[:path]) { |f| backup_file_with_filebucket(f) if File.file?(f) } + when "file"; backup_file_with_filebucket(file) + when "link"; + end + true + end + + def perform_backup_with_backuplocal(fileobj, backup) + file = (fileobj.class == String) ? fileobj : fileobj.name + newfile = file + backup + + remove_backup(newfile) + + begin + bfile = file + backup + + # Ruby 1.8.1 requires the 'preserve' addition, but + # later versions do not appear to require it. + # N.B. cp_r works on both files and directories + FileUtils.cp_r(file, bfile, :preserve => true) + return true + rescue => detail + # since they said they want a backup, let's error out + # if we couldn't make one + self.fail "Could not back #{file} up: #{detail.message}" + end + end + + def remove_backup(newfile) + if self.class.name == :file and self[:links] != :follow + method = :lstat + else + method = :stat + end + + begin + stat = File.send(method, newfile) + rescue Errno::ENOENT + return + end + + if stat.ftype == "directory" + raise Puppet::Error, "Will not remove directory backup #{newfile}; use a filebucket" + end + + info "Removing old backup of type #{stat.ftype}" + + begin + File.unlink(newfile) + rescue => detail + puts detail.backtrace if Puppet[:trace] + self.fail "Could not remove old backup: #{detail}" + end + end + + def backup_file_with_filebucket(f) + sum = self.bucket.backup(f) + self.info "Filebucketed #{f} to #{self.bucket.name} with sum #{sum}" + return sum + end +end diff --git a/mcollective/lib/puppet/util/cacher.rb b/mcollective/lib/puppet/util/cacher.rb new file mode 100644 index 000000000..3dddec0d4 --- /dev/null +++ b/mcollective/lib/puppet/util/cacher.rb @@ -0,0 +1,132 @@ +require 'monitor' + +module Puppet::Util::Cacher + module Expirer + attr_reader :timestamp + + # Cause all cached values to be considered expired. + def expire + @timestamp = Time.now + end + + # Is the provided timestamp earlier than our expiration timestamp? + # If it is, then the associated value is expired. + def dependent_data_expired?(ts) + return false unless timestamp + + timestamp > ts + end + end + + extend Expirer + + # Our module has been extended in a class; we can only add the Instance methods, + # which become *class* methods in the class. + def self.extended(other) + class << other + extend ClassMethods + include InstanceMethods + end + end + + # Our module has been included in a class, which means the class gets the class methods + # and all of its instances get the instance methods. + def self.included(other) + other.extend(ClassMethods) + other.send(:include, InstanceMethods) + end + + # Methods that can get added to a class. + module ClassMethods + # Provide a means of defining an attribute whose value will be cached. + # Must provide a block capable of defining the value if it's flushed.. 
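+ # A hypothetical use, assuming the including class wants to cache fact
+ # lookups for a minute:
+ #
+ #   cached_attr(:fact_values, :ttl => 60) { Facter.to_hash }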
+ def cached_attr(name, options = {}, &block) + init_method = "init_#{name}" + define_method(init_method, &block) + + define_method(name) do + cached_value(name) + end + + define_method(name.to_s + "=") do |value| + # Make sure the cache timestamp is set + cache_timestamp + value_cache.synchronize { value_cache[name] = value } + end + + if ttl = options[:ttl] + set_attr_ttl(name, ttl) + end + end + + def attr_ttl(name) + return nil unless @attr_ttls + @attr_ttls[name] + end + + def set_attr_ttl(name, value) + @attr_ttls ||= {} + @attr_ttls[name] = Integer(value) + end + end + + # Methods that get added to instances. + module InstanceMethods + + def expire + # Only expire if we have an expirer. This is + # mostly so that we can comfortably handle cases + # like Puppet::Type instances, which use their + # catalog as their expirer, and they often don't + # have a catalog. + if e = expirer + e.expire + end + end + + def expirer + Puppet::Util::Cacher + end + + private + + def cache_timestamp + @cache_timestamp ||= Time.now + end + + def cached_value(name) + value_cache.synchronize do + # Allow a nil expirer, in which case we regenerate the value every time. + if expired_by_expirer?(name) + value_cache.clear + @cache_timestamp = Time.now + elsif expired_by_ttl?(name) + value_cache.delete(name) + end + value_cache[name] = send("init_#{name}") unless value_cache.include?(name) + value_cache[name] + end + end + + def expired_by_expirer?(name) + if expirer.nil? + return true unless self.class.attr_ttl(name) + end + expirer.dependent_data_expired?(cache_timestamp) + end + + def expired_by_ttl?(name) + return false unless self.class.respond_to?(:attr_ttl) + return false unless ttl = self.class.attr_ttl(name) + + @ttl_timestamps ||= {} + @ttl_timestamps[name] ||= Time.now + + (Time.now - @ttl_timestamps[name]) > ttl + end + + def value_cache + @value_cache ||= {}.extend(MonitorMixin) + end + end +end diff --git a/mcollective/lib/puppet/util/checksums.rb b/mcollective/lib/puppet/util/checksums.rb new file mode 100644 index 000000000..e129301e6 --- /dev/null +++ b/mcollective/lib/puppet/util/checksums.rb @@ -0,0 +1,148 @@ +# A stand-alone module for calculating checksums +# in a generic way. +module Puppet::Util::Checksums + class FakeChecksum + def <<(*args) + self + end + end + + # Is the provided string a checksum? + def checksum?(string) + string =~ /^\{(\w{3,5})\}\S+/ + end + + # Strip the checksum type from an existing checksum + def sumdata(checksum) + checksum =~ /^\{(\w+)\}(.+)/ ? $2 : nil + end + + # Strip the checksum type from an existing checksum + def sumtype(checksum) + checksum =~ /^\{(\w+)\}/ ? $1 : nil + end + + # Calculate a checksum using Digest::MD5. + def md5(content) + require 'digest/md5' + Digest::MD5.hexdigest(content) + end + + # Calculate a checksum of the first 500 chars of the content using Digest::MD5. + def md5lite(content) + md5(content[0..511]) + end + + # Calculate a checksum of a file's content using Digest::MD5. + def md5_file(filename, lite = false) + require 'digest/md5' + + digest = Digest::MD5.new + checksum_file(digest, filename, lite) + end + + # Calculate a checksum of the first 500 chars of a file's content using Digest::MD5. + def md5lite_file(filename) + md5_file(filename, true) + end + + def md5_stream(&block) + require 'digest/md5' + digest = Digest::MD5.new + yield digest + digest.hexdigest + end + + alias :md5lite_stream :md5_stream + + # Return the :mtime timestamp of a file. 
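+ # Note that the mtime/ctime "checksums" below never hash any content;
+ # they simply record the file timestamps (and return nothing useful for
+ # streams or string content).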
+ def mtime_file(filename) + File.stat(filename).send(:mtime) + end + + # by definition this doesn't exist + # but we still need to execute the block given + def mtime_stream + noop_digest = FakeChecksum.new + yield noop_digest + nil + end + + def mtime(content) + "" + end + + # Calculate a checksum using Digest::SHA1. + def sha1(content) + require 'digest/sha1' + Digest::SHA1.hexdigest(content) + end + + # Calculate a checksum of the first 500 chars of the content using Digest::SHA1. + def sha1lite(content) + sha1(content[0..511]) + end + + # Calculate a checksum of a file's content using Digest::SHA1. + def sha1_file(filename, lite = false) + require 'digest/sha1' + + digest = Digest::SHA1.new + checksum_file(digest, filename, lite) + end + + # Calculate a checksum of the first 500 chars of a file's content using Digest::SHA1. + def sha1lite_file(filename) + sha1_file(filename, true) + end + + def sha1_stream + require 'digest/sha1' + digest = Digest::SHA1.new + yield digest + digest.hexdigest + end + + alias :sha1lite_stream :sha1_stream + + # Return the :ctime of a file. + def ctime_file(filename) + File.stat(filename).send(:ctime) + end + + alias :ctime_stream :mtime_stream + + def ctime(content) + "" + end + + # Return a "no checksum" + def none_file(filename) + "" + end + + def none_stream + noop_digest = FakeChecksum.new + yield noop_digest + "" + end + + def none(content) + "" + end + + private + + # Perform an incremental checksum on a file. + def checksum_file(digest, filename, lite = false) + buffer = lite ? 512 : 4096 + File.open(filename, 'r') do |file| + while content = file.read(buffer) + digest << content + break if lite + end + end + + digest.hexdigest + end +end diff --git a/mcollective/lib/puppet/util/classgen.rb b/mcollective/lib/puppet/util/classgen.rb new file mode 100644 index 000000000..ed69c5878 --- /dev/null +++ b/mcollective/lib/puppet/util/classgen.rb @@ -0,0 +1,197 @@ +module Puppet + class ConstantAlreadyDefined < Error; end + class SubclassAlreadyDefined < Error; end +end + +module Puppet::Util::ClassGen + include Puppet::Util::MethodHelper + include Puppet::Util + + # Create a new subclass. Valid options are: + # * :array: An array of existing classes. If specified, the new + # class is added to this array. + # * :attributes: A hash of attributes to set before the block is + # evaluated. + # * :block: The block to evaluate in the context of the class. + # You can also just pass the block normally, but it will still be evaluated + # with class_eval. + # * :constant: What to set the constant as. Defaults to the + # capitalized name. + # * :hash: A hash of existing classes. If specified, the new + # class is added to this hash, and it is also used for overwrite tests. + # * :overwrite: Whether to overwrite an existing class. + # * :parent: The parent class for the generated class. Defaults to + # self. + # * :prefix: The constant prefix. Default to nothing; if specified, + # the capitalized name is appended and the result is set as the constant. + def genclass(name, options = {}, &block) + genthing(name, Class, options, block) + end + + # Create a new module. Valid options are: + # * :array: An array of existing classes. If specified, the new + # class is added to this array. + # * :attributes: A hash of attributes to set before the block is + # evaluated. + # * :block: The block to evaluate in the context of the class. + # You can also just pass the block normally, but it will still be evaluated + # with class_eval. 
+ # * :constant: What to set the constant as. Defaults to the + # capitalized name. + # * :hash: A hash of existing classes. If specified, the new + # class is added to this hash, and it is also used for overwrite tests. + # * :overwrite: Whether to overwrite an existing class. + # * :prefix: The constant prefix. Default to nothing; if specified, + # the capitalized name is appended and the result is set as the constant. + def genmodule(name, options = {}, &block) + genthing(name, Module, options, block) + end + + # Remove an existing class + def rmclass(name, options) + options = symbolize_options(options) + const = genconst_string(name, options) + retval = false + if const_defined?(const) + remove_const(const) + retval = true + end + + if hash = options[:hash] and hash.include? name + hash.delete(name) + retval = true + end + + # Let them know whether we did actually delete a subclass. + retval + end + + private + + # Generate the constant to create or remove. + def genconst_string(name, options) + unless const = options[:constant] + prefix = options[:prefix] || "" + const = prefix + name2const(name) + end + + const + end + + # This does the actual work of creating our class or module. It's just a + # slightly abstract version of genclass. + def genthing(name, type, options, block) + options = symbolize_options(options) + + name = symbolize(name.to_s.downcase) + + if type == Module + #evalmethod = :module_eval + evalmethod = :class_eval + # Create the class, with the correct name. + klass = Module.new do + class << self + attr_reader :name + end + @name = name + end + else + options[:parent] ||= self + evalmethod = :class_eval + # Create the class, with the correct name. + klass = Class.new(options[:parent]) do + @name = name + end + end + + # Create the constant as appropriation. + handleclassconst(klass, name, options) + + # Initialize any necessary variables. + initclass(klass, options) + + block ||= options[:block] + + # Evaluate the passed block if there is one. This should usually + # define all of the work. + klass.send(evalmethod, &block) if block + + klass.postinit if klass.respond_to? :postinit + + # Store the class in hashes or arrays or whatever. + storeclass(klass, name, options) + + klass + end + + # Handle the setting and/or removing of the associated constant. + def handleclassconst(klass, name, options) + const = genconst_string(name, options) + + if const_defined?(const) + if options[:overwrite] + Puppet.info "Redefining #{name} in #{self}" + remove_const(const) + else + raise Puppet::ConstantAlreadyDefined, + "Class #{const} is already defined in #{self}" + end + end + const_set(const, klass) + + const + end + + # Perform the initializations on the class. + def initclass(klass, options) + klass.initvars if klass.respond_to? :initvars + + if attrs = options[:attributes] + attrs.each do |param, value| + method = param.to_s + "=" + klass.send(method, value) if klass.respond_to? method + end + end + + [:include, :extend].each do |method| + if set = options[method] + set = [set] unless set.is_a?(Array) + set.each do |mod| + klass.send(method, mod) + end + end + end + + klass.preinit if klass.respond_to? :preinit + end + + # Convert our name to a constant. + def name2const(name) + name.to_s.capitalize + end + + # Store the class in the appropriate places. + def storeclass(klass, klassname, options) + if hash = options[:hash] + if hash.include? klassname and ! 
options[:overwrite] + raise Puppet::SubclassAlreadyDefined, + "Already a generated class named #{klassname}" + end + + hash[klassname] = klass + end + + # If we were told to stick it in a hash, then do so + if array = options[:array] + if (klass.respond_to? :name and + array.find { |c| c.name == klassname } and + ! options[:overwrite]) + raise Puppet::SubclassAlreadyDefined, + "Already a generated class named #{klassname}" + end + + array << klass + end + end +end + diff --git a/mcollective/lib/puppet/util/command_line.rb b/mcollective/lib/puppet/util/command_line.rb new file mode 100644 index 000000000..52b5f81ef --- /dev/null +++ b/mcollective/lib/puppet/util/command_line.rb @@ -0,0 +1,102 @@ +require "puppet/util/plugins" + +module Puppet + module Util + class CommandLine + + LegacyName = Hash.new{|h,k| k}.update( + 'agent' => 'puppetd', + 'cert' => 'puppetca', + 'doc' => 'puppetdoc', + 'filebucket' => 'filebucket', + 'apply' => 'puppet', + 'describe' => 'pi', + 'queue' => 'puppetqd', + 'resource' => 'ralsh', + 'kick' => 'puppetrun', + 'master' => 'puppetmasterd' + ) + + def initialize( zero = $0, argv = ARGV, stdin = STDIN ) + @zero = zero + @argv = argv.dup + @stdin = stdin + + @subcommand_name, @args = subcommand_and_args( @zero, @argv, @stdin ) + Puppet::Plugins.on_commandline_initialization(:command_line_object => self) + end + + attr :subcommand_name + attr :args + + def appdir + File.join('puppet', 'application') + end + + def available_subcommands + absolute_appdirs = $LOAD_PATH.collect do |x| + File.join(x,'puppet','application') + end.select{ |x| File.directory?(x) } + absolute_appdirs.inject([]) do |commands, dir| + commands + Dir[File.join(dir, '*.rb')].map{|fn| File.basename(fn, '.rb')} + end.uniq + end + + def usage_message + usage = "Usage: puppet command " + available = "Available commands are: #{available_subcommands.sort.join(', ')}" + [usage, available].join("\n") + end + + def require_application(application) + require File.join(appdir, application) + end + + def execute + if subcommand_name.nil? + puts usage_message + elsif available_subcommands.include?(subcommand_name) #subcommand + require_application subcommand_name + app = Puppet::Application.find(subcommand_name).new(self) + Puppet::Plugins.on_application_initialization(:appliation_object => self) + app.run + else + abort "Error: Unknown command #{subcommand_name}.\n#{usage_message}" unless execute_external_subcommand + end + end + + def execute_external_subcommand + external_command = "puppet-#{subcommand_name}" + + require 'puppet/util' + path_to_subcommand = Puppet::Util.which( external_command ) + return false unless path_to_subcommand + + system( path_to_subcommand, *args ) + true + end + + def legacy_executable_name + LegacyName[ subcommand_name ] + end + + private + + def subcommand_and_args( zero, argv, stdin ) + zero = File.basename(zero, '.rb') + + if zero == 'puppet' + case argv.first + when nil; [ stdin.tty? ? 
nil : "apply", argv] # ttys get usage info + when "--help", "-h"; [nil, argv] # help should give you usage, not the help for `puppet apply` + when /^-|\.pp$|\.rb$/; ["apply", argv] + else [ argv.first, argv[1..-1] ] + end + else + [ zero, argv ] + end + end + + end + end +end diff --git a/mcollective/lib/puppet/util/command_line/filebucket b/mcollective/lib/puppet/util/command_line/filebucket new file mode 100755 index 000000000..34b01508e --- /dev/null +++ b/mcollective/lib/puppet/util/command_line/filebucket @@ -0,0 +1,97 @@ +#!/usr/bin/env ruby + +# +# = Synopsis +# +# A stand-alone Puppet filebucket client. +# +# = Usage +# +# puppet filebucket [-h|--help] [-V|--version] [-d|--debug] [-v|--verbose] +# [-l|--local] [-r|--remote] +# [-s|--server ] [-b|--bucket ] ... +# +# = Description +# +# This is a stand-alone filebucket client for sending files to a local +# or central filebucket. +# +# = Usage +# +# This client can operate in three modes, with only one mode per call: +# +# backup:: +# Send one or more files to the specified file bucket. Each sent file +# is printed with its resulting md5 sum. +# +# get:: +# Return the text associated with an md5 sum. The text is printed to +# stdout, and only one file can be retrieved at a time. +# +# restore:: +# Given a file path and an md5 sum, store the content associated with the +# sum into the specified file path. You can specify an entirely new path +# to this argument; you are not restricted to restoring the content to its +# original location. +# +# Note that +filebucket+ defaults to using a network-based filebucket available on +# the server named +puppet+. To use this, you'll have to be running as a user +# with valid Puppet certificates. Alternatively, you can use your local file bucket +# by specifying +--local+. +# +# = Example +# +# $ puppet filebucket backup /etc/passwd +# /etc/passwd: 429b225650b912a2ee067b0a4cf1e949 +# $ puppet filebucket restore /tmp/passwd 429b225650b912a2ee067b0a4cf1e949 +# $ +# +# = Options +# +# Note that any configuration parameter that's valid in the configuration file +# is also a valid long argument. For example, 'ssldir' is a valid configuration +# parameter, so you can specify '--ssldir ' as an argument. +# +# See the configuration file documentation at +# http://docs.puppetlabs.com/references/stable/configuration.html for +# the full list of acceptable parameters. A commented list of all +# configuration options can also be generated by running puppet with +# '--genconfig'. +# +# debug:: +# Enable full debugging. +# +# help:: +# Print this help message +# +# local:: +# Use the local filebucket. This will use the default configuration +# information. +# +# remote:: +# Use a remote filebucket. This will use the default configuration +# information. +# +# server:: +# The server to send the file to, instead of locally. +# +# verbose:: +# Print extra information. +# +# version:: +# Print version information. +# +# = Example +# +# puppet filebucket -b /tmp/filebucket /my/file +# +# = Author +# +# Luke Kanies +# +# = Copyright +# +# Copyright (c) 2005 Puppet Labs, LLC +# Licensed under the GNU Public License + +#Puppet::Application[:filebucket].run diff --git a/mcollective/lib/puppet/util/command_line/pi b/mcollective/lib/puppet/util/command_line/pi new file mode 100755 index 000000000..3d80eea8f --- /dev/null +++ b/mcollective/lib/puppet/util/command_line/pi @@ -0,0 +1,48 @@ +#!/usr/bin/env ruby + +# +# = Synopsis +# +# Print help about puppet types on the console. Run with '-h' to get detailed +# help. 
+# = Usage +# +# puppet describe [-h|--help] [-s|--short] [-p|--providers] [-l|--list] [-m|--meta] +# +# = Description +# +# Prints details of Puppet types, providers and metaparameters on the console. +# +# = Options +# +# help:: +# Print this help text +# +# providers:: +# Describe providers in detail for each type +# +# list:: +# List all types +# +# meta:: +# List all metaparameters +# +# short:: +# List only parameters without detail +# +# = Example +# +# puppet describe --list +# puppet describe file --providers +# puppet describe user -s -m +# +# = Author +# +# David Lutterkort +# +# = Copyright +# +# Copyright (c) 2005 Puppet Labs, LLC +# Licensed under the GNU Public License + +#Puppet::Application[:describe].run diff --git a/mcollective/lib/puppet/util/command_line/puppet b/mcollective/lib/puppet/util/command_line/puppet new file mode 100755 index 000000000..ba3d57c19 --- /dev/null +++ b/mcollective/lib/puppet/util/command_line/puppet @@ -0,0 +1,73 @@ + +# +# = Synopsis +# +# Run a stand-alone +puppet+ manifest. +# +# = Usage +# +# puppet apply [-h|--help] [-V|--version] [-d|--debug] [-v|--verbose] [-e|--execute] +# [--detailed-exitcodes] [-l|--logdest ] [--apply catalog] +# +# = Description +# +# This is the standalone puppet execution tool; use it to execute +# individual manifests that you write. If you need to execute site-wide +# manifests, use 'puppet agent' and 'puppet master'. +# +# = Options +# +# Note that any configuration parameter that's valid in the configuration file +# is also a valid long argument. For example, 'ssldir' is a valid configuration +# parameter, so you can specify '--ssldir ' as an argument. +# +# See the configuration file documentation at +# http://docs.puppetlabs.com/references/stable/configuration.html for +# the full list of acceptable parameters. A commented list of all +# configuration options can also be generated by running puppet with +# '--genconfig'. +# +# debug:: +# Enable full debugging. +# +# detailed-exitcodes:: +# Provide transaction information via exit codes. If this is enabled, an exit +# code of '2' means there were changes, and an exit code of '4' means that there +# were failures during the transaction. +# +# help:: +# Print this help message +# +# loadclasses:: +# Load any stored classes. 'puppet agent' caches configured classes (usually at +# /etc/puppet/classes.txt), and setting this option causes all of those classes +# to be set in your puppet manifest. +# +# logdest:: +# Where to send messages. Choose between syslog, the console, and a log file. +# Defaults to sending messages to the console. +# +# execute:: +# Execute a specific piece of Puppet code +# +# verbose:: +# Print extra information. +# +# apply:: +# Capability to apply JSON catalog (such as one generated with --compile on the Puppet master). +# You can either specify a JSON catalog file or pipe in JSON from standard input. +# +# = Example +# +# puppet -l /tmp/manifest.log manifest.pp +# +# = Author +# +# Luke Kanies +# +# = Copyright +# +# Copyright (c) 2005 Puppet Labs, LLC +# Licensed under the GNU Public License + +#Puppet::Application[:apply].run diff --git a/mcollective/lib/puppet/util/command_line/puppetca b/mcollective/lib/puppet/util/command_line/puppetca new file mode 100755 index 000000000..62ca96eba --- /dev/null +++ b/mcollective/lib/puppet/util/command_line/puppetca @@ -0,0 +1,131 @@ +#!/usr/bin/env ruby + +# +# = Synopsis +# +# Stand-alone certificate authority. 
Capable of generating certificates +# but mostly meant for signing certificate requests from puppet clients. +# +# = Usage +# +# puppet cert [-h|--help] [-V|--version] [-d|--debug] [-v|--verbose] +# [-g|--generate] [-l|--list] [-s|--sign] [-r|--revoke] +# [-p|--print] [-c|--clean] [--verify] [--digest DIGEST] +# [--fingerprint] [host] +# +# = Description +# +# Because the puppetmasterd daemon defaults to not signing client certificate +# requests, this script is available for signing outstanding requests. It +# can be used to list outstanding requests and then either sign them individually +# or sign all of them. +# +# = Options +# +# Note that any configuration parameter that's valid in the configuration file +# is also a valid long argument. For example, 'ssldir' is a valid configuration +# parameter, so you can specify '--ssldir ' as an argument. +# +# See the configuration file documentation at +# http://docs.puppetlabs.com/references/stable/configuration.html for +# the full list of acceptable parameters. A commented list of all +# configuration options can also be generated by running puppet cert with +# '--genconfig'. +# +# all:: +# Operate on all items. Currently only makes sense with '--sign', +# '--clean', or '--list'. +# +# digest:: +# Set the digest for fingerprinting (defaults to md5). Valid values depends +# on your openssl and openssl ruby extension version, but should contain at +# least md5, sha1, md2, sha256. +# +# clean:: +# Remove all files related to a host from puppet cert's storage. This is +# useful when rebuilding hosts, since new certificate signing requests +# will only be honored if puppet cert does not have a copy of a signed +# certificate for that host. The certificate of the host is also revoked. +# If '--all' is specified then all host certificates, both signed and +# unsigned, will be removed. +# +# debug:: +# Enable full debugging. +# +# generate:: +# Generate a certificate for a named client. A certificate/keypair will be +# generated for each client named on the command line. +# +# When generate is used the additional `--subject-alt-name` argument can be +# used. The names, separated by `:`, passed will be added as the +# subjectAltName of the final certificate. +# +# help:: +# Print this help message +# +# list:: +# List outstanding certificate requests. If '--all' is specified, +# signed certificates are also listed, prefixed by '+', and revoked +# or invalid certificates are prefixed by '-' (the verification outcome +# is printed in parenthesis). +# +# print:: +# Print the full-text version of a host's certificate. +# +# fingerprint:: +# Print the DIGEST (defaults to md5) fingerprint of a host's certificate. +# +# revoke:: +# Revoke the certificate of a client. The certificate can be specified +# either by its serial number, given as a decimal number or a hexadecimal +# number prefixed by '0x', or by its hostname. The certificate is revoked +# by adding it to the Certificate Revocation List given by the 'cacrl' +# config parameter. Note that the puppetmasterd needs to be restarted +# after revoking certificates. +# +# sign:: +# Sign an outstanding certificate request. Unless '--all' is specified, +# hosts must be listed after all flags. +# +# Puppet will refuse to sign a CSR that requests a `subjectAltName` +# extension unless you specify `--allow-subject-alt-name`. This is required +# because of the critical security risks around allowing `subjectAltName` +# from client generated certificates. 
+# +# To further enforce security, if `--allow-subject-alt-name` is given Puppet +# will refuse to sign any certificate that does not have request additional +# names. +# +# Finally, Puppet will still enforce security policy over the +# `subjectAltName` field, and will refuse to allow unknown values, or +# wildcards, as part of the certificate. +# +# verbose:: +# Enable verbosity. +# +# version:: +# Print the puppet version number and exit. +# +# verify:: +# Verify the named certificate against the local CA certificate. +# +# = Example +# +# $ puppet cert -l +# culain.madstop.com +# $ puppet cert -s culain.madstop.com +# +# Signing a certificate with `subjectAltName` set, which will be requested +# automatically when you bring up a new master in a distributed CA +# environment: +# +# $ puppet cert --sign --allow-subject-alt-name master12.local +# +# = Author +# +# Luke Kanies +# +# = Copyright +# +# Copyright (c) 2005 Puppet Labs, LLC +# Licensed under the GNU Public License diff --git a/mcollective/lib/puppet/util/command_line/puppetd b/mcollective/lib/puppet/util/command_line/puppetd new file mode 100755 index 000000000..b4eafb483 --- /dev/null +++ b/mcollective/lib/puppet/util/command_line/puppetd @@ -0,0 +1,188 @@ +#!/usr/bin/env ruby + +# == Synopsis +# +# Retrieve the client configuration from the puppet master and apply +# it to the local host. +# +# Currently must be run out periodically, using cron or something similar. +# +# = Usage +# +# puppet agent [-D|--daemonize|--no-daemonize] [-d|--debug] +# [--detailed-exitcodes] [--disable] [--enable] +# [-h|--help] [--certname ] [-l|--logdest syslog||console] +# [-o|--onetime] [--serve ] [-t|--test] [--noop] +# [--digest ] [--fingerprint] [-V|--version] +# [-v|--verbose] [-w|--waitforcert ] +# +# = Description +# +# This is the main puppet client. Its job is to retrieve the local machine's +# configuration from a remote server and apply it. In order to successfully +# communicate with the remote server, the client must have a certificate signed +# by a certificate authority that the server trusts; the recommended method +# for this, at the moment, is to run a certificate authority as part of the +# puppet server (which is the default). The client will connect and request +# a signed certificate, and will continue connecting until it receives one. +# +# Once the client has a signed certificate, it will retrieve its configuration +# and apply it. +# +# = Usage Notes +# +# +puppet agent+ does its best to find a compromise between interactive use and +# daemon use. Run with no arguments and no configuration, it will go into the +# backgroun, attempt to get a signed certificate, and retrieve and apply its +# configuration every 30 minutes. +# +# Some flags are meant specifically for interactive use -- in particular, +# +test+, +tags+ or +fingerprint+ are useful. +test+ enables verbose logging, causes +# the daemon to stay in the foreground, exits if the server's configuration is +# invalid (this happens if, for instance, you've left a syntax error on the +# server), and exits after running the configuration once (rather than hanging +# around as a long-running process). +# +# +tags+ allows you to specify what portions of a configuration you want to apply. +# Puppet elements are tagged with all of the class or definition names that +# contain them, and you can use the +tags+ flag to specify one of these names, +# causing only configuration elements contained within that class or definition +# to be applied. 
This is very useful when you are testing new configurations -- +# for instance, if you are just starting to manage +ntpd+, you would put all of +# the new elements into an +ntpd+ class, and call puppet with +--tags ntpd+, +# which would only apply that small portion of the configuration during your +# testing, rather than applying the whole thing. +# +# +fingerprint+ is a one-time flag. In this mode +puppet agent+ will run once and +# display on the console (and in the log) the current certificate (or certificate +# request) fingerprint. Providing the +--digest+ option allows to use a different +# digest algorithm to generate the fingerprint. The main use is to verify that +# before signing a certificate request on the master, the certificate request the +# master received is the same as the one the client sent (to prevent against +# man-in-the-middle attacks when signing certificates). +# +# +# = Options +# +# Note that any configuration parameter that's valid in the configuration file +# is also a valid long argument. For example, 'server' is a valid configuration +# parameter, so you can specify '--server ' as an argument. +# +# See the configuration file documentation at +# http://docs.puppetlabs.com/references/stable/configuration.html for +# the full list of acceptable parameters. A commented list of all +# configuration options can also be generated by running puppet agent with +# '--genconfig'. +# +# daemonize:: +# Send the process into the background. This is the default. +# +# no-daemonize:: +# Do not send the process into the background. +# +# debug:: +# Enable full debugging. +# +# digest:: +# Change the certificate fingerprinting digest algorithm. The default is MD5. +# Valid values depends on the version of OpenSSL installed, but should always +# at least contain MD5, MD2, SHA1 and SHA256. +# +# detailed-exitcodes:: +# Provide transaction information via exit codes. If this is enabled, an +# exit code of '2' means there were changes, and an exit code of '4' means +# that there were failures during the transaction. This option only makes +# sense in conjunction with --onetime. +# +# disable:: +# Disable working on the local system. This puts a lock file in place, +# causing +puppet agent+ not to work on the system until the lock file is removed. +# This is useful if you are testing a configuration and do not want the central +# configuration to override the local state until everything is tested and +# committed. +# +# +puppet agent+ uses the same lock file while it is running, so no more than one +# +puppet agent+ process is working at a time. +# +# +puppet agent+ exits after executing this. +# +# enable:: +# Enable working on the local system. This removes any lock file, causing +# +puppet agent+ to start managing the local system again (although it will continue +# to use its normal scheduling, so it might not start for another half hour). +# +# +puppet agent+ exits after executing this. +# +# certname:: +# Set the certname (unique ID) of the client. The master reads this unique +# identifying string, which is usually set to the node's fully-qualified domain +# name, to determine which configurations the node will receive. Use this option +# to debug setup problems or implement unusual node identification schemes. +# +# help:: +# Print this help message +# +# logdest:: +# Where to send messages. Choose between syslog, the console, and a log file. +# Defaults to sending messages to syslog, or the console if debugging or +# verbosity is enabled. 
+# +# no-client:: +# Do not create a config client. This will cause the daemon to run +# without ever checking for its configuration automatically, and only +# makes sense when used in conjunction with --listen. +# +# onetime:: +# Run the configuration once. Runs a single (normally daemonized) Puppet run. +# Useful for interactively running puppet agent when used in conjunction with +# the --no-daemonize option. +# +# fingerprint:: +# Display the current certificate or certificate signing request fingerprint +# and then exit. Use the +--digest+ option to change the digest algorithm used. +# +# serve:: +# Start another type of server. By default, +puppet agent+ will start +# a service handler that allows authenticated and authorized remote nodes to +# trigger the configuration to be pulled down and applied. You can specify +# any handler here that does not require configuration, e.g., filebucket, ca, +# or resource. The handlers are in +lib/puppet/network/handler+, and the names +# must match exactly, both in the call to +serve+ and in +namespaceauth.conf+. +# +# test:: +# Enable the most common options used for testing. These are +onetime+, +# +verbose+, +ignorecache, +no-daemonize+, +no-usecacheonfailure+, +# +detailed-exit-codes+, +no-splay+, and +show_diff+. +# +# noop:: +# Use +noop+ mode where the daemon runs in a no-op or dry-run mode. This is useful +# for seeing what changes Puppet will make without actually executing the changes. +# +# verbose:: +# Turn on verbose reporting. +# +# version:: +# Print the puppet version number and exit. +# +# waitforcert:: +# This option only matters for daemons that do not yet have certificates +# and it is enabled by default, with a value of 120 (seconds). This causes +# +puppet agent+ to connect to the server every 2 minutes and ask it to sign a +# certificate request. This is useful for the initial setup of a puppet +# client. You can turn off waiting for certificates by specifying a time +# of 0. +# +# = Example +# +# puppet agent --server puppet.domain.com +# +# = Author +# +# Luke Kanies +# +# = Copyright +# +# Copyright (c) 2005, 2006 Puppet Labs, LLC +# Licensed under the GNU Public License + +#Puppet::Application[:agent].run diff --git a/mcollective/lib/puppet/util/command_line/puppetdoc b/mcollective/lib/puppet/util/command_line/puppetdoc new file mode 100755 index 000000000..45a9c6518 --- /dev/null +++ b/mcollective/lib/puppet/util/command_line/puppetdoc @@ -0,0 +1,67 @@ +#!/usr/bin/env ruby + +# +# = Synopsis +# +# Generate a reference for all Puppet types. Largely meant for internal Puppet +# Labs use. +# +# = Usage +# +# puppet doc [-a|--all] [-h|--help] [-o|--outputdir ] [-m|--mode ] +# [-r|--reference <[type]|configuration|..>] [--charset CHARSET] [manifest-file] +# +# = Description +# +# If mode is not 'rdoc', then this command generates a Markdown document describing all installed +# Puppet types or all allowable arguments to puppet executables. It is largely +# meant for internal use and is used to generate the reference document +# available on the Puppet Labs web site. +# +# In 'rdoc' mode, this command generates an html RDoc hierarchy describing the manifests that +# are in 'manifestdir' and 'modulepath' configuration directives. +# The generated documentation directory is doc by default but can be changed with the 'outputdir' option. +# +# If the command is started with 'manifest-file' command-line arguments, puppet doc generate a single +# manifest documentation that is output on stdout. 
+# +# = Options +# +# all:: +# Output the docs for all of the reference types. In 'rdoc' modes, this also outputs documentation for all resources +# +# help:: +# Print this help message +# +# outputdir:: +# Specifies the directory where to output the rdoc documentation in 'rdoc' mode. +# +# mode:: +# Determine the output mode. Valid modes are 'text', 'pdf' and 'rdoc'. The 'pdf' mode creates PDF formatted files in the /tmp directory. The default mode is 'text'. In 'rdoc' mode you must provide 'manifests-path' +# +# reference:: +# Build a particular reference. Get a list of references by running +puppet doc --list+. +# +# charset:: +# Used only in 'rdoc' mode. It sets the charset used in the html files produced. +# +# = Example +# +# $ puppet doc -r type > /tmp/type_reference.markdown +# or +# $ puppet doc --outputdir /tmp/rdoc --mode rdoc /path/to/manifests +# or +# $ puppet doc /etc/puppet/manifests/site.pp +# or +# $ puppet doc -m pdf -r configuration +# +# = Author +# +# Luke Kanies +# +# = Copyright +# +# Copyright (c) 2005-2007 Puppet Labs, LLC +# Licensed under the GNU Public License + +#Puppet::Application[:doc].run diff --git a/mcollective/lib/puppet/util/command_line/puppetmasterd b/mcollective/lib/puppet/util/command_line/puppetmasterd new file mode 100755 index 000000000..3b76db82b --- /dev/null +++ b/mcollective/lib/puppet/util/command_line/puppetmasterd @@ -0,0 +1,70 @@ +#!/usr/bin/env ruby + +# +# = Synopsis +# +# The central puppet server. Functions as a certificate authority by default. +# +# = Usage +# +# puppet master [-D|--daemonize|--no-daemonize] [-d|--debug] [-h|--help] +# [-l|--logdest |console|syslog] [-v|--verbose] [-V|--version] +# [--compile ] +# +# = Description +# +# This is the puppet central daemon. +# +# = Options +# +# Note that any configuration parameter that's valid in the configuration file +# is also a valid long argument. For example, 'ssldir' is a valid configuration +# parameter, so you can specify '--ssldir ' as an argument. +# +# See the configuration file documentation at +# http://docs.puppetlabs.com/references/stable/configuration.html for +# the full list of acceptable parameters. A commented list of all +# configuration options can also be generated by running puppetmasterdd with +# '--genconfig'. +# +# daemonize:: +# Send the process into the background. This is the default. +# +# no-daemonize:: +# Do not send the process into the background. +# +# debug:: +# Enable full debugging. +# +# help:: +# Print this help message. +# +# logdest:: +# Where to send messages. Choose between syslog, the console, and a log file. +# Defaults to sending messages to syslog, or the console +# if debugging or verbosity is enabled. +# +# verbose:: +# Enable verbosity. +# +# version:: +# Print the puppet version number and exit. +# +# compile:: +# Capability to compile a catalogue and output it in JSON from the Puppet master. Uses +# facts contained in the $vardir/yaml/ directory to compile the catalog. +# +# = Example +# +# puppet master +# +# = Author +# +# Luke Kanies +# +# = Copyright +# +# Copyright (c) 2005 Puppet Labs, LLC +# Licensed under the GNU Public License + +#Puppet::Application[:master].run diff --git a/mcollective/lib/puppet/util/command_line/puppetqd b/mcollective/lib/puppet/util/command_line/puppetqd new file mode 100755 index 000000000..81963d537 --- /dev/null +++ b/mcollective/lib/puppet/util/command_line/puppetqd @@ -0,0 +1,53 @@ +#!/usr/bin/env ruby + +# == Synopsis +# +# Retrieve serialized records from a queue and process them in order. 
+# +# = Usage +# +# puppet queue [-d|--debug] [-v|--verbose] +# +# = Description +# +# This is a simple application that just processes entities in a queue as they +# are recieved. +# +# = Options +# +# Note that any configuration parameter that's valid in the configuration file +# is also a valid long argument. For example, 'server' is a valid configuration +# parameter, so you can specify '--server ' as an argument. +# +# See the configuration file documentation at +# http://docs.puppetlabs.com/references/stable/configuration.html for +# the full list of acceptable parameters. A commented list of all +# configuration options can also be generated by running puppetd with +# '--genconfig'. +# +# debug:: +# Enable full debugging. +# +# help:: +# Print this help message +# +# verbose:: +# Turn on verbose reporting. +# +# version:: +# Print the puppet version number and exit. +# +# = Example +# +# puppet queue +# +# = Author +# +# Luke Kanies +# +# = Copyright +# +# Copyright (c) 2009 Puppet Labs, LLC +# Licensed under the GNU Public License + +#Puppet::Application[:queue].run diff --git a/mcollective/lib/puppet/util/command_line/puppetrun b/mcollective/lib/puppet/util/command_line/puppetrun new file mode 100755 index 000000000..3437405b0 --- /dev/null +++ b/mcollective/lib/puppet/util/command_line/puppetrun @@ -0,0 +1,125 @@ +#!/usr/bin/env ruby + +# +# = Synopsis +# +# Trigger a puppet agent run on a set of hosts. +# +# = Usage +# +# puppet kick [-a|--all] [-c|--class ] [-d|--debug] [-f|--foreground] +# [-h|--help] [--host ] [--no-fqdn] [--ignoreschedules] +# [-t|--tag ] [--test] [-p|--ping] [ [...]] +# +# = Description +# +# This script can be used to connect to a set of machines running +puppet agent+ +# and trigger them to run their configurations. The most common usage would +# be to specify a class of hosts and a set of tags, and +puppet kick+ would +# look up in LDAP all of the hosts matching that class, then connect to +# each host and trigger a run of all of the objects with the specified tags. +# +# If you are not storing your host configurations in LDAP, you can specify +# hosts manually. +# +# You will most likely have to run +puppet kick+ as root to get access to +# the SSL certificates. +# +# +puppet kick+ reads +puppet master+'s configuration file, so that it can copy +# things like LDAP settings. +# +# = Usage Notes +# +# +puppet kick+ is useless unless +puppet agent+ is listening. See its documentation +# for more information, but the gist is that you must enable +listen+ on the +# +puppet agent+ daemon, either using +--listen+ on the command line or adding +# 'listen: true' in its config file. In addition, you need to set the daemons +# up to specifically allow connections by creating the +namespaceauth+ file, +# normally at '/etc/puppet/namespaceauth.conf'. This file specifies who has +# access to each namespace; if you create the file you must add every namespace +# you want any Puppet daemon to allow -- it is currently global to all Puppet +# daemons. +# +# An example file looks like this:: +# +# [fileserver] +# allow *.madstop.com +# +# [puppetmaster] +# allow *.madstop.com +# +# [puppetrunner] +# allow culain.madstop.com +# +# This is what you would install on your Puppet master; non-master hosts could +# leave off the 'fileserver' and 'puppetmaster' namespaces. +# +# = Options +# +# Note that any configuration parameter that's valid in the configuration file +# is also a valid long argument. 
For example, 'ssldir' is a valid configuration +# parameter, so you can specify '--ssldir ' as an argument. +# +# See the configuration file documentation at +# http://reductivelabs.com/projects/puppet/reference/configref.html for +# the full list of acceptable parameters. A commented list of all +# configuration options can also be generated by running puppet master with +# '--genconfig'. +# +# +# all:: +# Connect to all available hosts. Requires LDAP support at this point. +# +# class:: +# Specify a class of machines to which to connect. This only works if you +# have LDAP configured, at the moment. +# +# debug:: +# Enable full debugging. +# +# foreground:: +# Run each configuration in the foreground; that is, when connecting to a host, +# do not return until the host has finished its run. The default is false. +# +# help:: +# Print this help message +# +# host:: +# A specific host to which to connect. This flag can be specified more +# than once. +# +# ignoreschedules:: +# Whether the client should ignore schedules when running its configuration. +# This can be used to force the client to perform work it would not normally +# perform so soon. The default is false. +# +# parallel:: +# How parallel to make the connections. Parallelization is provided by forking +# for each client to which to connect. The default is 1, meaning serial execution. +# +# tag:: +# Specify a tag for selecting the objects to apply. Does not work with the +# --test option. +# +# +# test:: +# Print the hosts you would connect to but do not actually connect. This +# option requires LDAP support at this point. +# +# ping:: +# Do a ICMP echo against the target host. Skip hosts that don't respond to ping. +# +# = Example +# +# sudo puppet kick -p 10 -t remotefile -t webserver host1 host2 +# +# = Author +# +# Luke Kanies +# +# = Copyright +# +# Copyright (c) 2005 Puppet Labs, LLC +# Licensed under the GNU Public License + +#Puppet::Application[:kick].run diff --git a/mcollective/lib/puppet/util/command_line/ralsh b/mcollective/lib/puppet/util/command_line/ralsh new file mode 100755 index 000000000..5c1f719e2 --- /dev/null +++ b/mcollective/lib/puppet/util/command_line/ralsh @@ -0,0 +1,89 @@ +#!/usr/bin/env ruby + +# +# = Synopsis +# +# Use the Puppet RAL to directly interact with the system. +# +# = Usage +# +# puppet resource [-h|--help] [-d|--debug] [-v|--verbose] [-e|--edit] +# [-H|--host ] [-p|--param ] [-t|--types] +# type +# +# = Description +# +# This command provides simple facilities for converting current system state +# into Puppet code, along with some ability to use Puppet to affect the current +# state. +# +# By default, you must at least provide a type to list, which case puppet resource +# will tell you everything it knows about all instances of that type. You can +# optionally specify an instance name, and puppet resource will only describe that single +# instance. +# +# You can also add +--edit+ as an argument, and puppet resource will write its output +# to a file, open that file in an editor, and then apply the file as a Puppet +# transaction. You can easily use this to use Puppet to make simple changes to +# a system. +# +# = Options +# +# Note that any configuration parameter that's valid in the configuration file +# is also a valid long argument. For example, 'ssldir' is a valid configuration +# parameter, so you can specify '--ssldir ' as an argument. 
+# +# See the configuration file documentation at +# http://docs.puppetlabs.com/references/stable/configuration.html for +# the full list of acceptable parameters. A commented list of all +# configuration options can also be generated by running puppet with +# '--genconfig'. +# +# debug:: +# Enable full debugging. +# +# edit: +# Write the results of the query to a file, open the file in an editor, +# and read the file back in as an executable Puppet manifest. +# +# host: +# When specified, connect to the resource server on the named host +# and retrieve the list of resouces of the type specified. +# +# help: +# Print this help message. +# +# param: +# Add more parameters to be outputted from queries. +# +# types: +# List all available types. +# +# verbose: +# Print extra information. +# +# = Example +# +# This example uses `puppet resource` to return Puppet configuration for the user `luke`: +# +# $ puppet resource user luke +# user { 'luke': +# home => '/home/luke', +# uid => '100', +# ensure => 'present', +# comment => 'Luke Kanies,,,', +# gid => '1000', +# shell => '/bin/bash', +# groups => ['sysadmin','audio','video','puppet'] +# } +# +# = Author +# +# Luke Kanies +# +# = Copyright +# +# Copyright (c) 2005-2007 Puppet Labs, LLC +# Licensed under the GNU Public License + +#Puppet::Application[:resource].run diff --git a/mcollective/lib/puppet/util/constant_inflector.rb b/mcollective/lib/puppet/util/constant_inflector.rb new file mode 100644 index 000000000..8f93e3255 --- /dev/null +++ b/mcollective/lib/puppet/util/constant_inflector.rb @@ -0,0 +1,15 @@ +# Created on 2008-02-12 +# Copyright Luke Kanies + +# A common module for converting between constants and +# file names. +module Puppet::Util::ConstantInflector + def file2constant(file) + # LAK:NOTE See http://snurl.com/21zf8 [groups_google_com] + x = file.split("/").collect { |name| name.capitalize }.join("::").gsub(/_+(.)/) { |term| $1.capitalize } + end + + def constant2file(constant) + constant.to_s.gsub(/([a-z])([A-Z])/) { |term| $1 + "_#{$2}" }.gsub("::", "/").downcase + end +end diff --git a/mcollective/lib/puppet/util/diff.rb b/mcollective/lib/puppet/util/diff.rb new file mode 100644 index 000000000..73e1bc014 --- /dev/null +++ b/mcollective/lib/puppet/util/diff.rb @@ -0,0 +1,74 @@ +# Provide a diff between two strings. +module Puppet::Util::Diff + include Puppet::Util + require 'tempfile' + + def diff(old, new) + command = [Puppet[:diff]] + if args = Puppet[:diff_args] and args != "" + command << args + end + command << old << new + execute(command, :failonfail => false) + end + + module_function :diff + + # return diff string of two input strings + # format defaults to unified + # context defaults to 3 lines + def lcs_diff(data_old, data_new, format=:unified, context_lines=3) + unless Puppet.features.diff? + Puppet.warning "Cannot provide diff without the diff/lcs Ruby library" + return "" + end + data_old = data_old.split(/\n/).map! { |e| e.chomp } + data_new = data_new.split(/\n/).map! { |e| e.chomp } + + output = "" + + diffs = ::Diff::LCS.diff(data_old, data_new) + return output if diffs.empty? + + oldhunk = hunk = nil + file_length_difference = 0 + + diffs.each do |piece| + begin + + hunk = ::Diff::LCS::Hunk.new( + data_old, data_new, piece, + context_lines, + + file_length_difference) + file_length_difference = hunk.file_length_difference + next unless oldhunk + # Hunks may overlap, which is why we need to be careful when our + # diff includes lines of context. Otherwise, we might print + # redundant lines. 
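A minimal usage sketch for the Puppet::Util::ConstantInflector module added above; the bare Object receiver is illustrative and not part of the patch. For well-formed names the two helpers are inverses of each other:

    require 'puppet'
    require 'puppet/util/constant_inflector'

    # Mix the helpers into a throwaway object; any class may include or extend them.
    inflector = Object.new.extend(Puppet::Util::ConstantInflector)

    inflector.file2constant("puppet/util/constant_inflector")
    # => "Puppet::Util::ConstantInflector"
    inflector.constant2file("Puppet::Util::ConstantInflector")
    # => "puppet/util/constant_inflector"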
+ if (context_lines > 0) and hunk.overlaps?(oldhunk) + hunk.unshift(oldhunk) + else + output << oldhunk.diff(format) + end + ensure + oldhunk = hunk + output << "\n" + end + end + + # Handle the last remaining hunk + output << oldhunk.diff(format) << "\n" + end + + def string_file_diff(path, string) + require 'tempfile' + tempfile = Tempfile.new("puppet-diffing") + tempfile.open + tempfile.print string + tempfile.close + print diff(path, tempfile.path) + tempfile.delete + end +end + diff --git a/mcollective/lib/puppet/util/docs.rb b/mcollective/lib/puppet/util/docs.rb new file mode 100644 index 000000000..4344d67ab --- /dev/null +++ b/mcollective/lib/puppet/util/docs.rb @@ -0,0 +1,107 @@ +# Some simple methods for helping manage automatic documentation generation. +module Puppet::Util::Docs + # Specify the actual doc string. + def desc(str) + @doc = str + end + + # Add a new autodoc block. We have to define these as class methods, + # rather than just sticking them in a hash, because otherwise they're + # too difficult to do inheritance with. + def dochook(name, &block) + method = "dochook_#{name}" + + meta_def method, &block + end + + attr_writer :doc + + # Generate the full doc string. + def doc + extra = methods.find_all { |m| m.to_s =~ /^dochook_.+/ }.sort.collect { |m| + self.send(m) + }.join(" ") + + if @doc + @doc + extra + else + extra + end + end + + # Build a table + def doctable(headers, data) + str = "\n\n" + + lengths = [] + # Figure out the longest field for all columns + data.each do |name, values| + [name, values].flatten.each_with_index do |value, i| + lengths[i] ||= 0 + lengths[i] = value.to_s.length if value.to_s.length > lengths[i] + end + end + + # The headers could also be longest + headers.each_with_index do |value, i| + lengths[i] = value.to_s.length if value.to_s.length > lengths[i] + end + + # Add the header names + str += headers.zip(lengths).collect { |value, num| pad(value, num) }.join(" | ") + " |" + "\n" + + # And the header row + str += lengths.collect { |num| "-" * num }.join(" | ") + " |" + "\n" + + # Now each data row + data.sort { |a, b| a[0].to_s <=> b[0].to_s }.each do |name, rows| + str += [name, rows].flatten.zip(lengths).collect do |value, length| + pad(value, length) + end.join(" | ") + " |" + "\n" + end + + str + "\n" + end + + attr_reader :nodoc + def nodoc? + nodoc + end + + # Pad a field with spaces + def pad(value, length) + value.to_s + (" " * (length - value.to_s.length)) + end + + # Handle the inline indentation in the docs. + def scrub(text) + # Stupid markdown + #text = text.gsub("<%=", "<%=") + # For text with no carriage returns, there's nothing to do. + return text if text !~ /\n/ + indent = nil + + # If we can match an indentation, then just remove that same level of + # indent from every line. However, ignore any indentation on the + # first line, since that can be inconsistent. 
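A sketch of driving lcs_diff from the diff.rb module above, assuming the diff-lcs gem is installed so the Puppet.features.diff? guard passes; the DiffDemo class is hypothetical:

    require 'puppet'
    require 'puppet/util/diff'

    class DiffDemo
      include Puppet::Util::Diff
    end

    old_text = "one\ntwo\nthree"
    new_text = "one\n2\nthree"

    # Prints a unified diff hunk with the default three lines of context.
    puts DiffDemo.new.lcs_diff(old_text, new_text)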
+ text = text.lstrip + text.gsub!(/^([\t]+)/) { |s| " "*8*s.length; } # Expand leading tabs + # Find first non-empty line after the first line: + line2start = (text =~ /(\n?\s*\n)/) + line2start += $1.length + if (text[line2start..-1] =~ /^([ ]+)\S/) == 0 + indent = Regexp.quote($1) + begin + return text.gsub(/^#{indent}/,'') + rescue => detail + puts detail.backtrace + puts detail + end + else + return text + end + + end + + module_function :scrub +end diff --git a/mcollective/lib/puppet/util/errors.rb b/mcollective/lib/puppet/util/errors.rb new file mode 100644 index 000000000..5a7a7630b --- /dev/null +++ b/mcollective/lib/puppet/util/errors.rb @@ -0,0 +1,63 @@ +# Some helper methods for throwing errors. +module Puppet::Util::Errors + # Throw a dev error. + def devfail(msg) + self.fail(Puppet::DevError, msg) + end + + # Add line and file info if available and appropriate. + def adderrorcontext(error, other = nil) + error.line ||= self.line if self.respond_to?(:line) and self.line + error.file ||= self.file if self.respond_to?(:file) and self.file + + error.set_backtrace other.backtrace if other and other.respond_to?(:backtrace) + + error + end + + def error_context + if file and line + " at #{file}:#{line}" + elsif line + " at line #{line}" + elsif file + " in #{file}" + else + "" + end + end + + # Wrap a call in such a way that we always throw the right exception and keep + # as much context as possible. + def exceptwrap(options = {}) + options[:type] ||= Puppet::DevError + begin + return yield + rescue Puppet::Error => detail + raise adderrorcontext(detail) + rescue => detail + message = options[:message] || "#{self.class} failed with error #{detail.class}: #{detail}" + + error = options[:type].new(message) + # We can't use self.fail here because it always expects strings, + # not exceptions. + raise adderrorcontext(error, detail) + end + + retval + end + + # Throw an error, defaulting to a Puppet::Error. + def fail(*args) + if args[0].is_a?(Class) + type = args.shift + else + type = Puppet::Error + end + + error = adderrorcontext(type.new(args.join(" "))) + + raise error + end +end + diff --git a/mcollective/lib/puppet/util/execution.rb b/mcollective/lib/puppet/util/execution.rb new file mode 100644 index 000000000..69f4f2c15 --- /dev/null +++ b/mcollective/lib/puppet/util/execution.rb @@ -0,0 +1,20 @@ +module Puppet::Util::Execution + module_function + + # Run some code with a specific environment. Resets the environment back to + # what it was at the end of the code. + def withenv(hash) + saved = ENV.to_hash + hash.each do |name, val| + ENV[name.to_s] = val + end + + yield + ensure + ENV.clear + saved.each do |name, val| + ENV[name] = val + end + end +end + diff --git a/mcollective/lib/puppet/util/execution_stub.rb b/mcollective/lib/puppet/util/execution_stub.rb new file mode 100644 index 000000000..af74e0f72 --- /dev/null +++ b/mcollective/lib/puppet/util/execution_stub.rb @@ -0,0 +1,26 @@ +module Puppet::Util + class ExecutionStub + class << self + # Set a stub block that Puppet::Util.execute() should invoke instead + # of actually executing commands on the target machine. Intended + # for spec testing. + # + # The arguments passed to the block are |command, options|, where + # command is an array of strings and options is an options hash. + def set(&block) + @value = block + end + + # Uninstall any execution stub, so that calls to + # Puppet::Util.execute() behave normally again. 
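The execution.rb module above is module_function'd, so withenv can be called directly; a small sketch (the LC_ALL override is illustrative):

    require 'puppet'
    require 'puppet/util/execution'

    # The override is visible only inside the block; the previous environment
    # is restored by the ensure clause even if the block raises.
    Puppet::Util::Execution.withenv("LC_ALL" => "C") do
      ENV["LC_ALL"]   # => "C"
    end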
+ def reset + @value = nil + end + + # Retrieve the current execution stub, or nil if there is no stub. + def current_value + @value + end + end + end +end diff --git a/mcollective/lib/puppet/util/feature.rb b/mcollective/lib/puppet/util/feature.rb new file mode 100644 index 000000000..2f704104a --- /dev/null +++ b/mcollective/lib/puppet/util/feature.rb @@ -0,0 +1,84 @@ +# Created by Luke Kanies on 2006-11-07. +# Copyright (c) 2006. All rights reserved. + +class Puppet::Util::Feature + attr_reader :path + + # Create a new feature test. You have to pass the feature name, + # and it must be unique. You can either provide a block that + # will get executed immediately to determine if the feature + # is present, or you can pass an option to determine it. + # Currently, the only supported option is 'libs' (must be + # passed as a symbol), which will make sure that each lib loads + # successfully. + def add(name, options = {}) + method = name.to_s + "?" + raise ArgumentError, "Feature #{name} is already defined" if self.class.respond_to?(method) + + if block_given? + begin + result = yield + rescue Exception => detail + warn "Failed to load feature test for #{name}: #{detail}" + result = false + end + @results[name] = result + end + + meta_def(method) do + @results[name] = test(name, options) unless @results.include?(name) + @results[name] + end + end + + # Create a new feature collection. + def initialize(path) + @path = path + @results = {} + @loader = Puppet::Util::Autoload.new(self, @path) + end + + def load + @loader.loadall + end + + def method_missing(method, *args) + return super unless method.to_s =~ /\?$/ + + feature = method.to_s.sub(/\?$/, '') + @loader.load(feature) + + respond_to?(method) && self.send(method) + end + + # Actually test whether the feature is present. We only want to test when + # someone asks for the feature, so we don't unnecessarily load + # files. + def test(name, options) + return true unless ary = options[:libs] + ary = [ary] unless ary.is_a?(Array) + + ary.each do |lib| + return false unless load_library(lib, name) + end + + # We loaded all of the required libraries + true + end + + private + + def load_library(lib, name) + raise ArgumentError, "Libraries must be passed as strings not #{lib.class}" unless lib.is_a?(String) + + begin + require lib + rescue SystemExit,NoMemoryError + raise + rescue Exception + Puppet.debug "Failed to load library '#{lib}' for feature '#{name}'" + return false + end + true + end +end diff --git a/mcollective/lib/puppet/util/file_locking.rb b/mcollective/lib/puppet/util/file_locking.rb new file mode 100644 index 000000000..18744cab7 --- /dev/null +++ b/mcollective/lib/puppet/util/file_locking.rb @@ -0,0 +1,47 @@ +require 'puppet/util' + +module Puppet::Util::FileLocking + module_function + + # Create a shared lock for reading + def readlock(file) + raise ArgumentError, "#{file} is not a file" unless !File.exists?(file) or File.file?(file) + Puppet::Util.synchronize_on(file,Sync::SH) do + File.open(file) { |f| + f.lock_shared { |lf| yield lf } + } + end + end + + # Create an exclusive lock for writing, and do the writing in a + # tmp file. 
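The Feature class above is what backs Puppet.features; a sketch of registering and probing a library-based feature (the :mylib name and library are hypothetical):

    require 'puppet'

    # Defines Puppet.features.mylib?, which lazily tries `require "mylib"` the
    # first time it is called and caches the result.
    Puppet.features.add(:mylib, :libs => ["mylib"])

    Puppet.features.mylib?   # => false unless a 'mylib' library is installed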
+ def writelock(file, mode = nil) + raise Puppet::DevError, "Cannot create #{file}; directory #{File.dirname(file)} does not exist" unless FileTest.directory?(File.dirname(file)) + raise ArgumentError, "#{file} is not a file" unless !File.exists?(file) or File.file?(file) + tmpfile = file + ".tmp" + + unless mode + # It's far more likely that the file will be there than not, so it's + # better to stat once to check for existence and mode. + # If we can't stat, it's most likely because the file's not there, + # but could also be because the directory isn't readable, in which case + # we won't be able to write anyway. + begin + mode = File.stat(file).mode + rescue + mode = 0600 + end + end + + Puppet::Util.synchronize_on(file,Sync::EX) do + File.open(file, File::Constants::CREAT | File::Constants::WRONLY, mode) do |rf| + rf.lock_exclusive do |lrf| + # poor's man open(2) O_EXLOCK|O_TRUNC + lrf.seek(0, IO::SEEK_SET) + lrf.truncate(0) + yield lrf + end + end + end + end +end diff --git a/mcollective/lib/puppet/util/fileparsing.rb b/mcollective/lib/puppet/util/fileparsing.rb new file mode 100644 index 000000000..5c2a29cd5 --- /dev/null +++ b/mcollective/lib/puppet/util/fileparsing.rb @@ -0,0 +1,373 @@ +# A mini-language for parsing files. This is only used file the ParsedFile +# provider, but it makes more sense to split it out so it's easy to maintain +# in one place. +# +# You can use this module to create simple parser/generator classes. For instance, +# the following parser should go most of the way to parsing /etc/passwd: +# +# class Parser +# include Puppet::Util::FileParsing +# record_line :user, :fields => %w{name password uid gid gecos home shell}, +# :separator => ":" +# end +# +# You would use it like this: +# +# parser = Parser.new +# lines = parser.parse(File.read("/etc/passwd")) +# +# lines.each do |type, hash| # type will always be :user, since we only have one +# p hash +# end +# +# Each line in this case would be a hash, with each field set appropriately. +# You could then call 'parser.to_line(hash)' on any of those hashes to generate +# the text line again. + +require 'puppet/util/methodhelper' + +module Puppet::Util::FileParsing + include Puppet::Util + attr_writer :line_separator, :trailing_separator + + class FileRecord + include Puppet::Util + include Puppet::Util::MethodHelper + attr_accessor :absent, :joiner, :rts, :separator, :rollup, :name, :match, :block_eval + + attr_reader :fields, :optional, :type + + INVALID_FIELDS = [:record_type, :target, :on_disk] + + # Customize this so we can do a bit of validation. + def fields=(fields) + @fields = fields.collect do |field| + r = symbolize(field) + raise ArgumentError.new("Cannot have fields named #{r}") if INVALID_FIELDS.include?(r) + r + end + end + + def initialize(type, options = {}, &block) + @type = symbolize(type) + raise ArgumentError, "Invalid record type #{@type}" unless [:record, :text].include?(@type) + + set_options(options) + + if self.type == :record + # Now set defaults. + self.absent ||= "" + self.separator ||= /\s+/ + self.joiner ||= " " + self.optional ||= [] + @rollup = true unless defined?(@rollup) + end + + if block_given? + @block_eval ||= :process + + # Allow the developer to specify that a block should be instance-eval'ed. + if @block_eval == :instance + instance_eval(&block) + else + meta_def(@block_eval, &block) + end + end + end + + # Convert a record into a line by joining the fields together appropriately. + # This is pulled into a separate method so it can be called by the hooks. 
+ def join(details) + joinchar = self.joiner + + fields.collect { |field| + # If the field is marked absent, use the appropriate replacement + if details[field] == :absent or details[field] == [:absent] or details[field].nil? + if self.optional.include?(field) + self.absent + else + raise ArgumentError, "Field '#{field}' is required" + end + else + details[field].to_s + end + }.reject { |c| c.nil?}.join(joinchar) + end + + # Customize this so we can do a bit of validation. + def optional=(optional) + @optional = optional.collect do |field| + symbolize(field) + end + end + + # Create a hook that modifies the hash resulting from parsing. + def post_parse=(block) + meta_def(:post_parse, &block) + end + + # Create a hook that modifies the hash just prior to generation. + def pre_gen=(block) + meta_def(:pre_gen, &block) + end + + # Are we a text type? + def text? + type == :text + end + + def to_line=(block) + meta_def(:to_line, &block) + end + end + + # Clear all existing record definitions. Only used for testing. + def clear_records + @record_types.clear + @record_order.clear + end + + def fields(type) + if record = record_type(type) + record.fields.dup + else + nil + end + end + + # Try to match a specific text line. + def handle_text_line(line, record) + line =~ record.match ? {:record_type => record.name, :line => line} : nil + end + + # Try to match a record. + def handle_record_line(line, record) + ret = nil + if record.respond_to?(:process) + if ret = record.send(:process, line.dup) + unless ret.is_a?(Hash) + raise Puppet::DevError, + "Process record type #{record.name} returned non-hash" + end + else + return nil + end + elsif regex = record.match + # In this case, we try to match the whole line and then use the + # match captures to get our fields. + if match = regex.match(line) + fields = [] + ret = {} + record.fields.zip(match.captures).each do |field, value| + if value == record.absent + ret[field] = :absent + else + ret[field] = value + end + end + else + nil + end + else + ret = {} + sep = record.separator + + # String "helpfully" replaces ' ' with /\s+/ in splitting, so we + # have to work around it. + if sep == " " + sep = / / + end + line_fields = line.split(sep) + record.fields.each do |param| + value = line_fields.shift + if value and value != record.absent + ret[param] = value + else + ret[param] = :absent + end + end + + if record.rollup and ! line_fields.empty? + last_field = record.fields[-1] + val = ([ret[last_field]] + line_fields).join(record.joiner) + ret[last_field] = val + end + end + + if ret + ret[:record_type] = record.name + return ret + else + return nil + end + end + + def line_separator + @line_separator ||= "\n" + + @line_separator + end + + # Split text into separate lines using the record separator. + def lines(text) + # Remove any trailing separators, and then split based on them + # LAK:NOTE See http://snurl.com/21zf8 [groups_google_com] + x = text.sub(/#{self.line_separator}\Q/,'').split(self.line_separator) + end + + # Split a bunch of text into lines and then parse them individually. + def parse(text) + count = 1 + lines(text).collect do |line| + count += 1 + if val = parse_line(line) + val + else + error = Puppet::Error.new("Could not parse line #{line.inspect}") + error.line = count + raise error + end + end + end + + # Handle parsing a single line. + def parse_line(line) + raise Puppet::DevError, "No record types defined; cannot parse lines" unless records? + + @record_order.each do |record| + # These are basically either text or record lines. 
+ method = "handle_#{record.type}_line" + if respond_to?(method) + if result = send(method, line, record) + record.send(:post_parse, result) if record.respond_to?(:post_parse) + return result + end + else + raise Puppet::DevError, + "Somehow got invalid line type #{record.type}" + end + end + + nil + end + + # Define a new type of record. These lines get split into hashes. Valid + # options are: + # * :absent: What to use as value within a line, when a field is + # absent. Note that in the record object, the literal :absent symbol is + # used, and not this value. Defaults to "". + # * :fields: The list of fields, as an array. By default, all + # fields are considered required. + # * :joiner: How to join fields together. Defaults to '\t'. + # * :optional: Which fields are optional. If these are missing, + # you'll just get the 'absent' value instead of an ArgumentError. + # * :rts: Whether to remove trailing whitespace. Defaults to false. + # If true, whitespace will be removed; if a regex, then whatever matches + # the regex will be removed. + # * :separator: The record separator. Defaults to /\s+/. + def record_line(name, options, &block) + raise ArgumentError, "Must include a list of fields" unless options.include?(:fields) + + record = FileRecord.new(:record, options, &block) + record.name = symbolize(name) + + new_line_type(record) + end + + # Are there any record types defined? + def records? + defined?(@record_types) and ! @record_types.empty? + end + + # Define a new type of text record. + def text_line(name, options, &block) + raise ArgumentError, "You must provide a :match regex for text lines" unless options.include?(:match) + + record = FileRecord.new(:text, options, &block) + record.name = symbolize(name) + + new_line_type(record) + end + + # Generate a file from a bunch of hash records. + def to_file(records) + text = records.collect { |record| to_line(record) }.join(line_separator) + + text += line_separator if trailing_separator + + text + end + + # Convert our parsed record into a text record. + def to_line(details) + unless record = record_type(details[:record_type]) + raise ArgumentError, "Invalid record type #{details[:record_type].inspect}" + end + + if record.respond_to?(:pre_gen) + details = details.dup + record.send(:pre_gen, details) + end + + case record.type + when :text; return details[:line] + else + return record.to_line(details) if record.respond_to?(:to_line) + + line = record.join(details) + + if regex = record.rts + # If they say true, then use whitespace; else, use their regex. + if regex == true + regex = /\s+$/ + end + return line.sub(regex,'') + else + return line + end + end + end + + # Whether to add a trailing separator to the file. Defaults to true + def trailing_separator + if defined?(@trailing_separator) + return @trailing_separator + else + return true + end + end + + def valid_attr?(type, attr) + type = symbolize(type) + if record = record_type(type) and record.fields.include?(symbolize(attr)) + return true + else + if symbolize(attr) == :ensure + return true + else + false + end + end + end + + private + + # Define a new type of record. + def new_line_type(record) + @record_types ||= {} + @record_order ||= [] + + raise ArgumentError, "Line type #{record.name} is already defined" if @record_types.include?(record.name) + + @record_types[record.name] = record + @record_order << record + + record + end + + # Retrieve the record object. 
+ def record_type(type) + @record_types[symbolize(type)] + end +end + diff --git a/mcollective/lib/puppet/util/filetype.rb b/mcollective/lib/puppet/util/filetype.rb new file mode 100755 index 000000000..fdc6ccd9b --- /dev/null +++ b/mcollective/lib/puppet/util/filetype.rb @@ -0,0 +1,283 @@ +# Basic classes for reading, writing, and emptying files. Not much +# to see here. + +require 'puppet/util/selinux' +require 'fileutils' + +class Puppet::Util::FileType + attr_accessor :loaded, :path, :synced + + include Puppet::Util::SELinux + + class << self + attr_accessor :name + include Puppet::Util::ClassGen + end + + # Create a new filetype. + def self.newfiletype(name, &block) + @filetypes ||= {} + + + klass = genclass( + name, + :block => block, + :prefix => "FileType", + + :hash => @filetypes + ) + + # Rename the read and write methods, so that we're sure they + # maintain the stats. + klass.class_eval do + # Rename the read method + define_method(:real_read, instance_method(:read)) + define_method(:read) do + begin + val = real_read + @loaded = Time.now + if val + return val.gsub(/# HEADER.*\n/,'') + else + return "" + end + rescue Puppet::Error => detail + raise + rescue => detail + puts detail.backtrace if Puppet[:trace] + raise Puppet::Error, "#{self.class} could not read #{@path}: #{detail}" + end + end + + # And then the write method + define_method(:real_write, instance_method(:write)) + define_method(:write) do |text| + begin + val = real_write(text) + @synced = Time.now + return val + rescue Puppet::Error => detail + raise + rescue => detail + puts detail.backtrace if Puppet[:debug] + raise Puppet::Error, "#{self.class} could not write #{@path}: #{detail}" + end + end + end + end + + def self.filetype(type) + @filetypes[type] + end + + # Pick or create a filebucket to use. + def bucket + @bucket ||= Puppet::Type.type(:filebucket).mkdefaultbucket.bucket + end + + def initialize(path) + raise ArgumentError.new("Path is nil") if path.nil? + @path = path + end + + # Operate on plain files. + newfiletype(:flat) do + # Back the file up before replacing it. + def backup + bucket.backup(@path) if File.exists?(@path) + end + + # Read the file. + def read + if File.exist?(@path) + File.read(@path) + else + return nil + end + end + + # Remove the file. + def remove + File.unlink(@path) if File.exist?(@path) + end + + # Overwrite the file. + def write(text) + require "tempfile" + tf = Tempfile.new("puppet") + tf.print text; tf.flush + FileUtils.cp(tf.path, @path) + tf.close + # If SELinux is present, we need to ensure the file has its expected context + set_selinux_default_context(@path) + end + end + + # Operate on plain files. + newfiletype(:ram) do + @@tabs = {} + + def self.clear + @@tabs.clear + end + + def initialize(path) + super + @@tabs[@path] ||= "" + end + + # Read the file. + def read + Puppet.info "Reading #{@path} from RAM" + @@tabs[@path] + end + + # Remove the file. + def remove + Puppet.info "Removing #{@path} from RAM" + @@tabs[@path] = "" + end + + # Overwrite the file. + def write(text) + Puppet.info "Writing #{@path} to RAM" + @@tabs[@path] = text + end + end + + # Handle Linux-style cron tabs. 
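A sketch of the :flat filetype defined above, which is the same mechanism the IniConfig code further down uses for its physical reads and writes; the path is illustrative:

    require 'puppet'
    require 'puppet/util/filetype'

    flat = Puppet::Util::FileType.filetype(:flat).new("/tmp/example.txt")
    flat.write("managed = true\n")   # writes via a tempfile, then copies into place
    flat.read                        # => "managed = true\n"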
+ newfiletype(:crontab) do + def initialize(user) + self.path = user + end + + def path=(user) + begin + @uid = Puppet::Util.uid(user) + rescue Puppet::Error => detail + raise Puppet::Error, "Could not retrieve user #{user}" + end + + # XXX We have to have the user name, not the uid, because some + # systems *cough*linux*cough* require it that way + @path = user + end + + # Read a specific @path's cron tab. + def read + %x{#{cmdbase} -l 2>/dev/null} + end + + # Remove a specific @path's cron tab. + def remove + if %w{Darwin FreeBSD}.include?(Facter.value("operatingsystem")) + %x{/bin/echo yes | #{cmdbase} -r 2>/dev/null} + else + %x{#{cmdbase} -r 2>/dev/null} + end + end + + # Overwrite a specific @path's cron tab; must be passed the @path name + # and the text with which to create the cron tab. + def write(text) + IO.popen("#{cmdbase()} -", "w") { |p| + p.print text + } + end + + private + + # Only add the -u flag when the @path is different. Fedora apparently + # does not think I should be allowed to set the @path to my own user name + def cmdbase + cmd = nil + if @uid == Puppet::Util::SUIDManager.uid || Facter.value(:operatingsystem) == "HP-UX" + return "crontab" + else + return "crontab -u #{@path}" + end + end + end + + # SunOS has completely different cron commands; this class implements + # its versions. + newfiletype(:suntab) do + # Read a specific @path's cron tab. + def read + output = Puppet::Util.execute(%w{crontab -l}, :uid => @path) + return "" if output.include?("can't open your crontab") + raise Puppet::Error, "User #{@path} not authorized to use cron" if output.include?("you are not authorized to use cron") + return output + rescue => detail + raise Puppet::Error, "Could not read crontab for #{@path}: #{detail}" + end + + # Remove a specific @path's cron tab. + def remove + Puppet::Util.execute(%w{crontab -r}, :uid => @path) + rescue => detail + raise Puppet::Error, "Could not remove crontab for #{@path}: #{detail}" + end + + # Overwrite a specific @path's cron tab; must be passed the @path name + # and the text with which to create the cron tab. + def write(text) + puts text + require "tempfile" + output_file = Tempfile.new("puppet") + fh = output_file.open + fh.print text + fh.close + + # We have to chown the stupid file to the user. + File.chown(Puppet::Util.uid(@path), nil, output_file.path) + + begin + Puppet::Util.execute(["crontab", output_file.path], :uid => @path) + rescue => detail + raise Puppet::Error, "Could not write crontab for #{@path}: #{detail}" + end + output_file.delete + end + end + + # Support for AIX crontab with output different than suntab's crontab command. + newfiletype(:aixtab) do + # Read a specific @path's cron tab. + def read + output = Puppet::Util.execute(%w{crontab -l}, :uid => @path) + raise Puppet::Error, "User #{@path} not authorized to use cron" if output.include?("You are not authorized to use the cron command") + return output + rescue => detail + raise Puppet::Error, "Could not read crontab for #{@path}: #{detail}" + end + + # Remove a specific @path's cron tab. + def remove + Puppet::Util.execute(%w{crontab -r}, :uid => @path) + rescue => detail + raise Puppet::Error, "Could not remove crontab for #{@path}: #{detail}" + end + + # Overwrite a specific @path's cron tab; must be passed the @path name + # and the text with which to create the cron tab. + def write(text) + require "tempfile" + output_file = Tempfile.new("puppet") + fh = output_file.open + fh.print text + fh.close + + # We have to chown the stupid file to the user. 
+ File.chown(Puppet::Util.uid(@path), nil, output_file.path) + + begin + Puppet::Util.execute(["crontab", output_file.path], :uid => @path) + rescue => detail + raise Puppet::Error, "Could not write crontab for #{@path}: #{detail}" + ensure + output_file.delete + end + end + end +end diff --git a/mcollective/lib/puppet/util/graph.rb b/mcollective/lib/puppet/util/graph.rb new file mode 100644 index 000000000..9598d281e --- /dev/null +++ b/mcollective/lib/puppet/util/graph.rb @@ -0,0 +1,30 @@ +# Created by Luke Kanies on 2006-11-16. +# Copyright (c) 2006. All rights reserved. + +require 'puppet' +require 'puppet/simple_graph' + +# A module that handles the small amount of graph stuff in Puppet. +module Puppet::Util::Graph + # Make a graph where each of our children gets converted to + # the receiving end of an edge. Call the same thing on all + # of our children, optionally using a block + def to_graph(graph = nil, &block) + # Allow our calling function to send in a graph, so that we + # can call this recursively with one graph. + graph ||= Puppet::SimpleGraph.new + + self.each do |child| + unless block_given? and ! yield(child) + graph.add_edge(self, child) + + child.to_graph(graph, &block) if child.respond_to?(:to_graph) + end + end + + # Do a topsort, which will throw an exception if the graph is cyclic. + + graph + end +end + diff --git a/mcollective/lib/puppet/util/inifile.rb b/mcollective/lib/puppet/util/inifile.rb new file mode 100644 index 000000000..82f074d53 --- /dev/null +++ b/mcollective/lib/puppet/util/inifile.rb @@ -0,0 +1,203 @@ +# Module Puppet::IniConfig +# A generic way to parse .ini style files and manipulate them in memory +# One 'file' can be made up of several physical files. Changes to sections +# on the file are tracked so that only the physical files in which +# something has changed are written back to disk +# Great care is taken to preserve comments and blank lines from the original +# files +# +# The parsing tries to stay close to python's ConfigParser + +require 'puppet/util/filetype' + +module Puppet::Util::IniConfig + # A section in a .ini file + class Section + attr_reader :name, :file + + def initialize(name, file) + @name = name + @file = file + @dirty = false + @entries = [] + end + + # Has this section been modified since it's been read in + # or written back to disk + def dirty? + @dirty + end + + # Should only be used internally + def mark_clean + @dirty = false + end + + # Add a line of text (e.g., a comment) Such lines + # will be written back out in exactly the same + # place they were read in + def add_line(line) + @entries << line + end + + # Set the entry 'key=value'. If no entry with the + # given key exists, one is appended to teh end of the section + def []=(key, value) + entry = find_entry(key) + @dirty = true + if entry.nil? + @entries << [key, value] + else + entry[1] = value + end + end + + # Return the value associated with KEY. If no such entry + # exists, return nil + def [](key) + entry = find_entry(key) + return(entry.nil? ? nil : entry[1]) + end + + # Format the section as text in the way it should be + # written to file + def format + text = "[#{name}]\n" + @entries.each do |entry| + if entry.is_a?(Array) + key, value = entry + text << "#{key}=#{value}\n" unless value.nil? 
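Puppet::Util::Graph#to_graph, shown above in graph.rb, only requires that the receiver and its children respond to each; a sketch with a hypothetical Node tree:

    require 'puppet'
    require 'puppet/util/graph'

    class Node
      include Puppet::Util::Graph
      attr_reader :name, :children
      def initialize(name, children = [])
        @name, @children = name, children
      end
      # to_graph walks children via each, adding a parent-to-child edge for each one.
      def each(&block)
        @children.each(&block)
      end
    end

    tree  = Node.new("root", [Node.new("a"), Node.new("b", [Node.new("c")])])
    graph = tree.to_graph
    graph.edges.length   # => 3 (root->a, root->b, b->c)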
+ else + text << entry + end + end + text + end + + private + def find_entry(key) + @entries.each do |entry| + return entry if entry.is_a?(Array) && entry[0] == key + end + nil + end + + end + + # A logical .ini-file that can be spread across several physical + # files. For each physical file, call #read with the filename + class File + def initialize + @files = {} + end + + # Add the contents of the file with name FILE to the + # already existing sections + def read(file) + text = Puppet::Util::FileType.filetype(:flat).new(file).read + raise "Could not find #{file}" if text.nil? + + section = nil # The name of the current section + optname = nil # The name of the last option in section + line = 0 + @files[file] = [] + text.each_line do |l| + line += 1 + if l.strip.empty? || "#;".include?(l[0,1]) || + (l.split(nil, 2)[0].downcase == "rem" && l[0,1].downcase == "r") + # Whitespace or comment + if section.nil? + @files[file] << l + else + section.add_line(l) + end + elsif " \t\r\n\f".include?(l[0,1]) && section && optname + # continuation line + section[optname] += "\n#{l.chomp}" + elsif l =~ /^\[([^\]]+)\]/ + # section heading + section.mark_clean unless section.nil? + section = add_section($1, file) + optname = nil + elsif l =~ /^\s*([^\s=]+)\s*\=(.*)$/ + # We allow space around the keys, but not the values + # For the values, we don't know if space is significant + if section.nil? + raise "#{file}:#{line}:Key/value pair outside of a section for key #{$1}" + else + section[$1] = $2 + optname = $1 + end + else + raise "#{file}:#{line}: Can't parse '#{l.chomp}'" + end + end + section.mark_clean unless section.nil? + end + + # Store all modifications made to sections in this file back + # to the physical files. If no modifications were made to + # a physical file, nothing is written + def store + @files.each do |file, lines| + text = "" + dirty = false + lines.each do |l| + if l.is_a?(Section) + dirty ||= l.dirty? + text << l.format + l.mark_clean + else + text << l + end + end + if dirty + Puppet::Util::FileType.filetype(:flat).new(file).write(text) + return file + end + end + end + + # Execute BLOCK, passing each section in this file + # as an argument + def each_section(&block) + @files.each do |file, list| + list.each do |entry| + yield(entry) if entry.is_a?(Section) + end + end + end + + # Execute BLOCK, passing each file constituting this inifile + # as an argument + def each_file(&block) + @files.keys.each do |file| + yield(file) + end + end + + # Return the Section with the given name or nil + def [](name) + name = name.to_s + each_section do |section| + return section if section.name == name + end + nil + end + + # Return true if the file contains a section with name NAME + def include?(name) + ! self[name].nil? 
+ end + + # Add a section to be stored in FILE when store is called + def add_section(name, file) + raise "A section with name #{name} already exists" if include?(name) + result = Section.new(name, file) + @files[file] ||= [] + @files[file] << result + result + end + end +end + diff --git a/mcollective/lib/puppet/util/inline_docs.rb b/mcollective/lib/puppet/util/inline_docs.rb new file mode 100644 index 000000000..62818c1b8 --- /dev/null +++ b/mcollective/lib/puppet/util/inline_docs.rb @@ -0,0 +1,26 @@ +module Puppet::Util::InlineDocs + def self.included(klass) + klass.send(:include, InstanceMethods) + klass.extend ClassMethods + end + + module ClassMethods + attr_accessor :use_docs + def associates_doc + self.use_docs = true + end + end + + module InstanceMethods + attr_writer :doc + + def doc + @doc ||= "" + end + + # don't fetch lexer comment by default + def use_docs + self.class.use_docs + end + end +end diff --git a/mcollective/lib/puppet/util/instance_loader.rb b/mcollective/lib/puppet/util/instance_loader.rb new file mode 100755 index 000000000..5e16bd7fa --- /dev/null +++ b/mcollective/lib/puppet/util/instance_loader.rb @@ -0,0 +1,81 @@ +require 'puppet/util/autoload' +require 'puppet/util' + +# A module that can easily autoload things for us. Uses an instance +# of Puppet::Util::Autoload +module Puppet::Util::InstanceLoader + include Puppet::Util + + # Are we instance-loading this type? + def instance_loading?(type) + defined?(@autoloaders) and @autoloaders.include?(symbolize(type)) + end + + # Define a new type of autoloading. + def instance_load(type, path, options = {}) + @autoloaders ||= {} + @instances ||= {} + type = symbolize(type) + @instances[type] = {} + @autoloaders[type] = Puppet::Util::Autoload.new(self, path, options) + + # Now define our new simple methods + unless respond_to?(type) + meta_def(type) do |name| + loaded_instance(type, name) + end + end + end + + # Return a list of the names of all instances + def loaded_instances(type) + @instances[type].keys + end + + # Collect the docs for all of our instances. + def instance_docs(type) + docs = "" + + # Load all instances. + instance_loader(type).loadall + + # Use this method so they all get loaded + loaded_instances(type).sort { |a,b| a.to_s <=> b.to_s }.each do |name| + mod = self.loaded_instance(name) + docs += "#{name}\n#{"-" * name.to_s.length}\n" + + docs += Puppet::Util::Docs.scrub(mod.doc) + "\n\n" + end + + docs + end + + # Return the instance hash for our type. + def instance_hash(type) + @instances[symbolize(type)] + end + + # Return the Autoload object for a given type. + def instance_loader(type) + @autoloaders[symbolize(type)] + end + + # Retrieve an alread-loaded instance, or attempt to load our instance. + def loaded_instance(type, name) + name = symbolize(name) + return nil unless instances = instance_hash(type) + unless instances.include? name + if instance_loader(type).load(name) + unless instances.include? name + Puppet.warning( + "Loaded #{type} file for #{name} but #{type} was not defined" + ) + return nil + end + else + return nil + end + end + instances[name] + end +end diff --git a/mcollective/lib/puppet/util/ldap.rb b/mcollective/lib/puppet/util/ldap.rb new file mode 100644 index 000000000..33f01f789 --- /dev/null +++ b/mcollective/lib/puppet/util/ldap.rb @@ -0,0 +1,5 @@ +# +# Created by Luke Kanies on 2008-3-23. +# Copyright (c) 2008. All rights reserved. 
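A usage sketch for the IniConfig classes above: parse a file, change one key, and let store rewrite only the physical files whose sections were touched. The path and section name are illustrative:

    require 'puppet'
    require 'puppet/util/inifile'

    config = Puppet::Util::IniConfig::File.new
    config.read("/etc/example/example.ini")

    if section = config["main"]
      section["enabled"] = "1"    # marks the section dirty
    end

    config.store                  # rewrites only files containing dirty sections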
+module Puppet::Util::Ldap +end diff --git a/mcollective/lib/puppet/util/ldap/connection.rb b/mcollective/lib/puppet/util/ldap/connection.rb new file mode 100644 index 000000000..03240eae9 --- /dev/null +++ b/mcollective/lib/puppet/util/ldap/connection.rb @@ -0,0 +1,77 @@ +# +# Created by Luke Kanies on 2008-3-23. +# Copyright (c) 2008. All rights reserved. +require 'puppet/util/ldap' + +class Puppet::Util::Ldap::Connection + attr_accessor :host, :port, :user, :password, :reset, :ssl + + attr_reader :connection + + # Return a default connection, using our default settings. + def self.instance + ssl = if Puppet[:ldaptls] + :tls + elsif Puppet[:ldapssl] + true + else + false + end + + options = {} + options[:ssl] = ssl + if user = Puppet.settings[:ldapuser] and user != "" + options[:user] = user + if pass = Puppet.settings[:ldappassword] and pass != "" + options[:password] = pass + end + end + + new(Puppet[:ldapserver], Puppet[:ldapport], options) + end + + def close + connection.unbind if connection.bound? + end + + def initialize(host, port, options = {}) + raise Puppet::Error, "Could not set up LDAP Connection: Missing ruby/ldap libraries" unless Puppet.features.ldap? + + @host, @port = host, port + + options.each do |param, value| + begin + send(param.to_s + "=", value) + rescue + raise ArgumentError, "LDAP connections do not support #{param} parameters" + end + end + end + + # Create a per-connection unique name. + def name + [host, port, user, password, ssl].collect { |p| p.to_s }.join("/") + end + + # Should we reset the connection? + def reset? + reset + end + + # Start our ldap connection. + def start + case ssl + when :tls + @connection = LDAP::SSLConn.new(host, port, true) + when true + @connection = LDAP::SSLConn.new(host, port) + else + @connection = LDAP::Conn.new(host, port) + end + @connection.set_option(LDAP::LDAP_OPT_PROTOCOL_VERSION, 3) + @connection.set_option(LDAP::LDAP_OPT_REFERRALS, LDAP::LDAP_OPT_ON) + @connection.simple_bind(user, password) + rescue => detail + raise Puppet::Error, "Could not connect to LDAP: #{detail}" + end +end diff --git a/mcollective/lib/puppet/util/ldap/generator.rb b/mcollective/lib/puppet/util/ldap/generator.rb new file mode 100644 index 000000000..2aaa9c370 --- /dev/null +++ b/mcollective/lib/puppet/util/ldap/generator.rb @@ -0,0 +1,45 @@ +# +# Created by Luke Kanies on 2008-3-28. +# Copyright (c) 2008. All rights reserved. +require 'puppet/util/ldap' + +class Puppet::Util::Ldap::Generator + # Declare the attribute we'll use to generate the value. + def from(source) + @source = source + self + end + + # Actually do the generation. + def generate(value = nil) + if value.nil? + @generator.call + else + @generator.call(value) + end + end + + # Initialize our generator with the name of the parameter + # being generated. + def initialize(name) + @name = name + end + + def name + @name.to_s + end + + def source + if @source + @source.to_s + else + nil + end + end + + # Provide the code that does the generation. + def with(&block) + @generator = block + self + end +end diff --git a/mcollective/lib/puppet/util/ldap/manager.rb b/mcollective/lib/puppet/util/ldap/manager.rb new file mode 100644 index 000000000..2ccd102bc --- /dev/null +++ b/mcollective/lib/puppet/util/ldap/manager.rb @@ -0,0 +1,279 @@ +require 'puppet/util/ldap' +require 'puppet/util/ldap/connection' +require 'puppet/util/ldap/generator' + +# The configuration class for LDAP providers, plus +# connection handling for actually interacting with ldap. 
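A connection sketch for the Connection class above, assuming the ruby 'ldap' bindings are installed and the ldapserver/ldapport/ldapuser settings point at a reachable directory:

    require 'puppet'
    require 'puppet/util/ldap/connection'

    conn = Puppet::Util::Ldap::Connection.instance   # built from the Puppet[:ldap*] settings
    conn.start
    begin
      # conn.connection is the raw LDAP::Conn / LDAP::SSLConn object.
      conn.connection.search2(Puppet[:ldapbase], 1, "objectclass=*") do |entry|
        puts entry["dn"]
      end
    ensure
      conn.close
    end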
+class Puppet::Util::Ldap::Manager + attr_reader :objectclasses, :puppet2ldap, :location, :rdn + + # A null-op that just returns the config. + def and + self + end + + # Set the offset from the search base and return the config. + def at(location) + @location = location + self + end + + # The basic search base. + def base + [location, Puppet[:ldapbase]].join(",") + end + + # Convert the name to a dn, then pass the args along to + # our connection. + def create(name, attributes) + attributes = attributes.dup + + # Add the objectclasses + attributes["objectClass"] = objectclasses.collect { |o| o.to_s } + attributes["objectClass"] << "top" unless attributes["objectClass"].include?("top") + + attributes[rdn.to_s] = [name] + + # Generate any new values we might need. + generate(attributes) + + # And create our resource. + connect { |conn| conn.add dn(name), attributes } + end + + # Open, yield, and close the connection. Cannot be left + # open, at this point. + def connect + raise ArgumentError, "You must pass a block to #connect" unless block_given? + + unless @connection + if Puppet[:ldaptls] + ssl = :tls + elsif Puppet[:ldapssl] + ssl = true + else + ssl = false + end + options = {:ssl => ssl} + if user = Puppet[:ldapuser] and user != "" + options[:user] = user + end + if password = Puppet[:ldappassword] and password != "" + options[:password] = password + end + @connection = Puppet::Util::Ldap::Connection.new(Puppet[:ldapserver], Puppet[:ldapport], options) + end + @connection.start + begin + yield @connection.connection + ensure + @connection.close + end + nil + end + + # Convert the name to a dn, then pass the args along to + # our connection. + def delete(name) + connect { |connection| connection.delete dn(name) } + end + + # Calculate the dn for a given resource. + def dn(name) + ["#{rdn}=#{name}", base].join(",") + end + + # Convert an ldap-style entry hash to a provider-style hash. + def entry2provider(entry) + raise ArgumentError, "Could not get dn from ldap entry" unless entry["dn"] + + # DN is always a single-entry array. Strip off the bits before the + # first comma, then the bits after the remaining equal sign. This is the + # name. + name = entry["dn"].dup.pop.split(",").shift.split("=").pop + + result = {:name => name} + + @ldap2puppet.each do |ldap, puppet| + result[puppet] = entry[ldap.to_s] || :absent + end + + result + end + + # Create our normal search filter. + def filter + return(objectclasses.length == 1 ? "objectclass=#{objectclasses[0]}" : "(&(objectclass=" + objectclasses.join(")(objectclass=") + "))") + end + + # Find the associated entry for a resource. Returns a hash, minus + # 'dn', or nil if the entry cannot be found. + def find(name) + result = nil + connect do |conn| + begin + conn.search2(dn(name), 0, "objectclass=*") do |result| + # Convert to puppet-appropriate attributes + return entry2provider(result) + end + rescue => detail + return nil + end + end + end + + # Declare a new attribute generator. + def generates(parameter) + @generators << Puppet::Util::Ldap::Generator.new(parameter) + @generators[-1] + end + + # Generate any extra values we need to make the ldap entry work. + def generate(values) + return unless @generators.length > 0 + + @generators.each do |generator| + # Don't override any values that might exist. 
+ next if values[generator.name] + + if generator.source + unless value = values[generator.source] + raise ArgumentError, "#{generator.source} must be defined to generate #{generator.name}" + end + result = generator.generate(value) + else + result = generator.generate + end + + result = [result] unless result.is_a?(Array) + result = result.collect { |r| r.to_s } + + values[generator.name] = result + end + end + + def initialize + @rdn = :cn + @generators = [] + end + + # Specify what classes this provider models. + def manages(*classes) + @objectclasses = classes + self + end + + # Specify the attribute map. Assumes the keys are the puppet + # attributes, and the values are the ldap attributes, and creates a map + # for each direction. + def maps(attributes) + # The map with the puppet attributes as the keys + @puppet2ldap = attributes + + # and the ldap attributes as the keys. + @ldap2puppet = attributes.inject({}) { |map, ary| map[ary[1]] = ary[0]; map } + + self + end + + # Return the ldap name for a puppet attribute. + def ldap_name(attribute) + @puppet2ldap[attribute].to_s + end + + # Convert the name to a dn, then pass the args along to + # our connection. + def modify(name, mods) + connect { |connection| connection.modify dn(name), mods } + end + + # Specify the rdn that we use to build up our dn. + def named_by(attribute) + @rdn = attribute + self + end + + # Return the puppet name for an ldap attribute. + def puppet_name(attribute) + @ldap2puppet[attribute] + end + + # Search for all entries at our base. A potentially expensive search. + def search(sfilter = nil) + sfilter ||= filter + + result = [] + connect do |conn| + conn.search2(base, 1, sfilter) do |entry| + result << entry2provider(entry) + end + end + return(result.empty? ? nil : result) + end + + # Update the ldap entry with the desired state. + def update(name, is, should) + if should[:ensure] == :absent + Puppet.info "Removing #{dn(name)} from ldap" + delete(name) + return + end + + # We're creating a new entry + if is.empty? or is[:ensure] == :absent + Puppet.info "Creating #{dn(name)} in ldap" + # Remove any :absent params and :ensure, then convert the names to ldap names. + attrs = ldap_convert(should) + create(name, attrs) + return + end + + # We're modifying an existing entry. Yuck. + + mods = [] + # For each attribute we're deleting that is present, create a + # modify instance for deletion. + [is.keys, should.keys].flatten.uniq.each do |property| + # They're equal, so do nothing. + next if is[property] == should[property] + + attributes = ldap_convert(should) + + prop_name = ldap_name(property).to_s + + # We're creating it. + if is[property] == :absent or is[property].nil? + mods << LDAP::Mod.new(LDAP::LDAP_MOD_ADD, prop_name, attributes[prop_name]) + next + end + + # We're deleting it + if should[property] == :absent or should[property].nil? + mods << LDAP::Mod.new(LDAP::LDAP_MOD_DELETE, prop_name, []) + next + end + + # We're replacing an existing value + mods << LDAP::Mod.new(LDAP::LDAP_MOD_REPLACE, prop_name, attributes[prop_name]) + end + + modify(name, mods) + end + + # Is this a complete ldap configuration? + def valid? + location and objectclasses and ! objectclasses.empty? and puppet2ldap + end + + private + + # Convert a hash of attributes to ldap-like forms. This mostly means + # getting rid of :ensure and making sure everything's an array of strings. 
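+  #
+  # For example (illustrative; this assumes the manager was configured with
+  # maps(:uid => "uidNumber", :shell => "loginShell")):
+  #
+  #   ldap_convert(:ensure => :present, :uid => 1000, :shell => "/bin/bash")
+  #   #=> {"uidNumber" => ["1000"], "loginShell" => ["/bin/bash"]}
+  #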
+ def ldap_convert(attributes) + attributes.reject { |param, value| value == :absent or param == :ensure }.inject({}) do |result, ary| + value = (ary[1].is_a?(Array) ? ary[1] : [ary[1]]).collect { |v| v.to_s } + result[ldap_name(ary[0])] = value + result + end + end +end diff --git a/mcollective/lib/puppet/util/loadedfile.rb b/mcollective/lib/puppet/util/loadedfile.rb new file mode 100755 index 000000000..d2f5d0923 --- /dev/null +++ b/mcollective/lib/puppet/util/loadedfile.rb @@ -0,0 +1,61 @@ +# A simple class that tells us when a file has changed and thus whether we +# should reload it + +require 'puppet' + +module Puppet + class NoSuchFile < Puppet::Error; end + class Util::LoadedFile + attr_reader :file, :statted + + # Provide a hook for setting the timestamp during testing, so we don't + # have to depend on the granularity of the filesystem. + attr_writer :tstamp + + # Determine whether the file has changed and thus whether it should + # be reparsed. + def changed? + # Allow the timeout to be disabled entirely. + return true if Puppet[:filetimeout] < 0 + tmp = stamp + + # We use a different internal variable than the stamp method + # because it doesn't keep historical state and we do -- that is, + # we will always be comparing two timestamps, whereas + # stamp just always wants the latest one. + if tmp == @tstamp + return false + else + @tstamp = tmp + return @tstamp + end + end + + # Create the file. Must be passed the file path. + def initialize(file) + @file = file + @statted = 0 + @stamp = nil + @tstamp = stamp + end + + # Retrieve the filestamp, but only refresh it if we're beyond our + # filetimeout + def stamp + if @stamp.nil? or (Time.now.to_i - @statted >= Puppet[:filetimeout]) + @statted = Time.now.to_i + begin + @stamp = File.stat(@file).ctime + rescue Errno::ENOENT, Errno::ENOTDIR + @stamp = Time.now + end + end + @stamp + end + + def to_s + @file + end + end +end + diff --git a/mcollective/lib/puppet/util/log.rb b/mcollective/lib/puppet/util/log.rb new file mode 100644 index 000000000..ba1690078 --- /dev/null +++ b/mcollective/lib/puppet/util/log.rb @@ -0,0 +1,264 @@ +require 'puppet/util/tagging' +require 'puppet/util/classgen' + +# Pass feedback to the user. Log levels are modeled after syslog's, and it is +# expected that that will be the most common log destination. Supports +# multiple destinations, one of which is a remote server. +class Puppet::Util::Log + include Puppet::Util + extend Puppet::Util::ClassGen + include Puppet::Util::Tagging + + @levels = [:debug,:info,:notice,:warning,:err,:alert,:emerg,:crit] + @loglevel = 2 + + @desttypes = {} + + # Create a new destination type. + def self.newdesttype(name, options = {}, &block) + + dest = genclass( + name, + :parent => Puppet::Util::Log::Destination, + :prefix => "Dest", + :block => block, + :hash => @desttypes, + :attributes => options + ) + dest.match(dest.name) + + dest + end + + require 'puppet/util/log/destination' + require 'puppet/util/log/destinations' + + @destinations = {} + + @queued = [] + + class << self + include Puppet::Util + include Puppet::Util::ClassGen + + attr_reader :desttypes + end + + # Reset log to basics. Basically just flushes and closes files and + # undefs other objects. 
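+  #
+  # Illustrative usage (not part of the original source; the log file path
+  # used here is hypothetical):
+  #
+  #   Puppet::Util::Log.newdestination(:console)
+  #   Puppet::Util::Log.newdestination("/var/log/puppet/master.log")
+  #   Puppet::Util::Log.create(:level => :notice, :message => "starting up")
+  #   Puppet::Util::Log.close(:console)
+  #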
+ def Log.close(destination) + if @destinations.include?(destination) + @destinations[destination].flush if @destinations[destination].respond_to?(:flush) + @destinations[destination].close if @destinations[destination].respond_to?(:close) + @destinations.delete(destination) + end + end + + def self.close_all + destinations.keys.each { |dest| + close(dest) + } + end + + # Flush any log destinations that support such operations. + def Log.flush + @destinations.each { |type, dest| + dest.flush if dest.respond_to?(:flush) + } + end + + def Log.autoflush=(v) + @destinations.each do |type, dest| + dest.autoflush = v if dest.respond_to?(:autoflush=) + end + end + + # Create a new log message. The primary role of this method is to + # avoid creating log messages below the loglevel. + def Log.create(hash) + raise Puppet::DevError, "Logs require a level" unless hash.include?(:level) + raise Puppet::DevError, "Invalid log level #{hash[:level]}" unless @levels.index(hash[:level]) + @levels.index(hash[:level]) >= @loglevel ? Puppet::Util::Log.new(hash) : nil + end + + def Log.destinations + @destinations + end + + # Yield each valid level in turn + def Log.eachlevel + @levels.each { |level| yield level } + end + + # Return the current log level. + def Log.level + @levels[@loglevel] + end + + # Set the current log level. + def Log.level=(level) + level = level.intern unless level.is_a?(Symbol) + + raise Puppet::DevError, "Invalid loglevel #{level}" unless @levels.include?(level) + + @loglevel = @levels.index(level) + end + + def Log.levels + @levels.dup + end + + # Create a new log destination. + def Log.newdestination(dest) + # Each destination can only occur once. + if @destinations.find { |name, obj| obj.name == dest } + return + end + + name, type = @desttypes.find do |name, klass| + klass.match?(dest) + end + + raise Puppet::DevError, "Unknown destination type #{dest}" unless type + + begin + if type.instance_method(:initialize).arity == 1 + @destinations[dest] = type.new(dest) + else + @destinations[dest] = type.new + end + flushqueue + @destinations[dest] + rescue => detail + puts detail.backtrace if Puppet[:debug] + + # If this was our only destination, then add the console back in. + newdestination(:console) if @destinations.empty? and (dest != :console and dest != "console") + end + end + + # Route the actual message. FIXME There are lots of things this method + # should do, like caching and a bit more. It's worth noting that there's + # a potential for a loop here, if the machine somehow gets the destination set as + # itself. + def Log.newmessage(msg) + return if @levels.index(msg.level) < @loglevel + + queuemessage(msg) if @destinations.length == 0 + + @destinations.each do |name, dest| + threadlock(dest) do + dest.handle(msg) + end + end + end + + def Log.queuemessage(msg) + @queued.push(msg) + end + + def Log.flushqueue + return unless @destinations.size >= 1 + @queued.each do |msg| + Log.newmessage(msg) + end + @queued.clear + end + + def Log.sendlevel?(level) + @levels.index(level) >= @loglevel + end + + # Reopen all of our logs. + def Log.reopen + Puppet.notice "Reopening log files" + types = @destinations.keys + @destinations.each { |type, dest| + dest.close if dest.respond_to?(:close) + } + @destinations.clear + # We need to make sure we always end up with some kind of destination + begin + types.each { |type| + Log.newdestination(type) + } + rescue => detail + if @destinations.empty? 
+ Log.newdestination(:syslog) + Puppet.err detail.to_s + end + end + end + + # Is the passed level a valid log level? + def self.validlevel?(level) + @levels.include?(level) + end + + attr_accessor :time, :remote, :file, :line, :source + attr_reader :level, :message + + def initialize(args) + self.level = args[:level] + self.message = args[:message] + self.source = args[:source] || "Puppet" + + @time = Time.now + + if tags = args[:tags] + tags.each { |t| self.tag(t) } + end + + [:file, :line].each do |attr| + next unless value = args[attr] + send(attr.to_s + "=", value) + end + + Log.newmessage(self) + end + + def message=(msg) + raise ArgumentError, "Puppet::Util::Log requires a message" unless msg + @message = msg.to_s + end + + def level=(level) + raise ArgumentError, "Puppet::Util::Log requires a log level" unless level + @level = level.to_sym + raise ArgumentError, "Invalid log level #{@level}" unless self.class.validlevel?(@level) + + # Tag myself with my log level + tag(level) + end + + # If they pass a source in to us, we make sure it is a string, and + # we retrieve any tags we can. + def source=(source) + if source.respond_to?(:source_descriptors) + descriptors = source.source_descriptors + @source = descriptors[:path] + + descriptors[:tags].each { |t| tag(t) } + + [:file, :line].each do |param| + next unless descriptors[param] + send(param.to_s + "=", descriptors[param]) + end + else + @source = source.to_s + end + end + + def to_report + "#{time} #{source} (#{level}): #{to_s}" + end + + def to_s + message + end +end + +# This is for backward compatibility from when we changed the constant to Puppet::Util::Log +# because the reports include the constant name. Apparently the alias was created in +# March 2007, should could probably be removed soon. +Puppet::Log = Puppet::Util::Log diff --git a/mcollective/lib/puppet/util/log/destination.rb b/mcollective/lib/puppet/util/log/destination.rb new file mode 100644 index 000000000..11ebd7d05 --- /dev/null +++ b/mcollective/lib/puppet/util/log/destination.rb @@ -0,0 +1,49 @@ +# A type of log destination. +class Puppet::Util::Log::Destination + class << self + attr_accessor :name + end + + def self.initvars + @matches = [] + end + + # Mark the things we're supposed to match. + def self.match(obj) + @matches ||= [] + @matches << obj + end + + # See whether we match a given thing. + def self.match?(obj) + # Convert single-word strings into symbols like :console and :syslog + if obj.is_a? String and obj =~ /^\w+$/ + obj = obj.downcase.intern + end + + @matches.each do |thing| + # Search for direct matches or class matches + return true if thing === obj or thing == obj.class.to_s + end + false + end + + def name + if defined?(@name) + return @name + else + return self.class.name + end + end + + # Set how to handle a message. + def self.sethandler(&block) + define_method(:handle, &block) + end + + # Mark how to initialize our object. + def self.setinit(&block) + define_method(:initialize, &block) + end +end + diff --git a/mcollective/lib/puppet/util/log/destinations.rb b/mcollective/lib/puppet/util/log/destinations.rb new file mode 100644 index 000000000..c70edeb02 --- /dev/null +++ b/mcollective/lib/puppet/util/log/destinations.rb @@ -0,0 +1,219 @@ +Puppet::Util::Log.newdesttype :syslog do + def close + Syslog.close + end + + def initialize + Syslog.close if Syslog.opened? + name = Puppet[:name] + name = "puppet-#{name}" unless name =~ /puppet/ + + options = Syslog::LOG_PID | Syslog::LOG_NDELAY + + # XXX This should really be configurable. 
+ str = Puppet[:syslogfacility] + begin + facility = Syslog.const_get("LOG_#{str.upcase}") + rescue NameError + raise Puppet::Error, "Invalid syslog facility #{str}" + end + + @syslog = Syslog.open(name, options, facility) + end + + def handle(msg) + # XXX Syslog currently has a bug that makes it so you + # cannot log a message with a '%' in it. So, we get rid + # of them. + if msg.source == "Puppet" + @syslog.send(msg.level, msg.to_s.gsub("%", '%%')) + else + @syslog.send(msg.level, "(%s) %s" % [msg.source.to_s.gsub("%", ""), + msg.to_s.gsub("%", '%%') + ] + ) + end + end +end + +Puppet::Util::Log.newdesttype :file do + match(/^\//) + + def close + if defined?(@file) + @file.close + @file = nil + end + end + + def flush + @file.flush if defined?(@file) + end + + attr_accessor :autoflush + + def initialize(path) + @name = path + # first make sure the directory exists + # We can't just use 'Config.use' here, because they've + # specified a "special" destination. + unless FileTest.exist?(File.dirname(path)) + Puppet.recmkdir(File.dirname(path)) + Puppet.info "Creating log directory #{File.dirname(path)}" + end + + # create the log file, if it doesn't already exist + file = File.open(path, File::WRONLY|File::CREAT|File::APPEND) + + @file = file + + @autoflush = Puppet[:autoflush] + end + + def handle(msg) + @file.puts("#{msg.time} #{msg.source} (#{msg.level}): #{msg}") + + @file.flush if @autoflush + end +end + +Puppet::Util::Log.newdesttype :console do + + + RED = {:console => "", :html => "FFA0A0"} + GREEN = {:console => "", :html => "00CD00"} + YELLOW = {:console => "", :html => "FFFF60"} + BLUE = {:console => "", :html => "80A0FF"} + PURPLE = {:console => "", :html => "FFA500"} + CYAN = {:console => "", :html => "40FFFF"} + WHITE = {:console => "", :html => "FFFFFF"} + HRED = {:console => "", :html => "FFA0A0"} + HGREEN = {:console => "", :html => "00CD00"} + HYELLOW = {:console => "", :html => "FFFF60"} + HBLUE = {:console => "", :html => "80A0FF"} + HPURPLE = {:console => "", :html => "FFA500"} + HCYAN = {:console => "", :html => "40FFFF"} + HWHITE = {:console => "", :html => "FFFFFF"} + RESET = {:console => "", :html => "" } + + @@colormap = { + :debug => WHITE, + :info => GREEN, + :notice => CYAN, + :warning => YELLOW, + :err => HPURPLE, + :alert => RED, + :emerg => HRED, + :crit => HRED + } + + def colorize(level, str) + case Puppet[:color] + when true, :ansi, "ansi", "yes"; console_color(level, str) + when :html, "html"; html_color(level, str) + else + str + end + end + + def console_color(level, str) + @@colormap[level][:console] + str + RESET[:console] + end + + def html_color(level, str) + %{%s} % [@@colormap[level][:html], str] + end + + def initialize + # Flush output immediately. 
+ $stdout.sync = true + end + + def handle(msg) + if msg.source == "Puppet" + puts colorize(msg.level, "#{msg.level}: #{msg}") + else + puts colorize(msg.level, "#{msg.level}: #{msg.source}: #{msg}") + end + end +end + +Puppet::Util::Log.newdesttype :host do + def initialize(host) + Puppet.info "Treating #{host} as a hostname" + args = {} + if host =~ /:(\d+)/ + args[:Port] = $1 + args[:Server] = host.sub(/:\d+/, '') + else + args[:Server] = host + end + + @name = host + + @driver = Puppet::Network::Client::LogClient.new(args) + end + + def handle(msg) + unless msg.is_a?(String) or msg.remote + @hostname ||= Facter["hostname"].value + unless defined?(@domain) + @domain = Facter["domain"].value + @hostname += ".#{@domain}" if @domain + end + if msg.source =~ /^\// + msg.source = @hostname + ":#{msg.source}" + elsif msg.source == "Puppet" + msg.source = @hostname + " #{msg.source}" + else + msg.source = @hostname + " #{msg.source}" + end + begin + #puts "would have sent #{msg}" + #puts "would have sent %s" % + # CGI.escape(YAML.dump(msg)) + begin + tmp = CGI.escape(YAML.dump(msg)) + rescue => detail + puts "Could not dump: #{detail}" + return + end + # Add the hostname to the source + @driver.addlog(tmp) + rescue => detail + puts detail.backtrace if Puppet[:trace] + Puppet.err detail + Puppet::Util::Log.close(self) + end + end + end +end + +# Log to a transaction report. +Puppet::Util::Log.newdesttype :report do + attr_reader :report + + match "Puppet::Transaction::Report" + + def initialize(report) + @report = report + end + + def handle(msg) + @report << msg + end +end + +# Log to an array, just for testing. +Puppet::Util::Log.newdesttype :array do + match "Array" + + def initialize(messages) + @messages = messages + end + + def handle(msg) + @messages << msg + end +end + diff --git a/mcollective/lib/puppet/util/log_paths.rb b/mcollective/lib/puppet/util/log_paths.rb new file mode 100644 index 000000000..2fefd4505 --- /dev/null +++ b/mcollective/lib/puppet/util/log_paths.rb @@ -0,0 +1,27 @@ +# Created by Luke Kanies on 2007-07-04. +# Copyright (c) 2007. All rights reserved. + +module Puppet::Util::LogPaths + # return the full path to us, for logging and rollback + # some classes (e.g., FileTypeRecords) will have to override this + def path + @path ||= pathbuilder + + "/" + @path.join("/") + end + + def source_descriptors + descriptors = {} + + descriptors[:tags] = tags + + [:path, :file, :line].each do |param| + next unless value = send(param) + descriptors[param] = value + end + + descriptors + end + +end + diff --git a/mcollective/lib/puppet/util/logging.rb b/mcollective/lib/puppet/util/logging.rb new file mode 100644 index 000000000..bc52b17f0 --- /dev/null +++ b/mcollective/lib/puppet/util/logging.rb @@ -0,0 +1,40 @@ +# A module to make logging a bit easier. +require 'puppet/util/log' + +module Puppet::Util::Logging + + def send_log(level, message) + Puppet::Util::Log.create({:level => level, :source => log_source, :message => message}.merge(log_metadata)) + end + + # Create a method for each log level. + Puppet::Util::Log.eachlevel do |level| + define_method(level) do |args| + args = args.join(" ") if args.is_a?(Array) + send_log(level, args) + end + end + + private + + def is_resource? + defined?(Puppet::Type) && is_a?(Puppet::Type) + end + + def is_resource_parameter? 
+ defined?(Puppet::Parameter) && is_a?(Puppet::Parameter) + end + + def log_metadata + [:file, :line, :tags].inject({}) do |result, attr| + result[attr] = send(attr) if respond_to?(attr) + result + end + end + + def log_source + # We need to guard the existence of the constants, since this module is used by the base Puppet module. + (is_resource? or is_resource_parameter?) and respond_to?(:path) and return path.to_s + to_s + end +end diff --git a/mcollective/lib/puppet/util/metaid.rb b/mcollective/lib/puppet/util/metaid.rb new file mode 100644 index 000000000..4092e4fda --- /dev/null +++ b/mcollective/lib/puppet/util/metaid.rb @@ -0,0 +1,21 @@ +class Object + # The hidden singleton lurks behind everyone + def singleton_class; class << self; self; end; end + def meta_eval(&blk); singleton_class.instance_eval(&blk); end + + # Adds methods to a singleton_class + def meta_def(name, &blk) + meta_eval { define_method name, &blk } + end + + # Remove singleton_class methods. + def meta_undef(name, &blk) + meta_eval { remove_method name } + end + + # Defines an instance method within a class + def class_def(name, &blk) + class_eval { define_method name, &blk } + end +end + diff --git a/mcollective/lib/puppet/util/methodhelper.rb b/mcollective/lib/puppet/util/methodhelper.rb new file mode 100644 index 000000000..2dd3afd79 --- /dev/null +++ b/mcollective/lib/puppet/util/methodhelper.rb @@ -0,0 +1,32 @@ +# Where we store helper methods related to, um, methods. +module Puppet::Util::MethodHelper + def requiredopts(*names) + names.each do |name| + devfail("#{name} is a required option for #{self.class}") if self.send(name).nil? + end + end + + # Iterate over a hash, treating each member as an attribute. + def set_options(options) + options.each do |param,value| + method = param.to_s + "=" + if respond_to? method + self.send(method, value) + else + raise ArgumentError, "Invalid parameter #{param} to object class #{self.class}" + end + end + end + + # Take a hash and convert all of the keys to symbols if possible. + def symbolize_options(options) + options.inject({}) do |hash, opts| + if opts[0].respond_to? :intern + hash[opts[0].intern] = opts[1] + else + hash[opts[0]] = opts[1] + end + hash + end + end +end diff --git a/mcollective/lib/puppet/util/metric.rb b/mcollective/lib/puppet/util/metric.rb new file mode 100644 index 000000000..835e1d610 --- /dev/null +++ b/mcollective/lib/puppet/util/metric.rb @@ -0,0 +1,186 @@ +# included so we can test object types +require 'puppet' + +# A class for handling metrics. This is currently ridiculously hackish. +class Puppet::Util::Metric + + attr_accessor :type, :name, :value, :label + attr_writer :values + + attr_writer :basedir + + # Return a specific value + def [](name) + if value = @values.find { |v| v[0] == name } + return value[2] + else + return 0 + end + end + + def basedir + if defined?(@basedir) + @basedir + else + Puppet[:rrddir] + end + end + + def create(start = nil) + Puppet.settings.use(:main, :metrics) + + start ||= Time.now.to_i - 5 + + args = [] + + if Puppet.features.rrd_legacy? && ! Puppet.features.rrd? + @rrd = RRDtool.new(self.path) + end + + values.each { |value| + # the 7200 is the heartbeat -- this means that any data that isn't + # more frequently than every two hours gets thrown away + args.push "DS:#{value[0]}:GAUGE:7200:U:U" + } + args.push "RRA:AVERAGE:0.5:1:300" + + begin + if Puppet.features.rrd_legacy? && ! Puppet.features.rrd? 
+ @rrd.create( Puppet[:rrdinterval].to_i, start, args) + else + RRD.create( self.path, '-s', Puppet[:rrdinterval].to_i.to_s, '-b', start.to_i.to_s, *args) + end + rescue => detail + raise "Could not create RRD file #{path}: #{detail}" + end + end + + def dump + if Puppet.features.rrd_legacy? && ! Puppet.features.rrd? + puts @rrd.info + else + puts RRD.info(self.path) + end + end + + def graph(range = nil) + unless Puppet.features.rrd? || Puppet.features.rrd_legacy? + Puppet.warning "RRD library is missing; cannot graph metrics" + return + end + + unit = 60 * 60 * 24 + colorstack = %w{#00ff00 #ff0000 #0000ff #ffff00 #ff99ff #ff9966 #66ffff #990000 #099000 #000990 #f00990 #0f0f0f #555555 #333333 #ffffff} + + {:daily => unit, :weekly => unit * 7, :monthly => unit * 30, :yearly => unit * 365}.each do |name, time| + file = self.path.sub(/\.rrd$/, "-#{name}.png") + args = [file] + + args.push("--title",self.label) + args.push("--imgformat","PNG") + args.push("--interlace") + i = 0 + defs = [] + lines = [] + #p @values.collect { |s,l| s } + values.zip(colorstack).each { |value,color| + next if value.nil? + # this actually uses the data label + defs.push("DEF:#{value[0]}=#{self.path}:#{value[0]}:AVERAGE") + lines.push("LINE2:#{value[0]}#{color}:#{value[1]}") + } + args << defs + args << lines + args.flatten! + if range + if Puppet.features.rrd_legacy? && ! Puppet.features.rrd? + args.push("--start",range[0],"--end",range[1]) + else + args.push("--start",range[0].to_i.to_s,"--end",range[1].to_i.to_s) + end + else + if Puppet.features.rrd_legacy? && ! Puppet.features.rrd? + args.push("--start", Time.now.to_i - time, "--end", Time.now.to_i) + else + args.push("--start", (Time.now.to_i - time).to_s, "--end", Time.now.to_i.to_s) + end + end + + begin + #Puppet.warning "args = #{args}" + if Puppet.features.rrd_legacy? && ! Puppet.features.rrd? + RRDtool.graph( args ) + else + RRD.graph( *args ) + end + rescue => detail + Puppet.err "Failed to graph #{self.name}: #{detail}" + end + end + end + + def initialize(name,label = nil) + @name = name.to_s + + @label = label || self.class.labelize(name) + + @values = [] + end + + def path + File.join(self.basedir, @name + ".rrd") + end + + def newvalue(name,value,label = nil) + raise ArgumentError.new("metric name #{name.inspect} is not a string") unless name.is_a? String + label ||= self.class.labelize(name) + @values.push [name,label,value] + end + + def store(time) + unless Puppet.features.rrd? || Puppet.features.rrd_legacy? + Puppet.warning "RRD library is missing; cannot store metrics" + return + end + self.create(time - 5) unless FileTest.exists?(self.path) + + if Puppet.features.rrd_legacy? && ! Puppet.features.rrd? + @rrd ||= RRDtool.new(self.path) + end + + # XXX this is not terribly error-resistant + args = [time] + temps = [] + values.each { |value| + #Puppet.warning "value[0]: #{value[0]}; value[1]: #{value[1]}; value[2]: #{value[2]}; " + args.push value[2] + temps.push value[0] + } + arg = args.join(":") + template = temps.join(":") + begin + if Puppet.features.rrd_legacy? && ! Puppet.features.rrd? + @rrd.update( template, [ arg ] ) + else + RRD.update( self.path, '-t', template, arg ) + end + #system("rrdtool updatev #{self.path} '#{arg}'") + rescue => detail + raise Puppet::Error, "Failed to update #{self.name}: #{detail}" + end + end + + def values + @values.sort { |a, b| a[1] <=> b[1] } + end + + # Convert a name into a label. 
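+  # For example (an illustrative call, not taken from the original source):
+  #   labelize(:config_retrieval) #=> "Config retrieval"
+  #   labelize("total_time")      #=> "Total time"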
+ def self.labelize(name) + name.to_s.capitalize.gsub("_", " ") + end +end + +# This is necessary because we changed the class path in early 2007, +# and reports directly yaml-dump these metrics, so both client and server +# have to agree on the class name. +Puppet::Metric = Puppet::Util::Metric diff --git a/mcollective/lib/puppet/util/monkey_patches.rb b/mcollective/lib/puppet/util/monkey_patches.rb new file mode 100644 index 000000000..2bd2c80aa --- /dev/null +++ b/mcollective/lib/puppet/util/monkey_patches.rb @@ -0,0 +1,140 @@ + +unless defined? JRUBY_VERSION + Process.maxgroups = 1024 +end + +module RDoc + def self.caller(skip=nil) + in_gem_wrapper = false + Kernel.caller.reject { |call| + in_gem_wrapper ||= call =~ /#{Regexp.escape $0}:\d+:in `load'/ + } + end +end + + +require "yaml" +require "puppet/util/zaml.rb" + +class Symbol + def to_zaml(z) + z.emit("!ruby/sym ") + to_s.to_zaml(z) + end + def <=> (other) + self.to_s <=> other.to_s + end +end + +[Object, Exception, Integer, Struct, Date, Time, Range, Regexp, Hash, Array, Float, String, FalseClass, TrueClass, Symbol, NilClass, Class].each { |cls| + cls.class_eval do + def to_yaml(ignored=nil) + ZAML.dump(self) + end + end +} + +def YAML.dump(*args) + ZAML.dump(*args) +end + +# +# Workaround for bug in MRI 1.8.7, see +# http://redmine.ruby-lang.org/issues/show/2708 +# for details +# +if RUBY_VERSION == '1.8.7' + class NilClass + def closed? + true + end + end +end + +class Object + # The following code allows callers to make assertions that are only + # checked when the environment variable PUPPET_ENABLE_ASSERTIONS is + # set to a non-empty string. For example: + # + # assert_that { condition } + # assert_that(message) { condition } + if ENV["PUPPET_ENABLE_ASSERTIONS"].to_s != '' + def assert_that(message = nil) + unless yield + raise Exception.new("Assertion failure: #{message}") + end + end + else + def assert_that(message = nil) + end + end +end + +# Workaround for yaml_initialize, which isn't supported before Ruby +# 1.8.3. +if RUBY_VERSION == '1.8.1' || RUBY_VERSION == '1.8.2' + YAML.add_ruby_type( /^object/ ) { |tag, val| + type, obj_class = YAML.read_type_class( tag, Object ) + r = YAML.object_maker( obj_class, val ) + if r.respond_to? :yaml_initialize + r.instance_eval { instance_variables.each { |name| remove_instance_variable name } } + r.yaml_initialize(tag, val) + end + r + } +end + +class Array + # Ruby < 1.8.7 doesn't have this method but we use it in tests + def combination(num) + return [] if num < 0 || num > size + return [[]] if num == 0 + return map{|e| [e] } if num == 1 + tmp = self.dup + self[0, size - (num - 1)].inject([]) do |ret, e| + tmp.shift + ret += tmp.combination(num - 1).map{|a| a.unshift(e) } + end + end unless method_defined? :combination + + alias :count :length unless method_defined? :count +end + + +class Symbol + def to_proc + Proc.new { |*args| args.shift.__send__(self, *args) } + end unless method_defined? :to_proc +end + +module Enumerable + # Use *args so we can distinguish no argument from nil. + def count(*args) + seq = 0 + if !args.empty? + item = args[0] + each { |o| seq += 1 if item == o } + elsif block_given? + each { |o| seq += 1 if yield(o) } + else + each { seq += 1 } + end + seq + end unless method_defined? :count +end + +class String + def lines(separator = $/) + lines = split(separator) + block_given? and lines.each {|line| yield line } + lines + end +end + +class IO + def lines(separator = $/) + lines = split(separator) + block_given? 
and lines.each {|line| yield line } + lines + end +end diff --git a/mcollective/lib/puppet/util/nagios_maker.rb b/mcollective/lib/puppet/util/nagios_maker.rb new file mode 100644 index 000000000..863fe24fa --- /dev/null +++ b/mcollective/lib/puppet/util/nagios_maker.rb @@ -0,0 +1,60 @@ +require 'puppet/external/nagios' +require 'puppet/external/nagios/base' +require 'puppet/provider/naginator' + +module Puppet::Util::NagiosMaker + # Create a new nagios type, using all of the parameters + # from the parser. + def self.create_nagios_type(name) + name = name.to_sym + full_name = ("nagios_#{name}").to_sym + + raise(Puppet::DevError, "No nagios type for #{name}") unless nagtype = Nagios::Base.type(name) + + type = Puppet::Type.newtype(full_name) {} + + type.ensurable + + type.newparam(nagtype.namevar, :namevar => true) do + desc "The name parameter for Nagios type #{nagtype.name}" + end + + # We deduplicate the parameters because it makes sense to allow Naginator to have dupes. + nagtype.parameters.uniq.each do |param| + next if param == nagtype.namevar + + # We can't turn these parameter names into constants, so at least for now they aren't + # supported. + next if param.to_s =~ /^[0-9]/ + + type.newproperty(param) do + desc "Nagios configuration file parameter." + end + end + + type.newproperty(:target) do + desc 'target' + + defaultto do + resource.class.defaultprovider.default_target + end + end + + target = "/etc/nagios/#{full_name.to_s}.cfg" + provider = type.provide(:naginator, :parent => Puppet::Provider::Naginator, :default_target => target) {} + provider.nagios_type + + type.desc "The Nagios type #{name.to_s}. This resource type is autogenerated using the + model developed in Naginator, and all of the Nagios types are generated using the + same code and the same library. + + This type generates Nagios configuration statements in Nagios-parseable configuration + files. By default, the statements will be added to `#{target}`, but + you can send them to a different file by setting their `target` attribute. + + You can purge Nagios resources using the `resources` type, but *only* + in the default file locations. This is an architectural limitation. + + " + end +end diff --git a/mcollective/lib/puppet/util/package.rb b/mcollective/lib/puppet/util/package.rb new file mode 100644 index 000000000..5f1fc7465 --- /dev/null +++ b/mcollective/lib/puppet/util/package.rb @@ -0,0 +1,31 @@ +module Puppet::Util::Package + def versioncmp(version_a, version_b) + vre = /[-.]|\d+|[^-.\d]+/ + ax = version_a.scan(vre) + bx = version_b.scan(vre) + + while (ax.length>0 && bx.length>0) + a = ax.shift + b = bx.shift + + if( a == b ) then next + elsif (a == '-' && b == '-') then next + elsif (a == '-') then return -1 + elsif (b == '-') then return 1 + elsif (a == '.' && b == '.') then next + elsif (a == '.' ) then return -1 + elsif (b == '.' ) then return 1 + elsif (a =~ /^\d+$/ && b =~ /^\d+$/) then + if( a =~ /^0/ or b =~ /^0/ ) then + return a.to_s.upcase <=> b.to_s.upcase + end + return a.to_i <=> b.to_i + else + return a.upcase <=> b.upcase + end + end + version_a <=> version_b; + end + + module_function :versioncmp +end diff --git a/mcollective/lib/puppet/util/pidlock.rb b/mcollective/lib/puppet/util/pidlock.rb new file mode 100644 index 000000000..05e1459d0 --- /dev/null +++ b/mcollective/lib/puppet/util/pidlock.rb @@ -0,0 +1,68 @@ +require 'fileutils' + +class Puppet::Util::Pidlock + attr_reader :lockfile + + def initialize(lockfile) + @lockfile = lockfile + end + + def locked? 
+ clear_if_stale + File.exists? @lockfile + end + + def mine? + Process.pid == lock_pid + end + + def anonymous? + return false unless File.exists?(@lockfile) + File.read(@lockfile) == "" + end + + def lock(opts = {}) + opts = {:anonymous => false}.merge(opts) + + if locked? + mine? + else + if opts[:anonymous] + File.open(@lockfile, 'w') { |fd| true } + else + File.open(@lockfile, "w") { |fd| fd.write(Process.pid) } + end + true + end + end + + def unlock(opts = {}) + opts = {:anonymous => false}.merge(opts) + + if mine? or (opts[:anonymous] and anonymous?) + File.unlink(@lockfile) + true + else + false + end + end + + private + def lock_pid + if File.exists? @lockfile + File.read(@lockfile).to_i + else + nil + end + end + + def clear_if_stale + return if lock_pid.nil? + + begin + Process.kill(0, lock_pid) + rescue Errno::ESRCH + File.unlink(@lockfile) + end + end +end diff --git a/mcollective/lib/puppet/util/plugins.rb b/mcollective/lib/puppet/util/plugins.rb new file mode 100644 index 000000000..105fdcd75 --- /dev/null +++ b/mcollective/lib/puppet/util/plugins.rb @@ -0,0 +1,82 @@ +# +# This system manages an extensible set of metadata about plugins which it +# collects by searching for files named "plugin_init.rb" in a series of +# directories. Initially, these are simply the $LOAD_PATH. +# +# The contents of each file found is executed in the context of a Puppet::Plugins +# object (and thus scoped). An example file might contain: +# +# ------------------------------------------------------- +# @name = "Greet the CA" +# +# @description = %q{ +# This plugin causes a friendly greeting to print out on a master +# that is operating as the CA, after it has been set up but before +# it does anything. +# } +# +# def after_application_setup(options) +# if options[:application_object].is_a?(Puppet::Application::Master) && Puppet::SSL::CertificateAuthority.ca? +# puts "Hey, this is the CA!" +# end +# end +# ------------------------------------------------------- +# +# Note that the instance variables are local to this Puppet::Plugin (and so may be used +# for maintaining state, etc.) but the plugin system does not provide any thread safety +# assurances, so they may not be adequate for some complex use cases. +# +# +module Puppet + class Plugins + Paths = [] # Where we might find plugin initialization code + Loaded = [] # Code we have found (one-to-one with paths once searched) + # + # Return all the Puppet::Plugins we know about, searching any new paths + # + def self.known + Paths[Loaded.length...Paths.length].each { |path| + file = File.join(path,'plugin_init.rb') + Loaded << (File.exist?(file) && new(file)) + } + Loaded.compact + end + # + # Add more places to look for plugins without adding duplicates or changing the + # order of ones we've already found. + # + def self.look_in(*paths) + Paths.replace Paths | paths.flatten.collect { |path| File.expand_path(path) } + end + # + # Initially just look in $LOAD_PATH + # + look_in $LOAD_PATH + # + # Calling methods (hooks) on the class calls the method of the same name on + # all plugins that use that hook, passing in the same arguments to each + # and returning an array containing the results returned by each plugin as + # an array of [plugin_name,result] pairs. + # + def self.method_missing(hook,*args,&block) + known. + select { |p| p.respond_to? hook }. 
+ collect { |p| [p.name,p.send(hook,*args,&block)] } + end + # + # + # + attr_reader :path,:name + def initialize(path) + @name = @path = path + class << self + private + def define_hooks + eval File.read(path),nil,path,1 + end + end + define_hooks + end + end +end + diff --git a/mcollective/lib/puppet/util/posix.rb b/mcollective/lib/puppet/util/posix.rb new file mode 100755 index 000000000..4cabe24af --- /dev/null +++ b/mcollective/lib/puppet/util/posix.rb @@ -0,0 +1,137 @@ +# Utility methods for interacting with POSIX objects; mostly user and group +module Puppet::Util::POSIX + + # Retrieve a field from a POSIX Etc object. The id can be either an integer + # or a name. This only works for users and groups. It's also broken on + # some platforms, unfortunately, which is why we fall back to the other + # method search_posix_field in the gid and uid methods if a sanity check + # fails + def get_posix_field(space, field, id) + raise Puppet::DevError, "Did not get id from caller" unless id + + if id.is_a?(Integer) + if id > Puppet[:maximum_uid].to_i + Puppet.err "Tried to get #{field} field for silly id #{id}" + return nil + end + method = methodbyid(space) + else + method = methodbyname(space) + end + + begin + return Etc.send(method, id).send(field) + rescue ArgumentError => detail + # ignore it; we couldn't find the object + return nil + end + end + + # A degenerate method of retrieving name/id mappings. The job of this method is + # to retrieve all objects of a certain type, search for a specific entry + # and then return a given field from that entry. + def search_posix_field(type, field, id) + idmethod = idfield(type) + integer = false + if id.is_a?(Integer) + integer = true + if id > Puppet[:maximum_uid].to_i + Puppet.err "Tried to get #{field} field for silly id #{id}" + return nil + end + end + + Etc.send(type) do |object| + if integer and object.send(idmethod) == id + return object.send(field) + elsif object.name == id + return object.send(field) + end + end + + # Apparently the group/passwd methods need to get reset; if we skip + # this call, then new users aren't found. + case type + when :passwd; Etc.send(:endpwent) + when :group; Etc.send(:endgrent) + end + nil + end + + # Determine what the field name is for users and groups. 
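+  # For example (illustrative):
+  #   idfield(:user)  #=> :uid
+  #   idfield(:group) #=> :gid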
+ def idfield(space) + case Puppet::Util.symbolize(space) + when :gr, :group; return :gid + when :pw, :user, :passwd; return :uid + else + raise ArgumentError.new("Can only handle users and groups") + end + end + + # Determine what the method is to get users and groups by id + def methodbyid(space) + case Puppet::Util.symbolize(space) + when :gr, :group; return :getgrgid + when :pw, :user, :passwd; return :getpwuid + else + raise ArgumentError.new("Can only handle users and groups") + end + end + + # Determine what the method is to get users and groups by name + def methodbyname(space) + case Puppet::Util.symbolize(space) + when :gr, :group; return :getgrnam + when :pw, :user, :passwd; return :getpwnam + else + raise ArgumentError.new("Can only handle users and groups") + end + end + + # Get the GID of a given group, provided either a GID or a name + def gid(group) + begin + group = Integer(group) + rescue ArgumentError + # pass + end + if group.is_a?(Integer) + return nil unless name = get_posix_field(:group, :name, group) + gid = get_posix_field(:group, :gid, name) + check_value = gid + else + return nil unless gid = get_posix_field(:group, :gid, group) + name = get_posix_field(:group, :name, gid) + check_value = name + end + if check_value != group + return search_posix_field(:group, :gid, group) + else + return gid + end + end + + # Get the UID of a given user, whether a UID or name is provided + def uid(user) + begin + user = Integer(user) + rescue ArgumentError + # pass + end + if user.is_a?(Integer) + return nil unless name = get_posix_field(:passwd, :name, user) + uid = get_posix_field(:passwd, :uid, name) + check_value = uid + else + return nil unless uid = get_posix_field(:passwd, :uid, user) + name = get_posix_field(:passwd, :name, uid) + check_value = name + end + if check_value != user + return search_posix_field(:passwd, :uid, user) + else + return uid + end + end +end + diff --git a/mcollective/lib/puppet/util/provider_features.rb b/mcollective/lib/puppet/util/provider_features.rb new file mode 100644 index 000000000..30e8dcb39 --- /dev/null +++ b/mcollective/lib/puppet/util/provider_features.rb @@ -0,0 +1,169 @@ +# Provides feature definitions. +require 'puppet/util/methodhelper' +require 'puppet/util/docs' +require 'puppet/util' +module Puppet::Util::ProviderFeatures + include Puppet::Util::Docs + # The class that models the features and handles checking whether the features + # are present. + class ProviderFeature + include Puppet::Util + include Puppet::Util::MethodHelper + include Puppet::Util::Docs + attr_accessor :name, :docs, :methods + + # Are all of the requirements met? + def available?(obj) + if self.methods + return !!methods_available?(obj) + else + # In this case, the provider has to declare support for this + # feature, and that's been checked before we ever get to the + # method checks. + return false + end + end + + def initialize(name, docs, hash) + self.name = symbolize(name) + self.docs = docs + hash = symbolize_options(hash) + set_options(hash) + end + + private + + # Are all of the required methods available? + def methods_available?(obj) + methods.each do |m| + if obj.is_a?(Class) + return false unless obj.public_method_defined?(m) + else + return false unless obj.respond_to?(m) + end + end + true + end + end + + # Define one or more features. At a minimum, features require a name + # and docs, and at this point they should also specify a list of methods + # required to determine if the feature is present. 
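+  #
+  # An illustrative declaration (the feature name and method list here are
+  # hypothetical, not taken from the original source):
+  #
+  #   feature :manages_homedir,
+  #     "The provider can manage home directories.",
+  #     :methods => [:manages_homedir?]
+  #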
+ def feature(name, docs, hash = {}) + @features ||= {} + raise(Puppet::DevError, "Feature #{name} is already defined") if @features.include?(name) + begin + obj = ProviderFeature.new(name, docs, hash) + @features[obj.name] = obj + rescue ArgumentError => detail + error = ArgumentError.new( + "Could not create feature #{name}: #{detail}" + ) + error.set_backtrace(detail.backtrace) + raise error + end + end + + # Return a hash of all feature documentation. + def featuredocs + str = "" + @features ||= {} + return nil if @features.empty? + names = @features.keys.sort { |a,b| a.to_s <=> b.to_s } + names.each do |name| + doc = @features[name].docs.gsub(/\n\s+/, " ") + str += "- *#{name}*: #{doc}\n" + end + + if providers.length > 0 + headers = ["Provider", names].flatten + data = {} + providers.each do |provname| + data[provname] = [] + prov = provider(provname) + names.each do |name| + if prov.feature?(name) + data[provname] << "*X*" + else + data[provname] << "" + end + end + end + str += doctable(headers, data) + end + str + end + + # Return a list of features. + def features + @features ||= {} + @features.keys + end + + # Generate a module that sets up the boolean methods to test for given + # features. + def feature_module + unless defined?(@feature_module) + @features ||= {} + @feature_module = ::Module.new + const_set("FeatureModule", @feature_module) + features = @features + # Create a feature? method that can be passed a feature name and + # determine if the feature is present. + @feature_module.send(:define_method, :feature?) do |name| + method = name.to_s + "?" + return !!(respond_to?(method) and send(method)) + end + + # Create a method that will list all functional features. + @feature_module.send(:define_method, :features) do + return false unless defined?(features) + features.keys.find_all { |n| feature?(n) }.sort { |a,b| + a.to_s <=> b.to_s + } + end + + # Create a method that will determine if a provided list of + # features are satisfied by the curred provider. + @feature_module.send(:define_method, :satisfies?) do |*needed| + ret = true + needed.flatten.each do |feature| + unless feature?(feature) + ret = false + break + end + end + ret + end + + # Create a boolean method for each feature so you can test them + # individually as you might need. + @features.each do |name, feature| + method = name.to_s + "?" + @feature_module.send(:define_method, method) do + (is_a?(Class) ? declared_feature?(name) : self.class.declared_feature?(name)) or feature.available?(self) + end + end + + # Allow the provider to declare that it has a given feature. + @feature_module.send(:define_method, :has_features) do |*names| + @declared_features ||= [] + names.each do |name| + name = symbolize(name) + @declared_features << name + end + end + # Aaah, grammatical correctness + @feature_module.send(:alias_method, :has_feature, :has_features) + end + @feature_module + end + + # Return the actual provider feature instance. Really only used for testing. + def provider_feature(name) + return nil unless defined?(@features) + + @features[name] + end +end + diff --git a/mcollective/lib/puppet/util/pson.rb b/mcollective/lib/puppet/util/pson.rb new file mode 100644 index 000000000..1441069c0 --- /dev/null +++ b/mcollective/lib/puppet/util/pson.rb @@ -0,0 +1,13 @@ +# A simple module to provide consistency between how we use PSON and how +# ruby expects it to be used. Basically, we don't want to require +# that the sender specify a class. 
+# Ruby wants everyone to provide a 'type' field, and the PSON support +# requires such a field to track the class down. Because we use our URL to +# figure out what class we're working on, we don't need that, and we don't want +# our consumers and producers to need to know anything about our internals. +module Puppet::Util::Pson + def pson_create(pson) + raise ArgumentError, "No data provided in pson data" unless pson['data'] + from_pson(pson['data']) + end +end diff --git a/mcollective/lib/puppet/util/queue.rb b/mcollective/lib/puppet/util/queue.rb new file mode 100644 index 000000000..636bdcf2e --- /dev/null +++ b/mcollective/lib/puppet/util/queue.rb @@ -0,0 +1,96 @@ + +require 'puppet/indirector' +require 'puppet/util/instance_loader' + +# Implements a message queue client type plugin registry for use by the indirector facility. +# Client modules for speaking a particular protocol (e.g. Stomp::Client for Stomp message +# brokers, Memcached for Starling and Sparrow, etc.) register themselves with this module. +# +# Client classes are expected to live under the Puppet::Util::Queue namespace and corresponding +# directory; the attempted use of a client via its typename (see below) will cause Puppet::Util::Queue +# to attempt to load the corresponding plugin if it is not yet loaded. The client class registers itself +# with Puppet::Util::Queue and should use the same type name as the autloader expects for the plugin file. +# class Puppet::Util::Queue::SpecialMagicalClient < Messaging::SpecialMagic +# ... +# Puppet::Util::Queue.register_queue_type_class(self) +# end +# +# This module reduces the rightmost segment of the class name into a pretty symbol that will +# serve as the queuing client's name. Which means that the "SpecialMagicalClient" above will +# be named :special_magical_client within the registry. +# +# Another class/module may mix-in this module, and may then make use of the registered clients. +# class Queue::Fue +# # mix it in at the class object level rather than instance level +# extend ::Puppet::Util::Queue +# end +# +# Queue::Fue instances can get a message queue client through the registry through the mixed-in method +# +client+, which will return a class-wide singleton client instance, determined by +client_class+. +# +# The client plugins are expected to implement an interface similar to that of Stomp::Client: +# * new should return a connected, ready-to-go client instance. Note that no arguments are passed in. +# * publish_message(queue, message) should publish the _message_ to the specified _queue_. +# * subscribe(queue) _block_ subscribes to _queue_ and executes _block_ upon receiving a message. +# * _queue_ names are simple names independent of the message broker or client library. No "/queue/" prefixes like in Stomp::Client. +module Puppet::Util::Queue + extend Puppet::Util::InstanceLoader + instance_load :queue_clients, 'puppet/util/queue' + + # Adds a new class/queue-type pair to the registry. The _type_ argument is optional; if not provided, + # _type_ defaults to a lowercased, underscored symbol programmatically derived from the rightmost + # namespace of klass.name. + # + # # register with default name +:you+ + # register_queue_type(Foo::You) + # + # # register with explicit queue type name +:myself+ + # register_queue_type(Foo::Me, :myself) + # + # If the type is already registered, an exception is thrown. 
No checking is performed of _klass_, + # however; a given class could be registered any number of times, as long as the _type_ differs with + # each registration. + def self.register_queue_type(klass, type = nil) + type ||= queue_type_from_class(klass) + raise Puppet::Error, "Queue type #{type} is already registered" if instance_hash(:queue_clients).include?(type) + instance_hash(:queue_clients)[type] = klass + end + + # Given a queue type symbol, returns the associated +Class+ object. If the queue type is unknown + # (meaning it hasn't been registered with this module), an exception is thrown. + def self.queue_type_to_class(type) + c = loaded_instance :queue_clients, type + raise Puppet::Error, "Queue type #{type} is unknown." unless c + c + end + + # Given a class object _klass_, returns the programmatic default queue type name symbol for _klass_. + # The algorithm is as shown in earlier examples; the last namespace segment of _klass.name_ is taken + # and converted from mixed case to underscore-separated lowercase, and interned. + # queue_type_from_class(Foo) -> :foo + # queue_type_from_class(Foo::Too) -> :too + # queue_type_from_class(Foo::ForYouTwo) -> :for_you_too + # + # The implicit assumption here, consistent with Puppet's approach to plugins in general, + # is that all your client modules live in the same namespace, such that reduction to + # a flat namespace of symbols is reasonably safe. + def self.queue_type_from_class(klass) + # convert last segment of classname from studly caps to lower case with underscores, and symbolize + klass.name.split('::').pop.sub(/^[A-Z]/) {|c| c.downcase}.gsub(/[A-Z]/) {|c| '_' + c.downcase }.intern + end + + # The class object for the client to be used, determined by queue configuration + # settings. + # Looks to the :queue_type configuration entry in the running application for + # the default queue type to use. + def client_class + Puppet::Util::Queue.queue_type_to_class(Puppet[:queue_type]) + end + + # Returns (instantiating as necessary) the singleton queue client instance, according to the + # client_class. No arguments go to the client class constructor, meaning its up to the client class + # to know how to determine its queue message source (presumably through Puppet configuration data). + def client + @client ||= client_class.new + end +end diff --git a/mcollective/lib/puppet/util/queue/stomp.rb b/mcollective/lib/puppet/util/queue/stomp.rb new file mode 100644 index 000000000..cabc56627 --- /dev/null +++ b/mcollective/lib/puppet/util/queue/stomp.rb @@ -0,0 +1,47 @@ +require 'puppet/util/queue' +require 'stomp' +require 'uri' + +# Implements the Ruby Stomp client as a queue type within the Puppet::Indirector::Queue::Client +# registry, for use with the :queue indirection terminus type. 
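+# (Illustrative note, inferred from #initialize below: the URI scheme is
+# checked, so a working setting would look like
+# Puppet[:queue_source] = "stomp://user:password@broker.example.com:61613";
+# the host and port here are hypothetical.)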
+# +# Looks to Puppet[:queue_source] for the sole argument to the underlying Stomp::Client constructor; +# consequently, for this client to work, Puppet[:queue_source] must use the Stomp::Client URL-like +# syntax for identifying the Stomp message broker: login:pass@host.port +class Puppet::Util::Queue::Stomp + attr_accessor :stomp_client + + def initialize + begin + uri = URI.parse(Puppet[:queue_source]) + rescue => detail + raise ArgumentError, "Could not create Stomp client instance - queue source #{Puppet[:queue_source]} is invalid: #{detail}" + end + unless uri.scheme == "stomp" + raise ArgumentError, "Could not create Stomp client instance - queue source #{Puppet[:queue_source]} is not a Stomp URL: #{detail}" + end + + begin + self.stomp_client = Stomp::Client.new(uri.user, uri.password, uri.host, uri.port, true) + rescue => detail + raise ArgumentError, "Could not create Stomp client instance with queue source #{Puppet[:queue_source]}: got internal Stomp client error #{detail}" + end + end + + def publish_message(target, msg) + stomp_client.publish(stompify_target(target), msg, :persistent => true) + end + + def subscribe(target) + stomp_client.subscribe(stompify_target(target), :ack => :client) do |stomp_message| + yield(stomp_message.body) + stomp_client.acknowledge(stomp_message) + end + end + + def stompify_target(target) + '/queue/' + target.to_s + end + + Puppet::Util::Queue.register_queue_type(self, :stomp) +end diff --git a/mcollective/lib/puppet/util/rails/cache_accumulator.rb b/mcollective/lib/puppet/util/rails/cache_accumulator.rb new file mode 100644 index 000000000..4b3717f79 --- /dev/null +++ b/mcollective/lib/puppet/util/rails/cache_accumulator.rb @@ -0,0 +1,65 @@ +require 'puppet/util' + +module Puppet::Util::CacheAccumulator + def self.included(klass) + klass.extend ClassMethods + end + + class Base + attr_reader :klass, :attribute + + def initialize(klass, attribute) + @klass = klass + @attribute = attribute + @find_or_create = "find_or_create_by_#{@attribute.to_s}".intern + end + + def store + @store || reset + end + + def reset + @store = {} + end + + def find(*keys) + result = nil + if keys.length == 1 + result = store[keys[0]] ||= @klass.send(@find_or_create, *keys) + else + found, missing = keys.partition {|k| store.include? 
k} + result = found.length + result += do_multi_find(missing) if missing.length > 0 + end + result + end + + def do_multi_find(keys) + result = 0 + @klass.find(:all, :conditions => {@attribute => keys}).each do |obj| + store[obj.send(@attribute)] = obj + result += 1 + end + result + end + end + + module ClassMethods + def accumulates(*attributes) + attributes.each {|attrib| install_accumulator(attrib)} + end + + def accumulators + @accumulators ||= {} + end + + def install_accumulator(attribute) + self.accumulators[attribute] = Base.new(self, attribute) + module_eval %{ + def self.accumulate_by_#{attribute.to_s}(*keys) + accumulators[:#{attribute.to_s}].find(*keys) + end + } + end + end +end diff --git a/mcollective/lib/puppet/util/rails/collection_merger.rb b/mcollective/lib/puppet/util/rails/collection_merger.rb new file mode 100644 index 000000000..f95d3d3bd --- /dev/null +++ b/mcollective/lib/puppet/util/rails/collection_merger.rb @@ -0,0 +1,17 @@ +module Puppet::Util::CollectionMerger + def ar_hash_merge(db_hash, mem_hash, args) + (db_hash.keys | mem_hash.keys).each do |key| + if (db_hash[key] && mem_hash[key]) + # in both, update value + args[:modify].call(db_hash[key], mem_hash[key]) + elsif (db_hash[key]) + # in db, not memory, delete from database + args[:delete].call(db_hash[key]) + else + # in mem, not in db, insert into the database + args[:create].call(key, mem_hash[key]) + end + end + end +end + diff --git a/mcollective/lib/puppet/util/rails/reference_serializer.rb b/mcollective/lib/puppet/util/rails/reference_serializer.rb new file mode 100644 index 000000000..9beeb0048 --- /dev/null +++ b/mcollective/lib/puppet/util/rails/reference_serializer.rb @@ -0,0 +1,32 @@ + +module Puppet::Util::ReferenceSerializer + def unserialize_value(val) + case val + when /^--- / + YAML.load(val) + when "true" + true + when "false" + false + else + val + end + end + + def serialize_value(val) + case val + when Puppet::Resource + YAML.dump(val) + when true, false + # The database does this for us, but I prefer the + # methods be their exact inverses. + # Note that this means quoted booleans get returned + # as actual booleans, but there doesn't appear to be + # a way to fix that while keeping the ability to + # search for parameters set to true. + val.to_s + else + val + end + end +end diff --git a/mcollective/lib/puppet/util/rdoc.rb b/mcollective/lib/puppet/util/rdoc.rb new file mode 100644 index 000000000..bdac579d6 --- /dev/null +++ b/mcollective/lib/puppet/util/rdoc.rb @@ -0,0 +1,92 @@ + +module Puppet::Util::RDoc + + module_function + + # launch a rdoc documenation process + # with the files/dir passed in +files+ + def rdoc(outputdir, files, charset = nil) + Puppet[:ignoreimport] = true + + # then rdoc + require 'rdoc/rdoc' + require 'rdoc/options' + + # load our parser + require 'puppet/util/rdoc/parser' + + r = RDoc::RDoc.new + + RDoc::RDoc::GENERATORS["puppet"] = RDoc::RDoc::Generator.new( + "puppet/util/rdoc/generators/puppet_generator.rb", + "PuppetGenerator".intern, + "puppet") + + # specify our own format & where to output + options = [ "--fmt", "puppet", + "--quiet", + "--exclude", "/modules/[^/]*/files/.*\.pp$", + "--op", outputdir ] + + options << "--force-update" if Options::OptionList.options.any? 
{ |o| o[0] == "--force-update" } + options += [ "--charset", charset] if charset + options += files + + # launch the documentation process + r.document(options) + rescue RDoc::RDocError => e + raise Puppet::ParseError.new("RDoc error #{e}") + end + + # launch a output to console manifest doc + def manifestdoc(files) + Puppet[:ignoreimport] = true + files.select { |f| FileTest.file?(f) }.each do |f| + parser = Puppet::Parser::Parser.new(Puppet::Node::Environment.new(Puppet[:environment])) + parser.file = f + ast = parser.parse + output(f, ast) + end + end + + # Ouputs to the console the documentation + # of a manifest + def output(file, ast) + astobj = [] + ast.nodes.each do |name, k| + astobj << k if k.file == file + end + + ast.hostclasses.each do |name,k| + astobj << k if k.file == file + end + + ast.definitions.each do |name, k| + astobj << k if k.file == file + end + astobj.sort! {|a,b| a.line <=> b.line }.each do |k| + output_astnode_doc(k) + end + end + + def output_astnode_doc(ast) + puts ast.doc if !ast.doc.nil? and !ast.doc.empty? + if Puppet.settings[:document_all] + # scan each underlying resources to produce documentation + code = ast.code.children if ast.code.is_a?(Puppet::Parser::AST::ASTArray) + code ||= ast.code + output_resource_doc(code) unless code.nil? + end + end + + def output_resource_doc(code) + code.sort { |a,b| a.line <=> b.line }.each do |stmt| + output_resource_doc(stmt.children) if stmt.is_a?(Puppet::Parser::AST::ASTArray) + + if stmt.is_a?(Puppet::Parser::AST::Resource) + puts stmt.doc if !stmt.doc.nil? and !stmt.doc.empty? + end + end + end + +end \ No newline at end of file diff --git a/mcollective/lib/puppet/util/rdoc/code_objects.rb b/mcollective/lib/puppet/util/rdoc/code_objects.rb new file mode 100644 index 000000000..3c789a0c5 --- /dev/null +++ b/mcollective/lib/puppet/util/rdoc/code_objects.rb @@ -0,0 +1,280 @@ +require 'rdoc/code_objects' + +module RDoc + + # This modules contains various class that are used to hold information + # about the various Puppet language structures we found while parsing. + # + # Those will be mapped to their html counterparts which are defined in + # PuppetGenerator. 
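+  # As a rough illustration (the module, fact, plugin and node names below are
+  # invented for this comment), a parser pass populates these objects along
+  # these lines:
+  #
+  #   mod = PuppetModule.new("apache")
+  #   mod.add_fact(Fact.new("apache_version", {}))
+  #   mod.add_plugin(Plugin.new("apache_vhost", "type"))
+  #   mod.add_node("web01.example.com", "basenode")
+  #
+  # PuppetGenerator then wraps each object in its HTML counterpart when the
+  # documentation is rendered.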
+ + # PuppetTopLevel is a top level (usually a .pp/.rb file) + class PuppetTopLevel < TopLevel + attr_accessor :module_name, :global + + # will contain all plugins + @@all_plugins = {} + + # contains all cutoms facts + @@all_facts = {} + + def initialize(toplevel) + super(toplevel.file_relative_name) + end + + def self.all_plugins + @@all_plugins.values + end + + def self.all_facts + @@all_facts.values + end + end + + # PuppetModule holds a Puppet Module + # This is mapped to an HTMLPuppetModule + # it leverage the RDoc (ruby) module infrastructure + class PuppetModule < NormalModule + attr_accessor :facts, :plugins + + def initialize(name,superclass=nil) + @facts = [] + @plugins = [] + super(name,superclass) + end + + def initialize_classes_and_modules + super + @nodes = {} + end + + def add_plugin(plugin) + add_to(@plugins, plugin) + end + + def add_fact(fact) + add_to(@facts, fact) + end + + def add_node(name,superclass) + cls = @nodes[name] + unless cls + cls = PuppetNode.new(name, superclass) + @nodes[name] = cls if !@done_documenting + cls.parent = self + cls.section = @current_section + end + cls + end + + def each_fact + @facts.each {|c| yield c} + end + + def each_plugin + @plugins.each {|c| yield c} + end + + def each_node + @nodes.each {|c| yield c} + end + + def nodes + @nodes.values + end + end + + # PuppetClass holds a puppet class + # It is mapped to a HTMLPuppetClass for display + # It leverages RDoc (ruby) Class + class PuppetClass < ClassModule + attr_accessor :resource_list, :requires, :childs, :realizes + + def initialize(name, superclass) + super(name,superclass) + @resource_list = [] + @requires = [] + @realizes = [] + @childs = [] + end + + def add_resource(resource) + add_to(@resource_list, resource) + end + + def is_module? + false + end + + def superclass=(superclass) + @superclass = superclass + end + + # we're (ab)using the RDoc require system here. + # we're adding a required Puppet class, overriding + # the RDoc add_require method which sees ruby required files. + def add_require(required) + add_to(@requires, required) + end + + def add_realize(realized) + add_to(@realizes, realized) + end + + def add_child(child) + @childs << child + end + + # Look up the given symbol. RDoc only looks for class1::class2.method + # or class1::class2#method. Since our definitions are mapped to RDoc methods + # but are written class1::class2::define we need to perform the lookup by + # ourselves. + def find_symbol(symbol, method=nil) + result = super + if not result and symbol =~ /::/ + modules = symbol.split(/::/) + unless modules.empty? + module_name = modules.shift + result = find_module_named(module_name) + if result + last_name = "" + previous = nil + modules.each do |module_name| + previous = result + last_name = module_name + result = result.find_module_named(module_name) + break unless result + end + unless result + result = previous + method = last_name + end + end + end + if result && method + if !result.respond_to?(:find_local_symbol) + p result.name + p method + fail + end + result = result.find_local_symbol(method) + end + end + result + end + + end + + # PuppetNode holds a puppet node + # It is mapped to a HTMLPuppetNode for display + # A node is just a variation of a class + class PuppetNode < PuppetClass + def initialize(name, superclass) + super(name,superclass) + end + + def is_module? + false + end + end + + # Plugin holds a native puppet plugin (function,type...) 
+ # It is mapped to a HTMLPuppetPlugin for display + class Plugin < Context + attr_accessor :name, :type + + def initialize(name, type) + super() + @name = name + @type = type + @comment = "" + end + + def <=>(other) + @name <=> other.name + end + + def full_name + @name + end + + def http_url(prefix) + path = full_name.split("::") + File.join(prefix, *path) + ".html" + end + + def is_fact? + false + end + + def to_s + res = self.class.name + ": #{@name} (#{@type})\n" + res << @comment.to_s + res + end + end + + # Fact holds a custom fact + # It is mapped to a HTMLPuppetPlugin for display + class Fact < Context + attr_accessor :name, :confine + + def initialize(name, confine) + super() + @name = name + @confine = confine + @comment = "" + end + + def <=>(other) + @name <=> other.name + end + + def is_fact? + true + end + + def full_name + @name + end + + def to_s + res = self.class.name + ": #{@name}\n" + res << @comment.to_s + res + end + end + + # PuppetResource holds a puppet resource + # It is mapped to a HTMLPuppetResource for display + # A resource is defined by its "normal" form Type[title] + class PuppetResource < CodeObject + attr_accessor :type, :title, :params + + def initialize(type, title, comment, params) + super() + @type = type + @title = title + @comment = comment + @params = params + end + + def <=>(other) + full_name <=> other.full_name + end + + def full_name + @type + "[#{@title}]" + end + + def name + full_name + end + + def to_s + res = @type + "[#{@title}]\n" + res << @comment.to_s + res + end + end +end diff --git a/mcollective/lib/puppet/util/rdoc/generators/puppet_generator.rb b/mcollective/lib/puppet/util/rdoc/generators/puppet_generator.rb new file mode 100644 index 000000000..249c9a8ba --- /dev/null +++ b/mcollective/lib/puppet/util/rdoc/generators/puppet_generator.rb @@ -0,0 +1,911 @@ +require 'rdoc/generators/html_generator' +require 'puppet/util/rdoc/code_objects' +require 'digest/md5' + +module Generators + + # This module holds all the classes needed to generate the HTML documentation + # of a bunch of puppet manifests. + # + # It works by traversing all the code objects defined by the Puppet RDoc::Parser + # and produces HTML counterparts objects that in turns are used by RDoc template engine + # to produce the final HTML. + # + # It is also responsible of creating the whole directory hierarchy, and various index + # files. + # + # It is to be noted that the whole system is built on top of ruby RDoc. As such there + # is an implicit mapping of puppet entities to ruby entitites: + # + # Puppet => Ruby + # ------------------------ + # Module Module + # Class Class + # Definition Method + # Resource + # Node + # Plugin + # Fact + + MODULE_DIR = "modules" + NODE_DIR = "nodes" + PLUGIN_DIR = "plugins" + + # We're monkey patching RDoc markup to allow + # lowercase class1::class2::class3 crossref hyperlinking + module MarkUp + alias :old_markup :markup + + def new_markup(str, remove_para=false) + first = @markup.nil? + res = old_markup(str, remove_para) + if first and not @markup.nil? 
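+        # On the first call, register an extra CROSSREF rule so that lowercase,
+        # puppet-style names (foo, foo::bar::baz) are also turned into hyperlinks,
+        # then run the markup pass again so the new rule applies to this string.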
+ @markup.add_special(/\b([a-z]\w+(::\w+)*)/,:CROSSREF) + # we need to call it again, since we added a rule + res = old_markup(str, remove_para) + end + res + end + alias :markup :new_markup + end + + # This is a specialized HTMLGenerator tailored to Puppet manifests + class PuppetGenerator < HTMLGenerator + + def PuppetGenerator.for(options) + AllReferences::reset + HtmlMethod::reset + + if options.all_one_file + PuppetGeneratorInOne.new(options) + else + PuppetGenerator.new(options) + end + end + + def initialize(options) #:not-new: + @options = options + load_html_template + end + + # loads our own html template file + def load_html_template + require 'puppet/util/rdoc/generators/template/puppet/puppet' + extend RDoc::Page + rescue LoadError + $stderr.puts "Could not find Puppet template '#{template}'" + exit 99 + end + + def gen_method_index + # we don't generate an all define index + # as the presentation is per module/per class + end + + # This is the central method, it generates the whole structures + # along with all the indices. + def generate_html + super + gen_into(@nodes) + gen_into(@plugins) + end + + ## + # Generate: + # the list of modules + # the list of classes and definitions of a specific module + # the list of all classes + # the list of nodes + # the list of resources + def build_indices + @allfiles = [] + @nodes = [] + @plugins = [] + + # contains all the seen modules + @modules = {} + @allclasses = {} + + # remove unknown toplevels + # it can happen that RDoc triggers a different parser for some files (ie .c, .cc or .h) + # in this case RDoc generates a RDoc::TopLevel which we do not support in this generator + # So let's make sure we don't generate html for those. + @toplevels = @toplevels.select { |tl| tl.is_a? RDoc::PuppetTopLevel } + + # build the modules, classes and per modules classes and define list + @toplevels.each do |toplevel| + next unless toplevel.document_self + file = HtmlFile.new(toplevel, @options, FILE_DIR) + classes = [] + methods = [] + modules = [] + nodes = [] + + # find all classes of this toplevel + # store modules if we find one + toplevel.each_classmodule do |k| + generate_class_list(classes, modules, k, toplevel, CLASS_DIR) + end + + # find all defines belonging to this toplevel + HtmlMethod.all_methods.each do |m| + # find parent module, check this method is not already + # defined. + if m.context.parent.toplevel === toplevel + methods << m + end + end + + classes.each do |k| + @allclasses[k.index_name] = k if !@allclasses.has_key?(k.index_name) + end + + # generate nodes and plugins found + classes.each do |k| + if k.context.is_module? 
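+            # Module contexts own their nodes, plugins and custom facts; wrap each
+            # one in its HTML view here so it gets its own page under NODE_DIR or PLUGIN_DIR.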
+ k.context.each_node do |name,node| + nodes << HTMLPuppetNode.new(node, toplevel, NODE_DIR, @options) + @nodes << nodes.last + end + k.context.each_plugin do |plugin| + @plugins << HTMLPuppetPlugin.new(plugin, toplevel, PLUGIN_DIR, @options) + end + k.context.each_fact do |fact| + @plugins << HTMLPuppetPlugin.new(fact, toplevel, PLUGIN_DIR, @options) + end + end + end + + @files << file + @allfiles << { "file" => file, "modules" => modules, "classes" => classes, "methods" => methods, "nodes" => nodes } + end + + # scan all classes to create the childs references + @allclasses.values.each do |klass| + if superklass = klass.context.superclass + if superklass = AllReferences[superklass] and (superklass.is_a?(HTMLPuppetClass) or superklass.is_a?(HTMLPuppetNode)) + superklass.context.add_child(klass.context) + end + end + end + + @classes = @allclasses.values + end + + # produce a class/module list of HTMLPuppetModule/HTMLPuppetClass + # based on the code object traversal. + def generate_class_list(classes, modules, from, html_file, class_dir) + if from.is_module? and !@modules.has_key?(from.name) + k = HTMLPuppetModule.new(from, html_file, class_dir, @options) + classes << k + @modules[from.name] = k + modules << @modules[from.name] + elsif from.is_module? + modules << @modules[from.name] + elsif !from.is_module? + k = HTMLPuppetClass.new(from, html_file, class_dir, @options) + classes << k + end + from.each_classmodule do |mod| + generate_class_list(classes, modules, mod, html_file, class_dir) + end + end + + # generate all the subdirectories, modules, classes and files + def gen_sub_directories + super + File.makedirs(MODULE_DIR) + File.makedirs(NODE_DIR) + File.makedirs(PLUGIN_DIR) + rescue + $stderr.puts $ERROR_INFO.message + exit 1 + end + + # generate the index of modules + def gen_file_index + gen_top_index(@modules.values, 'All Modules', RDoc::Page::TOP_INDEX, "fr_modules_index.html") + end + + # generate a top index + def gen_top_index(collection, title, template, filename) + template = TemplatePage.new(RDoc::Page::FR_INDEX_BODY, template) + res = [] + collection.sort.each do |f| + if f.document_self + res << { "classlist" => CGI.escapeHTML("#{MODULE_DIR}/fr_#{f.index_name}.html"), "module" => CGI.escapeHTML("#{CLASS_DIR}/#{f.index_name}.html"),"name" => CGI.escapeHTML(f.index_name) } + end + end + + values = { + "entries" => res, + 'list_title' => CGI.escapeHTML(title), + 'index_url' => main_url, + 'charset' => @options.charset, + 'style_url' => style_url('', @options.css), + } + + File.open(filename, "w") do |f| + template.write_html_on(f, values) + end + end + + # generate the all classes index file and the combo index + def gen_class_index + gen_an_index(@classes, 'All Classes', RDoc::Page::CLASS_INDEX, "fr_class_index.html") + @allfiles.each do |file| + unless file['file'].context.file_relative_name =~ /\.rb$/ + + gen_composite_index( + file, + RDoc::Page::COMBO_INDEX, + + "#{MODULE_DIR}/fr_#{file["file"].context.module_name}.html") + end + end + end + + def gen_composite_index(collection, template, filename)\ + return if FileTest.exists?(filename) + + template = TemplatePage.new(RDoc::Page::FR_INDEX_BODY, template) + res1 = [] + collection['classes'].sort.each do |f| + if f.document_self + res1 << { "href" => "../"+CGI.escapeHTML(f.path), "name" => CGI.escapeHTML(f.index_name) } unless f.context.is_module? 
+ end + end + + res2 = [] + collection['methods'].sort.each do |f| + res2 << { "href" => "../#{f.path}", "name" => f.index_name.sub(/\(.*\)$/,'') } if f.document_self + end + + module_name = [] + res3 = [] + res4 = [] + collection['modules'].sort.each do |f| + module_name << { "href" => "../"+CGI.escapeHTML(f.path), "name" => CGI.escapeHTML(f.index_name) } + unless f.facts.nil? + f.facts.each do |fact| + res3 << {"href" => "../"+CGI.escapeHTML(AllReferences["PLUGIN(#{fact.name})"].path), "name" => CGI.escapeHTML(fact.name)} + end + end + unless f.plugins.nil? + f.plugins.each do |plugin| + res4 << {"href" => "../"+CGI.escapeHTML(AllReferences["PLUGIN(#{plugin.name})"].path), "name" => CGI.escapeHTML(plugin.name)} + end + end + end + + res5 = [] + collection['nodes'].sort.each do |f| + res5 << { "href" => "../"+CGI.escapeHTML(f.path), "name" => CGI.escapeHTML(f.name) } if f.document_self + end + + values = { + "module" => module_name, + "classes" => res1, + 'classes_title' => CGI.escapeHTML("Classes"), + 'defines_title' => CGI.escapeHTML("Defines"), + 'facts_title' => CGI.escapeHTML("Custom Facts"), + 'plugins_title' => CGI.escapeHTML("Plugins"), + 'nodes_title' => CGI.escapeHTML("Nodes"), + 'index_url' => main_url, + 'charset' => @options.charset, + 'style_url' => style_url('', @options.css), + } + + values["defines"] = res2 if res2.size>0 + values["facts"] = res3 if res3.size>0 + values["plugins"] = res4 if res4.size>0 + values["nodes"] = res5 if res5.size>0 + + File.open(filename, "w") do |f| + template.write_html_on(f, values) + end + end + + # returns the initial_page url + def main_url + main_page = @options.main_page + ref = nil + if main_page + ref = AllReferences[main_page] + if ref + ref = ref.path + else + $stderr.puts "Could not find main page #{main_page}" + end + end + + unless ref + for file in @files + if file.document_self and file.context.global + ref = CGI.escapeHTML("#{CLASS_DIR}/#{file.context.module_name}.html") + break + end + end + end + + unless ref + for file in @files + if file.document_self and !file.context.global + ref = CGI.escapeHTML("#{CLASS_DIR}/#{file.context.module_name}.html") + break + end + end + end + + unless ref + $stderr.puts "Couldn't find anything to document" + $stderr.puts "Perhaps you've used :stopdoc: in all classes" + exit(1) + end + + ref + end + + end + + # This module is used to generate a referenced full name list of ContextUser + module ReferencedListBuilder + def build_referenced_list(list) + res = [] + list.each do |i| + ref = AllReferences[i.name] || @context.find_symbol(i.name) + ref = ref.viewer if ref and ref.respond_to?(:viewer) + name = i.respond_to?(:full_name) ? 
i.full_name : i.name + h_name = CGI.escapeHTML(name) + if ref and ref.document_self + path = url(ref.path) + res << { "name" => h_name, "aref" => path } + else + res << { "name" => h_name } + end + end + res + end + end + + # This module is used to hold/generate a list of puppet resources + # this is used in HTMLPuppetClass and HTMLPuppetNode + module ResourceContainer + def collect_resources + list = @context.resource_list + @resources = list.collect {|m| HTMLPuppetResource.new(m, self, @options) } + end + + def build_resource_summary_list(path_prefix='') + collect_resources unless @resources + resources = @resources.sort + res = [] + resources.each do |r| + res << { + "name" => CGI.escapeHTML(r.name), + "aref" => CGI.escape(path_prefix)+"\#"+CGI.escape(r.aref) + } + end + res + end + + def build_resource_detail_list(section) + outer = [] + resources = @resources.sort + resources.each do |r| + row = {} + if r.section == section and r.document_self + row["name"] = CGI.escapeHTML(r.name) + desc = r.description.strip + row["m_desc"] = desc unless desc.empty? + row["aref"] = r.aref + row["params"] = r.params + outer << row + end + end + outer + end + end + + class HTMLPuppetClass < HtmlClass + include ResourceContainer, ReferencedListBuilder + + def value_hash + super + rl = build_resource_summary_list + @values["resources"] = rl unless rl.empty? + + @context.sections.each do |section| + secdata = @values["sections"].select { |secdata| secdata["secsequence"] == section.sequence } + if secdata.size == 1 + secdata = secdata[0] + + rdl = build_resource_detail_list(section) + secdata["resource_list"] = rdl unless rdl.empty? + end + end + + rl = build_require_list(@context) + @values["requires"] = rl unless rl.empty? + + rl = build_realize_list(@context) + @values["realizes"] = rl unless rl.empty? + + cl = build_child_list(@context) + @values["childs"] = cl unless cl.empty? + + @values + end + + def build_require_list(context) + build_referenced_list(context.requires) + end + + def build_realize_list(context) + build_referenced_list(context.realizes) + end + + def build_child_list(context) + build_referenced_list(context.childs) + end + end + + class HTMLPuppetNode < ContextUser + include ResourceContainer, ReferencedListBuilder + + attr_reader :path + + def initialize(context, html_file, prefix, options) + super(context, options) + + @html_file = html_file + @is_module = context.is_module? + @values = {} + + context.viewer = self + + if options.all_one_file + @path = context.full_name + else + @path = http_url(context.full_name, prefix) + end + + AllReferences.add("NODE(#{@context.full_name})", self) + end + + def name + @context.name + end + + # return the relative file name to store this class in, + # which is also its url + def http_url(full_name, prefix) + path = full_name.dup + path.gsub!(/<<\s*(\w*)/) { "from-#$1" } if path['<<'] + File.join(prefix, path.split("::").collect { |p| Digest::MD5.hexdigest(p) }) + ".html" + end + + def parent_name + @context.parent.full_name + end + + def index_name + name + end + + def write_on(f) + value_hash + + template = TemplatePage.new( + RDoc::Page::BODYINC, + RDoc::Page::NODE_PAGE, + + RDoc::Page::METHOD_LIST) + template.write_html_on(f, @values) + end + + def value_hash + class_attribute_values + add_table_of_sections + + @values["charset"] = @options.charset + @values["style_url"] = style_url(path, @options.css) + + d = markup(@context.comment) + @values["description"] = d unless d.empty? 
+ + ml = build_method_summary_list + @values["methods"] = ml unless ml.empty? + + rl = build_resource_summary_list + @values["resources"] = rl unless rl.empty? + + il = build_include_list(@context) + @values["includes"] = il unless il.empty? + + rl = build_require_list(@context) + @values["requires"] = rl unless rl.empty? + + rl = build_realize_list(@context) + @values["realizes"] = rl unless rl.empty? + + cl = build_child_list(@context) + @values["childs"] = cl unless cl.empty? + + @values["sections"] = @context.sections.map do |section| + + secdata = { + "sectitle" => section.title, + "secsequence" => section.sequence, + "seccomment" => markup(section.comment) + } + + al = build_alias_summary_list(section) + secdata["aliases"] = al unless al.empty? + + co = build_constants_summary_list(section) + secdata["constants"] = co unless co.empty? + + al = build_attribute_list(section) + secdata["attributes"] = al unless al.empty? + + cl = build_class_list(0, @context, section) + secdata["classlist"] = cl unless cl.empty? + + mdl = build_method_detail_list(section) + secdata["method_list"] = mdl unless mdl.empty? + + rdl = build_resource_detail_list(section) + secdata["resource_list"] = rdl unless rdl.empty? + + secdata + end + + @values + end + + def build_attribute_list(section) + atts = @context.attributes.sort + res = [] + atts.each do |att| + next unless att.section == section + if att.visibility == :public || att.visibility == :protected || @options.show_all + entry = { + "name" => CGI.escapeHTML(att.name), + "rw" => att.rw, + "a_desc" => markup(att.comment, true) + } + unless att.visibility == :public || att.visibility == :protected + entry["rw"] << "-" + end + res << entry + end + end + res + end + + def class_attribute_values + h_name = CGI.escapeHTML(name) + + @values["classmod"] = "Node" + @values["title"] = CGI.escapeHTML("#{@values['classmod']}: #{h_name}") + + c = @context + c = c.parent while c and !c.diagram + + @values["diagram"] = diagram_reference(c.diagram) if c && c.diagram + + @values["full_name"] = h_name + + parent_class = @context.superclass + + if parent_class + @values["parent"] = CGI.escapeHTML(parent_class) + + if parent_name + lookup = parent_name + "::#{parent_class}" + else + lookup = parent_class + end + lookup = "NODE(#{lookup})" + parent_url = AllReferences[lookup] || AllReferences[parent_class] + @values["par_url"] = aref_to(parent_url.path) if parent_url and parent_url.document_self + end + + files = [] + @context.in_files.each do |f| + res = {} + full_path = CGI.escapeHTML(f.file_absolute_name) + + res["full_path"] = full_path + res["full_path_url"] = aref_to(f.viewer.path) if f.document_self + + res["cvsurl"] = cvs_url( @options.webcvs, full_path ) if @options.webcvs + + files << res + end + + @values['infiles'] = files + end + + def build_require_list(context) + build_referenced_list(context.requires) + end + + def build_realize_list(context) + build_referenced_list(context.realizes) + end + + def build_child_list(context) + build_referenced_list(context.childs) + end + + def <=>(other) + self.name <=> other.name + end + end + + class HTMLPuppetModule < HtmlClass + + def initialize(context, html_file, prefix, options) + super(context, html_file, prefix, options) + end + + def value_hash + @values = super + + fl = build_facts_summary_list + @values["facts"] = fl unless fl.empty? + + pl = build_plugins_summary_list + @values["plugins"] = pl unless pl.empty? + + nl = build_nodes_list(0, @context) + @values["nodelist"] = nl unless nl.empty? 
+ + @values + end + + def build_nodes_list(level, context) + res = "" + prefix = "  ::" * level; + + context.nodes.sort.each do |node| + if node.document_self + res << + prefix << + "Node " << + href(url(node.viewer.path), "link", node.full_name) << + "
\n" + end + end + res + end + + def build_facts_summary_list + potentially_referenced_list(context.facts) {|fn| ["PLUGIN(#{fn})"] } + end + + def build_plugins_summary_list + potentially_referenced_list(context.plugins) {|fn| ["PLUGIN(#{fn})"] } + end + + def facts + @context.facts + end + + def plugins + @context.plugins + end + + end + + class HTMLPuppetPlugin < ContextUser + attr_reader :path + + def initialize(context, html_file, prefix, options) + super(context, options) + + @html_file = html_file + @is_module = false + @values = {} + + context.viewer = self + + if options.all_one_file + @path = context.full_name + else + @path = http_url(context.full_name, prefix) + end + + AllReferences.add("PLUGIN(#{@context.full_name})", self) + end + + def name + @context.name + end + + # return the relative file name to store this class in, + # which is also its url + def http_url(full_name, prefix) + path = full_name.dup + path.gsub!(/<<\s*(\w*)/) { "from-#$1" } if path['<<'] + File.join(prefix, path.split("::")) + ".html" + end + + def parent_name + @context.parent.full_name + end + + def index_name + name + end + + def write_on(f) + value_hash + + template = TemplatePage.new( + RDoc::Page::BODYINC, + RDoc::Page::PLUGIN_PAGE, + + RDoc::Page::PLUGIN_LIST) + template.write_html_on(f, @values) + end + + def value_hash + attribute_values + add_table_of_sections + + @values["charset"] = @options.charset + @values["style_url"] = style_url(path, @options.css) + + d = markup(@context.comment) + @values["description"] = d unless d.empty? + + if context.is_fact? + unless context.confine.empty? + res = {} + res["type"] = context.confine[:type] + res["value"] = context.confine[:value] + @values["confine"] = [res] + end + else + @values["type"] = context.type + end + + @values["sections"] = @context.sections.map do |section| + secdata = { + "sectitle" => section.title, + "secsequence" => section.sequence, + "seccomment" => markup(section.comment) + } + secdata + end + + @values + end + + def attribute_values + h_name = CGI.escapeHTML(name) + + if @context.is_fact? 
+ @values["classmod"] = "Fact" + else + @values["classmod"] = "Plugin" + end + @values["title"] = "#{@values['classmod']}: #{h_name}" + + c = @context + @values["full_name"] = h_name + + files = [] + @context.in_files.each do |f| + res = {} + full_path = CGI.escapeHTML(f.file_absolute_name) + + res["full_path"] = full_path + res["full_path_url"] = aref_to(f.viewer.path) if f.document_self + + res["cvsurl"] = cvs_url( @options.webcvs, full_path ) if @options.webcvs + + files << res + end + + @values['infiles'] = files + end + + def <=>(other) + self.name <=> other.name + end + + end + + class HTMLPuppetResource + include MarkUp + + attr_reader :context + + @@seq = "R000000" + + def initialize(context, html_class, options) + @context = context + @html_class = html_class + @options = options + @@seq = @@seq.succ + @seq = @@seq + + context.viewer = self + + AllReferences.add(name, self) + end + + def as_href(from_path) + if @options.all_one_file + "##{path}" + else + HTMLGenerator.gen_url(from_path, path) + end + end + + def name + @context.name + end + + def section + @context.section + end + + def index_name + "#{@context.name}" + end + + def params + @context.params + end + + def parent_name + if @context.parent.parent + @context.parent.parent.full_name + else + nil + end + end + + def aref + @seq + end + + def path + if @options.all_one_file + aref + else + @html_class.path + "##{aref}" + end + end + + def description + markup(@context.comment) + end + + def <=>(other) + @context <=> other.context + end + + def document_self + @context.document_self + end + + def find_symbol(symbol, method=nil) + res = @context.parent.find_symbol(symbol, method) + res &&= res.viewer + end + + end + + class PuppetGeneratorInOne < HTMLGeneratorInOne + def gen_method_index + gen_an_index(HtmlMethod.all_methods, 'Defines') + end + end + +end diff --git a/mcollective/lib/puppet/util/rdoc/generators/template/puppet/puppet.rb b/mcollective/lib/puppet/util/rdoc/generators/template/puppet/puppet.rb new file mode 100644 index 000000000..e03381f22 --- /dev/null +++ b/mcollective/lib/puppet/util/rdoc/generators/template/puppet/puppet.rb @@ -0,0 +1,1077 @@ +# +# = CSS2 RDoc HTML template +# +# This is a template for RDoc that uses XHTML 1.0 Transitional and dictates a +# bit more of the appearance of the output to cascading stylesheets than the +# default. It was designed for clean inline code display, and uses DHTMl to +# toggle the visbility of each method's source with each click on the '[source]' +# link. +# +# == Authors +# +# * Michael Granger +# +# Copyright (c) 2002, 2003 The FaerieMUD Consortium. Some rights reserved. +# +# This work is licensed under the Creative Commons Attribution License. To view +# a copy of this license, visit http://creativecommons.org/licenses/by/1.0/ or +# send a letter to Creative Commons, 559 Nathan Abbott Way, Stanford, California +# 94305, USA. 
+# + +module RDoc + module Page + + FONTS = "Verdana,Arial,Helvetica,sans-serif" + +STYLE = %{ +body { + font-family: Verdana,Arial,Helvetica,sans-serif; + font-size: 90%; + margin: 0; + margin-left: 40px; + padding: 0; + background: white; +} + +h1,h2,h3,h4 { margin: 0; color: #efefef; background: transparent; } +h1 { font-size: 150%; } +h2,h3,h4 { margin-top: 1em; } + +a { background: #eef; color: #039; text-decoration: none; } +a:hover { background: #039; color: #eef; } + +/* Override the base stylesheet's Anchor inside a table cell */ +td > a { + background: transparent; + color: #039; + text-decoration: none; +} + +/* and inside a section title */ +.section-title > a { + background: transparent; + color: #eee; + text-decoration: none; +} + +/* === Structural elements =================================== */ + +div#index { + margin: 0; + margin-left: -40px; + padding: 0; + font-size: 90%; +} + + +div#index a { + margin-left: 0.7em; +} + +div#index .section-bar { + margin-left: 0px; + padding-left: 0.7em; + background: #ccc; + font-size: small; +} + + +div#classHeader, div#fileHeader { + width: auto; + color: white; + padding: 0.5em 1.5em 0.5em 1.5em; + margin: 0; + margin-left: -40px; + border-bottom: 3px solid #006; +} + +div#classHeader a, div#fileHeader a { + background: inherit; + color: white; +} + +div#classHeader td, div#fileHeader td { + background: inherit; + color: white; +} + + +div#fileHeader { + background: #057; +} + +div#classHeader { + background: #048; +} + +div#nodeHeader { + background: #7f7f7f; +} + +.class-name-in-header { + font-size: 180%; + font-weight: bold; +} + + +div#bodyContent { + padding: 0 1.5em 0 1.5em; +} + +div#description { + padding: 0.5em 1.5em; + background: #efefef; + border: 1px dotted #999; +} + +div#description h1,h2,h3,h4,h5,h6 { + color: #125;; + background: transparent; +} + +div#validator-badges { + text-align: center; +} +div#validator-badges img { border: 0; } + +div#copyright { + color: #333; + background: #efefef; + font: 0.75em sans-serif; + margin-top: 5em; + margin-bottom: 0; + padding: 0.5em 2em; +} + + +/* === Classes =================================== */ + +table.header-table { + color: white; + font-size: small; +} + +.type-note { + font-size: small; + color: #DEDEDE; +} + +.xxsection-bar { + background: #eee; + color: #333; + padding: 3px; +} + +.section-bar { + color: #333; + border-bottom: 1px solid #999; + margin-left: -20px; +} + + +.section-title { + background: #79a; + color: #eee; + padding: 3px; + margin-top: 2em; + margin-left: -30px; + border: 1px solid #999; +} + +.top-aligned-row { vertical-align: top } +.bottom-aligned-row { vertical-align: bottom } + +/* --- Context section classes ----------------------- */ + +.context-row { } +.context-item-name { font-family: monospace; font-weight: bold; color: black; } +.context-item-value { font-size: small; color: #448; } +.context-item-desc { color: #333; padding-left: 2em; } + +/* --- Method classes -------------------------- */ +.method-detail { + background: #efefef; + padding: 0; + margin-top: 0.5em; + margin-bottom: 1em; + border: 1px dotted #ccc; +} +.method-heading { + color: black; + background: #ccc; + border-bottom: 1px solid #666; + padding: 0.2em 0.5em 0 0.5em; +} +.method-signature { color: black; background: inherit; } +.method-name { font-weight: bold; } +.method-args { font-style: italic; } +.method-description { padding: 0 0.5em 0 0.5em; } + +/* --- Source code sections -------------------- */ + +a.source-toggle { font-size: 90%; } +div.method-source-code { 
+ background: #262626; + color: #ffdead; + margin: 1em; + padding: 0.5em; + border: 1px dashed #999; + overflow: hidden; +} + +div.method-source-code pre { color: #ffdead; overflow: hidden; } + +/* --- Ruby keyword styles --------------------- */ + +.standalone-code { background: #221111; color: #ffdead; overflow: hidden; } + +.ruby-constant { color: #7fffd4; background: transparent; } +.ruby-keyword { color: #00ffff; background: transparent; } +.ruby-ivar { color: #eedd82; background: transparent; } +.ruby-operator { color: #00ffee; background: transparent; } +.ruby-identifier { color: #ffdead; background: transparent; } +.ruby-node { color: #ffa07a; background: transparent; } +.ruby-comment { color: #b22222; font-weight: bold; background: transparent; } +.ruby-regexp { color: #ffa07a; background: transparent; } +.ruby-value { color: #7fffd4; background: transparent; } +} + + +##################################################################### +### H E A D E R T E M P L A T E +##################################################################### + +XHTML_PREAMBLE = %{ + +} + +HEADER = XHTML_PREAMBLE + %{ + + + %title% + + + + + + + +} + + +##################################################################### +### C O N T E X T C O N T E N T T E M P L A T E +##################################################################### + +CONTEXT_CONTENT = %{ +} + + +##################################################################### +### F O O T E R T E M P L A T E +##################################################################### +FOOTER = %{ + + + + +} + + +##################################################################### +### F I L E P A G E H E A D E R T E M P L A T E +##################################################################### + +FILE_PAGE = %{ +
+

%short_name%

+ + + + + + + + + +
+[XHTML template bodies: FILE_PAGE, CLASS_PAGE, NODE_PAGE, PLUGIN_PAGE, PLUGIN_LIST, METHOD_LIST, BODY, BODYINC, SRC_PAGE, FR_INDEX_BODY, FILE_INDEX, TOP_INDEX, CLASS_INDEX, METHOD_INDEX and COMBO_INDEX: page headers, description blocks, method/resource listings and frame indices rendered by PuppetGenerator through TemplatePage, built from IF:/START:/END: directives and %field% substitutions for sections such as Defines, Resources, Included and Required Classes, Realized Resources, Custom Facts, Plugins, Nodes, Attributes, and Classes and Modules.]
+ + +} + +INDEX = %{ + + + + + + %title% + + + + + + + + + + +} + + + + end # module Page +end # class RDoc + +require 'rdoc/generators/template/html/one_page_html' diff --git a/mcollective/lib/puppet/util/rdoc/parser.rb b/mcollective/lib/puppet/util/rdoc/parser.rb new file mode 100644 index 000000000..ea7439ad7 --- /dev/null +++ b/mcollective/lib/puppet/util/rdoc/parser.rb @@ -0,0 +1,482 @@ +# Puppet "parser" for the rdoc system +# The parser uses puppet parser and traverse the AST to instruct RDoc about +# our current structures. It also parses ruby files that could contain +# either custom facts or puppet plugins (functions, types...) + +# rdoc mandatory includes +require "rdoc/code_objects" +require "puppet/util/rdoc/code_objects" +require "rdoc/tokenstream" +require "rdoc/markup/simple_markup/preprocess" +require "rdoc/parsers/parserfactory" + +module RDoc + +class Parser + extend ParserFactory + + SITE = "__site__" + + attr_accessor :ast, :input_file_name, :top_level + + # parser registration into RDoc + parse_files_matching(/\.(rb|pp)$/) + + # called with the top level file + def initialize(top_level, file_name, content, options, stats) + @options = options + @stats = stats + @input_file_name = file_name + @top_level = PuppetTopLevel.new(top_level) + @progress = $stderr unless options.quiet + end + + # main entry point + def scan + env = Puppet::Node::Environment.new + unless env.known_resource_types.watching_file?(@input_file_name) + Puppet.info "rdoc: scanning #{@input_file_name}" + if @input_file_name =~ /\.pp$/ + @parser = Puppet::Parser::Parser.new(env) + @parser.file = @input_file_name + @ast = @parser.parse + end + else + @ast = env.known_resource_types + end + scan_top_level(@top_level) + @top_level + end + + # Due to a bug in RDoc, we need to roll our own find_module_named + # The issue is that RDoc tries harder by asking the parent for a class/module + # of the name. But by doing so, it can mistakenly use a module of same name + # but from which we are not descendant. + def find_object_named(container, name) + return container if container.name == name + container.each_classmodule do |m| + return m if m.name == name + end + nil + end + + # walk down the namespace and lookup/create container as needed + def get_class_or_module(container, name) + + # class ::A -> A is in the top level + if name =~ /^::/ + container = @top_level + end + + names = name.split('::') + + final_name = names.pop + names.each do |name| + prev_container = container + container = find_object_named(container, name) + container ||= prev_container.add_class(PuppetClass, name, nil) + end + [container, final_name] + end + + # split_module tries to find if +path+ belongs to the module path + # if it does, it returns the module name, otherwise if we are sure + # it is part of the global manifest path, "__site__" is returned. + # And finally if this path couldn't be mapped anywhere, nil is returned. + def split_module(path) + # find a module + fullpath = File.expand_path(path) + Puppet.debug "rdoc: testing #{fullpath}" + if fullpath =~ /(.*)\/([^\/]+)\/(?:manifests|plugins|lib)\/.+\.(pp|rb)$/ + modpath = $1 + name = $2 + Puppet.debug "rdoc: module #{name} into #{modpath} ?" 
+ Puppet::Module.modulepath.each do |mp| + if File.identical?(modpath,mp) + Puppet.debug "rdoc: found module #{name}" + return name + end + end + end + if fullpath =~ /\.(pp|rb)$/ + # there can be paths we don't want to scan under modules + # imagine a ruby or manifest that would be distributed as part as a module + # but we don't want those to be hosted under + Puppet::Module.modulepath.each do |mp| + # check that fullpath is a descendant of mp + dirname = fullpath + while (dirname = File.dirname(dirname)) != '/' + return nil if File.identical?(dirname,mp) + end + end + end + # we are under a global manifests + Puppet.debug "rdoc: global manifests" + SITE + end + + # create documentation for the top level +container+ + def scan_top_level(container) + # use the module README as documentation for the module + comment = "" + readme = File.join(File.dirname(File.dirname(@input_file_name)), "README") + comment = File.open(readme,"r") { |f| f.read } if FileTest.readable?(readme) + look_for_directives_in(container, comment) unless comment.empty? + + # infer module name from directory + name = split_module(@input_file_name) + if name.nil? + # skip .pp files that are not in manifests directories as we can't guarantee they're part + # of a module or the global configuration. + container.document_self = false + return + end + + Puppet.debug "rdoc: scanning for #{name}" + + container.module_name = name + container.global=true if name == SITE + + @stats.num_modules += 1 + container, name = get_class_or_module(container,name) + mod = container.add_module(PuppetModule, name) + mod.record_location(@top_level) + mod.comment = comment + + if @input_file_name =~ /\.pp$/ + parse_elements(mod) + elsif @input_file_name =~ /\.rb$/ + parse_plugins(mod) + end + end + + # create documentation for include statements we can find in +code+ + # and associate it with +container+ + def scan_for_include_or_require(container, code) + code = [code] unless code.is_a?(Array) + code.each do |stmt| + scan_for_include_or_require(container,stmt.children) if stmt.is_a?(Puppet::Parser::AST::ASTArray) + + if stmt.is_a?(Puppet::Parser::AST::Function) and ['include','require'].include?(stmt.name) + stmt.arguments.each do |included| + Puppet.debug "found #{stmt.name}: #{included}" + container.send("add_#{stmt.name}",Include.new(included.to_s, stmt.doc)) + end + end + end + end + + # create documentation for realize statements we can find in +code+ + # and associate it with +container+ + def scan_for_realize(container, code) + code = [code] unless code.is_a?(Array) + code.each do |stmt| + scan_for_realize(container,stmt.children) if stmt.is_a?(Puppet::Parser::AST::ASTArray) + + if stmt.is_a?(Puppet::Parser::AST::Function) and stmt.name == 'realize' + stmt.arguments.each do |realized| + Puppet.debug "found #{stmt.name}: #{realized}" + container.add_realize(Include.new(realized.to_s, stmt.doc)) + end + end + end + end + + # create documentation for global variables assignements we can find in +code+ + # and associate it with +container+ + def scan_for_vardef(container, code) + code = [code] unless code.is_a?(Array) + code.each do |stmt| + scan_for_vardef(container,stmt.children) if stmt.is_a?(Puppet::Parser::AST::ASTArray) + + if stmt.is_a?(Puppet::Parser::AST::VarDef) + Puppet.debug "rdoc: found constant: #{stmt.name} = #{stmt.value}" + container.add_constant(Constant.new(stmt.name.to_s, stmt.value.to_s, stmt.doc)) + end + end + end + + # create documentation for resources we can find in +code+ + # and associate it with +container+ + 
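+  # For example (an illustrative manifest snippet, not taken from this change),
+  # a statement such as
+  #
+  #   file { "/etc/motd": ensure => present }
+  #
+  # is recorded as a PuppetResource with type "File", title "/etc/motd" and a
+  # single parameter entry {"name" => "ensure", "value" => "present"}.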
def scan_for_resource(container, code) + code = [code] unless code.is_a?(Array) + code.each do |stmt| + scan_for_resource(container,stmt.children) if stmt.is_a?(Puppet::Parser::AST::ASTArray) + + if stmt.is_a?(Puppet::Parser::AST::Resource) and !stmt.type.nil? + begin + type = stmt.type.split("::").collect { |s| s.capitalize }.join("::") + title = stmt.title.is_a?(Puppet::Parser::AST::ASTArray) ? stmt.title.to_s.gsub(/\[(.*)\]/,'\1') : stmt.title.to_s + Puppet.debug "rdoc: found resource: #{type}[#{title}]" + + param = [] + stmt.parameters.children.each do |p| + res = {} + res["name"] = p.param + res["value"] = "#{p.value.to_s}" unless p.value.nil? + + param << res + end + + container.add_resource(PuppetResource.new(type, title, stmt.doc, param)) + rescue => detail + raise Puppet::ParseError, "impossible to parse resource in #{stmt.file} at line #{stmt.line}: #{detail}" + end + end + end + end + + def resource_stmt_to_ref(stmt) + type = stmt.type.split("::").collect { |s| s.capitalize }.join("::") + title = stmt.title.is_a?(Puppet::Parser::AST::ASTArray) ? stmt.title.to_s.gsub(/\[(.*)\]/,'\1') : stmt.title.to_s + + param = stmt.params.children.collect do |p| + {"name" => p.param, "value" => p.value.to_s} + end + PuppetResource.new(type, title, stmt.doc, param) + end + + # create documentation for a class named +name+ + def document_class(name, klass, container) + Puppet.debug "rdoc: found new class #{name}" + container, name = get_class_or_module(container, name) + + superclass = klass.parent + superclass = "" if superclass.nil? or superclass.empty? + + @stats.num_classes += 1 + comment = klass.doc + look_for_directives_in(container, comment) unless comment.empty? + cls = container.add_class(PuppetClass, name, superclass) + # it is possible we already encountered this class, while parsing some namespaces + # from other classes of other files. But at that time we couldn't know this class superclass + # so, now we know it and force it. + cls.superclass = superclass + cls.record_location(@top_level) + + # scan class code for include + code = klass.code.children if klass.code.is_a?(Puppet::Parser::AST::ASTArray) + code ||= klass.code + unless code.nil? + scan_for_include_or_require(cls, code) + scan_for_realize(cls, code) + scan_for_resource(cls, code) if Puppet.settings[:document_all] + end + + cls.comment = comment + rescue => detail + raise Puppet::ParseError, "impossible to parse class '#{name}' in #{klass.file} at line #{klass.line}: #{detail}" + end + + # create documentation for a node + def document_node(name, node, container) + Puppet.debug "rdoc: found new node #{name}" + superclass = node.parent + superclass = "" if superclass.nil? or superclass.empty? + + comment = node.doc + look_for_directives_in(container, comment) unless comment.empty? + n = container.add_node(name, superclass) + n.record_location(@top_level) + + code = node.code.children if node.code.is_a?(Puppet::Parser::AST::ASTArray) + code ||= node.code + unless code.nil? 
+ scan_for_include_or_require(n, code) + scan_for_realize(n, code) + scan_for_vardef(n, code) + scan_for_resource(n, code) if Puppet.settings[:document_all] + end + + n.comment = comment + rescue => detail + raise Puppet::ParseError, "impossible to parse node '#{name}' in #{node.file} at line #{node.line}: #{detail}" + end + + # create documentation for a define + def document_define(name, define, container) + Puppet.debug "rdoc: found new definition #{name}" + # find superclas if any + @stats.num_methods += 1 + + # find the parent + # split define name by :: to find the complete module hierarchy + container, name = get_class_or_module(container,name) + + # build up declaration + declaration = "" + define.arguments.each do |arg,value| + declaration << "\$#{arg}" + unless value.nil? + declaration << " => " + case value + when Puppet::Parser::AST::Leaf + declaration << "'#{value.value}'" + when Puppet::Parser::AST::ASTArray + declaration << "[#{value.children.collect { |v| "'#{v}'" }.join(", ")}]" + else + declaration << "#{value.to_s}" + end + end + declaration << ", " + end + declaration.chop!.chop! if declaration.size > 1 + + # register method into the container + meth = AnyMethod.new(declaration, name) + meth.comment = define.doc + container.add_method(meth) + look_for_directives_in(container, meth.comment) unless meth.comment.empty? + meth.params = "( #{declaration} )" + meth.visibility = :public + meth.document_self = true + meth.singleton = false + rescue => detail + raise Puppet::ParseError, "impossible to parse definition '#{name}' in #{define.file} at line #{define.line}: #{detail}" + end + + # Traverse the AST tree and produce code-objects node + # that contains the documentation + def parse_elements(container) + Puppet.debug "rdoc: scanning manifest" + @ast.hostclasses.values.sort { |a,b| a.name <=> b.name }.each do |klass| + name = klass.name + if klass.file == @input_file_name + unless name.empty? + document_class(name,klass,container) + else # on main class document vardefs + code = klass.code.children if klass.code.is_a?(Puppet::Parser::AST::ASTArray) + code ||= klass.code + scan_for_vardef(container, code) unless code.nil? + end + end + end + + @ast.definitions.each do |name, define| + if define.file == @input_file_name + document_define(name,define,container) + end + end + + @ast.nodes.each do |name, node| + if node.file == @input_file_name + document_node(name.to_s,node,container) + end + end + end + + # create documentation for plugins + def parse_plugins(container) + Puppet.debug "rdoc: scanning plugin or fact" + if @input_file_name =~ /\/facter\/[^\/]+\.rb$/ + parse_fact(container) + else + parse_puppet_plugin(container) + end + end + + # this is a poor man custom fact parser :-) + def parse_fact(container) + comments = "" + current_fact = nil + File.open(@input_file_name) do |of| + of.each do |line| + # fetch comments + if line =~ /^[ \t]*# ?(.*)$/ + comments += $1 + "\n" + elsif line =~ /^[ \t]*Facter.add\(['"](.*?)['"]\)/ + current_fact = Fact.new($1,{}) + look_for_directives_in(container, comments) unless comments.empty? + current_fact.comment = comments + container.add_fact(current_fact) + current_fact.record_location(@top_level) + comments = "" + Puppet.debug "rdoc: found custom fact #{current_fact.name}" + elsif line =~ /^[ \t]*confine[ \t]*:(.*?)[ \t]*=>[ \t]*(.*)$/ + current_fact.confine = { :type => $1, :value => $2 } unless current_fact.nil? 
+ else # unknown line type + comments ="" + end + end + end + end + + # this is a poor man puppet plugin parser :-) + # it doesn't extract doc nor desc :-( + def parse_puppet_plugin(container) + comments = "" + current_plugin = nil + + File.open(@input_file_name) do |of| + of.each do |line| + # fetch comments + if line =~ /^[ \t]*# ?(.*)$/ + comments += $1 + "\n" + elsif line =~ /^[ \t]*newfunction[ \t]*\([ \t]*:(.*?)[ \t]*,[ \t]*:type[ \t]*=>[ \t]*(:rvalue|:lvalue)\)/ + current_plugin = Plugin.new($1, "function") + container.add_plugin(current_plugin) + look_for_directives_in(container, comments) unless comments.empty? + current_plugin.comment = comments + current_plugin.record_location(@top_level) + comments = "" + Puppet.debug "rdoc: found new function plugins #{current_plugin.name}" + elsif line =~ /^[ \t]*Puppet::Type.newtype[ \t]*\([ \t]*:(.*?)\)/ + current_plugin = Plugin.new($1, "type") + container.add_plugin(current_plugin) + look_for_directives_in(container, comments) unless comments.empty? + current_plugin.comment = comments + current_plugin.record_location(@top_level) + comments = "" + Puppet.debug "rdoc: found new type plugins #{current_plugin.name}" + elsif line =~ /module Puppet::Parser::Functions/ + # skip + else # unknown line type + comments ="" + end + end + end + end + + # look_for_directives_in scans the current +comment+ for RDoc directives + def look_for_directives_in(context, comment) + preprocess = SM::PreProcess.new(@input_file_name, @options.rdoc_include) + + preprocess.handle(comment) do |directive, param| + case directive + when "stopdoc" + context.stop_doc + "" + when "startdoc" + context.start_doc + context.force_documentation = true + "" + when "enddoc" + #context.done_documenting = true + #"" + throw :enddoc + when "main" + options = Options.instance + options.main_page = param + "" + when "title" + options = Options.instance + options.title = param + "" + when "section" + context.set_current_section(param, comment) + comment.replace("") # 1.8 doesn't support #clear + break + else + warn "Unrecognized directive '#{directive}'" + break + end + end + remove_private_comments(comment) + end + + def remove_private_comments(comment) + comment.gsub!(/^#--.*?^#\+\+/m, '') + comment.sub!(/^#--.*/m, '') + end +end +end diff --git a/mcollective/lib/puppet/util/reference.rb b/mcollective/lib/puppet/util/reference.rb new file mode 100644 index 000000000..ab74b3ca3 --- /dev/null +++ b/mcollective/lib/puppet/util/reference.rb @@ -0,0 +1,141 @@ +require 'puppet/util/instance_loader' +require 'fileutils' + +# Manage Reference Documentation. 
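+# A reference is normally registered through .newreference with a block that
+# produces its body. The sketch below is illustrative only; the :example name
+# and doc string are not defined in this file:
+#
+#   Puppet::Util::Reference.newreference(:example, :doc => "An example reference") do
+#     "Text returned by this block becomes the body of the reference."
+#   end
+#
+#   Puppet::Util::Reference.reference(:example).to_markdown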
+class Puppet::Util::Reference + include Puppet::Util + include Puppet::Util::Docs + + extend Puppet::Util::InstanceLoader + + instance_load(:reference, 'puppet/reference') + + def self.footer + "\n\n----------------\n\n*This page autogenerated on #{Time.now}*\n" + end + + def self.modes + %w{pdf text} + end + + def self.newreference(name, options = {}, &block) + ref = self.new(name, options, &block) + instance_hash(:reference)[symbolize(name)] = ref + + ref + end + + def self.page(*sections) + depth = 4 + # Use the minimum depth + sections.each do |name| + section = reference(name) or raise "Could not find section #{name}" + depth = section.depth if section.depth < depth + end + end + + def self.pdf(text) + puts "creating pdf" + rst2latex = which('rst2latex') || which('rst2latex.py') || + raise("Could not find rst2latex") + + cmd = %{#{rst2latex} /tmp/puppetdoc.txt > /tmp/puppetdoc.tex} + Puppet::Util.replace_file("/tmp/puppetdoc.txt") {|f| f.puts text } + # There used to be an attempt to use secure_open / replace_file to secure + # the target, too, but that did nothing: the race was still here. We can + # get exactly the same benefit from running this effort: + File.unlink('/tmp/puppetdoc.tex') rescue nil + output = %x{#{cmd}} + unless $CHILD_STATUS == 0 + $stderr.puts "rst2latex failed" + $stderr.puts output + exit(1) + end + $stderr.puts output + + # Now convert to pdf + Dir.chdir("/tmp") do + %x{texi2pdf puppetdoc.tex >/dev/null 2>/dev/null} + end + + end + + def self.references + instance_loader(:reference).loadall + loaded_instances(:reference).sort { |a,b| a.to_s <=> b.to_s } + end + + HEADER_LEVELS = [nil, "#", "##", "###", "####", "#####"] + + attr_accessor :page, :depth, :header, :title, :dynamic + attr_writer :doc + + def doc + if defined?(@doc) + return "#{@name} - #{@doc}" + else + return @title + end + end + + def dynamic? + self.dynamic + end + + def h(name, level) + "#{HEADER_LEVELS[level]} #{name}\n\n" + end + + def initialize(name, options = {}, &block) + @name = name + options.each do |option, value| + send(option.to_s + "=", value) + end + + meta_def(:generate, &block) + + # Now handle the defaults + @title ||= "#{@name.to_s.capitalize} Reference" + @page ||= @title.gsub(/\s+/, '') + @depth ||= 2 + @header ||= "" + end + + # Indent every line in the chunk except those which begin with '..'. 
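+  # For example (illustrative values):
+  #   indent("foo\n..bar\nbaz", "  ")  # => "  foo\n..bar\n  baz"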
+ def indent(text, tab) + text.gsub(/(^|\A)/, tab).gsub(/^ +\.\./, "..") + end + + def option(name, value) + ":#{name.to_s.capitalize}: #{value}\n" + end + + def paramwrap(name, text, options = {}) + options[:level] ||= 5 + #str = "#{name} : " + str = h(name, options[:level]) + str += "- **namevar**\n\n" if options[:namevar] + str += text + #str += text.gsub(/\n/, "\n ") + + str += "\n\n" + end + + def text + puts output + end + + def to_markdown(withcontents = true) + # First the header + text = h(@title, 1) + text += "\n\n**This page is autogenerated; any changes will get overwritten** *(last generated on #{Time.now.to_s})*\n\n" + + text += @header + + text += generate + + text += self.class.footer if withcontents + + text + end +end diff --git a/mcollective/lib/puppet/util/resource_template.rb b/mcollective/lib/puppet/util/resource_template.rb new file mode 100644 index 000000000..b12b125b5 --- /dev/null +++ b/mcollective/lib/puppet/util/resource_template.rb @@ -0,0 +1,61 @@ +require 'puppet/util' +require 'puppet/util/logging' +require 'erb' + +# A template wrapper that evaluates a template in the +# context of a resource, allowing the resource attributes +# to be looked up from within the template. +# This provides functionality essentially equivalent to +# the language's template() function. You pass your file +# path and the resource you want to use into the initialization +# method, then call result on the instance, and you get back +# a chunk of text. +# The resource's parameters are available as instance variables +# (as opposed to the language, where we use a method_missing trick). +# For example, say you have a resource that generates a file. You would +# need to implement the following style of `generate` method: +# +# def generate +# template = Puppet::Util::ResourceTemplate.new("/path/to/template", self) +# +# return Puppet::Type.type(:file).new :path => "/my/file", +# :content => template.evaluate +# end +# +# This generated file gets added to the catalog (which is what `generate` does), +# and its content is the result of the template. You need to use instance +# variables in your template, so if your template just needs to have the name +# of the generating resource, it would just have: +# +# <%= @name %> +# +# Since the ResourceTemplate class sets as instance variables all of the resource's +# parameters. +# +# Note that this example uses the generating resource as its source of +# parameters, which is generally most useful, since it allows you to configure +# the generated resource via the generating resource. +class Puppet::Util::ResourceTemplate + include Puppet::Util::Logging + + def evaluate + set_resource_variables + ERB.new(File.read(@file), 0, "-").result(binding) + end + + def initialize(file, resource) + raise ArgumentError, "Template #{file} does not exist" unless FileTest.exist?(file) + @file = file + @resource = resource + end + + private + + def set_resource_variables + @resource.to_hash.each do |param, value| + var = "@#{param.to_s}" + instance_variable_set(var, value) + end + end +end + diff --git a/mcollective/lib/puppet/util/run_mode.rb b/mcollective/lib/puppet/util/run_mode.rb new file mode 100644 index 000000000..450cbf1a6 --- /dev/null +++ b/mcollective/lib/puppet/util/run_mode.rb @@ -0,0 +1,81 @@ +module Puppet + module Util + class RunMode + def initialize(name) + @name = name.to_sym + end + + @@run_modes = Hash.new {|h, k| h[k] = RunMode.new(k)} + + attr :name + + def self.[](name) + @@run_modes[name] + end + + def master? 
+ name == :master + end + + def agent? + name == :agent + end + + def user? + name == :user + end + + def conf_dir + which_dir( + (Puppet.features.microsoft_windows? ? File.join(Dir::WINDOWS, "puppet", "etc") : "/etc/puppet"), + "~/.puppet" + ) + end + + def var_dir + which_dir( + (Puppet.features.microsoft_windows? ? File.join(Dir::WINDOWS, "puppet", "var") : "/var/lib/puppet"), + "~/.puppet/var" + ) + end + + def run_dir + "$vardir/run" + end + + def logopts + if master? + { + :default => "$vardir/log", + :mode => 0750, + :owner => "service", + :group => "service", + :desc => "The Puppet log directory." + } + else + ["$vardir/log", "The Puppet log directory."] + end + end + + private + + def which_dir( global, user ) + #FIXME: we should test if we're user "puppet" + # there's a comment that suggests that we do that + # and we currently don't. + expand_path case + when name == :master; global + when Puppet.features.root?; global + else user + end + end + + def expand_path( dir ) + require 'etc' + ENV["HOME"] ||= Etc.getpwuid(Process.uid).dir + File.expand_path(dir) + end + + end + end +end diff --git a/mcollective/lib/puppet/util/selinux.rb b/mcollective/lib/puppet/util/selinux.rb new file mode 100644 index 000000000..9d0e0a715 --- /dev/null +++ b/mcollective/lib/puppet/util/selinux.rb @@ -0,0 +1,216 @@ +# Provides utility functions to help interfaces Puppet to SELinux. +# +# This requires the very new SELinux Ruby bindings. These bindings closely +# mirror the SELinux C library interface. +# +# Support for the command line tools is not provided because the performance +# was abysmal. At this time (2008-11-02) the only distribution providing +# these Ruby SELinux bindings which I am aware of is Fedora (in libselinux-ruby). + +Puppet.features.selinux? # check, but continue even if it's not + +require 'pathname' + +module Puppet::Util::SELinux + + def selinux_support? + return false unless defined?(Selinux) + if Selinux.is_selinux_enabled == 1 + return true + end + false + end + + # Retrieve and return the full context of the file. If we don't have + # SELinux support or if the SELinux call fails then return nil. + def get_selinux_current_context(file) + return nil unless selinux_support? + retval = Selinux.lgetfilecon(file) + if retval == -1 + return nil + end + retval[1] + end + + # Retrieve and return the default context of the file. If we don't have + # SELinux support or if the SELinux call fails to file a default then return nil. + def get_selinux_default_context(file) + return nil unless selinux_support? + # If the filesystem has no support for SELinux labels, return a default of nil + # instead of what matchpathcon would return + return nil unless selinux_label_support?(file) + # If the file exists we should pass the mode to matchpathcon for the most specific + # matching. If not, we can pass a mode of 0. + begin + filestat = File.lstat(file) + mode = filestat.mode + rescue Errno::ENOENT + mode = 0 + end + retval = Selinux.matchpathcon(file, mode) + if retval == -1 + return nil + end + retval[1] + end + + # Take the full SELinux context returned from the tools and parse it + # out to the three (or four) component parts. Supports :seluser, :selrole, + # :seltype, and on systems with range support, :selrange. + def parse_selinux_context(component, context) + if context.nil? 
or context == "unlabeled" + return nil + end + unless context =~ /^([a-z0-9_]+):([a-z0-9_]+):([a-zA-Z0-9_]+)(?::([a-zA-Z0-9:,._-]+))?/ + raise Puppet::Error, "Invalid context to parse: #{context}" + end + ret = { + :seluser => $1, + :selrole => $2, + :seltype => $3, + :selrange => $4, + } + ret[component] + end + + # This updates the actual SELinux label on the file. You can update + # only a single component or update the entire context. + # The caveat is that since setting a partial context makes no sense the + # file has to already exist. Puppet (via the File resource) will always + # just try to set components, even if all values are specified by the manifest. + # I believe that the OS should always provide at least a fall-through context + # though on any well-running system. + def set_selinux_context(file, value, component = false) + return nil unless selinux_support? && selinux_label_support?(file) + + if component + # Must first get existing context to replace a single component + context = Selinux.lgetfilecon(file)[1] + if context == -1 + # We can't set partial context components when no context exists + # unless/until we can find a way to make Puppet call this method + # once for all selinux file label attributes. + Puppet.warning "Can't set SELinux context on file unless the file already has some kind of context" + return nil + end + context = context.split(':') + case component + when :seluser + context[0] = value + when :selrole + context[1] = value + when :seltype + context[2] = value + when :selrange + context[3] = value + else + raise ArgumentError, "set_selinux_context component must be one of :seluser, :selrole, :seltype, or :selrange" + end + context = context.join(':') + else + context = value + end + + retval = Selinux.lsetfilecon(file, context) + if retval == 0 + return true + else + Puppet.warning "Failed to set SELinux context #{context} on #{file}" + return false + end + end + + # Since this call relies on get_selinux_default_context it also needs a + # full non-relative path to the file. Fortunately, that seems to be all + # Puppet uses. This will set the file's SELinux context to the policy's + # default context (if any) if it differs from the context currently on + # the file. + def set_selinux_default_context(file) + new_context = get_selinux_default_context(file) + return nil unless new_context + cur_context = get_selinux_current_context(file) + if new_context != cur_context + set_selinux_context(file, new_context) + return new_context + end + nil + end + + # Internal helper function to read and parse /proc/mounts + def read_mounts + mounts = "" + begin + if File.instance_methods.include? "read_nonblock" + # If possible we use read_nonblock in a loop rather than read to work + # around a linux kernel bug. See ticket #1963 for details. + mountfh = File.open("/proc/mounts") + mounts += mountfh.read_nonblock(1024) while true + else + # Otherwise we shell out and let cat do it for us + mountfh = IO.popen("/bin/cat /proc/mounts") + mounts = mountfh.read + end + rescue EOFError + # that's expected + rescue + return nil + ensure + mountfh.close if mountfh + end + + mntpoint = {} + + # Read all entries in /proc/mounts. The second column is the + # mountpoint and the third column is the filesystem type.
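+    # A typical /proc/mounts line looks like (illustrative):
+    #   proc /proc proc rw,nosuid,nodev,noexec,relatime 0 0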
+ # We skip rootfs because it is always mounted at / + mounts.collect do |line| + params = line.split(' ') + next if params[2] == 'rootfs' + mntpoint[params[1]] = params[2] + end + mntpoint + end + + def realpath(path) + path, rest = Pathname.new(path), [] + path, rest = path.dirname, [path.basename] + rest while ! path.exist? + File.join( path.realpath, *rest ) + end + + def parent_directory(path) + Pathname.new(path).dirname.to_s + end + + # Internal helper function to return which type of filesystem a + # given file path resides on + def find_fs(path) + unless mnts = read_mounts + return nil + end + + # For a given file: + # Check if the filename is in the data structure; + # return the fstype if it is. + # Just in case: return something if you're down to "/" or "" + # Remove the last slash and everything after it, + # and repeat with that as the file for the next loop through. + path = realpath(path) + while not path.empty? + return mnts[path] if mnts.has_key?(path) + path = parent_directory(path) + end + mnts['/'] + end + + # Check filesystem a path resides on for SELinux support against + # whitelist of known-good filesystems. + # Returns true if the filesystem can support SELinux labels and + # false if not. + def selinux_label_support?(file) + fstype = find_fs(file) + return false if fstype.nil? + filesystems = ['ext2', 'ext3', 'ext4', 'gfs', 'gfs2', 'xfs', 'jfs'] + filesystems.include?(fstype) + end + +end diff --git a/mcollective/lib/puppet/util/settings.rb b/mcollective/lib/puppet/util/settings.rb new file mode 100644 index 000000000..889a16f96 --- /dev/null +++ b/mcollective/lib/puppet/util/settings.rb @@ -0,0 +1,937 @@ +require 'puppet' +require 'sync' +require 'getoptlong' +require 'puppet/external/event-loop' +require 'puppet/util/cacher' +require 'puppet/util/loadedfile' + +# The class for handling configuration files. +class Puppet::Util::Settings + include Enumerable + include Puppet::Util::Cacher + + require 'puppet/util/settings/setting' + require 'puppet/util/settings/file_setting' + require 'puppet/util/settings/boolean_setting' + + attr_accessor :file + attr_reader :timer + + ReadOnly = [:run_mode, :name] + + # Retrieve a config value + def [](param) + value(param) + end + + # Set a config value. This doesn't set the defaults, it sets the value itself. + def []=(param, value) + set_value(param, value, :memory) + end + + # Generate the list of valid arguments, in a format that GetoptLong can + # understand, and add them to the passed option list. + def addargs(options) + # Add all of the config parameters as valid options. + self.each { |name, setting| + setting.getopt_args.each { |args| options << args } + } + + options + end + + # Generate the list of valid arguments, in a format that OptionParser can + # understand, and add them to the passed option list. + def optparse_addargs(options) + # Add all of the config parameters as valid options. + self.each { |name, setting| + options << setting.optparse_args + } + + options + end + + # Is our parameter a boolean parameter? + def boolean?(param) + param = param.to_sym + !!(@config.include?(param) and @config[param].kind_of? BooleanSetting) + end + + # Remove all set values, potentially skipping cli values. + def clear(exceptcli = false) + @sync.synchronize do + unsafe_clear(exceptcli) + end + end + + # Remove all set values, potentially skipping cli values. 
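+  # (This is the non-synchronized worker used by #clear above, which wraps it
+  # in @sync.synchronize; callers outside this class should prefer #clear.)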
+ def unsafe_clear(exceptcli = false) + @values.each do |name, values| + @values.delete(name) unless exceptcli and name == :cli + end + + # Don't clear the 'used' in this case, since it's a config file reparse, + # and we want to retain this info. + @used = [] unless exceptcli + + @cache.clear + end + + # This is mostly just used for testing. + def clearused + @cache.clear + @used = [] + end + + # Do variable interpolation on the value. + def convert(value, environment = nil) + return value unless value + return value unless value.is_a? String + newval = value.gsub(/\$(\w+)|\$\{(\w+)\}/) do |value| + varname = $2 || $1 + if varname == "environment" and environment + environment + elsif pval = self.value(varname, environment) + pval + else + raise Puppet::DevError, "Could not find value for #{value}" + end + end + + newval + end + + # Return a value's description. + def description(name) + if obj = @config[name.to_sym] + obj.desc + else + nil + end + end + + def each + @config.each { |name, object| + yield name, object + } + end + + # Iterate over each section name. + def eachsection + yielded = [] + @config.each do |name, object| + section = object.section + unless yielded.include? section + yield section + yielded << section + end + end + end + + # Return an object by name. + def setting(param) + param = param.to_sym + @config[param] + end + + # Handle a command-line argument. + def handlearg(opt, value = nil) + @cache.clear + value &&= munge_value(value) + str = opt.sub(/^--/,'') + + bool = true + newstr = str.sub(/^no-/, '') + if newstr != str + str = newstr + bool = false + end + str = str.intern + + if @config[str].is_a?(Puppet::Util::Settings::BooleanSetting) + if value == "" or value.nil? + value = bool + end + end + + set_value(str, value, :cli) + end + + def include?(name) + name = name.intern if name.is_a? String + @config.include?(name) + end + + # check to see if a short name is already defined + def shortinclude?(short) + short = short.intern if short.is_a? String + @shortnames.include?(short) + end + + # Create a new collection of config settings. + def initialize + @config = {} + @shortnames = {} + + @created = [] + @searchpath = nil + + # Mutex-like thing to protect @values + @sync = Sync.new + + # Keep track of set values. + @values = Hash.new { |hash, key| hash[key] = {} } + + # And keep a per-environment cache + @cache = Hash.new { |hash, key| hash[key] = {} } + + # The list of sections we've used. + @used = [] + end + + # NOTE: ACS ahh the util classes. . .sigh + # as part of a fix for 1183, I pulled the logic for the following 5 methods out of the executables and puppet.rb + # They probably deserve their own class, but I don't want to do that until I can refactor environments + # it's a little better than where they were + + # Prints the contents of a config file with the available config settings, or it + # prints a single value of a config setting.
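+  # Illustrative usage, assuming the usual :configprint setting is defined
+  # elsewhere among the defaults:
+  #   Puppet.settings.handlearg("--configprint", "confdir,vardir")
+  #   Puppet.settings.print_config_options  # prints "confdir = ..." and "vardir = ..."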
+ def print_config_options + env = value(:environment) + val = value(:configprint) + if val == "all" + hash = {} + each do |name, obj| + val = value(name,env) + val = val.inspect if val == "" + hash[name] = val + end + hash.sort { |a,b| a[0].to_s <=> b[0].to_s }.each do |name, val| + puts "#{name} = #{val}" + end + else + val.split(/\s*,\s*/).sort.each do |v| + if include?(v) + #if there is only one value, just print it for back compatibility + if v == val + puts value(val,env) + break + end + puts "#{v} = #{value(v,env)}" + else + puts "invalid parameter: #{v}" + return false + end + end + end + true + end + + def generate_config + puts to_config + true + end + + def generate_manifest + puts to_manifest + true + end + + def print_configs + return print_config_options if value(:configprint) != "" + return generate_config if value(:genconfig) + generate_manifest if value(:genmanifest) + end + + def print_configs? + (value(:configprint) != "" || value(:genconfig) || value(:genmanifest)) && true + end + + # Return a given object's file metadata. + def metadata(param) + if obj = @config[param.to_sym] and obj.is_a?(FileSetting) + return [:owner, :group, :mode].inject({}) do |meta, p| + if v = obj.send(p) + meta[p] = v + end + meta + end + else + nil + end + end + + # Make a directory with the appropriate user, group, and mode + def mkdir(default) + obj = get_config_file_default(default) + + Puppet::Util::SUIDManager.asuser(obj.owner, obj.group) do + mode = obj.mode || 0750 + Dir.mkdir(obj.value, mode) + end + end + + # Figure out the section name for the run_mode. + def run_mode + Puppet.run_mode.name + end + + # Return all of the parameters associated with a given section. + def params(section = nil) + if section + section = section.intern if section.is_a? String + @config.find_all { |name, obj| + obj.section == section + }.collect { |name, obj| + name + } + else + @config.keys + end + end + + # Parse the configuration file. Just provides + # thread safety. + def parse + raise "No :config setting defined; cannot parse unknown config file" unless self[:config] + + @sync.synchronize do + unsafe_parse(self[:config]) + end + + # Create a timer so that this file will get checked automatically + # and reparsed if necessary. + set_filetimeout_timer + end + + # Unsafely parse the file -- this isn't thread-safe and causes plenty of problems if used directly. + def unsafe_parse(file) + return unless FileTest.exist?(file) + begin + data = parse_file(file) + rescue => details + puts details.backtrace if Puppet[:trace] + Puppet.err "Could not parse #{file}: #{details}" + return + end + + unsafe_clear(true) + + metas = {} + data.each do |area, values| + metas[area] = values.delete(:_meta) + values.each do |key,value| + set_value(key, value, area, :dont_trigger_handles => true, :ignore_bad_settings => true ) + end + end + + # Determine our environment, if we have one. + if @config[:environment] + env = self.value(:environment).to_sym + else + env = "none" + end + + # Call any hooks we should be calling. + settings_with_hooks.each do |setting| + each_source(env) do |source| + if value = @values[source][setting.name] + # We still have to use value to retrieve the value, since + # we want the fully interpolated value, not $vardir/lib or whatever. + # This results in extra work, but so few of the settings + # will have associated hooks that it ends up being less work this + # way overall. 
+ setting.handle(self.value(setting.name, env)) + break + end + end + end + + # We have to do it in the reverse of the search path, + # because multiple sections could set the same value + # and I'm too lazy to only set the metadata once. + searchpath.reverse.each do |source| + source = run_mode if source == :run_mode + source = @name if (@name && source == :name) + if meta = metas[source] + set_metadata(meta) + end + end + end + + # Create a new setting. The value is passed in because it's used to determine + # what kind of setting we're creating, but the value itself might be either + # a default or a value, so we can't actually assign it. + def newsetting(hash) + klass = nil + hash[:section] = hash[:section].to_sym if hash[:section] + if type = hash[:type] + unless klass = {:setting => Setting, :file => FileSetting, :boolean => BooleanSetting}[type] + raise ArgumentError, "Invalid setting type '#{type}'" + end + hash.delete(:type) + else + case hash[:default] + when true, false, "true", "false" + klass = BooleanSetting + when /^\$\w+\//, /^\//, /^\w:\// + klass = FileSetting + when String, Integer, Float # nothing + klass = Setting + else + raise ArgumentError, "Invalid value '#{hash[:default].inspect}' for #{hash[:name]}" + end + end + hash[:settings] = self + setting = klass.new(hash) + + setting + end + + # This has to be private, because it doesn't add the settings to @config + private :newsetting + + # Iterate across all of the objects in a given section. + def persection(section) + section = section.to_sym + self.each { |name, obj| + if obj.section == section + yield obj + end + } + end + + # Cache this in an easily clearable way, since we were + # having trouble cleaning it up after tests. + cached_attr(:file) do + if path = self[:config] and FileTest.exist?(path) + Puppet::Util::LoadedFile.new(path) + end + end + + # Reparse our config file, if necessary. + def reparse + if file and file.changed? + Puppet.notice "Reparsing #{file.file}" + parse + reuse + end + end + + def reuse + return unless defined?(@used) + @sync.synchronize do # yay, thread-safe + new = @used + @used = [] + self.use(*new) + end + end + + # The order in which to search for values. + def searchpath(environment = nil) + if environment + [:cli, :memory, environment, :run_mode, :main, :mutable_defaults] + else + [:cli, :memory, :run_mode, :main, :mutable_defaults] + end + end + + # Get a list of objects per section + def sectionlist + sectionlist = [] + self.each { |name, obj| + section = obj.section || "puppet" + sections[section] ||= [] + sectionlist << section unless sectionlist.include?(section) + sections[section] << obj + } + + return sectionlist, sections + end + + def service_user_available? + return @service_user_available if defined?(@service_user_available) + + return @service_user_available = false unless user_name = self[:user] + + user = Puppet::Type.type(:user).new :name => self[:user], :audit => :ensure + + @service_user_available = user.exists? + end + + def legacy_to_mode(type, param) + if not defined?(@app_names) + require 'puppet/util/command_line' + command_line = Puppet::Util::CommandLine.new + @app_names = Puppet::Util::CommandLine::LegacyName.inject({}) do |hash, pair| + app, legacy = pair + command_line.require_application app + hash[legacy.to_sym] = Puppet::Application.find(app).run_mode.name + hash + end + end + if new_type = @app_names[type] + Puppet.warning "You have configuration parameter $#{param} specified in [#{type}], which is a deprecated section. 
I'm assuming you meant [#{new_type}]" + return new_type + end + type + end + + def set_value(param, value, type, options = {}) + param = param.to_sym + unless setting = @config[param] + if options[:ignore_bad_settings] + return + else + raise ArgumentError, + "Attempt to assign a value to unknown configuration parameter #{param.inspect}" + end + end + value = setting.munge(value) if setting.respond_to?(:munge) + setting.handle(value) if setting.respond_to?(:handle) and not options[:dont_trigger_handles] + if ReadOnly.include? param and type != :mutable_defaults + raise ArgumentError, + "You're attempting to set configuration parameter $#{param}, which is read-only." + end + type = legacy_to_mode(type, param) + @sync.synchronize do # yay, thread-safe + # Allow later inspection to determine if the setting was set on the + # command line, or through some other code path. Used for the + # `dns_alt_names` option during cert generate. --daniel 2011-10-18 + setting.setbycli = true if type == :cli + + @values[type][param] = value + @cache.clear + + clearused + + # Clear the list of environments, because they cache, at least, the module path. + # We *could* preferentially just clear them if the modulepath is changed, + # but we don't really know if, say, the vardir is changed and the modulepath + # is defined relative to it. We need the defined?(stuff) because of loading + # order issues. + Puppet::Node::Environment.clear if defined?(Puppet::Node) and defined?(Puppet::Node::Environment) + end + + value + end + + # Set a bunch of defaults in a given section. The sections are actually pretty + # pointless, but they help break things up a bit, anyway. + def setdefaults(section, defs) + section = section.to_sym + call = [] + defs.each { |name, hash| + if hash.is_a? Array + unless hash.length == 2 + raise ArgumentError, "Defaults specified as an array must contain only the default value and the decription" + end + tmp = hash + hash = {} + [:default, :desc].zip(tmp).each { |p,v| hash[p] = v } + end + name = name.to_sym + hash[:name] = name + hash[:section] = section + raise ArgumentError, "Parameter #{name} is already defined" if @config.include?(name) + tryconfig = newsetting(hash) + if short = tryconfig.short + if other = @shortnames[short] + raise ArgumentError, "Parameter #{other.name} is already using short name '#{short}'" + end + @shortnames[short] = tryconfig + end + @config[name] = tryconfig + + # Collect the settings that need to have their hooks called immediately. + # We have to collect them so that we can be sure we're fully initialized before + # the hook is called. + call << tryconfig if tryconfig.call_on_define + } + + call.each { |setting| setting.handle(self.value(setting.name)) } + end + + # Create a timer to check whether the file should be reparsed. + def set_filetimeout_timer + return unless timeout = self[:filetimeout] and timeout = Integer(timeout) and timeout > 0 + timer = EventLoop::Timer.new(:interval => timeout, :tolerance => 1, :start? => true) { self.reparse } + end + + # Convert the settings we manage into a catalog full of resources that model those settings. + def to_catalog(*sections) + sections = nil if sections.empty? + + catalog = Puppet::Resource::Catalog.new("Settings") + + @config.values.find_all { |value| value.is_a?(FileSetting) }.each do |file| + next unless (sections.nil? 
or sections.include?(file.section)) + next unless resource = file.to_resource + next if catalog.resource(resource.ref) + + catalog.add_resource(resource) + end + + add_user_resources(catalog, sections) + + catalog + end + + # Convert our list of config settings into a configuration file. + def to_config + str = %{The configuration file for #{Puppet[:name]}. Note that this file +is likely to have unused configuration parameters in it; any parameter that's +valid anywhere in Puppet can be in any config file, even if it's not used. + +Every section can specify three special parameters: owner, group, and mode. +These parameters affect the required permissions of any files specified after +their specification. Puppet will sometimes use these parameters to check its +own configured state, so they can be used to make Puppet a bit more self-managing. + +Generated on #{Time.now}. + +}.gsub(/^/, "# ") + +# Add a section heading that matches our name. +if @config.include?(:run_mode) + str += "[#{self[:run_mode]}]\n" + end + eachsection do |section| + persection(section) do |obj| + str += obj.to_config + "\n" unless ReadOnly.include? obj.name or obj.name == :genconfig + end + end + + return str + end + + # Convert to a parseable manifest + def to_manifest + catalog = to_catalog + catalog.resource_refs.collect do |ref| + catalog.resource(ref).to_manifest + end.join("\n\n") + end + + # Create the necessary objects to use a section. This is idempotent; + # you can 'use' a section as many times as you want. + def use(*sections) + sections = sections.collect { |s| s.to_sym } + @sync.synchronize do # yay, thread-safe + sections = sections.reject { |s| @used.include?(s) } + + return if sections.empty? + + begin + catalog = to_catalog(*sections).to_ral + rescue => detail + puts detail.backtrace if Puppet[:trace] + Puppet.err "Could not create resources for managing Puppet's files and directories in sections #{sections.inspect}: #{detail}" + + # We need some way to get rid of any resources created during the catalog creation + # but not cleaned up. + return + end + + catalog.host_config = false + catalog.apply do |transaction| + if transaction.any_failed? + report = transaction.report + failures = report.logs.find_all { |log| log.level == :err } + raise "Got #{failures.length} failure(s) while initializing: #{failures.collect { |l| l.to_s }.join("; ")}" + end + end + + sections.each { |s| @used << s } + @used.uniq! + end + end + + def valid?(param) + param = param.to_sym + @config.has_key?(param) + end + + def uninterpolated_value(param, environment = nil) + param = param.to_sym + environment &&= environment.to_sym + + # See if we can find it within our searchable list of values + val = catch :foundval do + each_source(environment) do |source| + # Look for the value. We have to test the hash for whether + # it exists, because the value might be false. + @sync.synchronize do + throw :foundval, @values[source][param] if @values[source].include?(param) + end + end + throw :foundval, nil + end + + # If we didn't get a value, use the default + val = @config[param].default if val.nil? + + val + end + + # Find the correct value using our search path. Optionally accept an environment + # in which to search before the other configuration sections. + def value(param, environment = nil) + param = param.to_sym + environment &&= environment.to_sym + + # Short circuit to nil for undefined parameters. + return nil unless @config.include?(param) + + # Yay, recursion. 
+ #self.reparse unless [:config, :filetimeout].include?(param) + + # Check the cache first. It needs to be a per-environment + # cache so that we don't spread values from one env + # to another. + if cached = @cache[environment||"none"][param] + return cached + end + + val = uninterpolated_value(param, environment) + + if param == :code + # if we interpolate code, all hell breaks loose. + return val + end + + # Convert it if necessary + val = convert(val, environment) + + # And cache it + @cache[environment||"none"][param] = val + val + end + + # Open a file with the appropriate user, group, and mode + def write(default, *args, &bloc) + obj = get_config_file_default(default) + writesub(default, value(obj.name), *args, &bloc) + end + + # Open a non-default file under a default dir with the appropriate user, + # group, and mode + def writesub(default, file, *args, &bloc) + obj = get_config_file_default(default) + chown = nil + if Puppet.features.root? + chown = [obj.owner, obj.group] + else + chown = [nil, nil] + end + + Puppet::Util::SUIDManager.asuser(*chown) do + mode = obj.mode ? obj.mode.to_i : 0640 + args << "w" if args.empty? + + args << mode + + # Update the umask to make non-executable files + Puppet::Util.withumask(File.umask ^ 0111) do + File.open(file, *args) do |file| + yield file + end + end + end + end + + def readwritelock(default, *args, &bloc) + file = value(get_config_file_default(default).name) + tmpfile = file + ".tmp" + sync = Sync.new + raise Puppet::DevError, "Cannot create #{file}; directory #{File.dirname(file)} does not exist" unless FileTest.directory?(File.dirname(tmpfile)) + + sync.synchronize(Sync::EX) do + File.open(file, ::File::CREAT|::File::RDWR, 0600) do |rf| + rf.lock_exclusive do + if File.exist?(tmpfile) + raise Puppet::Error, ".tmp file already exists for #{file}; Aborting locked write. Check the .tmp file and delete if appropriate" + end + + # If there's a failure, remove our tmpfile + begin + writesub(default, tmpfile, *args, &bloc) + rescue + File.unlink(tmpfile) if FileTest.exist?(tmpfile) + raise + end + + begin + File.rename(tmpfile, file) + rescue => detail + Puppet.err "Could not rename #{file} to #{tmpfile}: #{detail}" + File.unlink(tmpfile) if FileTest.exist?(tmpfile) + end + end + end + end + end + + private + + def get_config_file_default(default) + obj = nil + unless obj = @config[default] + raise ArgumentError, "Unknown default #{default}" + end + + raise ArgumentError, "Default #{default} is not a file" unless obj.is_a? FileSetting + + obj + end + + # Create the transportable objects for users and groups. + def add_user_resources(catalog, sections) + return unless Puppet.features.root? + return unless self[:mkusers] + + @config.each do |name, setting| + next unless setting.respond_to?(:owner) + next unless sections.nil? or sections.include?(setting.section) + + if user = setting.owner and user != "root" and catalog.resource(:user, user).nil? + resource = Puppet::Resource.new(:user, user, :parameters => {:ensure => :present}) + resource[:gid] = self[:group] if self[:group] + catalog.add_resource resource + end + if group = setting.group and ! %w{root wheel}.include?(group) and catalog.resource(:group, group).nil? + catalog.add_resource Puppet::Resource.new(:group, group, :parameters => {:ensure => :present}) + end + end + end + + # Yield each search source in turn. + def each_source(environment) + searchpath(environment).each do |source| + # Modify the source as necessary. 
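+        # e.g. the plain searchpath [:cli, :memory, :run_mode, :main, :mutable_defaults]
+        # is yielded here with :run_mode replaced by the current run mode
+        # (:master, :agent or :user).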
+ source = self.run_mode if source == :run_mode + yield source + end + end + + # Return all settings that have associated hooks; this is so + # we can call them after parsing the configuration file. + def settings_with_hooks + @config.values.find_all { |setting| setting.respond_to?(:handle) } + end + + # Extract extra setting information for files. + def extract_fileinfo(string) + result = {} + value = string.sub(/\{\s*([^}]+)\s*\}/) do + params = $1 + params.split(/\s*,\s*/).each do |str| + if str =~ /^\s*(\w+)\s*=\s*([\w\d]+)\s*$/ + param, value = $1.intern, $2 + result[param] = value + raise ArgumentError, "Invalid file option '#{param}'" unless [:owner, :mode, :group].include?(param) + + if param == :mode and value !~ /^\d+$/ + raise ArgumentError, "File modes must be numbers" + end + else + raise ArgumentError, "Could not parse '#{string}'" + end + end + '' + end + result[:value] = value.sub(/\s*$/, '') + result + end + + # Convert arguments into booleans, integers, or whatever. + def munge_value(value) + # Handle different data types correctly + return case value + when /^false$/i; false + when /^true$/i; true + when /^\d+$/i; Integer(value) + when true; true + when false; false + else + value.gsub(/^["']|["']$/,'').sub(/\s+$/, '') + end + end + + # This method just turns a file in to a hash of hashes. + def parse_file(file) + text = read_file(file) + + result = Hash.new { |names, name| + names[name] = {} + } + + count = 0 + + # Default to 'main' for the section. + section = :main + result[section][:_meta] = {} + text.split(/\n/).each { |line| + count += 1 + case line + when /^\s*\[(\w+)\]\s*$/ + section = $1.intern # Section names + # Add a meta section + result[section][:_meta] ||= {} + when /^\s*#/; next # Skip comments + when /^\s*$/; next # Skip blanks + when /^\s*(\w+)\s*=\s*(.*?)\s*$/ # settings + var = $1.intern + + # We don't want to munge modes, because they're specified in octal, so we'll + # just leave them as a String, since Puppet handles that case correctly. + if var == :mode + value = $2 + else + value = munge_value($2) + end + + # Check to see if this is a file argument and it has extra options + begin + if value.is_a?(String) and options = extract_fileinfo(value) + value = options[:value] + options.delete(:value) + result[section][:_meta][var] = options + end + result[section][var] = value + rescue Puppet::Error => detail + detail.file = file + detail.line = line + raise + end + else + error = Puppet::Error.new("Could not match line #{line}") + error.file = file + error.line = line + raise error + end + } + + result + end + + # Read the file in. + def read_file(file) + begin + return File.read(file) + rescue Errno::ENOENT + raise ArgumentError, "No such file #{file}" + rescue Errno::EACCES + raise ArgumentError, "Permission denied to file #{file}" + end + end + + # Set file metadata. + def set_metadata(meta) + meta.each do |var, values| + values.each do |param, value| + @config[var].send(param.to_s + "=", value) + end + end + end +end diff --git a/mcollective/lib/puppet/util/settings/boolean_setting.rb b/mcollective/lib/puppet/util/settings/boolean_setting.rb new file mode 100644 index 000000000..e4678c9b4 --- /dev/null +++ b/mcollective/lib/puppet/util/settings/boolean_setting.rb @@ -0,0 +1,30 @@ +require 'puppet/util/settings/setting' + +# A simple boolean. 
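+# Illustrative #munge behaviour (the bool_setting object below is hypothetical):
+#   bool_setting.munge("true")   # => true
+#   bool_setting.munge(false)    # => false
+#   bool_setting.munge("maybe")  # raises ArgumentError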
+class Puppet::Util::Settings::BooleanSetting < Puppet::Util::Settings::Setting + # get the arguments in getopt format + def getopt_args + if short + [["--#{name}", "-#{short}", GetoptLong::NO_ARGUMENT], ["--no-#{name}", GetoptLong::NO_ARGUMENT]] + else + [["--#{name}", GetoptLong::NO_ARGUMENT], ["--no-#{name}", GetoptLong::NO_ARGUMENT]] + end + end + + def optparse_args + if short + ["--[no-]#{name}", "-#{short}", desc, :NONE ] + else + ["--[no-]#{name}", desc, :NONE] + end + end + + def munge(value) + case value + when true, "true"; return true + when false, "false"; return false + else + raise ArgumentError, "Invalid value '#{value.inspect}' for #{@name}" + end + end +end diff --git a/mcollective/lib/puppet/util/settings/file_setting.rb b/mcollective/lib/puppet/util/settings/file_setting.rb new file mode 100644 index 000000000..776398ef4 --- /dev/null +++ b/mcollective/lib/puppet/util/settings/file_setting.rb @@ -0,0 +1,124 @@ +require 'puppet/util/settings/setting' + +# A file. +class Puppet::Util::Settings::FileSetting < Puppet::Util::Settings::Setting + AllowedOwners = %w{root service} + AllowedGroups = %w{root service} + + class SettingError < StandardError; end + + attr_accessor :mode, :create + + # Should we create files, rather than just directories? + def create_files? + create + end + + def group=(value) + unless AllowedGroups.include?(value) + identifying_fields = [desc,name,default].compact.join(': ') + raise SettingError, "Internal error: The :group setting for #{identifying_fields} must be 'service', not '#{value}'" + end + @group = value + end + + def group + return unless @group + @settings[:group] + end + + def owner=(value) + unless AllowedOwners.include?(value) + identifying_fields = [desc,name,default].compact.join(': ') + raise SettingError, "Internal error: The :owner setting for #{identifying_fields} must be either 'root' or 'service', not '#{value}'" + end + @owner = value + end + + def owner + return unless @owner + return "root" if @owner == "root" or ! use_service_user? + @settings[:user] + end + + def use_service_user? + @settings[:mkusers] or @settings.service_user_available? + end + + # Set the type appropriately. Yep, a hack. This supports either naming + # the variable 'dir', or adding a slash at the end. + def munge(value) + # If it's not a fully qualified path... + if value.is_a?(String) and value !~ /^\$/ and value != 'false' + # Make it one + value = File.expand_path(value) + end + if value.to_s =~ /\/$/ + @type = :directory + return value.sub(/\/$/, '') + end + value + end + + # Return the appropriate type. + def type + value = @settings.value(self.name) + if @name.to_s =~ /dir/ + return :directory + elsif value.to_s =~ /\/$/ + return :directory + elsif value.is_a? String + return :file + else + return nil + end + end + + # Turn our setting thing into a Puppet::Resource instance. + def to_resource + return nil unless type = self.type + + path = self.value + + return nil unless path.is_a?(String) + + # Make sure the paths are fully qualified. + path = File.expand_path(path) + + return nil unless type == :directory or create_files? or File.exist?(path) + return nil if path =~ /^\/dev/ + + resource = Puppet::Resource.new(:file, path) + + if Puppet[:manage_internal_file_permissions] + resource[:mode] = self.mode if self.mode + + if Puppet.features.root? 
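+        # File ownership can only be managed by root, so the owner/group
+        # attributes just below are applied to the generated resource only
+        # when running with root privileges.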
+ resource[:owner] = self.owner if self.owner + resource[:group] = self.group if self.group + end + end + + resource[:ensure] = type + resource[:loglevel] = :debug + resource[:links] = :follow + resource[:backup] = false + + resource.tag(self.section, self.name, "settings") + + resource + end + + # Make sure any provided variables look up to something. + def validate(value) + return true unless value.is_a? String + value.scan(/\$(\w+)/) { |name| + name = $1 + unless @settings.include?(name) + raise ArgumentError, + "Settings parameter '#{name}' is undefined" + end + } + end +end + diff --git a/mcollective/lib/puppet/util/settings/setting.rb b/mcollective/lib/puppet/util/settings/setting.rb new file mode 100644 index 000000000..07409eefe --- /dev/null +++ b/mcollective/lib/puppet/util/settings/setting.rb @@ -0,0 +1,94 @@ +# The base element type. +class Puppet::Util::Settings::Setting + attr_accessor :name, :section, :default, :setbycli, :call_on_define + attr_reader :desc, :short + + def desc=(value) + @desc = value.gsub(/^\s*/, '') + end + + # get the arguments in getopt format + def getopt_args + if short + [["--#{name}", "-#{short}", GetoptLong::REQUIRED_ARGUMENT]] + else + [["--#{name}", GetoptLong::REQUIRED_ARGUMENT]] + end + end + + # get the arguments in OptionParser format + def optparse_args + if short + ["--#{name}", "-#{short}", desc, :REQUIRED] + else + ["--#{name}", desc, :REQUIRED] + end + end + + def hook=(block) + meta_def :handle, &block + end + + # Create the new element. Pretty much just sets the name. + def initialize(args = {}) + unless @settings = args.delete(:settings) + raise ArgumentError.new("You must refer to a settings object") + end + + args.each do |param, value| + method = param.to_s + "=" + raise ArgumentError, "#{self.class} does not accept #{param}" unless self.respond_to? method + + self.send(method, value) + end + + raise ArgumentError, "You must provide a description for the #{self.name} config option" unless self.desc + end + + def iscreated + @iscreated = true + end + + def iscreated? + @iscreated + end + + def set? + !!(!@value.nil?) + end + + # short name for the celement + def short=(value) + raise ArgumentError, "Short names can only be one character." if value.to_s.length != 1 + @short = value.to_s + end + + # Convert the object to a config statement. + def to_config + str = @desc.gsub(/^/, "# ") + "\n" + + # Add in a statement about the default. + str += "# The default value is '#{@default}'.\n" if @default + + # If the value has not been overridden, then print it out commented + # and unconverted, so it's clear that that's the default and how it + # works. + value = @settings.value(self.name) + + if value != @default + line = "#{@name} = #{value}" + else + line = "# #{@name} = #{@default}" + end + + str += line + "\n" + + str.gsub(/^/, " ") + end + + # Retrieves the value, or if it's not set, retrieves the default. + def value + @settings.value(self.name) + end +end + diff --git a/mcollective/lib/puppet/util/storage.rb b/mcollective/lib/puppet/util/storage.rb new file mode 100644 index 000000000..abc0bbcde --- /dev/null +++ b/mcollective/lib/puppet/util/storage.rb @@ -0,0 +1,96 @@ +require 'yaml' +require 'sync' + +require 'puppet/util/file_locking' + +# a class for storing state +class Puppet::Util::Storage + include Singleton + include Puppet::Util + + def self.state + @@state + end + + def initialize + self.class.load + end + + # Return a hash that will be stored to disk. 
It's worth noting + # here that we use the object's full path, not just the name/type + # combination. At the least, this is useful for those non-isomorphic + # types like exec, but it also means that if an object changes locations + # in the configuration it will lose its cache. + def self.cache(object) + if object.is_a?(Symbol) + name = object + else + name = object.to_s + end + + @@state[name] ||= {} + end + + def self.clear + @@state.clear + Storage.init + end + + def self.init + @@state = {} + @@splitchar = "\t" + end + + self.init + + def self.load + Puppet.settings.use(:main) unless FileTest.directory?(Puppet[:statedir]) + + unless File.exists?(Puppet[:statefile]) + self.init unless !@@state.nil? + return + end + unless File.file?(Puppet[:statefile]) + Puppet.warning("Checksumfile #{Puppet[:statefile]} is not a file, ignoring") + return + end + Puppet::Util.benchmark(:debug, "Loaded state") do + Puppet::Util::FileLocking.readlock(Puppet[:statefile]) do |file| + begin + @@state = YAML.load(file) + rescue => detail + Puppet.err "Checksumfile #{Puppet[:statefile]} is corrupt (#{detail}); replacing" + begin + File.rename(Puppet[:statefile], Puppet[:statefile] + ".bad") + rescue + raise Puppet::Error, + "Could not rename corrupt #{Puppet[:statefile]}; remove manually" + end + end + end + end + + unless @@state.is_a?(Hash) + Puppet.err "State got corrupted" + self.init + end + + #Puppet.debug "Loaded state is #{@@state.inspect}" + end + + def self.stateinspect + @@state.inspect + end + + def self.store + Puppet.debug "Storing state" + + Puppet.info "Creating state file #{Puppet[:statefile]}" unless FileTest.exist?(Puppet[:statefile]) + + Puppet::Util.benchmark(:debug, "Stored state") do + Puppet::Util::FileLocking.writelock(Puppet[:statefile], 0660) do |file| + file.print YAML.dump(@@state) + end + end + end +end diff --git a/mcollective/lib/puppet/util/subclass_loader.rb b/mcollective/lib/puppet/util/subclass_loader.rb new file mode 100644 index 000000000..3fb048835 --- /dev/null +++ b/mcollective/lib/puppet/util/subclass_loader.rb @@ -0,0 +1,82 @@ +# A module for loading subclasses into an array and retrieving +# them by name. Also sets up a method for each class so +# that you can just do Klass.subclass, rather than Klass.subclass(:subclass). +# +# This module is currently used by network handlers and clients. +module Puppet::Util::SubclassLoader + attr_accessor :loader, :classloader + + # Iterate over each of the subclasses. + def each + @subclasses ||= [] + @subclasses.each { |c| yield c } + end + + # The hook method that sets up subclass loading. We need the name + # of the method to create and the path in which to look for them. + def handle_subclasses(name, path) + raise ArgumentError, "Must be a class to use SubclassLoader" unless self.is_a?(Class) + @subclasses = [] + + @loader = Puppet::Util::Autoload.new( + self, + + path, :wrap => false + ) + + @subclassname = name + + @classloader = self + + # Now create a method for retrieving these subclasses by name. Note + # that we're defining a class method here, not an instance. + meta_def(name) do |subname| + subname = subname.to_s.downcase + + unless c = @subclasses.find { |c| c.name.to_s.downcase == subname } + loader.load(subname) + c = @subclasses.find { |c| c.name.to_s.downcase == subname } + + # Now make the method that returns this subclass. This way we + # normally avoid the method_missing method. + define_method(subname) { c } if c and ! respond_to?(subname) + end + return c + end + end + + # Add a new class to our list. 
Note that this has to handle subclasses of + # subclasses, thus the reason we're keeping track of the @@classloader. + def inherited(sub) + @subclasses ||= [] + sub.classloader = self.classloader + if self.classloader == self + @subclasses << sub + else + @classloader.inherited(sub) + end + end + + # See if we can load a class. + def method_missing(method, *args) + unless self == self.classloader + super + end + return nil unless defined?(@subclassname) + self.send(@subclassname, method) || nil + end + + # Retrieve or calculate a name. + def name(dummy_argument=:work_arround_for_ruby_GC_bug) + @name ||= self.to_s.sub(/.+::/, '').intern + + @name + end + + # Provide a list of all subclasses. + def subclasses + @loader.loadall + @subclasses.collect { |klass| klass.name } + end +end + diff --git a/mcollective/lib/puppet/util/suidmanager.rb b/mcollective/lib/puppet/util/suidmanager.rb new file mode 100644 index 000000000..d93915567 --- /dev/null +++ b/mcollective/lib/puppet/util/suidmanager.rb @@ -0,0 +1,155 @@ +require 'puppet/util/warnings' +require 'forwardable' +require 'etc' + +module Puppet::Util::SUIDManager + include Puppet::Util::Warnings + extend Forwardable + + # Note groups= is handled specially due to a bug in OS X 10.6 + to_delegate_to_process = [ :euid=, :euid, :egid=, :egid, :uid=, :uid, :gid=, :gid, :groups ] + + to_delegate_to_process.each do |method| + def_delegator Process, method + module_function method + end + + def osx_maj_ver + return @osx_maj_ver unless @osx_maj_ver.nil? + require 'facter' + # 'kernel' is available without explicitly loading all facts + if Facter.value('kernel') != 'Darwin' + @osx_maj_ver = false + return @osx_maj_ver + end + # But 'macosx_productversion_major' requires it. + Facter.loadfacts + @osx_maj_ver = Facter.value('macosx_productversion_major') + end + module_function :osx_maj_ver + + def groups=(grouplist) + if osx_maj_ver == '10.6' + return true + else + return Process.groups = grouplist + end + end + module_function :groups= + + def self.root? + Process.uid == 0 + end + + # Methods to handle changing uid/gid of the running process. In general, + # these will noop or fail on Windows, and require root to change to anything + # but the current uid/gid (which is a noop). + + # Runs block setting euid and egid if provided then restoring original ids. + # If running on Windows or without root, the block will be run with the + # current euid/egid. + def asuser(new_uid=nil, new_gid=nil) + return yield if Puppet.features.microsoft_windows? + return yield unless root? + return yield unless new_uid or new_gid + + old_euid, old_egid = self.euid, self.egid + begin + change_privileges(new_uid, new_gid, false) + + yield + ensure + change_privileges(new_uid ? old_euid : nil, old_egid, false) + end + end + module_function :asuser + + # If `permanently` is set, will permanently change the uid/gid of the + # process. If not, it will only set the euid/egid. If only uid is supplied, + # the primary group of the supplied gid will be used. If only gid is + # supplied, only gid will be changed. This method will fail if used on + # Windows. + def change_privileges(uid=nil, gid=nil, permanently=false) + return unless uid or gid + + unless gid + uid = convert_xid(:uid, uid) + gid = Etc.getpwuid(uid).gid + end + + change_group(gid, permanently) + change_user(uid, permanently) if uid + end + module_function :change_privileges + + # Changes the egid of the process if `permanently` is not set, otherwise + # changes gid. 
This method will fail if used on Windows, or attempting to + # change to a different gid without root. + def change_group(group, permanently=false) + gid = convert_xid(:gid, group) + raise Puppet::Error, "No such group #{group}" unless gid + + if permanently + Process::GID.change_privilege(gid) + else + Process.egid = gid + end + end + module_function :change_group + + # As change_group, but operates on uids. If changing user permanently, + # supplementary groups will be set the to default groups for the new uid. + def change_user(user, permanently=false) + uid = convert_xid(:uid, user) + raise Puppet::Error, "No such user #{user}" unless uid + + if permanently + # If changing uid, we must be root. So initgroups first here. + initgroups(uid) + + Process::UID.change_privilege(uid) + else + # We must be root to initgroups, so initgroups before dropping euid if + # we're root, otherwise elevate euid before initgroups. + # change euid (to root) first. + if Process.euid == 0 + initgroups(uid) + Process.euid = uid + else + Process.euid = uid + initgroups(uid) + end + end + end + module_function :change_user + + # Make sure the passed argument is a number. + def convert_xid(type, id) + map = {:gid => :group, :uid => :user} + raise ArgumentError, "Invalid id type #{type}" unless map.include?(type) + ret = Puppet::Util.send(type, id) + if ret == nil + raise Puppet::Error, "Invalid #{map[type]}: #{id}" + end + ret + end + module_function :convert_xid + + # Initialize primary and supplemental groups to those of the target user. We + # take the UID and manually look up their details in the system database, + # including username and primary group. This method will fail on Windows, or + # if used without root to initgroups of another user. + def initgroups(uid) + pwent = Etc.getpwuid(uid) + Process.initgroups(pwent.name, pwent.gid) + end + + module_function :initgroups + + def run_and_capture(command, new_uid=nil, new_gid=nil) + output = Puppet::Util.execute(command, :failonfail => false, :combine => true, :uid => new_uid, :gid => new_gid) + [output, $CHILD_STATUS.dup] + end + module_function :run_and_capture +end + diff --git a/mcollective/lib/puppet/util/tagging.rb b/mcollective/lib/puppet/util/tagging.rb new file mode 100644 index 000000000..6323ee08c --- /dev/null +++ b/mcollective/lib/puppet/util/tagging.rb @@ -0,0 +1,56 @@ +# Created on 2008-01-19 +# Copyright Luke Kanies + +# A common module to handle tagging. +module Puppet::Util::Tagging + # Add a tag to our current list. These tags will be added to all + # of the objects contained in this scope. + def tag(*ary) + @tags ||= [] + + qualified = [] + + ary.collect { |tag| tag.to_s.downcase }.each do |tag| + fail(Puppet::ParseError, "Invalid tag #{tag.inspect}") unless valid_tag?(tag) + qualified << tag if tag.include?("::") + @tags << tag unless @tags.include?(tag) + end + + handle_qualified_tags( qualified ) + end + + # Are we tagged with the provided tag? + def tagged?(*tags) + not ( self.tags & tags.flatten.collect { |t| t.to_s } ).empty? + end + + # Return a copy of the tag list, so someone can't ask for our tags + # and then modify them. + def tags + @tags ||= [] + @tags.dup + end + + def tags=(tags) + @tags = [] + + return if tags.nil? 
or tags == "" + + tags = tags.strip.split(/\s*,\s*/) if tags.is_a?(String) + + tags.each do |t| + tag(t) + end + end + + private + + def handle_qualified_tags( qualified ) + # LAK:NOTE See http://snurl.com/21zf8 [groups_google_com] + qualified.collect { |name| x = name.split("::") }.flatten.each { |tag| @tags << tag unless @tags.include?(tag) } + end + + def valid_tag?(tag) + tag =~ /^\w[-\w:.]*$/ + end +end diff --git a/mcollective/lib/puppet/util/user_attr.rb b/mcollective/lib/puppet/util/user_attr.rb new file mode 100644 index 000000000..9b190580b --- /dev/null +++ b/mcollective/lib/puppet/util/user_attr.rb @@ -0,0 +1,21 @@ +class UserAttr + def self.get_attributes_by_name(name) + attributes = nil + + File.readlines('/etc/user_attr').each do |line| + next if line =~ /^#/ + + token = line.split(':') + + if token[0] == name + attributes = {:name => name} + token[4].split(';').each do |attr| + key_value = attr.split('=') + attributes[key_value[0].intern] = key_value[1].strip + end + break + end + end + attributes + end +end diff --git a/mcollective/lib/puppet/util/warnings.rb b/mcollective/lib/puppet/util/warnings.rb new file mode 100644 index 000000000..7e26feaa0 --- /dev/null +++ b/mcollective/lib/puppet/util/warnings.rb @@ -0,0 +1,29 @@ +# Methods to help with handling warnings. +module Puppet::Util::Warnings + module_function + + def notice_once(msg) + Puppet::Util::Warnings.maybe_log(msg, self.class) { Puppet.notice msg } + end + + + def warnonce(msg) + Puppet::Util::Warnings.maybe_log(msg, self.class) { Puppet.warning msg } + end + + def clear_warnings + @stampwarnings = {} + nil + end + + protected + + def self.maybe_log(message, klass) + @stampwarnings ||= {} + @stampwarnings[klass] ||= [] + return nil if @stampwarnings[klass].include? message + yield + @stampwarnings[klass] << message + nil + end +end diff --git a/mcollective/lib/puppet/util/zaml.rb b/mcollective/lib/puppet/util/zaml.rb new file mode 100644 index 000000000..b22dfc199 --- /dev/null +++ b/mcollective/lib/puppet/util/zaml.rb @@ -0,0 +1,320 @@ +# +# ZAML -- A partial replacement for YAML, writen with speed and code clarity +# in mind. ZAML fixes one YAML bug (loading Exceptions) and provides +# a replacement for YAML.dump unimaginatively called ZAML.dump, +# which is faster on all known cases and an order of magnitude faster +# with complex structures. +# +# http://github.com/hallettj/zaml +# +# Authors: Markus Roberts, Jesse Hallett, Ian McIntosh, Igal Koshevoy, Simon Chiang +# + +require 'yaml' + +class ZAML + VERSION = "0.1.1" + # + # Class Methods + # + def self.dump(stuff, where='') + z = new + stuff.to_zaml(z) + Label.counter_reset + where << z.to_s + end + # + # Instance Methods + # + def initialize + @result = [] + @indent = nil + @structured_key_prefix = nil + Label.counter_reset + emit('--- ') + end + def nested(tail=' ') + old_indent = @indent + @indent = "#{@indent || "\n"}#{tail}" + yield + @indent = old_indent + end + class Label + # + # YAML only wants objects in the datastream once; if the same object + # occurs more than once, we need to emit a label ("&idxxx") on the + # first occurrence and then emit a back reference (*idxxx") on any + # subsequent occurrence(s). + # + # To accomplish this we keeps a hash (by object id) of the labels of + # the things we serialize as we begin to serialize them. 
The labels + # initially serialize as an empty string (since most objects are only + # going to be be encountered once), but can be changed to a valid + # (by assigning it a number) the first time it is subsequently used, + # if it ever is. Note that we need to do the label setup BEFORE we + # start to serialize the object so that circular structures (in + # which we will encounter a reference to the object as we serialize + # it can be handled). + # + def self.counter_reset + @@previously_emitted_object = {} + @@next_free_label_number = 0 + end + def initialize(obj) + @this_label_number = nil + @obj = obj # prevent garbage collection so that object id isn't reused + @@previously_emitted_object[obj.object_id] = self + end + def to_s + @this_label_number ? ('&id%03d ' % @this_label_number) : '' + end + def reference + @this_label_number ||= (@@next_free_label_number += 1) + @reference ||= '*id%03d' % @this_label_number + end + def self.for(obj) + @@previously_emitted_object[obj.object_id] + end + end + def new_label_for(obj) + Label.new(obj) + end + def first_time_only(obj) + if label = Label.for(obj) + emit(label.reference) + else + if @structured_key_prefix and not obj.is_a? String + emit(@structured_key_prefix) + @structured_key_prefix = nil + end + emit(new_label_for(obj)) + yield + end + end + def emit(s) + @result << s + @recent_nl = false + end + def nl(s='') + emit(@indent || "\n") unless @recent_nl + emit(s) + @recent_nl = true + end + def to_s + @result.join + end + def prefix_structured_keys(x) + @structured_key_prefix = x + yield + nl unless @structured_key_prefix + @structured_key_prefix = nil + end +end + +################################################################ +# +# Behavior for custom classes +# +################################################################ + +class Object + def to_yaml_properties + instance_variables.sort # Default YAML behavior + end + def yaml_property_munge(x) + x + end + def zamlized_class_name(root) + cls = self.class + "!ruby/#{root.name.downcase}#{cls == root ? '' : ":#{cls.respond_to?(:name) ? cls.name : cls}"}" + end + def to_zaml(z) + z.first_time_only(self) { + z.emit(zamlized_class_name(Object)) + z.nested { + instance_variables = to_yaml_properties + if instance_variables.empty? + z.emit(" {}") + else + instance_variables.each { |v| + z.nl + v[1..-1].to_zaml(z) # Remove leading '@' + z.emit(': ') + yaml_property_munge(instance_variable_get(v)).to_zaml(z) + } + end + } + } + end +end + +################################################################ +# +# Behavior for built-in classes +# +################################################################ + +class NilClass + def to_zaml(z) + z.emit('') # NOTE: blank turns into nil in YAML.load + end +end + +class Symbol + def to_zaml(z) + z.emit(self.inspect) + end +end + +class TrueClass + def to_zaml(z) + z.emit('true') + end +end + +class FalseClass + def to_zaml(z) + z.emit('false') + end +end + +class Numeric + def to_zaml(z) + z.emit(self) + end +end + +class Regexp + def to_zaml(z) + z.first_time_only(self) { z.emit("#{zamlized_class_name(Regexp)} #{inspect}") } + end +end + +class Exception + def to_zaml(z) + z.emit(zamlized_class_name(Exception)) + z.nested { + z.nl("message: ") + message.to_zaml(z) + } + end + # + # Monkey patch for buggy Exception restore in YAML + # + # This makes it work for now but is not very future-proof; if things + # change we'll most likely want to remove this. 
To mitigate the risks + # as much as possible, we test for the bug before appling the patch. + # + if respond_to? :yaml_new and yaml_new(self, :tag, "message" => "blurp").message != "blurp" + def self.yaml_new( klass, tag, val ) + o = YAML.object_maker( klass, {} ).exception(val.delete( 'message')) + val.each_pair do |k,v| + o.instance_variable_set("@#{k}", v) + end + o + end + end +end + +class String + ZAML_ESCAPES = %w{\x00 \x01 \x02 \x03 \x04 \x05 \x06 \a \x08 \t \n \v \f \r \x0e \x0f \x10 \x11 \x12 \x13 \x14 \x15 \x16 \x17 \x18 \x19 \x1a \e \x1c \x1d \x1e \x1f } + def escaped_for_zaml + gsub( /\x5C/, "\\\\\\" ). # Demi-kludge for Maglev/rubinius; the regexp should be /\\/ but parsetree chokes on that. + gsub( /"/, "\\\"" ). + gsub( /([\x00-\x1F])/ ) { |x| ZAML_ESCAPES[ x.unpack("C")[0] ] }. + gsub( /([\x80-\xFF])/ ) { |x| "\\x#{x.unpack("C")[0].to_s(16)}" } + end + def to_zaml(z) + num = '[-+]?(0x)?\d+\.?\d*' + case + when self == '' + z.emit('""') + # when self =~ /[\x00-\x08\x0B\x0C\x0E-\x1F\x80-\xFF]/ + # z.emit("!binary |\n") + # z.emit([self].pack("m*")) + when ( + (self =~ /\A(true|false|yes|no|on|null|off|#{num}(:#{num})*|!|=|~)$/i) or + (self =~ /\A\n* /) or + (self =~ /[\s:]$/) or + (self =~ /^[>|][-+\d]*\s/i) or + (self[-1..-1] =~ /\s/) or + (self =~ /[\x00-\x08\x0B\x0C\x0E-\x1F\x80-\xFF]/) or + (self =~ /[,\[\]\{\}\r\t]|:\s|\s#/) or + (self =~ /\A([-:?!#&*'"]|<<|%.+:.)/) + ) + z.emit("\"#{escaped_for_zaml}\"") + when self =~ /\n/ + if self[-1..-1] == "\n" then z.emit('|+') else z.emit('|-') end + z.nested { split("\n",-1).each { |line| z.nl; z.emit(line.chomp("\n")) } } + else + z.emit(self) + end + end +end + +class Hash + def to_zaml(z) + z.first_time_only(self) { + z.nested { + if empty? + z.emit('{}') + else + each_pair { |k, v| + z.nl + z.prefix_structured_keys('? ') { k.to_zaml(z) } + z.emit(': ') + v.to_zaml(z) + } + end + } + } + end +end + +class Array + def to_zaml(z) + z.first_time_only(self) { + z.nested { + if empty? + z.emit('[]') + else + each { |v| z.nl('- '); v.to_zaml(z) } + end + } + } + end +end + +class Time + def to_zaml(z) + # 2008-12-06 10:06:51.373758 -07:00 + ms = ("%0.6f" % (usec * 1e-6)).sub(/^\d+\./,'') + offset = "%+0.2i:%0.2i" % [utc_offset / 3600, (utc_offset / 60) % 60] + z.emit(self.strftime("%Y-%m-%d %H:%M:%S.#{ms} #{offset}")) + end +end + +class Date + def to_zaml(z) + z.emit(strftime('%Y-%m-%d')) + end +end + +class Range + def to_zaml(z) + z.first_time_only(self) { + z.emit(zamlized_class_name(Range)) + z.nested { + z.nl + z.emit('begin: ') + z.emit(first) + z.nl + z.emit('end: ') + z.emit(last) + z.nl + z.emit('excl: ') + z.emit(exclude_end?) 
+      }
+    }
+  end
+end
diff --git a/mcollective/modules/one/Modulefile b/mcollective/modules/one/Modulefile
new file mode 100644
index 000000000..30777b10a
--- /dev/null
+++ b/mcollective/modules/one/Modulefile
@@ -0,0 +1,6 @@
+name 'one'
+version '0.0.1'
+author 'Mirantis Inc'
+license 'Apache'
+summary 'Test module: one'
+description 'Test module: one'
diff --git a/mcollective/modules/one/files/file_one b/mcollective/modules/one/files/file_one
new file mode 100644
index 000000000..58c9bdf9d
--- /dev/null
+++ b/mcollective/modules/one/files/file_one
@@ -0,0 +1 @@
+111
diff --git a/mcollective/modules/one/manifests/init.pp b/mcollective/modules/one/manifests/init.pp
new file mode 100644
index 000000000..df81398db
--- /dev/null
+++ b/mcollective/modules/one/manifests/init.pp
@@ -0,0 +1,19 @@
+class one (
+  $first = "First parameter",
+  $second = "Second parameter"
+  ) {
+  notify {"sample notification":
+    message => "Class parameters: first: $first, second: $second"
+  }
+
+  file {"sample file":
+    path => "/tmp/sample_file",
+    source => "puppet:///modules/one/file_one"
+  }
+
+  file {"sample template":
+    path => "/tmp/sample_template",
+    content => template("one/template_one.erb")
+  }
+
+  }
diff --git a/mcollective/modules/one/templates/template_one.erb b/mcollective/modules/one/templates/template_one.erb
new file mode 100644
index 000000000..b17bdb9a5
--- /dev/null
+++ b/mcollective/modules/one/templates/template_one.erb
@@ -0,0 +1,3 @@
+111
+<%= @first %>
+222
diff --git a/mcollective/yaml/node.yaml b/mcollective/yaml/node.yaml
new file mode 100644
index 000000000..8ce34920c
--- /dev/null
+++ b/mcollective/yaml/node.yaml
@@ -0,0 +1,7 @@
+classes:
+  one:
+    first: first param
+    second: second param
+parameters:
+  one: one
+  two: two
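The classes/parameters layout in mcollective/yaml/node.yaml matches what Puppet expects back from an external node classifier (ENC), so one plausible way to consume it is a small ENC wrapper that simply prints the file. The sketch below is illustrative only and not part of the change; the script name enc.rb, the install path /etc/puppet/yaml/node.yaml, and the puppet.conf settings mentioned afterwards are assumptions.

#!/usr/bin/env ruby
# enc.rb -- hypothetical ENC wrapper, shown for illustration only.
# Puppet invokes the ENC with the node name as the first argument and
# expects a YAML hash with "classes" (and optionally "parameters") on
# stdout -- exactly the shape of mcollective/yaml/node.yaml.
require 'yaml'

node_yaml = ENV['NODE_YAML'] || '/etc/puppet/yaml/node.yaml' # assumed path
node_name = ARGV[0] # unused here: every node gets the same classification

classification = YAML.load_file(node_yaml)
puts YAML.dump(classification)

Wired in via node_terminus = exec and external_nodes = /path/to/enc.rb in puppet.conf, this would declare class one with first and second taken from the YAML values instead of the defaults in manifests/init.pp.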