[mcollective] Added mcollective code (not finished yet)

Vladimir Kozhukalov 2012-09-07 12:29:31 +04:00 committed by default
parent c1a6cd3611
commit d3a6b858f4
584 changed files with 65627 additions and 0 deletions

2
mcollective/README Normal file
View File

@ -0,0 +1,2 @@
* rubygems
* amqp >= 0.9.7
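
Example install of the AMQP dependency (a sketch, assuming RubyGems is already present on the node):

  gem install amqp -v '>= 0.9.7'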

View File

@ -0,0 +1,14 @@
metadata :name => "Fake Agent",
:description => "Fake Agent",
:author => "Mirantis Inc.",
:license => "Apache License 2.0",
:version => "0.0.1",
:url => "http://mirantis.com",
:timeout => 20
action "echo", :description => "Echo request message" do
output :output,
:description => "Just request message",
:display_as => "Echo message"
end

12
mcollective/agent/fake.rb Normal file
View File

@ -0,0 +1,12 @@
module MCollective
module Agent
class Fake < RPC::Agent
action "echo" do
validate :msg, String
reply[:msg] = "Hello, it is my reply: #{request[:msg]}"
end
end
end
end
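
# Usage sketch (illustrative, not part of the agent): once this agent and its
# DDL are deployed on a node, the echo action can be exercised from an
# MCollective client, e.g.
#
#   mco rpc fake echo msg="hello"
#
# The reply carries the request text back in :msg, as built above.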

View File

@ -0,0 +1,23 @@
metadata \
:name => "Naily Agent",
:description => "Naily Agent",
:author => "Mirantis Inc.",
:license => "Apache License 2.0",
:version => "0.0.1",
:url => "http://mirantis.com",
:timeout => 300
action "runonce", :description => "Runs puppet apply" do
output \
:output,
:description => "Response message",
:display_as => "Response message"
end
action "echo", :description => "Echo request message" do
output \
:output,
:description => "Just echo request message",
:display_as => "Echo message"
end

View File

@ -0,0 +1,79 @@
module MCollective
module Agent
class Naily < RPC::Agent
metadata \
:name => "Naily Agent",
:description => "Naily Agent",
:author => "Mirantis Inc.",
:license => "Apache License 2.0",
:version => "0.0.1",
:url => "http://mirantis.com",
:timeout => 300
def startup_hook
@lockfile = @config.pluginconf["naily.lockfile"] ||
"/var/lock/naily.lock"
@puppet = @config.pluginconf["naily.puppet"] ||
"/usr/bin/puppet"
@puppetlog = @config.pluginconf["naily.puppetlog"] ||
"/var/log/puppet.log"
@puppetmodules = @config.pluginconf["naily.puppetmodules"] ||
"/etc/puppet/modules"
@sitepp = @config.pluginconf["naily.sitepp"] ||
"/etc/puppet/manifests/site.pp"
end
action "runonce" do
runonce
end
action "echo" do
validate :msg, String
reply[:msg] = "Hello, it is my reply: #{request[:msg]}"
end
private
def running?
status = run("flock -w 0 -o #{@lockfile} -c ''", :cwd => "/")
return true if status != 0
return false
end
def runonce
if running?
reply.fail "Agent is running at the moment"
else
runonce_background
end
end
def flock_command command
return "flock -w 0 -o #{@lockfile} -c \"#{command}\""
end
def runonce_background
cmd = [@puppet, "apply"]
cmd << ["-l", @puppetlog]
cmd << "--verbose"
cmd << "--debug"
cmd << ["--modulepath", @puppetmodules]
cmd << @sitepp
cmd = cmd.join(" ")
cmd = flock_command cmd
reply[:command] = cmd
reply[:status] = run(
cmd,
:stdout => :output,
:stderr => :err,
:chomp => true
)
end
end
end
end
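
# Configuration sketch (an assumption derived from the pluginconf keys read in
# startup_hook): each path can be overridden in the MCollective server config,
# otherwise the defaults shown above apply, e.g.
#
#   plugin.naily.lockfile      = /var/lock/naily.lock
#   plugin.naily.puppet        = /usr/bin/puppet
#   plugin.naily.puppetlog     = /var/log/puppet.log
#   plugin.naily.puppetmodules = /etc/puppet/modules
#   plugin.naily.sitepp        = /etc/puppet/manifests/site.pp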

View File

@ -0,0 +1,92 @@
require 'rubygems'
require 'eventmachine'
require 'amqp'
require 'json'
require 'lib/helpers'
class MyClient
include Helpers
def initialize
test_message_id = random_string
test_message_payload = JSON.dump({"msg_id" => test_message_id,
"action" => "status"})
test_message_metadata = {
:routing_key => "mcollective"
}
response_exname = test_message_id
response_qname = test_message_id
response_routing_key = test_message_id
request_exname = "nailgun.topic"
logger.debug("Connecting to rabbitmq")
AMQP.connect(:host => "localhost",
:port => 5672,
:username => "guest",
:password => "guest") do |connection|
@connection = connection
logger.debug("Initializing channel")
AMQP::Channel.new(connection) do |channel|
logger.debug("Initializing response exchange: #{response_exname}")
response_exchange = AMQP::Exchange.new(channel, :direct, response_exname,
:auto_delete => true)
logger.debug("Initializing response queue: #{response_qname}")
response_queue = AMQP::Queue.new(channel, response_qname,
:exclusive => true, :auto_delete => true)
logger.debug("Binding response queue to response exchange")
response_queue.bind(response_exchange, :routing_key => response_routing_key)
logger.debug("Subscribing to response queue")
response_queue.subscribe(:ack => true) do |metadata, payload|
logger.debug("Response:")
logger.debug("Response: metadata: #{metadata}")
logger.debug("Response: payload: #{payload}")
metadata.ack
response_queue.purge
response_queue.delete
response_exchange.delete
EM.stop
end
logger.debug("Initializing request exchange: #{request_exname}")
request_exchange = AMQP::Exchange.new(channel, :topic, request_exname)
logger.debug("Sending request: #{test_message_payload}")
request_exchange.publish(test_message_payload, test_message_metadata)
end
end
end
def disconnect &blk
@connection.close
yield if blk
end
end
EM.run do
myclient = MyClient.new
Signal.trap("TERM") do
puts "TERM signal has been caught"
myclient.disconnect do
EventMachine.stop
end
end
Signal.trap("INT") do
puts "INT signal has been caught"
myclient.disconnect do
EventMachine.stop
end
end
end

View File

@ -0,0 +1,66 @@
require 'rubygems'
require 'eventmachine'
require 'amqp'
require 'json'
require 'lib/helpers'
class MyServer
include Helpers
def initialize
logger.debug("Connecting to rabbitmq")
AMQP.connect(:host => "localhost",
:port => 5672,
:username => "guest",
:password => "guest") do |connection|
@connection = connection
logger.debug("Initializing channel")
AMQP::Channel.new(connection) do |channel|
server_exchange = AMQP::Exchange.new(channel, :topic, "nailgun.topic")
server_queue = AMQP::Queue.new(channel, "mcollective",
:exclusive => true, :auto_delete => true)
server_queue.bind(server_exchange, :routing_key => "mcollective")
server_queue.subscribe() do |metadata, payload|
logger.debug("Received message: #{payload}")
payload_parsed = JSON.parse(payload)
msg_id = payload_parsed["msg_id"]
exchange = AMQP::Exchange.new(channel, :direct, msg_id,
:auto_delete => true)
exchange.publish("Response", :routing_key => msg_id)
end
end
end
end
def disconnect &blk
@connection.close
yield if blk
end
end
EM.run do
myserver = MyServer.new
Signal.trap("TERM") do
puts "TERM signal has been caught"
myserver.disconnect do
EventMachine.stop
end
end
Signal.trap("INT") do
puts "INT signal has been caught"
myserver.disconnect do
EventMachine.stop
end
end
end

23
mcollective/bin/naily.rb Executable file
View File

@ -0,0 +1,23 @@
#!/usr/bin/env ruby
$LOAD_PATH.unshift(File.expand_path(File.join(File.dirname(__FILE__), "..", "lib")))
require 'rubygems'
require 'naily/server/config'
require 'naily/server/daemon'
Naily::Server::Config.define do |config|
config.amqp_host = "127.0.0.1"
config.amqp_port = 5672
config.amqp_username = "guest"
config.amqp_password = "guest"
config.topic_exchange_name = "nailgun.topic"
config.topic_queue_name = "mcollective"
config.topic_queue_routing_key = "mcollective"
end
daemon = Naily::Server::Daemon.new
daemon.run
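
# Runtime sketch (assumes RabbitMQ is reachable at the host/port configured
# above): running this script starts an EventMachine loop that consumes
# requests from the "mcollective" queue bound to the "nailgun.topic" exchange
# and shuts down cleanly on SIGINT.
#
#   ./bin/naily.rb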

View File

@ -0,0 +1,4 @@
require 'client/blocking_client'
client = BlockingClient.new
client.run

21
mcollective/bin/run_catalog.rb Executable file
View File

@ -0,0 +1,21 @@
#!/usr/bin/env ruby
$LOAD_PATH.unshift(File.expand_path(File.join(File.dirname(__FILE__), "..", "lib")))
require 'naily/framework/catalog'
def usage
puts "Usage: $0 <basedir> <nodename>"
exit 1
end
if not ARGV[0] or not ARGV[1]
usage
else
basedir = ARGV[0]
nodename = ARGV[1]
puts Naily::Framework::Catalog.get_catalog basedir, nodename
end
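
# Usage sketch (the path is illustrative): given a YAML node cache that
# contains <basedir>/<nodename>.yaml, compile and print that node's catalog:
#
#   ./run_catalog.rb /var/lib/puppet/yaml/node node-01.example.com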

View File

@ -0,0 +1,4 @@
require 'client/client'
client = Client.new
client.run

View File

@ -0,0 +1,10 @@
$LOAD_PATH << '../'
require 'client/client'
tc = TestClient.new
tc.discover
tc.run
tc.report
tc.disconnect

View File

@ -0,0 +1,20 @@
require 'amqp'
require 'naily/amqp/helpers'
module Naily
module Amqp
class DirectPublisher
include Helpers
def initialize channel, message
logger.debug("Publish message: payload: #{message}")
AMQP::Exchange.new(channel, :direct, message.exchange_name,
:auto_delete => true) do |exchange|
exchange.publish(message, :routing_key => message.routing_key) do
logger.debug("Publish message: complete")
end
end
end
end
end
end

View File

@ -0,0 +1,95 @@
require 'amqp'
require 'json'
require 'naily/amqp/helpers'
require 'naily/amqp/message'
require 'naily/amqp/topic_consumer'
require 'naily/amqp/direct_publisher'
require 'naily/amqp/handler'
# WE FOLLOW OPENSTACK RPC MODEL DESCRIBED HERE
# http://docs.openstack.org/developer/nova/devref/rpc.html
# RUBY AMQP RPC MODEL DESCRIBED HERE
# http://rubyamqp.info/articles/patterns_and_use_cases/
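# Example payloads in this model (a sketch inferred from the Request class in
# naily/amqp/message.rb, not a fixed wire format):
#
#   call (caller expects a response, routed back via msg_id):
#     {"msg_id": "c0ffee", "action": "status"}
#
#   cast (fire-and-forget):
#     {"action": "deploy"}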
module Naily
module Amqp
class Driver
include Helpers
def initialize options={}
default_options = {
:host => "localhost",
:port => 5672,
:username => "guest",
:password => "guest",
:topic_exchange_name => "nailgun.topic",
:topic_queue_name => "mcollective",
:topic_queue_routing_key => "mcollective",
}
opts = default_options.merge(options)
logger.debug("Connecting to rabbitmq")
AMQP.connect(:host => opts[:host],
:port => opts[:port],
:username => opts[:username],
:password => opts[:password]) do |connection|
@connection = connection
logger.debug("Initializing channel")
AMQP::Channel.new(connection) do |channel|
@channel = channel
TopicConsumer.new(self,
channel,
opts[:topic_exchange_name],
opts[:topic_queue_name],
opts[:topic_queue_routing_key])
end
end
end
def handle message
raise "Message is not valid" if not message.valid?
handler = Naily::Amqp::Handler.new message
response = handler.handle
DirectPublisher.new(@channel, response) if response
end
def disconnect &blk
@connection.close
yield if blk
end
# def ready? options={} &blk
# default_options = {
# :timeout => 10,
# :on_timeout => nil
# }
# options = default_options.merge(options)
# tick = 0.5
# n = 0
# timer = EM::PeriodicTimer.new(tick) do
# if @status == :ready
# timer.cancel
# yield true
# end
# if (n+=1) > options[:timeout] / tick
# @logger.error("Ready status timed out")
# timer.cancel
# if options[:on_timeout]
# options[:on_timeout].call if options[:on_timeout]
# else
# yield false
# end
# end
# end
# end
end
end
end

View File

@ -0,0 +1,48 @@
require 'naily/handler/echo'
require 'naily/handler/null'
require 'naily/handler/mco'
require 'naily/amqp/helpers'
require 'naily/amqp/message'
module Naily
module Amqp
class Handler
include Helpers
def initialize message
@message = message
@real_handler = get_real_handler
end
def get_real_handler
case @message.rpc_method.to_sym
when :echo
return Naily::Handler::Echo.new @message.rpc_method_args.to_hash
when :mco
return Naily::Handler::Mco.new @message.rpc_method_args.to_hash
else
return Naily::Handler::Null.new @message.rpc_method_args.to_hash
end
end
def handle
@real_handler.handle do |response|
response ||= {}
if @message.call?
body = {
:result => response,
:failure => nil,
:ending => false
}
options = {
:exchange_name => @message.msg_id,
:routing_key => @message.msg_id
}
return Response.new body, options
end
return nil
end
end
end
end
end

View File

@ -0,0 +1,25 @@
require 'logger'
module Naily
module Amqp
module Helpers
def logger
logger = ::Logger.new(STDOUT)
logger.level = ::Logger::DEBUG
logger
end
def random_string(length=16, downcase=true)
chars = ('a'..'z').to_a + ('A'..'Z').to_a + ('0'..'9').to_a
rnd = ""
length.times do |i|
rnd << chars[rand(chars.length)]
end
rnd.downcase! if downcase
rnd
end
end
end
end

View File

@ -0,0 +1,76 @@
require 'json'
require 'naily/amqp/helpers'
module Naily
module Amqp
class Message
include Helpers
attr_reader :metadata
def payload= p
# Accept either a JSON string (as delivered by AMQP) or an already-built hash.
@payload = p.is_a?(String) ? JSON.parse(p) : p
end
def payload
JSON.dump(@payload)
end
def valid?
return false if not @payload
true
end
def to_s
self.payload
end
end
class Request < Message
def initialize m=nil, p=nil
@metadata = m
self.payload = p
end
def valid?
call_valid_actions = ["status"]
cast_valid_actions = ["deploy"]
return false if not @payload
return false if not @payload["action"]
return false if self.call? and not call_valid_actions.include?(self.action)
return false if not self.call? and not cast_valid_actions.include?(self.action)
return false if self.call? and not @payload["msg_id"]
true
end
def call?
return true if @payload["msg_id"]
false
end
def msg_id
@payload["msg_id"]
end
def action
@payload["action"]
end
end
class Response < Message
attr_accessor :routing_key
attr_accessor :exchange_name
def initialize p=nil, options={}
self.payload = p
self.routing_key = options[:routing_key] if options[:routing_key]
self.exchange_name = options[:exchange_name] if options[:exchange_name]
end
end
end
end

View File

@ -0,0 +1,37 @@
require 'amqp'
require 'naily/amqp/helpers'
require 'naily/amqp/message'
module Naily
module Amqp
class TopicConsumer
include Helpers
def initialize parent, channel, exchange_name, queue_name, routing_key, &blk
logger.debug("Initializing topic consumer: exchange: #{exchange_name} "\
"queue: #{queue_name} routing_key: #{routing_key}")
@parent = parent
AMQP::Exchange.new(channel, :topic, exchange_name) do |exchange|
AMQP::Queue.new(channel, queue_name, :exclusive => true,
:auto_delete => true) do |queue|
queue.bind(exchange, :routing_key => routing_key) do
queue.subscribe(:ack => true) do |metadata, payload|
message = Request.new(metadata, payload)
logger.debug("Received message: #{message}")
if message.valid?
@parent.handle(message)
else
logger.error("Received message is not valid")
end
metadata.ack
end
if blk
yield self
end
end
end
end
end
end
end
end

View File

@ -0,0 +1,21 @@
require 'eventmachine'
module Naily
module Framework
class Async
def initialize instance
@instance = instance
end
def call method_name, *args, &blk
EM.defer(Proc.new {
method = @instance.method(method_name)
method.call(*args)
}, blk ? blk : nil)
end
end
end
end
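
# Usage sketch (assumes an EventMachine reactor is already running): defer a
# blocking client call to EM's thread pool and handle the result in a callback.
#
#   async = Naily::Framework::Async.new(client)
#   async.call(:run) { |result| puts "finished: #{result}" }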

View File

@ -0,0 +1,79 @@
require 'puppet'
require 'puppet/node'
require 'yaml'
require 'json'
require 'pp'
require 'logger'
require 'puppet/parser/compiler'
require 'puppet/indirector/yaml'
require 'puppet/indirector/request'
require 'puppet/indirector/node/exec'
require 'puppet/indirector/catalog/yaml'
require 'puppet/application'
require 'puppet/external/pson/common'
module Naily
module Framework
module Catalog
class Yaml < Puppet::Node::Exec
def initialize basepath
@basepath = basepath
end
def find(request)
if File.exist? path(request.key)
output = open(path(request.key)) do |file|
file.read
end
else
raise "File #{path(request.key)} does not exist"
end
# Translate the output to ruby.
result = translate(request.key, output)
create_node(request.key, result)
end
# This method is the same as that one in super class excluding
# that the facts are not merged into node
def create_node(name, result)
node = Puppet::Node.new(name)
set = false
[:parameters, :classes, :environment].each do |param|
if value = result[param]
node.send(param.to_s + "=", value)
set = true
end
end
node
end
def path name, ext=".yaml"
File.join(@basepath, name + ext)
end
end
def self.get_catalog basepath, nodename
request = Puppet::Indirector::Request.new('node', :find, nodename)
node_terminus = Yaml.new basepath
node = node_terminus.find(request)
compiler = Puppet::Parser::Compiler.new(node)
catalog = compiler.compile
catalog_json = PSON::generate(catalog.to_resource,
:allow_nan => true,
:max_nesting => false)
# jj JSON.load(catalog_json)
end
end
end
end

View File

@ -0,0 +1,15 @@
module Naily
module Framework
module Client
def available_roles
end
def runonce params={}
end
end
end
end

View File

@ -0,0 +1,15 @@
module Naily
module Handler
class Echo
def initialize args={}
@args = args
end
def handle &blk
yield @args
end
end
end
end

View File

@ -0,0 +1,28 @@
require 'naily/framework/async'
module Naily
module Handler
class Mco
def initialize args
@args = args
end
def handle &blk
case @args["client"]
when "simple"
client = Naily::MCClient::Simple.new
when "blocking"
client = Naily::MCClient::Blocking.new
else
raise "Unknown mcollective client"
end
async = Naily::Framework::Async.new client
async.call @args["action"], @args["action_args"] do |result|
yield({'result' => "Action ended: #{result}"})
end
end
end
end
end

View File

@ -0,0 +1,16 @@
require 'naily/framework/async'
module Naily
module Handler
class Null
def initialize args
end
def handle &blk
yield
end
end
end
end

View File

@ -0,0 +1,31 @@
require 'mcollective'
require 'naily/framework/client'
module Naily
module MCClient
class Blocking
include MCollective::RPC
include Naily::Framework::Client
def initialize
@mc = rpcclient('naily')
@mc.verbose = true
end
def run
responses = []
@mc.echo(:msg => "Testing fake agent plugin: before sleep").each do |response|
responses << "Response: from: #{response[:sender]} message: #{response[:data][:msg]}"
end
sleep 10
@mc.echo(:msg => "Testing fake agent plugin: after sleep").each do |response|
responses << "Response: from: #{response[:sender]} message: #{response[:data][:msg]}"
end
responses
end
def disconnect
@mc.disconnect
end
end
end
end

View File

@ -0,0 +1,27 @@
require 'mcollective'
require 'naily/framework/client'
module Naily
module MCClient
class Simple
include MCollective::RPC
include Naily::Framework::Client
def initialize
@mc = rpcclient('naily')
@mc.verbose = true
end
def run
responses = []
@mc.runonce().each do |response|
responses << response
end
responses
end
def disconnect
@mc.disconnect
end
end
end
end

View File

@ -0,0 +1,19 @@
module Naily
module Server
module Config
extend self
attr_accessor :amqp_host
attr_accessor :amqp_port
attr_accessor :amqp_username
attr_accessor :amqp_password
attr_accessor :topic_exchange_name
attr_accessor :topic_queue_name
attr_accessor :topic_queue_routing_key
def define
yield self
end
end
end
end

View File

@ -0,0 +1,38 @@
require 'logger'
require 'eventmachine'
require 'naily/amqp/driver'
require 'naily/server/config'
module Naily
module Server
class Daemon
def initialize
@logger = Logger.new(STDOUT)
@logger.level = Logger::DEBUG
@options = {
:host => Config.amqp_host,
:port => Config.amqp_port,
:username => Config.amqp_username,
:password => Config.amqp_password,
:topic_exchange_name => Config.topic_exchange_name,
:topic_queue_name => Config.topic_queue_name,
:topic_queue_routing_key => Config.topic_queue_routing_key
}
end
def run
EM.run do
driver = Naily::Amqp::Driver.new @options
Signal.trap("INT") do
@logger.debug("INT signal has been caught")
driver.disconnect do
EventMachine.stop
end
end
end
end
end
end
end

164
mcollective/lib/puppet.rb Normal file
View File

@ -0,0 +1,164 @@
# Try to load rubygems. Hey rubygems, I hate you.
begin
require 'rubygems'
rescue LoadError
end
# see the bottom of the file for further inclusions
require 'singleton'
require 'facter'
require 'puppet/error'
require 'puppet/util'
require 'puppet/util/autoload'
require 'puppet/util/settings'
require 'puppet/util/feature'
require 'puppet/util/suidmanager'
require 'puppet/util/run_mode'
#------------------------------------------------------------
# the top-level module
#
# all this really does is dictate how the whole system behaves, through
# preferences for things like debugging
#
# it's also a place to find top-level commands like 'debug'
module Puppet
PUPPETVERSION = '2.6.17'
def Puppet.version
PUPPETVERSION
end
class << self
include Puppet::Util
attr_reader :features
attr_writer :name
end
# the hash that determines how our system behaves
@@settings = Puppet::Util::Settings.new
# The services running in this process.
@services ||= []
require 'puppet/util/logging'
extend Puppet::Util::Logging
# The feature collection
@features = Puppet::Util::Feature.new('puppet/feature')
# Load the base features.
require 'puppet/feature/base'
# Store a new default value.
def self.setdefaults(section, hash)
@@settings.setdefaults(section, hash)
end
# configuration parameter access and stuff
def self.[](param)
if param == :debug
return Puppet::Util::Log.level == :debug
else
return @@settings[param]
end
end
# configuration parameter access and stuff
def self.[]=(param,value)
@@settings[param] = value
end
def self.clear
@@settings.clear
end
def self.debug=(value)
if value
Puppet::Util::Log.level=(:debug)
else
Puppet::Util::Log.level=(:notice)
end
end
def self.settings
@@settings
end
def self.run_mode
$puppet_application_mode || Puppet::Util::RunMode[:user]
end
def self.application_name
$puppet_application_name ||= "apply"
end
# Load all of the configuration parameters.
require 'puppet/defaults'
def self.genmanifest
if Puppet[:genmanifest]
puts Puppet.settings.to_manifest
exit(0)
end
end
# Parse the config file for this process.
def self.parse_config
Puppet.settings.parse
end
# XXX this should all be done using puppet objects, not using
# normal mkdir
def self.recmkdir(dir,mode = 0755)
if FileTest.exist?(dir)
return false
else
tmp = dir.sub(/^\//,'')
path = [File::SEPARATOR]
tmp.split(File::SEPARATOR).each { |dir|
path.push dir
if ! FileTest.exist?(File.join(path))
begin
Dir.mkdir(File.join(path), mode)
rescue Errno::EACCES => detail
Puppet.err detail.to_s
return false
rescue => detail
Puppet.err "Could not create #{path}: #{detail}"
return false
end
elsif FileTest.directory?(File.join(path))
next
else FileTest.exist?(File.join(path))
raise Puppet::Error, "Cannot create #{dir}: basedir #{File.join(path)} is a file"
end
}
return true
end
end
# Create a new type. Just proxy to the Type class.
def self.newtype(name, options = {}, &block)
Puppet::Type.newtype(name, options, &block)
end
# Retrieve a type by name. Just proxy to the Type class.
def self.type(name)
# LAK:DEP Deprecation notice added 12/17/2008
Puppet.warning "Puppet.type is deprecated; use Puppet::Type.type"
Puppet::Type.type(name)
end
end
require 'puppet/type'
require 'puppet/parser'
require 'puppet/resource'
require 'puppet/network'
require 'puppet/ssl'
require 'puppet/module'
require 'puppet/util/storage'
require 'puppet/status'
require 'puppet/file_bucket/file'

View File

@ -0,0 +1,107 @@
require 'sync'
require 'puppet/external/event-loop'
require 'puppet/application'
# A general class for triggering a run of another
# class.
class Puppet::Agent
require 'puppet/agent/locker'
include Puppet::Agent::Locker
attr_reader :client_class, :client, :splayed
# Just so we can specify that we are "the" instance.
def initialize(client_class)
@splayed = false
@client_class = client_class
end
def lockfile_path
client_class.lockfile_path
end
def needing_restart?
Puppet::Application.restart_requested?
end
# Perform a run with our client.
def run(*args)
if running?
Puppet.notice "Run of #{client_class} already in progress; skipping"
return
end
result = nil
block_run = Puppet::Application.controlled_run do
splay
with_client do |client|
begin
sync.synchronize { lock { result = client.run(*args) } }
rescue SystemExit,NoMemoryError
raise
rescue Exception => detail
puts detail.backtrace if Puppet[:trace]
Puppet.err "Could not run #{client_class}: #{detail}"
end
end
true
end
Puppet.notice "Shutdown/restart in progress; skipping run" unless block_run
result
end
def stopping?
Puppet::Application.stop_requested?
end
# Have we splayed already?
def splayed?
splayed
end
# Sleep when splay is enabled; else just return.
def splay
return unless Puppet[:splay]
return if splayed?
time = rand(Integer(Puppet[:splaylimit]) + 1)
Puppet.info "Sleeping for #{time} seconds (splay is enabled)"
sleep(time)
@splayed = true
end
# Start listening for events. We're pretty much just listening for
# timer events here.
def start
# Create our timer. Puppet will handle observing it and such.
timer = EventLoop::Timer.new(:interval => Puppet[:runinterval], :tolerance => 1, :start? => true) do
run
end
# Run once before we start following the timer
timer.sound_alarm
end
def sync
@sync ||= Sync.new
end
private
# Create and yield a client instance, keeping a reference
# to it during the yield.
def with_client
begin
@client = client_class.new
rescue SystemExit,NoMemoryError
raise
rescue Exception => detail
puts detail.backtrace if Puppet[:trace]
Puppet.err "Could not create instance of #{client_class}: #{detail}"
return
end
yield @client
ensure
@client = nil
end
end

View File

@ -0,0 +1,40 @@
require 'puppet/util/pidlock'
# Break out the code related to locking the agent. This module is just
# included into the agent, but having it here makes it easier to test.
module Puppet::Agent::Locker
# Let the daemon run again, freely in the filesystem.
def enable
lockfile.unlock(:anonymous => true)
end
# Stop the daemon from making any catalog runs.
def disable
lockfile.lock(:anonymous => true)
end
# Yield if we get a lock, else do nothing. Return
# true/false depending on whether we get the lock.
def lock
if lockfile.lock
begin
yield
ensure
lockfile.unlock
end
return true
else
return false
end
end
def lockfile
@lockfile ||= Puppet::Util::Pidlock.new(lockfile_path)
@lockfile
end
def running?
lockfile.locked?
end
end

View File

@ -0,0 +1,425 @@
require 'optparse'
require 'puppet/util/plugins'
# This class handles all the aspects of a Puppet application/executable
# * setting up options
# * setting up logs
# * choosing what to run
# * representing execution status
#
# === Usage
# An application is a subclass of Puppet::Application.
#
# For legacy compatibility,
# Puppet::Application[:example].run
# is equivalent to
# Puppet::Application::Example.new.run
#
#
# class Puppet::Application::Example < Puppet::Application
#
# def preinit
# # perform some pre initialization
# @all = false
# end
#
# # run_command is called to actually run the specified command
# def run_command
# send Puppet::Util::CommandLine.new.args.shift
# end
#
# # option uses metaprogramming to create a method
# # and also tells the option parser how to invoke that method
# option("--arg ARGUMENT") do |v|
# @args << v
# end
#
# option("--debug", "-d") do |v|
# @debug = v
# end
#
# option("--all", "-a:) do |v|
# @all = v
# end
#
# def handle_unknown(opt,arg)
# # last chance to manage an option
# ...
# # let's say to the framework we finally handle this option
# true
# end
#
# def read
# # read action
# end
#
# def write
# # writeaction
# end
#
# end
#
# === Preinit
# The preinit block is the first code to be called in your application, before option parsing,
# setup or command execution.
#
# === Options
# Puppet::Application uses +OptionParser+ to manage the application options.
# Options are defined with the +option+ method to which are passed various
# arguments, including the long option, the short option, a description...
# Refer to +OptionParser+ documentation for the exact format.
# * If the option method is given a block, this one will be called whenever
# the option is encountered in the command-line argument.
# * If the option method has no block, a default functionality will be used that
# stores the argument (or true/false if the option doesn't require an argument) in
# the global (to the application) options array.
# * If a given option was not defined by the +option+ method, but it exists as a Puppet setting:
# * if +unknown+ was used with a block, it will be called with the option name and argument
# * if +unknown+ wasn't used, then the option/argument is handed to Puppet.settings.handlearg for
# a default behavior
#
# --help is managed directly by the Puppet::Application class, but can be overridden.
#
# === Setup
# Applications can use the setup block to perform any initialization.
# The default +setup+ behaviour is to read the Puppet configuration and manage log level and destination.
#
# === What and how to run
# If the +dispatch+ block is defined it is called. This block should return the name of the registered command
# to be run.
# If it doesn't exist, it defaults to execute the +main+ command if defined.
#
# === Execution state
# The class attributes/methods of Puppet::Application serve as a global place to set and query the execution
# status of the application: stopping, restarting, etc. The setting of the application status does not directly
# affect its running status; it's assumed that the various components within the application will consult these
# settings appropriately and affect their own processing accordingly. Control operations (signal handlers and
# the like) should set the status appropriately to indicate to the overall system that it's in the process of
# stopping or restarting (or just running as usual).
#
# So, if something in your application needs to stop the process, for some reason, you might consider:
#
# def stop_me!
# # indicate that we're stopping
# Puppet::Application.stop!
# # ...do stuff...
# end
#
# And, if you have some component that involves a long-running process, you might want to consider:
#
# def my_long_process(giant_list_to_munge)
# giant_list_to_munge.collect do |member|
# # bail if we're stopping
# return if Puppet::Application.stop_requested?
# process_member(member)
# end
# end
module Puppet
class Application
require 'puppet/util'
include Puppet::Util
DOCPATTERN = File.expand_path(File.dirname(__FILE__) + "/util/command_line/*" )
class << self
include Puppet::Util
attr_accessor :run_status
def clear!
self.run_status = nil
end
def stop!
self.run_status = :stop_requested
end
def restart!
self.run_status = :restart_requested
end
# Indicates that Puppet::Application.restart! has been invoked and components should
# do what is necessary to facilitate a restart.
def restart_requested?
:restart_requested == run_status
end
# Indicates that Puppet::Application.stop! has been invoked and components should do what is necessary
# for a clean stop.
def stop_requested?
:stop_requested == run_status
end
# Indicates that one of stop! or start! was invoked on Puppet::Application, and some kind of process
# shutdown/short-circuit may be necessary.
def interrupted?
[:restart_requested, :stop_requested].include? run_status
end
# Indicates that Puppet::Application believes that it's in usual running run_mode (no stop/restart request
# currently active).
def clear?
run_status.nil?
end
# Only executes the given block if the run status of Puppet::Application is clear (no restarts, stops,
# etc. requested).
# Upon block execution, checks the run status again; if a restart has been requested during the block's
# execution, then controlled_run will send a new HUP signal to the current process.
# Thus, long-running background processes can potentially finish their work before a restart.
def controlled_run(&block)
return unless clear?
result = block.call
Process.kill(:HUP, $PID) if restart_requested?
result
end
def should_parse_config
@parse_config = true
end
def should_not_parse_config
@parse_config = false
end
def should_parse_config?
@parse_config = true if ! defined?(@parse_config)
@parse_config
end
# used to declare code that handle an option
def option(*options, &block)
long = options.find { |opt| opt =~ /^--/ }.gsub(/^--(?:\[no-\])?([^ =]+).*$/, '\1' ).gsub('-','_')
fname = symbolize("handle_#{long}")
if (block_given?)
define_method(fname, &block)
else
define_method(fname) do |value|
self.options["#{long}".to_sym] = value
end
end
self.option_parser_commands << [options, fname]
end
def banner(banner = nil)
@banner ||= banner
end
def option_parser_commands
@option_parser_commands ||= (
superclass.respond_to?(:option_parser_commands) ? superclass.option_parser_commands.dup : []
)
@option_parser_commands
end
def find(name)
klass = name.to_s.capitalize
# const_defined? is used before const_get since const_defined? will only
# check within our namespace, whereas const_get will check ancestor
# trees as well, resulting in unexpected behaviour.
if !self.const_defined?(klass)
puts "Unable to find application '#{name.to_s}'."
Kernel::exit(1)
end
self.const_get(klass)
end
def [](name)
find(name).new
end
# Sets or gets the run_mode name. Sets the run_mode name if a mode_name is
# passed. Otherwise, gets the run_mode or a default run_mode
#
def run_mode( mode_name = nil)
return @run_mode if @run_mode and not mode_name
require 'puppet/util/run_mode'
@run_mode = Puppet::Util::RunMode[ mode_name || :user ]
end
end
attr_reader :options, :command_line
# Every app responds to --version
option("--version", "-V") do |arg|
puts "#{Puppet.version}"
exit
end
# Every app responds to --help
option("--help", "-h") do |v|
help
end
def should_parse_config?
self.class.should_parse_config?
end
# override to execute code before running anything else
def preinit
end
def initialize(command_line = nil)
require 'puppet/util/command_line'
@command_line = command_line || Puppet::Util::CommandLine.new
set_run_mode self.class.run_mode
@options = {}
require 'puppet'
end
# WARNING: This is a totally scary, frightening, and nasty internal API. We
# strongly advise that you do not use this, and if you insist, we will
# politely allow you to keep both pieces of your broken code.
#
# We plan to provide a supported, long-term API to deliver this in a way
# that you can use. Please make sure that you let us know if you do require
# this, and this message is still present in the code. --daniel 2011-02-03
def set_run_mode(mode)
@run_mode = mode
$puppet_application_mode = @run_mode
$puppet_application_name = name
if Puppet.respond_to? :settings
# This is to reduce the amount of confusion in rspec
# because it might have loaded defaults.rb before the globals were set
# and thus have the wrong defaults for the current application
Puppet.settings.set_value(:confdir, Puppet.run_mode.conf_dir, :mutable_defaults)
Puppet.settings.set_value(:vardir, Puppet.run_mode.var_dir, :mutable_defaults)
Puppet.settings.set_value(:name, Puppet.application_name.to_s, :mutable_defaults)
Puppet.settings.set_value(:logdir, Puppet.run_mode.logopts, :mutable_defaults)
Puppet.settings.set_value(:rundir, Puppet.run_mode.run_dir, :mutable_defaults)
Puppet.settings.set_value(:run_mode, Puppet.run_mode.name.to_s, :mutable_defaults)
end
end
# This is the main application entry point
def run
exit_on_fail("initialize") { hook('preinit') { preinit } }
exit_on_fail("parse options") { hook('parse_options') { parse_options } }
exit_on_fail("parse configuration file") { Puppet.settings.parse } if should_parse_config?
exit_on_fail("prepare for execution") { hook('setup') { setup } }
exit_on_fail("run") { hook('run_command') { run_command } }
end
def main
raise NotImplementedError, "No valid command or main"
end
def run_command
main
end
def setup
# Handle the logging settings
if options[:debug] or options[:verbose]
Puppet::Util::Log.newdestination(:console)
if options[:debug]
Puppet::Util::Log.level = :debug
else
Puppet::Util::Log.level = :info
end
end
Puppet::Util::Log.newdestination(:syslog) unless options[:setdest]
end
def parse_options
# Create an option parser
option_parser = OptionParser.new(self.class.banner)
# Add all global options to it.
Puppet.settings.optparse_addargs([]).each do |option|
option_parser.on(*option) do |arg|
handlearg(option[0], arg)
end
end
# Add options that are local to this application, which were
# created using the "option()" metaprogramming method. If there
# are any conflicts, this application's options will be favored.
self.class.option_parser_commands.each do |options, fname|
option_parser.on(*options) do |value|
# Call the method that "option()" created.
self.send(fname, value)
end
end
# scan command line.
begin
option_parser.parse!(self.command_line.args)
rescue OptionParser::ParseError => detail
$stderr.puts detail
$stderr.puts "Try 'puppet #{command_line.subcommand_name} --help'"
exit(1)
end
end
def handlearg(opt, arg)
# rewrite --[no-]option to --no-option if that's what was given
if opt =~ /\[no-\]/ and !arg
opt = opt.gsub(/\[no-\]/,'no-')
end
# otherwise remove the [no-] prefix to not confuse everybody
opt = opt.gsub(/\[no-\]/, '')
unless respond_to?(:handle_unknown) and send(:handle_unknown, opt, arg)
# Puppet.settings.handlearg doesn't handle direct true/false :-)
if arg.is_a?(FalseClass)
arg = "false"
elsif arg.is_a?(TrueClass)
arg = "true"
end
Puppet.settings.handlearg(opt, arg)
end
end
# this is used for testing
def self.exit(code)
exit(code)
end
def name
self.class.to_s.sub(/.*::/,"").downcase.to_sym
end
def help
if Puppet.features.usage?
# RH:FIXME: My goodness, this is ugly.
::RDoc.const_set("PuppetSourceFile", name)
#:stopdoc: # Issue #4161
def (::RDoc).caller
docfile = `grep -l 'Puppet::Application\\[:#{::RDoc::PuppetSourceFile}\\]' #{DOCPATTERN}`.chomp
super << "#{docfile}:0"
end
#:startdoc:
::RDoc::usage && exit
else
puts "No help available unless you have RDoc::usage installed"
exit
end
rescue Errno::ENOENT
puts "No help available for puppet #{name}"
exit
end
private
def exit_on_fail(message, code = 1)
yield
rescue RuntimeError, NotImplementedError => detail
puts detail.backtrace if Puppet[:trace]
$stderr.puts "Could not #{message}: #{detail}"
exit(code)
end
def hook(step,&block)
Puppet::Plugins.send("before_application_#{step}",:application_object => self)
x = yield
Puppet::Plugins.send("after_application_#{step}",:application_object => self, :return_value => x)
x
end
end
end

View File

@ -0,0 +1,272 @@
require 'puppet/application'
class Puppet::Application::Agent < Puppet::Application
should_parse_config
run_mode :agent
attr_accessor :args, :agent, :daemon, :host
def preinit
# Do an initial trap, so that cancels don't get a stack trace.
Signal.trap(:INT) do
$stderr.puts "Cancelling startup"
exit(0)
end
{
:waitforcert => nil,
:detailed_exitcodes => false,
:verbose => false,
:debug => false,
:centrallogs => false,
:setdest => false,
:enable => false,
:disable => false,
:client => true,
:fqdn => nil,
:serve => [],
:digest => :MD5,
:fingerprint => false,
}.each do |opt,val|
options[opt] = val
end
@args = {}
require 'puppet/daemon'
@daemon = Puppet::Daemon.new
@daemon.argv = ARGV.dup
end
option("--centrallogging")
option("--disable")
option("--enable")
option("--debug","-d")
option("--fqdn FQDN","-f")
option("--test","-t")
option("--verbose","-v")
option("--fingerprint")
option("--digest DIGEST")
option("--serve HANDLER", "-s") do |arg|
if Puppet::Network::Handler.handler(arg)
options[:serve] << arg.to_sym
else
raise "Could not find handler for #{arg}"
end
end
option("--no-client") do |arg|
options[:client] = false
end
option("--detailed-exitcodes") do |arg|
options[:detailed_exitcodes] = true
end
option("--logdest DEST", "-l DEST") do |arg|
begin
Puppet::Util::Log.newdestination(arg)
options[:setdest] = true
rescue => detail
puts detail.backtrace if Puppet[:debug]
$stderr.puts detail.to_s
end
end
option("--waitforcert WAITFORCERT", "-w") do |arg|
options[:waitforcert] = arg.to_i
end
option("--port PORT","-p") do |arg|
@args[:Port] = arg
end
def run_command
return fingerprint if options[:fingerprint]
return onetime if Puppet[:onetime]
main
end
def fingerprint
unless cert = host.certificate || host.certificate_request
$stderr.puts "Fingerprint asked but no certificate nor certificate request have yet been issued"
exit(1)
return
end
unless fingerprint = cert.fingerprint(options[:digest])
raise ArgumentError, "Could not get fingerprint for digest '#{options[:digest]}'"
end
puts fingerprint
end
def onetime
unless options[:client]
$stderr.puts "onetime is specified but there is no client"
exit(43)
return
end
@daemon.set_signal_traps
begin
report = @agent.run
rescue => detail
puts detail.backtrace if Puppet[:trace]
Puppet.err detail.to_s
end
if not report
exit(1)
elsif options[:detailed_exitcodes] then
exit(report.exit_status)
else
exit(0)
end
end
def main
Puppet.notice "Starting Puppet client version #{Puppet.version}"
@daemon.start
end
# Enable all of the most common test options.
def setup_test
Puppet.settings.handlearg("--ignorecache")
Puppet.settings.handlearg("--no-usecacheonfailure")
Puppet.settings.handlearg("--no-splay")
Puppet.settings.handlearg("--show_diff")
Puppet.settings.handlearg("--no-daemonize")
options[:verbose] = true
Puppet[:onetime] = true
options[:detailed_exitcodes] = true
end
# Handle the logging settings.
def setup_logs
if options[:debug] or options[:verbose]
Puppet::Util::Log.newdestination(:console)
if options[:debug]
Puppet::Util::Log.level = :debug
else
Puppet::Util::Log.level = :info
end
end
Puppet::Util::Log.newdestination(:syslog) unless options[:setdest]
end
def enable_disable_client(agent)
if options[:enable]
agent.enable
elsif options[:disable]
agent.disable
end
exit(0)
end
def setup_listen
unless FileTest.exists?(Puppet[:rest_authconfig])
Puppet.err "Will not start without authorization file #{Puppet[:rest_authconfig]}"
exit(14)
end
handlers = nil
if options[:serve].empty?
handlers = [:Runner]
else
handlers = options[:serve]
end
require 'puppet/network/server'
# No REST handlers yet.
server = Puppet::Network::Server.new(:xmlrpc_handlers => handlers, :port => Puppet[:puppetport])
@daemon.server = server
end
def setup_host
@host = Puppet::SSL::Host.new
waitforcert = options[:waitforcert] || (Puppet[:onetime] ? 0 : 120)
cert = @host.wait_for_cert(waitforcert) unless options[:fingerprint]
end
def setup
setup_test if options[:test]
setup_logs
exit(Puppet.settings.print_configs ? 0 : 1) if Puppet.settings.print_configs?
# If noop is set, then also enable diffs
Puppet[:show_diff] = true if Puppet[:noop]
args[:Server] = Puppet[:server]
if options[:fqdn]
args[:FQDN] = options[:fqdn]
Puppet[:certname] = options[:fqdn]
end
if options[:centrallogs]
logdest = args[:Server]
logdest += ":" + args[:Port] if args.include?(:Port)
Puppet::Util::Log.newdestination(logdest)
end
Puppet.settings.use :main, :agent, :ssl
# Always ignoreimport for agent. It really shouldn't even try to import,
# but this is just a temporary band-aid.
Puppet[:ignoreimport] = true
# We need to specify a ca location for all of the SSL-related
# indirected classes to work; in fingerprint mode we just need
# access to the local files and we don't need a ca.
Puppet::SSL::Host.ca_location = options[:fingerprint] ? :none : :remote
Puppet::Transaction::Report.terminus_class = :rest
# we want the last report to be persisted locally
Puppet::Transaction::Report.cache_class = :yaml
# Override the default; puppetd needs this, usually.
# You can still override this on the command-line with, e.g., :compiler.
Puppet[:catalog_terminus] = :rest
# Override the default.
Puppet[:facts_terminus] = :facter
Puppet::Resource::Catalog.cache_class = :yaml
# We need to make the client either way; we just don't start it
# if --no-client is set.
require 'puppet/agent'
require 'puppet/configurer'
@agent = Puppet::Agent.new(Puppet::Configurer)
enable_disable_client(@agent) if options[:enable] or options[:disable]
@daemon.agent = agent if options[:client]
# It'd be nice to daemonize later, but we have to daemonize before the
# waitforcert happens.
@daemon.daemonize if Puppet[:daemonize]
setup_host
@objects = []
# This has to go after the certs are dealt with.
if Puppet[:listen]
unless Puppet[:onetime]
setup_listen
else
Puppet.notice "Ignoring --listen on onetime run"
end
end
end
end

View File

@ -0,0 +1,171 @@
require 'puppet/application'
class Puppet::Application::Apply < Puppet::Application
should_parse_config
option("--debug","-d")
option("--execute EXECUTE","-e") do |arg|
options[:code] = arg
end
option("--loadclasses","-L")
option("--verbose","-v")
option("--use-nodes")
option("--detailed-exitcodes")
option("--apply catalog", "-a catalog") do |arg|
options[:catalog] = arg
end
option("--logdest LOGDEST", "-l") do |arg|
begin
Puppet::Util::Log.newdestination(arg)
options[:logset] = true
rescue => detail
$stderr.puts detail.to_s
end
end
def run_command
if options[:catalog]
apply
elsif Puppet[:parseonly]
parseonly
else
main
end
end
def apply
if options[:catalog] == "-"
text = $stdin.read
else
text = File.read(options[:catalog])
end
begin
catalog = Puppet::Resource::Catalog.convert_from(Puppet::Resource::Catalog.default_format,text)
catalog = Puppet::Resource::Catalog.pson_create(catalog) unless catalog.is_a?(Puppet::Resource::Catalog)
rescue => detail
raise Puppet::Error, "Could not deserialize catalog from pson: #{detail}"
end
catalog = catalog.to_ral
require 'puppet/configurer'
configurer = Puppet::Configurer.new
configurer.run :catalog => catalog
end
def parseonly
# Set our code or file to use.
if options[:code] or command_line.args.length == 0
Puppet[:code] = options[:code] || STDIN.read
else
Puppet[:manifest] = command_line.args.shift
end
begin
Puppet::Node::Environment.new(Puppet[:environment]).known_resource_types
rescue => detail
Puppet.err detail
exit 1
end
exit 0
end
def main
# Set our code or file to use.
if options[:code] or command_line.args.length == 0
Puppet[:code] = options[:code] || STDIN.read
else
manifest = command_line.args.shift
raise "Could not find file #{manifest}" unless File.exist?(manifest)
Puppet.warning("Only one file can be applied per run. Skipping #{command_line.args.join(', ')}") if command_line.args.size > 0
Puppet[:manifest] = manifest
end
# Collect our facts.
unless facts = Puppet::Node::Facts.find(Puppet[:node_name_value])
raise "Could not find facts for #{Puppet[:node_name_value]}"
end
unless Puppet[:node_name_fact].empty?
Puppet[:node_name_value] = facts.values[Puppet[:node_name_fact]]
facts.name = Puppet[:node_name_value]
end
# Find our Node
unless node = Puppet::Node.find(Puppet[:node_name_value])
raise "Could not find node #{Puppet[:node_name_value]}"
end
# Merge in the facts.
node.merge(facts.values)
# Allow users to load the classes that puppet agent creates.
if options[:loadclasses]
file = Puppet[:classfile]
if FileTest.exists?(file)
unless FileTest.readable?(file)
$stderr.puts "#{file} is not readable"
exit(63)
end
node.classes = File.read(file).split(/[\s\n]+/)
end
end
begin
# Compile our catalog
starttime = Time.now
catalog = Puppet::Resource::Catalog.find(node.name, :use_node => node)
# Translate it to a RAL catalog
catalog = catalog.to_ral
catalog.finalize
catalog.retrieval_duration = Time.now - starttime
require 'puppet/configurer'
configurer = Puppet::Configurer.new
report = configurer.run(:skip_plugin_download => true, :catalog => catalog)
if not report
exit(1)
elsif options[:detailed_exitcodes] then
exit(report.exit_status)
else
exit(0)
end
rescue => detail
puts detail.backtrace if Puppet[:trace]
$stderr.puts detail.message
exit(1)
end
end
def setup
exit(Puppet.settings.print_configs ? 0 : 1) if Puppet.settings.print_configs?
# If noop is set, then also enable diffs
Puppet[:show_diff] = true if Puppet[:noop]
Puppet::Util::Log.newdestination(:console) unless options[:logset]
client = nil
server = nil
Signal.trap(:INT) do
$stderr.puts "Exiting"
exit(1)
end
# we want the last report to be persisted locally
Puppet::Transaction::Report.cache_class = :yaml
if options[:debug]
Puppet::Util::Log.level = :debug
elsif options[:verbose]
Puppet::Util::Log.level = :info
end
end
end

View File

@ -0,0 +1,115 @@
require 'puppet/application'
class Puppet::Application::Cert < Puppet::Application
should_parse_config
run_mode :master
attr_accessor :all, :ca, :digest, :signed
def subcommand
@subcommand
end
def subcommand=(name)
# Handle the nasty, legacy mapping of "clean" to "destroy".
sub = name.to_sym
@subcommand = (sub == :clean ? :destroy : sub)
end
option("--clean", "-c") do
self.subcommand = "destroy"
end
option("--all", "-a") do
@all = true
end
option("--digest DIGEST") do |arg|
@digest = arg
end
option("--signed", "-s") do
@signed = true
end
option("--debug", "-d") do |arg|
Puppet::Util::Log.level = :debug
end
require 'puppet/ssl/certificate_authority/interface'
Puppet::SSL::CertificateAuthority::Interface::INTERFACE_METHODS.reject {|m| m == :destroy }.each do |method|
option("--#{method.to_s.gsub('_','-')}", "-#{method.to_s[0,1]}") do
self.subcommand = method
end
end
option("--[no-]allow-dns-alt-names") do |value|
options[:allow_dns_alt_names] = value
end
option("--verbose", "-v") do
Puppet::Util::Log.level = :info
end
def main
if @all
hosts = :all
elsif @signed
hosts = :signed
else
hosts = command_line.args.collect { |h| h.downcase }
end
begin
@ca.apply(:revoke, options.merge(:to => hosts)) if subcommand == :destroy
@ca.apply(subcommand, options.merge(:to => hosts, :digest => @digest))
rescue => detail
puts detail.backtrace if Puppet[:trace]
puts detail.to_s
exit(24)
end
end
def setup
require 'puppet/ssl/certificate_authority'
exit(Puppet.settings.print_configs ? 0 : 1) if Puppet.settings.print_configs?
Puppet::Util::Log.newdestination :console
if [:generate, :destroy].include? subcommand
Puppet::SSL::Host.ca_location = :local
else
Puppet::SSL::Host.ca_location = :only
end
# If we are generating, and the option came from the CLI, it gets added to
# the data. This will do the right thing for non-local certificates, in
# that the command line but *NOT* the config file option will apply.
if subcommand == :generate
if Puppet.settings.setting(:dns_alt_names).setbycli
options[:dns_alt_names] = Puppet[:dns_alt_names]
end
end
begin
@ca = Puppet::SSL::CertificateAuthority.new
rescue => detail
puts detail.backtrace if Puppet[:trace]
puts detail.to_s
exit(23)
end
end
def parse_options
# handle the bareword subcommand pattern.
result = super
unless self.subcommand then
if sub = self.command_line.args.shift then
self.subcommand = sub
else
help
end
end
result
end
end

View File

@ -0,0 +1,203 @@
require 'puppet/application'
class Formatter
def initialize(width)
@width = width
end
def wrap(txt, opts)
return "" unless txt && !txt.empty?
work = (opts[:scrub] ? scrub(txt) : txt)
indent = (opts[:indent] ? opts[:indent] : 0)
textLen = @width - indent
patt = Regexp.new("^(.{0,#{textLen}})[ \n]")
prefix = " " * indent
res = []
while work.length > textLen
if work =~ patt
res << $1
work.slice!(0, $MATCH.length)
else
res << work.slice!(0, textLen)
end
end
res << work if work.length.nonzero?
prefix + res.join("\n#{prefix}")
end
def header(txt, sep = "-")
"\n#{txt}\n" + sep * txt.size
end
private
def scrub(text)
# For text with no carriage returns, there's nothing to do.
return text if text !~ /\n/
indent = nil
# If we can match an indentation, then just remove that same level of
# indent from every line.
if text =~ /^(\s+)/
indent = $1
return text.gsub(/^#{indent}/,'')
else
return text
end
end
end
class TypeDoc
def initialize
@format = Formatter.new(76)
@types = {}
Puppet::Type.loadall
Puppet::Type.eachtype { |type|
next if type.name == :component
@types[type.name] = type
}
end
def list_types
puts "These are the types known to puppet:\n"
@types.keys.sort { |a, b|
a.to_s <=> b.to_s
}.each do |name|
type = @types[name]
s = type.doc.gsub(/\s+/, " ")
n = s.index(".")
if n.nil?
s = ".. no documentation .."
elsif n > 45
s = s[0, 45] + " ..."
else
s = s[0, n]
end
printf "%-15s - %s\n", name, s
end
end
def format_type(name, opts)
name = name.to_sym
unless @types.has_key?(name)
puts "Unknown type #{name}"
return
end
type = @types[name]
puts @format.header(name.to_s, "=")
puts @format.wrap(type.doc, :indent => 0, :scrub => true) + "\n\n"
puts @format.header("Parameters")
if opts[:parameters]
format_attrs(type, [:property, :param])
else
list_attrs(type, [:property, :param])
end
if opts[:meta]
puts @format.header("Meta Parameters")
if opts[:parameters]
format_attrs(type, [:meta])
else
list_attrs(type, [:meta])
end
end
if type.providers.size > 0
puts @format.header("Providers")
if opts[:providers]
format_providers(type)
else
list_providers(type)
end
end
end
# List details about attributes
def format_attrs(type, attrs)
docs = {}
type.allattrs.each do |name|
kind = type.attrtype(name)
docs[name] = type.attrclass(name).doc if attrs.include?(kind) && name != :provider
end
docs.sort { |a,b|
a[0].to_s <=> b[0].to_s
}.each { |name, doc|
print "\n- **#{name}**"
if type.key_attributes.include?(name) and name != :name
puts " (*namevar*)"
else
puts ""
end
puts @format.wrap(doc, :indent => 4, :scrub => true)
}
end
# List the names of attributes
def list_attrs(type, attrs)
params = []
type.allattrs.each do |name|
kind = type.attrtype(name)
params << name.to_s if attrs.include?(kind) && name != :provider
end
puts @format.wrap(params.sort.join(", "), :indent => 4)
end
def format_providers(type)
type.providers.sort { |a,b|
a.to_s <=> b.to_s
}.each { |prov|
puts "\n- **#{prov}**"
puts @format.wrap(type.provider(prov).doc, :indent => 4, :scrub => true)
}
end
def list_providers(type)
list = type.providers.sort { |a,b|
a.to_s <=> b.to_s
}.join(", ")
puts @format.wrap(list, :indent => 4)
end
end
class Puppet::Application::Describe < Puppet::Application
banner "puppet describe [options] [type]"
should_not_parse_config
option("--short", "-s", "Only list parameters without detail") do |arg|
options[:parameters] = false
end
option("--providers","-p")
option("--list", "-l")
option("--meta","-m")
def preinit
options[:parameters] = true
end
def main
doc = TypeDoc.new
if options[:list]
doc.list_types
else
options[:types].each { |name| doc.format_type(name, options) }
end
end
def setup
options[:types] = command_line.args.dup
handle_help(nil) unless options[:list] || options[:types].size > 0
$stderr.puts "Warning: ignoring types when listing all types" if options[:list] && options[:types].size > 0
end
end

View File

@ -0,0 +1,178 @@
require 'puppet/application'
class Puppet::Application::Doc < Puppet::Application
should_not_parse_config
run_mode :master
attr_accessor :unknown_args, :manifest
def preinit
{:references => [], :mode => :text, :format => :to_markdown }.each do |name,value|
options[name] = value
end
@unknown_args = []
@manifest = false
end
option("--all","-a")
option("--outputdir OUTPUTDIR","-o")
option("--verbose","-v")
option("--debug","-d")
option("--charset CHARSET")
option("--format FORMAT", "-f") do |arg|
method = "to_#{arg}"
require 'puppet/util/reference'
if Puppet::Util::Reference.method_defined?(method)
options[:format] = method
else
raise "Invalid output format #{arg}"
end
end
option("--mode MODE", "-m") do |arg|
require 'puppet/util/reference'
if Puppet::Util::Reference.modes.include?(arg) or arg.intern==:rdoc
options[:mode] = arg.intern
else
raise "Invalid output mode #{arg}"
end
end
option("--list", "-l") do |arg|
require 'puppet/util/reference'
puts Puppet::Util::Reference.references.collect { |r| Puppet::Util::Reference.reference(r).doc }.join("\n")
exit(0)
end
option("--reference REFERENCE", "-r") do |arg|
options[:references] << arg.intern
end
def handle_unknown( opt, arg )
@unknown_args << {:opt => opt, :arg => arg }
true
end
def run_command
return[:rdoc].include?(options[:mode]) ? send(options[:mode]) : other
end
def rdoc
exit_code = 0
files = []
unless @manifest
env = Puppet::Node::Environment.new
files += env.modulepath
files << File.dirname(env[:manifest])
end
files += command_line.args
Puppet.info "scanning: #{files.inspect}"
Puppet.settings.setdefaults(
"puppetdoc",
"document_all" => [false, "Document all resources"]
)
Puppet.settings[:document_all] = options[:all] || false
begin
require 'puppet/util/rdoc'
if @manifest
Puppet::Util::RDoc.manifestdoc(files)
else
options[:outputdir] = "doc" unless options[:outputdir]
Puppet::Util::RDoc.rdoc(options[:outputdir], files, options[:charset])
end
rescue => detail
puts detail.backtrace if Puppet[:trace]
$stderr.puts "Could not generate documentation: #{detail}"
exit_code = 1
end
exit exit_code
end
def other
text = ""
with_contents = options[:references].length <= 1
exit_code = 0
require 'puppet/util/reference'
options[:references].sort { |a,b| a.to_s <=> b.to_s }.each do |name|
raise "Could not find reference #{name}" unless section = Puppet::Util::Reference.reference(name)
begin
# Add the per-section text, but with no ToC
text += section.send(options[:format], with_contents)
rescue => detail
puts detail.backtrace
$stderr.puts "Could not generate reference #{name}: #{detail}"
exit_code = 1
next
end
end
text += Puppet::Util::Reference.footer unless with_contents # We've only got one reference
if options[:mode] == :pdf
Puppet::Util::Reference.pdf(text)
else
puts text
end
exit exit_code
end
def setup
# sole manifest documentation
if command_line.args.size > 0
options[:mode] = :rdoc
@manifest = true
end
if options[:mode] == :rdoc
setup_rdoc
else
setup_reference
end
end
def setup_reference
if options[:all]
# Don't add dynamic references to the "all" list.
require 'puppet/util/reference'
options[:references] = Puppet::Util::Reference.references.reject do |ref|
Puppet::Util::Reference.reference(ref).dynamic?
end
end
options[:references] << :type if options[:references].empty?
end
def setup_rdoc(dummy_argument=:work_arround_for_ruby_GC_bug)
# consume the unknown options
# and feed them as settings
if @unknown_args.size > 0
@unknown_args.each do |option|
# force absolute path for modulepath when passed on commandline
if option[:opt]=="--modulepath" or option[:opt] == "--manifestdir"
option[:arg] = option[:arg].split(':').collect { |p| File.expand_path(p) }.join(':')
end
Puppet.settings.handlearg(option[:opt], option[:arg])
end
end
# Now parse the config
Puppet.parse_config
# Handle the logging settings.
if options[:debug] or options[:verbose]
if options[:debug]
Puppet::Util::Log.level = :debug
else
Puppet::Util::Log.level = :info
end
Puppet::Util::Log.newdestination(:console)
end
end
end

View File

@ -0,0 +1,87 @@
require 'puppet/application'
class Puppet::Application::Filebucket < Puppet::Application
should_not_parse_config
option("--bucket BUCKET","-b")
option("--debug","-d")
option("--local","-l")
option("--remote","-r")
option("--verbose","-v")
attr :args
def run_command
@args = command_line.args
command = args.shift
return send(command) if %w{get backup restore}.include? command
help
end
def get
md5 = args.shift
out = @client.getfile(md5)
print out
end
def backup
args.each do |file|
unless FileTest.exists?(file)
$stderr.puts "#{file}: no such file"
next
end
unless FileTest.readable?(file)
$stderr.puts "#{file}: cannot read file"
next
end
md5 = @client.backup(file)
puts "#{file}: #{md5}"
end
end
def restore
file = args.shift
md5 = args.shift
@client.restore(file, md5)
end
def setup
Puppet::Log.newdestination(:console)
@client = nil
@server = nil
Signal.trap(:INT) do
$stderr.puts "Cancelling"
exit(1)
end
if options[:debug]
Puppet::Log.level = :debug
elsif options[:verbose]
Puppet::Log.level = :info
end
# Now parse the config
Puppet.parse_config
exit(Puppet.settings.print_configs ? 0 : 1) if Puppet.settings.print_configs?
require 'puppet/file_bucket/dipper'
begin
if options[:local] or options[:bucket]
path = options[:bucket] || Puppet[:bucketdir]
@client = Puppet::FileBucket::Dipper.new(:Path => path)
else
@client = Puppet::FileBucket::Dipper.new(:Server => Puppet[:server])
end
rescue => detail
$stderr.puts detail
puts detail.backtrace if Puppet[:trace]
exit(1)
end
end
end
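# Rough usage sketch (assumed invocations; file paths and checksums are
# illustrative). The get/backup/restore subcommands dispatched in run_command
# above map to:
#   puppet filebucket --local backup /etc/passwd     # prints "/etc/passwd: <md5>"
#   puppet filebucket --local get <md5>              # prints the stored contents
#   puppet filebucket --local restore /tmp/passwd <md5>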

View File

@ -0,0 +1,181 @@
require 'puppet/application'
class Puppet::Application::Inspect < Puppet::Application
should_parse_config
run_mode :agent
option("--debug","-d")
option("--verbose","-v")
option("--logdest LOGDEST", "-l") do |arg|
begin
Puppet::Util::Log.newdestination(arg)
options[:logset] = true
rescue => detail
$stderr.puts detail.to_s
end
end
def help
puts <<-HELP ; exit # XXX
SYNOPSIS
========
Prepare and submit an inspection report to the puppet master.
USAGE
=====
puppet inspect
DESCRIPTION
===========
This command uses the cached catalog from the previous run of 'puppet
agent' to determine which attributes of which resources have been
marked as auditable with the 'audit' metaparameter. It then examines
the current state of the system, writes the state of the specified
resource attributes to a report, and submits the report to the puppet
master.
Puppet inspect does not run as a daemon, and must be run manually or from cron.
OPTIONS
=======
Any configuration setting which is valid in the configuration file is
also a valid long argument, e.g. '--server=master.domain.com'. See the
configuration file documentation at
http://docs.puppetlabs.com/references/latest/configuration.html for
the full list of acceptable settings.
AUTHOR
======
Puppet Labs
COPYRIGHT
=========
Copyright (c) 2011 Puppet Labs, LLC
Licensed under the GNU General Public License version 2
HELP
end
def setup
exit(Puppet.settings.print_configs ? 0 : 1) if Puppet.settings.print_configs?
raise "Inspect requires reporting to be enabled. Set report=true in puppet.conf to enable reporting." unless Puppet[:report]
@report = Puppet::Transaction::Report.new("inspect")
Puppet::Util::Log.newdestination(@report)
Puppet::Util::Log.newdestination(:console) unless options[:logset]
Signal.trap(:INT) do
$stderr.puts "Exiting"
exit(1)
end
if options[:debug]
Puppet::Util::Log.level = :debug
elsif options[:verbose]
Puppet::Util::Log.level = :info
end
Puppet::Transaction::Report.terminus_class = :rest
Puppet::Resource::Catalog.terminus_class = :yaml
end
def preinit
require 'puppet'
require 'puppet/file_bucket/dipper'
end
def run_command
benchmark(:notice, "Finished inspection") do
retrieval_starttime = Time.now
unless catalog = Puppet::Resource::Catalog.find(Puppet[:certname])
raise "Could not find catalog for #{Puppet[:certname]}"
end
@report.configuration_version = catalog.version
inspect_starttime = Time.now
@report.add_times("config_retrieval", inspect_starttime - retrieval_starttime)
if Puppet[:archive_files]
dipper = Puppet::FileBucket::Dipper.new(:Server => Puppet[:archive_file_server])
end
catalog.to_ral.resources.each do |ral_resource|
audited_attributes = ral_resource[:audit]
next unless audited_attributes
status = Puppet::Resource::Status.new(ral_resource)
begin
audited_resource = ral_resource.to_resource
rescue StandardError => detail
puts detail.backtrace if Puppet[:trace]
ral_resource.err "Could not inspect #{ral_resource}; skipping: #{detail}"
audited_attributes.each do |name|
event = ral_resource.event(
:property => name,
:status => "failure",
:audited => true,
:message => "failed to inspect #{name}"
)
status.add_event(event)
end
else
audited_attributes.each do |name|
next if audited_resource[name].nil?
# Skip :absent properties of :absent resources. Really, it would be nicer if the RAL returned nil for those, but it doesn't. ~JW
if name == :ensure or audited_resource[:ensure] != :absent or audited_resource[name] != :absent
event = ral_resource.event(
:previous_value => audited_resource[name],
:property => name,
:status => "audit",
:audited => true,
:message => "inspected value is #{audited_resource[name].inspect}"
)
status.add_event(event)
end
end
end
if Puppet[:archive_files] and ral_resource.type == :file and audited_attributes.include?(:content)
path = ral_resource[:path]
if File.readable?(path)
begin
dipper.backup(path)
rescue StandardError => detail
Puppet.warning detail
end
end
end
@report.add_resource_status(status)
end
finishtime = Time.now
@report.add_times("inspect", finishtime - inspect_starttime)
@report.finalize_report
begin
@report.save
rescue => detail
puts detail.backtrace if Puppet[:trace]
Puppet.err "Could not send report: #{detail}"
end
end
end
end

View File

@ -0,0 +1,210 @@
require 'puppet/application'
class Puppet::Application::Kick < Puppet::Application
should_not_parse_config
attr_accessor :hosts, :tags, :classes
option("--all","-a")
option("--foreground","-f")
option("--debug","-d")
option("--ping","-P")
option("--test")
option("--host HOST") do |arg|
@hosts << arg
end
option("--tag TAG", "-t") do |arg|
@tags << arg
end
option("--class CLASS", "-c") do |arg|
@classes << arg
end
option("--no-fqdn", "-n") do |arg|
options[:fqdn] = false
end
option("--parallel PARALLEL", "-p") do |arg|
begin
options[:parallel] = Integer(arg)
rescue
$stderr.puts "Could not convert #{arg.inspect} to an integer"
exit(23)
end
end
def run_command
@hosts += command_line.args
options[:test] ? test : main
end
def test
puts "Skipping execution in test mode"
exit(0)
end
def main
Puppet.warning "Failed to load ruby LDAP library. LDAP functionality will not be available" unless Puppet.features.ldap?
require 'puppet/util/ldap/connection'
todo = @hosts.dup
failures = []
# Now do the actual work
go = true
while go
# If we don't have enough children in process and we still have hosts left to
# do, then do the next host.
if @children.length < options[:parallel] and ! todo.empty?
host = todo.shift
pid = fork do
run_for_host(host)
end
@children[pid] = host
else
# Else, see if we can reap a process.
begin
pid = Process.wait
if host = @children[pid]
# Remove our host from the list of children, so the parallelization
# continues working.
@children.delete(pid)
failures << host if $CHILD_STATUS.exitstatus != 0
print "#{host} finished with exit code #{$CHILD_STATUS.exitstatus}\n"
else
$stderr.puts "Could not find host for PID #{pid} with status #{$CHILD_STATUS.exitstatus}"
end
rescue Errno::ECHILD
# There are no children left, so just exit unless there are still
# children left to do.
next unless todo.empty?
if failures.empty?
puts "Finished"
exit(0)
else
puts "Failed: #{failures.join(", ")}"
exit(3)
end
end
end
end
end
def run_for_host(host)
if options[:ping]
out = %x{ping -c 1 #{host}}
unless $CHILD_STATUS == 0
$stderr.print "Could not contact #{host}\n"
# NOTE: `next` is not valid inside a method body; this method runs in a
# forked child (see main above), so exit the child instead.
exit(1)
end
end
require 'puppet/run'
Puppet::Run.indirection.terminus_class = :rest
port = Puppet[:puppetport]
url = ["https://#{host}:#{port}", "production", "run", host].join('/')
print "Triggering #{host}\n"
begin
run_options = {
:tags => @tags,
:background => ! options[:foreground],
:ignoreschedules => options[:ignoreschedules]
}
run = Puppet::Run.new( run_options ).save( url )
puts "Getting status"
result = run.status
puts "status is #{result}"
rescue => detail
puts detail.backtrace if Puppet[:trace]
$stderr.puts "Host #{host} failed: #{detail}\n"
exit(2)
end
case result
when "success";
exit(0)
when "running"
$stderr.puts "Host #{host} is already running"
exit(3)
else
$stderr.puts "Host #{host} returned unknown answer '#{result}'"
exit(12)
end
end
def initialize(*args)
super
@hosts = []
@classes = []
@tags = []
end
def preinit
[:INT, :TERM].each do |signal|
Signal.trap(signal) do
$stderr.puts "Cancelling"
exit(1)
end
end
options[:parallel] = 1
options[:verbose] = true
options[:fqdn] = true
options[:ignoreschedules] = false
options[:foreground] = false
end
def setup
if options[:debug]
Puppet::Util::Log.level = :debug
else
Puppet::Util::Log.level = :info
end
# Now parse the config
Puppet.parse_config
if Puppet[:node_terminus] == "ldap" and (options[:all] or @classes)
if options[:all]
@hosts = Puppet::Node.search("whatever", :fqdn => options[:fqdn]).collect { |node| node.name }
puts "all: #{@hosts.join(", ")}"
else
@hosts = []
@classes.each do |klass|
list = Puppet::Node.search("whatever", :fqdn => options[:fqdn], :class => klass).collect { |node| node.name }
puts "#{klass}: #{list.join(", ")}"
@hosts += list
end
end
elsif ! @classes.empty?
$stderr.puts "You must be using LDAP to specify host classes"
exit(24)
end
@children = {}
# If we get a signal, then kill all of our children and get out.
[:INT, :TERM].each do |signal|
Signal.trap(signal) do
Puppet.notice "Caught #{signal}; shutting down"
@children.each do |pid, host|
Process.kill("INT", pid)
end
waitall
exit(1)
end
end
end
end
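# Rough usage sketch (assumed invocation; host names and tags are illustrative):
#   puppet kick --parallel 5 --tag webserver --host node1.example.com --host node2.example.com
# Each host is triggered over REST on Puppet[:puppetport] in a forked child,
# and failures are summarized once all children have exited.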

View File

@ -0,0 +1,173 @@
require 'puppet/application'
class Puppet::Application::Master < Puppet::Application
should_parse_config
run_mode :master
option("--debug", "-d")
option("--verbose", "-v")
# internal option, only to be used by ext/rack/config.ru
option("--rack")
option("--compile host", "-c host") do |arg|
options[:node] = arg
end
option("--logdest DEST", "-l DEST") do |arg|
begin
Puppet::Util::Log.newdestination(arg)
options[:setdest] = true
rescue => detail
puts detail.backtrace if Puppet[:debug]
$stderr.puts detail.to_s
end
end
def preinit
Signal.trap(:INT) do
$stderr.puts "Cancelling startup"
exit(0)
end
# Create this first-off, so we have ARGV
require 'puppet/daemon'
@daemon = Puppet::Daemon.new
@daemon.argv = ARGV.dup
end
def run_command
if options[:node]
compile
elsif Puppet[:parseonly]
parseonly
else
main
end
end
def compile
Puppet::Util::Log.newdestination :console
raise ArgumentError, "Cannot render compiled catalogs without pson support" unless Puppet.features.pson?
begin
unless catalog = Puppet::Resource::Catalog.find(options[:node])
raise "Could not compile catalog for #{options[:node]}"
end
jj catalog.to_resource
rescue => detail
$stderr.puts detail
exit(30)
end
exit(0)
end
def parseonly
begin
Puppet::Node::Environment.new(Puppet[:environment]).known_resource_types
rescue => detail
Puppet.err detail
exit 1
end
exit(0)
end
def main
require 'etc'
xmlrpc_handlers = [:Status, :FileServer, :Master, :Report, :Filebucket]
xmlrpc_handlers << :CA if Puppet[:ca]
# Make sure we've got a localhost ssl cert
Puppet::SSL::Host.localhost
# And now configure our server to *only* hit the CA for data, because that's
# all it will have write access to.
Puppet::SSL::Host.ca_location = :only if Puppet::SSL::CertificateAuthority.ca?
if Puppet.features.root?
begin
Puppet::Util.chuser
rescue => detail
puts detail.backtrace if Puppet[:trace]
$stderr.puts "Could not change user to #{Puppet[:user]}: #{detail}"
exit(39)
end
end
unless options[:rack]
require 'puppet/network/server'
@daemon.server = Puppet::Network::Server.new(:xmlrpc_handlers => xmlrpc_handlers)
@daemon.daemonize if Puppet[:daemonize]
else
require 'puppet/network/http/rack'
@app = Puppet::Network::HTTP::Rack.new(:xmlrpc_handlers => xmlrpc_handlers, :protocols => [:rest, :xmlrpc])
end
Puppet.notice "Starting Puppet master version #{Puppet.version}"
unless options[:rack]
@daemon.start
else
return @app
end
end
def setup_logs
# Handle the logging settings.
if options[:debug] or options[:verbose]
if options[:debug]
Puppet::Util::Log.level = :debug
else
Puppet::Util::Log.level = :info
end
unless Puppet[:daemonize] or options[:rack]
Puppet::Util::Log.newdestination(:console)
options[:setdest] = true
end
end
Puppet::Util::Log.newdestination(:syslog) unless options[:setdest]
end
def setup_terminuses
require 'puppet/file_serving/content'
require 'puppet/file_serving/metadata'
# Cache our nodes in yaml. Currently not configurable.
Puppet::Node.cache_class = :yaml
Puppet::FileServing::Content.indirection.terminus_class = :file_server
Puppet::FileServing::Metadata.indirection.terminus_class = :file_server
Puppet::FileBucket::File.indirection.terminus_class = :file
end
def setup_ssl
# Configure all of the SSL stuff.
if Puppet::SSL::CertificateAuthority.ca?
Puppet::SSL::Host.ca_location = :local
Puppet.settings.use :ca
Puppet::SSL::CertificateAuthority.instance
else
Puppet::SSL::Host.ca_location = :none
end
end
def setup
raise Puppet::Error.new("Puppet master is not supported on Microsoft Windows") if Puppet.features.microsoft_windows?
setup_logs
exit(Puppet.settings.print_configs ? 0 : 1) if Puppet.settings.print_configs?
Puppet.settings.use :main, :master, :ssl, :metrics
setup_terminuses
setup_ssl
end
end
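# Rough usage sketch (assumed invocation; the node name is illustrative).
# Besides running as the daemonized master, the --compile option above prints
# a node's compiled catalog (PSON support required):
#   puppet master --compile agent01.example.com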

View File

@ -0,0 +1,102 @@
require 'puppet/application'
require 'puppet/util'
class Puppet::Application::Queue < Puppet::Application
should_parse_config
attr_accessor :daemon
def preinit
require 'puppet/daemon'
@daemon = Puppet::Daemon.new
@daemon.argv = ARGV.dup
# Do an initial trap, so that cancels don't get a stack trace.
# This exits with exit code 1
Signal.trap(:INT) do
$stderr.puts "Caught SIGINT; shutting down"
exit(1)
end
# This is a normal shutdown, so code 0
Signal.trap(:TERM) do
$stderr.puts "Caught SIGTERM; shutting down"
exit(0)
end
{
:verbose => false,
:debug => false
}.each do |opt,val|
options[opt] = val
end
end
option("--debug","-d")
option("--verbose","-v")
option("--logdest DEST", "-l DEST") do |arg|
begin
Puppet::Util::Log.newdestination(arg)
options[:setdest] = true
rescue => detail
puts detail.backtrace if Puppet[:debug]
$stderr.puts detail.to_s
end
end
def main
require 'puppet/indirector/catalog/queue' # provides Puppet::Indirector::Queue.subscribe
Puppet.notice "Starting puppetqd #{Puppet.version}"
Puppet::Resource::Catalog::Queue.subscribe do |catalog|
# Once you have a Puppet::Resource::Catalog instance, calling save on it should suffice
# to put it through to the database via its active_record indirector (which is determined
# by the terminus_class = :active_record setting above)
Puppet::Util.benchmark(:notice, "Processing queued catalog for #{catalog.name}") do
begin
catalog.save
rescue => detail
puts detail.backtrace if Puppet[:trace]
Puppet.err "Could not save queued catalog for #{catalog.name}: #{detail}"
end
end
end
Thread.list.each { |thread| thread.join }
end
# Handle the logging settings.
def setup_logs
if options[:debug] or options[:verbose]
Puppet::Util::Log.newdestination(:console)
if options[:debug]
Puppet::Util::Log.level = :debug
else
Puppet::Util::Log.level = :info
end
end
Puppet::Util::Log.newdestination(:syslog) unless options[:setdest]
end
def setup
unless Puppet.features.stomp?
raise ArgumentError, "Could not load the 'stomp' library, which must be present for queueing to work. You must install the required library."
end
setup_logs
exit(Puppet.settings.print_configs ? 0 : 1) if Puppet.settings.print_configs?
require 'puppet/resource/catalog'
Puppet::Resource::Catalog.terminus_class = :active_record
daemon.daemonize if Puppet[:daemonize]
# We want to make sure that we don't have a cache
# class set up, because if storeconfigs is enabled,
# we'll get a loop of continually caching the catalog
# for storage again.
Puppet::Resource::Catalog.cache_class = nil
end
end
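# Rough usage sketch (assumed invocation; requires the 'stomp' gem and a
# reachable broker, presumably the one configured via Puppet[:queue_source]):
#   puppet queue --verbose
# This subscribes to the catalog queue and saves each dequeued catalog through
# the :active_record terminus selected in setup above.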

View File

@ -0,0 +1,128 @@
require 'puppet/application'
class Puppet::Application::Resource < Puppet::Application
should_not_parse_config
attr_accessor :host, :extra_params
def preinit
@extra_params = []
@host = nil
Facter.loadfacts
end
option("--debug","-d")
option("--verbose","-v")
option("--edit","-e")
option("--host HOST","-H") do |arg|
@host = arg
end
option("--types", "-t") do |arg|
types = []
Puppet::Type.loadall
Puppet::Type.eachtype do |t|
next if t.name == :component
types << t.name.to_s
end
puts types.sort
exit
end
option("--param PARAM", "-p") do |arg|
@extra_params << arg.to_sym
end
def main
args = command_line.args
type = args.shift or raise "You must specify the type to display"
typeobj = Puppet::Type.type(type) or raise "Could not find type #{type}"
name = args.shift
params = {}
args.each do |setting|
if setting =~ /^(\w+)=(.+)$/
params[$1] = $2
else
raise "Invalid parameter setting #{setting}"
end
end
raise "You cannot edit a remote host" if options[:edit] and @host
properties = typeobj.properties.collect { |s| s.name }
format = proc {|trans|
trans.dup.collect do |param, value|
if value.nil? or value.to_s.empty?
trans.delete(param)
elsif value.to_s == "absent" and param.to_s != "ensure"
trans.delete(param)
end
trans.delete(param) unless properties.include?(param) or @extra_params.include?(param)
end
trans.to_manifest
}
if @host
Puppet::Resource.indirection.terminus_class = :rest
port = Puppet[:puppetport]
key = ["https://#{host}:#{port}", "production", "resources", type, name].join('/')
else
key = [type, name].join('/')
end
text = if name
if params.empty?
[ Puppet::Resource.find( key ) ]
else
[ Puppet::Resource.new( type, name, :parameters => params ).save( key ) ]
end
else
if type == "file"
raise "Listing all file instances is not supported. Please specify a file or directory, e.g. puppet resource file /etc"
end
Puppet::Resource.search( key, {} )
end.map(&format).join("\n")
if options[:edit]
require 'tempfile'
# Prefer the current directory, which is more likely to be secure
# and, in the case of interactive use, accessible to the user.
tmpfile = Tempfile.new('x2puppet', Dir.pwd)
begin
# sync write, so nothing buffers before we invoke the editor.
tmpfile.sync = true
tmpfile.puts text
# edit the content
system(ENV["EDITOR"] || 'vi', tmpfile.path)
# ...and, now, pass that file to puppet to apply. Because
# many editors rename or replace the original file we need to
# feed the pathname, not the file content itself, to puppet.
system('puppet -v ' + tmpfile.path)
ensure
# The temporary file will be safely removed.
tmpfile.close(true)
end
else
puts text
end
end
def setup
Puppet::Util::Log.newdestination(:console)
# Now parse the config
Puppet.parse_config
if options[:debug]
Puppet::Util::Log.level = :debug
elsif options[:verbose]
Puppet::Util::Log.level = :info
end
end
end
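# Rough usage sketch (assumed invocations; resource names and values are
# illustrative). main above expects a type, an optional name, and optional
# param=value pairs:
#   puppet resource user                                      # list all user resources
#   puppet resource user root                                 # show one resource as a manifest
#   puppet resource service sshd ensure=running enable=true   # apply a change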

View File

@ -0,0 +1,250 @@
# The client for interacting with the puppetmaster config server.
require 'sync'
require 'timeout'
require 'puppet/network/http_pool'
require 'puppet/util'
class Puppet::Configurer
require 'puppet/configurer/fact_handler'
require 'puppet/configurer/plugin_handler'
include Puppet::Configurer::FactHandler
include Puppet::Configurer::PluginHandler
# For benchmarking
include Puppet::Util
attr_reader :compile_time
# Provide more helpful strings to the logging that the Agent does
def self.to_s
"Puppet configuration client"
end
class << self
# Puppetd should only have one instance running, and we need a way
# to retrieve it.
attr_accessor :instance
include Puppet::Util
end
# How to lock instances of this class.
def self.lockfile_path
Puppet[:puppetdlockfile]
end
def clear
@catalog.clear(true) if @catalog
@catalog = nil
end
def execute_postrun_command
execute_from_setting(:postrun_command)
end
def execute_prerun_command
execute_from_setting(:prerun_command)
end
# Initialize and load storage
def dostorage
Puppet::Util::Storage.load
@compile_time ||= Puppet::Util::Storage.cache(:configuration)[:compile_time]
rescue => detail
puts detail.backtrace if Puppet[:trace]
Puppet.err "Corrupt state file #{Puppet[:statefile]}: #{detail}"
begin
::File.unlink(Puppet[:statefile])
retry
rescue => detail
raise Puppet::Error.new("Cannot remove #{Puppet[:statefile]}: #{detail}")
end
end
# Just so we can specify that we are "the" instance.
def initialize
Puppet.settings.use(:main, :ssl, :agent)
self.class.instance = self
@running = false
@splayed = false
end
# Prepare for catalog retrieval. Downloads everything necessary, etc.
def prepare(options)
dostorage
download_plugins unless options[:skip_plugin_download]
download_fact_plugins unless options[:skip_plugin_download]
end
# Get the remote catalog. Returns nil if no catalog can be found.
def retrieve_catalog(fact_options)
fact_options ||= {}
# First try it with no cache, then with the cache.
unless (Puppet[:use_cached_catalog] and result = retrieve_catalog_from_cache(fact_options)) or result = retrieve_new_catalog(fact_options)
if ! Puppet[:usecacheonfailure]
Puppet.warning "Not using cache on failed catalog"
return nil
end
result = retrieve_catalog_from_cache(fact_options)
end
return nil unless result
convert_catalog(result, @duration)
end
# Convert a plain resource catalog into our full host catalog.
def convert_catalog(result, duration)
catalog = result.to_ral
catalog.finalize
catalog.retrieval_duration = duration
catalog.write_class_file
catalog
end
# Retrieve (optionally) and apply a catalog. If a catalog is passed in
# the options, then apply that one, otherwise retrieve it.
def retrieve_and_apply_catalog(options, fact_options)
unless catalog = (options.delete(:catalog) || retrieve_catalog(fact_options))
Puppet.err "Could not retrieve catalog; skipping run"
return
end
report = options[:report]
report.configuration_version = catalog.version
benchmark(:notice, "Finished catalog run") do
catalog.apply(options)
end
report.finalize_report
report
end
# The code that actually runs the catalog.
# This just passes any options on to the catalog,
# which accepts :tags and :ignoreschedules.
def run(options = {})
options[:report] ||= Puppet::Transaction::Report.new("apply")
report = options[:report]
Puppet::Util::Log.newdestination(report)
begin
prepare(options)
if Puppet::Resource::Catalog.indirection.terminus_class == :rest
# This is a bit complicated. We need the serialized and escaped facts,
# and we need to know which format they're encoded in. Thus, we
# get a hash with both of these pieces of information.
fact_options = facts_for_uploading
end
# set report host name now that we have the fact
report.host = Puppet[:node_name_value]
begin
execute_prerun_command or return nil
retrieve_and_apply_catalog(options, fact_options)
rescue SystemExit,NoMemoryError
raise
rescue => detail
puts detail.backtrace if Puppet[:trace]
Puppet.err "Failed to apply catalog: #{detail}"
return nil
ensure
execute_postrun_command or return nil
end
ensure
# Make sure we forget the retained module_directories of any autoload
# we might have used.
Thread.current[:env_module_directories] = nil
# Now close all of our existing http connections, since there's no
# reason to leave them lying open.
Puppet::Network::HttpPool.clear_http_instances
end
ensure
Puppet::Util::Log.close(report)
send_report(report)
end
def send_report(report)
puts report.summary if Puppet[:summarize]
save_last_run_summary(report)
report.save if Puppet[:report]
rescue => detail
puts detail.backtrace if Puppet[:trace]
Puppet.err "Could not send report: #{detail}"
end
def save_last_run_summary(report)
Puppet::Util::FileLocking.writelock(Puppet[:lastrunfile], 0660) do |file|
file.print YAML.dump(report.raw_summary)
end
rescue => detail
puts detail.backtrace if Puppet[:trace]
Puppet.err "Could not save last run local report: #{detail}"
end
private
def self.timeout
timeout = Puppet[:configtimeout]
case timeout
when String
if timeout =~ /^\d+$/
timeout = Integer(timeout)
else
raise ArgumentError, "Configuration timeout must be an integer"
end
when Integer # nothing
else
raise ArgumentError, "Configuration timeout must be an integer"
end
timeout
end
def execute_from_setting(setting)
return true if (command = Puppet[setting]) == ""
begin
Puppet::Util.execute([command])
true
rescue => detail
puts detail.backtrace if Puppet[:trace]
Puppet.err "Could not run command from #{setting}: #{detail}"
false
end
end
def retrieve_catalog_from_cache(fact_options)
result = nil
@duration = thinmark do
result = Puppet::Resource::Catalog.find(Puppet[:node_name_value], fact_options.merge(:ignore_terminus => true))
end
Puppet.notice "Using cached catalog"
result
rescue => detail
puts detail.backtrace if Puppet[:trace]
Puppet.err "Could not retrieve catalog from cache: #{detail}"
return nil
end
def retrieve_new_catalog(fact_options)
result = nil
@duration = thinmark do
result = Puppet::Resource::Catalog.find(Puppet[:node_name_value], fact_options.merge(:ignore_cache => true))
end
result
rescue SystemExit,NoMemoryError
raise
rescue Exception => detail
puts detail.backtrace if Puppet[:trace]
Puppet.err "Could not retrieve catalog from remote server: #{detail}"
return nil
end
end
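# Rough usage sketch (assumed; in practice the agent application drives this
# class, and the option shown is illustrative):
#   configurer = Puppet::Configurer.new
#   configurer.run(:skip_plugin_download => true)
# run prepares storage and plugins, retrieves a cached or fresh catalog,
# applies it, and always finalizes and sends a report, even when the run fails.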

View File

@ -0,0 +1,80 @@
require 'puppet/configurer'
require 'puppet/resource/catalog'
class Puppet::Configurer::Downloader
attr_reader :name, :path, :source, :ignore
# Determine the timeout value to use.
def self.timeout
timeout = Puppet[:configtimeout]
case timeout
when String
if timeout =~ /^\d+$/
timeout = Integer(timeout)
else
raise ArgumentError, "Configuration timeout must be an integer"
end
when Integer # nothing
else
raise ArgumentError, "Configuration timeout must be an integer"
end
timeout
end
# Evaluate our download, returning the list of changed values.
def evaluate
Puppet.info "Retrieving #{name}"
files = []
begin
Timeout.timeout(self.class.timeout) do
catalog.apply do |trans|
trans.changed?.find_all do |resource|
yield resource if block_given?
files << resource[:path]
end
end
end
rescue Puppet::Error, Timeout::Error => detail
puts detail.backtrace if Puppet[:debug]
Puppet.err "Could not retrieve #{name}: #{detail}"
end
files
end
def initialize(name, path, source, ignore = nil)
@name, @path, @source, @ignore = name, path, source, ignore
end
def catalog
catalog = Puppet::Resource::Catalog.new
catalog.host_config = false
catalog.add_resource(file)
catalog
end
def file
args = default_arguments.merge(:path => path, :source => source)
args[:ignore] = ignore.split if ignore
Puppet::Type.type(:file).new(args)
end
private
def default_arguments
{
:path => path,
:recurse => true,
:source => source,
:tag => name,
:owner => Process.uid,
:group => Process.gid,
:purge => true,
:force => true,
:backup => false,
:noop => false
}
end
end

View File

@ -0,0 +1,77 @@
require 'puppet/indirector/facts/facter'
require 'puppet/configurer/downloader'
# Break out the code related to facts. This module is
# just included into the agent, but having it here makes it
# easier to test.
module Puppet::Configurer::FactHandler
def download_fact_plugins?
Puppet[:factsync]
end
def find_facts
# This works because puppet agent configures Facts to use 'facter' for
# finding facts and the 'rest' terminus for caching them. Thus, we'll
# compile them and then "cache" them on the server.
begin
reload_facter
facts = Puppet::Node::Facts.find(Puppet[:node_name_value])
unless Puppet[:node_name_fact].empty?
Puppet[:node_name_value] = facts.values[Puppet[:node_name_fact]]
facts.name = Puppet[:node_name_value]
end
facts
rescue SystemExit,NoMemoryError
raise
rescue Exception => detail
puts detail.backtrace if Puppet[:trace]
raise Puppet::Error, "Could not retrieve local facts: #{detail}"
end
end
def facts_for_uploading
facts = find_facts
#format = facts.class.default_format
if facts.support_format?(:b64_zlib_yaml)
format = :b64_zlib_yaml
else
format = :yaml
end
text = facts.render(format)
{:facts_format => format, :facts => CGI.escape(text)}
end
# Retrieve facts from the central server.
def download_fact_plugins
return unless download_fact_plugins?
# Deprecated prior to 0.25, as of 5/19/2008
Puppet.warning "Fact syncing is deprecated as of 0.25 -- use 'pluginsync' instead"
Puppet::Configurer::Downloader.new("fact", Puppet[:factdest], Puppet[:factsource], Puppet[:factsignore]).evaluate
end
# Clear out all of the loaded facts and reload them from disk.
# NOTE: This is clumsy and shouldn't be required for later (1.5.x) versions
# of Facter.
def reload_facter
Facter.clear
# Reload everything.
if Facter.respond_to? :loadfacts
Facter.loadfacts
elsif Facter.respond_to? :load
Facter.load
else
Puppet.warning "You should upgrade your version of Facter to at least 1.3.8"
end
# This loads all existing facts and any new ones. We have to remove and
# reload because there's no way to unload specific facts.
Puppet::Node::Facts::Facter.load_fact_plugins
end
end

View File

@ -0,0 +1,33 @@
# Break out the code related to plugins. This module is
# just included into the agent, but having it here makes it
# easier to test.
module Puppet::Configurer::PluginHandler
def download_plugins?
Puppet[:pluginsync]
end
# Retrieve facts from the central server.
def download_plugins
return nil unless download_plugins?
plugin_downloader = Puppet::Configurer::Downloader.new(
"plugin",
Puppet[:plugindest],
Puppet[:pluginsource],
Puppet[:pluginsignore]
)
plugin_downloader.evaluate.each { |file| load_plugin(file) }
end
def load_plugin(file)
return unless FileTest.exist?(file)
return if FileTest.directory?(file)
begin
Puppet.info "Loading downloaded plugin #{file}"
load file
rescue Exception => detail
Puppet.err "Could not load downloaded file #{file}: #{detail}"
end
end
end

130
mcollective/lib/puppet/daemon.rb Executable file
View File

@ -0,0 +1,130 @@
require 'puppet'
require 'puppet/util/pidlock'
require 'puppet/external/event-loop'
require 'puppet/application'
# A module that handles operations common to all daemons. This is included
# into the Server and Client base classes.
class Puppet::Daemon
attr_accessor :agent, :server, :argv
def daemonname
Puppet[:name]
end
# Put the daemon into the background.
def daemonize
if pid = fork
Process.detach(pid)
exit(0)
end
create_pidfile
# Get rid of console logging
Puppet::Util::Log.close(:console)
Process.setsid
Dir.chdir("/")
begin
$stdin.reopen "/dev/null"
$stdout.reopen "/dev/null", "a"
$stderr.reopen $stdout
Puppet::Util::Log.reopen
rescue => detail
Puppet.err "Could not start #{Puppet[:name]}: #{detail}"
Puppet::Util::replace_file("/tmp/daemonout", 0644) do |f|
f.puts "Could not start #{Puppet[:name]}: #{detail}"
end
exit(12)
end
end
# Create a pidfile for our daemon, so we can be stopped and others
# don't try to start.
def create_pidfile
Puppet::Util.synchronize_on(Puppet[:name],Sync::EX) do
raise "Could not create PID file: #{pidfile}" unless Puppet::Util::Pidlock.new(pidfile).lock
end
end
# Provide the path to our pidfile.
def pidfile
Puppet[:pidfile]
end
def reexec
raise Puppet::DevError, "Cannot reexec unless ARGV arguments are set" unless argv
command = $0 + " " + argv.join(" ")
Puppet.notice "Restarting with '#{command}'"
stop(:exit => false)
exec(command)
end
def reload
return unless agent
if agent.running?
Puppet.notice "Not triggering already-running agent"
return
end
agent.run
end
# Remove the pid file for our daemon.
def remove_pidfile
Puppet::Util.synchronize_on(Puppet[:name],Sync::EX) do
locker = Puppet::Util::Pidlock.new(pidfile)
locker.unlock or Puppet.err "Could not remove PID file #{pidfile}" if locker.locked?
end
end
def restart
Puppet::Application.restart!
reexec unless agent and agent.running?
end
def reopen_logs
Puppet::Util::Log.reopen
end
# Trap a couple of the main signals. This should probably be handled
# in a way that anyone else can register callbacks for traps, but, eh.
def set_signal_traps
signals = {:INT => :stop, :TERM => :stop }
# extended signals not supported under windows
signals.update({:HUP => :restart, :USR1 => :reload, :USR2 => :reopen_logs }) unless Puppet.features.microsoft_windows?
signals.each do |signal, method|
Signal.trap(signal) do
Puppet.notice "Caught #{signal}; calling #{method}"
send(method)
end
end
end
# Stop everything
def stop(args = {:exit => true})
Puppet::Application.stop!
server.stop if server
remove_pidfile
Puppet::Util::Log.close_all
exit if args[:exit]
end
def start
set_signal_traps
create_pidfile
raise Puppet::DevError, "Daemons must have an agent, server, or both" unless agent or server
server.start if server
agent.start if agent
EventLoop.current.run
end
end
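# Rough usage sketch (assumed; mirrors how the master and queue applications
# above drive this class; the server object is illustrative):
#   daemon = Puppet::Daemon.new
#   daemon.argv = ARGV.dup
#   daemon.server = Puppet::Network::Server.new(:xmlrpc_handlers => [:Status])
#   daemon.daemonize if Puppet[:daemonize]
#   daemon.start   # traps signals, writes the pidfile, and runs the event loop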

View File

@ -0,0 +1,895 @@
# The majority of the system configuration parameters are set in this file.
module Puppet
setdefaults(:main,
:confdir => [Puppet.run_mode.conf_dir, "The main Puppet configuration directory. The default for this parameter is calculated based on the user. If the process
is running as root or the user that Puppet is supposed to run as, it defaults to a system directory, but if it's running as any other user,
it defaults to being in the user's home directory."],
:vardir => [Puppet.run_mode.var_dir, "Where Puppet stores dynamic and growing data. The default for this parameter is calculated specially, like `confdir`_."],
:name => [Puppet.application_name.to_s, "The name of the application, if we are running as one. The
default is essentially $0 without the path or `.rb`."],
:run_mode => [Puppet.run_mode.name.to_s, "The effective 'run mode' of the application: master, agent, or user."]
)
setdefaults(:main, :logdir => Puppet.run_mode.logopts)
setdefaults(:main,
:trace => [false, "Whether to print stack traces on some errors"],
:autoflush => {
:default => false,
:desc => "Whether log files should always flush to disk.",
:hook => proc { |value| Log.autoflush = value }
},
:syslogfacility => ["daemon", "What syslog facility to use when logging to
syslog. Syslog has a fixed list of valid facilities, and you must
choose one of those; you cannot just make one up."],
:statedir => { :default => "$vardir/state",
:mode => 01755,
:desc => "The directory where Puppet state is stored. Generally,
this directory can be removed without causing harm (although it
might result in spurious service restarts)."
},
:rundir => {
:default => Puppet.run_mode.run_dir,
:mode => 01777,
:desc => "Where Puppet PID files are kept."
},
:genconfig => [false,
"Whether to just print a configuration to stdout and exit. Only makes
sense when used interactively. Takes into account arguments specified
on the CLI."],
:genmanifest => [false,
"Whether to just print a manifest to stdout and exit. Only makes
sense when used interactively. Takes into account arguments specified
on the CLI."],
:configprint => ["",
"Print the value of a specific configuration parameter. If a
parameter is provided for this, then the value is printed and puppet
exits. Comma-separate multiple values. For a list of all values,
specify 'all'. This feature is only available in Puppet versions
higher than 0.18.4."],
:color => ["ansi", "Whether to use colors when logging to the console.
Valid values are `ansi` (equivalent to `true`), `html` (mostly
used during testing with TextMate), and `false`, which produces
no color."],
:mkusers => [false,
"Whether to create the necessary user and group that puppet agent will
run as."],
:manage_internal_file_permissions => [true,
"Whether Puppet should manage the owner, group, and mode of files
it uses internally"
],
:onetime => {:default => false,
:desc => "Run the configuration once, rather than as a long-running
daemon. This is useful for interactively running puppetd.",
:short => 'o'
},
:path => {:default => "none",
:desc => "The shell search path. Defaults to whatever is inherited
from the parent process.",
:call_on_define => true, # Call our hook with the default value, so we always get the PATH set.
:hook => proc do |value|
ENV["PATH"] = "" if ENV["PATH"].nil?
ENV["PATH"] = value unless value == "none"
paths = ENV["PATH"].split(File::PATH_SEPARATOR)
%w{/usr/sbin /sbin}.each do |path|
ENV["PATH"] += File::PATH_SEPARATOR + path unless paths.include?(path)
end
value
end
},
:libdir => {:default => "$vardir/lib",
:desc => "An extra search path for Puppet. This is only useful
for those files that Puppet will load on demand, and is only
guaranteed to work for those cases. In fact, the autoload
mechanism is responsible for making sure this directory
is in Ruby's search path",
:call_on_define => true, # Call our hook with the default value, so we always get the libdir set.
:hook => proc do |value|
$LOAD_PATH.delete(@oldlibdir) if defined?(@oldlibdir) and $LOAD_PATH.include?(@oldlibdir)
@oldlibdir = value
$LOAD_PATH << value
end
},
:ignoreimport => [false, "A parameter that can be used in commit
hooks, since it enables you to parse-check a single file rather
than requiring that all files exist."],
:authconfig => [ "$confdir/namespaceauth.conf",
"The configuration file that defines the rights to the different
namespaces and methods. This can be used as a coarse-grained
authorization system for both `puppet agent` and `puppet master`."
],
:environment => {:default => "production", :desc => "The environment Puppet is running in. For clients
(e.g., `puppet agent`) this determines the environment itself, which
is used to find modules and much more. For servers (i.e., `puppet master`) this provides the default environment for nodes
we know nothing about."
},
:diff_args => ["-u", "Which arguments to pass to the diff command when printing differences between files."],
:diff => ["diff", "Which diff command to use when printing differences between files."],
:show_diff => [false, "Whether to print a contextual diff when files are being replaced. The diff
is printed on stdout, so this option is meaningless unless you are running Puppet interactively.
This feature currently requires the `diff/lcs` Ruby library."],
:daemonize => { :default => true,
:desc => "Send the process into the background. This is the default.",
:short => "D"
},
:maximum_uid => [4294967290, "The maximum allowed UID. Some platforms use negative UIDs
but then ship with tools that do not know how to handle signed ints, so the UIDs show up as
huge numbers that can then not be fed back into the system. This is a hackish way to fail in a
slightly more useful way when that happens."],
:node_terminus => ["plain", "Where to find information about nodes."],
:catalog_terminus => ["compiler", "Where to get node catalogs. This is useful to change if, for instance,
you'd like to pre-compile catalogs and store them in memcached or some other easily-accessed store."],
:facts_terminus => {
:default => Puppet.application_name.to_s == "master" ? 'yaml' : 'facter',
:desc => "The node facts terminus.",
:hook => proc do |value|
require 'puppet/node/facts'
# Cache to YAML if we're uploading facts away
if %w[rest inventory_service].include? value.to_s
Puppet::Node::Facts.cache_class = :yaml
end
end
},
:inventory_terminus => [ "$facts_terminus", "Should usually be the same as the facts terminus" ],
:httplog => { :default => "$logdir/http.log",
:owner => "root",
:mode => 0640,
:desc => "Where the puppet agent web server logs."
},
:http_proxy_host => ["none",
"The HTTP proxy host to use for outgoing connections. Note: You
may need to use a FQDN for the server hostname when using a proxy."],
:http_proxy_port => [3128, "The HTTP proxy port to use for outgoing connections"],
:filetimeout => [ 15,
"The minimum time to wait (in seconds) between checking for updates in
configuration files. This timeout determines how quickly Puppet checks whether
a file (such as manifests or templates) has changed on disk."
],
:queue_type => ["stomp", "Which type of queue to use for asynchronous processing."],
:queue_source => ["stomp://localhost:61613/", "The source URI of the queue to use for asynchronous processing. If your stomp server requires
authentication, you can include it in the URI as long as your stomp client library is at least 1.1.1"],
:async_storeconfigs => {:default => false, :desc => "Whether to use a queueing system to provide asynchronous database integration.
Requires that `puppetqd` be running and that 'PSON' support for ruby be installed.",
:hook => proc do |value|
if value
# This reconfigures the terminii for Node, Facts, and Catalog
Puppet.settings[:storeconfigs] = true
# But then we modify the configuration
Puppet::Resource::Catalog.cache_class = :queue
else
raise "Cannot disable asynchronous storeconfigs in a running process"
end
end
},
:thin_storeconfigs => {:default => false, :desc =>
"Boolean; wether storeconfigs store in the database only the facts and exported resources.
If true, then storeconfigs performance will be higher and still allow exported/collected
resources, but other usage external to Puppet might not work",
:hook => proc do |value|
Puppet.settings[:storeconfigs] = true if value
end
},
:config_version => ["", "How to determine the configuration version. By default, it will be the
time that the configuration is parsed, but you can provide a shell script to override how the
version is determined. The output of this script will be added to every log message in the
reports, allowing you to correlate changes on your hosts to the source version on the server."],
:zlib => [true,
"Boolean; whether to use the zlib library",
],
:prerun_command => ["", "A command to run before every agent run. If this command returns a non-zero
return code, the entire Puppet run will fail."],
:postrun_command => ["", "A command to run after every agent run. If this command returns a non-zero
return code, the entire Puppet run will be considered to have failed, even though it might have
performed work during the normal run."],
:freeze_main => [false, "Freezes the 'main' class, disallowing any code to be added to it. This
essentially means that you can't have any code outside of a node, class, or definition other
than in the site manifest."]
)
hostname = Facter["hostname"].value
domain = Facter["domain"].value
if domain and domain != ""
fqdn = [hostname, domain].join(".")
else
fqdn = hostname
end
Puppet.setdefaults(
:main,
# We have to downcase the fqdn, because the current ssl stuff (as opposed to in master) doesn't have good facilities for
# manipulating naming.
:certname => {:default => fqdn.downcase, :desc => "The name to use when handling certificates. Defaults
to the fully qualified domain name.",
:call_on_define => true, # Call our hook with the default value, so we're always downcased
:hook => proc { |value| raise(ArgumentError, "Certificate names must be lower case; see #1168") unless value == value.downcase }},
:certdnsnames => {
:default => '',
:hook => proc do |value|
unless value.nil? or value == '' then
Puppet.warning <<WARN
The `certdnsnames` setting is no longer functional,
after CVE-2011-3872. We ignore the value completely.
For your own certificate request you can set `dns_alt_names` in the
configuration and it will apply locally. There is no configuration option to
set DNS alt names, or any other `subjectAltName` value, for another node's
certificate.
Alternately you can use the `--dns_alt_names` command line option to set the
labels added while generating your own CSR.
WARN
end
end,
:desc => <<EOT
The `certdnsnames` setting is no longer functional,
after CVE-2011-3872. We ignore the value completely.
For your own certificate request you can set `dns_alt_names` in the
configuration and it will apply locally. There is no configuration option to
set DNS alt names, or any other `subjectAltName` value, for another node's
certificate.
Alternately you can use the `--dns_alt_names` command line option to set the
labels added while generating your own CSR.
EOT
},
:dns_alt_names => {
:default => '',
:desc => <<EOT,
The comma-separated list of alternative DNS names to use for the local host.
When the node generates a CSR for itself, these are added to the request
as the desired `subjectAltName` in the certificate: additional DNS labels
that the certificate is also valid answering as.
This is generally required if you use a non-hostname `certname`, or if you
want to use `puppet kick` or `puppet resource -H` and the primary certname
does not match the DNS name you use to communicate with the host.
This is unnecessary for agents, unless you intend to use them as a server for
`puppet kick` or remote `puppet resource` management.
It is rarely necessary for servers; it is usually helpful only if you need to
have a pool of multiple load balanced masters, or for the same master to
respond on two physically separate networks under different names.
EOT
},
:certdir => {
:default => "$ssldir/certs",
:owner => "service",
:desc => "The certificate directory."
},
:ssldir => {
:default => "$confdir/ssl",
:mode => 0771,
:owner => "service",
:desc => "Where SSL certificates are kept."
},
:publickeydir => {
:default => "$ssldir/public_keys",
:owner => "service",
:desc => "The public key directory."
},
:requestdir => {
:default => "$ssldir/certificate_requests",
:owner => "service",
:desc => "Where host certificate requests are stored."
},
:privatekeydir => { :default => "$ssldir/private_keys",
:mode => 0750,
:owner => "service",
:desc => "The private key directory."
},
:privatedir => { :default => "$ssldir/private",
:mode => 0750,
:owner => "service",
:desc => "Where the client stores private certificate information."
},
:passfile => { :default => "$privatedir/password",
:mode => 0640,
:owner => "service",
:desc => "Where puppet agent stores the password for its private key.
Generally unused."
},
:hostcsr => { :default => "$ssldir/csr_$certname.pem",
:mode => 0644,
:owner => "service",
:desc => "Where individual hosts store and look for their certificate requests."
},
:hostcert => { :default => "$certdir/$certname.pem",
:mode => 0644,
:owner => "service",
:desc => "Where individual hosts store and look for their certificates."
},
:hostprivkey => { :default => "$privatekeydir/$certname.pem",
:mode => 0600,
:owner => "service",
:desc => "Where individual hosts store and look for their private key."
},
:hostpubkey => { :default => "$publickeydir/$certname.pem",
:mode => 0644,
:owner => "service",
:desc => "Where individual hosts store and look for their public key."
},
:localcacert => { :default => "$certdir/ca.pem",
:mode => 0644,
:owner => "service",
:desc => "Where each client stores the CA certificate."
},
:hostcrl => { :default => "$ssldir/crl.pem",
:mode => 0644,
:owner => "service",
:desc => "Where the host's certificate revocation list can be found.
This is distinct from the certificate authority's CRL."
},
:certificate_revocation => [true, "Whether certificate revocation should be supported by downloading a Certificate Revocation List (CRL)
to all clients. If enabled, CA chaining will almost definitely not work."]
)
setdefaults(
:ca,
:ca_name => ["Puppet CA: $certname", "The name to use for the Certificate Authority certificate."],
:cadir => { :default => "$ssldir/ca",
:owner => "service",
:group => "service",
:mode => 0770,
:desc => "The root directory for the certificate authority."
},
:cacert => { :default => "$cadir/ca_crt.pem",
:owner => "service",
:group => "service",
:mode => 0660,
:desc => "The CA certificate."
},
:cakey => { :default => "$cadir/ca_key.pem",
:owner => "service",
:group => "service",
:mode => 0660,
:desc => "The CA private key."
},
:capub => { :default => "$cadir/ca_pub.pem",
:owner => "service",
:group => "service",
:desc => "The CA public key."
},
:cacrl => { :default => "$cadir/ca_crl.pem",
:owner => "service",
:group => "service",
:mode => 0664,
:desc => "The certificate revocation list (CRL) for the CA. Will be used if present but otherwise ignored.",
:hook => proc do |value|
if value == 'false'
Puppet.warning "Setting the :cacrl to 'false' is deprecated; Puppet will just ignore the crl if yours is missing"
end
end
},
:caprivatedir => { :default => "$cadir/private",
:owner => "service",
:group => "service",
:mode => 0770,
:desc => "Where the CA stores private certificate information."
},
:csrdir => { :default => "$cadir/requests",
:owner => "service",
:group => "service",
:desc => "Where the CA stores certificate requests"
},
:signeddir => { :default => "$cadir/signed",
:owner => "service",
:group => "service",
:mode => 0770,
:desc => "Where the CA stores signed certificates."
},
:capass => { :default => "$caprivatedir/ca.pass",
:owner => "service",
:group => "service",
:mode => 0660,
:desc => "Where the CA stores the password for the private key"
},
:serial => { :default => "$cadir/serial",
:owner => "service",
:group => "service",
:mode => 0644,
:desc => "Where the serial number for certificates is stored."
},
:autosign => { :default => "$confdir/autosign.conf",
:mode => 0644,
:desc => "Whether to enable autosign. Valid values are true (which
autosigns any key request, and is a very bad idea), false (which
never autosigns any key request), and the path to a file, which
uses that configuration file to determine which keys to sign."},
:ca_days => ["", "How long a certificate should be valid.
This parameter is deprecated, use ca_ttl instead"],
:ca_ttl => ["5y", "The default TTL for new certificates; valid values
must be an integer, optionally followed by one of the units
'y' (years of 365 days), 'd' (days), 'h' (hours), or
's' (seconds). The unit defaults to seconds. If this parameter
is set, ca_days is ignored. Examples are '3600' (one hour)
and '1825d', which is the same as '5y' (5 years) "],
:ca_md => ["md5", "The type of hash used in certificates."],
:req_bits => [2048, "The bit length of the certificates."],
:keylength => [1024, "The bit length of keys."],
:cert_inventory => {
:default => "$cadir/inventory.txt",
:mode => 0644,
:owner => "service",
:group => "service",
:desc => "A Complete listing of all certificates"
}
)
# Define the config default.
setdefaults(
Puppet.settings[:name],
:config => ["$confdir/puppet.conf",
"The configuration file for #{Puppet[:name]}."],
:pidfile => ["$rundir/$name.pid", "The pid file"],
:bindaddress => ["", "The address a listening server should bind to. Mongrel servers
default to 127.0.0.1 and WEBrick defaults to 0.0.0.0."],
:servertype => {:default => "webrick", :desc => "The type of server to use. Currently supported
options are webrick and mongrel. If you use mongrel, you will need
a proxy in front of the process or processes, since Mongrel cannot
speak SSL.",
:call_on_define => true, # Call our hook with the default value, so we always get the correct bind address set.
:hook => proc { |value| value == "webrick" ? Puppet.settings[:bindaddress] = "0.0.0.0" : Puppet.settings[:bindaddress] = "127.0.0.1" if Puppet.settings[:bindaddress] == "" }
}
)
setdefaults(:master,
:user => ["puppet", "The user puppet master should run as."],
:group => ["puppet", "The group puppet master should run as."],
:manifestdir => ["$confdir/manifests", "Where puppet master looks for its manifests."],
:manifest => ["$manifestdir/site.pp", "The entry-point manifest for puppet master."],
:code => ["", "Code to parse directly. This is essentially only used
by `puppet`, and should only be set if you're writing your own Puppet
executable"],
:masterlog => { :default => "$logdir/puppetmaster.log",
:owner => "service",
:group => "service",
:mode => 0660,
:desc => "Where puppet master logs. This is generally not used,
since syslog is the default log destination."
},
:masterhttplog => { :default => "$logdir/masterhttp.log",
:owner => "service",
:group => "service",
:mode => 0660,
:create => true,
:desc => "Where the puppet master web server logs."
},
:masterport => [8140, "Which port puppet master listens on."],
:parseonly => [false, "Just check the syntax of the manifests."],
:node_name => ["cert", "How the puppetmaster determines the client's identity
and sets the 'hostname', 'fqdn' and 'domain' facts for use in the manifest,
in particular for determining which 'node' statement applies to the client.
Possible values are 'cert' (use the subject's CN in the client's
certificate) and 'facter' (use the hostname that the client
reported in its facts)"],
:bucketdir => {
:default => "$vardir/bucket",
:mode => 0750,
:owner => "service",
:group => "service",
:desc => "Where FileBucket files are stored."
},
:rest_authconfig => [ "$confdir/auth.conf",
"The configuration file that defines the rights to the different
rest indirections. This can be used as a fine-grained
authorization system for `puppet master`."
],
:ca => [true, "Whether the master should function as a certificate authority."],
:modulepath => {:default => "$confdir/modules:/usr/share/puppet/modules",
:desc => "The search path for modules as a colon-separated list of
directories.", :type => :setting }, # We don't want this to be considered a file, since it's multiple files.
:ssl_client_header => ["HTTP_X_CLIENT_DN", "The header containing an authenticated
client's SSL DN. Only used with Mongrel. This header must be set by the proxy
to the authenticated client's SSL DN (e.g., `/CN=puppet.puppetlabs.com`).
See http://projects.puppetlabs.com/projects/puppet/wiki/Using_Mongrel for more information."],
:ssl_client_verify_header => ["HTTP_X_CLIENT_VERIFY", "The header containing the status
message of the client verification. Only used with Mongrel. This header must be set by the proxy
to 'SUCCESS' if the client successfully authenticated, and anything else otherwise.
See http://projects.puppetlabs.com/projects/puppet/wiki/Using_Mongrel for more information."],
# To make sure this directory is created before we try to use it on the server, we need
# it to be in the server section (#1138).
:yamldir => {:default => "$vardir/yaml", :owner => "service", :group => "service", :mode => "750",
:desc => "The directory in which YAML data is stored, usually in a subdirectory."},
:server_datadir => {:default => "$vardir/server_data", :owner => "service", :group => "service", :mode => "750",
:desc => "The directory in which serialized data is stored, usually in a subdirectory."},
:reports => ["store",
"The list of reports to generate. All reports are looked for
in `puppet/reports/name.rb`, and multiple report names should be
comma-separated (whitespace is okay)."
],
:reportdir => {:default => "$vardir/reports",
:mode => 0750,
:owner => "service",
:group => "service",
:desc => "The directory in which to store reports
received from the client. Each client gets a separate
subdirectory."},
:reporturl => ["http://localhost:3000/reports",
"The URL used by the http reports processor to send reports"],
:fileserverconfig => ["$confdir/fileserver.conf", "Where the fileserver configuration is stored."],
:strict_hostname_checking => [false, "Whether to only search for the complete
hostname as it is in the certificate when searching for node information
in the catalogs."]
)
setdefaults(:metrics,
:rrddir => {:default => "$vardir/rrd",
:mode => 0750,
:owner => "service",
:group => "service",
:desc => "The directory where RRD database files are stored.
Directories for each reporting host will be created under
this directory."
},
:rrdinterval => ["$runinterval", "How often RRD should expect data.
This should match how often the hosts report back to the server."]
)
setdefaults(:agent,
:node_name_value => { :default => "$certname",
:desc => "The explicit value used for the node name for all requests the agent
makes to the master. WARNING: This setting is mutually exclusive with
node_name_fact. Changing this setting also requires changes to the default
auth.conf configuration on the Puppet Master. Please see
http://links.puppetlabs.com/node_name_value for more information."
},
:node_name_fact => { :default => "",
:desc => "The fact name used to determine the node name used for all requests the agent
makes to the master. WARNING: This setting is mutually exclusive with
node_name_value. Changing this setting also requires changes to the default
auth.conf configuration on the Puppet Master. Please see
http://links.puppetlabs.com/node_name_fact for more information.",
:hook => proc do |value|
if !value.empty? and Puppet[:node_name_value] != Puppet[:certname]
raise "Cannot specify both the node_name_value and node_name_fact settings"
end
end
},
:localconfig => { :default => "$statedir/localconfig",
:owner => "root",
:mode => 0660,
:desc => "Where puppet agent caches the local configuration. An
extension indicating the cache format is added automatically."},
:statefile => { :default => "$statedir/state.yaml",
:mode => 0660,
:desc => "Where puppet agent and puppet master store state associated
with the running configuration. In the case of puppet master,
this file reflects the state discovered through interacting
with clients."
},
:clientyamldir => {:default => "$vardir/client_yaml", :mode => "750", :desc => "The directory in which client-side YAML data is stored."},
:client_datadir => {:default => "$vardir/client_data", :mode => "750", :desc => "The directory in which serialized data is stored on the client."},
:classfile => { :default => "$statedir/classes.txt",
:owner => "root",
:mode => 0644,
:desc => "The file in which puppet agent stores a list of the classes
associated with the retrieved configuration. Can be loaded in
the separate `puppet` executable using the `--loadclasses`
option."},
:puppetdlog => { :default => "$logdir/puppetd.log",
:owner => "root",
:mode => 0640,
:desc => "The log file for puppet agent. This is generally not used."
},
:server => ["puppet", "The server to which server puppet agent should connect"],
:ignoreschedules => [false,
"Boolean; whether puppet agent should ignore schedules. This is useful
for initial puppet agent runs."],
:puppetport => [8139, "Which port puppet agent listens on."],
:noop => [false, "Whether puppet agent should be run in noop mode."],
:runinterval => [1800, # 30 minutes
"How often puppet agent applies the client configuration; in seconds."],
:listen => [false, "Whether puppet agent should listen for
connections. If this is true, then by default only the
`runner` server is started, which allows remote authorized
and authenticated nodes to connect and trigger `puppet agent`
runs."],
:ca_server => ["$server", "The server to use for certificate
authority requests. It's a separate server because it cannot
and does not need to horizontally scale."],
:ca_port => ["$masterport", "The port to use for the certificate authority."],
:catalog_format => {
:default => "",
:desc => "(Deprecated for 'preferred_serialization_format') What format to
use to dump the catalog. Only supports 'marshal' and 'yaml'. Only
matters on the client, since it asks the server for a specific format.",
:hook => proc { |value|
if value
Puppet.warning "Setting 'catalog_format' is deprecated; use 'preferred_serialization_format' instead."
Puppet.settings[:preferred_serialization_format] = value
end
}
},
:preferred_serialization_format => ["pson", "The preferred means of serializing
ruby instances for passing over the wire. This won't guarantee that all
instances will be serialized using this method, since not all classes
can be guaranteed to support this format, but it will be used for all
classes that support it."],
:puppetdlockfile => [ "$statedir/puppetdlock", "A lock file to temporarily stop puppet agent from doing anything."],
:usecacheonfailure => [true,
"Whether to use the cached configuration when the remote
configuration will not compile. This option is useful for testing
new configurations, where you want to fix the broken configuration
rather than reverting to a known-good one."
],
:use_cached_catalog => [false,
"Whether to only use the cached catalog rather than compiling a new catalog
on every run. Puppet can be run with this enabled by default and then selectively
disabled when a recompile is desired."],
:ignorecache => [false,
"Ignore cache and always recompile the configuration. This is
useful for testing new configurations, where the local cache may in
fact be stale even if the timestamps are up to date - if the facts
change or if the server changes."
],
:downcasefacts => [false, "Whether facts should be made all lowercase when sent to the server."],
:dynamicfacts => ["memorysize,memoryfree,swapsize,swapfree",
"Facts that are dynamic; these facts will be ignored when deciding whether
changed facts should result in a recompile. Multiple facts should be
comma-separated."],
:splaylimit => ["$runinterval",
"The maximum time to delay before runs. Defaults to being the same as the
run interval."],
:splay => [false,
"Whether to sleep for a pseudo-random (but consistent) amount of time before
a run."],
:clientbucketdir => {
:default => "$vardir/clientbucket",
:mode => 0750,
:desc => "Where FileBucket files are stored locally."
},
:configtimeout => [120,
"How long the client should wait for the configuration to be retrieved
before considering it a failure. This can help reduce flapping if too
many clients contact the server at one time."
],
:reportserver => {
:default => "$server",
:call_on_define => false,
:desc => "(Deprecated for 'report_server') The server to which to send transaction reports.",
:hook => proc do |value|
Puppet.settings[:report_server] = value if value
end
},
:report_server => ["$server",
"The server to send transaction reports to."
],
:report_port => ["$masterport",
"The port to communicate with the report_server."
],
:inventory_server => ["$server",
"The server to send facts to."
],
:inventory_port => ["$masterport",
"The port to communicate with the inventory_server."
],
:report => [false,
"Whether to send reports after every transaction."
],
:lastrunfile => { :default => "$statedir/last_run_summary.yaml",
:mode => 0660,
:desc => "Where puppet agent stores the last run report summary in yaml format."
},
:lastrunreport => { :default => "$statedir/last_run_report.yaml",
:mode => 0660,
:desc => "Where puppet agent stores the last run report in yaml format."
},
:graph => [false, "Whether to create dot graph files for the different
configuration graphs. These dot files can be interpreted by tools
like OmniGraffle or dot (which is part of Graphviz)."],
:graphdir => ["$statedir/graphs", "Where to store dot-outputted graphs."],
:http_compression => [false, "Allow http compression in REST communication with the master.
This setting might improve performance for agent -> master communications over slow WANs.
Your puppetmaster needs to support compression (usually by activating some settings in a reverse-proxy
in front of the puppetmaster, which rules out webrick).
It is harmless to activate this setting if your master doesn't support
compression, but if it supports it, this setting might reduce performance on high-speed LANs."]
)
setdefaults(:inspect,
:archive_files => [false, "During an inspect run, whether to archive files whose contents are audited to a file bucket."],
:archive_file_server => ["$server", "During an inspect run, the file bucket server to archive files to if archive_files is set."]
)
# Plugin information.
setdefaults(
:main,
:plugindest => ["$libdir",
"Where Puppet should store plugins that it pulls down from the central
server."],
:pluginsource => ["puppet://$server/plugins",
"From where to retrieve plugins. The standard Puppet `file` type
is used for retrieval, so anything that is a valid file source can
be used here."],
:pluginsync => [false, "Whether plugins should be synced with the central server."],
:pluginsignore => [".svn CVS .git", "What files to ignore when pulling down plugins."]
)
# Central fact information.
setdefaults(
:main,
:factpath => {:default => "$vardir/lib/facter:$vardir/facts",
:desc => "Where Puppet should look for facts. Multiple directories should
be colon-separated, like normal PATH variables.",
:call_on_define => true, # Call our hook with the default value, so we always get the value added to facter.
:type => :setting, # Don't consider it a file, because it could be multiple colon-separated files
:hook => proc { |value| Facter.search(value) if Facter.respond_to?(:search) }},
:factdest => ["$vardir/facts/",
"Where Puppet should store facts that it pulls down from the central
server."],
:factsource => ["puppet://$server/facts/",
"From where to retrieve facts. The standard Puppet `file` type
is used for retrieval, so anything that is a valid file source can
be used here."],
:factsync => [false, "Whether facts should be synced with the central server."],
:factsignore => [".svn CVS", "What files to ignore when pulling down facts."]
)
setdefaults(
:tagmail,
:tagmap => ["$confdir/tagmail.conf", "The mapping between reporting tags and email addresses."],
:sendmail => [which('sendmail') || '', "Where to find the sendmail binary with which to send email."],
:reportfrom => ["report@" + [Facter["hostname"].value, Facter["domain"].value].join("."), "The 'from' email address for the reports."],
:smtpserver => ["none", "The server through which to send email reports."]
)
setdefaults(
:rails,
:dblocation => { :default => "$statedir/clientconfigs.sqlite3",
:mode => 0660,
:owner => "service",
:group => "service",
:desc => "The database cache for client configurations. Used for
querying within the language."
},
:dbadapter => [ "sqlite3", "The type of database to use." ],
:dbmigrate => [ false, "Whether to automatically migrate the database." ],
:dbname => [ "puppet", "The name of the database to use." ],
:dbserver => [ "localhost", "The database server for caching. Only
used when networked databases are used."],
:dbport => [ "", "The database password for caching. Only
used when networked databases are used."],
:dbuser => [ "puppet", "The database user for caching. Only
used when networked databases are used."],
:dbpassword => [ "puppet", "The database password for caching. Only
used when networked databases are used."],
:dbconnections => [ '', "The number of database connections for networked
databases. Will be ignored unless the value is a positive integer."],
:dbsocket => [ "", "The database socket location. Only used when networked
databases are used. Will be ignored if the value is an empty string."],
:railslog => {:default => "$logdir/rails.log",
:mode => 0600,
:owner => "service",
:group => "service",
:desc => "Where Rails-specific logs are sent"
},
:rails_loglevel => ["info", "The log level for Rails connections. The value must be
a valid log level within Rails. Production environments normally use `info`
and other environments normally use `debug`."]
)
setdefaults(
:couchdb,
:couchdb_url => ["http://127.0.0.1:5984/puppet", "The URL where the puppet couchdb database will be created"]
)
setdefaults(
:transaction,
:tags => ["", "Tags to use to find resources. If this is set, then
only resources tagged with the specified tags will be applied.
Values must be comma-separated."],
:evaltrace => [false, "Whether each resource should log when it is
being evaluated. This allows you to interactively see exactly
what is being done."],
:summarize => [false,
"Whether to print a transaction summary."
]
)
setdefaults(
:main,
:external_nodes => ["none",
"An external command that can produce node information. The output
must be a YAML dump of a hash, and that hash must have one or both of
`classes` and `parameters`, where `classes` is an array and
`parameters` is a hash. For unknown nodes, the command should
exit with a non-zero exit code.
This command makes it straightforward to store your node mapping
information in other data sources like databases."])
setdefaults(
:ldap,
:ldapnodes => [false,
"Whether to search for node configurations in LDAP. See
http://projects.puppetlabs.com/projects/puppet/wiki/LDAP_Nodes for more information."],
:ldapssl => [false,
"Whether SSL should be used when searching for nodes.
Defaults to false because SSL usually requires certificates
to be set up on the client side."],
:ldaptls => [false,
"Whether TLS should be used when searching for nodes.
Defaults to false because TLS usually requires certificates
to be set up on the client side."],
:ldapserver => ["ldap",
"The LDAP server. Only used if `ldapnodes` is enabled."],
:ldapport => [389,
"The LDAP port. Only used if `ldapnodes` is enabled."],
:ldapstring => ["(&(objectclass=puppetClient)(cn=%s))",
"The search string used to find an LDAP node."],
:ldapclassattrs => ["puppetclass",
"The LDAP attributes to use to define Puppet classes. Values
should be comma-separated."],
:ldapstackedattrs => ["puppetvar",
"The LDAP attributes that should be stacked to arrays by adding
the values in all hierarchy elements of the tree. Values
should be comma-separated."],
:ldapattrs => ["all",
"The LDAP attributes to include when querying LDAP for nodes. All
returned attributes are set as variables in the top-level scope.
Multiple values should be comma-separated. The value 'all' returns
all attributes."],
:ldapparentattr => ["parentnode",
"The attribute to use to define the parent node."],
:ldapuser => ["",
"The user to use to connect to LDAP. Must be specified as a
full DN."],
:ldappassword => ["", "The password to use to connect to LDAP."],
:ldapbase => ["",
"The search base for LDAP searches. It's impossible to provide
a meaningful default here, although the LDAP libraries might
have one already set. Generally, it should be the 'ou=Hosts'
branch under your main directory."]
)
setdefaults(:master,
:storeconfigs => {:default => false, :desc => "Whether to store each client's configuration. This
requires ActiveRecord from Ruby on Rails.",
:call_on_define => true, # Call our hook with the default value, so we always get the libdir set.
:hook => proc do |value|
require 'puppet/node'
require 'puppet/node/facts'
if value
require 'puppet/rails'
raise "StoreConfigs not supported without ActiveRecord 2.1 or higher" unless Puppet.features.rails?
Puppet::Resource::Catalog.cache_class = :active_record unless Puppet.settings[:async_storeconfigs]
Puppet::Node::Facts.cache_class = :active_record
Puppet::Node.cache_class = :active_record
end
end
}
)
# This doesn't actually work right now.
setdefaults(
:parser,
:lexical => [false, "Whether to use lexical scoping (vs. dynamic)."],
:templatedir => ["$vardir/templates",
"Where Puppet looks for template files. Can be a list of colon-seperated
directories."
]
)
end
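# A minimal sketch of the two declaration forms used throughout this file,
# assuming the surrounding setdefaults API; the :example_* names below are
# hypothetical, not real Puppet settings. Each entry is either a two-element
# array [default, description] or a hash that can also carry file metadata
# (:mode, :owner, :group) and a :hook proc that runs when the value is set.
#
#   setdefaults(:main,
#     :example_flag => [false, "A plain boolean setting."],
#     :example_dir  => {:default => "$vardir/example", :mode => 0750,
#       :desc => "A directory-backed setting.",
#       :hook => proc { |value| Puppet.notice "example_dir set to #{value}" }})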

View File

@ -0,0 +1,7 @@
require 'puppet'
module Puppet::DSL
end
require 'puppet/dsl/resource_type_api'
require 'puppet/dsl/resource_api'

View File

@ -0,0 +1,120 @@
# This module adds functionality to a resource to make it
# capable of evaluating the DSL resource type block and also
# hooking into the scope system.
require 'puppet/resource/type_collection_helper'
class Puppet::DSL::ResourceAPI
include Puppet::Resource::TypeCollectionHelper
FUNCTION_MAP = {:acquire => :include}
attr_reader :scope, :resource, :block
def environment
scope.environment
end
def evaluate
set_instance_variables
instance_eval(&block)
end
def initialize(resource, scope, block)
@scope = scope
@resource = resource
@block = block
end
# Try to convert a missing method into a resource type or a function.
def method_missing(name, *args)
raise "MethodMissing loop when searching for #{name} with #{args.inspect}" if searching_for_method?
@searching_for_method = true
return create_resource(name, args[0], args[1]) if valid_type?(name)
name = map_function(name)
return call_function(name, args) if Puppet::Parser::Functions.function(name)
super
ensure
@searching_for_method = false
end
def set_instance_variables
resource.eachparam do |param|
instance_variable_set("@#{param.name}", param.value)
end
@title = resource.title
@name ||= resource.title
end
def create_resource(type, names, arguments = nil)
names = [names] unless names.is_a?(Array)
arguments ||= {}
raise ArgumentError, "Resource arguments must be provided as a hash" unless arguments.is_a?(Hash)
names.collect do |name|
resource = Puppet::Parser::Resource.new(type, name, :scope => scope)
arguments.each do |param, value|
resource[param] = value
end
resource.exported = true if exporting?
resource.virtual = true if virtualizing?
scope.compiler.add_resource(scope, resource)
resource
end
end
def call_function(name, args)
return false unless method = Puppet::Parser::Functions.function(name)
scope.send(method, *args)
end
def export(resources = nil, &block)
if resources
resources.each { |resource| resource.exported = true }
return resources
end
@exporting = true
instance_eval(&block)
ensure
@exporting = false
end
def virtual(resources = nil, &block)
if resources
resources.each { |resource| resource.virtual = true }
return resources
end
@virtualizing = true
instance_eval(&block)
ensure
@virtualizing = false
end
def valid_type?(name)
return true if [:class, :node].include?(name)
return true if Puppet::Type.type(name)
return(known_resource_types.definition(name) ? true : false)
end
private
def exporting?
@exporting
end
def map_function(name)
FUNCTION_MAP[name] || name
end
def searching_for_method?
@searching_for_method
end
def virtualizing?
@virtualizing
end
end
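# A minimal sketch of what a DSL resource body evaluated through
# ResourceAPI#evaluate can look like, assuming a compiled scope and resource
# are already in place; the resource titles below are hypothetical. Unknown
# method names are resolved by method_missing above: a valid type name goes
# through create_resource, and a known parser function (after FUNCTION_MAP
# translation, e.g. :acquire -> :include) goes through call_function.
#
#   file "/tmp/example", :ensure => "file", :mode => "0644"   # create_resource(:file, ...)
#   acquire "ntp"                                             # call_function(:include, ["ntp"])
#   export do
#     file "/tmp/exported", :ensure => "file"                 # marked exported = true
#   end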

View File

@ -0,0 +1,46 @@
require 'puppet/resource/type'
class Puppet::DSL::ResourceTypeAPI
def define(name, *args, &block)
result = __mk_resource_type__(:definition, name, Hash.new, block)
result.set_arguments(__munge_type_arguments__(args))
nil
end
def hostclass(name, options = {}, &block)
__mk_resource_type__(:hostclass, name, options, block)
nil
end
def node(name, options = {}, &block)
__mk_resource_type__(:node, name, options, block)
nil
end
# Note: we don't want the user to call the following methods
# directly. However, we can't stop them by making the methods
# private because the user's .rb code gets instance_eval'ed on an
# instance of this class. So instead we name the methods using
# double underscores to discourage users from calling them.
def __mk_resource_type__(type, name, options, code)
klass = Puppet::Resource::Type.new(type, name, options)
klass.ruby_code = code if code
Thread.current[:known_resource_types].add klass
klass
end
def __munge_type_arguments__(args)
args.inject([]) do |result, item|
if item.is_a?(Hash)
item.each { |p, v| result << [p, v] }
else
result << item
end
result
end
end
end
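# A minimal sketch of the Ruby manifest API above, assuming the code is
# instance_eval'ed on a ResourceTypeAPI instance with
# Thread.current[:known_resource_types] set up (as Puppet does for .rb
# manifests); the names below are hypothetical. Non-hash arguments to define
# are kept as bare parameter names, while hash entries become
# [name, default] pairs via __munge_type_arguments__.
#
#   hostclass :webserver do
#     # class body
#   end
#
#   define :vhost, :port, { :docroot => "/var/www" } do
#     # definition body; arguments become [:port, [:docroot, "/var/www"]]
#   end
#
#   node "www.example.com" do
#   end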

View File

@ -0,0 +1,43 @@
module Puppet # :nodoc:
# The base class for all Puppet errors. We want to make it easy to add
# line and file information. This probably isn't necessary for all
# errors, but...
class Error < RuntimeError
attr_accessor :line, :file
def backtrace
if defined?(@backtrace)
return @backtrace
else
return super
end
end
def initialize(message, line = nil, file = nil)
@message = message
@line = line if line
@file = file if file
end
def to_s
str = nil
if self.file and self.line
str = "#{@message} at #{@file}:#{@line}"
elsif self.line
str = "#{@message} at line #{@line}"
elsif self.file
str = "#{@message} in #{self.file}"
else
str = @message.to_s
end
str
end
end
# An error class for when I don't know what happened. Automatically
# prints a stack trace when in debug mode.
class DevError < Puppet::Error
end
end
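# A minimal usage sketch (hypothetical message, file, and line): the optional
# line/file attributes feed directly into the formatted message.
#
#   begin
#     raise Puppet::Error.new("something broke", 12, "/etc/puppet/manifests/site.pp")
#   rescue Puppet::Error => e
#     puts e.to_s   # => "something broke at /etc/puppet/manifests/site.pp:12"
#   end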

19
mcollective/lib/puppet/external/base64.rb vendored Executable file
View File

@ -0,0 +1,19 @@
# a stupid hack class to get rid of all of the warnings but
# still make the encode/decode methods available
# 1.8.2 has a Base64 class, but 1.8.1 just imports the methods directly
# into Object
require 'base64'
unless defined?(Base64)
class Base64
def Base64.encode64(*args)
Object.method(:encode64).call(*args)
end
def Base64.decode64(*args)
Object.method(:decode64).call(*args)
end
end
end
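# Usage sketch: with this shim in place the class-level API is available on
# both 1.8.1 and 1.8.2+.
#
#   Base64.decode64(Base64.encode64("puppet"))   # => "puppet"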

326
mcollective/lib/puppet/external/dot.rb vendored Normal file
View File

@ -0,0 +1,326 @@
# rdot.rb
#
#
# This is a modified version of dot.rb from Dave Thomas's rdoc project. I [Horst Duchene]
# renamed it to rdot.rb to avoid collision with an installed rdoc/dot.
#
# It also supports undirected edges.
module DOT
# These global vars are used to make nice graph source.
$tab = '    '
$tab2 = $tab * 2
# if we don't like 4 spaces, we can change it any time
def change_tab (t)
$tab = t
$tab2 = t * 2
end
# options for node declaration
NODE_OPTS = [
# attributes due to
# http://www.graphviz.org/Documentation/dotguide.pdf
# March 26, 2005
'bottomlabel', # auxiliary label for nodes of shape M*
'color', # default: black; node shape color
'comment', # any string (format-dependent)
'distortion', # default: 0.0; node distortion for shape=polygon
'fillcolor', # default: lightgrey/black; node fill color
'fixedsize', # default: false; label text has no effect on node size
'fontcolor', # default: black; type face color
'fontname', # default: Times-Roman; font family
'fontsize', # default: 14; point size of label
'group', # name of nodes group
'height', # default: .5; height in inches
'label', # default: node name; any string
'layer', # default: overlay range; all, id or id:id
'orientation', # default: 0.0; node rotation angle
'peripheries', # shape-dependent number of node boundaries
'regular', # default: false; force polygon to be regular
'shape', # default: ellipse; node shape; see Section 2.1 and Appendix E
'shapefile', # external EPSF or SVG custom shape file
'sides', # default: 4; number of sides for shape=polygon
'skew' , # default: 0.0; skewing of node for shape=polygon
'style', # graphics options, e.g. bold, dotted, filled; cf. Section 2.3
'toplabel', # auxiliary label for nodes of shape M*
'URL', # URL associated with node (format-dependent)
'width', # default: .75; width in inches
'z', # default: 0.0; z coordinate for VRML output
# maintained for backward compatibility or rdot internal
'bgcolor',
'rank'
]
# options for edge declaration
EDGE_OPTS = [
'arrowhead', # default: normal; style of arrowhead at head end
'arrowsize', # default: 1.0; scaling factor for arrowheads
'arrowtail', # default: normal; style of arrowhead at tail end
'color', # default: black; edge stroke color
'comment', # any string (format-dependent)
'constraint', # default: true use edge to affect node ranking
'decorate', # if set, draws a line connecting labels with their edges
'dir', # default: forward; forward, back, both, or none
'fontcolor', # default: black; type face color
'fontname', # default: Times-Roman; font family
'fontsize', # default: 14; point size of label
'headlabel', # label placed near head of edge
'headport', # n,ne,e,se,s,sw,w,nw
'headURL', # URL attached to head label if output format is ismap
'label', # edge label
'labelangle', # default: -25.0; angle in degrees which head or tail label is rotated off edge
'labeldistance', # default: 1.0; scaling factor for distance of head or tail label from node
'labelfloat', # default: false; lessen constraints on edge label placement
'labelfontcolor', # default: black; type face color for head and tail labels
'labelfontname', # default: Times-Roman; font family for head and tail labels
'labelfontsize', # default: 14 point size for head and tail labels
'layer', # default: overlay range; all, id or id:id
'lhead', # name of cluster to use as head of edge
'ltail', # name of cluster to use as tail of edge
'minlen', # default: 1 minimum rank distance between head and tail
'samehead', # tag for head node; edge heads with the same tag are merged onto the same port
'sametail', # tag for tail node; edge tails with the same tag are merged onto the same port
'style', # graphics options, e.g. bold, dotted, filled; cf. Section 2.3
'taillabel', # label placed near tail of edge
'tailport', # n,ne,e,se,s,sw,w,nw
'tailURL', # URL attached to tail label if output format is ismap
'weight', # default: 1; integer cost of stretching an edge
# maintained for backward compatibility or rdot internal
'id'
]
# options for graph declaration
GRAPH_OPTS = [
'bgcolor',
'center', 'clusterrank', 'color', 'concentrate',
'fontcolor', 'fontname', 'fontsize',
'label', 'layerseq',
'margin', 'mclimit',
'nodesep', 'nslimit',
'ordering', 'orientation',
'page',
'rank', 'rankdir', 'ranksep', 'ratio',
'size'
]
# a root class for any element in dot notation
class DOTSimpleElement
attr_accessor :name
def initialize (params = {})
@label = params['name'] ? params['name'] : ''
end
def to_s
@name
end
end
# an element that has options ( node, edge, or graph )
class DOTElement < DOTSimpleElement
# attr_reader :parent
attr_accessor :name, :options
def initialize (params = {}, option_list = [])
super(params)
@name = params['name'] ? params['name'] : nil
@parent = params['parent'] ? params['parent'] : nil
@options = {}
option_list.each{ |i|
@options[i] = params[i] if params[i]
}
@options['label'] ||= @name if @name != 'node'
end
def each_option
@options.each{ |i| yield i }
end
def each_option_pair
@options.each_pair{ |key, val| yield key, val }
end
#def parent=( thing )
# @parent.delete( self ) if defined?( @parent ) and @parent
# @parent = thing
#end
end
# This is used when we build nodes that have shape=record
# ports don't have options :)
class DOTPort < DOTSimpleElement
attr_accessor :label
def initialize (params = {})
super(params)
@name = params['label'] ? params['label'] : ''
end
def to_s
( @name && @name != "" ? "<#{@name}>" : "" ) + "#{@label}"
end
end
# node element
class DOTNode < DOTElement
@ports
def initialize (params = {}, option_list = NODE_OPTS)
super(params, option_list)
@ports = params['ports'] ? params['ports'] : []
end
def each_port
@ports.each { |i| yield i }
end
def << (thing)
@ports << thing
end
def push (thing)
@ports.push(thing)
end
def pop
@ports.pop
end
def to_s (t = '')
# This code is totally incomprehensible; it needs to be replaced!
label = @options['shape'] != 'record' && @ports.length == 0 ?
@options['label'] ?
t + $tab + "label = \"#{@options['label']}\"\n" :
'' :
t + $tab + 'label = "' + " \\\n" +
t + $tab2 + "#{@options['label']}| \\\n" +
@ports.collect{ |i|
t + $tab2 + i.to_s
}.join( "| \\\n" ) + " \\\n" +
t + $tab + '"' + "\n"
t + "#{@name} [\n" +
@options.to_a.collect{ |i|
i[1] && i[0] != 'label' ?
t + $tab + "#{i[0]} = #{i[1]}" : nil
}.compact.join( ",\n" ) + ( label != '' ? ",\n" : "\n" ) +
label +
t + "]\n"
end
end
# A subgraph element is the same as a graph, but has a different header in
# DOT notation.
class DOTSubgraph < DOTElement
@nodes
@dot_string
def initialize (params = {}, option_list = GRAPH_OPTS)
super(params, option_list)
@nodes = params['nodes'] ? params['nodes'] : []
@dot_string = 'graph'
end
def each_node
@nodes.each{ |i| yield i }
end
def << (thing)
@nodes << thing
end
def push (thing)
@nodes.push( thing )
end
def pop
@nodes.pop
end
def to_s (t = '')
hdr = t + "#{@dot_string} #{@name} {\n"
options = @options.to_a.collect{ |name, val|
val && name != 'label' ?
t + $tab + "#{name} = #{val}" :
name ? t + $tab + "#{name} = \"#{val}\"" : nil
}.compact.join( "\n" ) + "\n"
nodes = @nodes.collect{ |i|
i.to_s( t + $tab )
}.join( "\n" ) + "\n"
hdr + options + nodes + t + "}\n"
end
end
# This is a directed graph.
class DOTDigraph < DOTSubgraph
def initialize (params = {}, option_list = GRAPH_OPTS)
super(params, option_list)
@dot_string = 'digraph'
end
end
# This is an edge.
class DOTEdge < DOTElement
attr_accessor :from, :to
def initialize (params = {}, option_list = EDGE_OPTS)
super(params, option_list)
@from = params['from'] ? params['from'] : nil
@to = params['to'] ? params['to'] : nil
end
def edge_link
'--'
end
def to_s (t = '')
t + "#{@from} #{edge_link} #{to} [\n" +
@options.to_a.collect{ |i|
i[1] && i[0] != 'label' ?
t + $tab + "#{i[0]} = #{i[1]}" :
i[1] ? t + $tab + "#{i[0]} = \"#{i[1]}\"" : nil
}.compact.join( "\n" ) + "\n#{t}]\n"
end
end
class DOTDirectedEdge < DOTEdge
def edge_link
'->'
end
end
end
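# A minimal sketch of building a graph with these classes and rendering it to
# dot source; the graph and node names are hypothetical.
#
#   graph = DOT::DOTDigraph.new('name' => 'deps')
#   graph << DOT::DOTNode.new('name' => 'a', 'shape' => 'ellipse')
#   graph << DOT::DOTNode.new('name' => 'b')
#   graph << DOT::DOTDirectedEdge.new('from' => 'a', 'to' => 'b')
#   puts graph.to_s   # => "digraph deps { ... a -> b ... }"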

View File

@ -0,0 +1 @@
require "puppet/external/event-loop/event-loop"

View File

@ -0,0 +1,367 @@
## better-definers.rb --- better attribute and method definers
# Copyright (C) 2005 Daniel Brockman
# This program is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation;
# either version 2 of the License, or (at your option) any
# later version.
# This file is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty
# of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
# You should have received a copy of the GNU General Public
# License along with this program; if not, write to the Free
# Software Foundation, 51 Franklin Street, Fifth Floor,
# Boston, MA 02110-1301, USA.
class Symbol
def predicate?
to_s.include? "?" end
def imperative?
to_s.include? "!" end
def writer?
to_s.include? "=" end
def punctuated?
predicate? or imperative? or writer? end
def without_punctuation
to_s.delete("?!=").to_sym end
def predicate
without_punctuation.to_s + "?" end
def imperative
without_punctuation.to_s + "!" end
def writer
without_punctuation.to_s + "=" end
end
class Hash
def collect! (&block)
replace Hash[*collect(&block).flatten]
end
def flatten
to_a.flatten
end
end
module Kernel
def returning (value)
yield value ; value
end
end
class Module
def define_hard_aliases (name_pairs)
for new_aliases, existing_name in name_pairs do
new_aliases.kind_of? Array or new_aliases = [new_aliases]
for new_alias in new_aliases do
alias_method(new_alias, existing_name)
end
end
end
def define_soft_aliases (name_pairs)
for new_aliases, existing_name in name_pairs do
new_aliases.kind_of? Array or new_aliases = [new_aliases]
for new_alias in new_aliases do
class_eval %{def #{new_alias}(*args, &block)
#{existing_name}(*args, &block) end}
end
end
end
define_soft_aliases \
:define_hard_alias => :define_hard_aliases,
:define_soft_alias => :define_soft_aliases
# This method lets you define predicates like :foo?,
# which will be defined to return the value of @foo.
def define_readers (*names)
for name in names.map { |x| x.to_sym } do
if name.punctuated?
# There's no way to define an efficient reader whose
# name is different from the instance variable.
class_eval %{def #{name} ; @#{name.without_punctuation} end}
else
# Use `attr_reader' to define an efficient method.
attr_reader(name)
end
end
end
def writer_defined? (name)
method_defined?(name.to_sym.writer)
end
# If you pass a predicate symbol :foo? to this method, it'll first
# define a regular writer method :foo, without a question mark.
# Then it'll define an imperative writer method :foo! as a shorthand
# for setting the property to true.
def define_writers (*names, &body)
for name in names.map { |x| x.to_sym } do
if block_given?
define_method(name.writer, &body)
else
attr_writer(name.without_punctuation)
end
if name.predicate?
class_eval %{def #{name.imperative}
self.#{name.writer} true end}
end
end
end
define_soft_aliases \
:define_reader => :define_readers,
:define_writer => :define_writers
# We don't need a singular alias for `define_accessors',
# because it always defines at least two methods.
def define_accessors (*names)
define_readers(*names)
define_writers(*names)
end
def define_opposite_readers (name_pairs)
name_pairs.collect! { |k, v| [k.to_sym, v.to_sym] }
for opposite_name, name in name_pairs do
define_reader(name) unless method_defined?(name)
class_eval %{def #{opposite_name} ; not #{name} end}
end
end
def define_opposite_writers (name_pairs)
name_pairs.collect! { |k, v| [k.to_sym, v.to_sym] }
for opposite_name, name in name_pairs do
define_writer(name) unless writer_defined?(name)
class_eval %{def #{opposite_name.writer} x
self.#{name.writer} !x end}
class_eval %{def #{opposite_name.imperative}
self.#{name.writer} false end}
end
end
define_soft_aliases \
:define_opposite_reader => :define_opposite_readers,
:define_opposite_writer => :define_opposite_writers
def define_opposite_accessors (name_pairs)
define_opposite_readers name_pairs
define_opposite_writers name_pairs
end
def define_reader_with_opposite (name_pair, &body)
name, opposite_name = name_pair.flatten.collect { |x| x.to_sym }
define_method(name, &body)
define_opposite_reader(opposite_name => name)
end
def define_writer_with_opposite (name_pair, &body)
name, opposite_name = name_pair.flatten.collect { |x| x.to_sym }
define_writer(name, &body)
define_opposite_writer(opposite_name => name)
end
public :define_method
def define_methods (*names, &body)
names.each { |name| define_method(name, &body) }
end
def define_private_methods (*names, &body)
define_methods(*names, &body)
names.each { |name| private name }
end
def define_protected_methods (*names, &body)
define_methods(*names, &body)
names.each { |name| protected name }
end
def define_private_method (name, &body)
define_method(name, &body)
private name
end
def define_protected_method (name, &body)
define_method(name, &body)
protected name
end
end
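# A minimal sketch of the punctuation-aware definers above; the Lamp class is
# hypothetical. A predicate name like :lit? yields a plain writer (lit=), an
# imperative shorthand (lit!), a predicate reader (lit?), and, via the
# opposite definers, a negated reader (dark?).
#
#   class Lamp
#     define_accessors :color
#     define_writers :lit?
#     define_readers :lit?
#     define_opposite_readers :dark? => :lit?
#   end
#
#   lamp = Lamp.new
#   lamp.color = "red"
#   lamp.lit!       # same as lamp.lit = true
#   lamp.dark?      # => false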
class ImmutableAttributeError < StandardError
def initialize (attribute=nil, message=nil)
super message
@attribute = attribute
end
define_accessors :attribute
def to_s
if @attribute and @message
"cannot change the value of `#@attribute': #@message"
elsif @attribute
"cannot change the value of `#@attribute'"
elsif @message
"cannot change the value of attribute: #@message"
else
"cannot change the value of attribute"
end
end
end
class Module
# Guard each of the specified attributes by replacing the writer
# method with a proxy that asks the supplied block before proceeding
# with the change.
#
# If it's okay to change the attribute, the block should return
# either nil or the symbol :mutable. If it isn't okay, the block
# should return a string saying why the attribute can't be changed.
# If you don't want to provide a reason, you can have the block
# return just the symbol :immutable.
def guard_writers(*names, &predicate)
for name in names.map { |x| x.to_sym } do
define_hard_alias("__unguarded_#{name.writer}" => name.writer)
define_method(name.writer) do |new_value|
case result = predicate.call
when :mutable, nil
__send__("__unguarded_#{name.writer}", new_value)
when :immutable
raise ImmutableAttributeError.new(name)
else
raise ImmutableAttributeError.new(name, result)
end
end
end
end
def define_guarded_writers (*names, &block)
define_writers(*names)
guard_writers(*names, &block)
end
define_soft_alias :guard_writer => :guard_writers
define_soft_alias :define_guarded_writer => :define_guarded_writers
end
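# A minimal sketch of guard_writers; the Config class and the frozen flag are
# hypothetical. The guard block is consulted before every assignment and must
# return :mutable (or nil) to allow it, :immutable or a reason string to
# refuse it.
#
#   class Config
#     define_accessors :path
#   end
#
#   frozen = false
#   Config.guard_writers(:path) { frozen ? :immutable : :mutable }
#
#   cfg = Config.new
#   cfg.path = "/etc/puppet"   # allowed
#   frozen = true
#   cfg.path = "/tmp"          # raises ImmutableAttributeError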
if __FILE__ == $0
require "test/unit"
class DefineAccessorsTest < Test::Unit::TestCase
def setup
@X = Class.new
@Y = Class.new @X
@x = @X.new
@y = @Y.new
end
def test_define_hard_aliases
@X.define_method(:foo) { 123 }
@X.define_method(:baz) { 321 }
@X.define_hard_aliases :bar => :foo, :quux => :baz
assert_equal @x.foo, 123
assert_equal @x.bar, 123
assert_equal @y.foo, 123
assert_equal @y.bar, 123
assert_equal @x.baz, 321
assert_equal @x.quux, 321
assert_equal @y.baz, 321
assert_equal @y.quux, 321
@Y.define_method(:foo) { 456 }
assert_equal @y.foo, 456
assert_equal @y.bar, 123
@Y.define_method(:quux) { 654 }
assert_equal @y.baz, 321
assert_equal @y.quux, 654
end
def test_define_soft_aliases
@X.define_method(:foo) { 123 }
@X.define_method(:baz) { 321 }
@X.define_soft_aliases :bar => :foo, :quux => :baz
assert_equal @x.foo, 123
assert_equal @x.bar, 123
assert_equal @y.foo, 123
assert_equal @y.bar, 123
assert_equal @x.baz, 321
assert_equal @x.quux, 321
assert_equal @y.baz, 321
assert_equal @y.quux, 321
@Y.define_method(:foo) { 456 }
assert_equal @y.foo, @y.bar, 456
@Y.define_method(:quux) { 654 }
assert_equal @y.baz, 321
assert_equal @y.quux, 654
end
def test_define_readers
@X.define_readers :foo, :bar
assert !@x.respond_to?(:foo=)
assert !@x.respond_to?(:bar=)
@x.instance_eval { @foo = 123 ; @bar = 456 }
assert_equal @x.foo, 123
assert_equal @x.bar, 456
@X.define_readers :baz?, :quux?
assert !@x.respond_to?(:baz=)
assert !@x.respond_to?(:quux=)
@x.instance_eval { @baz = false ; @quux = true }
assert !@x.baz?
assert @x.quux?
end
def test_define_writers
assert !@X.writer_defined?(:foo)
assert !@X.writer_defined?(:bar)
@X.define_writers :foo, :bar
assert @X.writer_defined?(:foo)
assert @X.writer_defined?(:bar)
assert @X.writer_defined?(:foo=)
assert @X.writer_defined?(:bar=)
assert @X.writer_defined?(:foo?)
assert @X.writer_defined?(:bar?)
assert !@x.respond_to?(:foo)
assert !@x.respond_to?(:bar)
@x.foo = 123
@x.bar = 456
assert_equal @x.instance_eval { @foo }, 123
assert_equal @x.instance_eval { @bar }, 456
@X.define_writers :baz?, :quux?
assert !@x.respond_to?(:baz?)
assert !@x.respond_to?(:quux?)
@x.baz = true
@x.quux = false
assert_equal @x.instance_eval { @baz }, true
assert_equal @x.instance_eval { @quux }, false
end
def test_define_accessors
@X.define_accessors :foo, :bar
@x.foo = 123 ; @x.bar = 456
assert_equal @x.foo, 123
assert_equal @x.bar, 456
end
def test_define_opposite_readers
@X.define_opposite_readers :foo? => :bar?, :baz? => :quux?
assert !@x.respond_to?(:foo=)
assert !@x.respond_to?(:bar=)
assert !@x.respond_to?(:baz=)
assert !@x.respond_to?(:quux=)
@x.instance_eval { @bar = true ; @quux = false }
assert !@x.foo?
assert @x.bar?
assert @x.baz?
assert !@x.quux?
end
def test_define_opposite_writers
@X.define_opposite_writers :foo? => :bar?, :baz => :quux
end
end
end

View File

@ -0,0 +1,355 @@
## event-loop.rb --- high-level IO multiplexer
# Copyright (C) 2005 Daniel Brockman
# This program is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation;
# either version 2 of the License, or (at your option) any
# later version.
# This file is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty
# of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
# You should have received a copy of the GNU General Public
# License along with this program; if not, write to the Free
# Software Foundation, 51 Franklin Street, Fifth Floor,
# Boston, MA 02110-1301, USA.
require "puppet/external/event-loop/better-definers"
require "puppet/external/event-loop/signal-system"
require "fcntl"
class EventLoop
include SignalEmitter
IO_STATES = [:readable, :writable, :exceptional]
class << self
def default ; @default ||= new end
def default= x ; @default = x end
def current
Thread.current["event-loop::current"] || default end
def current= x
Thread.current["event-loop::current"] = x end
def with_current (new)
if current == new
yield
else
begin
old = self.current
self.current = new
yield
ensure
self.current = old
end
end
end
def method_missing (name, *args, &block)
if current.respond_to? name
current.__send__(name, *args, &block)
else
super
end
end
end
define_signals :before_sleep, :after_sleep
def initialize
@running = false
@awake = false
@wakeup_time = nil
@timers = []
@io_arrays = [[], [], []]
@ios = Hash.new do |h, k| raise ArgumentError,
"invalid IO event: #{k}", caller(2) end
IO_STATES.each_with_index { |x, i| @ios[x] = @io_arrays[i] }
@notify_src, @notify_snk = IO.pipe
# prevent file descriptor leaks
if @notify_src.respond_to?(:fcntl) and defined?(Fcntl) and defined?(Fcntl::F_SETFD) and defined?(Fcntl::FD_CLOEXEC)
@notify_src.fcntl(Fcntl::F_SETFD, Fcntl::FD_CLOEXEC)
@notify_snk.fcntl(Fcntl::F_SETFD, Fcntl::FD_CLOEXEC)
end
@notify_src.will_block = false
@notify_snk.will_block = false
# Each time a byte is sent through the notification pipe
# we need to read it, or IO.select will keep returning.
monitor_io(@notify_src, :readable)
@notify_src.extend(Watchable)
@notify_src.on_readable do
begin
@notify_src.sysread(256)
rescue Errno::EAGAIN
# The pipe wasn't readable after all.
end
end
end
define_opposite_accessors \
:stopped? => :running?,
:sleeping? => :awake?
def run
if block_given?
thread = Thread.new { run }
yield ; quit ; thread.join
else
running!
iterate while running?
end
ensure
quit
end
def iterate (user_timeout=nil)
t1, t2 = user_timeout, max_timeout
timeout = t1 && t2 ? [t1, t2].min : t1 || t2
select(timeout).zip(IO_STATES) do |ios, state|
ios.each { |x| x.signal(state) } if ios
end
end
private
def select (timeout)
@wakeup_time = timeout ? Time.now + timeout : nil
# puts "waiting: #{timeout} seconds"
signal :before_sleep ; sleeping!
IO.select(*@io_arrays + [timeout]) || []
ensure
awake! ; signal :after_sleep
@timers.each { |x| x.sound_alarm if x.ready? }
end
public
def quit ; stopped! ; wake_up ; self end
def monitoring_io? (io, event)
@ios[event].include? io end
def monitoring_timer? (timer)
@timers.include? timer end
def monitor_io (io, *events)
for event in events do
@ios[event] << io ; wake_up unless monitoring_io?(io, event)
end
end
def monitor_timer (timer)
@timers << timer unless monitoring_timer? timer
end
def check_timer (timer)
wake_up if timer.end_time < @wakeup_time
end
def ignore_io (io, *events)
events = IO_STATES if events.empty?
for event in events do
wake_up if @ios[event].delete(io)
end
end
def ignore_timer (timer)
# Don't need to wake up for this.
@timers.delete(timer)
end
def max_timeout
return nil if @timers.empty?
[@timers.collect { |x| x.time_left }.min, 0].max
end
def wake_up
@notify_snk.write('.') if sleeping?
end
end
class Symbol
def io_state?
EventLoop::IO_STATES.include? self
end
end
module EventLoop::Watchable
include SignalEmitter
define_signals :readable, :writable, :exceptional
def monitor_events (*events)
EventLoop.monitor_io(self, *events) end
def ignore_events (*events)
EventLoop.ignore_io(self, *events) end
define_soft_aliases \
:monitor_event => :monitor_events,
:ignore_event => :ignore_events
def close ; super
ignore_events end
def close_read ; super
ignore_event :readable end
def close_write ; super
ignore_event :writable end
module Automatic
include EventLoop::Watchable
def add_signal_handler (name, &handler) super
monitor_event(name) if name.io_state?
end
def remove_signal_handler (name, handler) super
if @signal_handlers[name].empty?
ignore_event(name) if name.io_state?
end
end
end
end
class IO
def on_readable &block
extend EventLoop::Watchable::Automatic
on_readable(&block)
end
def on_writable &block
extend EventLoop::Watchable::Automatic
on_writable(&block)
end
def on_exceptional &block
extend EventLoop::Watchable::Automatic
on_exceptional(&block)
end
def will_block?
if respond_to?(:fcntl) and defined?(Fcntl) and defined?(Fcntl::F_GETFL) and defined?(Fcntl::O_NONBLOCK)
fcntl(Fcntl::F_GETFL, 0) & Fcntl::O_NONBLOCK == 0
end
end
def will_block= (wants_blocking)
if respond_to?(:fcntl) and defined?(Fcntl) and defined?(Fcntl::F_GETFL) and defined?(Fcntl::O_NONBLOCK)
flags = fcntl(Fcntl::F_GETFL, 0)
if wants_blocking
flags &= ~Fcntl::O_NONBLOCK
else
flags |= Fcntl::O_NONBLOCK
end
fcntl(Fcntl::F_SETFL, flags)
end
end
end
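# A minimal sketch of wiring an IO into the default loop via the Watchable
# extensions above; the pipe and message are hypothetical.
#
#   rd, wr = IO.pipe
#   rd.on_readable do
#     puts rd.sysread(256)
#     EventLoop.quit
#   end
#   wr.write("hello")
#   EventLoop.run     # prints "hello", then returns after quit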
class EventLoop::Timer
include SignalEmitter
DEFAULT_INTERVAL = 0.0
DEFAULT_TOLERANCE = 0.001
def initialize (options={}, &handler)
@running = false
@start_time = nil
options = { :interval => options } if options.kind_of? Numeric
if options[:interval]
@interval = options[:interval].to_f
else
@interval = DEFAULT_INTERVAL
end
if options[:tolerance]
@tolerance = options[:tolerance].to_f
elsif DEFAULT_TOLERANCE < @interval
@tolerance = DEFAULT_TOLERANCE
else
@tolerance = 0.0
end
@event_loop = options[:event_loop] || EventLoop.current
if block_given?
add_signal_handler(:alarm, &handler)
start unless options[:start?] == false
else
start if options[:start?]
end
end
define_readers :interval, :tolerance
define_signal :alarm
def stopped? ; @start_time == nil end
def running? ; @start_time != nil end
def interval= (new_interval)
old_interval = @interval
@interval = new_interval
@event_loop.check_timer(self) if new_interval < old_interval
end
def end_time
@start_time + @interval end
def time_left
end_time - Time.now end
def ready?
time_left <= @tolerance end
def restart
@start_time = Time.now
end
def sound_alarm
signal :alarm
restart if running?
end
def start
@start_time = Time.now
@event_loop.monitor_timer(self)
end
def stop
@start_time = nil
@event_loop.ignore_timer(self)
end
end
if __FILE__ == $0
require "test/unit"
class TimerTest < Test::Unit::TestCase
def setup
@timer = EventLoop::Timer.new(:interval => 0.001)
end
def test_timer
@timer.on_alarm do
puts "[#{@timer.time_left} seconds left after alarm]"
EventLoop.quit
end
8.times do
t0 = Time.now
@timer.start ; EventLoop.run
t1 = Time.now
assert(t1 - t0 > @timer.interval - @timer.tolerance)
end
end
end
end
## event-loop.rb ends here.

View File

@ -0,0 +1,218 @@
## signal-system.rb --- simple intra-process signal system
# Copyright (C) 2005 Daniel Brockman
# This program is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation;
# either version 2 of the License, or (at your option) any
# later version.
# This file is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty
# of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
# You should have received a copy of the GNU General Public
# License along with this program; if not, write to the Free
# Software Foundation, 51 Franklin Street, Fifth Floor,
# Boston, MA 02110-1301, USA.
require "puppet/external/event-loop/better-definers"
module SignalEmitterModule
def self.extended (object)
if object.kind_of? Module and not object < SignalEmitter
if object.respond_to? :fcall
# This is the way to call private methods
# in Ruby 1.9 as of November 16.
object.fcall :include, SignalEmitter
else
object.__send__ :include, SignalEmitter
end
end
end
def define_signal (name, slot=:before, &body)
# Can't use `define_method' and take a block pre-1.9.
class_eval %{ def on_#{name} &block
add_signal_handler(:#{name}, &block) end }
define_signal_handler(name, :before, &lambda {|*a|})
define_signal_handler(name, :after, &lambda {|*a|})
define_signal_handler(name, slot, &body) if block_given?
end
def define_signals (*names, &body)
names.each { |x| define_signal(x, &body) }
end
def define_signal_handler (name, slot=:before, &body)
case slot
when :before
define_protected_method "handle_#{name}", &body
when :after
define_protected_method "after_handle_#{name}", &body
else
raise ArgumentError, "invalid slot `#{slot.inspect}'; " +
"should be `:before' or `:after'", caller(1)
end
end
end
# This is an old name for the same thing.
SignalEmitterClass = SignalEmitterModule
module SignalEmitter
def self.included (includer)
includer.extend SignalEmitterClass if not includer.kind_of? SignalEmitterClass
end
def __maybe_initialize_signal_emitter
@signal_handlers ||= Hash.new { |h, k| h[k] = Array.new }
@allow_dynamic_signals ||= false
end
define_accessors :allow_dynamic_signals?
def add_signal_handler (name, &handler)
__maybe_initialize_signal_emitter
@signal_handlers[name] << handler
handler
end
define_soft_aliases [:on, :on_signal] => :add_signal_handler
def remove_signal_handler (name, handler)
__maybe_initialize_signal_emitter
@signal_handlers[name].delete(handler)
end
def __signal__ (name, *args, &block)
__maybe_initialize_signal_emitter
respond_to? "on_#{name}" or allow_dynamic_signals? or
fail "undefined signal `#{name}' for #{self}:#{self.class}"
__send__("handle_#{name}", *args, &block) if
respond_to? "handle_#{name}"
@signal_handlers[name].each { |x| x.call(*args, &block) }
__send__("after_handle_#{name}", *args, &block) if
respond_to? "after_handle_#{name}"
end
define_soft_alias :signal => :__signal__
end
# This module is intended to be a convenience mixin to be used by
# classes whose objects need to observe foreign signals. That is,
# if you want to observe some signals coming from an object, *you*
# should mix in this module.
#
# You cannot use this module at two different places of the same
# inheritance chain to observe signals coming from the same object.
#
# XXX: This has not seen much use, and I'd like to provide a
# better solution for the problem in the future.
module SignalObserver
def __maybe_initialize_signal_observer
@observed_signals ||= Hash.new do |signals, object|
signals[object] = Hash.new do |handlers, name|
handlers[name] = Array.new
end
end
end
def observe_signal (subject, name, &handler)
__maybe_initialize_signal_observer
@observed_signals[subject][name] << handler
subject.add_signal_handler(name, &handler)
end
def map_signals (source, pairs={})
pairs.each do |src_name, dst_name|
observe_signal(source, src_name) do |*args|
__signal__(dst_name, *args)
end
end
end
def absorb_signals (subject, *names)
names.each do |name|
observe_signal(subject, name) do |*args|
__signal__(name, *args)
end
end
end
define_soft_aliases \
:map_signal => :map_signals,
:absorb_signal => :absorb_signals
def ignore_signal (subject, name)
__maybe_initialize_signal_observer
__ignore_signal_1(subject, name)
@observed_signals.delete(subject) if
@observed_signals[subject].empty?
end
def ignore_signals (subject, *names)
__maybe_initialize_signal_observer
names = @observed_signals[subject] if names.empty?
names.each { |x| __ignore_signal_1(subject, x) }
end
private
def __ignore_signal_1(subject, name)
@observed_signals[subject][name].each do |handler|
subject.remove_signal_handler(name, handler) end
@observed_signals[subject].delete(name)
end
end
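# A minimal sketch of an emitter/observer pair; the Button and ClickLogger
# classes are hypothetical.
#
#   class Button
#     include SignalEmitter
#     define_signal :clicked
#   end
#
#   class ClickLogger
#     include SignalObserver
#     def watch(button)
#       observe_signal(button, :clicked) { puts "clicked" }
#     end
#     def unwatch(button)
#       ignore_signal(button, :clicked)
#     end
#   end
#
#   button = Button.new
#   logger = ClickLogger.new
#   logger.watch(button)
#   button.signal :clicked   # prints "clicked"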
if __FILE__ == $0
require "test/unit"
class SignalEmitterTest < Test::Unit::TestCase
class X
include SignalEmitter
define_signal :foo
end
def setup
@x = X.new
end
def test_on_signal
moomin = 0
@x.on_signal(:foo) { moomin = 1 }
@x.signal :foo
assert moomin == 1
end
def test_on_foo
moomin = 0
@x.on_foo { moomin = 1 }
@x.signal :foo
assert moomin == 1
end
def test_multiple_on_signal
moomin = 0
@x.on_signal(:foo) { moomin += 1 }
@x.on_signal(:foo) { moomin += 2 }
@x.on_signal(:foo) { moomin += 4 }
@x.on_signal(:foo) { moomin += 8 }
@x.signal :foo
assert moomin == 15
end
def test_multiple_on_foo
moomin = 0
@x.on_foo { moomin += 1 }
@x.on_foo { moomin += 2 }
@x.on_foo { moomin += 4 }
@x.on_foo { moomin += 8 }
@x.signal :foo
assert moomin == 15
end
end
end
## signal-system.rb ends here.

63
mcollective/lib/puppet/external/lock.rb vendored Normal file
View File

@ -0,0 +1,63 @@
require 'thread'
require 'sync'
# Gotten from:
# http://path.berkeley.edu/~vjoel/ruby/solaris-bug.rb
# Extensions to the File class for exception-safe file locking in an
# environment with multiple user threads.
# This is here because closing a file on solaris unlocks any locks that
# other threads might have. So we have to make sure that only the last
# reader thread closes the file.
#
# The hash maps inode number to a count of reader threads
$reader_count = Hash.new(0)
class File
# Get an exclusive (i.e., write) lock on the file, and yield to the block.
# If the lock is not available, wait for it without blocking other ruby
# threads.
def lock_exclusive
if Thread.list.size == 1
flock(LOCK_EX)
else
# ugly hack because waiting for a lock in a Ruby thread blocks the
# process
period = 0.001
until flock(LOCK_EX|LOCK_NB)
sleep period
period *= 2 if period < 1
end
end
yield self
ensure
flush
flock(LOCK_UN)
end
# Get a shared (i.e., read) lock on the file, and yield to the block.
# If the lock is not available, wait for it without blocking other ruby
# threads.
def lock_shared
if Thread.list.size == 1
flock(LOCK_SH)
else
# ugly hack because waiting for a lock in a Ruby thread blocks the
# process
period = 0.001
until flock(LOCK_SH|LOCK_NB)
sleep period
period *= 2 if period < 1
end
end
yield self
ensure
Thread.exclusive {flock(LOCK_UN) if $reader_count[self.stat.ino] == 1}
## for solaris, no need to unlock here--closing does it
## but this has no effect on the bug
end
end
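# A minimal usage sketch; the lock file path is hypothetical. lock_exclusive
# yields with the write lock held and releases it (after flushing) even if
# the block raises.
#
#   File.open("/var/lock/example.lock", File::CREAT | File::RDWR) do |f|
#     f.lock_exclusive do
#       f.truncate(0)
#       f.write(Process.pid.to_s)
#     end
#   end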

48
mcollective/lib/puppet/external/nagios.rb vendored Executable file
View File

@ -0,0 +1,48 @@
#!/usr/bin/env ruby -w
#--------------------
# A script to retrieve hosts from ldap and create an importable
# cfservd file from them
require 'digest/md5'
#require 'ldap'
require 'puppet/external/nagios/parser.rb'
require 'puppet/external/nagios/base.rb'
module Nagios
NAGIOSVERSION = '1.1'
# yay colors
PINK = ""
GREEN = ""
YELLOW = ""
SLATE = ""
ORANGE = ""
BLUE = ""
NOCOLOR = ""
RESET = ""
def self.version
NAGIOSVERSION
end
class Config
def Config.import(config)
text = String.new
File.open(config) { |file|
file.each { |line|
text += line
}
}
parser = Nagios::Parser.new
parser.parse(text)
end
def Config.each
Nagios::Object.objects.each { |object|
yield object
}
end
end
end
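# A minimal usage sketch; the configuration path is hypothetical. Config.import
# reads the file and hands the text to Nagios::Parser.
#
#   Nagios::Config.import("/etc/nagios/objects.cfg")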

472
mcollective/lib/puppet/external/nagios/base.rb vendored Executable file
View File

@ -0,0 +1,472 @@
# The base class for all of our Nagios object types. Everything else
# is mostly just data.
class Nagios::Base
class UnknownNagiosType < RuntimeError # When an unknown type is asked for by name.
end
include Enumerable
class << self
attr_accessor :parameters, :derivatives, :ocs, :name, :att
attr_accessor :ldapbase
attr_writer :namevar
attr_reader :superior
end
# Attach one class to another.
def self.attach(hash)
@attach ||= {}
hash.each do |n, v| @attach[n] = v end
end
# Convert a parameter to camelcase
def self.camelcase(param)
param.gsub(/_./) do |match|
match.sub(/_/,'').capitalize
end
end
# Uncamelcase a parameter.
def self.decamelcase(param)
param.gsub(/[A-Z]/) do |match|
"_#{match.downcase}"
end
end
# Create a new instance of a given class.
def self.create(name, args = {})
name = name.intern if name.is_a? String
if @types.include?(name)
@types[name].new(args)
else
raise UnknownNagiosType, "Unknown type #{name}"
end
end
# Yield each type in turn.
def self.eachtype
@types.each do |name, type|
yield [name, type]
end
end
# Create a mapping.
def self.map(hash)
@map ||= {}
hash.each do |n, v| @map[n] = v end
end
# Return a mapping (or nil) for a param
def self.mapping(name)
name = name.intern if name.is_a? String
if defined?(@map)
@map[name]
else
nil
end
end
# Return the namevar for the canonical name.
def self.namevar
if defined?(@namevar)
return @namevar
else
if parameter?(:name)
return :name
elsif tmp = (self.name.to_s + "_name").intern and parameter?(tmp)
@namevar = tmp
return @namevar
else
raise "Type #{self.name} has no name var"
end
end
end
# Create a new type.
def self.newtype(name, &block)
name = name.intern if name.is_a? String
@types ||= {}
# Create the class, with the correct name.
t = Class.new(self)
t.name = name
# Everyone gets this. There should probably be a better way, and I
# should probably hack the attribute system to look things up based on
# this "use" setting, but, eh.
t.parameters = [:use]
const_set(name.to_s.capitalize,t)
# Evaluate the passed block. This should usually define all of the work.
t.class_eval(&block)
@types[name] = t
end
# Define both the normal case and camelcase method for a parameter
def self.paramattr(name)
camel = camelcase(name)
param = name
[name, camel].each do |method|
define_method(method) do
@parameters[param]
end
define_method(method.to_s + "=") do |value|
@parameters[param] = value
end
end
end
# Is the specified name a valid parameter?
def self.parameter?(name)
name = name.intern if name.is_a? String
@parameters.include?(name)
end
# Manually set the namevar
def self.setnamevar(name)
name = name.intern if name.is_a? String
@namevar = name
end
# Set the valid parameters for this class
def self.setparameters(*array)
@parameters += array
end
# Set the superior ldap object class. Seems silly to include this
# in this class, but, eh.
def self.setsuperior(name)
@superior = name
end
# Parameters to suppress in output.
def self.suppress(name)
@suppress ||= []
@suppress << name
end
# Whether a given parameter is suppressed.
def self.suppress?(name)
defined?(@suppress) and @suppress.include?(name)
end
# Return our name as the string.
def self.to_s
self.name.to_s
end
# Return a type by name.
def self.type(name)
name = name.intern if name.is_a? String
@types[name]
end
# Convenience methods.
def [](param)
send(param)
end
# Convenience methods.
def []=(param,value)
send(param.to_s + "=", value)
end
# Iterate across all of our set parameters.
def each
@parameters.each { |param,value|
yield(param,value)
}
end
# Initialize our object, optionally with a list of parameters.
def initialize(args = {})
@parameters = {}
args.each { |param,value|
self[param] = value
}
if @namevar == :_naginator_name
self['_naginator_name'] = self['name']
end
end
# Handle parameters like attributes.
def method_missing(mname, *args)
pname = mname.to_s
pname.sub!(/=/, '')
if self.class.parameter?(pname)
if pname =~ /[A-Z]/
pname = self.class.decamelcase(pname)
end
self.class.paramattr(pname)
# Now access the parameters directly, to make it at least less
# likely we'll end up in an infinite recursion.
if mname.to_s =~ /=$/
@parameters[pname] = *args
else
return @parameters[mname]
end
else
super
end
end
# Retrieve our name, through a bit of redirection.
def name
send(self.class.namevar)
end
# This is probably a bad idea.
def name=(value)
unless self.class.namevar.to_s == "name"
send(self.class.namevar.to_s + "=", value)
end
end
def namevar
(self.type + "_name").intern
end
def parammap(param)
unless defined?(@map)
map = {
self.namevar => "cn"
}
map.update(self.class.map) if self.class.map
end
if map.include?(param)
return map[param]
else
return "nagios-" + param.id2name.gsub(/_/,'-')
end
end
def parent
unless defined?(self.class.attached)
puts "Duh, you called parent on an unattached class"
return
end
klass,param = self.class.attached
unless @parameters.include?(param)
puts "Huh, no attachment param"
return
end
klass[@parameters[param]]
end
# okay, this sucks
# how do i get my list of ocs?
def to_ldif
base = self.class.ldapbase
str = self.dn + "\n"
ocs = Array.new
if self.class.ocs
# i'm storing an array, so i have to flatten it and stuff
kocs = self.class.ocs
ocs.push(*kocs)
end
ocs.push "top"
oc = self.class.to_s
oc.sub!(/Nagios/,'nagios')
oc.sub!(/::/,'')
ocs.push oc
ocs.each { |oc|
str += "objectclass: #{oc}\n"
}
@parameters.each { |name,value|
next if self.class.suppress?(name)
ldapname = self.parammap(name)
str += ldapname + ": #{value}\n"
}
str += "\n"
end
def to_s
str = "define #{self.type} {\n"
self.each { |param,value|
str += %{\t%-30s %s\n} % [ param,
if value.is_a? Array
value.join(",")
else
value
end
]
}
str += "}\n"
str
end
# The type of object we are.
def type
self.class.name
end
# object types
newtype :host do
setparameters :host_name, :alias, :display_name, :address, :parents,
:hostgroups, :check_command, :initial_state, :max_check_attempts,
:check_interval, :retry_interval, :active_checks_enabled,
:passive_checks_enabled, :check_period, :obsess_over_host,
:check_freshness, :freshness_threshold, :event_handler,
:event_handler_enabled, :low_flap_threshold, :high_flap_threshold,
:flap_detection_enabled, :flap_detection_options,
:failure_prediction_enabled, :process_perf_data,
:retain_status_information, :retain_nonstatus_information, :contacts,
:contact_groups, :notification_interval, :first_notification_delay,
:notification_period, :notification_options, :notifications_enabled,
:stalking_options, :notes, :notes_url, :action_url, :icon_image,
:icon_image_alt, :vrml_image, :statusmap_image, "2d_coords".intern,
"3d_coords".intern,
:register, :use
setsuperior "person"
map :address => "ipHostNumber"
end
newtype :hostgroup do
setparameters :hostgroup_name, :alias, :members, :hostgroup_members, :notes,
:notes_url, :action_url,
:register, :use
end
newtype :service do
attach :host => :host_name
setparameters :host_name, :hostgroup_name, :service_description,
:display_name, :servicegroups, :is_volatile, :check_command,
:initial_state, :max_check_attempts, :check_interval, :retry_interval,
:normal_check_interval, :retry_check_interval, :active_checks_enabled,
:passive_checks_enabled, :parallelize_check, :check_period,
:obsess_over_service, :check_freshness, :freshness_threshold,
:event_handler, :event_handler_enabled, :low_flap_threshold,
:high_flap_threshold, :flap_detection_enabled,:flap_detection_options,
:process_perf_data, :failure_prediction_enabled, :retain_status_information,
:retain_nonstatus_information, :notification_interval,
:first_notification_delay, :notification_period, :notification_options,
:notifications_enabled, :contacts, :contact_groups, :stalking_options,
:notes, :notes_url, :action_url, :icon_image, :icon_image_alt,
:register, :use,
:_naginator_name
suppress :host_name
setnamevar :_naginator_name
end
newtype :servicegroup do
setparameters :servicegroup_name, :alias, :members, :servicegroup_members,
:notes, :notes_url, :action_url,
:register, :use
end
newtype :contact do
setparameters :contact_name, :alias, :contactgroups,
:host_notifications_enabled, :service_notifications_enabled,
:host_notification_period, :service_notification_period,
:host_notification_options, :service_notification_options,
:host_notification_commands, :service_notification_commands,
:email, :pager, :address1, :address2, :address3, :address4,
:address5, :address6, :can_submit_commands, :retain_status_information,
:retain_nonstatus_information,
:register, :use
setsuperior "person"
end
newtype :contactgroup do
setparameters :contactgroup_name, :alias, :members, :contactgroup_members,
:register, :use
end
# TODO - We should support generic time periods here eg "day 1 - 15"
newtype :timeperiod do
setparameters :timeperiod_name, :alias, :sunday, :monday, :tuesday,
:wednesday, :thursday, :friday, :saturday, :exclude,
:register, :use
end
newtype :command do
setparameters :command_name, :command_line
end
newtype :servicedependency do
auxiliary = true
setparameters :dependent_host_name, :dependent_hostgroup_name,
:dependent_service_description, :host_name, :hostgroup_name,
:service_description, :inherits_parent, :execution_failure_criteria,
:notification_failure_criteria, :dependency_period,
:register, :use,
:_naginator_name
setnamevar :_naginator_name
end
newtype :serviceescalation do
setparameters :host_name, :hostgroup_name, :servicegroup_name,
:service_description, :contacts, :contact_groups,
:first_notification, :last_notification, :notification_interval,
:escalation_period, :escalation_options,
:register, :use,
:_naginator_name
setnamevar :_naginator_name
end
newtype :hostdependency do
auxiliary = true
setparameters :dependent_host_name, :dependent_hostgroup_name, :host_name,
:hostgroup_name, :inherits_parent, :execution_failure_criteria,
:notification_failure_criteria, :dependency_period,
:register, :use,
:_naginator_name
setnamevar :_naginator_name
end
newtype :hostescalation do
setparameters :host_name, :hostgroup_name, :contacts, :contact_groups,
:first_notification, :last_notification, :notification_interval,
:escalation_period, :escalation_options,
:register, :use,
:_naginator_name
setnamevar :_naginator_name
end
newtype :hostextinfo do
auxiliary = true
setparameters :host_name, :notes, :notes_url, :icon_image, :icon_image_alt,
:vrml_image, :statusmap_image, "2d_coords".intern, "3d_coords".intern,
:register, :use
setnamevar :host_name
end
newtype :serviceextinfo do
auxiliary = true
setparameters :host_name, :service_description, :notes, :notes_url,
:action_url, :icon_image, :icon_image_alt,
:register, :use,
:_naginator_name
setnamevar :_naginator_name
end
end
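# A minimal usage sketch (illustrative only, not executed on require): the
# Nagios parser calls Nagios::Base.create in the same way, and a created
# object renders back into Nagios "define" syntax via to_s. The host name
# and address below are made up.
if $0 == __FILE__
  host = Nagios::Base.create(:host,
    :host_name => "web01.example.com",
    :address   => "192.0.2.10",
    :use       => "generic-host")
  puts host.to_s   # prints a "define host { ... }" block
end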

View File

@ -0,0 +1,185 @@
# vim: syntax=ruby
class Nagios::Parser
token DEFINE NAME STRING PARAM LCURLY RCURLY VALUE RETURN COMMENT INLINECOMMENT
rule
decls: decl { return val[0] if val[0] }
| decls decl {
if val[1].nil?
result = val[0]
else
if val[0].nil?
result = val[1]
else
result = [ val[0], val[1] ].flatten
end
end
}
;
decl: object { result = [val[0]] }
| RETURN { result = nil }
| comment
;
comment: COMMENT RETURN { result = nil }
;
object: DEFINE NAME LCURLY RETURN vars RCURLY {
result = Nagios::Base.create(val[1],val[4])
}
;
vars: var
| vars var {
val[1].each {|p,v|
val[0][p] = v
}
result = val[0]
}
;
var: PARAM VALUE icomment returns { result = {val[0],val[1]} }
;
returns: RETURN
| returns RETURN
;
icomment: # nothing
| INLINECOMMENT
;
end
----inner
class ::Nagios::Parser::SyntaxError < RuntimeError; end
def parse(src)
@src = src
# state variables
@invar = false
@inobject = false
@done = false
@line = 0
@yydebug = true
do_parse
end
# The lexer. Very simple.
def token
@src.sub!(/\A\n/,'')
if $&
@line += 1
return [ :RETURN, "\n" ]
end
if @done
return nil
end
yytext = String.new
# remove comments from this line
@src.sub!(/\A[ \t]*;.*\n/,"\n")
if $&
return [:INLINECOMMENT, ""]
end
@src.sub!(/\A#.*\n/,"\n")
if $&
return [:COMMENT, ""]
end
@src.sub!(/#.*/,'')
if @src.length == 0
@done = true
return [false, '$']
end
if @invar
@src.sub!(/\A[ \t]+/,'')
@src.sub!(/\A([^;\n]+)(\n|;)/,'\2')
if $1
yytext += $1
end
@invar = false
return [:VALUE, yytext]
else
@src.sub!(/\A[\t ]*(\S+)([\t ]*|$)/,'')
if $1
yytext = $1
case yytext
when 'define'
#puts "got define"
return [:DEFINE, yytext]
when '{'
#puts "got {"
@inobject = true
return [:LCURLY, yytext]
else
unless @inobject
#puts "got type: #{yytext}"
if yytext =~ /\W/
giveback = yytext.dup
giveback.sub!(/^\w+/,'')
#puts "giveback " + giveback
#puts "yytext " + yytext
yytext.sub!(/\W.*$/,'')
#puts "yytext " + yytext
#puts "all [#{giveback} #{yytext} #{orig}]"
@src = giveback + @src
end
return [:NAME, yytext]
else
if yytext == '}'
#puts "got closure: #{yytext}"
@inobject = false
return [:RCURLY, '}']
end
unless @invar
@invar = true
return [:PARAM, $1]
else
end
end
end
end
end
end
def next_token
token
end
def yydebug
1
end
def yywrap
0
end
def on_error(token, value, vstack )
msg = ""
unless value.nil?
msg = "line #{@line}: syntax error at '#{value}'"
else
msg = "line #{@line}: syntax error at '#{token}'"
end
unless @src.size > 0
msg = "line #{@line}: Unexpected end of file"
end
if token == '$end'.intern
puts "okay, this is silly"
else
raise ::Nagios::Parser::SyntaxError, msg
end
end

View File

@ -0,0 +1,9 @@
all: parser.rb
debug: parser.rb setdebug
parser.rb: grammar.ry
racc -E -oparser.rb grammar.ry
setdebug:
perl -pi -e 's{\@yydebug =.*$$}{\@yydebug = true}' parser.rb

View File

@ -0,0 +1,775 @@
#
# DO NOT MODIFY!!!!
# This file is automatically generated by racc 1.4.5
# from racc grammar file "grammar.ry".
#
#
# parser.rb: generated by racc (runtime embedded)
#
###### racc/parser.rb begin
unless $LOADED_FEATURES.index 'racc/parser.rb'
$LOADED_FEATURES.push 'racc/parser.rb'
self.class.module_eval <<'..end racc/parser.rb modeval..id5256434e8a', 'racc/parser.rb', 1
#
# $Id: parser.rb,v 1.7 2005/11/20 17:31:32 aamine Exp $
#
# Copyright (c) 1999-2005 Minero Aoki
#
# This program is free software.
# You can distribute/modify this program under the same terms of ruby.
#
# As a special exception, when this code is copied by Racc
# into a Racc output file, you may use that output file
# without restriction.
#
NotImplementedError = NotImplementError unless defined?(NotImplementedError)
module Racc
class ParseError < StandardError; end
end
ParseError = Racc::ParseError unless defined?(::ParseError)
module Racc
Racc_No_Extentions = false unless defined?(Racc_No_Extentions)
class Parser
Racc_Runtime_Version = '1.4.5'
Racc_Runtime_Revision = '$Revision: 1.7 $'.split[1]
Racc_Runtime_Core_Version_R = '1.4.5'
Racc_Runtime_Core_Revision_R = '$Revision: 1.7 $'.split[1]
begin
require 'racc/cparse'
# Racc_Runtime_Core_Version_C = (defined in extention)
Racc_Runtime_Core_Revision_C = Racc_Runtime_Core_Id_C.split[2]
raise LoadError, 'old cparse.so' unless new.respond_to?(:_racc_do_parse_c, true)
raise LoadError, 'selecting ruby version of racc runtime core' if Racc_No_Extentions
Racc_Main_Parsing_Routine = :_racc_do_parse_c
Racc_YY_Parse_Method = :_racc_yyparse_c
Racc_Runtime_Core_Version = Racc_Runtime_Core_Version_C
Racc_Runtime_Core_Revision = Racc_Runtime_Core_Revision_C
Racc_Runtime_Type = 'c'
rescue LoadError
Racc_Main_Parsing_Routine = :_racc_do_parse_rb
Racc_YY_Parse_Method = :_racc_yyparse_rb
Racc_Runtime_Core_Version = Racc_Runtime_Core_Version_R
Racc_Runtime_Core_Revision = Racc_Runtime_Core_Revision_R
Racc_Runtime_Type = 'ruby'
end
def Parser.racc_runtime_type
Racc_Runtime_Type
end
private
def _racc_setup
@yydebug = false unless self.class::Racc_debug_parser
@yydebug ||= false
if @yydebug
@racc_debug_out ||= $stderr
@racc_debug_out ||= $stderr
end
arg = self.class::Racc_arg
arg[13] = true if arg.size < 14
arg
end
def _racc_init_sysvars
@racc_state = [0]
@racc_tstack = []
@racc_vstack = []
@racc_t = nil
@racc_val = nil
@racc_read_next = true
@racc_user_yyerror = false
@racc_error_status = 0
end
###
### do_parse
###
def do_parse
__send__(Racc_Main_Parsing_Routine, _racc_setup, false)
end
def next_token
raise NotImplementedError, "#{self.class}\#next_token is not defined"
end
def _racc_do_parse_rb(arg, in_debug)
action_table, action_check, action_default, action_pointer,
goto_table, goto_check, goto_default, goto_pointer,
nt_base, reduce_table, token_table, shift_n,
reduce_n, use_result, * = arg
_racc_init_sysvars
tok = act = i = nil
nerr = 0
catch(:racc_end_parse) {
while true
if i = action_pointer[@racc_state[-1]]
if @racc_read_next
if @racc_t != 0 # not EOF
tok, @racc_val = next_token
unless tok # EOF
@racc_t = 0
else
@racc_t = (token_table[tok] or 1) # error token
end
racc_read_token(@racc_t, tok, @racc_val) if @yydebug
@racc_read_next = false
end
end
i += @racc_t
unless i >= 0 and
act = action_table[i] and
action_check[i] == @racc_state[-1]
act = action_default[@racc_state[-1]]
end
else
act = action_default[@racc_state[-1]]
end
while act = _racc_evalact(act, arg)
;
end
end
}
end
###
### yyparse
###
def yyparse(recv, mid)
__send__(Racc_YY_Parse_Method, recv, mid, _racc_setup, true)
end
def _racc_yyparse_rb(recv, mid, arg, c_debug)
action_table, action_check, action_default, action_pointer,
goto_table, goto_check, goto_default, goto_pointer,
nt_base, reduce_table, token_table, shift_n,
reduce_n, use_result, * = arg
_racc_init_sysvars
tok = nil
act = nil
i = nil
nerr = 0
catch(:racc_end_parse) {
until i = action_pointer[@racc_state[-1]]
while act = _racc_evalact(action_default[@racc_state[-1]], arg)
;
end
end
recv.__send__(mid) do |tok, val|
unless tok
@racc_t = 0
else
@racc_t = (token_table[tok] or 1) # error token
end
@racc_val = val
@racc_read_next = false
i += @racc_t
unless i >= 0 and
act = action_table[i] and
action_check[i] == @racc_state[-1]
act = action_default[@racc_state[-1]]
end
while act = _racc_evalact(act, arg)
;
end
while not (i = action_pointer[@racc_state[-1]]) or
not @racc_read_next or
@racc_t == 0 # $
unless i and i += @racc_t and
i >= 0 and
act = action_table[i] and
action_check[i] == @racc_state[-1]
act = action_default[@racc_state[-1]]
end
while act = _racc_evalact(act, arg)
;
end
end
end
}
end
###
### common
###
def _racc_evalact(act, arg)
action_table, action_check, action_default, action_pointer,
goto_table, goto_check, goto_default, goto_pointer,
nt_base, reduce_table, token_table, shift_n,
reduce_n, use_result, * = arg
nerr = 0 # tmp
if act > 0 and act < shift_n
#
# shift
#
if @racc_error_status > 0
@racc_error_status -= 1 unless @racc_t == 1 # error token
end
@racc_vstack.push @racc_val
@racc_state.push act
@racc_read_next = true
if @yydebug
@racc_tstack.push @racc_t
racc_shift @racc_t, @racc_tstack, @racc_vstack
end
elsif act < 0 and act > -reduce_n
#
# reduce
#
code = catch(:racc_jump) {
@racc_state.push _racc_do_reduce(arg, act)
false
}
if code
case code
when 1 # yyerror
@racc_user_yyerror = true # user_yyerror
return -reduce_n
when 2 # yyaccept
return shift_n
else
raise '[Racc Bug] unknown jump code'
end
end
elsif act == shift_n
#
# accept
#
racc_accept if @yydebug
throw :racc_end_parse, @racc_vstack[0]
elsif act == -reduce_n
#
# error
#
case @racc_error_status
when 0
unless arg[21] # user_yyerror
nerr += 1
on_error @racc_t, @racc_val, @racc_vstack
end
when 3
if @racc_t == 0 # is $
throw :racc_end_parse, nil
end
@racc_read_next = true
end
@racc_user_yyerror = false
@racc_error_status = 3
while true
if i = action_pointer[@racc_state[-1]]
i += 1 # error token
if i >= 0 and
(act = action_table[i]) and
action_check[i] == @racc_state[-1]
break
end
end
throw :racc_end_parse, nil if @racc_state.size <= 1
@racc_state.pop
@racc_vstack.pop
if @yydebug
@racc_tstack.pop
racc_e_pop @racc_state, @racc_tstack, @racc_vstack
end
end
return act
else
raise "[Racc Bug] unknown action #{act.inspect}"
end
racc_next_state(@racc_state[-1], @racc_state) if @yydebug
nil
end
def _racc_do_reduce(arg, act)
action_table, action_check, action_default, action_pointer,
goto_table, goto_check, goto_default, goto_pointer,
nt_base, reduce_table, token_table, shift_n,
reduce_n, use_result, * = arg
state = @racc_state
vstack = @racc_vstack
tstack = @racc_tstack
i = act * -3
len = reduce_table[i]
reduce_to = reduce_table[i+1]
method_id = reduce_table[i+2]
void_array = []
tmp_t = tstack[-len, len] if @yydebug
tmp_v = vstack[-len, len]
tstack[-len, len] = void_array if @yydebug
vstack[-len, len] = void_array
state[-len, len] = void_array
# tstack must be updated AFTER method call
if use_result
vstack.push __send__(method_id, tmp_v, vstack, tmp_v[0])
else
vstack.push __send__(method_id, tmp_v, vstack)
end
tstack.push reduce_to
racc_reduce(tmp_t, reduce_to, tstack, vstack) if @yydebug
k1 = reduce_to - nt_base
if i = goto_pointer[k1]
i += state[-1]
if i >= 0 and (curstate = goto_table[i]) and goto_check[i] == k1
return curstate
end
end
goto_default[k1]
end
def on_error(t, val, vstack)
raise ParseError, sprintf("\nparse error on value %s (%s)", val.inspect, token_to_str(t) || '?')
end
def yyerror
throw :racc_jump, 1
end
def yyaccept
throw :racc_jump, 2
end
def yyerrok
@racc_error_status = 0
end
#
# for debugging output
#
def racc_read_token(t, tok, val)
@racc_debug_out.print 'read '
@racc_debug_out.print tok.inspect, '(', racc_token2str(t), ') '
@racc_debug_out.puts val.inspect
@racc_debug_out.puts
end
def racc_shift(tok, tstack, vstack)
@racc_debug_out.puts "shift #{racc_token2str tok}"
racc_print_stacks tstack, vstack
@racc_debug_out.puts
end
def racc_reduce(toks, sim, tstack, vstack)
out = @racc_debug_out
out.print 'reduce '
if toks.empty?
out.print ' <none>'
else
toks.each {|t| out.print ' ', racc_token2str(t) }
end
out.puts " --> #{racc_token2str(sim)}"
racc_print_stacks tstack, vstack
@racc_debug_out.puts
end
def racc_accept
@racc_debug_out.puts 'accept'
@racc_debug_out.puts
end
def racc_e_pop(state, tstack, vstack)
@racc_debug_out.puts 'error recovering mode: pop token'
racc_print_states state
racc_print_stacks tstack, vstack
@racc_debug_out.puts
end
def racc_next_state(curstate, state)
@racc_debug_out.puts "goto #{curstate}"
racc_print_states state
@racc_debug_out.puts
end
def racc_print_stacks(t, v)
out = @racc_debug_out
out.print ' ['
t.each_index do |i|
out.print ' (', racc_token2str(t[i]), ' ', v[i].inspect, ')'
end
out.puts ' ]'
end
def racc_print_states(s)
out = @racc_debug_out
out.print ' ['
s.each {|st| out.print ' ', st }
out.puts ' ]'
end
def racc_token2str(tok)
self.class::Racc_token_to_s_table[tok] or
raise "[Racc Bug] can't convert token #{tok} to string"
end
def token_to_str(t)
self.class::Racc_token_to_s_table[t]
end
end
end
..end racc/parser.rb modeval..id5256434e8a
end
###### racc/parser.rb end
module Nagios
class Parser < Racc::Parser
module_eval <<'..end grammar.ry modeval..idcb2ea30b34', 'grammar.ry', 57
class ::Nagios::Parser::SyntaxError < RuntimeError; end
def parse(src)
@src = src
# state variables
@invar = false
@inobject = false
@done = false
@line = 0
@yydebug = true
do_parse
end
# The lexer. Very simple.
def token
@src.sub!(/\A\n/,'')
if $MATCH
@line += 1
return [ :RETURN, "\n" ]
end
return nil if @done
yytext = String.new
# remove comments from this line
@src.sub!(/\A[ \t]*;.*\n/,"\n")
return [:INLINECOMMENT, ""] if $MATCH
@src.sub!(/\A#.*\n/,"\n")
return [:COMMENT, ""] if $MATCH
@src.sub!(/#.*/,'')
if @src.length == 0
@done = true
return [false, '$']
end
if @invar
@src.sub!(/\A[ \t]+/,'')
@src.sub!(/\A([^;\n]+)(\n|;)/,'\2')
if $1
yytext += $1
end
@invar = false
return [:VALUE, yytext]
else
@src.sub!(/\A[\t ]*(\S+)([\t ]*|$)/,'')
if $1
yytext = $1
case yytext
when 'define'
#puts "got define"
return [:DEFINE, yytext]
when '{'
#puts "got {"
@inobject = true
return [:LCURLY, yytext]
else
unless @inobject
#puts "got type: #{yytext}"
if yytext =~ /\W/
giveback = yytext.dup
giveback.sub!(/^\w+/,'')
#puts "giveback #{giveback}"
#puts "yytext #{yytext}"
yytext.sub!(/\W.*$/,'')
#puts "yytext #{yytext}"
#puts "all [#{giveback} #{yytext} #{orig}]"
@src = giveback + @src
end
return [:NAME, yytext]
else
if yytext == '}'
#puts "got closure: #{yytext}"
@inobject = false
return [:RCURLY, '}']
end
unless @invar
@invar = true
return [:PARAM, $1]
else
end
end
end
end
end
end
def next_token
token
end
def yydebug
1
end
def yywrap
0
end
def on_error(token, value, vstack )
msg = ""
unless value.nil?
msg = "line #{@line}: syntax error at '#{value}'"
else
msg = "line #{@line}: syntax error at '#{token}'"
end
msg = "line #{@line}: Unexpected end of file" unless @src.size > 0
if token == '$end'.intern
puts "okay, this is silly"
else
raise ::Nagios::Parser::SyntaxError, msg
end
end
..end grammar.ry modeval..idcb2ea30b34
##### racc 1.4.5 generates ###
racc_reduce_table = [
0, 0, :racc_error,
1, 13, :_reduce_1,
2, 13, :_reduce_2,
1, 14, :_reduce_3,
1, 14, :_reduce_4,
1, 14, :_reduce_none,
2, 16, :_reduce_6,
6, 15, :_reduce_7,
1, 17, :_reduce_none,
2, 17, :_reduce_9,
4, 18, :_reduce_10,
1, 20, :_reduce_none,
2, 20, :_reduce_none,
0, 19, :_reduce_none,
1, 19, :_reduce_none ]
racc_reduce_n = 15
racc_shift_n = 26
racc_action_table = [
9, 15, 1, 20, 1, 14, 12, 13, 11, 6,
7, 6, 7, 15, 18, 8, 21, 23, 25 ]
racc_action_check = [
2, 16, 2, 16, 0, 12, 8, 9, 7, 2,
2, 0, 0, 14, 15, 1, 18, 22, 24 ]
racc_action_pointer = [
2, 12, 0, nil, nil, nil, nil, -1, 0, 7,
nil, nil, -4, nil, 8, 6, -4, nil, 5, nil,
nil, nil, 8, nil, 9, nil ]
racc_action_default = [
-15, -15, -15, -1, -3, -5, -4, -15, -15, -15,
-2, -6, -15, 26, -15, -15, -15, -8, -13, -9,
-7, -14, -15, -11, -10, -12 ]
racc_goto_table = [ 17, 3, 19, 10, 2, 16, 22, 24 ]
racc_goto_check = [ 6, 2, 6, 2, 1, 5, 7, 8 ]
racc_goto_pointer = [ nil, 4, 1, nil, nil, -9, -14, -12, -15 ]
racc_goto_default = [ nil, nil, nil, 4, 5, nil, nil, nil, nil ]
racc_token_table = {
false => 0,
Object.new => 1,
:DEFINE => 2,
:NAME => 3,
:STRING => 4,
:PARAM => 5,
:LCURLY => 6,
:RCURLY => 7,
:VALUE => 8,
:RETURN => 9,
:COMMENT => 10,
:INLINECOMMENT => 11 }
racc_use_result_var = true
racc_nt_base = 12
Racc_arg = [
racc_action_table,
racc_action_check,
racc_action_default,
racc_action_pointer,
racc_goto_table,
racc_goto_check,
racc_goto_default,
racc_goto_pointer,
racc_nt_base,
racc_reduce_table,
racc_token_table,
racc_shift_n,
racc_reduce_n,
racc_use_result_var ]
Racc_token_to_s_table = [
'$end',
'error',
'DEFINE',
'NAME',
'STRING',
'PARAM',
'LCURLY',
'RCURLY',
'VALUE',
'RETURN',
'COMMENT',
'INLINECOMMENT',
'$start',
'decls',
'decl',
'object',
'comment',
'vars',
'var',
'icomment',
'returns']
Racc_debug_parser = false
##### racc system variables end #####
# reduce 0 omitted
module_eval <<'.,.,', 'grammar.ry', 6
def _reduce_1( val, _values, result )
return val[0] if val[0]
result
end
.,.,
module_eval <<'.,.,', 'grammar.ry', 18
def _reduce_2( val, _values, result )
if val[1].nil?
result = val[0]
else
if val[0].nil?
result = val[1]
else
result = [ val[0], val[1] ].flatten
end
end
result
end
.,.,
module_eval <<'.,.,', 'grammar.ry', 20
def _reduce_3( val, _values, result )
result = [val[0]]
result
end
.,.,
module_eval <<'.,.,', 'grammar.ry', 21
def _reduce_4( val, _values, result )
result = nil
result
end
.,.,
# reduce 5 omitted
module_eval <<'.,.,', 'grammar.ry', 25
def _reduce_6( val, _values, result )
result = nil
result
end
.,.,
module_eval <<'.,.,', 'grammar.ry', 31
def _reduce_7( val, _values, result )
result = Nagios::Base.create(val[1],val[4])
result
end
.,.,
# reduce 8 omitted
module_eval <<'.,.,', 'grammar.ry', 40
def _reduce_9( val, _values, result )
val[1].each {|p,v|
val[0][p] = v
}
result = val[0]
result
end
.,.,
module_eval <<'.,.,', 'grammar.ry', 42
def _reduce_10( val, _values, result )
result = {val[0],val[1]}
result
end
.,.,
# reduce 11 omitted
# reduce 12 omitted
# reduce 13 omitted
# reduce 14 omitted
def _reduce_none( val, _values, result )
result
end
end
end

View File

@ -0,0 +1,370 @@
require 'puppet/external/pson/version'
module PSON
class << self
# If _object_ is string-like parse the string and return the parsed result
# as a Ruby data structure. Otherwise generate a PSON text from the Ruby
# data structure object and return it.
#
# The _opts_ argument is passed through to generate/parse respectively, see
# generate and parse for their documentation.
def [](object, opts = {})
if object.respond_to? :to_str
PSON.parse(object.to_str, opts)
else
PSON.generate(object, opts)
end
end
# Returns the PSON parser class, that is used by PSON. This might be either
# PSON::Ext::Parser or PSON::Pure::Parser.
attr_reader :parser
# Set the PSON parser class _parser_ to be used by PSON.
def parser=(parser) # :nodoc:
@parser = parser
remove_const :Parser if const_defined? :Parser
const_set :Parser, parser
end
def registered_document_types
@registered_document_types ||= {}
end
# Register a class-constant for deserialization.
def register_document_type(name,klass)
registered_document_types[name.to_s] = klass
end
# Return the constant located at _path_.
# Anything may be registered as a document type by calling register_document_type, above.
# Otherwise, the format of _path_ has to be either ::A::B::C or A::B::C.
# In either of these cases A has to be defined in Object (i.e. the path
# must be an absolute namespace path). If the constant doesn't exist at
# the given path, an ArgumentError is raised.
def deep_const_get(path) # :nodoc:
path = path.to_s
registered_document_types[path] || path.split(/::/).inject(Object) do |p, c|
case
when c.empty? then p
when p.const_defined?(c) then p.const_get(c)
else raise ArgumentError, "can't find const for unregistered document type #{path}"
end
end
end
# Set the module _generator_ to be used by PSON.
def generator=(generator) # :nodoc:
@generator = generator
generator_methods = generator::GeneratorMethods
for const in generator_methods.constants
klass = deep_const_get(const)
modul = generator_methods.const_get(const)
klass.class_eval do
instance_methods(false).each do |m|
m.to_s == 'to_pson' and remove_method m
end
include modul
end
end
self.state = generator::State
const_set :State, self.state
end
# Returns the PSON generator module, that is used by PSON. This might be
# either PSON::Ext::Generator or PSON::Pure::Generator.
attr_reader :generator
# Returns the PSON generator state class, that is used by PSON. This might
# be either PSON::Ext::Generator::State or PSON::Pure::Generator::State.
attr_accessor :state
# This is the create identifier that is used to decide if the _pson_create_
# hook of a class should be called. It defaults to 'document_type'.
attr_accessor :create_id
end
self.create_id = 'document_type'
NaN = (-1.0) ** 0.5
Infinity = 1.0/0
MinusInfinity = -Infinity
# The base exception for PSON errors.
class PSONError < StandardError; end
# This exception is raised, if a parser error occurs.
class ParserError < PSONError; end
# This exception is raised, if the nesting of parsed data structures is too
# deep.
class NestingError < ParserError; end
# This exception is raised, if a generator or unparser error occurs.
class GeneratorError < PSONError; end
# For backwards compatibility
UnparserError = GeneratorError
# If a circular data structure is encountered while unparsing
# this exception is raised.
class CircularDatastructure < GeneratorError; end
# This exception is raised, if the required unicode support is missing on the
# system. Usually this means, that the iconv library is not installed.
class MissingUnicodeSupport < PSONError; end
module_function
# Parse the PSON string _source_ into a Ruby data structure and return it.
#
# _opts_ can have the following
# keys:
# * *max_nesting*: The maximum depth of nesting allowed in the parsed data
# structures. Disable depth checking with :max_nesting => false, it defaults
# to 19.
# * *allow_nan*: If set to true, allow NaN, Infinity and -Infinity in
# defiance of RFC 4627 to be parsed by the Parser. This option defaults
# to false.
# * *create_additions*: If set to false, the Parser doesn't create
# additions even if a matching class and create_id was found. This option
# defaults to true.
def parse(source, opts = {})
PSON.parser.new(source, opts).parse
end
# Parse the PSON string _source_ into a Ruby data structure and return it.
# The bang version of the parse method, defaults to the more dangerous values
# for the _opts_ hash, so be sure only to parse trusted _source_ strings.
#
# _opts_ can have the following keys:
# * *max_nesting*: The maximum depth of nesting allowed in the parsed data
# structures. Enable depth checking with :max_nesting => anInteger. The parse!
# methods defaults to not doing max depth checking: This can be dangerous,
# if someone wants to fill up your stack.
# * *allow_nan*: If set to true, allow NaN, Infinity, and -Infinity in
# defiance of RFC 4627 to be parsed by the Parser. This option defaults
# to true.
# * *create_additions*: If set to false, the Parser doesn't create
# additions even if a matching class and create_id was found. This option
# defaults to true.
def parse!(source, opts = {})
opts = {
:max_nesting => false,
:allow_nan => true
}.update(opts)
PSON.parser.new(source, opts).parse
end
# Unparse the Ruby data structure _obj_ into a single line PSON string and
# return it. _state_ is
# * a PSON::State object,
# * or a Hash like object (responding to to_hash),
# * an object convertible into a hash by a to_h method,
# that is used as or to configure a State object.
#
# It defaults to a state object, that creates the shortest possible PSON text
# in one line, checks for circular data structures and doesn't allow NaN,
# Infinity, and -Infinity.
#
# A _state_ hash can have the following keys:
# * *indent*: a string used to indent levels (default: ''),
# * *space*: a string that is put after a : or , delimiter (default: ''),
# * *space_before*: a string that is put before a : pair delimiter (default: ''),
# * *object_nl*: a string that is put at the end of a PSON object (default: ''),
# * *array_nl*: a string that is put at the end of a PSON array (default: ''),
# * *check_circular*: true if checking for circular data structures
# should be done (the default), false otherwise.
# * *allow_nan*: true if NaN, Infinity, and -Infinity should be
# generated, otherwise an exception is thrown, if these values are
# encountered. This option defaults to false.
# * *max_nesting*: The maximum depth of nesting allowed in the data
# structures from which PSON is to be generated. Disable depth checking
# with :max_nesting => false, it defaults to 19.
#
# See also the fast_generate for the fastest creation method with the least
# amount of sanity checks, and the pretty_generate method for some
# defaults for a pretty output.
def generate(obj, state = nil)
if state
state = State.from_state(state)
else
state = State.new
end
obj.to_pson(state)
end
# :stopdoc:
# I want to deprecate these later, so I'll first be silent about them, and
# later delete them.
alias unparse generate
module_function :unparse
# :startdoc:
# Unparse the Ruby data structure _obj_ into a single line PSON string and
# return it. This method disables the checks for circles in Ruby objects, and
# also generates NaN, Infinity, and, -Infinity float values.
#
# *WARNING*: Be careful not to pass any Ruby data structures with circles as
# _obj_ argument, because this will cause PSON to go into an infinite loop.
def fast_generate(obj)
obj.to_pson(nil)
end
# :stopdoc:
# I want to deprecate these later, so I'll first be silent about them, and later delete them.
alias fast_unparse fast_generate
module_function :fast_unparse
# :startdoc:
# Unparse the Ruby data structure _obj_ into a PSON string and return it. The
# returned string is a prettier form of the string returned by #unparse.
#
# The _opts_ argument can be used to configure the generator, see the
# generate method for a more detailed explanation.
def pretty_generate(obj, opts = nil)
state = PSON.state.new(
:indent => ' ',
:space => ' ',
:object_nl => "\n",
:array_nl => "\n",
:check_circular => true
)
if opts
if opts.respond_to? :to_hash
opts = opts.to_hash
elsif opts.respond_to? :to_h
opts = opts.to_h
else
raise TypeError, "can't convert #{opts.class} into Hash"
end
state.configure(opts)
end
obj.to_pson(state)
end
# :stopdoc:
# I want to deprecate these later, so I'll first be silent about them, and later delete them.
alias pretty_unparse pretty_generate
module_function :pretty_unparse
# :startdoc:
# Load a ruby data structure from a PSON _source_ and return it. A source can
# either be a string-like object, an IO like object, or an object responding
# to the read method. If _proc_ was given, it will be called with any nested
# Ruby object as an argument recursively in depth first order.
#
# This method is part of the implementation of the load/dump interface of
# Marshal and YAML.
def load(source, proc = nil)
if source.respond_to? :to_str
source = source.to_str
elsif source.respond_to? :to_io
source = source.to_io.read
else
source = source.read
end
result = parse(source, :max_nesting => false, :allow_nan => true)
recurse_proc(result, &proc) if proc
result
end
def recurse_proc(result, &proc)
case result
when Array
result.each { |x| recurse_proc x, &proc }
proc.call result
when Hash
result.each { |x, y| recurse_proc x, &proc; recurse_proc y, &proc }
proc.call result
else
proc.call result
end
end
private :recurse_proc
module_function :recurse_proc
alias restore load
module_function :restore
# Dumps _obj_ as a PSON string, i.e. calls generate on the object and returns
# the result.
#
# If anIO (an IO like object or an object that responds to the write method)
# was given, the resulting PSON is written to it.
#
# If the number of nested arrays or objects exceeds _limit_ an ArgumentError
# exception is raised. This argument is similar (but not exactly the
# same!) to the _limit_ argument in Marshal.dump.
#
# This method is part of the implementation of the load/dump interface of
# Marshal and YAML.
def dump(obj, anIO = nil, limit = nil)
if anIO and limit.nil?
anIO = anIO.to_io if anIO.respond_to?(:to_io)
unless anIO.respond_to?(:write)
limit = anIO
anIO = nil
end
end
limit ||= 0
result = generate(obj, :allow_nan => true, :max_nesting => limit)
if anIO
anIO.write result
anIO
else
result
end
rescue PSON::NestingError
raise ArgumentError, "exceed depth limit"
end
end
module ::Kernel
private
# Outputs _objs_ to STDOUT as PSON strings in the shortest form, that is in
# one line.
def j(*objs)
objs.each do |obj|
puts PSON::generate(obj, :allow_nan => true, :max_nesting => false)
end
nil
end
# Outputs _objs_ to STDOUT as PSON strings in a pretty format, with
# indentation and over many lines.
def jj(*objs)
objs.each do |obj|
puts PSON::pretty_generate(obj, :allow_nan => true, :max_nesting => false)
end
nil
end
# If _object_ is string-like parse the string and return the parsed result as
# a Ruby data structure. Otherwise generate a PSON text from the Ruby data
# structure object and return it.
#
# The _opts_ argument is passed through to generate/parse respectively, see
# generate and parse for their documentation.
def PSON(object, opts = {})
if object.respond_to? :to_str
PSON.parse(object.to_str, opts)
else
PSON.generate(object, opts)
end
end
end
class ::Class
# Returns true, if this class can be used to create an instance
# from a serialised PSON string. The class has to implement a class
# method _pson_create_ that expects a hash as first parameter, which includes
# the required data.
def pson_creatable?
respond_to?(:pson_create)
end
end
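# Illustrative round trip (a sketch, not part of the library proper): assumes
# Puppet's lib directory is on the load path so puppet/external/pson/pure can
# be required, which wires the pure parser and generator into these methods.
# The hash contents are made up.
if $0 == __FILE__
  require 'puppet/external/pson/pure'
  text = PSON.generate("role" => "compute", "cpus" => 4)
  data = PSON.parse(text)
  puts text          # e.g. {"role":"compute","cpus":4} (key order may vary)
  puts data["cpus"]  # => 4
end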

View File

@ -0,0 +1,77 @@
require 'puppet/external/pson/common'
require 'puppet/external/pson/pure/parser'
require 'puppet/external/pson/pure/generator'
module PSON
begin
require 'iconv'
# An iconv instance to convert from UTF16 Big Endian to UTF8.
UTF16toUTF8 = Iconv.new('utf-8', 'utf-16be') # :nodoc:
# An iconv instance to convert from UTF8 to UTF16 Big Endian.
UTF8toUTF16 = Iconv.new('utf-16be', 'utf-8') # :nodoc:
UTF8toUTF16.iconv('no bom')
rescue LoadError
# We actually don't care
Puppet.warning "iconv couldn't be loaded, which is required for UTF-8/UTF-16 conversions"
rescue Errno::EINVAL, Iconv::InvalidEncoding
# Iconv doesn't support big endian utf-16. Let's try to hack this manually
# into the converters.
begin
old_verbose, $VERBOSE = $VERBOSE, nil
# An iconv instance to convert from UTF16 Big Endian to UTF8.
UTF16toUTF8 = Iconv.new('utf-8', 'utf-16') # :nodoc:
# An iconv instance to convert from UTF8 to UTF16 Big Endian.
UTF8toUTF16 = Iconv.new('utf-16', 'utf-8') # :nodoc:
UTF8toUTF16.iconv('no bom')
if UTF8toUTF16.iconv("\xe2\x82\xac") == "\xac\x20"
swapper = Class.new do
def initialize(iconv) # :nodoc:
@iconv = iconv
end
def iconv(string) # :nodoc:
result = @iconv.iconv(string)
PSON.swap!(result)
end
end
UTF8toUTF16 = swapper.new(UTF8toUTF16) # :nodoc:
end
if UTF16toUTF8.iconv("\xac\x20") == "\xe2\x82\xac"
swapper = Class.new do
def initialize(iconv) # :nodoc:
@iconv = iconv
end
def iconv(string) # :nodoc:
string = PSON.swap!(string.dup)
@iconv.iconv(string)
end
end
UTF16toUTF8 = swapper.new(UTF16toUTF8) # :nodoc:
end
rescue Errno::EINVAL, Iconv::InvalidEncoding
Puppet.warning "iconv doesn't seem to support UTF-8/UTF-16 conversions"
ensure
$VERBOSE = old_verbose
end
end
# Swap consecutive bytes of _string_ in place.
def self.swap!(string) # :nodoc:
0.upto(string.size / 2) do |i|
break unless string[2 * i + 1]
string[2 * i], string[2 * i + 1] = string[2 * i + 1], string[2 * i]
end
string
end
# This module holds all the modules/classes that implement PSON's
# functionality in pure ruby.
module Pure
$DEBUG and warn "Using pure library for PSON."
PSON.parser = Parser
PSON.generator = Generator
end
PSON_LOADED = true
end

View File

@ -0,0 +1,401 @@
module PSON
MAP = {
"\x0" => '\u0000',
"\x1" => '\u0001',
"\x2" => '\u0002',
"\x3" => '\u0003',
"\x4" => '\u0004',
"\x5" => '\u0005',
"\x6" => '\u0006',
"\x7" => '\u0007',
"\b" => '\b',
"\t" => '\t',
"\n" => '\n',
"\xb" => '\u000b',
"\f" => '\f',
"\r" => '\r',
"\xe" => '\u000e',
"\xf" => '\u000f',
"\x10" => '\u0010',
"\x11" => '\u0011',
"\x12" => '\u0012',
"\x13" => '\u0013',
"\x14" => '\u0014',
"\x15" => '\u0015',
"\x16" => '\u0016',
"\x17" => '\u0017',
"\x18" => '\u0018',
"\x19" => '\u0019',
"\x1a" => '\u001a',
"\x1b" => '\u001b',
"\x1c" => '\u001c',
"\x1d" => '\u001d',
"\x1e" => '\u001e',
"\x1f" => '\u001f',
'"' => '\"',
'\\' => '\\\\',
} # :nodoc:
# Convert a UTF8 encoded Ruby string _string_ to a PSON string, encoded with
# UTF16 big endian characters as \u????, and return it.
if String.method_defined?(:force_encoding)
def utf8_to_pson(string) # :nodoc:
string = string.dup
string << '' # XXX workaround: avoid buffer sharing
string.force_encoding(Encoding::ASCII_8BIT)
string.gsub!(/["\\\x0-\x1f]/) { MAP[$MATCH] }
string
rescue Iconv::Failure => e
raise GeneratorError, "Caught #{e.class}: #{e}"
end
else
def utf8_to_pson(string) # :nodoc:
string.gsub(/["\\\x0-\x1f]/n) { MAP[$MATCH] }
end
end
module_function :utf8_to_pson
module Pure
module Generator
# This class is used to create State instances, which are used to hold data
# while generating a PSON text from a Ruby data structure.
class State
# Creates a State object from _opts_, which ought to be Hash to create
# a new State instance configured by _opts_, something else to create
# an unconfigured instance. If _opts_ is a State object, it is just
# returned.
def self.from_state(opts)
case opts
when self
opts
when Hash
new(opts)
else
new
end
end
# Instantiates a new State object, configured by _opts_.
#
# _opts_ can have the following keys:
#
# * *indent*: a string used to indent levels (default: ''),
# * *space*: a string that is put after a : or , delimiter (default: ''),
# * *space_before*: a string that is put before a : pair delimiter (default: ''),
# * *object_nl*: a string that is put at the end of a PSON object (default: ''),
# * *array_nl*: a string that is put at the end of a PSON array (default: ''),
# * *check_circular*: true if checking for circular data structures
# should be done (the default), false otherwise.
# * *allow_nan*: true if NaN, Infinity, and -Infinity should be
# generated, otherwise an exception is thrown, if these values are
# encountered. This option defaults to false.
def initialize(opts = {})
@seen = {}
@indent = ''
@space = ''
@space_before = ''
@object_nl = ''
@array_nl = ''
@check_circular = true
@allow_nan = false
configure opts
end
# This string is used to indent levels in the PSON text.
attr_accessor :indent
# This string is used to insert a space between the tokens in a PSON
# string.
attr_accessor :space
# This string is used to insert a space before the ':' in PSON objects.
attr_accessor :space_before
# This string is put at the end of a line that holds a PSON object (or
# Hash).
attr_accessor :object_nl
# This string is put at the end of a line that holds a PSON array.
attr_accessor :array_nl
# This integer returns the maximum level of data structure nesting in
# the generated PSON, max_nesting = 0 if no maximum is checked.
attr_accessor :max_nesting
def check_max_nesting(depth) # :nodoc:
return if @max_nesting.zero?
current_nesting = depth + 1
current_nesting > @max_nesting and
raise NestingError, "nesting of #{current_nesting} is too deep"
end
# Returns true, if circular data structures should be checked,
# otherwise returns false.
def check_circular?
@check_circular
end
# Returns true if NaN, Infinity, and -Infinity should be considered as
# valid PSON and output.
def allow_nan?
@allow_nan
end
# Returns _true_, if _object_ was already seen during this generating
# run.
def seen?(object)
@seen.key?(object.__id__)
end
# Remember _object_, to find out if it was already encountered (i.e. if a
# cyclic data structure is rendered).
def remember(object)
@seen[object.__id__] = true
end
# Forget _object_ for this generating run.
def forget(object)
@seen.delete object.__id__
end
# Configure this State instance with the Hash _opts_, and return
# itself.
def configure(opts)
@indent = opts[:indent] if opts.key?(:indent)
@space = opts[:space] if opts.key?(:space)
@space_before = opts[:space_before] if opts.key?(:space_before)
@object_nl = opts[:object_nl] if opts.key?(:object_nl)
@array_nl = opts[:array_nl] if opts.key?(:array_nl)
@check_circular = !!opts[:check_circular] if opts.key?(:check_circular)
@allow_nan = !!opts[:allow_nan] if opts.key?(:allow_nan)
if !opts.key?(:max_nesting) # defaults to 19
@max_nesting = 19
elsif opts[:max_nesting]
@max_nesting = opts[:max_nesting]
else
@max_nesting = 0
end
self
end
# Returns the configuration instance variables as a hash, that can be
# passed to the configure method.
def to_h
result = {}
for iv in %w{indent space space_before object_nl array_nl check_circular allow_nan max_nesting}
result[iv.intern] = instance_variable_get("@#{iv}")
end
result
end
end
module GeneratorMethods
module Object
# Converts this object to a string (calling #to_s), converts
# it to a PSON string, and returns the result. This is a fallback, if no
# special method #to_pson was defined for some object.
def to_pson(*) to_s.to_pson end
end
module Hash
# Returns a PSON string containing a PSON object, that is unparsed from
# this Hash instance.
# _state_ is a PSON::State object, that can also be used to configure the
# produced PSON string output further.
# _depth_ is used to find out nesting depth, to indent accordingly.
def to_pson(state = nil, depth = 0, *)
if state
state = PSON.state.from_state(state)
state.check_max_nesting(depth)
pson_check_circular(state) { pson_transform(state, depth) }
else
pson_transform(state, depth)
end
end
private
def pson_check_circular(state)
if state and state.check_circular?
state.seen?(self) and raise PSON::CircularDatastructure,
"circular data structures not supported!"
state.remember self
end
yield
ensure
state and state.forget self
end
def pson_shift(state, depth)
state and not state.object_nl.empty? or return ''
state.indent * depth
end
def pson_transform(state, depth)
delim = ','
if state
delim << state.object_nl
result = '{'
result << state.object_nl
result << map { |key,value|
s = pson_shift(state, depth + 1)
s << key.to_s.to_pson(state, depth + 1)
s << state.space_before
s << ':'
s << state.space
s << value.to_pson(state, depth + 1)
}.join(delim)
result << state.object_nl
result << pson_shift(state, depth)
result << '}'
else
result = '{'
result << map { |key,value|
key.to_s.to_pson << ':' << value.to_pson
}.join(delim)
result << '}'
end
result
end
end
module Array
# Returns a PSON string containing a PSON array, that is unparsed from
# this Array instance.
# _state_ is a PSON::State object, that can also be used to configure the
# produced PSON string output further.
# _depth_ is used to find out nesting depth, to indent accordingly.
def to_pson(state = nil, depth = 0, *)
if state
state = PSON.state.from_state(state)
state.check_max_nesting(depth)
pson_check_circular(state) { pson_transform(state, depth) }
else
pson_transform(state, depth)
end
end
private
def pson_check_circular(state)
if state and state.check_circular?
state.seen?(self) and raise PSON::CircularDatastructure,
"circular data structures not supported!"
state.remember self
end
yield
ensure
state and state.forget self
end
def pson_shift(state, depth)
state and not state.array_nl.empty? or return ''
state.indent * depth
end
def pson_transform(state, depth)
delim = ','
if state
delim << state.array_nl
result = '['
result << state.array_nl
result << map { |value|
pson_shift(state, depth + 1) << value.to_pson(state, depth + 1)
}.join(delim)
result << state.array_nl
result << pson_shift(state, depth)
result << ']'
else
'[' << map { |value| value.to_pson }.join(delim) << ']'
end
end
end
module Integer
# Returns a PSON string representation for this Integer number.
def to_pson(*) to_s end
end
module Float
# Returns a PSON string representation for this Float number.
def to_pson(state = nil, *)
case
when infinite?
if !state || state.allow_nan?
to_s
else
raise GeneratorError, "#{self} not allowed in PSON"
end
when nan?
if !state || state.allow_nan?
to_s
else
raise GeneratorError, "#{self} not allowed in PSON"
end
else
to_s
end
end
end
module String
# This string should be encoded with UTF-8. A call to this method
# returns a PSON string encoded with UTF16 big endian characters as
# \u????.
def to_pson(*)
'"' << PSON.utf8_to_pson(self) << '"'
end
# Module that holds the extending methods if the String module is
# included.
module Extend
# Raw Strings are PSON Objects (the raw bytes are stored in an array for the
# key "raw"). The Ruby String can be created by this module method.
def pson_create(o)
o['raw'].pack('C*')
end
end
# Extends _modul_ with the String::Extend module.
def self.included(modul)
modul.extend Extend
end
# This method creates a raw object hash, that can be nested into
# other data structures and will be unparsed as a raw string. This
# method should be used, if you want to convert raw strings to PSON
# instead of UTF-8 strings, e. g. binary data.
def to_pson_raw_object
{
PSON.create_id => self.class.name,
'raw' => self.unpack('C*'),
}
end
# This method creates a PSON text from the result of
# a call to to_pson_raw_object of this String.
def to_pson_raw(*args)
to_pson_raw_object.to_pson(*args)
end
end
module TrueClass
# Returns a PSON string for true: 'true'.
def to_pson(*) 'true' end
end
module FalseClass
# Returns a PSON string for false: 'false'.
def to_pson(*) 'false' end
end
module NilClass
# Returns a PSON string for nil: 'null'.
def to_pson(*) 'null' end
end
end
end
end
end
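# Illustrative sketch of the State options documented above (not executed on
# require): loading puppet/external/pson/pure mixes these generator methods
# into the core classes; the hash contents are made up.
if $0 == __FILE__
  require 'puppet/external/pson/pure'
  state = PSON::Pure::Generator::State.new(
    :indent    => '  ',
    :space     => ' ',
    :object_nl => "\n",
    :array_nl  => "\n")
  puts({ "name" => "node1", "roles" => ["compute", "storage"] }.to_pson(state))
end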

View File

@ -0,0 +1,264 @@
require 'strscan'
module PSON
module Pure
# This class implements the PSON parser that is used to parse a PSON string
# into a Ruby data structure.
class Parser < StringScanner
STRING = /" ((?:[^\x0-\x1f"\\] |
# escaped special characters:
\\["\\\/bfnrt] |
\\u[0-9a-fA-F]{4} |
# match all but escaped special characters:
\\[\x20-\x21\x23-\x2e\x30-\x5b\x5d-\x61\x63-\x65\x67-\x6d\x6f-\x71\x73\x75-\xff])*)
"/nx
INTEGER = /(-?0|-?[1-9]\d*)/
FLOAT = /(-?
(?:0|[1-9]\d*)
(?:
\.\d+(?i:e[+-]?\d+) |
\.\d+ |
(?i:e[+-]?\d+)
)
)/x
NAN = /NaN/
INFINITY = /Infinity/
MINUS_INFINITY = /-Infinity/
OBJECT_OPEN = /\{/
OBJECT_CLOSE = /\}/
ARRAY_OPEN = /\[/
ARRAY_CLOSE = /\]/
PAIR_DELIMITER = /:/
COLLECTION_DELIMITER = /,/
TRUE = /true/
FALSE = /false/
NULL = /null/
IGNORE = %r(
(?:
//[^\n\r]*[\n\r]| # line comments
/\* # c-style comments
(?:
[^*/]| # normal chars
/[^*]| # slashes that do not start a nested comment
\*[^/]| # asterisks that do not end this comment
/(?=\*/) # single slash before this comment's end
)*
\*/ # the End of this comment
|[ \t\r\n]+ # whitespaces: space, horizontal tab, lf, cr
)+
)mx
UNPARSED = Object.new
# Creates a new PSON::Pure::Parser instance for the string _source_.
#
# It will be configured by the _opts_ hash. _opts_ can have the following
# keys:
# * *max_nesting*: The maximum depth of nesting allowed in the parsed data
# structures. Disable depth checking with :max_nesting => false|nil|0,
# it defaults to 19.
# * *allow_nan*: If set to true, allow NaN, Infinity and -Infinity in
# defiance of RFC 4627 to be parsed by the Parser. This option defaults
# to false.
# * *create_additions*: If set to false, the Parser doesn't create
# additions even if a matching class and create_id was found. This option
# defaults to true.
# * *object_class*: Defaults to Hash
# * *array_class*: Defaults to Array
def initialize(source, opts = {})
super
if !opts.key?(:max_nesting) # defaults to 19
@max_nesting = 19
elsif opts[:max_nesting]
@max_nesting = opts[:max_nesting]
else
@max_nesting = 0
end
@allow_nan = !!opts[:allow_nan]
ca = true
ca = opts[:create_additions] if opts.key?(:create_additions)
@create_id = ca ? PSON.create_id : nil
@object_class = opts[:object_class] || Hash
@array_class = opts[:array_class] || Array
end
alias source string
# Parses the current PSON string _source_ and returns the complete data
# structure as a result.
def parse
reset
obj = nil
until eos?
case
when scan(OBJECT_OPEN)
obj and raise ParserError, "source '#{peek(20)}' not in PSON!"
@current_nesting = 1
obj = parse_object
when scan(ARRAY_OPEN)
obj and raise ParserError, "source '#{peek(20)}' not in PSON!"
@current_nesting = 1
obj = parse_array
when skip(IGNORE)
;
else
raise ParserError, "source '#{peek(20)}' not in PSON!"
end
end
obj or raise ParserError, "source did not contain any PSON!"
obj
end
private
# Unescape characters in strings.
UNESCAPE_MAP = Hash.new { |h, k| h[k] = k.chr }
UNESCAPE_MAP.update(
{
?" => '"',
?\\ => '\\',
?/ => '/',
?b => "\b",
?f => "\f",
?n => "\n",
?r => "\r",
?t => "\t",
?u => nil,
})
def parse_string
if scan(STRING)
return '' if self[1].empty?
string = self[1].gsub(%r{(?:\\[\\bfnrt"/]|(?:\\u(?:[A-Fa-f\d]{4}))+|\\[\x20-\xff])}n) do |c|
if u = UNESCAPE_MAP[$MATCH[1]]
u
else # \uXXXX
bytes = ''
i = 0
while c[6 * i] == ?\\ && c[6 * i + 1] == ?u
bytes << c[6 * i + 2, 2].to_i(16) << c[6 * i + 4, 2].to_i(16)
i += 1
end
PSON::UTF16toUTF8.iconv(bytes)
end
end
string.force_encoding(Encoding::UTF_8) if string.respond_to?(:force_encoding)
string
else
UNPARSED
end
rescue Iconv::Failure => e
raise GeneratorError, "Caught #{e.class}: #{e}"
end
def parse_value
case
when scan(FLOAT)
Float(self[1])
when scan(INTEGER)
Integer(self[1])
when scan(TRUE)
true
when scan(FALSE)
false
when scan(NULL)
nil
when (string = parse_string) != UNPARSED
string
when scan(ARRAY_OPEN)
@current_nesting += 1
ary = parse_array
@current_nesting -= 1
ary
when scan(OBJECT_OPEN)
@current_nesting += 1
obj = parse_object
@current_nesting -= 1
obj
when @allow_nan && scan(NAN)
NaN
when @allow_nan && scan(INFINITY)
Infinity
when @allow_nan && scan(MINUS_INFINITY)
MinusInfinity
else
UNPARSED
end
end
def parse_array
raise NestingError, "nesting of #@current_nesting is too deep" if
@max_nesting.nonzero? && @current_nesting > @max_nesting
result = @array_class.new
delim = false
until eos?
case
when (value = parse_value) != UNPARSED
delim = false
result << value
skip(IGNORE)
if scan(COLLECTION_DELIMITER)
delim = true
elsif match?(ARRAY_CLOSE)
;
else
raise ParserError, "expected ',' or ']' in array at '#{peek(20)}'!"
end
when scan(ARRAY_CLOSE)
raise ParserError, "expected next element in array at '#{peek(20)}'!" if delim
break
when skip(IGNORE)
;
else
raise ParserError, "unexpected token in array at '#{peek(20)}'!"
end
end
result
end
def parse_object
raise NestingError, "nesting of #@current_nesting is too deep" if
@max_nesting.nonzero? && @current_nesting > @max_nesting
result = @object_class.new
delim = false
until eos?
case
when (string = parse_string) != UNPARSED
skip(IGNORE)
raise ParserError, "expected ':' in object at '#{peek(20)}'!" unless scan(PAIR_DELIMITER)
skip(IGNORE)
unless (value = parse_value).equal? UNPARSED
result[string] = value
delim = false
skip(IGNORE)
if scan(COLLECTION_DELIMITER)
delim = true
elsif match?(OBJECT_CLOSE)
;
else
raise ParserError, "expected ',' or '}' in object at '#{peek(20)}'!"
end
else
raise ParserError, "expected value in object at '#{peek(20)}'!"
end
when scan(OBJECT_CLOSE)
raise ParserError, "expected next name, value pair in object at '#{peek(20)}'!" if delim
if @create_id and klassname = result[@create_id]
klass = PSON.deep_const_get klassname
break unless klass and klass.pson_creatable?
result = klass.pson_create(result)
end
break
when skip(IGNORE)
;
else
raise ParserError, "unexpected token in object at '#{peek(20)}'!"
end
end
result
end
end
end
end
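# Illustrative sketch (not executed on require): the parser can be driven
# directly, though PSON.parse is the usual entry point; the document below
# is made up.
if $0 == __FILE__
  require 'puppet/external/pson/pure'
  parser = PSON::Pure::Parser.new('{"ok": true, "ids": [1, 2, 3]}')
  p parser.parse   # => {"ok"=>true, "ids"=>[1, 2, 3]}
end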

View File

@ -0,0 +1,8 @@
module PSON
# PSON version
VERSION = '1.1.9'
VERSION_ARRAY = VERSION.split(/\./).map { |x| x.to_i } # :nodoc:
VERSION_MAJOR = VERSION_ARRAY[0] # :nodoc:
VERSION_MINOR = VERSION_ARRAY[1] # :nodoc:
VERSION_BUILD = VERSION_ARRAY[2] # :nodoc:
end

View File

@ -0,0 +1,54 @@
# Created by Luke Kanies on 2006-04-30.
# Copyright (c) 2006. All rights reserved.
require 'puppet/util/feature'
# Add the simple features, all in one file.
# We've got LDAP available.
Puppet.features.add(:ldap, :libs => ["ldap"])
# We have the Rdoc::Usage library.
Puppet.features.add(:usage, :libs => %w{rdoc/ri/ri_paths rdoc/usage})
# We have libshadow, useful for managing passwords.
Puppet.features.add(:libshadow, :libs => ["shadow"])
# We're running as root.
Puppet.features.add(:root) { require 'puppet/util/suidmanager'; Puppet::Util::SUIDManager.root? }
# We've got mongrel available
Puppet.features.add(:mongrel, :libs => %w{rubygems mongrel puppet/network/http_server/mongrel})
# We have lcs diff
Puppet.features.add :diff, :libs => %w{diff/lcs diff/lcs/hunk}
# We have augeas
Puppet.features.add(:augeas, :libs => ["augeas"])
# We have RRD available
Puppet.features.add(:rrd_legacy, :libs => ["RRDtool"])
Puppet.features.add(:rrd, :libs => ["RRD"])
# We have OpenSSL
Puppet.features.add(:openssl, :libs => ["openssl"])
# We have a syslog implementation
Puppet.features.add(:syslog, :libs => ["syslog"])
# We can use POSIX user functions
Puppet.features.add(:posix) do
require 'etc'
Etc.getpwuid(0) != nil && Puppet.features.syslog?
end
# We can use Microsoft Windows functions
Puppet.features.add(:microsoft_windows, :libs => ["sys/admin", "win32/process", "win32/dir"])
raise Puppet::Error,"Cannot determine basic system flavour" unless Puppet.features.posix? or Puppet.features.microsoft_windows?
# We have CouchDB
Puppet.features.add(:couchdb, :libs => ["couchrest"])
# We have sqlite
Puppet.features.add(:sqlite, :libs => ["sqlite3"])
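# Illustrative sketch of how these declarations are consumed elsewhere in
# Puppet (assumes Puppet itself is loadable): each add call defines a boolean
# query method on Puppet.features. The messages below are made up.
if $0 == __FILE__
  if Puppet.features.ldap?
    puts "ruby-ldap is available; LDAP-backed termini can be used"
  else
    puts "ruby-ldap is not installed; LDAP support will be skipped"
  end
end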

View File

@ -0,0 +1,6 @@
Puppet.features.add(:pson) do
require 'puppet/external/pson/common'
require 'puppet/external/pson/version'
require 'puppet/external/pson/pure'
true
end

View File

@ -0,0 +1,24 @@
require 'puppet/util/feature'
# See if we have rack available, an HTTP Application Stack
# Explicitly depend on rack library version >= 1.0.0
Puppet.features.add(:rack) do
begin
require 'rack'
rescue LoadError => detail
require 'rubygems'
require 'rack'
end
if ! (defined?(::Rack) and defined?(::Rack.release))
false
else
major_version = ::Rack.release.split('.')[0].to_i
if major_version >= 1
true
else
false
end
end
end

View File

@ -0,0 +1,33 @@
# Created by Luke Kanies on 2006-11-07.
# Copyright (c) 2006. All rights reserved.
require 'puppet/util/feature'
Puppet.features.rubygems?
Puppet.features.add(:rails) do
begin
require 'active_record'
require 'active_record/version'
rescue LoadError => detail
if FileTest.exists?("/usr/share/rails")
count = 0
Dir.entries("/usr/share/rails").each do |dir|
libdir = File.join("/usr/share/rails", dir, "lib")
if FileTest.exists?(libdir) and ! $LOAD_PATH.include?(libdir)
count += 1
$LOAD_PATH << libdir
end
end
retry if count > 0
end
end
unless (Puppet::Util.activerecord_version >= 2.1)
Puppet.info "ActiveRecord 2.1 or later required for StoreConfigs"
false
else
true
end
end

View File

@ -0,0 +1,6 @@
# Created by Luke Kanies on 2006-11-07.
# Copyright (c) 2006. All rights reserved.
require 'puppet/util/feature'
Puppet.features.add(:rubygems, :libs => "rubygems")

View File

@ -0,0 +1,3 @@
require 'puppet/util/feature'
Puppet.features.add(:selinux, :libs => ["selinux"])

View File

@ -0,0 +1,6 @@
require 'puppet/util/feature'
# We want this to load if possible, but it's not automatically
# required.
Puppet.features.rubygems?
Puppet.features.add(:stomp, :libs => %{stomp})

View File

@ -0,0 +1,6 @@
require 'puppet/util/feature'
# We want this to load if possible, but it's not automatically
# required.
Puppet.features.rubygems?
Puppet.features.add(:zlib, :libs => %{zlib})

View File

@ -0,0 +1,4 @@
# stub
module Puppet::FileBucket
class BucketError < RuntimeError; end
end

View File

@ -0,0 +1,105 @@
require 'puppet/file_bucket'
require 'puppet/file_bucket/file'
require 'puppet/indirector/request'
class Puppet::FileBucket::Dipper
# This is a transitional implementation that uses REST
# to access remote filebucket files.
attr_accessor :name
# Create our bucket client
def initialize(hash = {})
# Emulate the XMLRPC client
server = hash[:Server]
port = hash[:Port] || Puppet[:masterport]
environment = Puppet[:environment]
if hash.include?(:Path)
@local_path = hash[:Path]
@rest_path = nil
else
@local_path = nil
@rest_path = "https://#{server}:#{port}/#{environment}/file_bucket_file/"
end
end
def local?
!! @local_path
end
# Back up a file to our bucket
def backup(file)
raise(ArgumentError, "File #{file} does not exist") unless ::File.exist?(file)
contents = ::File.read(file)
begin
file_bucket_file = Puppet::FileBucket::File.new(contents, :bucket_path => @local_path)
files_original_path = absolutize_path(file)
dest_path = "#{@rest_path}#{file_bucket_file.name}#{files_original_path}"
# Make a HEAD request for the file so that we don't waste time
# uploading it if it already exists in the bucket.
unless Puppet::FileBucket::File.head("#{@rest_path}#{file_bucket_file.checksum_type}/#{file_bucket_file.checksum_data}#{files_original_path}")
file_bucket_file.save(dest_path)
end
return file_bucket_file.checksum_data
rescue => detail
puts detail.backtrace if Puppet[:trace]
raise Puppet::Error, "Could not back up #{file}: #{detail}"
end
end
# Retrieve a file by sum.
def getfile(sum)
source_path = "#{@rest_path}md5/#{sum}"
file_bucket_file = Puppet::FileBucket::File.find(source_path, :bucket_path => @local_path)
raise Puppet::Error, "File not found" unless file_bucket_file
file_bucket_file.to_s
end
# Restore the file
def restore(file,sum)
restore = true
if FileTest.exists?(file)
cursum = Digest::MD5.hexdigest(::File.read(file))
# if the checksum has changed...
# this might be extra effort
if cursum == sum
restore = false
end
end
if restore
if newcontents = getfile(sum)
tmp = ""
newsum = Digest::MD5.hexdigest(newcontents)
changed = nil
if FileTest.exists?(file) and ! FileTest.writable?(file)
changed = ::File.stat(file).mode
::File.chmod(changed | 0200, file)
end
::File.open(file, ::File::WRONLY|::File::TRUNC|::File::CREAT) { |of|
of.print(newcontents)
}
::File.chmod(changed, file) if changed
else
Puppet.err "Could not find file with checksum #{sum}"
return nil
end
return newsum
else
return nil
end
end
private
def absolutize_path( path )
require 'pathname'
Pathname.new(path).realpath
end
end
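
A minimal usage sketch for the Dipper above, assuming a full Puppet installation with its filebucket termini configured; the :Path option selects the local (non-REST) code path, and /tmp/filebucket is just an illustrative directory.
require 'puppet'
require 'puppet/file_bucket/dipper'

dipper = Puppet::FileBucket::Dipper.new(:Path => "/tmp/filebucket")
sum = dipper.backup("/etc/hosts")   # stores the file, returns its MD5 checksum
puts dipper.getfile(sum)            # retrieves the contents by checksum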

View File

@ -0,0 +1,55 @@
require 'puppet/file_bucket'
require 'puppet/indirector'
require 'puppet/util/checksums'
require 'digest/md5'
class Puppet::FileBucket::File
# This class handles the abstract notion of a file in a filebucket.
# There are mechanisms to save and load this file locally and remotely in puppet/indirector/filebucketfile/*
# There is a compatibility class that emulates pre-indirector filebuckets in Puppet::FileBucket::Dipper
extend Puppet::Indirector
indirects :file_bucket_file, :terminus_class => :selector
attr :contents
attr :bucket_path
def initialize( contents, options = {} )
raise ArgumentError if !contents.is_a?(String)
@contents = contents
@bucket_path = options.delete(:bucket_path)
raise ArgumentError if options != {}
end
def checksum_type
'md5'
end
def checksum
"{#{checksum_type}}#{checksum_data}"
end
def checksum_data
@checksum_data ||= Digest::MD5.hexdigest(contents)
end
def to_s
contents
end
def name
"#{checksum_type}/#{checksum_data}"
end
def self.from_s(contents)
self.new(contents)
end
def to_pson
{ "contents" => contents }.to_pson
end
def self.from_pson(pson)
self.new(pson["contents"])
end
end
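
A small sketch of the value object above, assuming the Puppet tree from this commit is on the load path; the contents string is arbitrary.
require 'puppet'
require 'puppet/file_bucket/file'

f = Puppet::FileBucket::File.new("some file contents\n")
f.checksum_type   # => "md5"
f.checksum        # => "{md5}<hex digest of the contents>"
f.name            # => "md5/<hex digest>", the bucket-relative name used by the Dipper
f.to_s            # => "some file contents\n"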

View File

@ -0,0 +1,30 @@
# A simple way to turn file names into singletons,
# so we don't have tons of copies of each file path around.
class Puppet::FileCollection
require 'puppet/file_collection/lookup'
def self.collection
@collection
end
def initialize
@paths = []
@inverse = {}
end
def index(path)
if i = @inverse[path]
return i
else
@paths << path
i = @inverse[path] = @paths.length - 1
return i
end
end
def path(index)
@paths[index]
end
@collection = self.new
end
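
An illustrative use of the singleton collection above; the manifest path is hypothetical.
require 'puppet'
require 'puppet/file_collection'

collection = Puppet::FileCollection.collection
i = collection.index("/etc/puppet/manifests/site.pp")    # first call stores the path
collection.index("/etc/puppet/manifests/site.pp") == i   # => true, the same index is reused
collection.path(i)                                       # => "/etc/puppet/manifests/site.pp"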

View File

@ -0,0 +1,20 @@
require 'puppet/file_collection'
# A simple module for looking up file paths and indexes
# in a file collection.
module Puppet::FileCollection::Lookup
attr_accessor :line, :file_index
def file_collection
Puppet::FileCollection.collection
end
def file=(path)
@file_index = file_collection.index(path)
end
def file
return nil unless file_index
file_collection.path(file_index)
end
end
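
A sketch of how a class might mix in Lookup so each instance stores only an integer file index; ExampleNode is hypothetical, the real users are Puppet's AST and resource classes.
require 'puppet'
require 'puppet/file_collection/lookup'

class ExampleNode
  include Puppet::FileCollection::Lookup
end

node = ExampleNode.new
node.file = "/etc/puppet/manifests/site.pp"   # stored as an index into the shared collection
node.line = 12
node.file_index   # => small integer shared by every object referencing the same path
node.file         # => "/etc/puppet/manifests/site.pp"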

View File

@ -0,0 +1,7 @@
#
# Created by Luke Kanies on 2007-10-16.
# Copyright (c) 2007. All rights reserved.
# Just a stub class.
class Puppet::FileServing # :nodoc:
end

View File

@ -0,0 +1,88 @@
#
# Created by Luke Kanies on 2007-10-22.
# Copyright (c) 2007. All rights reserved.
require 'puppet/file_serving'
# The base class for Content and Metadata; provides common
# functionality like the behaviour around links.
class Puppet::FileServing::Base
# This is for external consumers to store the source that was used
# to retrieve the metadata.
attr_accessor :source
# Does our file exist?
def exist?
stat
return true
rescue => detail
return false
end
# Return the full path to our file. Fails if there's no path set.
def full_path(dummy_argument=:work_arround_for_ruby_GC_bug)
(if relative_path.nil? or relative_path == "" or relative_path == "."
path
else
File.join(path, relative_path)
end).gsub(%r{/+}, "/")
end
def initialize(path, options = {})
self.path = path
@links = :manage
options.each do |param, value|
begin
send param.to_s + "=", value
rescue NoMethodError
raise ArgumentError, "Invalid option #{param} for #{self.class}"
end
end
end
# Determine how we deal with links.
attr_reader :links
def links=(value)
value = value.to_sym
value = :manage if value == :ignore
raise(ArgumentError, ":links can only be set to :manage or :follow") unless [:manage, :follow].include?(value)
@links = value
end
# Set our base path.
attr_reader :path
def path=(path)
raise ArgumentError.new("Paths must be fully qualified") unless path =~ /^#{::File::SEPARATOR}/
@path = path
end
# Set a relative path; this is used for recursion, and sets
# the file's path relative to the initial recursion point.
attr_reader :relative_path
def relative_path=(path)
raise ArgumentError.new("Relative paths must not be fully qualified") if path =~ /^#{::File::SEPARATOR}/
@relative_path = path
end
# Stat our file, using the appropriate link-sensitive method.
def stat
@stat_method ||= self.links == :manage ? :lstat : :stat
File.send(@stat_method, full_path)
end
def to_pson_data_hash
{
# No 'document_type' since we don't send these bare
'data' => {
'path' => @path,
'relative_path' => @relative_path,
'links' => @links
},
'metadata' => {
'api_version' => 1
}
}
end
end
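
A sketch of the path handling above; the directory and relative path are illustrative.
require 'puppet'
require 'puppet/file_serving/base'

base = Puppet::FileServing::Base.new("/etc/puppet/files", :links => :follow)
base.relative_path = "motd"
base.full_path   # => "/etc/puppet/files/motd"
base.exist?      # => true only if the file can be stat'ed (stat vs. lstat follows :links)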

View File

@ -0,0 +1,128 @@
#
# Created by Luke Kanies on 2007-10-16.
# Copyright (c) 2007. All rights reserved.
require 'puppet'
require 'puppet/file_serving'
require 'puppet/file_serving/mount'
require 'puppet/file_serving/mount/file'
require 'puppet/file_serving/mount/modules'
require 'puppet/file_serving/mount/plugins'
require 'puppet/util/cacher'
class Puppet::FileServing::Configuration
require 'puppet/file_serving/configuration/parser'
class << self
include Puppet::Util::Cacher
cached_attr(:configuration) { new }
end
Mount = Puppet::FileServing::Mount
# Create our singleton configuration.
def self.create
configuration
end
private_class_method :new
attr_reader :mounts
#private :mounts
# Find the right mount. Does some shenanigans to support old-style module
# mounts.
def find_mount(mount_name, environment)
# Reparse the configuration if necessary.
readconfig
if mount = mounts[mount_name]
return mount
end
if environment.module(mount_name)
Puppet::Util::Warnings.notice_once "DEPRECATION NOTICE: Files found in modules without specifying 'modules' in file path will be deprecated in the next major release. Please fix module '#{mount_name}' when no 0.24.x clients are present"
return mounts["modules"]
end
# This can be nil.
mounts[mount_name]
end
def initialize
@mounts = {}
@config_file = nil
# We don't check to see if the file is modified the first time,
# because we always want to parse at first.
readconfig(false)
end
# Is a given mount available?
def mounted?(name)
@mounts.include?(name)
end
# Split the path into the separate mount point and path.
def split_path(request)
# Reparse the configuration if necessary.
readconfig
mount_name, path = request.key.split(File::Separator, 2)
raise(ArgumentError, "Cannot find file: Invalid mount '#{mount_name}'") unless mount_name =~ %r{^[-\w]+$}
raise(ArgumentError, "Cannot find file: Invalid relative path '#{path}'") if path and path.split('/').include?('..')
return nil unless mount = find_mount(mount_name, request.environment)
if mount.name == "modules" and mount_name != "modules"
# yay backward-compatibility
path = "#{mount_name}/#{path}"
end
if path == ""
path = nil
elsif path
# Remove any double slashes that might have occurred
path = path.gsub(/\/+/, "/")
end
return mount, path
end
def umount(name)
@mounts.delete(name) if @mounts.include? name
end
private
def mk_default_mounts
@mounts["modules"] ||= Mount::Modules.new("modules")
@mounts["modules"].allow('*') if @mounts["modules"].empty?
@mounts["plugins"] ||= Mount::Plugins.new("plugins")
@mounts["plugins"].allow('*') if @mounts["plugins"].empty?
end
# Read the configuration file.
def readconfig(check = true)
config = Puppet[:fileserverconfig]
return unless FileTest.exists?(config)
@parser ||= Puppet::FileServing::Configuration::Parser.new(config)
return if check and ! @parser.changed?
# Don't assign the mounts hash until we're sure the parsing succeeded.
begin
newmounts = @parser.parse
@mounts = newmounts
rescue => detail
puts detail.backtrace if Puppet[:trace]
Puppet.err "Error parsing fileserver configuration: #{detail}; using old configuration"
end
ensure
# Make sure we've got our plugins and modules.
mk_default_mounts
end
end
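
A sketch of the singleton access pattern above, assuming a standard Puppet settings setup; whether a custom mount such as 'files' exists depends on the local fileserver.conf.
require 'puppet'
require 'puppet/file_serving/configuration'

config = Puppet::FileServing::Configuration.create   # singleton, reparses fileserver.conf when it changes
config.mounted?("modules")   # => true, the default mounts are always created
config.mounted?("files")     # => true only if fileserver.conf declares a [files] mount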

View File

@ -0,0 +1,122 @@
require 'puppet/file_serving/configuration'
require 'puppet/util/loadedfile'
class Puppet::FileServing::Configuration::Parser < Puppet::Util::LoadedFile
Mount = Puppet::FileServing::Mount
MODULES = 'modules'
# Parse our configuration file.
def parse
raise("File server configuration #{self.file} does not exist") unless FileTest.exists?(self.file)
raise("Cannot read file server configuration #{self.file}") unless FileTest.readable?(self.file)
@mounts = {}
@count = 0
File.open(self.file) { |f|
mount = nil
f.each { |line|
# Have the count increment at the top, in case we throw exceptions.
@count += 1
case line
when /^\s*#/; next # skip comments
when /^\s*$/; next # skip blank lines
when /\[([-\w]+)\]/
mount = newmount($1)
when /^\s*(\w+)\s+(.+)$/
var = $1
value = $2
raise(ArgumentError, "Fileserver configuration file does not use '=' as a separator") if value =~ /^=/
case var
when "path"
path(mount, value)
when "allow"
allow(mount, value)
when "deny"
deny(mount, value)
else
raise ArgumentError.new("Invalid argument '#{var}'", @count, file)
end
else
raise ArgumentError.new("Invalid line '#{line.chomp}'", @count, file)
end
}
}
validate
@mounts
end
private
# Allow a given pattern access to a mount.
def allow(mount, value)
# LAK:NOTE See http://snurl.com/21zf8 [groups_google_com]
x = value.split(/\s*,\s*/).each { |val|
begin
mount.info "allowing #{val} access"
mount.allow(val)
rescue AuthStoreError => detail
raise ArgumentError.new(
detail.to_s,
@count, file)
end
}
end
# Deny a given pattern access to a mount.
def deny(mount, value)
# LAK:NOTE See http://snurl.com/21zf8 [groups_google_com]
x = value.split(/\s*,\s*/).each { |val|
begin
mount.info "denying #{val} access"
mount.deny(val)
rescue AuthStoreError => detail
raise ArgumentError.new(
detail.to_s,
@count, file)
end
}
end
# Create a new mount.
def newmount(name)
raise ArgumentError, "#{@mounts[name]} is already mounted at #{name}", @count, file if @mounts.include?(name)
case name
when "modules"
mount = Mount::Modules.new(name)
when "plugins"
mount = Mount::Plugins.new(name)
else
mount = Mount::File.new(name)
end
@mounts[name] = mount
mount
end
# Set the path for a mount.
def path(mount, value)
if mount.respond_to?(:path=)
begin
mount.path = value
rescue ArgumentError => detail
Puppet.err "Removing mount #{mount.name}: #{detail}"
@mounts.delete(mount.name)
end
else
Puppet.warning "The '#{mount.name}' module can not have a path. Ignoring attempt to set it"
end
end
# Make sure all of our mounts are valid. We have to do this after the fact
# because details are added over time as the file is parsed.
def validate
@mounts.each { |name, mount| mount.validate }
end
end
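
For reference, the file format the parser above accepts looks roughly like this; the mount names, paths, and patterns are illustrative.
# /etc/puppet/fileserver.conf (illustrative)
[files]
  path /srv/puppet/files
  allow *.example.com
  deny bad.example.com

[plugins]
  allow *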

View File

@ -0,0 +1,49 @@
#
# Created by Luke Kanies on 2007-10-16.
# Copyright (c) 2007. All rights reserved.
require 'puppet/indirector'
require 'puppet/file_serving'
require 'puppet/file_serving/base'
# A class that handles retrieving file contents.
# It only reads the file when its content is specifically
# asked for.
class Puppet::FileServing::Content < Puppet::FileServing::Base
extend Puppet::Indirector
indirects :file_content, :terminus_class => :selector
attr_writer :content
def self.supported_formats
[:raw]
end
def self.from_raw(content)
instance = new("/this/is/a/fake/path")
instance.content = content
instance
end
# BF: we used to fetch the file content here, but this is counter-productive
# for puppetmaster streaming of file content. So collect just returns itself
def collect
return if stat.ftype == "directory"
self
end
# Read the content of our file in.
def content
unless @content
# This stat can raise an exception, too.
raise(ArgumentError, "Cannot read the contents of links unless following links") if stat.ftype == "symlink"
@content = ::File.read(full_path)
end
@content
end
def to_raw
File.new(full_path, "r")
end
end
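
A sketch of the lazy-read behaviour described above; the path is illustrative and must exist for the content to be readable.
require 'puppet'
require 'puppet/file_serving/content'

content = Puppet::FileServing::Content.new("/etc/puppet/files/motd")
# Nothing has been read from disk yet; the file is opened only on first access.
content.content   # => the bytes of /etc/puppet/files/motd (raises for symlinks when links are managed)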

View File

@ -0,0 +1,172 @@
#
# Created by Luke Kanies on 2007-10-22.
# Copyright (c) 2007. All rights reserved.
require 'find'
require 'puppet/file_serving'
require 'puppet/file_serving/metadata'
# Operate recursively on a path, returning a set of file paths.
class Puppet::FileServing::Fileset
attr_reader :path, :ignore, :links
attr_accessor :recurse, :recurselimit, :checksum_type
# Produce a hash of files, merged so that earlier files with the same
# postfix win. E.g., /dir1/subfile beats /dir2/subfile.
# It's a hash because we need to know the relative path of each file,
# and the base directory.
# This will probably only ever be used for searching for plugins.
def self.merge(*filesets)
result = {}
filesets.each do |fileset|
fileset.files.each do |file|
result[file] ||= fileset.path
end
end
result
end
# Return a list of all files in our fileset. This is different from the
# normal definition of find in that we support specific levels
# of recursion, which means we need to know when we're going another
# level deep, which Find doesn't do.
def files
files = perform_recursion
# Now strip off the leading path, so each file becomes relative, and remove
# any slashes that might end up at the beginning of the path.
result = files.collect { |file| file.sub(%r{^#{Regexp.escape(@path)}/*}, '') }
# And add the path itself.
result.unshift(".")
result
end
# Should we ignore this path?
def ignore?(path)
return false if @ignore == [nil]
# 'detect' normally returns the found result, whereas we just want true/false.
! @ignore.detect { |pattern| File.fnmatch?(pattern, path) }.nil?
end
def ignore=(values)
values = [values] unless values.is_a?(Array)
@ignore = values
end
def initialize(path, options = {})
path = path.chomp(File::SEPARATOR) unless path == File::SEPARATOR
raise ArgumentError.new("Fileset paths must be fully qualified") unless File.expand_path(path) == path
@path = path
# Set our defaults.
@ignore = []
@links = :manage
@recurse = false
@recurselimit = :infinite
if options.is_a?(Puppet::Indirector::Request)
initialize_from_request(options)
else
initialize_from_hash(options)
end
raise ArgumentError.new("Fileset paths must exist") unless stat = stat(path)
raise ArgumentError.new("Fileset recurse parameter must not be a number anymore, please use recurselimit") if @recurse.is_a?(Integer)
end
def links=(links)
links = links.to_sym
raise(ArgumentError, "Invalid :links value '#{links}'") unless [:manage, :follow].include?(links)
@links = links
@stat_method = links == :manage ? :lstat : :stat
end
# Should we recurse further? This is basically a single
# place for all of the logic around recursion.
def recurse?(depth)
# recurse if told to, and infinite recursion or current depth not at the limit
self.recurse and (self.recurselimit == :infinite or depth <= self.recurselimit)
end
def initialize_from_hash(options)
options.each do |option, value|
method = option.to_s + "="
begin
send(method, value)
rescue NoMethodError
raise ArgumentError, "Invalid option '#{option}'"
end
end
end
def initialize_from_request(request)
[:links, :ignore, :recurse, :recurselimit, :checksum_type].each do |param|
if request.options.include?(param) # use 'include?' so the values can be false
value = request.options[param]
elsif request.options.include?(param.to_s)
value = request.options[param.to_s]
end
next if value.nil?
value = true if value == "true"
value = false if value == "false"
value = Integer(value) if value.is_a?(String) and value =~ /^\d+$/
send(param.to_s + "=", value)
end
end
private
# Pull the recursion logic into one place. It's moderately hairy, and this
# allows us to keep the hairiness apart from what we do with the files.
def perform_recursion
# Start out with just our base directory.
current_dirs = [@path]
next_dirs = []
depth = 1
result = []
return result unless recurse?(depth)
while dir_path = current_dirs.shift or ((depth += 1) and recurse?(depth) and current_dirs = next_dirs and next_dirs = [] and dir_path = current_dirs.shift)
next unless stat = stat(dir_path)
next unless stat.directory?
Dir.entries(dir_path).each do |file_path|
next if [".", ".."].include?(file_path)
# Note that this also causes matching directories not
# to be recursed into.
next if ignore?(file_path)
# Add it to our list of files to return
result << File.join(dir_path, file_path)
# And to our list of files/directories to iterate over.
next_dirs << File.join(dir_path, file_path)
end
end
result
end
public
# Stat a given file, using the links-appropriate method.
def stat(path)
@stat_method ||= self.links == :manage ? :lstat : :stat
begin
return File.send(@stat_method, path)
rescue
# If this happens, it is almost surely because we're
# trying to manage a link to a file that does not exist.
return nil
end
end
end
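
A usage sketch for the fileset above, assuming /etc/puppet/files exists; the option names follow the accessors defined in the class.
require 'puppet'
require 'puppet/file_serving/fileset'

fs = Puppet::FileServing::Fileset.new(
  "/etc/puppet/files",
  :recurse => true, :recurselimit => 2, :ignore => ["*.tmp", ".git"]
)
fs.files   # => [".", "motd", "sub", "sub/app.conf", ...] relative to the base directory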

View File

@ -0,0 +1,117 @@
#
# Created by Luke Kanies on 2007-10-16.
# Copyright (c) 2007. All rights reserved.
require 'puppet'
require 'puppet/indirector'
require 'puppet/file_serving'
require 'puppet/file_serving/base'
require 'puppet/util/checksums'
# A class that handles retrieving file metadata.
class Puppet::FileServing::Metadata < Puppet::FileServing::Base
include Puppet::Util::Checksums
extend Puppet::Indirector
indirects :file_metadata, :terminus_class => :selector
attr_reader :path, :owner, :group, :mode, :checksum_type, :checksum, :ftype, :destination
PARAM_ORDER = [:mode, :ftype, :owner, :group]
def attributes_with_tabs
raise(ArgumentError, "Cannot manage files of type #{ftype}") unless ['file','directory','link'].include? ftype
desc = []
PARAM_ORDER.each { |check|
check = :ftype if check == :type
desc << send(check)
}
desc << checksum
desc << @destination rescue nil if ftype == 'link'
desc.join("\t")
end
def checksum_type=(type)
raise(ArgumentError, "Unsupported checksum type #{type}") unless respond_to?("#{type}_file")
@checksum_type = type
end
# Retrieve the attributes for this file, relative to a base directory.
# Note that File.stat raises Errno::ENOENT if the file is absent and this
# method does not catch that exception.
def collect
real_path = full_path
stat = stat()
@owner = stat.uid
@group = stat.gid
@ftype = stat.ftype
# We have to mask the mode, yay.
@mode = stat.mode & 007777
case stat.ftype
when "file"
@checksum = ("{#{@checksum_type}}") + send("#{@checksum_type}_file", real_path).to_s
when "directory" # Always just timestamp the directory.
@checksum_type = "ctime"
@checksum = ("{#{@checksum_type}}") + send("#{@checksum_type}_file", path).to_s
when "link"
@destination = File.readlink(real_path)
@checksum = ("{#{@checksum_type}}") + send("#{@checksum_type}_file", real_path).to_s rescue nil
else
raise ArgumentError, "Cannot manage files of type #{stat.ftype}"
end
end
def initialize(path,data={})
@owner = data.delete('owner')
@group = data.delete('group')
@mode = data.delete('mode')
if checksum = data.delete('checksum')
@checksum_type = checksum['type']
@checksum = checksum['value']
end
@checksum_type ||= "md5"
@ftype = data.delete('type')
@destination = data.delete('destination')
super(path,data)
end
PSON.register_document_type('FileMetadata',self)
def to_pson_data_hash
{
'document_type' => 'FileMetadata',
'data' => super['data'].update(
{
'owner' => owner,
'group' => group,
'mode' => mode,
'checksum' => {
'type' => checksum_type,
'value' => checksum
},
'type' => ftype,
'destination' => destination,
}),
'metadata' => {
'api_version' => 1
}
}
end
def to_pson(*args)
to_pson_data_hash.to_pson(*args)
end
def self.from_pson(data)
new(data.delete('path'), data)
end
end
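
A sketch of collecting metadata for a local file; /etc/hosts is just an example path.
require 'puppet'
require 'puppet/file_serving/metadata'

meta = Puppet::FileServing::Metadata.new("/etc/hosts")
meta.collect                # stats the file and fills in owner, group, mode and checksum
meta.checksum               # => "{md5}<digest>" for regular files, "{ctime}..." for directories
meta.attributes_with_tabs   # => tab-separated mode, ftype, owner, group and checksum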

View File

@ -0,0 +1,44 @@
#
# Created by Luke Kanies on 2007-10-16.
# Copyright (c) 2007. All rights reserved.
require 'puppet/network/authstore'
require 'puppet/util/logging'
require 'puppet/util/cacher'
require 'puppet/file_serving'
require 'puppet/file_serving/metadata'
require 'puppet/file_serving/content'
# Broker access to the filesystem, converting local URIs into metadata
# or content objects.
class Puppet::FileServing::Mount < Puppet::Network::AuthStore
include Puppet::Util::Logging
attr_reader :name
def find(path, options)
raise NotImplementedError
end
# Create our object. It must have a name.
def initialize(name)
unless name =~ %r{^[-\w]+$}
raise ArgumentError, "Invalid mount name format '#{name}'"
end
@name = name
super()
end
def search(path, options)
raise NotImplementedError
end
def to_s
"mount[#{@name}]"
end
# A noop.
def validate
end
end

View File

@ -0,0 +1,124 @@
require 'puppet/util/cacher'
require 'puppet/file_serving/mount'
class Puppet::FileServing::Mount::File < Puppet::FileServing::Mount
class << self
include Puppet::Util::Cacher
cached_attr(:localmap) do
{ "h" => Facter.value("hostname"),
"H" => [Facter.value("hostname"),
Facter.value("domain")].join("."),
"d" => Facter.value("domain")
}
end
end
def complete_path(relative_path, node)
full_path = path(node)
raise ArgumentError.new("Mounts without paths are not usable") unless full_path
# If there's no relative path name, then we're serving the mount itself.
return full_path unless relative_path
file = ::File.join(full_path, relative_path)
if !(FileTest.exist?(file) or FileTest.symlink?(file))
Puppet.info("File does not exist or is not accessible: #{file}")
return nil
end
file
end
# Return an instance of the appropriate class.
def find(short_file, request)
complete_path(short_file, request.node)
end
# Return the path as appropriate, expanding as necessary.
def path(node = nil)
if expandable?
return expand(@path, node)
else
return @path
end
end
# Set the path.
def path=(path)
# FIXME: For now, just don't validate paths with replacement
# patterns in them.
if path =~ /%./
# Mark that we're expandable.
@expandable = true
else
raise ArgumentError, "#{path} does not exist or is not a directory" unless FileTest.directory?(path)
raise ArgumentError, "#{path} is not readable" unless FileTest.readable?(path)
@expandable = false
end
@path = path
end
def search(path, request)
return nil unless path = complete_path(path, request.node)
[path]
end
# Verify our configuration is valid. This should really check to
# make sure at least someone will be allowed, but, eh.
def validate
raise ArgumentError.new("Mounts without paths are not usable") if @path.nil?
end
private
# Create a map for a specific node.
def clientmap(node)
{
"h" => node.sub(/\..*$/, ""),
"H" => node,
"d" => node.sub(/[^.]+\./, "") # domain name
}
end
# Replace % patterns as appropriate.
def expand(path, node = nil)
# This map should probably be moved into a method.
map = nil
if node
map = clientmap(node)
else
Puppet.notice "No client; expanding '#{path}' with local host"
# Else, use the local information
map = localmap
end
path.gsub(/%(.)/) do |v|
key = $1
if key == "%"
"%"
else
map[key] || v
end
end
end
# Do we have any patterns in our path, yo?
def expandable?
if defined?(@expandable)
@expandable
else
false
end
end
# Cache this manufactured map, since if it's used it's likely
# to get used a lot.
def localmap
self.class.localmap
end
end
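
A sketch of the %-expansion behaviour handled by path= and expand above; the mount name and template path are illustrative.
require 'puppet'
require 'puppet/file_serving/mount/file'

mount = Puppet::FileServing::Mount::File.new("private")
mount.path = "/etc/puppet/private/%h"    # %h expands to the client's short hostname
mount.path("web01.example.com")          # => "/etc/puppet/private/web01"
mount.path("db02.example.com")           # => "/etc/puppet/private/db02"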

View File

@ -0,0 +1,24 @@
require 'puppet/file_serving/mount'
# This is the modules-specific mount: it knows how to search through
# modules for files. Yay.
class Puppet::FileServing::Mount::Modules < Puppet::FileServing::Mount
# Return an instance of the appropriate class.
def find(path, request)
raise "No module specified" if path.to_s.empty?
module_name, relative_path = path.split("/", 2)
return nil unless mod = request.environment.module(module_name)
mod.file(relative_path)
end
def search(path, request)
if result = find(path, request)
[result]
end
end
def valid?
true
end
end

View File

@ -0,0 +1,26 @@
require 'puppet/file_serving/mount'
# Find files in the modules' plugins directories.
# This is a very strange mount because it merges
# many directories into one.
class Puppet::FileServing::Mount::Plugins < Puppet::FileServing::Mount
# Return an instance of the appropriate class.
def find(relative_path, request)
return nil unless mod = request.environment.modules.find { |mod| mod.plugin(relative_path) }
path = mod.plugin(relative_path)
path
end
def search(relative_path, request)
# We currently only support one kind of search on plugins - return
# them all.
paths = request.environment.modules.find_all { |mod| mod.plugins? }.collect { |mod| mod.plugin_directory }
return(paths.empty? ? nil : paths)
end
def valid?
true
end
end

View File

@ -0,0 +1,25 @@
#
# Created by Luke Kanies on 2007-10-22.
# Copyright (c) 2007. All rights reserved.
require 'puppet/file_serving'
require 'puppet/file_serving/fileset'
# Define some common methods for FileServing termini.
module Puppet::FileServing::TerminusHelper
# Create model instances for all files in a fileset.
def path2instances(request, *paths)
filesets = paths.collect do |path|
# Filesets support indirector requests as an options collection
Puppet::FileServing::Fileset.new(path, request)
end
Puppet::FileServing::Fileset.merge(*filesets).collect do |file, base_path|
inst = model.new(base_path, :relative_path => file)
inst.checksum_type = request.options[:checksum_type] if request.options[:checksum_type]
inst.links = request.options[:links] if request.options[:links]
inst.collect
inst
end
end
end

View File

@ -0,0 +1,33 @@
#
# Created by Luke Kanies on 2007-10-18.
# Copyright (c) 2007. All rights reserved.
require 'uri'
require 'puppet/file_serving'
# This module is used to pick the appropriate terminus
# in file-serving indirections. This is necessary because
# the terminus varies based on the URI asked for.
module Puppet::FileServing::TerminusSelector
PROTOCOL_MAP = {"puppet" => :rest, "file" => :file}
def select(request)
# We rely on the request's parsing of the URI.
# Short-circuit to :file if it's a fully-qualified path or specifies a 'file' protocol.
return PROTOCOL_MAP["file"] if request.key =~ /^#{::File::SEPARATOR}/
return PROTOCOL_MAP["file"] if request.protocol == "file"
# We're heading over the wire: the protocol is 'puppet' and we've got a server name, or we're not named 'apply' or 'puppet'.
if request.protocol == "puppet" and (request.server or !["puppet","apply"].include?(Puppet.settings[:name]))
return PROTOCOL_MAP["puppet"]
end
if request.protocol and PROTOCOL_MAP[request.protocol].nil?
raise(ArgumentError, "URI protocol '#{request.protocol}' is not currently supported for file serving")
end
# If we're still here, we're using the file_server or modules.
:file_server
end
end
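
A sketch of the protocol-to-terminus mapping above, using a hypothetical struct in place of a real Puppet::Indirector::Request; the last result assumes the process is running as 'puppet' or 'apply'.
require 'puppet'
require 'puppet/file_serving/terminus_selector'

class SelectorDemo
  include Puppet::FileServing::TerminusSelector
end

FakeRequest = Struct.new(:key, :protocol, :server)   # stands in for Puppet::Indirector::Request
demo = SelectorDemo.new

demo.select(FakeRequest.new("/etc/motd", nil, nil))                   # => :file  (fully-qualified path)
demo.select(FakeRequest.new("modules/app/motd", "puppet", "master"))  # => :rest  (explicit server)
demo.select(FakeRequest.new("modules/app/motd", "puppet", nil))       # => :file_server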

View File

@ -0,0 +1,78 @@
# Manage indirections to termini. They are organized in terms of indirections
# -- e.g., configuration, node, file, certificate -- and each indirection has one
# or more terminus types defined. The indirection is configured via the
# +indirects+ method, which will be called by the class extending itself
# with this module.
module Puppet::Indirector
# LAK:FIXME We need to figure out how to handle documentation for the
# different indirection types.
require 'puppet/indirector/indirection'
require 'puppet/indirector/terminus'
require 'puppet/indirector/envelope'
require 'puppet/network/format_handler'
# Declare that the including class indirects its methods to
# this terminus. The terminus name must be the name of a Puppet
# default, not the value -- if it's the value, then it gets
# evaluated at parse time, which is before the user has had a chance
# to override it.
def indirects(indirection, options = {})
raise(ArgumentError, "Already handling indirection for #{@indirection.name}; cannot also handle #{indirection}") if @indirection
# populate this class with the various new methods
extend ClassMethods
include InstanceMethods
include Puppet::Indirector::Envelope
extend Puppet::Network::FormatHandler
# instantiate the actual Terminus for that type and this name (:ldap, w/ args :node)
# & hook the instantiated Terminus into this class (Node: @indirection = terminus)
@indirection = Puppet::Indirector::Indirection.new(self, indirection, options)
end
module ClassMethods
attr_reader :indirection
def cache_class=(klass)
indirection.cache_class = klass
end
def terminus_class=(klass)
indirection.terminus_class = klass
end
# Expire any cached instance.
def expire(*args)
indirection.expire(*args)
end
def find(*args)
indirection.find(*args)
end
def head(*args)
indirection.head(*args)
end
def destroy(*args)
indirection.destroy(*args)
end
def search(*args)
indirection.search(*args)
end
end
module InstanceMethods
def save(key = nil)
self.class.indirection.save key, self
end
end
# Helper definition for indirections that handle filenames.
BadNameRegexp = Regexp.union(/^\.\./,
%r{[\\/]},
"\0",
/(?i)^[a-z]:/)
end
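
For orientation, a sketch of the wiring the module provides; MyModel is hypothetical, and the commented calls only work once a terminus is configured for the indirection (Puppet::FileServing::Content above is a real in-tree example).
require 'puppet'
require 'puppet/indirector'

class MyModel
  extend Puppet::Indirector
  indirects :my_model
end

# With a terminus configured, the model class answers the standard calls
# added by ClassMethods/InstanceMethods above:
#   MyModel.find(key)      MyModel.search(pattern)   MyModel.head(key)
#   MyModel.expire(key)    MyModel.destroy(key)      instance.save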

View File

@ -0,0 +1,28 @@
require 'puppet/indirector'
class Puppet::Indirector::ActiveRecord < Puppet::Indirector::Terminus
class << self
attr_accessor :ar_model
end
def self.use_ar_model(klass)
self.ar_model = klass
end
def ar_model
self.class.ar_model
end
def initialize
Puppet::Rails.init
end
def find(request)
return nil unless instance = ar_model.find_by_name(request.key)
instance.to_puppet
end
def save(request)
ar_model.from_puppet(request.instance).save
end
end

View File

@ -0,0 +1,41 @@
require 'puppet/rails/host'
require 'puppet/indirector/active_record'
require 'puppet/resource/catalog'
class Puppet::Resource::Catalog::ActiveRecord < Puppet::Indirector::ActiveRecord
use_ar_model Puppet::Rails::Host
# If we can find the host, then return a catalog with the host's resources
# as the vertices.
def find(request)
return nil unless request.options[:cache_integration_hack]
return nil unless host = ar_model.find_by_name(request.key)
catalog = Puppet::Resource::Catalog.new(host.name)
host.resources.each do |resource|
catalog.add_resource resource.to_transportable
end
catalog
end
# Save the values from a Facts instance as the facts on a Rails Host instance.
def save(request)
catalog = request.instance
host = ar_model.find_by_name(catalog.name) || ar_model.create(:name => catalog.name)
host.railsmark "Saved catalog to database" do
host.merge_resources(catalog.vertices)
host.last_compile = Time.now
if node = Puppet::Node.find(catalog.name)
host.ip = node.parameters["ipaddress"]
host.environment = node.environment.to_s
end
host.save
end
end
end

View File

@ -0,0 +1,170 @@
require 'puppet/node'
require 'puppet/resource/catalog'
require 'puppet/indirector/code'
require 'yaml'
class Puppet::Resource::Catalog::Compiler < Puppet::Indirector::Code
desc "Puppet's catalog compilation interface, and its back-end is
Puppet's compiler"
include Puppet::Util
attr_accessor :code
def extract_facts_from_request(request)
return unless text_facts = request.options[:facts]
raise ArgumentError, "Facts but no fact format provided for #{request.name}" unless format = request.options[:facts_format]
# If the facts were encoded as yaml, then the param reconstitution system
# in Network::HTTP::Handler will automagically deserialize the value.
if text_facts.is_a?(Puppet::Node::Facts)
facts = text_facts
else
facts = Puppet::Node::Facts.convert_from(format, text_facts)
end
facts.save
end
# Compile a node's catalog.
def find(request)
extract_facts_from_request(request)
node = node_from_request(request)
if catalog = compile(node)
return catalog
else
# This shouldn't actually happen; we should either return
# a config or raise an exception.
return nil
end
end
# filter-out a catalog to remove exported resources
def filter(catalog)
return catalog.filter { |r| r.virtual? } if catalog.respond_to?(:filter)
catalog
end
def initialize
set_server_facts
setup_database_backend if Puppet[:storeconfigs]
end
# Is our compiler part of a network, or are we just local?
def networked?
Puppet.run_mode.master?
end
private
# Add any extra data necessary to the node.
def add_node_data(node)
# Merge in our server-side facts, so they can be used during compilation.
node.merge(@server_facts)
end
# Compile the actual catalog.
def compile(node)
str = "Compiled catalog for #{node.name}"
str += " in environment #{node.environment}" if node.environment
config = nil
loglevel = networked? ? :notice : :none
benchmark(loglevel, str) do
begin
config = Puppet::Parser::Compiler.compile(node)
rescue Puppet::Error => detail
Puppet.err(detail.to_s) if networked?
raise
end
end
config
end
# Turn our host name into a node object.
def find_node(name)
begin
return nil unless node = Puppet::Node.find(name)
rescue => detail
puts detail.backtrace if Puppet[:trace]
raise Puppet::Error, "Failed when searching for node #{name}: #{detail}"
end
# Add any external data to the node.
add_node_data(node)
node
end
# Extract the node from the request, or use the request
# to find the node.
def node_from_request(request)
if node = request.options[:use_node]
return node
end
# We rely on our authorization system to determine whether the connected
# node is allowed to compile the catalog of the node referenced by key.
# By default the REST authorization system makes sure only the connected node
# can compile its own catalog.
# This allows, for instance, monitoring systems or puppet-load to check several
# nodes' catalogs with only one certificate and a modification to auth.conf.
# If no key is provided we can only compile the currently connected node.
name = request.key || request.node
if node = find_node(name)
return node
end
raise ArgumentError, "Could not find node '#{name}'; cannot compile"
end
# Initialize our server fact hash; we add these to each client, and they
# won't change while we're running, so it's safe to cache the values.
def set_server_facts
@server_facts = {}
# Add our server version to the fact list
@server_facts["serverversion"] = Puppet.version.to_s
# And then add the server name and IP
{"servername" => "fqdn",
"serverip" => "ipaddress"
}.each do |var, fact|
if value = Facter.value(fact)
@server_facts[var] = value
else
Puppet.warning "Could not retrieve fact #{fact}"
end
end
if @server_facts["servername"].nil?
host = Facter.value(:hostname)
if domain = Facter.value(:domain)
@server_facts["servername"] = [host, domain].join(".")
else
@server_facts["servername"] = host
end
end
end
def setup_database_backend
raise Puppet::Error, "Rails is missing; cannot store configurations" unless Puppet.features.rails?
Puppet::Rails.init
end
# Mark that the node has checked in. LAK:FIXME this needs to be moved into
# the Node class, or somewhere that's got abstract backends.
def update_node_check(node)
if Puppet.features.rails? and Puppet[:storeconfigs]
Puppet::Rails.connect
host = Puppet::Rails::Host.find_or_create_by_name(node.name)
host.last_freshcheck = Time.now
host.save
end
end
end

Some files were not shown because too many files have changed in this diff.