See the github repo at https://github.com/house9/puppet-solo-hello-world
Resources
- http://www.confreaks.com/videos/2479-railsconf2013-devops-for-the-rubyist-soul
- https://speakerdeck.com/jtdowney/devops-for-the-rubyist-soul-at-rubynation-2013
The example code below comes from the github repo at https://github.com/house9/puppet-solo-hello-world
Write-Ahead Logging (WAL) is a standard method for ensuring data integrity. A detailed description can be found in most (if not all) books about transaction processing. Briefly, WAL's central concept is that changes to data files (where tables and indexes reside) must be written only after those changes have been logged, that is, after log records describing the changes have been flushed to permanent storage. If we follow this procedure, we do not need to flush data pages to disk on every transaction commit, because we know that in the event of a crash we will be able to recover the database using the log: any changes that have not been applied to the data pages can be redone from the log records. (This is roll-forward recovery, also known as REDO.)
# Refresh the apt package index before any package installs so the
# puppetlabs-postgresql module resolves current package versions.
exec { "apt-get update":
  path => "/usr/bin"
}

# manage_package_repo => true will satisfy apt_postgresql_org
# include postgresql::package_source::apt_postgresql_org
class { 'postgresql':
  version             => '9.2',
  manage_package_repo => true,
  charset             => 'UTF8',
  locale              => 'en_US.UTF-8'
}

# Install and manage the PostgreSQL server with the defaults above.
class { 'postgresql::server':
}

# Create an application database plus its owning role/password.
postgresql::db { 'mydatabasename':
  user     => 'mydatabaseuser',
  password => 'mypassword'
}
# 1) install puppet locally | |
# - install modules to local modules directory | |
# puppet module install puppetlabs/postgresql --force -i modules | |
# puppet module install puppetlabs/stdlib --force -i modules | |
# puppet module install puppetlabs/firewall --force -i modules | |
# puppet module install puppetlabs/apt --force -i modules | |
# puppet module install ripienaar/concat --force -i modules | |
# OR | |
# download tar file from https://forge.puppetlabs.com/puppetlabs/postgresql | |
# and place into modules directory | |
# 2) create a manifests/default.pp file - see https://gist.github.com/house9/5762219 | |
# Minimal Vagrantfile: boots a precise64 box and provisions it with
# masterless Puppet, loading modules from the local ./modules directory.
Vagrant.configure("2") do |cfg|
  cfg.vm.box = "precise64"

  cfg.vm.provision :puppet do |provisioner|
    # Modules installed via `puppet module install ... -i modules` (step 1).
    provisioner.module_path = "./modules"
  end
end
# 3) vagrant up |
require "json" # needed for JSON.parse of the per-node attribute files

# Multi-machine Vagrantfile: defines two database VMs and one web VM on a
# private network, each provisioned with chef-solo from a per-node JSON
# attributes file found under ./nodes.
Vagrant.configure("2") do |config|
  # assumes your chef.json files are in cookbooks or site-cookbooks directory
  # FIX: this array was assigned to `servers` but referenced below as
  # `db_servers`, which raised a NameError on `vagrant up`.
  db_servers = [
    {
      id: :db1,
      ip: "172.16.2.111",
      node_json: "master-db.json"
    },
    {
      id: :db2,
      ip: "172.16.2.222",
      node_json: "standby-db.json"
    },
    {
      id: :web1,
      # FIX: 172.16.2.333 is not a valid IPv4 address (octet max is 255)
      ip: "172.16.2.133",
      node_json: "web.json"
    }
  ]

  # for testing remove some servers from provision
  db_servers.reject! { |item| item[:id] == :db2 }

  db_servers.each do |server_settings|
    config.vm.define server_settings[:id] do |db|
      db.vm.box = "precise32"
      db.vm.network :private_network, ip: server_settings[:ip]

      db.vm.provision :chef_solo do |chef|
        chef.cookbooks_path = ["./cookbooks", "./site-cookbooks"]

        # http://jbbarth.com/posts/2011-03-20-vagrant-provisioning-with-chefsolo.html
        # Each node file supplies both the run_list and attribute JSON.
        ["./nodes/#{server_settings[:node_json]}"].each do |node_file|
          json = JSON.parse(File.read(node_file))
          json["run_list"].each { |run| chef.add_recipe(run) }
          chef.json.merge!(json)
        end
      end
    end
  end
end