# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

import jsonpatch

from cloudinit import handlers
from cloudinit import log as logging
from cloudinit import mergers
from cloudinit import util

LOG = logging.getLogger(__name__)

MERGE_HEADER = 'Merge-Type'

# Due to the way yaml configuration was loaded previously, where each cloud
# config part was appended to one larger yaml file and that file was then
# loaded as a single yaml document, we need to mimic that behavior by
# altering the default merge strategy to replace the keys of prior merges.
#
# For example:
# #file 1
# a: 3
# #file 2
# a: 22
# #combined file (comments not included)
# a: 3
# a: 22
#
# This gets loaded into yaml with final result {'a': 22}
DEF_MERGERS = mergers.string_extract_mergers('dict(replace)+list()+str()')
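
# A rough sketch of what the default merge string above selects (illustrative
# only, assuming the usual semantics of the mergers module): dict keys coming
# from a later part replace keys already present, and lists/strings are
# likewise replaced rather than appended, e.g.:
#
#   merger = mergers.construct(DEF_MERGERS)
#   merger.merge({'a': 3, 'b': [1]}, {'a': 22, 'b': [2]})
#   # -> {'a': 22, 'b': [2]}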

CLOUD_PREFIX = "#cloud-config"
JSONP_PREFIX = "#cloud-config-jsonp"

# The file header -> content types this module will handle.
CC_TYPES = {
    JSONP_PREFIX: handlers.type_from_starts_with(JSONP_PREFIX),
    CLOUD_PREFIX: handlers.type_from_starts_with(CLOUD_PREFIX),
}
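
# For reference, type_from_starts_with() is expected to resolve these header
# prefixes to MIME content types, presumably something like:
#
#   CC_TYPES[CLOUD_PREFIX]  # e.g. 'text/cloud-config' (assumed mapping)
#   CC_TYPES[JSONP_PREFIX]  # e.g. 'text/cloud-config-jsonp' (assumed mapping)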


class CloudConfigPartHandler(handlers.Handler):
    def __init__(self, paths, **_kwargs):
        self.cloud_buf = None
        self.cloud_fn = paths.get_ipath("cloud_config")
        self.file_names = []

    def list_types(self):
        return list(CC_TYPES.values())

    def _write_cloud_config(self):
        if not self.cloud_fn:
            return
        # Capture which files we merged from...
        file_lines = []
        if self.file_names:
            file_lines.append("# from %s files" % (len(self.file_names)))
            for fn in self.file_names:
                file_lines.append("# %s" % (fn))
            file_lines.append("")
        if self.cloud_buf is not None:
            # Something was actually gathered....
            lines = [CLOUD_PREFIX, '']
        else:
            lines = []
        lines.extend(file_lines)

    def _extract_mergers(self, payload, headers):
        # Merge strategies can be requested in the yaml payload itself or
        # via the part's 'Merge-Type' header; gather whatever was provided.
        payload_yaml = util.load_yaml(payload)
        all_mergers = []
        all_mergers.extend(mergers.dict_extract_mergers(payload_yaml))
        mergers_header = mergers.string_extract_mergers(
            headers.get(MERGE_HEADER, ''))
        all_mergers.extend(mergers_header)
        if not all_mergers:
            all_mergers = DEF_MERGERS
        return (payload_yaml, all_mergers)

    def _merge_patch(self, payload):
        # JSON doesn't handle comments in this manner, so ensure that
        # if we started with this 'type' that we remove it before
        # attempting to load it as json (which the jsonpatch library will
        # attempt to do).
        payload = payload.lstrip()
        payload = util.strip_prefix_suffix(payload, prefix=JSONP_PREFIX)
        patch = jsonpatch.JsonPatch.from_string(payload)
        LOG.debug("Merging by applying json patch %s", patch)
        self.cloud_buf = patch.apply(self.cloud_buf, in_place=False)
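
    # For example, a '#cloud-config-jsonp' user-data part is expected to carry
    # an RFC 6902 json-patch document after its header line, something like:
    #
    #   #cloud-config-jsonp
    #   [{"op": "add", "path": "/runcmd", "value": ["echo hello"]},
    #    {"op": "replace", "path": "/hostname", "value": "patched-host"}]
    #
    # which jsonpatch then applies on top of the cloud config gathered so far.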

    def _merge_part(self, payload, headers):
        (payload_yaml, my_mergers) = self._extract_mergers(payload, headers)
        LOG.debug("Merging by applying %s", my_mergers)
        merger = mergers.construct(my_mergers)
        self.cloud_buf = merger.merge(self.cloud_buf, payload_yaml)
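
    # A part can opt out of the default replace-style merging by providing a
    # 'Merge-Type' header (or an equivalent key inside its yaml, as
    # _extract_mergers() above shows); for example something along the
    # lines of:
    #
    #   Merge-Type: dict(no_replace)+list(append)+str()
    #
    # would be constructed instead of DEF_MERGERS (the option names here are
    # illustrative; see the mergers module for the supported set).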

    def _reset(self):
        self.file_names = []
        self.cloud_buf = None

    def handle_part(self, _data, ctype, filename,  # pylint: disable=W0221
                    payload, _frequency, headers):  # pylint: disable=W0613
        try:
            # First time through, merge with an empty dict...
            if self.cloud_buf is None or not self.file_names:
                self.cloud_buf = {}
            if ctype == CC_TYPES[JSONP_PREFIX]:
                self._merge_patch(payload)
            else:
                self._merge_part(payload, headers)
            # Ensure filename is ok to store
            for i in ("\n", "\r", "\t"):
                filename = filename.replace(i, " ")
            self.file_names.append(filename.strip())
        except:
            util.logexc(LOG, "Failed at merging in cloud config part from %s",
                        filename)