~smoser/cloud-init/trunk.transfer-ds


Viewing changes to cloudinit/stages.py

  • Committer: Scott Moser
  • Date: 2016-03-22 08:08:51 UTC
  • Revision ID: smoser@ubuntu.com-20160322080851-3hiise035rniilq2
commit data source transfer stuff

the goal here is to make v2 datasources that are more aware of
network_config and can return a 'transfer' mode, that basically
says "Yes, I'm the datasource you're looking for, but I need some network".

I walked through the datasources and looked at which ones could benefit from this.
The first ones to look at would be:
ConfigDrive: claim, hostname, network
NoCloud    : claim, hostname, network
MAAS       : claim, network (disabled)
Azure      : claim, hostname

                definitive   hostname   network
== Pure Local ==
ConfigDrive        YES         yes        yes
CloudSigma         YES         yes        no
NoCloud            YES         yes        yes
OpenNebula         Weak        yes        yes
OVF                Weak        yes        yes
SmartOS            YES         yes        no
AltCloud           YES         no         no

== Hybrid ==
Azure              YES         yes        no
BigStep            YES         no         could [partner mod]

== Pure Network ==
CloudStack         NO          no         no
Ec2                NO          no         yes [network]
GCE                NO          no         no?
MAAS               YES         no         no
OpenStack          NO          no         no
DigitalOcean       NO          no         no
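
For context, a minimal sketch of what a transfer-capable datasource could look
like. Only the is_transfer()/un_transfer() method names come from the diff
below; the platform check and metadata fetch are hypothetical stand-ins:

    from cloudinit import sources


    class DataSourceExample(sources.DataSource):
        """Illustrative only: a datasource that can claim the platform
        locally but needs networking to fetch its metadata."""

        def __init__(self, sys_cfg, distro, paths):
            super(DataSourceExample, self).__init__(sys_cfg, distro, paths)
            self._in_transfer = False

        def get_data(self):
            # Local stage: positively identify the platform, but defer
            # the (network-only) metadata fetch.
            if self._platform_matches():
                self._in_transfer = True  # "it's me, but I need some network"
                return True
            return False

        def is_transfer(self):
            return self._in_transfer

        def un_transfer(self):
            # Network stage: networking is up, finish the job.
            self._fetch_metadata_over_network()  # hypothetical helper
            self._in_transfer = False

        def _platform_matches(self):
            return True  # hypothetical check (DMI data, drive label, ...)

        def _fetch_metadata_over_network(self):
            pass  # hypothetical metadata retrieval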

=== modified file 'cloudinit/stages.py'
--- cloudinit/stages.py
+++ cloudinit/stages.py
@@ -42,6 +42,7 @@
 from cloudinit import distros
 from cloudinit import helpers
 from cloudinit import importer
+from cloudinit import net
 from cloudinit import log as logging
 from cloudinit import sources
 from cloudinit import type_utils
@@ -193,40 +194,12 @@
         # We try to restore from a current link and static path
         # by using the instance link, if purge_cache was called
         # the file wont exist.
-        pickled_fn = self.paths.get_ipath_cur('obj_pkl')
-        pickle_contents = None
-        try:
-            pickle_contents = util.load_file(pickled_fn, decode=False)
-        except Exception as e:
-            if os.path.isfile(pickled_fn):
-                LOG.warn("failed loading pickle in %s: %s" % (pickled_fn, e))
-            pass
-
-        # This is expected so just return nothing
-        # successfully loaded...
-        if not pickle_contents:
-            return None
-        try:
-            return pickle.loads(pickle_contents)
-        except Exception:
-            util.logexc(LOG, "Failed loading pickled blob from %s", pickled_fn)
-            return None
+        return _pkl_load(self.paths.get_ipath_cur('obj_pkl'))

     def _write_to_cache(self):
         if self.datasource is NULL_DATA_SOURCE:
             return False
-        pickled_fn = self.paths.get_ipath_cur("obj_pkl")
-        try:
-            pk_contents = pickle.dumps(self.datasource)
-        except Exception:
-            util.logexc(LOG, "Failed pickling datasource %s", self.datasource)
-            return False
-        try:
-            util.write_file(pickled_fn, pk_contents, omode="wb", mode=0o400)
-        except Exception:
-            util.logexc(LOG, "Failed pickling datasource to %s", pickled_fn)
-            return False
-        return True
+        return _pkl_store(self.datasource, self.paths.get_ipath_cur("obj_pkl"))

     def _get_datasources(self):
         # Any config provided???
@@ -341,7 +314,11 @@
         return iid

     def fetch(self, existing="check"):
-        return self._get_data_source(existing=existing)
+        transfer_pkl = os.path.join(self.paths.local_transfer_dir, "ds.pkl")
+        if os.path.exists(transfer_pkl):
+            return self.un_transfer(transfer_pkl)
+        else:
+            return self._get_data_source(existing=existing)

     def instancify(self):
         return self._reflect_cur_instance()
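
Note that self.paths.local_transfer_dir is not defined anywhere in this diff,
which only touches stages.py; presumably a companion change to helpers.Paths
would add it, roughly along these lines (assumed, not part of the branch):

    import os

    class Paths(object):  # sketch of the assumed helpers.Paths addition
        def __init__(self, path_cfgs, ds=None):
            self.cloud_dir = path_cfgs.get('cloud_dir', '/var/lib/cloud')
            # assumed: somewhere for the transfer pickle to live between
            # the local boot stage and the network boot stage
            self.local_transfer_dir = os.path.join(self.cloud_dir, 'transfer')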
@@ -595,6 +572,23 @@
         # Run the handlers
         self._do_handlers(user_data_msg, c_handlers_list, frequency)

+    def store_transfer(self):
+        transfer_pkl = os.path.join(self.paths.local_transfer_dir, "ds.pkl")
+        return _pkl_store(self.datasource, transfer_pkl)
+
+    def un_transfer(self, fname):
+        tranobj = _pkl_load(fname)
+        if not tranobj:
+            raise RuntimeError("failed to load untransfer pkl from %s" % fname)
+        self.datasource = tranobj
+        tranobj.un_transfer()
+        self._reset()
+
+    def is_transfer(self):
+        if hasattr(self.datasource, 'is_transfer'):
+            return self.datasource.is_transfer()
+        return False
+

 class Modules(object):
     def __init__(self, init, cfg_files=None, reporter=None):
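
Taken together with the fetch() change above, the intended boot flow would
presumably look something like this. A hedged sketch: the split into local and
network stages is how cloud-init already boots, but the exact call sites are
assumed, not shown in this branch:

    from cloudinit import stages

    # cloud-init local (networking not yet up):
    init = stages.Init(ds_deps=[])   # restrict discovery to local datasources
    init.fetch()
    if init.is_transfer():
        # datasource claimed the platform but needs network; park it
        init.store_transfer()

    # cloud-init (network stage, later in boot):
    init = stages.Init()
    init.fetch()   # finds ds.pkl, un-pickles it and calls un_transfer()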
@@ -796,3 +790,36 @@
     base_cfgs.append(default_cfg)

     return util.mergemanydict(base_cfgs)
+
+
+def _pkl_store(obj, fname):
+    try:
+        pk_contents = pickle.dumps(obj)
+    except Exception:
+        util.logexc(LOG, "Failed pickling datasource %s", obj)
+        return False
+    try:
+        util.write_file(fname, pk_contents, omode="wb", mode=0o400)
+    except Exception:
+        util.logexc(LOG, "Failed pickling datasource to %s", fname)
+        return False
+    return True
+
+
+def _pkl_load(fname):
+    pickle_contents = None
+    try:
+        pickle_contents = util.load_file(fname, decode=False)
+    except Exception as e:
+        if os.path.isfile(fname):
+            LOG.warn("failed loading pickle in %s: %s" % (fname, e))
+        pass
+
+    # This is allowed so just return nothing successfully loaded...
+    if not pickle_contents:
+        return None
+    try:
+        return pickle.loads(pickle_contents)
+    except Exception:
+        util.logexc(LOG, "Failed loading pickled blob from %s", fname)
+        return None
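
The two helpers round-trip as you'd expect; a quick illustrative check (the
path is arbitrary, and _pkl_store returns False rather than raising on
failure):

    obj = {'datasource': 'NoCloud', 'mode': 'transfer'}
    if _pkl_store(obj, '/tmp/ds.pkl'):
        assert _pkl_load('/tmp/ds.pkl') == obj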