# NOTE(review): the bare numeric lines throughout this chunk look like diff
# line-number gutters from a mangled paste, not program text — confirm the
# clean source against VCS.
82
82
# Live Hadoop configuration directory, taken from the environment.
HADOOP_CONF_DIR = os.environ["HADOOP_CONF_DIR"]
83
83
# Stock HDP "core_hadoop" configuration files under the unpacked
# hdp_scripts tree (presumably in the user's home dir — TODO confirm `home`).
HDPConfPath = os.path.join(os.path.sep,home, hdpScript, "configuration_files", "core_hadoop")
84
84
source = os.listdir(HDPConfPath)
85
# shutil.rmtree(HADOOP_CONF_DIR)
86
# os.mkdir(HADOOP_CONF_DIR)
87
85
# Build source/destination paths for each stock config file.
# NOTE(review): the statement that performs the actual copy is outside this
# view; only the path computation is visible here.
for files in source:
88
86
srcFile = os.path.join(os.path.sep, HDPConfPath, files)
89
87
desFile = os.path.join(os.path.sep, HADOOP_CONF_DIR, files)
# NOTE(review): top-level provisioning statements; `group`, the helpers and
# `config_get` are defined elsewhere in the file.
133
131
# Give the MapReduce/YARN runtime directories to their service users.
setDirPermission(os.environ['MAPRED_PID_DIR'], os.environ['MAPRED_USER'], group, 0755)
134
132
setDirPermission(os.environ['YARN_LOCAL_DIR'], os.environ['YARN_USER'], group, 0755)
135
133
setDirPermission(os.environ['YARN_LOCAL_LOG_DIR'], os.environ['YARN_USER'], group, 0755)
136
#subprocess.call(createLogPIDallNodes)
137
#subprocess.call(createResourceManDataNodeDir)
138
134
# Point hdfs-site.xml at the charm-configured name/data directories.
hdfsConfPath = os.path.join(os.path.sep, os.environ['HADOOP_CONF_DIR'],'hdfs-site.xml')
139
135
setHadoopConfigXML(hdfsConfPath, "dfs.namenode.name.dir", config_get('dfs_name_dir'))
140
136
setHadoopConfigXML(hdfsConfPath, "dfs.datanode.data.dir", config_get('dfs_data_dir'))
# NOTE(review): tail of a function (presumably start_namenode) whose `def`
# line is outside this view — launches the NameNode daemon via `su` as
# hdfsUser using the Hadoop sbin wrapper.
184
180
cmd = shlex.split("su {} -c '/usr/lib/hadoop/sbin/hadoop-daemon.sh --config {} start namenode'".\
185
181
format(hdfsUser, hadoopConfDir))
186
182
subprocess.call(cmd)
def stop_namenode(hdfsUser):
    """Stop the HDFS NameNode daemon, running hadoop-daemon.sh as hdfsUser.

    hdfsUser -- system account that owns the HDFS daemons (e.g. 'hdfs').
    """
    # Bug fix: the original log text said "start namenode" in this stop helper.
    log("==> stop namenode for user={}".format(hdfsUser), "INFO")
    hadoopConfDir = os.environ["HADOOP_CONF_DIR"]
    cmd = shlex.split(
        "su {} -c '/usr/lib/hadoop/sbin/hadoop-daemon.sh --config {} stop namenode'".format(
            hdfsUser, hadoopConfDir))
    # Bug fix: the command was assembled but never executed in the visible
    # original; run it, mirroring the sibling start/stop helpers.
    subprocess.call(cmd)
# candidate for BD charm helper
def start_datanode(hdfsUser):
    """Start the HDFS DataNode daemon, running hadoop-daemon.sh as hdfsUser.

    hdfsUser -- system account that owns the HDFS daemons (e.g. 'hdfs').
    """
    # Bug fix: the original log text said "start namenode" in this datanode helper.
    log("==> start datanode for user={}".format(hdfsUser), "INFO")
    hadoopConfDir = os.environ["HADOOP_CONF_DIR"]
    # NOTE(review): the paste lost the statement that builds `cmd`, leaving a
    # dangling `format(...)` continuation; reconstructed from the symmetric
    # stop_datanode helper — confirm against VCS.
    cmd = shlex.split(
        "su {} -c '/usr/lib/hadoop/sbin/hadoop-daemon.sh --config {} start datanode'".format(
            hdfsUser, hadoopConfDir))
    subprocess.call(cmd)
def stop_datanode(hdfsUser):
    """Stop the HDFS DataNode daemon, running hadoop-daemon.sh as hdfsUser.

    hdfsUser -- system account that owns the HDFS daemons (e.g. 'hdfs').
    """
    # Bug fix: the original log text said "start namenode" in this stop helper.
    log("==> stop datanode for user={}".format(hdfsUser), "INFO")
    hadoopConfDir = os.environ["HADOOP_CONF_DIR"]
    cmd = shlex.split(
        "su {} -c '/usr/lib/hadoop/sbin/hadoop-daemon.sh --config {} stop datanode'".format(
            hdfsUser, hadoopConfDir))
    # Bug fix: the command was assembled but never executed in the visible
    # original; run it, mirroring the sibling start/stop helpers.
    subprocess.call(cmd)
195
205
# candidate for BD charm helper
196
206
# NOTE(review): only the opening of configureYarn is visible in this paste;
# the rest of the body (writing RMhostname into yarn-site.xml, presumably)
# lies outside this view — confirm against VCS.
def configureYarn(RMhostname):
197
207
# yarn-site.xml under the live Hadoop configuration directory.
yarnConfPath = os.path.join(os.path.sep, os.environ['HADOOP_CONF_DIR'],"yarn-site.xml")
# NOTE(review): tail of a function (presumably start_RM) whose opening lines
# are outside this view — it launches the ResourceManager via `su` as yarnUser.
220
230
format(yarnUser, hadoopConfDir))
221
231
subprocess.call(cmd)
def stop_RM(yarnUser):
    """Stop the YARN ResourceManager daemon, running yarn-daemon.sh as yarnUser.

    yarnUser -- system account that owns the YARN daemons (e.g. 'yarn').
    """
    log("==> stop resourcemanager", "INFO")
    hadoopConfDir = os.environ["HADOOP_CONF_DIR"]
    # yarn-daemon.sh resolves its helper scripts through HADOOP_LIBEXEC_DIR.
    os.environ["HADOOP_LIBEXEC_DIR"] = "/usr/lib/hadoop/libexec"
    cmd = shlex.split(
        "su {} -c '/usr/lib/hadoop-yarn/sbin/yarn-daemon.sh --config {} stop resourcemanager'".format(
            yarnUser, hadoopConfDir))
    # Bug fix: the command was assembled but never executed in the visible
    # original; run it, mirroring the sibling start/stop helpers.
    subprocess.call(cmd)
# candidate for BD charm helper
def start_NM(yarnUser):
    """Start the YARN NodeManager daemon, running yarn-daemon.sh as yarnUser.

    yarnUser -- system account that owns the YARN daemons (e.g. 'yarn').
    """
    log("==> start nodemanager", "INFO")
    # Bug fix / NOTE(review): the visible original referenced hadoopConfDir
    # without assigning it (the assignment was lost in the paste); it is
    # reconstructed here from the symmetric stop_NM helper — confirm in VCS.
    hadoopConfDir = os.environ["HADOOP_CONF_DIR"]
    # yarn-daemon.sh resolves its helper scripts through HADOOP_LIBEXEC_DIR.
    os.environ["HADOOP_LIBEXEC_DIR"] = "/usr/lib/hadoop/libexec"
    cmd = shlex.split(
        "su {} -c '/usr/lib/hadoop-yarn/sbin/yarn-daemon.sh --config {} start nodemanager'".format(
            yarnUser, hadoopConfDir))
    subprocess.call(cmd)
def stop_NM(yarnUser):
    """Stop the YARN NodeManager daemon, running yarn-daemon.sh as yarnUser.

    yarnUser -- system account that owns the YARN daemons (e.g. 'yarn').
    """
    log("==> stop nodemanager", "INFO")
    hadoopConfDir = os.environ["HADOOP_CONF_DIR"]
    # yarn-daemon.sh resolves its helper scripts through HADOOP_LIBEXEC_DIR.
    os.environ["HADOOP_LIBEXEC_DIR"] = "/usr/lib/hadoop/libexec"
    cmd = shlex.split(
        "su {} -c '/usr/lib/hadoop-yarn/sbin/yarn-daemon.sh --config {} stop nodemanager'".format(
            yarnUser, hadoopConfDir))
    # Bug fix: the command was assembled but never executed in the visible
    # original; run it, mirroring the sibling start/stop helpers.
    subprocess.call(cmd)
def stop_hadoop_services():
    """Stop every Hadoop daemon (YARN then HDFS) that is active on this node.

    Each daemon is checked with is_jvm_service_active() and, only if it is
    running, stopped as the owning user taken from the environment.
    """
    daemons = (
        ("ResourceManager", stop_RM,       'YARN_USER'),
        ("NodeManager",     stop_NM,       'YARN_USER'),
        ("NameNode",        stop_namenode, 'HDFS_USER'),
        ("DataNode",        stop_datanode, 'HDFS_USER'),
    )
    for service, stopper, user_var in daemons:
        if is_jvm_service_active(service):
            # Environment is read only when the service is actually running,
            # matching the original branch-by-branch behaviour.
            stopper(os.environ[user_var])
def restart_hadoop_services():
    """Bounce (stop, then immediately start) every Hadoop daemon active here.

    Daemons that are not currently running are left untouched, exactly as in
    the per-service if-blocks this replaces.
    """
    daemons = (
        ("ResourceManager", stop_RM,       start_RM,       'YARN_USER'),
        ("NodeManager",     stop_NM,       start_NM,       'YARN_USER'),
        ("NameNode",        stop_namenode, start_namenode, 'HDFS_USER'),
        ("DataNode",        stop_datanode, start_datanode, 'HDFS_USER'),
    )
    for service, stopper, starter, user_var in daemons:
        if is_jvm_service_active(service):
            stopper(os.environ[user_var])
            starter(os.environ[user_var])
def configureHDFS(hostname):
    """Write the NameNode endpoints for *hostname* into the Hadoop XML config.

    Sets fs.defaultFS (core-site.xml) to the RPC endpoint on port 8020 and
    dfs.namenode.http-address (hdfs-site.xml) to the web UI on port 50070.
    """
    confDir = os.environ['HADOOP_CONF_DIR']
    corePath = os.path.join(os.path.sep, confDir, 'core-site.xml')
    hdfsPath = os.path.join(os.path.sep, confDir, 'hdfs-site.xml')
    setHadoopConfigXML(corePath, "fs.defaultFS", "hdfs://"+hostname+":8020")
    setHadoopConfigXML(hdfsPath, "dfs.namenode.http-address", hostname+":50070")
#################################### Global Data ################################
# Sanity-check that the HDP helper script was unpacked where we expect it.
# NOTE(review): `hdp_hellper_script` (sic) is defined elsewhere in the file;
# the misspelled name is kept because other code references it.
if not os.path.isfile(hdp_hellper_script):
    # Bug fix: the log message previously read "Erro".
    log("Error ==> {} not found".format(hdp_hellper_script), "ERROR")
238
# These scripts should be removed, use bdutils.setDirPermission() instead, look at hdp-zookeeper for example
239
# Helper shell scripts shipped under the charm's files/scripts directory.
createLogPIDallNodes = os.path.join(os.path.sep, os.environ['CHARM_DIR'],'files', 'scripts', "createLogPIDallNodes.sh")
240
295
createDadoopConfDir = os.path.join(os.path.sep, os.environ['CHARM_DIR'],'files', 'scripts', "createHadoopConfDir.sh")
241
createResourceManDataNodeDir = os.path.join(os.path.sep, os.environ['CHARM_DIR'],'files', 'scripts', "createResourceManDataNodeDir.sh")
242
296
# Location of the unpacked HDP companion scripts (presumably under $HOME —
# `home` is defined elsewhere in the file; TODO confirm).
hdpScript = "hdp_scripts"
243
297
hdpScriptPath = os.path.join(os.path.sep,home, hdpScript,'scripts')
244
298
usersAndGroupsScript = os.path.join(os.path.sep, hdpScriptPath, "usersAndGroups.sh")
333
387
@hooks.hook('namenode-relation-joined')
334
388
# Juju hook: 'joined' phase on the namenode side — publish readiness, set up
# directories/config, then format and start the NameNode.
# NOTE(review): the diff gutter numbers jump between hunks, so some lines of
# this function are missing from this paste; verify the full body in VCS.
def namenode_relation_joined():
335
389
log("Configuring namenode - joined phase", "INFO")
336
#global nameNodeReady
338
391
# Advertise on the relation when a NameNode JVM is already running.
if is_jvm_service_active("NameNode"):
339
392
relation_set(nameNodeReady=True)
342
395
# Pull the Hadoop environment variables into os.environ before using them.
setHadoopEnvVar()
343
396
nodeType="namenode"
344
#createNameNodeDirs = os.path.join(os.path.sep, os.environ['CHARM_DIR'],'files',\
345
# 'scripts', "createNameNodeDirs.sh")
346
#subprocess.call(createNameNodeDirs)
347
397
setDirPermission(os.environ['DFS_NAME_DIR'], os.environ['HDFS_USER'], os.environ['HADOOP_GROUP'], 0755)
348
398
# Tell the remote side which host the NameNode lives on.
relation_set(namenode_hostname=gethostname())
349
# NOTE(review): the four statements below sit on old-only gutter lines
# (349-352) and appear superseded by the configureHDFS() call that follows —
# likely removed in the newer revision captured by this diff.
hdfsConfPath = os.path.join(os.path.sep, os.environ['HADOOP_CONF_DIR'],'hdfs-site.xml')
350
coreConfPath = os.path.join(os.path.sep, os.environ['HADOOP_CONF_DIR'],'core-site.xml')
351
setHadoopConfigXML(coreConfPath, "fs.defaultFS", "hdfs://"+gethostname()+":8020")
352
setHadoopConfigXML(hdfsConfPath, "dfs.namenode.http-address", gethostname()+":50070")
399
configureHDFS(gethostname())
353
400
# One-time HDFS format, then bring the NameNode up as the hdfs user.
format_namenode(os.environ["HDFS_USER"])
354
401
start_namenode(os.environ["HDFS_USER"])
# NOTE(review): tail of a datanode-side relation hook whose `def` line is
# outside this view; it configures HDFS against the remote NameNode host and
# starts the DataNode.
371
418
setHadoopEnvVar()
372
419
# NOTE(review): nodeType is set to "namenode" even though this path services
# the datanode — looks copy-pasted; confirm intent.
nodeType="namenode"
373
420
# Hostname published by the namenode side of the relation.
namenode_hostname = relation_get("namenode_hostname")
374
# NOTE(review): the four statements below sit on old-only gutter lines and
# appear superseded by the configureHDFS() call further down.
hdfsConfPath = os.path.join(os.path.sep, os.environ['HADOOP_CONF_DIR'],'hdfs-site.xml')
375
coreConfPath = os.path.join(os.path.sep, os.environ['HADOOP_CONF_DIR'],'core-site.xml')
376
setHadoopConfigXML(coreConfPath, "fs.defaultFS", "hdfs://"+namenode_hostname+":8020")
377
setHadoopConfigXML(hdfsConfPath, "dfs.namenode.http-address", namenode_hostname+":50070")
378
#createDataNodeDirs = os.path.join(os.path.sep, os.environ['CHARM_DIR'],'files',\
379
# 'scripts', "createDataNodeDirs.sh")
380
#subprocess.call(createDataNodeDirs)
421
configureHDFS(namenode_hostname)
381
422
setDirPermission(os.environ['DFS_DATA_DIR'], os.environ['HDFS_USER'], os.environ['HADOOP_GROUP'], 0750)
382
423
start_datanode(os.environ["HDFS_USER"])
@hooks.hook('config-changed')
def config_changed():
    """Juju config-changed hook: re-apply configuration by bouncing the
    Hadoop daemons that are currently active on this node."""
    log("config-changed called", "INFO")
    # NOTE(review): one or two lines between the log call and the restart are
    # missing from this paste (diff gutter gap) — verify against VCS.
    # Bug fix: the original referenced `restart_hadoop_services` without
    # calling it (bare name), which made this hook a silent no-op.
    restart_hadoop_services()
@hooks.hook('namenode-relation-changed')
def namenode_relation_changed():
    """Juju hook: 'changed' phase of the namenode relation.

    Currently only records that the phase fired; all real work happens in
    the 'joined' phase handler.
    """
    log("Configuring namenode - changed phase", "INFO")
399
443
@hooks.hook('start')