Commit ec2bfdbe authored by Aaron Johnson

working save_datasources module

* refactored to use positional arguments
parent ab06386b
 from grafana_backup.save_dashboards import main as save_dashboards
-#from grafana_backup.save_datasources import main as save_datasources
+from grafana_backup.save_datasources import main as save_datasources
 #from grafana_backup.save_folders import main as save_folders
 #from grafana_backup.save_alert_channels import main as save_alert_channels
 from grafana_backup.archive import main as archive
@@ -7,7 +7,7 @@ from grafana_backup.archive import main as archive
 def main(args, settings):
     save_dashboards(args, settings)
-    #save_datasources()
+    save_datasources(args, settings)
     #save_folders()
     #save_alert_channels()
     archive(args, settings)
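
The dispatcher now threads args and settings through positionally instead of relying on module-level globals. As a rough illustration of how it would be driven, here is a minimal sketch; the module path grafana_backup.save and the exact settings keys and values below are assumptions for the example, not part of this commit:

from grafana_backup.save import main as save_components  # assumed module path for the dispatcher above

args = {}  # parsed CLI arguments; the exact shape depends on the CLI parser used upstream
settings = {  # keys mirror the settings.get(...) calls in save_datasources below; values are examples
    'BACKUP_DIR': '_OUTPUT_',
    'TIMESTAMP': '20190101120000',
    'GRAFANA_URL': 'http://localhost:3000',
    'HTTP_GET_HEADERS': {'Authorization': 'Bearer <api token>'},
    'VERIFY_SSL': False,
    'DEBUG': True,
}

save_components(args, settings)  # runs save_dashboards and save_datasources, then archive
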
grafana_backup/save_datasources.py:

 import os
 import json
-from grafana_backup.dashboardApi import import_grafana_settings, search_datasource
+from grafana_backup.dashboardApi import search_datasource
 from grafana_backup.commons import print_horizontal_line
-settings = import_grafana_settings("grafanaSettings")
-globals().update(settings)  # To be able to use the settings here, we need to update the globals of this module
+def main(args, settings):
+    backup_dir = settings.get('BACKUP_DIR')
+    timestamp = settings.get('TIMESTAMP')
+    grafana_url = settings.get('GRAFANA_URL')
+    http_get_headers = settings.get('HTTP_GET_HEADERS')
+    verify_ssl = settings.get('VERIFY_SSL')
+    debug = settings.get('DEBUG')
-module_name = "datasources"
-folder_path = '{0}/{1}/{2}'.format(BACKUP_DIR, module_name, TIMESTAMP)
-log_file = '{0}_{1}.txt'.format(module_name, TIMESTAMP)
+    folder_path = '{0}/datasources/{1}'.format(backup_dir, timestamp)
+    log_file = 'datasources_{0}.txt'.format(timestamp)
-if not os.path.exists(folder_path):
-    os.makedirs(folder_path)
+    if not os.path.exists(folder_path):
+        os.makedirs(folder_path)
-def main():
-    datasources = get_all_datasources_and_save()
+    datasources = get_all_datasources_and_save(folder_path, grafana_url, http_get_headers, verify_ssl, debug)
     print_horizontal_line()
-def save_datasource(file_name, datasource_setting):
+def save_datasource(file_name, datasource_setting, folder_path):
     file_path = folder_path + '/' + file_name + '.datasource'
     with open(file_path, 'w') as f:
         f.write(json.dumps(datasource_setting))
     print("datasource:{0} is saved to {1}".format(file_name, file_path))
-def get_all_datasources_and_save():
-    status_code_and_content = search_datasource()
+def get_all_datasources_and_save(folder_path, grafana_url, http_get_headers, verify_ssl, debug):
+    status_code_and_content = search_datasource(grafana_url, http_get_headers, verify_ssl, debug)
     if status_code_and_content[0] == 200:
         datasources = status_code_and_content[1]
         print("There are {0} datasources:".format(len(datasources)))
         for datasource in datasources:
             print(datasource)
-            save_datasource(datasource['name'], datasource)
+            save_datasource(datasource['name'], datasource, folder_path)
     else:
         print("query datasource failed, status: {0}, msg: {1}".format(status_code_and_content[0],
                                                                        status_code_and_content[1]))
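
For reference, get_all_datasources_and_save expects search_datasource to return a (status_code, body) tuple. The real helper lives in grafana_backup.dashboardApi and is not shown in this commit; the sketch below is only an assumed version of that contract, hitting the Grafana /api/datasources endpoint with requests:

import requests

def search_datasource(grafana_url, http_get_headers, verify_ssl, debug):
    # Assumed shape of the helper: list data sources via the Grafana HTTP API
    url = '{0}/api/datasources'.format(grafana_url)
    if debug:
        print("GET {0}".format(url))
    r = requests.get(url, headers=http_get_headers, verify=verify_ssl)
    # Return (status_code, parsed body) so the caller can branch on the status code,
    # matching the tuple unpacking in get_all_datasources_and_save above
    return (r.status_code, r.json())
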