diff --git a/README.md b/README.md index 86776be..16ed4c7 100644 --- a/README.md +++ b/README.md @@ -18,10 +18,33 @@ python2.7 powergraph.py --host="server address" --port="server port" --user="all ``` You can use the optional parameter ```--store``` in order to save the infos as json on tinydb. Without this parameter, the script will -print on the terminal. +print on the terminal. Besides, you can use ```--feedback``` with store +in order to see the measurement status. If you want to set the time interval that a new csv file is +generated, you can use the flag ```--csv_interval```. The +```--tail_length``` is used to set the number of lines the csv +file will have. Run ```csvcreator.py``` like this: ``` python2.7 csvcreator.py --jsonfile="generated_json_name" ``` + +There are two optional arguments: ```--date```, to create the csv only with +the data from a specific day and ```--name```, with the name you want your +```csv``` file. + +The ```graph_csv.py``` runs the ```powergraph.py``` and, from time +to time, creates a new csv file, with the latest measures. To run it, +type: + +``` +python2.7 graph_csv.py --host="server address" --port="server port" --user="allowed user" --passwd="password for this user" +--jsonfile="path to bd jsonfile" +``` + +Besides, you can use the following optional arguments: +- interval: interval between each ipmi measure (default=10) +- nread: number of ipmi measures to be done (default=infinity) +- csv_interval: interval that a new csv file is made (default=300s) +- tail_length: size of the csv files (default=300) diff --git a/csvcreator.py b/csvcreator.py index 03bce02..4110190 100755 --- a/csvcreator.py +++ b/csvcreator.py @@ -66,6 +66,8 @@ def readdbtable(json_file_data, table_name): # Creates two separeted lists that will be the coluns in # the csv file. 
One is for the time and another for the # watts + timelist.append('date') + wattslist.append('consumption') for iten in dbdict: # print iten, dbdict[iten], timedict[dbdict[iten]] timelist.append(dbdict[iten]) diff --git a/graph_csv.py b/graph_csv.py new file mode 100755 index 0000000..2fd638c --- /dev/null +++ b/graph_csv.py @@ -0,0 +1,144 @@ +#! /usr/bin/env python +# -*- coding: utf-8 -*- + +""" +Copyright (C) 2017 Unicamp-OpenPower +Licensed under the Apache License, Version 2.0 (the “License”); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at +http://www.apache.org/licenses/LICENSE-2.0 +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an “AS IS” BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +""" +import argparse +import thread +import time +import os +import sys + +INTERVAL = 10 +PYTHON_VERSION = 'python2.7' + +def build_commands(args): + """ + function used to build the ipmi and csv commands + """ + year = time.strftime("%Y") + month = int(time.strftime("%m")) + day = int(time.strftime("%d")) + date=str(year)+str(month)+str(day) + + csv_command = PYTHON_VERSION + ' csvcreator.py --name=last --date=' + date + \ + ' --jsonfile=' + csv_command = csv_command + args.jsonfile + + if args.csv_interval: + global CSV_INTERVAL + CSV_INTERVAL = args.csv_interval + else: + CSV_INTERVAL = 300 + + powergraph_command = PYTHON_VERSION + ' powergraph.py' + if not args.host: + print "\nERROR: hostname is required.\n" + parser.print_help() + sys.exit(1) + else: + powergraph_command += ' --host=' + args.host + if args.port: + powergraph_command += ' --port=' + args.port + if not args.user: + print "\nERROR: username is required.\n" + parser.print_help() + sys.exit(1) + else: + powergraph_command += ' --user=' + args.user + if 
args.passwd: + powergraph_command += ' --passwd=' + args.passwd + if args.interval: + powergraph_command += ' --interval=' + args.interval + else: + powergraph_command += ' --interval=1' + powergraph_command += ' --store' + + return powergraph_command,csv_command + +def get_input(): + """ + function to get the arguments from the user + """ + parser = argparse.ArgumentParser(description='Parameters') + + parser.add_argument('--host', + help='adress of the host', required=True) + parser.add_argument('--port', + help='port of IPMI host', required=True) + parser.add_argument('--user', + help='user allowed to acces IPMI', required=True) + parser.add_argument('--passwd', + help='password for the specific user', required=True) + parser.add_argument('--interval', + help='seconds between each data reading') + parser.add_argument('--nread', + help='number of time to collect data') + + parser.add_argument('--jsonfile', + help='jsonfile to be converted as csv', required=True) + parser.add_argument('--csv_interval', + help='interval you want to create a new csv file') + parser.add_argument('--tail_length', + help='the amount of inputs do get from the csv file ' + 'in order to create the input for the graphic ' + 'visualization', + default=300) + + return parser.parse_args() + +def run_collector(command): + """ + function to run the collection of data + """ + try: + os.system(command) + except OSError as err: + print ("OS error: {0}".format(err)) + + +def run_csv(command,tail_length): + """ + function to run the csv generator + """ + while True: + time.sleep(float(CSV_INTERVAL)) + try: + os.system(command) + os.system("tail -n " + str(tail_length) + " last.csv > aux.csv") + os.system("mv -f aux.csv last.csv") + except OSError as err: + print("OS error: {0}".format(err)) + +def main(): + """ + Main execution. 
+ """ + args = get_input() + powergraph_command, csv_command = build_commands(args) + thread.start_new_thread(run_collector, (powergraph_command,)) + thread.start_new_thread(run_csv, (csv_command, args.tail_length, )) + try: + while True: + pass + except (KeyboardInterrupt, SystemExit): + print "\nExecution cancelled. Bye!" + sys.exit(1) + + +if __name__ == "__main__": + """ + Invoking the main execution function. + """ + main() + diff --git a/killer.sh b/killer.sh new file mode 100755 index 0000000..dc3f45c --- /dev/null +++ b/killer.sh @@ -0,0 +1,28 @@ +#!/bin/bash + +#script made to make sure the ipmi data collector is running. +#used alongside with crontab + +#name of the process that can be running to get data +threads=('powergraph.py' 'graph_csv.py') + +#get size of the threads array +size=$(echo ${#threads[@]}) +size=$(echo "$size-1" | bc) + +for i in $(seq 0 $size); do + #if the number of procees running are only one, so its the grep itself + #and nedd to be restarted + ps_size=$(ps aux | grep ${threads[i]} | wc -l) + if [ "$ps_size" == 1 ]; then + for i in $(seq 0 $size); do + for pid in $(pgrep -f ${threads[i]}); do + kill $pid + done + done + #comando to launch the data getter again + python2.7 graph_csv.py --host='' --port='' \ + --user='' --passwd=' \ + --interval=1 --jsonfile=powerdata.json & + fi +done diff --git a/powergraph.py b/powergraph.py index 9cc89be..44a9bdc 100755 --- a/powergraph.py +++ b/powergraph.py @@ -25,7 +25,7 @@ NREAD = 10 INFINITY = False STORE = False - +FEEDBACK = False def savedb(input): """ @@ -97,6 +97,9 @@ def get_input(): parser.add_argument('--nread', help='number of time to collect data') parser.add_argument('--store', action='store_true', help='save the data collected in a nosql db') + parser.add_argument('--feedback', action='store_true', + help='print the collecting status') + args = parser.parse_args() return args, parser @@ -135,6 +138,9 @@ def build_command(args, parser): if args.store: global STORE STORE = True + 
if args.feedback: + global FEEDBACK + FEEDBACK = True return cmd @@ -161,7 +167,7 @@ infos['Year'] = aux[2][2:6] infos['Energy'] = energy info = create_string(counter + 1, infos) - if not STORE: + if not STORE or FEEDBACK: print info - else: + if STORE: savedb(info.split('|'))