Python Forum
Thread Rating:
  • 0 Vote(s) - 0 Average
  • 1
  • 2
  • 3
  • 4
  • 5
File Counter Help!
#1
Hi,

I've been working on a file counter that will cout files from a folder in linux (CentOS7) that can output to to a page which i can use prometheus to collect the data but i'm not having much luck getting it to ready what i need can anyone help with what could be wrong?

count_exporter.py

#!/usr/bin/env python3
import os
import json
from argparse import ArgumentParser
from prometheus_client import start_http_server, Metric, REGISTRY
from prometheus_client.core import GaugeMetricFamily, CounterMetricFamily
import time
import logging
import sys

PORT = 9666  # Metrik-Endpoint Port for calling "ip:PORT/metrics"
UPDATE_INTERVAL = 10  # in seconds

debug = False
files = {}
states = {}
config = "./config.txt"

def parse_args():
    parser = ArgumentParser()
    parser.add_argument('-d', '--debug', action='store_true',
                        help='Activate Debug Output on stdout')
    parser.add_argument('config', help='Path of Config File',nargs='?')	
    args = parser.parse_args()
    if args.config is None:
        args.config = "./config.txt"
    return args

def count_files_in_path(path):
	if debug:
			print ("Count Files in %s" %path)
	num_files = len([f for f in os.listdir(path)if os.path.isfile(os.path.join(path, f))])
	return num_files

def read_config(path):
	if debug:
			print ("Open Config %s" %path)
	file = open(path,"r+")
	lines = file.readlines()
	for line in lines:
		if len(line.split()) < 2:
			sys.exit("Error in Config File!")
		path = line.split()[1]
		name = line.split()[0]
		files[name] = path
		states[name] = 0
	return 

def update_matches():
	for name, state in states.items():
		states[name] = count_files_in_path(files[name])
		if debug:
			print ("Update State for %s: %s" %name,state)
	return
    
class CustomCollector(object):
    def collect(self):
        c = GaugeMetricFamily('file_count', 'Filescount in Path', labels=[
                                'name'])
        for name, state in states.items():
            if debug:
                print ("Collect State for %s: %s" %name,state)
            c.add_metric(['%s' % name], int(state))
        yield c

class StreamToLogger(object):
    """
    Fake file-like stream object that redirects writes to a logger instance.
    """

    def __init__(self, logger,terminal, log_level=logging.INFO):
        self.terminal = terminal
        self.logger = logger
        self.log_level = log_level
        self.linebuf = ''

    def write(self, buf):
        self.terminal.write(buf)
        for line in buf.rstrip().splitlines():
            self.logger.log(self.log_level, line.rstrip())
            #self.terminal.write(line)

    logging.basicConfig(
        level=logging.DEBUG,
        format='%(asctime)s:%(levelname)s:%(name)s:%(message)s',
        filename="out.log",
        filemode='a'
    )


if __name__ == '__main__':
    args = parse_args()
    if args.debug:
        debug = True
        config = args.config
        read_config(config)
    # Set Logger to Stdout and Stderror
    stdout_logger = logging.getLogger('STDOUT')
    sl = StreamToLogger(stdout_logger, sys.stdout, logging.INFO)
    sys.stdout = sl
    stderr_logger = logging.getLogger('STDERR')
    sl = StreamToLogger(stderr_logger, sys.stderr, logging.ERROR)
    sys.stderr = sl
    update_matches()
    print ("Start Listening on Port %s" % PORT)
    # Start Http Server, waiting for Prometheus Pulls
    start_http_server(int(PORT))
    REGISTRY.register(CustomCollector())
    while True:
        update_matches()
        time.sleep(UPDATE_INTERVAL)

config.txt
[hr]
IN /data/IN
OUT1 /data/OUT1
OUT2 /data/OUT2
OUT3 /data/OUT3


locations for files are:
Service: /etc/systemd/system/count_exporter.service
Script: /etc/count_exporter/count_exporter.py
Config: /etc/count_exporter/config.txt
Counting loctions:
/data/IN
/data/OUT1
/data/OUT2
/data/OUT3
I found this on the following link, i've tried to follow this the best i can and had to make some changes to the py script on the site as it was trying to run on 2.7, https://github.com/fpietsch/prometheus-f...t-exporter.

I have got the script to run and outputs data onto the httpserver but does not display any metrics for the file count.

example output (localhost:9666/metrics)

Output:
# HELP python_gc_objects_collected_total Objects collected during gc # TYPE python_gc_objects_collected_total counter python_gc_objects_collected_total{generation="0"} 371.0 python_gc_objects_collected_total{generation="1"} 7.0 python_gc_objects_collected_total{generation="2"} 0.0 # HELP python_gc_objects_uncollectable_total Uncollectable object found during GC # TYPE python_gc_objects_uncollectable_total counter python_gc_objects_uncollectable_total{generation="0"} 0.0 python_gc_objects_uncollectable_total{generation="1"} 0.0 python_gc_objects_uncollectable_total{generation="2"} 0.0 # HELP python_gc_collections_total Number of times this generation was collected # TYPE python_gc_collections_total counter python_gc_collections_total{generation="0"} 43.0 python_gc_collections_total{generation="1"} 3.0 python_gc_collections_total{generation="2"} 0.0 # HELP python_info Python platform information # TYPE python_info gauge python_info{implementation="CPython",major="3",minor="6",patchlevel="8",version="3.6.8"} 1.0 # HELP process_virtual_memory_bytes Virtual memory size in bytes. # TYPE process_virtual_memory_bytes gauge process_virtual_memory_bytes 3.76455168e+08 # HELP process_resident_memory_bytes Resident memory size in bytes. # TYPE process_resident_memory_bytes gauge process_resident_memory_bytes 1.6924672e+07 # HELP process_start_time_seconds Start time of the process since unix epoch in seconds. # TYPE process_start_time_seconds gauge process_start_time_seconds 1.59214640502e+09 # HELP process_cpu_seconds_total Total user and system CPU time spent in seconds. # TYPE process_cpu_seconds_total counter process_cpu_seconds_total 0.65 # HELP process_open_fds Number of open file descriptors. # TYPE process_open_fds gauge process_open_fds 7.0 # HELP process_max_fds Maximum number of open file descriptors. # TYPE process_max_fds gauge process_max_fds 1024.0 # HELP file_count Filescount in Path # TYPE file_count gauge


Any help would be really helpful! i have also changed the folder/file permissions to 777 on all areas incase that this was the issue.

Thanks.
Reply


Possibly Related Threads…
Thread Author Replies Views Last Post
  Counter of the duplicated packets from a pcap file salwa17 8 4,145 Jun-26-2020, 11:31 PM
Last Post: salwa17

Forum Jump:

User Panel Messages

Announcements
Announcement #1 8/1/2020
Announcement #2 8/2/2020
Announcement #3 8/6/2020