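"""Internal Flask API for Cybertek integrations.

Exposes endpoints for request logging, Managed Defender status, Cybertek
Agent network-performance check-ins, CSAT scores, Syncro finance data,
Keeper security reports, Synology backup webhooks, Power BI feeds, and
customer-specific reporting. Data is stored in MySQL; connection settings
are read from the MYSQL_HOST, MYSQL_USER, and MYSQL_PASSWORD environment
variables, and Hudu access uses HUDU_API_BASEURL and HUDU_API_KEY.
"""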
from flask import Flask, render_template, request, jsonify
from hudu_api_ctek import get_company_by_name, check_is_existing_asset, create_asset, update_asset
from syncro_api_module import get_syncro_managed_customers
import os
import mysql.connector
from mysql.connector import Error
import logging
import re
from datetime import datetime, timedelta

app = Flask(__name__)
app.config['ENV'] = 'development'
app.config['DEBUG'] = True

logging.basicConfig(level=logging.INFO)
app.logger.setLevel(logging.INFO)

VALID_API_TOKEN = "8f8b899ab9c663a8ce5d7803eb3da7235464478e54b7f4e5"
VALID_TOKENS_POWERBI = ["14e21348e8d19708c9b2933a2dc09683aa4c2a691121d574"]
HUDU_API_BASEURL = os.getenv("HUDU_API_BASEURL")
HUDU_API_KEY = os.getenv("HUDU_API_KEY")

def get_db_cnx(db_name):
    host = os.getenv("MYSQL_HOST")
    user = os.getenv("MYSQL_USER")
    password = os.getenv("MYSQL_PASSWORD")
    database = db_name

    if not host or not user or not password:
        raise Exception("Missing environment variables for MySQL connection.")

    try:
        cnx = mysql.connector.connect(user=user, password=password, host=host, database=database)
        return cnx
    except Error as e:
        raise Exception(f"Failed to connect to MySQL: {str(e)}")

#####################################################################
# CYBERTEK LOG ENDPOINT ROUTES
#####################################################################
@app.route("/log/requests", methods=["POST"])
def process_log():
    if request.is_json:
        log_data = request.get_json()
        cnx = get_db_cnx("ctek_logs")
        cursor = cnx.cursor()

        source_name = log_data.get('script_name', 'N/A')
        api_endpoint = log_data.get('api_endpoint', 'N/A')
        status_code = log_data.get('status_code', 'N/A')

        cursor.execute(
            "INSERT INTO api_requests (source_name, api_endpoint, status_code) VALUES (%s, %s, %s)",
            (source_name, api_endpoint, status_code)
        )
        cnx.commit()
        cursor.close()
        cnx.close()

        app.logger.info("Received api request log data, added to database.")
        return jsonify({"message": "Data successfully inserted"}), 200
    else:
        app.logger.error("Request data is not in JSON format.")
        return jsonify({"error": "Request data is not in JSON format"}), 400

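# Illustrative request body for /log/requests (field names taken from
# process_log above; the values are placeholders, not real log data):
#
#   {
#       "script_name": "example_sync_script",
#       "api_endpoint": "/v1/example",
#       "status_code": 200
#   }
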
#####################################################################
# MANAGED DEFENDER ENDPOINT ROUTES
#####################################################################
@app.route("/defender/endpoints", methods=["GET"])
def get_defender_endpoints():
    cnx = None
    cursor = None

    try:
        auth_header = request.headers.get('Authorization')
        if not auth_header or auth_header != f"Bearer {VALID_API_TOKEN}":
            app.logger.warning("Unauthorized access attempt.")
            return jsonify({"error": "Unauthorized"}), 401

        cnx = get_db_cnx("ctek_defender")
        cursor = cnx.cursor()
        cursor.execute("SELECT * FROM endpoints")

        # Fetch column headers
        column_headers = [desc[0] for desc in cursor.description]

        # Fetch rows
        rows = cursor.fetchall()

        # Convert rows to dictionaries with column headers as keys
        results = [dict(zip(column_headers, row)) for row in rows]

        app.logger.info("Retrieved Managed Defender Endpoint data.")
        return jsonify({"endpoints": results}), 200

    except Exception as e:
        app.logger.error(f"Failed to retrieve data: {str(e)}")
        return jsonify({"error": f"Failed to retrieve data: {str(e)}"}), 500

    finally:
        if cursor:
            cursor.close()
        if cnx:
            cnx.close()

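# Illustrative request for /defender/endpoints (the bearer token below is a
# placeholder; the route expects the value of VALID_API_TOKEN):
#
#   GET /defender/endpoints
#   Authorization: Bearer <VALID_API_TOKEN>
#
# The response is {"endpoints": [...]}, one dictionary per row of the
# ctek_defender.endpoints table.
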
@app.route("/defender/endpoint_status", methods=["POST"])
|
|
def process_defender_endpoint_status():
|
|
cnx = None
|
|
cursor = None
|
|
|
|
try:
|
|
auth_header = request.headers.get('Authorization')
|
|
if not auth_header or auth_header != f"Bearer {VALID_API_TOKEN}":
|
|
app.logger.warning("Unauthorized access attempt.")
|
|
return jsonify({"error": "Unauthorized"}), 401
|
|
|
|
data = request.get_json()
|
|
customer = data["customer"]
|
|
hostname = data["hostname"]
|
|
timestamp = data["timestamp"]
|
|
defender_info = data["defender_status"]
|
|
antivirus_enabled = defender_info["AntivirusEnabled"]
|
|
antispyware_enabled = defender_info["AntispywareEnabled"]
|
|
realtime_protection_enabled = defender_info["RealTimeProtectionEnabled"]
|
|
behavior_monitor_enabled = defender_info["BehaviorMonitorEnabled"]
|
|
full_scan_end_time = defender_info.get("FullScanEndTime") # .get() to handle possible None
|
|
quick_scan_end_time = defender_info.get("QuickScanEndTime")
|
|
quick_scan_overdue = defender_info["QuickScanOverdue"]
|
|
|
|
cnx = get_db_cnx("ctek_defender")
|
|
cursor = cnx.cursor()
|
|
|
|
query = """
|
|
INSERT INTO endpoints (
|
|
customer, hostname, last_updated, antivirus_enabled, antispyware_enabled,
|
|
realtime_protection_enabled, behavior_monitor_enabled, full_scan_end_time,
|
|
quick_scan_end_time, quick_scan_overdue
|
|
) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
|
|
ON DUPLICATE KEY UPDATE
|
|
last_updated=%s, antivirus_enabled=%s, antispyware_enabled=%s,
|
|
realtime_protection_enabled=%s, behavior_monitor_enabled=%s,
|
|
full_scan_end_time=%s, quick_scan_end_time=%s, quick_scan_overdue=%s
|
|
"""
|
|
|
|
values = (
|
|
customer, hostname, timestamp, antivirus_enabled, antispyware_enabled,
|
|
realtime_protection_enabled, behavior_monitor_enabled,
|
|
full_scan_end_time, quick_scan_end_time, quick_scan_overdue,
|
|
# Update section
|
|
timestamp, antivirus_enabled, antispyware_enabled,
|
|
realtime_protection_enabled, behavior_monitor_enabled,
|
|
full_scan_end_time, quick_scan_end_time, quick_scan_overdue
|
|
)
|
|
|
|
cursor.execute(query, values)
|
|
cnx.commit()
|
|
|
|
app.logger.info(f"Received Managed Endpoint Defender data for hostname: {hostname}")
|
|
return jsonify({"message": "Data successfully inserted"}), 200
|
|
|
|
except Exception as e:
|
|
app.logger.error(f"Failed to process data: {str(e)}")
|
|
return jsonify({"error": f"Failed to process data: {str(e)}"}), 500
|
|
|
|
finally:
|
|
if cursor:
|
|
cursor.close()
|
|
if cnx:
|
|
cnx.close()
|
|
|
|
|
|
|
|
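# Illustrative payload for /defender/endpoint_status (keys taken from the
# handler above; values are placeholders):
#
#   {
#       "customer": "Example Customer",
#       "hostname": "EXAMPLE-PC01",
#       "timestamp": "2024-01-01 12:00:00",
#       "defender_status": {
#           "AntivirusEnabled": true,
#           "AntispywareEnabled": true,
#           "RealTimeProtectionEnabled": true,
#           "BehaviorMonitorEnabled": true,
#           "FullScanEndTime": null,
#           "QuickScanEndTime": "2024-01-01 03:00:00",
#           "QuickScanOverdue": false
#       }
#   }
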
@app.route("/defender/endpoint_threat", methods=["POST"])
|
|
def process_defender_endpoint_threat():
|
|
cnx = None
|
|
cursor = None
|
|
|
|
try:
|
|
data = request.json
|
|
if not all(key in data for key in ("hostname", "threat_name")):
|
|
return jsonify({"error": "Missing required data fields"}), 400
|
|
|
|
config = data.json()
|
|
app.logger.info(f"Received Managed Endpoint Defender data: {data}")
|
|
print(config)
|
|
return jsonify({"message": "Data successfully inserted"}), 200
|
|
|
|
except Exception as e:
|
|
return jsonify({"error": f"Failed to process data: {str(e)}"}), 500
|
|
|
|
|
|
|
|
#####################################################################
# CYBERTEK AGENT ROUTES
#####################################################################
@app.route("/ctek_agent/netperf_checkin", methods=["POST"])
def process_agent_netperf_checkin():
    cnx = None
    cursor = None

    try:
        data = request.json
        required_fields = ["hostname", "customer_name", "dl_speed", "ul_speed", "dl_jitter", "ul_jitter", "packet_loss", "latency_defgw_ms", "latency_wan_ms", "isp"]
        if not data or not all(key in data for key in required_fields):
            return jsonify({"error": "Missing required data fields"}), 400

        agent_hostname = data["hostname"]
        customer_name = data["customer_name"]
        dl_speed = data["dl_speed"]
        ul_speed = data["ul_speed"]
        dl_jitter = data["dl_jitter"]
        ul_jitter = data["ul_jitter"]
        packet_loss = data["packet_loss"]
        latency_defgw_ms = data["latency_defgw_ms"]
        latency_wan_ms = data["latency_wan_ms"]
        isp = data["isp"]

        cnx = get_db_cnx("ctek_agent")
        cursor = cnx.cursor()
        cursor.execute(
            """
            INSERT INTO net_perf (
                hostname, customer_name, dl_speed, ul_speed, dl_jitter, ul_jitter,
                packet_loss, avg_latency_defgw, avg_latency_wan, isp
            ) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
            """,
            (agent_hostname, customer_name, dl_speed, ul_speed, dl_jitter, ul_jitter,
             packet_loss, latency_defgw_ms, latency_wan_ms, isp)
        )
        cnx.commit()

        app.logger.info(f"Received Cybertek Agent check-in data: {data}")
        return jsonify({"message": f"Data for {agent_hostname} successfully inserted"}), 200

    except mysql.connector.Error as db_err:
        app.logger.error(f"Database error: {str(db_err)}")
        return jsonify({"error": "Database error occurred"}), 500

    except Exception as e:
        app.logger.error(f"Error processing Cybertek Agent check-in data: {str(e)}")
        return jsonify({"error": f"Failed to process Cybertek Agent check-in data: {str(e)}"}), 500

    finally:
        if cursor is not None:
            cursor.close()
        if cnx is not None:
            cnx.close()

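# Illustrative payload for /ctek_agent/netperf_checkin (fields taken from
# required_fields above; values are placeholders):
#
#   {
#       "hostname": "AGENT-PC01",
#       "customer_name": "Example Customer",
#       "dl_speed": 940.2,
#       "ul_speed": 880.5,
#       "dl_jitter": 1.2,
#       "ul_jitter": 1.5,
#       "packet_loss": 0.0,
#       "latency_defgw_ms": 2.1,
#       "latency_wan_ms": 18.4,
#       "isp": "Example ISP"
#   }
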
@app.route("/ctek_agent/netperf_all", methods=["GET"])
|
|
def get_netperf_all():
|
|
cnx = None
|
|
cursor = None
|
|
token = request.headers.get('Authorization')
|
|
query = """
|
|
SELECT
|
|
avg_latency_defgw AS `RTD - LAN`,
|
|
avg_latency_wan AS `RTD - WAN`,
|
|
dl_speed AS `Download Speed`,
|
|
dl_jitter AS `Download Jitter`,
|
|
ul_speed AS `Upload Speed`,
|
|
ul_jitter AS `Upload Jitter`,
|
|
packet_loss AS `Packet Loss`,
|
|
hostname AS `Hostname`,
|
|
customer_name AS `Customer Name`,
|
|
timestamp AS `TimeStamp`,
|
|
isp AS `ISP`
|
|
FROM ctek_agent.net_perf
|
|
WHERE timestamp >= DATE_SUB(NOW(), INTERVAL 90 DAY)
|
|
"""
|
|
|
|
try:
|
|
if not token or token.replace('Bearer ', '') not in VALID_TOKENS_POWERBI:
|
|
return jsonify({"error": "Unauthorized"}), 401
|
|
cnx = get_db_cnx('ctek_agent')
|
|
cursor = cnx.cursor(dictionary=True)
|
|
cursor.execute(query)
|
|
|
|
results=cursor.fetchall()
|
|
return jsonify(results), 200
|
|
|
|
except Exception as e:
|
|
return jsonify({"error": str(e)}), 500
|
|
|
|
finally:
|
|
if cursor:
|
|
cursor.close()
|
|
if cnx:
|
|
cnx.close()
|
|
|
|
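# Illustrative request for /ctek_agent/netperf_all (the token is a placeholder;
# the route accepts any value in VALID_TOKENS_POWERBI, with or without a
# "Bearer " prefix) and returns the last 90 days of net_perf rows:
#
#   GET /ctek_agent/netperf_all
#   Authorization: Bearer <POWERBI_TOKEN>
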
#####################################################################
# CYBERTEK CSAT ROUTES
# ---PENDING LOGIC TO PULL THE REAL SCORE AVERAGE
#####################################################################
@app.route("/csat/average_score", methods=["GET"])
def get_average_csat_score():
    return jsonify({"average_score": 4.85}), 200

#####################################################################
# CYBERTEK FINANCE ROUTES
#####################################################################
@app.route("/finance/invoices_paid", methods=["GET"])
def get_syncro_invoices_paid():
    cnx = None
    cursor = None
    auth_header = request.headers.get('Authorization')

    if not auth_header or auth_header != f'Bearer {VALID_API_TOKEN}':
        app.logger.warning("Unauthorized access attempt.")
        return jsonify({"error": "Unauthorized"}), 401

    try:
        cnx = get_db_cnx("ctek_syncro")
        cursor = cnx.cursor(dictionary=True)
        cursor.execute("""
            SELECT i.*, c.tax_rate_id
            FROM invoices i
            JOIN customers c ON i.customer_id = c.id
            WHERE i.is_paid = 1;
        """)
        invoices = cursor.fetchall()

        app.logger.info("Retrieved all paid invoices.")
        return jsonify({"invoices": invoices}), 200

    except Exception as e:
        app.logger.error(f"Failed to retrieve data: {str(e)}")
        return jsonify({"error": f"Failed to retrieve data: {str(e)}"}), 500

    finally:
        if cursor:
            cursor.close()
        if cnx:
            cnx.close()

#####################################################################
# KEEPER SECURITY ROUTES
#####################################################################
@app.route('/keeper/security_report', methods=['POST'])
def receive_security_report():
    cnx = None
    cursor = None

    try:
        data = request.get_json()

        app.logger.info(f"Received data: {data}")  # Log received data

        cnx = get_db_cnx("ctek_keeper")
        cursor = cnx.cursor()

        # Properly formatted SQL query with correct number of placeholders
        sql = """
            INSERT INTO security_report (
                user_email,
                user_name,
                password_count_weak,
                password_count_medium,
                password_count_strong,
                password_count_reused,
                `password_count_unique`,
                security_score,
                user_2fa,
                customer_name
            ) VALUES (
                %s, %s, %s, %s, %s, %s, %s, %s, %s, %s
            ) ON DUPLICATE KEY UPDATE
                password_count_weak = VALUES(password_count_weak),
                password_count_medium = VALUES(password_count_medium),
                password_count_strong = VALUES(password_count_strong),
                password_count_reused = VALUES(password_count_reused),
                `password_count_unique` = VALUES(password_count_unique),
                security_score = VALUES(security_score),
                user_2fa = VALUES(user_2fa),
                customer_name = VALUES(customer_name)
        """

        for user in data:
            user_email = user.get("email", "N/A")
            user_name = user.get("name", "N/A")
            password_count_weak = user.get("weak", 0)
            password_count_medium = user.get("medium", 0)
            password_count_strong = user.get("strong", 0)
            password_count_reused = user.get("reused", 0)
            password_count_unique = user.get("unique", 0)
            security_score = user.get("securityScore", 0)
            user_2fa = user.get("twoFactorChannel", "N/A")
            customer_name = user.get("node", "N/A")

            params = (
                user_email, user_name, password_count_weak, password_count_medium,
                password_count_strong, password_count_reused, password_count_unique,
                security_score, user_2fa, customer_name
            )

            cursor.execute(sql, params)

        cnx.commit()

        return jsonify({"message": "Data successfully inserted"}), 200

    except mysql.connector.Error as db_err:
        app.logger.error(f"Database error: {str(db_err)}")
        return jsonify({"error": "Database error occurred"}), 500

    except Exception as e:
        app.logger.error(f"Error processing data: {str(e)}")
        return jsonify({"error": f"Failed to process data: {str(e)}"}), 500

    finally:
        if cursor is not None:
            cursor.close()
        if cnx is not None:
            cnx.close()

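# Illustrative payload for /keeper/security_report (a JSON list; key names are
# taken from the handler above, values are placeholders):
#
#   [
#       {
#           "email": "user@example.com",
#           "name": "Example User",
#           "weak": 2,
#           "medium": 5,
#           "strong": 40,
#           "reused": 3,
#           "unique": 44,
#           "securityScore": 87,
#           "twoFactorChannel": "<2fa_channel>",
#           "node": "Example Customer"
#       }
#   ]
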
@app.route('/keeper/security_reports', methods=['GET'])
def get_security_reports():
    cnx = None
    cursor = None

    try:
        cnx = get_db_cnx("ctek_keeper")
        cursor = cnx.cursor()
        cursor.execute("SELECT * FROM security_report")

        column_headers = [desc[0] for desc in cursor.description]
        rows = cursor.fetchall()
        results = [dict(zip(column_headers, row)) for row in rows]

        return jsonify({"reports": results}), 200

    except mysql.connector.Error as db_err:
        app.logger.error(f"Database error: {str(db_err)}")
        return jsonify({"error": "Database error occurred"}), 500

    except Exception as e:
        app.logger.error(f"Error processing data: {str(e)}")
        return jsonify({"error": f"Failed to process data: {str(e)}"}), 500

    finally:
        if cursor is not None:
            cursor.close()
        if cnx is not None:
            cnx.close()

#####################################################################
# SYNOLOGY ENDPOINT ROUTES
#####################################################################
@app.route('/synology/backup_job_status', methods=['POST'])
def process_backup_job_status():
    backups_layout_id = 12
    timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    app.logger.info(f"Received Synology backup job status data at {timestamp}")

    try:
        data = request.json
        app.logger.info(f"Received webhook request data. {data}")
        customer_name = data.get("customer_name", "Unknown Customer")
        app.logger.info(f"Processing data for customer: {customer_name}")

        text = data.get("text", "")
        match = re.search(r'backup task (.*?) on', text)
        if match:
            backup_task = match.group(1)
        else:
            backup_task = "Unknown Task"

        app.logger.info(f"Backup task: {backup_task}")

        custom_fields = [{"backup_description": "Active Backup for Business",
                          "last_backup": timestamp}]

        hudu_company, error = get_company_by_name(
            HUDU_API_BASEURL, HUDU_API_KEY, customer_name
        )

        if error:
            app.logger.error(f"Hudu company lookup failed for {customer_name}: {error}")

        company_id = hudu_company.get("id", None) if hudu_company else None

        if not company_id:
            app.logger.error(
                f"Company ID not found for company name: {customer_name}"
            )
            return jsonify({"error": "Company ID not found"}), 400

        existing_asset_id = check_is_existing_asset(
            HUDU_API_BASEURL,
            HUDU_API_KEY,
            company_id,
            backups_layout_id,
            backup_task
        )

        app.logger.info(f"Existing asset ID: {existing_asset_id}")

        if not existing_asset_id:
            asset_response = create_asset(
                HUDU_API_BASEURL,
                HUDU_API_KEY,
                company_id,
                backups_layout_id,
                backup_task,
                custom_fields,
            )
        else:
            asset_response = update_asset(
                HUDU_API_BASEURL,
                HUDU_API_KEY,
                company_id,
                existing_asset_id,
                custom_fields,
            )

        return jsonify({"message": "Data successfully processed"}), 200

    except Exception as e:
        app.logger.error(f"Failed to process data: {str(e)}")
        return jsonify({"error": f"Failed to process data: {str(e)}"}), 500

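# Illustrative webhook payload for /synology/backup_job_status (keys taken from
# the handler above; the "text" value only needs to match the
# "backup task <name> on" pattern used by the regex):
#
#   {
#       "customer_name": "Example Customer",
#       "text": "Completed backup task Example-Task on NAS01."
#   }
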
#####################################################################
# POWERBI ENDPOINT ROUTES
#####################################################################
@app.route("/powerbi/m365_users_all", methods=["GET"])
def get_m365_users_all():
    # Accept the Power BI token with or without a "Bearer " prefix
    token = request.headers.get("Authorization")
    if not token or token.replace("Bearer ", "") not in VALID_TOKENS_POWERBI:
        return "You are not authorized to access this route", 401

    managed_customers = get_syncro_managed_customers()
    m365_users_all = []

    for customer in managed_customers:
        customer_name = customer.get("business_name")
        customer_shortname = customer.get("properties", {}).get("Customer Short Name")
        db_name = f"Customer_{customer_shortname}"

        try:
            mysql_conn = get_db_cnx(db_name)
            mysql_cursor = mysql_conn.cursor(
                dictionary=True
            )  # Use dictionary=True to get results as dictionaries
            mysql_cursor.execute("SELECT * FROM m365_users")
            m365_users = mysql_cursor.fetchall()

            # Inject customer name into each record
            for user in m365_users:
                user["CustomerName"] = customer_name

            m365_users_all.extend(m365_users)

        except mysql.connector.Error as e:
            app.logger.error(f"Error querying MySQL database {db_name}: {str(e)}")
        finally:
            if "mysql_cursor" in locals() and mysql_cursor:
                mysql_cursor.close()
            if "mysql_conn" in locals() and mysql_conn:
                mysql_conn.close()

    return jsonify(m365_users_all)

@app.route("/powerbi/m365_securescores_all", methods=["GET"])
|
|
def get_m365_securescores_all():
|
|
token = request.headers.get("Authorization")
|
|
if token not in VALID_TOKENS_POWERBI:
|
|
return "You are not authorized to access this route", 401
|
|
|
|
managed_customers = get_syncro_managed_customers()
|
|
m365_securescores_all = []
|
|
|
|
for customer in managed_customers:
|
|
customer_name = customer.get("business_name")
|
|
customer_shortname = customer.get("properties", {}).get("Customer Short Name")
|
|
db_name = f"Customer_{customer_shortname}"
|
|
|
|
try:
|
|
mysql_conn = get_db_cnx(db_name)
|
|
mysql_cursor = mysql_conn.cursor(
|
|
dictionary=True
|
|
) # Use dictionary=True to get results as dictionaries
|
|
mysql_cursor.execute("SELECT * FROM m365_secure_score")
|
|
m365_securescores = mysql_cursor.fetchall()
|
|
|
|
# Inject customer name into each record
|
|
for score in m365_securescores:
|
|
score["CustomerName"] = customer_name
|
|
|
|
m365_securescores_all.extend(m365_securescores)
|
|
|
|
except mysql.connector.Error as e:
|
|
app.logger.error(f"Error querying MySQL database {db_name}: {str(e)}")
|
|
finally:
|
|
if "mysql_cursor" in locals() and mysql_cursor:
|
|
mysql_cursor.close()
|
|
if "mysql_conn" in locals() and mysql_conn:
|
|
mysql_conn.close()
|
|
|
|
return jsonify(m365_securescores_all)
|
|
|
|
|
|
@app.route("/csat/responses", methods=["GET"])
|
|
def get_csat_responses():
|
|
cnx = None
|
|
cursor = None
|
|
|
|
try:
|
|
token = request.headers.get("Authorization")
|
|
if token not in VALID_TOKENS_POWERBI:
|
|
return "You are not authorized to access this route", 401
|
|
|
|
cnx = get_db_cnx("simplesat")
|
|
cursor = cnx.cursor()
|
|
cursor.execute("SELECT * FROM responses")
|
|
|
|
# Fetch column headers
|
|
column_headers = [desc[0] for desc in cursor.description]
|
|
|
|
# Fetch rows
|
|
rows = cursor.fetchall()
|
|
|
|
# Convert rows to dictionaries with column headers as keys
|
|
results = [dict(zip(column_headers, row)) for row in rows]
|
|
|
|
app.logger.info("Retrieved CSAT response data.")
|
|
return jsonify({"endpoints": results}), 200
|
|
|
|
except Exception as e:
|
|
app.logger.error(f"Failed to retrieve data: {str(e)}")
|
|
return jsonify({"error": f"Failed to retrieve data: {str(e)}"}), 500
|
|
|
|
finally:
|
|
if cursor:
|
|
cursor.close()
|
|
if cnx:
|
|
cnx.close()
|
|
|
|
|
|
#####################################################################
# CUSTOMER SPECIFIC ROUTES
#####################################################################
@app.route('/cust/spbhs/user_activity_checkin', methods=['POST'])
def add_user_activity():
    cnx = None
    cursor = None

    try:
        activities = request.get_json()
        if not isinstance(activities, list):
            activities = [activities]

        cnx = get_db_cnx('Customer_SPB')
        cursor = cnx.cursor()

        check_query = """
            SELECT 1 FROM workstation_user_activity
            WHERE timestamp = %s
            AND username = %s
            AND event_type = %s
            AND computer_name = %s
            AND event_id = %s
        """

        insert_query = """
            INSERT INTO workstation_user_activity
            (timestamp, event_type, username, computer_name, event_id)
            VALUES (%s, %s, %s, %s, %s)
        """

        records_added = 0
        duplicates = 0

        for activity in activities:
            values = (
                activity['timestamp'],
                activity['username'],
                activity['event_type'],
                activity['computer_name'],
                activity['event_id']
            )

            # Check if record exists
            cursor.execute(check_query, values)
            exists = cursor.fetchone() is not None

            if not exists:
                # Reorder values for insert to match column order
                insert_values = (
                    activity['timestamp'],
                    activity['event_type'],
                    activity['username'],
                    activity['computer_name'],
                    activity['event_id']
                )
                cursor.execute(insert_query, insert_values)
                records_added += 1
            else:
                duplicates += 1

        cnx.commit()

        return jsonify({
            'records_added': records_added,
            'duplicates_skipped': duplicates
        }), 201

    except Exception as e:
        return jsonify({'error': str(e)}), 500

    finally:
        if cursor is not None:
            cursor.close()
        if cnx is not None:
            cnx.close()

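# Illustrative payload for /cust/spbhs/user_activity_checkin (a single object
# or a list of objects; field names taken from the handler above, values are
# placeholders):
#
#   [
#       {
#           "timestamp": "2024-01-01 08:30:00",
#           "event_type": "logon",
#           "username": "jdoe",
#           "computer_name": "SPB-PC01",
#           "event_id": 4624
#       }
#   ]
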
@app.route('/cust/spbhs/user_activity_report', methods=['GET'])
def get_user_activity_report():
    cnx = None
    cursor = None

    try:
        start_date = request.args.get('start_date',
                                      (datetime.utcnow() - timedelta(days=1)).strftime('%Y-%m-%d %H:%M:%S'))
        end_date = request.args.get('end_date',
                                    datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S'))

        cnx = get_db_cnx('Customer_SPB')
        cursor = cnx.cursor(dictionary=True)

        query = """
            SELECT
                timestamp,
                event_type,
                username,
                computer_name,
                event_id
            FROM workstation_user_activity
            WHERE timestamp BETWEEN %s AND %s
            ORDER BY timestamp DESC
        """

        cursor.execute(query, (start_date, end_date))
        activities = cursor.fetchall()

        return jsonify({
            'status': 'success',
            'data': activities
        })

    except Exception as e:
        return jsonify({
            'status': 'error',
            'message': str(e)
        }), 500

    finally:
        if cursor is not None:
            cursor.close()
        if cnx is not None:
            cnx.close()

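# Illustrative request for /cust/spbhs/user_activity_report; start_date and
# end_date are optional and default to the last 24 hours (UTC):
#
#   GET /cust/spbhs/user_activity_report?start_date=2024-01-01%2000:00:00&end_date=2024-01-02%2000:00:00
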
#####################################################################
# NEW WIFI CREDENTIALS ROUTE
#####################################################################
@app.route("/wifi_credentials/add", methods=["POST"])
def add_wifi_credentials():
    cnx = None
    cursor = None

    try:
        # Parse incoming JSON data
        data = request.json
        ssid = data.get('ssid')
        preshared_key = data.get('preshared_key')

        if not ssid or not preshared_key:
            return jsonify({'error': 'SSID and preshared_key are required'}), 400

        # Connect to the database
        cnx = get_db_cnx("wificonnections")
        cursor = cnx.cursor()

        # Check for duplicates
        check_query = "SELECT id FROM wifi_credentials WHERE ssid = %s"
        cursor.execute(check_query, (ssid,))
        result = cursor.fetchone()

        if result:
            return jsonify({'message': 'SSID already exists'}), 409

        # Insert new record
        insert_query = "INSERT INTO wifi_credentials (ssid, preshared_key) VALUES (%s, %s)"
        cursor.execute(insert_query, (ssid, preshared_key))
        cnx.commit()

        return jsonify({'message': 'WiFi credentials added successfully'}), 201

    except Exception as e:
        return jsonify({'error': str(e)}), 500

    finally:
        if cursor is not None:
            cursor.close()
        if cnx is not None:
            cnx.close()

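# Illustrative payload for /wifi_credentials/add (field names taken from the
# handler above; values are placeholders):
#
#   {
#       "ssid": "Example-WiFi",
#       "preshared_key": "example-passphrase"
#   }
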
if __name__ == '__main__':
    app.run(host='0.0.0.0', port=5000, debug=True)