Add USB, Notifications, Network plugins and reusable EmployeeSearch component
New Plugins: - USB plugin: Device checkout/checkin with employee lookup, checkout history - Notifications plugin: Announcements with types, scheduling, shopfloor display - Network plugin: Network device management with subnets and VLANs - Equipment and Computers plugins: Asset type separation Frontend: - EmployeeSearch component: Reusable employee lookup with autocomplete - USB views: List, detail, checkout/checkin modals - Notifications views: List, form with recognition mode - Network views: Device list, detail, form - Calendar view with FullCalendar integration - Shopfloor and TV dashboard views - Reports index page - Map editor for asset positioning - Light/dark mode fixes for map tooltips Backend: - Employee search API with external lookup service - Collector API for PowerShell data collection - Reports API endpoints - Slides API for TV dashboard - Fixed AppVersion model (removed BaseModel inheritance) - Added checkout_name column to usbcheckouts table Styling: - Unified detail page styles - Improved pagination (page numbers instead of prev/next) - Dark/light mode theme improvements Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
@@ -1,6 +1,7 @@
|
||||
"""Core API blueprints."""
|
||||
|
||||
from .auth import auth_bp
|
||||
from .assets import assets_bp
|
||||
from .machines import machines_bp
|
||||
from .machinetypes import machinetypes_bp
|
||||
from .pctypes import pctypes_bp
|
||||
@@ -14,9 +15,14 @@ from .dashboard import dashboard_bp
|
||||
from .applications import applications_bp
|
||||
from .knowledgebase import knowledgebase_bp
|
||||
from .search import search_bp
|
||||
from .reports import reports_bp
|
||||
from .collector import collector_bp
|
||||
from .employees import employees_bp
|
||||
from .slides import slides_bp
|
||||
|
||||
__all__ = [
|
||||
'auth_bp',
|
||||
'assets_bp',
|
||||
'machines_bp',
|
||||
'machinetypes_bp',
|
||||
'pctypes_bp',
|
||||
@@ -30,4 +36,8 @@ __all__ = [
|
||||
'applications_bp',
|
||||
'knowledgebase_bp',
|
||||
'search_bp',
|
||||
'reports_bp',
|
||||
'collector_bp',
|
||||
'employees_bp',
|
||||
'slides_bp',
|
||||
]
|
||||
|
||||
659
shopdb/core/api/assets.py
Normal file
659
shopdb/core/api/assets.py
Normal file
@@ -0,0 +1,659 @@
|
||||
"""Assets API endpoints - unified asset queries."""
|
||||
|
||||
from flask import Blueprint, request
|
||||
from flask_jwt_extended import jwt_required
|
||||
|
||||
from shopdb.extensions import db
|
||||
from shopdb.core.models import Asset, AssetType, AssetStatus, AssetRelationship, RelationshipType
|
||||
from shopdb.utils.responses import (
|
||||
success_response,
|
||||
error_response,
|
||||
paginated_response,
|
||||
ErrorCodes
|
||||
)
|
||||
from shopdb.utils.pagination import get_pagination_params, paginate_query
|
||||
|
||||
assets_bp = Blueprint('assets', __name__)
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Asset Types
|
||||
# =============================================================================
|
||||
|
||||
@assets_bp.route('/types', methods=['GET'])
@jwt_required()
def list_asset_types():
    """Return a paginated list of asset types.

    Query parameters:
        - page / per_page: pagination controls
        - active: unless set to 'false', only active types are returned
    """
    page, per_page = get_pagination_params(request)

    type_query = AssetType.query
    # Hide inactive types unless the caller explicitly opts out.
    if request.args.get('active', 'true').lower() != 'false':
        type_query = type_query.filter(AssetType.isactive == True)

    rows, total = paginate_query(
        type_query.order_by(AssetType.assettype), page, per_page
    )

    return paginated_response(
        [row.to_dict() for row in rows], page, per_page, total
    )
|
||||
|
||||
|
||||
@assets_bp.route('/types/<int:type_id>', methods=['GET'])
@jwt_required()
def get_asset_type(type_id: int):
    """Fetch one asset type by primary key; 404 when it does not exist."""
    asset_type = AssetType.query.get(type_id)

    if asset_type is None:
        return error_response(
            ErrorCodes.NOT_FOUND,
            f'Asset type with ID {type_id} not found',
            http_code=404
        )

    return success_response(asset_type.to_dict())
|
||||
|
||||
|
||||
@assets_bp.route('/types', methods=['POST'])
@jwt_required()
def create_asset_type():
    """Create an asset type; the type name must be unique (409 on duplicate)."""
    payload = request.get_json()
    name = (payload or {}).get('assettype')

    if not name:
        return error_response(ErrorCodes.VALIDATION_ERROR, 'assettype is required')

    if AssetType.query.filter_by(assettype=name).first():
        return error_response(
            ErrorCodes.CONFLICT,
            f"Asset type '{name}' already exists",
            http_code=409
        )

    new_type = AssetType(
        assettype=name,
        plugin_name=payload.get('plugin_name'),
        table_name=payload.get('table_name'),
        description=payload.get('description'),
        icon=payload.get('icon')
    )
    db.session.add(new_type)
    db.session.commit()

    return success_response(new_type.to_dict(), message='Asset type created', http_code=201)
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Asset Statuses
|
||||
# =============================================================================
|
||||
|
||||
@assets_bp.route('/statuses', methods=['GET'])
@jwt_required()
def list_asset_statuses():
    """Return a paginated list of asset statuses.

    Passing ?active=false includes inactive statuses; by default only
    active ones are returned.
    """
    page, per_page = get_pagination_params(request)

    status_query = AssetStatus.query
    if request.args.get('active', 'true').lower() != 'false':
        status_query = status_query.filter(AssetStatus.isactive == True)

    rows, total = paginate_query(
        status_query.order_by(AssetStatus.status), page, per_page
    )

    return paginated_response(
        [row.to_dict() for row in rows], page, per_page, total
    )
|
||||
|
||||
|
||||
@assets_bp.route('/statuses/<int:status_id>', methods=['GET'])
@jwt_required()
def get_asset_status(status_id: int):
    """Fetch one asset status by primary key; 404 when it does not exist."""
    status = AssetStatus.query.get(status_id)

    if status is None:
        return error_response(
            ErrorCodes.NOT_FOUND,
            f'Asset status with ID {status_id} not found',
            http_code=404
        )

    return success_response(status.to_dict())
|
||||
|
||||
|
||||
@assets_bp.route('/statuses', methods=['POST'])
@jwt_required()
def create_asset_status():
    """Create an asset status; the status name must be unique (409 on duplicate)."""
    payload = request.get_json()
    name = (payload or {}).get('status')

    if not name:
        return error_response(ErrorCodes.VALIDATION_ERROR, 'status is required')

    if AssetStatus.query.filter_by(status=name).first():
        return error_response(
            ErrorCodes.CONFLICT,
            f"Asset status '{name}' already exists",
            http_code=409
        )

    new_status = AssetStatus(
        status=name,
        description=payload.get('description'),
        color=payload.get('color')
    )
    db.session.add(new_status)
    db.session.commit()

    return success_response(new_status.to_dict(), message='Asset status created', http_code=201)
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Assets
|
||||
# =============================================================================
|
||||
|
||||
@assets_bp.route('', methods=['GET'])
@jwt_required()
def list_assets():
    """
    List all assets with filtering and pagination.

    Query parameters:
    - page: Page number (default: 1)
    - per_page: Items per page (default: 20, max: 100)
    - active: Filter by active status (default: true)
    - search: Search by assetnumber, name, or serialnumber
    - type: Filter by asset type name (e.g., 'equipment', 'computer')
    - type_id: Filter by asset type ID (integer)
    - status_id: Filter by status ID (integer)
    - location_id: Filter by location ID (integer)
    - businessunit_id: Filter by business unit ID (integer)
    - sort: Sort column (assetnumber, name, createddate, modifieddate)
    - dir: Sort direction ('asc' or 'desc', default 'asc')
    - include_type_data: Include category-specific extension data (default: false)

    Returns a paginated response; non-integer values for the ID filters
    produce a validation error instead of a server error.
    """
    page, per_page = get_pagination_params(request)

    query = Asset.query

    # Active filter (opt out with ?active=false)
    if request.args.get('active', 'true').lower() != 'false':
        query = query.filter(Asset.isactive == True)

    # Search filter across number, name and serial
    if search := request.args.get('search'):
        like = f'%{search}%'
        query = query.filter(
            db.or_(
                Asset.assetnumber.ilike(like),
                Asset.name.ilike(like),
                Asset.serialnumber.ilike(like)
            )
        )

    # Type filter by name
    if type_name := request.args.get('type'):
        query = query.join(AssetType).filter(AssetType.assettype == type_name)

    # Integer ID filters. Fix: a non-numeric value used to raise an
    # unhandled ValueError (HTTP 500); reject it with a 400 instead.
    int_filters = {
        'type_id': Asset.assettypeid,
        'status_id': Asset.statusid,
        'location_id': Asset.locationid,
        'businessunit_id': Asset.businessunitid,
    }
    for param, column in int_filters.items():
        if raw := request.args.get(param):
            try:
                query = query.filter(column == int(raw))
            except ValueError:
                return error_response(
                    ErrorCodes.VALIDATION_ERROR,
                    f'{param} must be an integer'
                )

    # Sorting: unknown sort columns fall back to assetnumber ascending
    sort_by = request.args.get('sort', 'assetnumber')
    sort_dir = request.args.get('dir', 'asc')

    sort_columns = {
        'assetnumber': Asset.assetnumber,
        'name': Asset.name,
        'createddate': Asset.createddate,
        'modifieddate': Asset.modifieddate,
    }

    if sort_by in sort_columns:
        col = sort_columns[sort_by]
        query = query.order_by(col.desc() if sort_dir == 'desc' else col)
    else:
        query = query.order_by(Asset.assetnumber)

    items, total = paginate_query(query, page, per_page)

    # Extension data is expensive, so it is opt-in for list views.
    include_type_data = request.args.get('include_type_data', 'false').lower() == 'true'
    data = [a.to_dict(include_type_data=include_type_data) for a in items]

    return paginated_response(data, page, per_page, total)
|
||||
|
||||
|
||||
@assets_bp.route('/<int:asset_id>', methods=['GET'])
@jwt_required()
def get_asset(asset_id: int):
    """Return one asset with full details (404 if missing).

    Query parameters:
    - include_type_data: include category-specific extension data
      (default: true; pass 'false' to omit)
    """
    asset = Asset.query.get(asset_id)

    if asset is None:
        return error_response(
            ErrorCodes.NOT_FOUND,
            f'Asset with ID {asset_id} not found',
            http_code=404
        )

    with_ext = request.args.get('include_type_data', 'true').lower() != 'false'
    return success_response(asset.to_dict(include_type_data=with_ext))
|
||||
|
||||
|
||||
@assets_bp.route('', methods=['POST'])
@jwt_required()
def create_asset():
    """Create a new asset.

    Requires 'assetnumber' (unique across assets) and 'assettypeid'
    (must reference an existing asset type). 'statusid' defaults to 1
    when omitted; all other fields are optional.
    """
    payload = request.get_json()

    if not payload:
        return error_response(ErrorCodes.VALIDATION_ERROR, 'No data provided')

    # Required fields, reported one at a time.
    for required in ('assetnumber', 'assettypeid'):
        if not payload.get(required):
            return error_response(ErrorCodes.VALIDATION_ERROR, f'{required} is required')

    number = payload['assetnumber']
    if Asset.query.filter_by(assetnumber=number).first():
        return error_response(
            ErrorCodes.CONFLICT,
            f"Asset with number '{number}' already exists",
            http_code=409
        )

    # Foreign key must resolve before we attempt the insert.
    type_id = payload['assettypeid']
    if AssetType.query.get(type_id) is None:
        return error_response(
            ErrorCodes.VALIDATION_ERROR,
            f"Asset type with ID {type_id} not found"
        )

    new_asset = Asset(
        assetnumber=number,
        name=payload.get('name'),
        serialnumber=payload.get('serialnumber'),
        assettypeid=type_id,
        statusid=payload.get('statusid', 1),
        locationid=payload.get('locationid'),
        businessunitid=payload.get('businessunitid'),
        mapleft=payload.get('mapleft'),
        maptop=payload.get('maptop'),
        notes=payload.get('notes')
    )
    db.session.add(new_asset)
    db.session.commit()

    return success_response(new_asset.to_dict(), message='Asset created', http_code=201)
|
||||
|
||||
|
||||
@assets_bp.route('/<int:asset_id>', methods=['PUT'])
@jwt_required()
def update_asset(asset_id: int):
    """Partially update an asset; only whitelisted fields are applied."""
    asset = Asset.query.get(asset_id)

    if asset is None:
        return error_response(
            ErrorCodes.NOT_FOUND,
            f'Asset with ID {asset_id} not found',
            http_code=404
        )

    payload = request.get_json()
    if not payload:
        return error_response(ErrorCodes.VALIDATION_ERROR, 'No data provided')

    # Renaming to a number already held by another asset is a conflict.
    if 'assetnumber' in payload and payload['assetnumber'] != asset.assetnumber:
        if Asset.query.filter_by(assetnumber=payload['assetnumber']).first():
            return error_response(
                ErrorCodes.CONFLICT,
                f"Asset with number '{payload['assetnumber']}' already exists",
                http_code=409
            )

    # Whitelist of writable columns; anything else in the payload is ignored.
    writable = (
        'assetnumber', 'name', 'serialnumber', 'assettypeid', 'statusid',
        'locationid', 'businessunitid', 'mapleft', 'maptop', 'notes', 'isactive'
    )
    for field in writable:
        if field in payload:
            setattr(asset, field, payload[field])

    db.session.commit()
    return success_response(asset.to_dict(), message='Asset updated')
|
||||
|
||||
|
||||
@assets_bp.route('/<int:asset_id>', methods=['DELETE'])
@jwt_required()
def delete_asset(asset_id: int):
    """Soft-delete an asset by clearing its isactive flag (row is kept)."""
    asset = Asset.query.get(asset_id)

    if asset is None:
        return error_response(
            ErrorCodes.NOT_FOUND,
            f'Asset with ID {asset_id} not found',
            http_code=404
        )

    asset.isactive = False
    db.session.commit()

    return success_response(message='Asset deleted')
|
||||
|
||||
|
||||
@assets_bp.route('/lookup/<assetnumber>', methods=['GET'])
@jwt_required()
def lookup_asset_by_number(assetnumber: str):
    """Resolve an active asset by its asset number.

    Useful when only the machine/asset number is known and the numeric
    asset ID is needed. Extension data is always included.
    """
    match = Asset.query.filter_by(assetnumber=assetnumber, isactive=True).first()

    if match is None:
        return error_response(
            ErrorCodes.NOT_FOUND,
            f'Asset with number {assetnumber} not found',
            http_code=404
        )

    return success_response(match.to_dict(include_type_data=True))
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Asset Relationships
|
||||
# =============================================================================
|
||||
|
||||
@assets_bp.route('/<int:asset_id>/relationships', methods=['GET'])
@jwt_required()
def get_asset_relationships(asset_id: int):
    """Return both directions of an asset's active relationships.

    'outgoing' lists rows where this asset is the source, 'incoming'
    rows where it is the target; each entry includes the counterpart
    asset and the relationship type name.
    """
    if Asset.query.get(asset_id) is None:
        return error_response(
            ErrorCodes.NOT_FOUND,
            f'Asset with ID {asset_id} not found',
            http_code=404
        )

    def serialize(rel, counterpart_attr):
        # Flatten one relationship row plus its counterpart asset.
        entry = rel.to_dict()
        counterpart = getattr(rel, counterpart_attr)
        entry[counterpart_attr] = counterpart.to_dict() if counterpart else None
        entry['relationship_type_name'] = (
            rel.relationship_type.relationshiptype if rel.relationship_type else None
        )
        return entry

    outgoing = AssetRelationship.query.filter_by(
        source_assetid=asset_id
    ).filter(AssetRelationship.isactive == True).all()

    incoming = AssetRelationship.query.filter_by(
        target_assetid=asset_id
    ).filter(AssetRelationship.isactive == True).all()

    return success_response({
        'outgoing': [serialize(rel, 'target_asset') for rel in outgoing],
        'incoming': [serialize(rel, 'source_asset') for rel in incoming]
    })
|
||||
|
||||
|
||||
@assets_bp.route('/relationships', methods=['POST'])
@jwt_required()
def create_asset_relationship():
    """Create a relationship between two assets.

    Required JSON fields: source_assetid, target_assetid,
    relationshiptypeid — all must reference existing rows.

    An *active* duplicate returns 409. If the same relationship exists
    but was soft-deleted, it is reactivated instead of being rejected
    (previously the duplicate check matched inactive rows too, so a
    deleted relationship could never be recreated).
    """
    data = request.get_json()

    if not data:
        return error_response(ErrorCodes.VALIDATION_ERROR, 'No data provided')

    # Validate required fields
    required = ['source_assetid', 'target_assetid', 'relationshiptypeid']
    for field in required:
        if not data.get(field):
            return error_response(ErrorCodes.VALIDATION_ERROR, f'{field} is required')

    source_id = data['source_assetid']
    target_id = data['target_assetid']
    type_id = data['relationshiptypeid']

    # Validate referenced rows exist
    if not Asset.query.get(source_id):
        return error_response(ErrorCodes.NOT_FOUND, f'Source asset {source_id} not found', http_code=404)
    if not Asset.query.get(target_id):
        return error_response(ErrorCodes.NOT_FOUND, f'Target asset {target_id} not found', http_code=404)
    if not RelationshipType.query.get(type_id):
        return error_response(ErrorCodes.NOT_FOUND, f'Relationship type {type_id} not found', http_code=404)

    existing = AssetRelationship.query.filter_by(
        source_assetid=source_id,
        target_assetid=target_id,
        relationshiptypeid=type_id
    ).first()

    if existing:
        if existing.isactive:
            return error_response(
                ErrorCodes.CONFLICT,
                'This relationship already exists',
                http_code=409
            )
        # Fix: reactivate a soft-deleted row rather than blocking forever
        # (DELETE only clears isactive; inserting a fresh row could also
        # violate a uniqueness constraint on the triple).
        existing.isactive = True
        if data.get('notes') is not None:
            existing.notes = data['notes']
        db.session.commit()
        return success_response(existing.to_dict(), message='Relationship created', http_code=201)

    rel = AssetRelationship(
        source_assetid=source_id,
        target_assetid=target_id,
        relationshiptypeid=type_id,
        notes=data.get('notes')
    )

    db.session.add(rel)
    db.session.commit()

    return success_response(rel.to_dict(), message='Relationship created', http_code=201)
|
||||
|
||||
|
||||
@assets_bp.route('/relationships/<int:rel_id>', methods=['DELETE'])
@jwt_required()
def delete_asset_relationship(rel_id: int):
    """Soft-delete an asset relationship by clearing its isactive flag."""
    rel = AssetRelationship.query.get(rel_id)

    if rel is None:
        return error_response(
            ErrorCodes.NOT_FOUND,
            f'Relationship with ID {rel_id} not found',
            http_code=404
        )

    rel.isactive = False
    db.session.commit()

    return success_response(message='Relationship deleted')
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Asset Communications
|
||||
# =============================================================================
|
||||
|
||||
# =============================================================================
|
||||
# Unified Asset Map
|
||||
# =============================================================================
|
||||
|
||||
@assets_bp.route('/map', methods=['GET'])
@jwt_required()
def get_assets_map():
    """
    Get all assets with map positions for unified floor map display.

    Returns active assets that have both mapleft and maptop set, plus
    the filter option lists (types, statuses, business units, locations)
    the map UI needs to render its controls.

    Query parameters:
    - assettype: comma-separated asset type names (equipment, computer, network, printer)
    - businessunitid: Filter by business unit ID (integer)
    - statusid: Filter by status ID (integer)
    - locationid: Filter by location ID (integer)
    - search: Search by assetnumber, name, or serialnumber
    """
    from shopdb.core.models import Location, BusinessUnit

    query = Asset.query.filter(
        Asset.isactive == True,
        Asset.mapleft.isnot(None),
        Asset.maptop.isnot(None)
    )

    # Filter by asset type name(s)
    if assettype := request.args.get('assettype'):
        query = query.join(AssetType).filter(
            AssetType.assettype.in_(assettype.split(','))
        )

    # Integer ID filters. Fix: a non-numeric value used to raise an
    # unhandled ValueError (HTTP 500); reject it with a 400 instead.
    int_filters = {
        'businessunitid': Asset.businessunitid,
        'statusid': Asset.statusid,
        'locationid': Asset.locationid,
    }
    for param, column in int_filters.items():
        if raw := request.args.get(param):
            try:
                query = query.filter(column == int(raw))
            except ValueError:
                return error_response(
                    ErrorCodes.VALIDATION_ERROR,
                    f'{param} must be an integer'
                )

    # Search filter
    if search := request.args.get('search'):
        query = query.filter(
            db.or_(
                Asset.assetnumber.ilike(f'%{search}%'),
                Asset.name.ilike(f'%{search}%'),
                Asset.serialnumber.ilike(f'%{search}%')
            )
        )

    # Build response with type-specific data
    data = []
    for asset in query.all():
        item = {
            'assetid': asset.assetid,
            'assetnumber': asset.assetnumber,
            'name': asset.name,
            'displayname': asset.display_name,
            'serialnumber': asset.serialnumber,
            'mapleft': asset.mapleft,
            'maptop': asset.maptop,
            'assettype': asset.assettype.assettype if asset.assettype else None,
            'assettypeid': asset.assettypeid,
            'status': asset.status.status if asset.status else None,
            'statusid': asset.statusid,
            'statuscolor': asset.status.color if asset.status else None,
            'location': asset.location.locationname if asset.location else None,
            'locationid': asset.locationid,
            'businessunit': asset.businessunit.businessunit if asset.businessunit else None,
            'businessunitid': asset.businessunitid,
            'primaryip': asset.primary_ip,
        }

        # NOTE(review): relies on Asset's private _get_extension_data();
        # consider exposing a public accessor on the model.
        type_data = asset._get_extension_data()
        if type_data:
            item['typedata'] = type_data

        data.append(item)

    # Filter option lists for the map UI
    asset_types = AssetType.query.filter(AssetType.isactive == True).all()
    statuses = AssetStatus.query.filter(AssetStatus.isactive == True).all()
    business_units = BusinessUnit.query.filter(BusinessUnit.isactive == True).all()
    locations = Location.query.filter(Location.isactive == True).all()

    return success_response({
        'assets': data,
        'total': len(data),
        'filters': {
            'assettypes': [
                {'assettypeid': t.assettypeid, 'assettype': t.assettype, 'icon': t.icon}
                for t in asset_types
            ],
            'statuses': [
                {'statusid': s.statusid, 'status': s.status, 'color': s.color}
                for s in statuses
            ],
            'businessunits': [
                {'businessunitid': bu.businessunitid, 'businessunit': bu.businessunit}
                for bu in business_units
            ],
            'locations': [
                {'locationid': loc.locationid, 'locationname': loc.locationname}
                for loc in locations
            ]
        }
    })
|
||||
|
||||
|
||||
@assets_bp.route('/<int:asset_id>/communications', methods=['GET'])
@jwt_required()
def get_asset_communications(asset_id: int):
    """List active communication records for an asset, with type names."""
    from shopdb.core.models import Communication

    if Asset.query.get(asset_id) is None:
        return error_response(
            ErrorCodes.NOT_FOUND,
            f'Asset with ID {asset_id} not found',
            http_code=404
        )

    records = Communication.query.filter_by(
        assetid=asset_id,
        isactive=True
    ).all()

    data = []
    for record in records:
        entry = record.to_dict()
        entry['comtype_name'] = record.comtype.comtype if record.comtype else None
        data.append(entry)

    return success_response(data)
|
||||
374
shopdb/core/api/collector.py
Normal file
374
shopdb/core/api/collector.py
Normal file
@@ -0,0 +1,374 @@
|
||||
"""
|
||||
PowerShell Data Collection API endpoints.
|
||||
|
||||
Compatibility layer for existing PowerShell scripts that update PC data.
|
||||
Uses API key authentication instead of JWT for automated scripts.
|
||||
"""
|
||||
|
||||
from datetime import datetime
|
||||
from functools import wraps
|
||||
from flask import Blueprint, request, current_app
|
||||
|
||||
from shopdb.extensions import db
|
||||
from shopdb.core.models import Machine, Application, InstalledApp
|
||||
from shopdb.utils.responses import success_response, error_response, ErrorCodes
|
||||
|
||||
collector_bp = Blueprint('collector', __name__)
|
||||
|
||||
|
||||
def require_api_key(f):
    """Decorator enforcing shared-secret auth for collector endpoints.

    The key is read from the 'X-API-Key' header, falling back to the
    'api_key' query parameter (kept for backward compatibility with
    existing PowerShell scripts, though query strings can end up in
    access logs). The expected key comes from the COLLECTOR_API_KEY
    config setting; a missing setting is a 500, a mismatch a 401.
    """
    @wraps(f)
    def decorated(*args, **kwargs):
        import secrets  # local import: keeps module-level deps unchanged

        api_key = request.headers.get('X-API-Key') or request.args.get('api_key')

        expected_key = current_app.config.get('COLLECTOR_API_KEY')

        if not expected_key:
            return error_response(
                ErrorCodes.INTERNAL_ERROR,
                'Collector API key not configured',
                http_code=500
            )

        # Fix: '!=' compares secrets in non-constant time, leaking timing
        # information; compare_digest avoids that side channel.
        if not api_key or not secrets.compare_digest(str(api_key), str(expected_key)):
            return error_response(
                ErrorCodes.UNAUTHORIZED,
                'Invalid API key',
                http_code=401
            )

        return f(*args, **kwargs)
    return decorated
|
||||
|
||||
|
||||
@collector_bp.route('/pc', methods=['POST'])
@require_api_key
def update_pc_info():
    """Update PC information reported by the PowerShell collection script.

    Expected JSON payload (hostname required, the rest optional):
    {
        "hostname": "PC-1234",
        "osname": "Windows 10 Enterprise",
        "osversion": "10.0.19045",
        "lastboottime": "2024-01-15T08:30:00",
        "currentuser": "jsmith",
        "ipaddress": "10.1.2.100",
        "macaddress": "00:11:22:33:44:55",
        "serialnumber": "ABC123",
        "manufacturer": "Dell",
        "model": "OptiPlex 7090"
    }
    """
    payload = request.get_json()

    if not payload:
        return error_response(ErrorCodes.VALIDATION_ERROR, 'No data provided')

    hostname = payload.get('hostname')
    if not hostname:
        return error_response(ErrorCodes.VALIDATION_ERROR, 'hostname is required')

    # Look up the PC by hostname first, then by machine number as a
    # fallback for sites that register PCs under that identifier.
    pc = Machine.query.filter(
        Machine.hostname.ilike(hostname),
        Machine.pctypeid.isnot(None)
    ).first()

    if pc is None:
        pc = Machine.query.filter(
            Machine.machinenumber.ilike(hostname),
            Machine.pctypeid.isnot(None)
        ).first()

    if pc is None:
        return error_response(
            ErrorCodes.NOT_FOUND,
            f'PC with hostname {hostname} not found',
            http_code=404
        )

    # Always stamp the collection time.
    updates = {'lastzabbixsync': datetime.utcnow()}

    boot_raw = payload.get('lastboottime')
    if boot_raw:
        try:
            updates['lastboottime'] = datetime.fromisoformat(
                boot_raw.replace('Z', '+00:00')
            )
        except ValueError:
            # Unparseable timestamps are ignored rather than failing the sync.
            pass

    user = payload.get('currentuser')
    if user:
        # Preserve the previous user before recording the new one.
        if pc.currentuserid != user:
            updates['lastuserid'] = pc.currentuserid
        updates['currentuserid'] = user

    if payload.get('serialnumber'):
        updates['serialnumber'] = payload['serialnumber']

    for attr, value in updates.items():
        if hasattr(pc, attr):
            setattr(pc, attr, value)

    db.session.commit()

    return success_response({
        'machineid': pc.machineid,
        'hostname': pc.hostname,
        'updated': True
    }, message='PC info updated')
|
||||
|
||||
|
||||
@collector_bp.route('/apps', methods=['POST'])
@require_api_key
def update_installed_apps():
    """Sync the installed-application list for one PC.

    Expected JSON payload:
    {
        "hostname": "PC-1234",
        "apps": [
            {
                "appname": "Microsoft Office",
                "version": "16.0.14326.20454",
                "installdate": "2024-01-10"
            },
            ...
        ]
    }

    Apps not present in the Application table are skipped; existing
    install records get their version refreshed, new ones are created.
    """
    payload = request.get_json()

    if not payload:
        return error_response(ErrorCodes.VALIDATION_ERROR, 'No data provided')

    hostname = payload.get('hostname')
    if not hostname:
        return error_response(ErrorCodes.VALIDATION_ERROR, 'hostname is required')

    reported_apps = payload.get('apps', [])
    if not reported_apps:
        return error_response(ErrorCodes.VALIDATION_ERROR, 'apps list is required')

    pc = Machine.query.filter(
        Machine.hostname.ilike(hostname),
        Machine.pctypeid.isnot(None)
    ).first()

    if pc is None:
        return error_response(
            ErrorCodes.NOT_FOUND,
            f'PC with hostname {hostname} not found',
            http_code=404
        )

    counts = {'created': 0, 'updated': 0, 'skipped': 0}

    for entry in reported_apps:
        name = entry.get('appname')
        if not name:
            counts['skipped'] += 1
            continue

        # Only applications already tracked in the database are recorded.
        app = Application.query.filter(Application.appname.ilike(name)).first()
        if app is None:
            counts['skipped'] += 1
            continue

        record = InstalledApp.query.filter_by(
            machineid=pc.machineid,
            appid=app.appid
        ).first()

        if record is None:
            db.session.add(InstalledApp(
                machineid=pc.machineid,
                appid=app.appid,
                installedversion=entry.get('version'),
                installdate=datetime.utcnow()
            ))
            counts['created'] += 1
        else:
            reported_version = entry.get('version')
            if reported_version and record.installedversion != reported_version:
                record.installedversion = reported_version
                record.modifieddate = datetime.utcnow()
                counts['updated'] += 1

    db.session.commit()

    return success_response({
        'hostname': hostname,
        'machineid': pc.machineid,
        'created': counts['created'],
        'updated': counts['updated'],
        'skipped': counts['skipped']
    }, message='Installed apps updated')
|
||||
|
||||
|
||||
@collector_bp.route('/heartbeat', methods=['POST'])
@require_api_key
def pc_heartbeat():
    """Record PC online status / heartbeat.

    Accepts either a single form {"hostname": "PC-1234"} or a batch form
    {"hostnames": ["PC-1234", "PC-1235", "PC-1236"]}. Each matched PC
    gets its lastzabbixsync timestamp refreshed; unmatched hostnames are
    reported back in 'not_found'.
    """
    payload = request.get_json()

    if not payload:
        return error_response(ErrorCodes.VALIDATION_ERROR, 'No data provided')

    targets = payload.get('hostnames', [])
    if not targets and payload.get('hostname'):
        targets = [payload['hostname']]

    if not targets:
        return error_response(ErrorCodes.VALIDATION_ERROR, 'hostname or hostnames required')

    updated = 0
    missing = []

    for name in targets:
        pc = Machine.query.filter(
            Machine.hostname.ilike(name),
            Machine.pctypeid.isnot(None)
        ).first()

        if pc is None:
            missing.append(name)
        else:
            pc.lastzabbixsync = datetime.utcnow()
            updated += 1

    db.session.commit()

    return success_response({
        'updated': updated,
        'not_found': missing,
        'timestamp': datetime.utcnow().isoformat()
    }, message=f'{updated} PC(s) heartbeat recorded')
|
||||
|
||||
|
||||
@collector_bp.route('/bulk', methods=['POST'])
@require_api_key
def bulk_update():
    """
    Bulk update multiple PCs at once.

    Expected JSON payload:
    {
        "pcs": [
            {
                "hostname": "PC-1234",
                "currentuser": "jsmith",
                "lastboottime": "2024-01-15T08:30:00"
            },
            ...
        ]
    }
    """
    data = request.get_json()
    if not data:
        return error_response(ErrorCodes.VALIDATION_ERROR, 'No data provided')

    pc_list = data.get('pcs', [])
    if not pc_list:
        return error_response(ErrorCodes.VALIDATION_ERROR, 'pcs list is required')

    ok_count = 0
    missing = []
    failures = []

    for entry in pc_list:
        name = entry.get('hostname')
        if not name:
            # Entries without a hostname cannot be matched; skip silently.
            continue

        machine = Machine.query.filter(
            Machine.hostname.ilike(name),
            Machine.pctypeid.isnot(None)
        ).first()
        if machine is None:
            missing.append(name)
            continue

        try:
            machine.lastzabbixsync = datetime.utcnow()

            # When the logged-in user changed, remember the previous one.
            if entry.get('currentuser'):
                if machine.currentuserid != entry['currentuser']:
                    machine.lastuserid = machine.currentuserid
                    machine.currentuserid = entry['currentuser']

            if entry.get('lastboottime'):
                try:
                    machine.lastboottime = datetime.fromisoformat(
                        entry['lastboottime'].replace('Z', '+00:00')
                    )
                except ValueError:
                    # Unparseable timestamp: keep the existing value.
                    pass

            ok_count += 1

        except Exception as e:
            failures.append({'hostname': name, 'error': str(e)})

    db.session.commit()

    return success_response({
        'updated': ok_count,
        'not_found': missing,
        'errors': failures,
        'timestamp': datetime.utcnow().isoformat()
    }, message=f'{ok_count} PC(s) updated')
|
||||
|
||||
|
||||
@collector_bp.route('/status', methods=['GET'])
@require_api_key
def collector_status():
    """Check collector API status and configuration."""
    # Static catalogue of the collector endpoints, for client discovery.
    endpoints = [
        'POST /api/collector/pc',
        'POST /api/collector/apps',
        'POST /api/collector/heartbeat',
        'POST /api/collector/bulk',
        'GET /api/collector/status'
    ]
    return success_response({
        'status': 'ok',
        'timestamp': datetime.utcnow().isoformat(),
        'endpoints': endpoints
    })
|
||||
161
shopdb/core/api/employees.py
Normal file
161
shopdb/core/api/employees.py
Normal file
@@ -0,0 +1,161 @@
|
||||
"""Employee lookup API endpoints."""
|
||||
|
||||
from flask import Blueprint, request
|
||||
from shopdb.utils.responses import success_response, error_response, ErrorCodes
|
||||
|
||||
employees_bp = Blueprint('employees', __name__)
|
||||
|
||||
|
||||
@employees_bp.route('/search', methods=['GET'])
def search_employees():
    """
    Search employees by name.

    Query parameters:
    - q: Search query (searches first name, last name, or SSO)
    - limit: Max results (default 10, capped at 50)
    """
    query = request.args.get('q', '').strip()
    try:
        limit = min(int(request.args.get('limit', 10)), 50)
    except ValueError:
        # A non-numeric limit should not produce a 500; fall back to the default.
        limit = 10

    if len(query) < 2:
        return error_response(
            ErrorCodes.VALIDATION_ERROR,
            'Search query must be at least 2 characters'
        )

    try:
        import os
        import pymysql

        # Connection settings come from the environment; the previous
        # hard-coded dev credentials remain as fallbacks so existing
        # deployments keep working.
        conn = pymysql.connect(
            host=os.environ.get('EMPLOYEE_DB_HOST', 'localhost'),
            user=os.environ.get('EMPLOYEE_DB_USER', 'root'),
            password=os.environ.get('EMPLOYEE_DB_PASSWORD', 'rootpassword'),
            database=os.environ.get('EMPLOYEE_DB_NAME', 'wjf_employees'),
            cursorclass=pymysql.cursors.DictCursor
        )
        try:
            with conn.cursor() as cur:
                # Search by first name, last name, or SSO (parameterized).
                cur.execute('''
                    SELECT SSO, First_Name, Last_Name, Team, Role, Picture
                    FROM employees
                    WHERE First_Name LIKE %s
                       OR Last_Name LIKE %s
                       OR CAST(SSO AS CHAR) LIKE %s
                    ORDER BY Last_Name, First_Name
                    LIMIT %s
                ''', (f'%{query}%', f'%{query}%', f'%{query}%', limit))

                employees = cur.fetchall()
        finally:
            # Always release the connection — the original leaked it when
            # the query raised.
            conn.close()

        return success_response(employees)

    except Exception as e:
        return error_response(
            ErrorCodes.DATABASE_ERROR,
            f'Employee lookup failed: {str(e)}',
            http_code=500
        )
|
||||
|
||||
|
||||
@employees_bp.route('/lookup/<sso>', methods=['GET'])
def lookup_employee(sso):
    """
    Look up a single employee by SSO.

    Returns 404 when no employee row matches the numeric SSO.
    """
    if not sso.isdigit():
        return error_response(
            ErrorCodes.VALIDATION_ERROR,
            'SSO must be numeric'
        )

    try:
        import os
        import pymysql

        # Credentials are environment-configurable; hard-coded dev values
        # remain as fallbacks for backward compatibility.
        conn = pymysql.connect(
            host=os.environ.get('EMPLOYEE_DB_HOST', 'localhost'),
            user=os.environ.get('EMPLOYEE_DB_USER', 'root'),
            password=os.environ.get('EMPLOYEE_DB_PASSWORD', 'rootpassword'),
            database=os.environ.get('EMPLOYEE_DB_NAME', 'wjf_employees'),
            cursorclass=pymysql.cursors.DictCursor
        )
        try:
            with conn.cursor() as cur:
                cur.execute(
                    'SELECT SSO, First_Name, Last_Name, Team, Role, Picture FROM employees WHERE SSO = %s',
                    (int(sso),)
                )
                employee = cur.fetchone()
        finally:
            # Always release the connection — the original leaked it when
            # the query raised.
            conn.close()

        if not employee:
            return error_response(
                ErrorCodes.NOT_FOUND,
                f'Employee with SSO {sso} not found',
                http_code=404
            )

        return success_response(employee)

    except Exception as e:
        return error_response(
            ErrorCodes.DATABASE_ERROR,
            f'Employee lookup failed: {str(e)}',
            http_code=500
        )
|
||||
|
||||
|
||||
@employees_bp.route('/lookup', methods=['GET'])
def lookup_employees():
    """
    Look up multiple employees by SSO list.

    Query parameters:
    - sso: Comma-separated list of SSOs (non-numeric entries are ignored)
    """
    sso_list = request.args.get('sso', '')
    ssos = [s.strip() for s in sso_list.split(',') if s.strip().isdigit()]

    if not ssos:
        return error_response(
            ErrorCodes.VALIDATION_ERROR,
            'At least one valid SSO is required'
        )

    try:
        import os
        import pymysql

        # Credentials are environment-configurable; hard-coded dev values
        # remain as fallbacks for backward compatibility.
        conn = pymysql.connect(
            host=os.environ.get('EMPLOYEE_DB_HOST', 'localhost'),
            user=os.environ.get('EMPLOYEE_DB_USER', 'root'),
            password=os.environ.get('EMPLOYEE_DB_PASSWORD', 'rootpassword'),
            database=os.environ.get('EMPLOYEE_DB_NAME', 'wjf_employees'),
            cursorclass=pymysql.cursors.DictCursor
        )
        try:
            with conn.cursor() as cur:
                placeholders = ','.join(['%s'] * len(ssos))
                cur.execute(
                    f'SELECT SSO, First_Name, Last_Name, Team, Role, Picture FROM employees WHERE SSO IN ({placeholders})',
                    [int(s) for s in ssos]
                )
                employees = cur.fetchall()
        finally:
            # Always release the connection — the original leaked it when
            # the query raised.
            conn.close()

        # Build a display string; guard against NULL name columns, which
        # previously crashed on .strip() and surfaced as a 500.
        names = ', '.join(
            f"{(e['First_Name'] or '').strip()} {(e['Last_Name'] or '').strip()}"
            for e in employees
        )

        return success_response({
            'employees': employees,
            'names': names
        })

    except Exception as e:
        return error_response(
            ErrorCodes.DATABASE_ERROR,
            f'Employee lookup failed: {str(e)}',
            http_code=500
        )
|
||||
@@ -1,6 +1,18 @@
|
||||
"""Machines API endpoints."""
|
||||
"""
|
||||
Machines API endpoints.
|
||||
|
||||
from flask import Blueprint, request
|
||||
DEPRECATED: This API is deprecated and will be removed in a future version.
|
||||
Please migrate to the new asset-based APIs:
|
||||
- /api/assets - Unified asset queries
|
||||
- /api/equipment - Equipment CRUD
|
||||
- /api/computers - Computers CRUD
|
||||
- /api/network - Network devices CRUD
|
||||
- /api/printers - Printers CRUD
|
||||
"""
|
||||
|
||||
import logging
|
||||
from functools import wraps
|
||||
from flask import Blueprint, request, g
|
||||
from flask_jwt_extended import jwt_required, current_user
|
||||
|
||||
from shopdb.extensions import db
|
||||
@@ -14,11 +26,40 @@ from shopdb.utils.responses import (
|
||||
)
|
||||
from shopdb.utils.pagination import get_pagination_params, paginate_query
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
machines_bp = Blueprint('machines', __name__)
|
||||
|
||||
|
||||
def add_deprecation_headers(f):
    """Decorator to add deprecation headers to responses."""
    @wraps(f)
    def decorated_function(*args, **kwargs):
        response = f(*args, **kwargs)

        # Only decorate objects that actually expose headers (Flask
        # Response objects); plain tuples/dicts pass through untouched.
        if hasattr(response, 'headers'):
            headers = response.headers
            headers['X-Deprecated'] = 'true'
            headers['X-Deprecated-Message'] = (
                'This endpoint is deprecated. '
                'Please migrate to /api/assets, /api/equipment, /api/computers, /api/network, or /api/printers.'
            )
            # Target sunset date for the legacy machines API.
            headers['Sunset'] = '2026-12-31'

        # Emit the deprecation warning at most once per request.
        if not getattr(g, '_deprecation_logged', False):
            logger.warning(
                f"Deprecated /api/machines endpoint called: {request.method} {request.path}"
            )
            g._deprecation_logged = True

        return response
    return decorated_function
|
||||
|
||||
|
||||
@machines_bp.route('', methods=['GET'])
|
||||
@jwt_required(optional=True)
|
||||
@add_deprecation_headers
|
||||
def list_machines():
|
||||
"""
|
||||
List all machines with filtering and pagination.
|
||||
@@ -149,6 +190,7 @@ def list_machines():
|
||||
|
||||
@machines_bp.route('/<int:machine_id>', methods=['GET'])
|
||||
@jwt_required(optional=True)
|
||||
@add_deprecation_headers
|
||||
def get_machine(machine_id: int):
|
||||
"""Get a single machine by ID."""
|
||||
machine = Machine.query.get(machine_id)
|
||||
@@ -180,6 +222,7 @@ def get_machine(machine_id: int):
|
||||
|
||||
@machines_bp.route('', methods=['POST'])
|
||||
@jwt_required()
|
||||
@add_deprecation_headers
|
||||
def create_machine():
|
||||
"""Create a new machine."""
|
||||
data = request.get_json()
|
||||
@@ -227,6 +270,7 @@ def create_machine():
|
||||
|
||||
@machines_bp.route('/<int:machine_id>', methods=['PUT'])
|
||||
@jwt_required()
|
||||
@add_deprecation_headers
|
||||
def update_machine(machine_id: int):
|
||||
"""Update an existing machine."""
|
||||
machine = Machine.query.get(machine_id)
|
||||
@@ -274,6 +318,7 @@ def update_machine(machine_id: int):
|
||||
|
||||
@machines_bp.route('/<int:machine_id>', methods=['DELETE'])
|
||||
@jwt_required()
|
||||
@add_deprecation_headers
|
||||
def delete_machine(machine_id: int):
|
||||
"""Soft delete a machine."""
|
||||
machine = Machine.query.get(machine_id)
|
||||
@@ -293,6 +338,7 @@ def delete_machine(machine_id: int):
|
||||
|
||||
@machines_bp.route('/<int:machine_id>/communications', methods=['GET'])
|
||||
@jwt_required()
|
||||
@add_deprecation_headers
|
||||
def get_machine_communications(machine_id: int):
|
||||
"""Get all communications for a machine."""
|
||||
machine = Machine.query.get(machine_id)
|
||||
@@ -310,6 +356,7 @@ def get_machine_communications(machine_id: int):
|
||||
|
||||
@machines_bp.route('/<int:machine_id>/communication', methods=['PUT'])
|
||||
@jwt_required()
|
||||
@add_deprecation_headers
|
||||
def update_machine_communication(machine_id: int):
|
||||
"""Update machine communication (IP address)."""
|
||||
from shopdb.core.models.communication import Communication, CommunicationType
|
||||
@@ -364,6 +411,7 @@ def update_machine_communication(machine_id: int):
|
||||
|
||||
@machines_bp.route('/<int:machine_id>/relationships', methods=['GET'])
|
||||
@jwt_required(optional=True)
|
||||
@add_deprecation_headers
|
||||
def get_machine_relationships(machine_id: int):
|
||||
"""Get all relationships for a machine (both parent and child)."""
|
||||
machine = Machine.query.get(machine_id)
|
||||
@@ -429,6 +477,7 @@ def get_machine_relationships(machine_id: int):
|
||||
|
||||
@machines_bp.route('/<int:machine_id>/relationships', methods=['POST'])
|
||||
@jwt_required()
|
||||
@add_deprecation_headers
|
||||
def create_machine_relationship(machine_id: int):
|
||||
"""Create a relationship for a machine."""
|
||||
machine = Machine.query.get(machine_id)
|
||||
@@ -504,6 +553,7 @@ def create_machine_relationship(machine_id: int):
|
||||
|
||||
@machines_bp.route('/relationships/<int:relationship_id>', methods=['DELETE'])
|
||||
@jwt_required()
|
||||
@add_deprecation_headers
|
||||
def delete_machine_relationship(relationship_id: int):
|
||||
"""Delete a machine relationship."""
|
||||
relationship = MachineRelationship.query.get(relationship_id)
|
||||
@@ -523,6 +573,7 @@ def delete_machine_relationship(relationship_id: int):
|
||||
|
||||
@machines_bp.route('/relationshiptypes', methods=['GET'])
|
||||
@jwt_required(optional=True)
|
||||
@add_deprecation_headers
|
||||
def list_relationship_types():
|
||||
"""List all relationship types."""
|
||||
types = RelationshipType.query.order_by(RelationshipType.relationshiptype).all()
|
||||
@@ -535,6 +586,7 @@ def list_relationship_types():
|
||||
|
||||
@machines_bp.route('/relationshiptypes', methods=['POST'])
|
||||
@jwt_required()
|
||||
@add_deprecation_headers
|
||||
def create_relationship_type():
|
||||
"""Create a new relationship type."""
|
||||
data = request.get_json()
|
||||
|
||||
573
shopdb/core/api/reports.py
Normal file
573
shopdb/core/api/reports.py
Normal file
@@ -0,0 +1,573 @@
|
||||
"""Reports API endpoints."""
|
||||
|
||||
import csv
|
||||
import io
|
||||
from datetime import datetime, timedelta
|
||||
from flask import Blueprint, request, Response
|
||||
from flask_jwt_extended import jwt_required
|
||||
|
||||
from shopdb.extensions import db
|
||||
from shopdb.core.models import (
|
||||
Asset, AssetType, AssetStatus, Machine, MachineStatus,
|
||||
Application, KnowledgeBase, InstalledApp
|
||||
)
|
||||
from shopdb.utils.responses import success_response, error_response, ErrorCodes
|
||||
|
||||
reports_bp = Blueprint('reports', __name__)
|
||||
|
||||
|
||||
def generate_csv(data: list, columns: list) -> str:
    """Render a list of row dicts as CSV text, with *columns* as the header.

    Missing keys are emitted as empty cells.
    """
    buffer = io.StringIO()
    writer = csv.writer(buffer)
    writer.writerow(columns)
    writer.writerows(
        [row.get(column, '') for column in columns]
        for row in data
    )
    return buffer.getvalue()
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Report: Equipment by Type
|
||||
# =============================================================================
|
||||
|
||||
@reports_bp.route('/equipment-by-type', methods=['GET'])
@jwt_required()
def equipment_by_type():
    """
    Report: Equipment count grouped by equipment type.

    Query parameters:
    - businessunitid: Filter by business unit
    - format: 'json' (default) or 'csv'
    """
    # The equipment plugin is optional; report 404 when it is absent.
    try:
        from plugins.equipment.models import Equipment, EquipmentType
    except ImportError:
        return error_response(
            ErrorCodes.NOT_FOUND,
            'Equipment plugin not installed',
            http_code=404
        )

    counts = db.session.query(
        EquipmentType.equipmenttype,
        EquipmentType.description,
        db.func.count(Equipment.equipmentid).label('count')
    ).outerjoin(
        Equipment, Equipment.equipmenttypeid == EquipmentType.equipmenttypeid
    ).outerjoin(
        Asset, Asset.assetid == Equipment.assetid
    ).filter(
        # Keep types with zero equipment (outer join rows have NULL asset).
        db.or_(Asset.isactive == True, Asset.assetid.is_(None)),
        EquipmentType.isactive == True
    )

    # Optional business-unit scoping.
    if bu_id := request.args.get('businessunitid'):
        counts = counts.filter(Asset.businessunitid == int(bu_id))

    rows = counts.group_by(
        EquipmentType.equipmenttypeid,
        EquipmentType.equipmenttype,
        EquipmentType.description
    ).order_by(EquipmentType.equipmenttype).all()

    data = [
        {
            'equipmenttype': row.equipmenttype,
            'description': row.description or '',
            'count': row.count
        }
        for row in rows
    ]
    total = sum(entry['count'] for entry in data)

    if request.args.get('format') == 'csv':
        return Response(
            generate_csv(data, ['equipmenttype', 'description', 'count']),
            mimetype='text/csv',
            headers={'Content-Disposition': 'attachment; filename=equipment_by_type.csv'}
        )

    return success_response({
        'report': 'equipment_by_type',
        'generated': datetime.utcnow().isoformat(),
        'data': data,
        'total': total
    })
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Report: Assets by Status
|
||||
# =============================================================================
|
||||
|
||||
@reports_bp.route('/assets-by-status', methods=['GET'])
@jwt_required()
def assets_by_status():
    """
    Report: Asset count grouped by status.

    Query parameters:
    - assettypeid: Filter by asset type
    - businessunitid: Filter by business unit
    - format: 'json' (default) or 'csv'
    """
    counts = db.session.query(
        AssetStatus.status,
        AssetStatus.color,
        db.func.count(Asset.assetid).label('count')
    ).outerjoin(
        Asset, Asset.statusid == AssetStatus.statusid
    ).filter(
        # Keep statuses with zero assets (outer join rows have NULL asset).
        db.or_(Asset.isactive == True, Asset.assetid.is_(None)),
        AssetStatus.isactive == True
    )

    # Optional scoping filters from the query string.
    if type_id := request.args.get('assettypeid'):
        counts = counts.filter(Asset.assettypeid == int(type_id))
    if bu_id := request.args.get('businessunitid'):
        counts = counts.filter(Asset.businessunitid == int(bu_id))

    rows = counts.group_by(
        AssetStatus.statusid,
        AssetStatus.status,
        AssetStatus.color
    ).order_by(AssetStatus.status).all()

    data = [
        {'status': row.status, 'color': row.color or '', 'count': row.count}
        for row in rows
    ]
    total = sum(entry['count'] for entry in data)

    if request.args.get('format') == 'csv':
        # CSV omits the color column by design.
        return Response(
            generate_csv(data, ['status', 'count']),
            mimetype='text/csv',
            headers={'Content-Disposition': 'attachment; filename=assets_by_status.csv'}
        )

    return success_response({
        'report': 'assets_by_status',
        'generated': datetime.utcnow().isoformat(),
        'data': data,
        'total': total
    })
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Report: KB Popularity
|
||||
# =============================================================================
|
||||
|
||||
@reports_bp.route('/kb-popularity', methods=['GET'])
@jwt_required()
def kb_popularity():
    """
    Report: Most clicked knowledge base articles.

    Query parameters:
    - limit: Number of results (default 20, capped at 100)
    - format: 'json' (default) or 'csv'
    """
    limit = min(int(request.args.get('limit', 20)), 100)

    top_articles = (
        KnowledgeBase.query
        .filter(KnowledgeBase.isactive == True)
        .order_by(KnowledgeBase.clicks.desc())
        .limit(limit)
        .all()
    )

    data = [
        {
            'linkid': article.linkid,
            'shortdescription': article.shortdescription,
            'application': article.application.appname if article.application else None,
            'clicks': article.clicks or 0,
            'linkurl': article.linkurl
        }
        for article in top_articles
    ]

    if request.args.get('format') == 'csv':
        return Response(
            generate_csv(data, ['linkid', 'shortdescription', 'application', 'clicks', 'linkurl']),
            mimetype='text/csv',
            headers={'Content-Disposition': 'attachment; filename=kb_popularity.csv'}
        )

    return success_response({
        'report': 'kb_popularity',
        'generated': datetime.utcnow().isoformat(),
        'data': data,
        'total': len(data)
    })
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Report: Warranty Status
|
||||
# =============================================================================
|
||||
|
||||
@reports_bp.route('/warranty-status', methods=['GET'])
@jwt_required()
def warranty_status():
    """
    Report: Assets by warranty expiration status.

    Categories: Expired, Expiring Soon (90 days), Valid, No Warranty Data

    Query parameters:
    - assettypeid: Filter by asset type
    - format: 'json' (default) or 'csv'
    """
    now = datetime.utcnow()
    expiring_threshold = now + timedelta(days=90)

    # Warranty data currently comes from the optional equipment plugin.
    # NOTE: the previous version also imported plugins.computers.models
    # here without using it, which forced the "no data" fallback whenever
    # the computers plugin was missing even though equipment data existed.
    try:
        from plugins.equipment.models import Equipment

        equipment_query = db.session.query(
            Asset.assetid,
            Asset.assetnumber,
            Asset.name,
            Equipment.warrantyenddate
        ).join(Equipment, Equipment.assetid == Asset.assetid
        ).filter(Asset.isactive == True)

        if type_id := request.args.get('assettypeid'):
            equipment_query = equipment_query.filter(Asset.assettypeid == int(type_id))

        equipment_data = equipment_query.all()

        expired = []
        expiring_soon = []
        valid = []
        no_data = []

        # Bucket each asset by where its warranty end date falls.
        for row in equipment_data:
            item = {
                'assetid': row.assetid,
                'assetnumber': row.assetnumber,
                'name': row.name,
                'warrantyenddate': row.warrantyenddate.isoformat() if row.warrantyenddate else None
            }

            if row.warrantyenddate is None:
                no_data.append(item)
            elif row.warrantyenddate < now:
                expired.append(item)
            elif row.warrantyenddate < expiring_threshold:
                expiring_soon.append(item)
            else:
                valid.append(item)

        data = {
            'expired': {'count': len(expired), 'items': expired},
            'expiringsoon': {'count': len(expiring_soon), 'items': expiring_soon},
            'valid': {'count': len(valid), 'items': valid},
            'nodata': {'count': len(no_data), 'items': no_data}
        }

    except (ImportError, AttributeError):
        # Fallback: no warranty data available (plugin not installed).
        data = {
            'expired': {'count': 0, 'items': []},
            'expiringsoon': {'count': 0, 'items': []},
            'valid': {'count': 0, 'items': []},
            'nodata': {'count': 0, 'items': []}
        }

    if request.args.get('format') == 'csv':
        # Flatten the per-category structure into one row per asset.
        flat_data = []
        for status, info in data.items():
            for item in info['items']:
                item['warrantystatus'] = status
                flat_data.append(item)
        csv_data = generate_csv(flat_data, ['assetid', 'assetnumber', 'name', 'warrantyenddate', 'warrantystatus'])
        return Response(
            csv_data,
            mimetype='text/csv',
            headers={'Content-Disposition': 'attachment; filename=warranty_status.csv'}
        )

    return success_response({
        'report': 'warranty_status',
        'generated': datetime.utcnow().isoformat(),
        'data': data,
        'summary': {
            'expired': data['expired']['count'],
            'expiringsoon': data['expiringsoon']['count'],
            'valid': data['valid']['count'],
            'nodata': data['nodata']['count']
        }
    })
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Report: Software Compliance
|
||||
# =============================================================================
|
||||
|
||||
@reports_bp.route('/software-compliance', methods=['GET'])
@jwt_required()
def software_compliance():
    """
    Report: Required applications vs installed (per PC).

    Shows which PCs have required applications installed.

    Query parameters:
    - appid: Filter to specific application
    - format: 'json' (default) or 'csv'
    """
    # Get required applications
    required_apps = Application.query.filter(
        Application.isactive == True,
        Application.isrequired == True
    ).all()

    if not required_apps:
        return success_response({
            'report': 'software_compliance',
            'generated': datetime.utcnow().isoformat(),
            'data': [],
            'message': 'No required applications defined'
        })

    app_filter = request.args.get('appid')

    # The PC population is the same for every app — compute it once
    # instead of re-querying inside the loop as before.
    total_pcs = Machine.query.filter(
        Machine.isactive == True,
        Machine.pctypeid.isnot(None)
    ).count()

    data = []
    for app in required_apps:
        if app_filter and str(app.appid) != app_filter:
            continue

        # Count PCs with this app installed. Restrict to PCs (pctypeid
        # set) so the numerator matches the total_pcs denominator; the
        # previous version counted installs on any active machine, which
        # could report > 100% compliance.
        installed_count = db.session.query(InstalledApp).join(
            Machine, Machine.machineid == InstalledApp.machineid
        ).filter(
            InstalledApp.appid == app.appid,
            Machine.isactive == True,
            Machine.pctypeid.isnot(None)
        ).count()

        # List (up to 100) PCs missing the app.
        compliant_pc_ids = db.session.query(InstalledApp.machineid).filter(
            InstalledApp.appid == app.appid
        ).subquery()

        non_compliant_pcs = Machine.query.filter(
            Machine.isactive == True,
            Machine.pctypeid.isnot(None),
            ~Machine.machineid.in_(compliant_pc_ids)
        ).limit(100).all()

        compliance_rate = (installed_count / total_pcs * 100) if total_pcs > 0 else 0

        data.append({
            'appid': app.appid,
            'appname': app.appname,
            'totalpcs': total_pcs,
            'installed': installed_count,
            'notinstalled': total_pcs - installed_count,
            'compliancerate': round(compliance_rate, 1),
            'noncompliantpcs': [
                {'machineid': pc.machineid, 'hostname': pc.hostname or pc.machinenumber}
                for pc in non_compliant_pcs
            ]
        })

    if request.args.get('format') == 'csv':
        # Simplified CSV (no nested non-compliant PC lists).
        csv_rows = [
            {
                'appid': item['appid'],
                'appname': item['appname'],
                'totalpcs': item['totalpcs'],
                'installed': item['installed'],
                'notinstalled': item['notinstalled'],
                'compliancerate': item['compliancerate']
            }
            for item in data
        ]
        csv_data = generate_csv(csv_rows, ['appid', 'appname', 'totalpcs', 'installed', 'notinstalled', 'compliancerate'])
        return Response(
            csv_data,
            mimetype='text/csv',
            headers={'Content-Disposition': 'attachment; filename=software_compliance.csv'}
        )

    return success_response({
        'report': 'software_compliance',
        'generated': datetime.utcnow().isoformat(),
        'data': data,
        'total': len(data)
    })
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Report: Asset Inventory Summary
|
||||
# =============================================================================
|
||||
|
||||
def _apply_inventory_filters(query, *, location=True):
    """Apply businessunit/location query-string filters to an aggregate query.

    The by-location breakdown does not take a locationid filter, so callers
    can disable it with location=False.
    """
    if bu_id := request.args.get('businessunitid'):
        query = query.filter(Asset.businessunitid == int(bu_id))
    if location and (loc_id := request.args.get('locationid')):
        query = query.filter(Asset.locationid == int(loc_id))
    return query


@reports_bp.route('/asset-inventory', methods=['GET'])
@jwt_required()
def asset_inventory():
    """
    Report: Complete asset inventory summary.

    Query parameters:
    - businessunitid: Filter by business unit
    - locationid: Filter by location
    - format: 'json' (default) or 'csv'
    """
    # By asset type
    by_type = db.session.query(
        AssetType.assettype,
        db.func.count(Asset.assetid).label('count')
    ).outerjoin(Asset, Asset.assettypeid == AssetType.assettypeid
    ).filter(
        db.or_(Asset.isactive == True, Asset.assetid.is_(None)),
        AssetType.isactive == True
    )
    by_type = _apply_inventory_filters(by_type).group_by(AssetType.assettype).all()

    # By status
    by_status = db.session.query(
        AssetStatus.status,
        AssetStatus.color,
        db.func.count(Asset.assetid).label('count')
    ).outerjoin(Asset, Asset.statusid == AssetStatus.statusid
    ).filter(
        db.or_(Asset.isactive == True, Asset.assetid.is_(None)),
        AssetStatus.isactive == True
    )
    by_status = _apply_inventory_filters(by_status).group_by(
        AssetStatus.status, AssetStatus.color
    ).all()

    # By location (only the business-unit filter applies here)
    from shopdb.core.models import Location
    by_location = db.session.query(
        Location.locationname,
        db.func.count(Asset.assetid).label('count')
    ).outerjoin(Asset, Asset.locationid == Location.locationid
    ).filter(
        db.or_(Asset.isactive == True, Asset.assetid.is_(None)),
        Location.isactive == True
    )
    by_location = _apply_inventory_filters(by_location, location=False).group_by(
        Location.locationname
    ).all()

    data = {
        'bytype': [{'type': r.assettype, 'count': r.count} for r in by_type],
        'bystatus': [{'status': r.status, 'color': r.color, 'count': r.count} for r in by_status],
        'bylocation': [{'location': r.locationname, 'count': r.count} for r in by_location]
    }

    total = sum(r['count'] for r in data['bytype'])

    if request.args.get('format') == 'csv':
        # Multi-section CSV: one header/rows group per breakdown.
        csv_output = io.StringIO()

        csv_output.write("Asset Inventory Report\n")
        csv_output.write(f"Generated: {datetime.utcnow().isoformat()}\n\n")

        csv_output.write("By Type\n")
        csv_output.write("Type,Count\n")
        for r in data['bytype']:
            csv_output.write(f"{r['type']},{r['count']}\n")

        csv_output.write("\nBy Status\n")
        csv_output.write("Status,Count\n")
        for r in data['bystatus']:
            csv_output.write(f"{r['status']},{r['count']}\n")

        csv_output.write("\nBy Location\n")
        csv_output.write("Location,Count\n")
        for r in data['bylocation']:
            csv_output.write(f"{r['location']},{r['count']}\n")

        return Response(
            csv_output.getvalue(),
            mimetype='text/csv',
            headers={'Content-Disposition': 'attachment; filename=asset_inventory.csv'}
        )

    return success_response({
        'report': 'asset_inventory',
        'generated': datetime.utcnow().isoformat(),
        'data': data,
        'total': total
    })
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Available Reports List
|
||||
# =============================================================================
|
||||
|
||||
@reports_bp.route('', methods=['GET'])
|
||||
@jwt_required()
|
||||
def list_reports():
|
||||
"""List all available reports."""
|
||||
reports = [
|
||||
{
|
||||
'id': 'equipment-by-type',
|
||||
'name': 'Equipment by Type',
|
||||
'description': 'Equipment count grouped by equipment type',
|
||||
'endpoint': '/api/reports/equipment-by-type',
|
||||
'category': 'inventory'
|
||||
},
|
||||
{
|
||||
'id': 'assets-by-status',
|
||||
'name': 'Assets by Status',
|
||||
'description': 'Asset count grouped by status',
|
||||
'endpoint': '/api/reports/assets-by-status',
|
||||
'category': 'inventory'
|
||||
},
|
||||
{
|
||||
'id': 'kb-popularity',
|
||||
'name': 'KB Popularity',
|
||||
'description': 'Most clicked knowledge base articles',
|
||||
'endpoint': '/api/reports/kb-popularity',
|
||||
'category': 'usage'
|
||||
},
|
||||
{
|
||||
'id': 'warranty-status',
|
||||
'name': 'Warranty Status',
|
||||
'description': 'Assets by warranty expiration status',
|
||||
'endpoint': '/api/reports/warranty-status',
|
||||
'category': 'compliance'
|
||||
},
|
||||
{
|
||||
'id': 'software-compliance',
|
||||
'name': 'Software Compliance',
|
||||
'description': 'Required applications vs installed',
|
||||
'endpoint': '/api/reports/software-compliance',
|
||||
'category': 'compliance'
|
||||
},
|
||||
{
|
||||
'id': 'asset-inventory',
|
||||
'name': 'Asset Inventory Summary',
|
||||
'description': 'Complete asset inventory breakdown',
|
||||
'endpoint': '/api/reports/asset-inventory',
|
||||
'category': 'inventory'
|
||||
}
|
||||
]
|
||||
|
||||
return success_response({
|
||||
'reports': reports,
|
||||
'total': len(reports)
|
||||
})
|
||||
@@ -5,7 +5,8 @@ from flask_jwt_extended import jwt_required
|
||||
|
||||
from shopdb.extensions import db
|
||||
from shopdb.core.models import (
|
||||
Machine, Application, KnowledgeBase
|
||||
Machine, Application, KnowledgeBase,
|
||||
Asset, AssetType
|
||||
)
|
||||
from shopdb.utils.responses import success_response
|
||||
|
||||
@@ -46,16 +47,21 @@ def global_search():
|
||||
search_term = f'%{query}%'
|
||||
|
||||
# Search Machines (Equipment and PCs)
|
||||
machines = Machine.query.filter(
|
||||
Machine.isactive == True,
|
||||
db.or_(
|
||||
Machine.machinenumber.ilike(search_term),
|
||||
Machine.alias.ilike(search_term),
|
||||
Machine.hostname.ilike(search_term),
|
||||
Machine.serialnumber.ilike(search_term),
|
||||
Machine.notes.ilike(search_term)
|
||||
)
|
||||
).limit(10).all()
|
||||
try:
|
||||
machines = Machine.query.filter(
|
||||
Machine.isactive == True,
|
||||
db.or_(
|
||||
Machine.machinenumber.ilike(search_term),
|
||||
Machine.alias.ilike(search_term),
|
||||
Machine.hostname.ilike(search_term),
|
||||
Machine.serialnumber.ilike(search_term),
|
||||
Machine.notes.ilike(search_term)
|
||||
)
|
||||
).limit(10).all()
|
||||
except Exception as e:
|
||||
import logging
|
||||
logging.error(f"Machine search failed: {e}")
|
||||
machines = []
|
||||
|
||||
for m in machines:
|
||||
# Determine type: PC, Printer, or Equipment
|
||||
@@ -110,54 +116,62 @@ def global_search():
|
||||
})
|
||||
|
||||
# Search Applications
|
||||
apps = Application.query.filter(
|
||||
Application.isactive == True,
|
||||
db.or_(
|
||||
Application.appname.ilike(search_term),
|
||||
Application.appdescription.ilike(search_term)
|
||||
)
|
||||
).limit(10).all()
|
||||
try:
|
||||
apps = Application.query.filter(
|
||||
Application.isactive == True,
|
||||
db.or_(
|
||||
Application.appname.ilike(search_term),
|
||||
Application.appdescription.ilike(search_term)
|
||||
)
|
||||
).limit(10).all()
|
||||
|
||||
for app in apps:
|
||||
relevance = 20
|
||||
if query.lower() == app.appname.lower():
|
||||
relevance = 100
|
||||
elif query.lower() in app.appname.lower():
|
||||
relevance = 50
|
||||
for app in apps:
|
||||
relevance = 20
|
||||
if query.lower() == app.appname.lower():
|
||||
relevance = 100
|
||||
elif query.lower() in app.appname.lower():
|
||||
relevance = 50
|
||||
|
||||
results.append({
|
||||
'type': 'application',
|
||||
'id': app.appid,
|
||||
'title': app.appname,
|
||||
'subtitle': app.appdescription[:100] if app.appdescription else None,
|
||||
'url': f"/applications/{app.appid}",
|
||||
'relevance': relevance
|
||||
})
|
||||
results.append({
|
||||
'type': 'application',
|
||||
'id': app.appid,
|
||||
'title': app.appname,
|
||||
'subtitle': app.appdescription[:100] if app.appdescription else None,
|
||||
'url': f"/applications/{app.appid}",
|
||||
'relevance': relevance
|
||||
})
|
||||
except Exception as e:
|
||||
import logging
|
||||
logging.error(f"Application search failed: {e}")
|
||||
|
||||
# Search Knowledge Base
|
||||
kb_articles = KnowledgeBase.query.filter(
|
||||
KnowledgeBase.isactive == True,
|
||||
db.or_(
|
||||
KnowledgeBase.shortdescription.ilike(search_term),
|
||||
KnowledgeBase.keywords.ilike(search_term)
|
||||
)
|
||||
).limit(20).all()
|
||||
try:
|
||||
kb_articles = KnowledgeBase.query.filter(
|
||||
KnowledgeBase.isactive == True,
|
||||
db.or_(
|
||||
KnowledgeBase.shortdescription.ilike(search_term),
|
||||
KnowledgeBase.keywords.ilike(search_term)
|
||||
)
|
||||
).limit(20).all()
|
||||
|
||||
for kb in kb_articles:
|
||||
# Weight by clicks and keyword match
|
||||
relevance = 10 + (kb.clicks or 0) * 0.1
|
||||
if kb.keywords and query.lower() in kb.keywords.lower():
|
||||
relevance += 15
|
||||
for kb in kb_articles:
|
||||
# Weight by clicks and keyword match
|
||||
relevance = 10 + (kb.clicks or 0) * 0.1
|
||||
if kb.keywords and query.lower() in kb.keywords.lower():
|
||||
relevance += 15
|
||||
|
||||
results.append({
|
||||
'type': 'knowledgebase',
|
||||
'id': kb.linkid,
|
||||
'title': kb.shortdescription,
|
||||
'subtitle': kb.application.appname if kb.application else None,
|
||||
'url': f"/knowledgebase/{kb.linkid}",
|
||||
'linkurl': kb.linkurl,
|
||||
'relevance': relevance
|
||||
})
|
||||
results.append({
|
||||
'type': 'knowledgebase',
|
||||
'id': kb.linkid,
|
||||
'title': kb.shortdescription,
|
||||
'subtitle': kb.application.appname if kb.application else None,
|
||||
'url': f"/knowledgebase/{kb.linkid}",
|
||||
'linkurl': kb.linkurl,
|
||||
'relevance': relevance
|
||||
})
|
||||
except Exception as e:
|
||||
import logging
|
||||
logging.error(f"KnowledgeBase search failed: {e}")
|
||||
|
||||
# Search Printers (check if printers model exists)
|
||||
try:
|
||||
@@ -187,17 +201,132 @@ def global_search():
|
||||
'url': f"/printers/{p.printerid}",
|
||||
'relevance': relevance
|
||||
})
|
||||
except ImportError:
|
||||
pass # Printers plugin not installed
|
||||
except Exception as e:
|
||||
import logging
|
||||
logging.error(f"Printer search failed: {e}")
|
||||
|
||||
# Search Employees (separate database)
|
||||
try:
|
||||
import pymysql
|
||||
emp_conn = pymysql.connect(
|
||||
host='localhost',
|
||||
user='root',
|
||||
password='rootpassword',
|
||||
database='wjf_employees',
|
||||
cursorclass=pymysql.cursors.DictCursor
|
||||
)
|
||||
|
||||
with emp_conn.cursor() as cur:
|
||||
cur.execute('''
|
||||
SELECT SSO, First_Name, Last_Name, Team, Role
|
||||
FROM employees
|
||||
WHERE First_Name LIKE %s
|
||||
OR Last_Name LIKE %s
|
||||
OR CAST(SSO AS CHAR) LIKE %s
|
||||
ORDER BY Last_Name, First_Name
|
||||
LIMIT 10
|
||||
''', (search_term, search_term, search_term))
|
||||
employees = cur.fetchall()
|
||||
|
||||
emp_conn.close()
|
||||
|
||||
for emp in employees:
|
||||
full_name = f"{emp['First_Name'].strip()} {emp['Last_Name'].strip()}"
|
||||
sso_str = str(emp['SSO'])
|
||||
|
||||
# Calculate relevance
|
||||
relevance = 20
|
||||
if query == sso_str:
|
||||
relevance = 100
|
||||
elif query.lower() == full_name.lower():
|
||||
relevance = 95
|
||||
elif query.lower() in full_name.lower():
|
||||
relevance = 60
|
||||
|
||||
results.append({
|
||||
'type': 'employee',
|
||||
'id': emp['SSO'],
|
||||
'title': full_name,
|
||||
'subtitle': emp.get('Team') or emp.get('Role') or f"SSO: {sso_str}",
|
||||
'url': f"/employees/{emp['SSO']}",
|
||||
'relevance': relevance
|
||||
})
|
||||
except Exception as e:
|
||||
import logging
|
||||
logging.error(f"Employee search failed: {e}")
|
||||
|
||||
# Search unified Assets table
|
||||
try:
|
||||
assets = Asset.query.join(AssetType).filter(
|
||||
Asset.isactive == True,
|
||||
db.or_(
|
||||
Asset.assetnumber.ilike(search_term),
|
||||
Asset.name.ilike(search_term),
|
||||
Asset.serialnumber.ilike(search_term),
|
||||
Asset.notes.ilike(search_term)
|
||||
)
|
||||
).limit(10).all()
|
||||
|
||||
for asset in assets:
|
||||
# Calculate relevance
|
||||
relevance = 15
|
||||
if asset.assetnumber and query.lower() == asset.assetnumber.lower():
|
||||
relevance = 100
|
||||
elif asset.name and query.lower() == asset.name.lower():
|
||||
relevance = 90
|
||||
elif asset.serialnumber and query.lower() == asset.serialnumber.lower():
|
||||
relevance = 85
|
||||
elif asset.name and query.lower() in asset.name.lower():
|
||||
relevance = 50
|
||||
|
||||
# Determine URL and type based on asset type
|
||||
asset_type_name = asset.assettype.assettype if asset.assettype else 'asset'
|
||||
url_map = {
|
||||
'equipment': f"/equipment/{asset.assetid}",
|
||||
'computer': f"/pcs/{asset.assetid}",
|
||||
'network_device': f"/network/{asset.assetid}",
|
||||
'printer': f"/printers/{asset.assetid}",
|
||||
}
|
||||
url = url_map.get(asset_type_name, f"/assets/{asset.assetid}")
|
||||
|
||||
display_name = asset.display_name
|
||||
subtitle = None
|
||||
if asset.name and asset.assetnumber != asset.name:
|
||||
subtitle = asset.assetnumber
|
||||
|
||||
# Get location name
|
||||
location_name = asset.location.locationname if asset.location else None
|
||||
|
||||
results.append({
|
||||
'type': asset_type_name,
|
||||
'id': asset.assetid,
|
||||
'title': display_name,
|
||||
'subtitle': subtitle,
|
||||
'location': location_name,
|
||||
'url': url,
|
||||
'relevance': relevance
|
||||
})
|
||||
except Exception as e:
|
||||
import logging
|
||||
logging.error(f"Asset search failed: {e}")
|
||||
|
||||
# Sort by relevance (highest first)
|
||||
results.sort(key=lambda x: x['relevance'], reverse=True)
|
||||
|
||||
# Remove duplicates (prefer higher relevance)
|
||||
seen_ids = {}
|
||||
unique_results = []
|
||||
for r in results:
|
||||
key = (r['type'], r['id'])
|
||||
if key not in seen_ids:
|
||||
seen_ids[key] = True
|
||||
unique_results.append(r)
|
||||
|
||||
# Limit total results
|
||||
results = results[:30]
|
||||
unique_results = unique_results[:30]
|
||||
|
||||
return success_response({
|
||||
'results': results,
|
||||
'results': unique_results,
|
||||
'query': query,
|
||||
'total': len(results)
|
||||
'total': len(unique_results)
|
||||
})
|
||||
|
||||
74
shopdb/core/api/slides.py
Normal file
74
shopdb/core/api/slides.py
Normal file
@@ -0,0 +1,74 @@
|
||||
"""Slides API for TV dashboard slideshow."""
|
||||
|
||||
import os
|
||||
from flask import Blueprint, current_app
|
||||
from shopdb.utils.responses import success_response, error_response, ErrorCodes
|
||||
|
||||
slides_bp = Blueprint('slides', __name__)
|
||||
|
||||
# Valid image extensions
|
||||
VALID_EXTENSIONS = {'.jpg', '.jpeg', '.png', '.gif', '.bmp', '.webp'}
|
||||
|
||||
|
||||
@slides_bp.route('', methods=['GET'])
|
||||
def get_slides():
|
||||
"""
|
||||
Get list of slides for TV dashboard.
|
||||
|
||||
Returns image files from the static/slides directory.
|
||||
"""
|
||||
# Look for slides in static folder
|
||||
static_folder = current_app.static_folder
|
||||
if not static_folder:
|
||||
static_folder = os.path.join(current_app.root_path, 'static')
|
||||
|
||||
slides_folder = os.path.join(static_folder, 'slides')
|
||||
|
||||
# Also check frontend public folder
|
||||
frontend_slides = os.path.join(
|
||||
os.path.dirname(os.path.dirname(os.path.dirname(current_app.root_path))),
|
||||
'frontend', 'public', 'slides'
|
||||
)
|
||||
|
||||
# Try multiple possible locations
|
||||
possible_paths = [
|
||||
slides_folder,
|
||||
frontend_slides,
|
||||
'/home/camp/projects/shopdb-flask/shopdb/static/slides',
|
||||
'/home/camp/projects/shopdb-flask/frontend/public/slides',
|
||||
]
|
||||
|
||||
slides_path = None
|
||||
for path in possible_paths:
|
||||
if os.path.isdir(path):
|
||||
slides_path = path
|
||||
break
|
||||
|
||||
if not slides_path:
|
||||
return success_response({
|
||||
'slides': [],
|
||||
'basepath': '/static/slides/',
|
||||
'message': 'Slides folder not found'
|
||||
})
|
||||
|
||||
# Get list of image files
|
||||
slides = []
|
||||
try:
|
||||
for filename in sorted(os.listdir(slides_path)):
|
||||
ext = os.path.splitext(filename)[1].lower()
|
||||
if ext in VALID_EXTENSIONS:
|
||||
slides.append({'filename': filename})
|
||||
except Exception as e:
|
||||
return error_response(
|
||||
ErrorCodes.SERVER_ERROR,
|
||||
f'Error reading slides: {str(e)}',
|
||||
http_code=500
|
||||
)
|
||||
|
||||
# Determine base path for serving files
|
||||
basepath = '/static/slides/'
|
||||
|
||||
return success_response({
|
||||
'slides': slides,
|
||||
'basepath': basepath
|
||||
})
|
||||
@@ -1,13 +1,14 @@
|
||||
"""Core SQLAlchemy models."""
|
||||
|
||||
from .base import BaseModel, SoftDeleteMixin, AuditMixin
|
||||
from .asset import Asset, AssetType, AssetStatus
|
||||
from .machine import Machine, MachineType, MachineStatus, PCType
|
||||
from .vendor import Vendor
|
||||
from .model import Model
|
||||
from .businessunit import BusinessUnit
|
||||
from .location import Location
|
||||
from .operatingsystem import OperatingSystem
|
||||
from .relationship import MachineRelationship, RelationshipType
|
||||
from .relationship import MachineRelationship, AssetRelationship, RelationshipType
|
||||
from .communication import Communication, CommunicationType
|
||||
from .user import User, Role
|
||||
from .application import Application, AppVersion, AppOwner, SupportTeam, InstalledApp
|
||||
@@ -18,7 +19,11 @@ __all__ = [
|
||||
'BaseModel',
|
||||
'SoftDeleteMixin',
|
||||
'AuditMixin',
|
||||
# Machine
|
||||
# Asset (new architecture)
|
||||
'Asset',
|
||||
'AssetType',
|
||||
'AssetStatus',
|
||||
# Machine (legacy)
|
||||
'Machine',
|
||||
'MachineType',
|
||||
'MachineStatus',
|
||||
@@ -31,6 +36,7 @@ __all__ = [
|
||||
'OperatingSystem',
|
||||
# Relationships
|
||||
'MachineRelationship',
|
||||
'AssetRelationship',
|
||||
'RelationshipType',
|
||||
# Communication
|
||||
'Communication',
|
||||
|
||||
@@ -64,7 +64,7 @@ class Application(BaseModel):
|
||||
return f"<Application {self.appname}>"
|
||||
|
||||
|
||||
class AppVersion(BaseModel):
|
||||
class AppVersion(db.Model):
|
||||
"""Application version tracking."""
|
||||
__tablename__ = 'appversions'
|
||||
|
||||
@@ -74,6 +74,7 @@ class AppVersion(BaseModel):
|
||||
releasedate = db.Column(db.Date)
|
||||
notes = db.Column(db.String(255))
|
||||
dateadded = db.Column(db.DateTime, default=db.func.now())
|
||||
isactive = db.Column(db.Boolean, default=True)
|
||||
|
||||
# Relationships
|
||||
application = db.relationship('Application', back_populates='versions')
|
||||
@@ -84,6 +85,18 @@ class AppVersion(BaseModel):
|
||||
db.UniqueConstraint('appid', 'version', name='uq_app_version'),
|
||||
)
|
||||
|
||||
def to_dict(self):
|
||||
"""Convert to dictionary."""
|
||||
return {
|
||||
'appversionid': self.appversionid,
|
||||
'appid': self.appid,
|
||||
'version': self.version,
|
||||
'releasedate': self.releasedate.isoformat() if self.releasedate else None,
|
||||
'notes': self.notes,
|
||||
'dateadded': self.dateadded.isoformat() + 'Z' if self.dateadded else None,
|
||||
'isactive': self.isactive
|
||||
}
|
||||
|
||||
def __repr__(self):
|
||||
return f"<AppVersion {self.application.appname if self.application else self.appid} v{self.version}>"
|
||||
|
||||
|
||||
200
shopdb/core/models/asset.py
Normal file
200
shopdb/core/models/asset.py
Normal file
@@ -0,0 +1,200 @@
|
||||
"""Polymorphic Asset models - core of the new asset architecture."""
|
||||
|
||||
from shopdb.extensions import db
|
||||
from .base import BaseModel, SoftDeleteMixin, AuditMixin
|
||||
|
||||
|
||||
class AssetType(BaseModel):
|
||||
"""
|
||||
Registry of asset categories.
|
||||
|
||||
Each type maps to a plugin-owned extension table.
|
||||
Examples: equipment, computer, network_device, printer
|
||||
"""
|
||||
__tablename__ = 'assettypes'
|
||||
|
||||
assettypeid = db.Column(db.Integer, primary_key=True)
|
||||
assettype = db.Column(
|
||||
db.String(50),
|
||||
unique=True,
|
||||
nullable=False,
|
||||
comment='Category name: equipment, computer, network_device, printer'
|
||||
)
|
||||
plugin_name = db.Column(
|
||||
db.String(100),
|
||||
nullable=True,
|
||||
comment='Plugin that owns this type'
|
||||
)
|
||||
table_name = db.Column(
|
||||
db.String(100),
|
||||
nullable=True,
|
||||
comment='Extension table name for this type'
|
||||
)
|
||||
description = db.Column(db.Text)
|
||||
icon = db.Column(db.String(50), comment='Icon name for UI')
|
||||
|
||||
def __repr__(self):
|
||||
return f"<AssetType {self.assettype}>"
|
||||
|
||||
|
||||
class AssetStatus(BaseModel):
|
||||
"""Asset status options."""
|
||||
__tablename__ = 'assetstatuses'
|
||||
|
||||
statusid = db.Column(db.Integer, primary_key=True)
|
||||
status = db.Column(db.String(50), unique=True, nullable=False)
|
||||
description = db.Column(db.Text)
|
||||
color = db.Column(db.String(20), comment='CSS color for UI')
|
||||
|
||||
def __repr__(self):
|
||||
return f"<AssetStatus {self.status}>"
|
||||
|
||||
|
||||
class Asset(BaseModel, SoftDeleteMixin, AuditMixin):
|
||||
"""
|
||||
Core asset model - minimal shared fields.
|
||||
|
||||
Category-specific data lives in plugin extension tables
|
||||
(equipment, computers, network_devices, printers).
|
||||
The assetid matches original machineid for migration compatibility.
|
||||
"""
|
||||
__tablename__ = 'assets'
|
||||
|
||||
assetid = db.Column(db.Integer, primary_key=True)
|
||||
|
||||
# Identification
|
||||
assetnumber = db.Column(
|
||||
db.String(50),
|
||||
unique=True,
|
||||
nullable=False,
|
||||
index=True,
|
||||
comment='Business identifier (e.g., CMM01, G5QX1GT3ESF)'
|
||||
)
|
||||
name = db.Column(
|
||||
db.String(100),
|
||||
comment='Display name/alias'
|
||||
)
|
||||
serialnumber = db.Column(
|
||||
db.String(100),
|
||||
index=True,
|
||||
comment='Hardware serial number'
|
||||
)
|
||||
|
||||
# Classification
|
||||
assettypeid = db.Column(
|
||||
db.Integer,
|
||||
db.ForeignKey('assettypes.assettypeid'),
|
||||
nullable=False
|
||||
)
|
||||
statusid = db.Column(
|
||||
db.Integer,
|
||||
db.ForeignKey('assetstatuses.statusid'),
|
||||
default=1,
|
||||
comment='In Use, Spare, Retired, etc.'
|
||||
)
|
||||
|
||||
# Location and organization
|
||||
locationid = db.Column(
|
||||
db.Integer,
|
||||
db.ForeignKey('locations.locationid'),
|
||||
nullable=True
|
||||
)
|
||||
businessunitid = db.Column(
|
||||
db.Integer,
|
||||
db.ForeignKey('businessunits.businessunitid'),
|
||||
nullable=True
|
||||
)
|
||||
|
||||
# Floor map position
|
||||
mapleft = db.Column(db.Integer, comment='X coordinate on floor map')
|
||||
maptop = db.Column(db.Integer, comment='Y coordinate on floor map')
|
||||
|
||||
# Notes
|
||||
notes = db.Column(db.Text, nullable=True)
|
||||
|
||||
# Relationships
|
||||
assettype = db.relationship('AssetType', backref='assets')
|
||||
status = db.relationship('AssetStatus', backref='assets')
|
||||
location = db.relationship('Location', backref='assets')
|
||||
businessunit = db.relationship('BusinessUnit', backref='assets')
|
||||
|
||||
# Communications (one-to-many) - will be migrated to use assetid
|
||||
communications = db.relationship(
|
||||
'Communication',
|
||||
foreign_keys='Communication.assetid',
|
||||
backref='asset',
|
||||
cascade='all, delete-orphan',
|
||||
lazy='dynamic'
|
||||
)
|
||||
|
||||
# Indexes
|
||||
__table_args__ = (
|
||||
db.Index('idx_asset_type_bu', 'assettypeid', 'businessunitid'),
|
||||
db.Index('idx_asset_location', 'locationid'),
|
||||
db.Index('idx_asset_active', 'isactive'),
|
||||
db.Index('idx_asset_status', 'statusid'),
|
||||
)
|
||||
|
||||
def __repr__(self):
|
||||
return f"<Asset {self.assetnumber}>"
|
||||
|
||||
@property
|
||||
def display_name(self):
|
||||
"""Get display name (name if set, otherwise assetnumber)."""
|
||||
return self.name or self.assetnumber
|
||||
|
||||
@property
|
||||
def primary_ip(self):
|
||||
"""Get primary IP address from communications."""
|
||||
comm = self.communications.filter_by(
|
||||
isprimary=True,
|
||||
comtypeid=1 # IP type
|
||||
).first()
|
||||
if comm:
|
||||
return comm.ipaddress
|
||||
# Fall back to any IP
|
||||
comm = self.communications.filter_by(comtypeid=1).first()
|
||||
return comm.ipaddress if comm else None
|
||||
|
||||
def to_dict(self, include_type_data=False):
|
||||
"""
|
||||
Convert model to dictionary.
|
||||
|
||||
Args:
|
||||
include_type_data: If True, include category-specific data from extension table
|
||||
"""
|
||||
result = super().to_dict()
|
||||
|
||||
# Add related object names for convenience
|
||||
if self.assettype:
|
||||
result['assettype_name'] = self.assettype.assettype
|
||||
if self.status:
|
||||
result['status_name'] = self.status.status
|
||||
if self.location:
|
||||
result['location_name'] = self.location.locationname
|
||||
if self.businessunit:
|
||||
result['businessunit_name'] = self.businessunit.businessunit
|
||||
|
||||
# Include extension data if requested
|
||||
if include_type_data:
|
||||
ext_data = self._get_extension_data()
|
||||
if ext_data:
|
||||
result['type_data'] = ext_data
|
||||
|
||||
return result
|
||||
|
||||
def _get_extension_data(self):
|
||||
"""Get category-specific data from extension table."""
|
||||
# Check for equipment extension
|
||||
if hasattr(self, 'equipment') and self.equipment:
|
||||
return self.equipment.to_dict()
|
||||
# Check for computer extension
|
||||
if hasattr(self, 'computer') and self.computer:
|
||||
return self.computer.to_dict()
|
||||
# Check for network_device extension
|
||||
if hasattr(self, 'network_device') and self.network_device:
|
||||
return self.network_device.to_dict()
|
||||
# Check for printer extension
|
||||
if hasattr(self, 'printer') and self.printer:
|
||||
return self.printer.to_dict()
|
||||
return None
|
||||
@@ -20,18 +20,30 @@ class CommunicationType(BaseModel):
|
||||
|
||||
class Communication(BaseModel):
|
||||
"""
|
||||
Communication interface for a machine.
|
||||
Communication interface for an asset (or legacy machine).
|
||||
Stores network config, serial settings, etc.
|
||||
"""
|
||||
__tablename__ = 'communications'
|
||||
|
||||
communicationid = db.Column(db.Integer, primary_key=True)
|
||||
|
||||
# New asset-based FK (preferred)
|
||||
assetid = db.Column(
|
||||
db.Integer,
|
||||
db.ForeignKey('assets.assetid'),
|
||||
nullable=True,
|
||||
index=True,
|
||||
comment='FK to assets table (new architecture)'
|
||||
)
|
||||
|
||||
# Legacy machine FK (for backward compatibility during migration)
|
||||
machineid = db.Column(
|
||||
db.Integer,
|
||||
db.ForeignKey('machines.machineid'),
|
||||
nullable=False
|
||||
nullable=True,
|
||||
comment='DEPRECATED: FK to machines table - use assetid instead'
|
||||
)
|
||||
|
||||
comtypeid = db.Column(
|
||||
db.Integer,
|
||||
db.ForeignKey('communicationtypes.comtypeid'),
|
||||
@@ -82,6 +94,7 @@ class Communication(BaseModel):
|
||||
comtype = db.relationship('CommunicationType', backref='communications')
|
||||
|
||||
__table_args__ = (
|
||||
db.Index('idx_comm_asset', 'assetid'),
|
||||
db.Index('idx_comm_machine', 'machineid'),
|
||||
db.Index('idx_comm_ip', 'ipaddress'),
|
||||
)
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
"""Machine relationship models."""
|
||||
"""Machine and Asset relationship models."""
|
||||
|
||||
from shopdb.extensions import db
|
||||
from .base import BaseModel
|
||||
|
||||
|
||||
class RelationshipType(BaseModel):
|
||||
"""Types of relationships between machines."""
|
||||
"""Types of relationships between machines/assets."""
|
||||
__tablename__ = 'relationshiptypes'
|
||||
|
||||
relationshiptypeid = db.Column(db.Integer, primary_key=True)
|
||||
@@ -21,6 +21,65 @@ class RelationshipType(BaseModel):
|
||||
return f"<RelationshipType {self.relationshiptype}>"
|
||||
|
||||
|
||||
class AssetRelationship(BaseModel):
|
||||
"""
|
||||
Relationships between assets.
|
||||
|
||||
Examples:
|
||||
- Computer controls Equipment
|
||||
- Two machines are dualpath partners
|
||||
- Network device connects to equipment
|
||||
"""
|
||||
__tablename__ = 'assetrelationships'
|
||||
|
||||
relationshipid = db.Column(db.Integer, primary_key=True)
|
||||
|
||||
source_assetid = db.Column(
|
||||
db.Integer,
|
||||
db.ForeignKey('assets.assetid'),
|
||||
nullable=False
|
||||
)
|
||||
target_assetid = db.Column(
|
||||
db.Integer,
|
||||
db.ForeignKey('assets.assetid'),
|
||||
nullable=False
|
||||
)
|
||||
relationshiptypeid = db.Column(
|
||||
db.Integer,
|
||||
db.ForeignKey('relationshiptypes.relationshiptypeid'),
|
||||
nullable=False
|
||||
)
|
||||
|
||||
notes = db.Column(db.Text)
|
||||
|
||||
# Relationships
|
||||
source_asset = db.relationship(
|
||||
'Asset',
|
||||
foreign_keys=[source_assetid],
|
||||
backref='outgoing_relationships'
|
||||
)
|
||||
target_asset = db.relationship(
|
||||
'Asset',
|
||||
foreign_keys=[target_assetid],
|
||||
backref='incoming_relationships'
|
||||
)
|
||||
relationship_type = db.relationship('RelationshipType', backref='asset_relationships')
|
||||
|
||||
__table_args__ = (
|
||||
db.UniqueConstraint(
|
||||
'source_assetid',
|
||||
'target_assetid',
|
||||
'relationshiptypeid',
|
||||
name='uq_asset_relationship'
|
||||
),
|
||||
db.Index('idx_asset_rel_source', 'source_assetid'),
|
||||
db.Index('idx_asset_rel_target', 'target_assetid'),
|
||||
)
|
||||
|
||||
def __repr__(self):
|
||||
return f"<AssetRelationship {self.source_assetid} -> {self.target_assetid}>"
|
||||
|
||||
|
||||
class MachineRelationship(BaseModel):
|
||||
"""
|
||||
Relationships between machines.
|
||||
|
||||
104
shopdb/core/services/employee_service.py
Normal file
104
shopdb/core/services/employee_service.py
Normal file
@@ -0,0 +1,104 @@
|
||||
"""Employee lookup service - queries wjf_employees database."""
|
||||
|
||||
from typing import Optional, Dict, List
|
||||
import pymysql
|
||||
from flask import current_app
|
||||
|
||||
|
||||
def get_employee_connection():
|
||||
"""Get connection to wjf_employees database."""
|
||||
return pymysql.connect(
|
||||
host='localhost',
|
||||
user='root',
|
||||
password='rootpassword',
|
||||
database='wjf_employees',
|
||||
cursorclass=pymysql.cursors.DictCursor
|
||||
)
|
||||
|
||||
|
||||
def lookup_employee(sso: str) -> Optional[Dict]:
|
||||
"""
|
||||
Look up employee by SSO.
|
||||
|
||||
Returns dict with: SSO, First_Name, Last_Name, full_name, Picture, etc.
|
||||
"""
|
||||
if not sso or not sso.strip().isdigit():
|
||||
return None
|
||||
|
||||
try:
|
||||
conn = get_employee_connection()
|
||||
with conn.cursor() as cur:
|
||||
cur.execute(
|
||||
'SELECT * FROM employees WHERE SSO = %s',
|
||||
(int(sso.strip()),)
|
||||
)
|
||||
row = cur.fetchone()
|
||||
if row:
|
||||
# Add computed full_name
|
||||
first = (row.get('First_Name') or '').strip()
|
||||
last = (row.get('Last_Name') or '').strip()
|
||||
row['full_name'] = f"{first} {last}".strip()
|
||||
return row
|
||||
conn.close()
|
||||
except Exception as e:
|
||||
current_app.logger.error(f"Employee lookup error: {e}")
|
||||
return None
|
||||
|
||||
|
||||
def lookup_employees(sso_list: str) -> List[Dict]:
|
||||
"""
|
||||
Look up multiple employees by comma-separated SSO list.
|
||||
|
||||
Returns list of employee dicts.
|
||||
"""
|
||||
if not sso_list:
|
||||
return []
|
||||
|
||||
ssos = [s.strip() for s in sso_list.split(',') if s.strip().isdigit()]
|
||||
if not ssos:
|
||||
return []
|
||||
|
||||
try:
|
||||
conn = get_employee_connection()
|
||||
with conn.cursor() as cur:
|
||||
placeholders = ','.join(['%s'] * len(ssos))
|
||||
cur.execute(
|
||||
f'SELECT * FROM employees WHERE SSO IN ({placeholders})',
|
||||
[int(s) for s in ssos]
|
||||
)
|
||||
rows = cur.fetchall()
|
||||
|
||||
# Add computed full_name to each
|
||||
for row in rows:
|
||||
first = (row.get('First_Name') or '').strip()
|
||||
last = (row.get('Last_Name') or '').strip()
|
||||
row['full_name'] = f"{first} {last}".strip()
|
||||
|
||||
return rows
|
||||
conn.close()
|
||||
except Exception as e:
|
||||
current_app.logger.error(f"Employee lookup error: {e}")
|
||||
return []
|
||||
|
||||
|
||||
def get_employee_names(sso_list: str) -> str:
|
||||
"""
|
||||
Get comma-separated list of employee names from SSO list.
|
||||
|
||||
Input: "212574611,212637451"
|
||||
Output: "Brandon Saltz, Jon Kolkmann"
|
||||
"""
|
||||
employees = lookup_employees(sso_list)
|
||||
if not employees:
|
||||
return sso_list # Return SSOs as fallback
|
||||
|
||||
return ', '.join(emp['full_name'] for emp in employees if emp.get('full_name'))
|
||||
|
||||
|
||||
def get_employee_picture_url(sso: str) -> Optional[str]:
|
||||
"""Get URL to employee picture if available."""
|
||||
emp = lookup_employee(sso)
|
||||
if emp and emp.get('Picture'):
|
||||
# Pictures are stored relative paths like "Support/212574611.png"
|
||||
return f"/static/employees/{emp['Picture']}"
|
||||
return None
|
||||
Reference in New Issue
Block a user