feat: bidirectional equipment sync with digital-twin
All checks were successful
Deploy to Production / deploy (push) Successful in 1m37s

Add import, sync, and push capabilities between factoryOps and the
digital-twin (BaSyx AAS) backend. Includes:

- Equipment sync service with field mapping and LWW conflict resolution
- Import preview modal with already-imported detection
- Bidirectional sync (pull updates + push local changes)
- Sync history tracking via equipment_sync_history table
- Machine detail page shows sync status and change history
- Docker networking for container-to-container communication
- UI fixes: responsive layout (375px), touch targets, section spacing
- 30 test cases for sync service
This commit is contained in:
Johngreen
2026-02-12 12:27:21 +09:00
parent b619355763
commit 278cd9d551
16 changed files with 2700 additions and 25 deletions

View File

@@ -0,0 +1,111 @@
"""add equipment sync schema
Revision ID: i1j2k3l4m5n6
Revises: h0c1d2e3f4g5
Create Date: 2026-02-11 17:00:00.000000
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
revision: str = "i1j2k3l4m5n6"
down_revision: Union[str, None] = "h0c1d2e3f4g5"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Apply the equipment-sync schema.

    - Adds sync bookkeeping columns to ``machines``: ``external_id`` (remote
      digital-twin id), ``source`` (``"local"`` vs ``"digital-twin"``),
      ``sync_version`` and ``last_synced_at``.
    - Enforces per-tenant uniqueness of ``external_id`` with a partial unique
      index (only rows where external_id IS NOT NULL).
    - Creates the ``machine_change_history`` audit table and its lookup
      indexes.

    NOTE(review): PostgreSQL-specific — partial index SQL, UUID columns, and
    ``gen_random_uuid()`` (built-in on PG 13+, otherwise needs pgcrypto;
    confirm target server version).
    """
    # Add columns to machines table
    op.add_column("machines", sa.Column("external_id", sa.String(100), nullable=True))
    op.add_column(
        "machines",
        # server_default keeps pre-existing rows valid under NOT NULL
        sa.Column("source", sa.String(20), server_default="local", nullable=False),
    )
    op.add_column(
        "machines",
        sa.Column("sync_version", sa.Integer(), server_default="0", nullable=False),
    )
    op.add_column(
        "machines",
        sa.Column("last_synced_at", sa.TIMESTAMP(timezone=True), nullable=True),
    )
    # Create index on external_id
    op.create_index("ix_machines_external_id", "machines", ["external_id"])
    # Create partial unique constraint (external_id IS NOT NULL).
    # Raw SQL because the WHERE clause makes this a partial index.
    op.execute(
        "CREATE UNIQUE INDEX uq_machines_tenant_external_id ON machines (tenant_id, external_id) WHERE external_id IS NOT NULL"
    )
    # Create machine_change_history table (one row per changed field)
    op.create_table(
        "machine_change_history",
        sa.Column(
            "id",
            sa.dialects.postgresql.UUID(as_uuid=True),
            primary_key=True,
            server_default=sa.text("gen_random_uuid()"),
        ),
        sa.Column(
            "tenant_id", sa.String(50), sa.ForeignKey("tenants.id"), nullable=False
        ),
        sa.Column(
            "machine_id",
            sa.dialects.postgresql.UUID(as_uuid=True),
            # history rows die with their machine
            sa.ForeignKey("machines.id", ondelete="CASCADE"),
            nullable=False,
        ),
        sa.Column("field_name", sa.String(50), nullable=False),
        sa.Column("old_value", sa.Text(), nullable=True),
        sa.Column("new_value", sa.Text(), nullable=True),
        # e.g. "sync" vs local edit; exact values defined by the sync service
        sa.Column("change_source", sa.String(20), nullable=False),
        sa.Column(
            "changed_by",
            sa.dialects.postgresql.UUID(as_uuid=True),
            # nullable: sync-originated changes have no user
            sa.ForeignKey("users.id"),
            nullable=True,
        ),
        sa.Column(
            "changed_at",
            sa.TIMESTAMP(timezone=True),
            nullable=False,
            server_default=sa.text("now()"),
        ),
    )
    # Create indexes on machine_change_history table
    op.create_index(
        "ix_machine_change_history_tenant_machine",
        "machine_change_history",
        ["tenant_id", "machine_id"],
    )
    op.create_index(
        "ix_machine_change_history_tenant_date",
        "machine_change_history",
        ["tenant_id", "changed_at"],
    )
def downgrade() -> None:
    """Revert the equipment-sync schema (mirror of :func:`upgrade`, reversed).

    Fix: the two ``machine_change_history`` index drops now pass
    ``table_name`` — consistent with the ``ix_machines_external_id`` drop
    below, and required by Alembic on backends (e.g. MySQL) where an index
    name alone does not identify the index.
    """
    # Drop indexes on machine_change_history table
    op.drop_index(
        "ix_machine_change_history_tenant_date", table_name="machine_change_history"
    )
    op.drop_index(
        "ix_machine_change_history_tenant_machine", table_name="machine_change_history"
    )
    # Drop machine_change_history table
    op.drop_table("machine_change_history")
    # Drop partial unique constraint (created via raw SQL in upgrade)
    op.execute("DROP INDEX IF EXISTS uq_machines_tenant_external_id")
    # Drop index on machines table
    op.drop_index("ix_machines_external_id", table_name="machines")
    # Drop columns from machines table
    op.drop_column("machines", "last_synced_at")
    op.drop_column("machines", "sync_version")
    op.drop_column("machines", "source")
    op.drop_column("machines", "external_id")

View File

@@ -2,11 +2,25 @@
import { useState, useCallback, useEffect } from 'react';
import { useParams, useRouter } from 'next/navigation';
import { useMachine, useEquipmentParts, useReplacements } from '@/lib/hooks';
import { useMachine, useEquipmentParts, useReplacements, useMachineHistory } from '@/lib/hooks';
import { useToast } from '@/lib/toast-context';
import { api } from '@/lib/api';
import type { EquipmentPart, PartReplacementLog } from '@/lib/types';
// Korean display labels for machine fields, keyed by backend field name.
// Used by the change-history table; unknown field names fall back to the
// raw key at the lookup site (FIELD_LABELS[name] || name).
const FIELD_LABELS: Record<string, string> = {
  name: '설비명',
  equipment_code: '설비 코드',
  model: '모델명',
  manufacturer: '제조사',
  installation_date: '설치일',
  location: '위치',
  area: '구역',
  criticality: '중요도',
  rated_capacity: '정격 용량',
  power_rating: '전력 사양',
  description: '설명',
};
const LIFECYCLE_TYPES = [
{ value: 'hours', label: '시간 (Hours)' },
{ value: 'count', label: '횟수 (Count)' },
@@ -49,6 +63,7 @@ export default function MachineDetailPage() {
const { machine, isLoading: machineLoading, mutate: mutateMachine } = useMachine(tenantId, machineId);
const { parts, isLoading: partsLoading, mutate: mutateParts } = useEquipmentParts(tenantId, machineId);
const { history, isLoading: changeHistoryLoading } = useMachineHistory(tenantId, machineId);
const { addToast } = useToast();
const [showPartModal, setShowPartModal] = useState(false);
@@ -261,6 +276,44 @@ export default function MachineDetailPage() {
</div>
</div>
{machine.source === 'digital-twin' && (
<div className="sync-status-section">
<div className="section-header">
<h3 className="section-title">
<span className="material-symbols-outlined">cloud</span>
</h3>
</div>
<div className="spec-grid">
<div className="spec-item">
<span className="spec-label"></span>
<span className="spec-value">
<span className="sync-badge">
<span className="material-symbols-outlined">cloud</span>
</span>
</span>
</div>
<div className="spec-item">
<span className="spec-label"> </span>
<span className="spec-value">v{machine.sync_version}</span>
</div>
{machine.last_synced_at && (
<div className="spec-item">
<span className="spec-label"> </span>
<span className="spec-value">{new Date(machine.last_synced_at).toLocaleString('ko-KR')}</span>
</div>
)}
{machine.external_id && (
<div className="spec-item">
<span className="spec-label"> ID</span>
<span className="spec-value">{machine.external_id}</span>
</div>
)}
</div>
</div>
)}
{(machine.manufacturer || machine.location || machine.rated_capacity || machine.power_rating || machine.description || machine.installation_date) && (
<div className="section">
<div className="section-header">
@@ -401,6 +454,54 @@ export default function MachineDetailPage() {
)}
</div>
<div className="section">
<div className="section-header">
<h3 className="section-title">
<span className="material-symbols-outlined">history</span>
</h3>
</div>
{historyLoading ? (
<div className="loading-sm">
<span className="material-symbols-outlined spinning">progress_activity</span>
</div>
) : history.length === 0 ? (
<div className="empty-state-sm">
<span className="material-symbols-outlined">history</span>
<p> .</p>
</div>
) : (
<div className="part-table-wrap">
<table className="history-table">
<thead>
<tr>
<th></th>
<th></th>
<th> </th>
<th> </th>
<th></th>
</tr>
</thead>
<tbody>
{history.map((h) => (
<tr key={h.id}>
<td>{new Date(h.changed_at).toLocaleString('ko-KR')}</td>
<td>{FIELD_LABELS[h.field_name] || h.field_name}</td>
<td>{h.old_value || '-'}</td>
<td>{h.new_value || '-'}</td>
<td>
<span className={`history-source-badge ${h.change_source === 'sync' ? 'history-source-sync' : 'history-source-local'}`}>
{h.change_source === 'sync' ? '동기화' : '로컬'}
</span>
</td>
</tr>
))}
</tbody>
</table>
</div>
)}
</div>
{showPartModal && (
<div className="modal-overlay" onClick={closePartModal}>
<div className="modal-content modal-lg" onClick={(e) => e.stopPropagation()}>
@@ -643,7 +744,7 @@ export default function MachineDetailPage() {
</button>
</div>
<div className="replacement-history">
{historyLoading ? (
{changeHistoryLoading ? (
<div className="loading-sm">
<span className="material-symbols-outlined spinning">progress_activity</span>
</div>

View File

@@ -6,7 +6,7 @@ import { useMachines } from '@/lib/hooks';
import { useToast } from '@/lib/toast-context';
import { api } from '@/lib/api';
import { MachineList } from '@/components/MachineList';
import type { Machine } from '@/lib/types';
import type { Machine, ImportPreviewItem, ImportResult, SyncResult } from '@/lib/types';
interface MachineForm {
name: string;
@@ -30,6 +30,13 @@ export default function TenantDashboard() {
const [submitting, setSubmitting] = useState(false);
const [editTarget, setEditTarget] = useState<Machine | null>(null);
const [showImportModal, setShowImportModal] = useState(false);
const [importPreview, setImportPreview] = useState<ImportPreviewItem[]>([]);
const [selectedImports, setSelectedImports] = useState<Set<string>>(new Set());
const [syncing, setSyncing] = useState(false);
const [importing, setImporting] = useState(false);
const [importLoading, setImportLoading] = useState(false);
const openCreate = useCallback(() => {
setEditTarget(null);
setForm(INITIAL_FORM);
@@ -98,8 +105,85 @@ export default function TenantDashboard() {
}
}, [tenantId, addToast, router]);
// Open the import modal and fetch the digital-twin equipment preview list.
// On fetch failure: toast an error and close the modal again.
const openImportModal = useCallback(async () => {
  setShowImportModal(true);
  setImportLoading(true);
  try {
    const resp = await api.get<{ equipment: ImportPreviewItem[]; total: number }>(`/api/${tenantId}/machines/import/preview`);
    setImportPreview(resp.equipment);
    // Start with nothing selected each time the modal opens
    setSelectedImports(new Set());
  } catch (err) {
    addToast('디지털 트윈 데이터를 불러오는데 실패했습니다.', 'error');
    setShowImportModal(false);
  } finally {
    setImportLoading(false);
  }
}, [tenantId, addToast]);
// Close the import modal, clearing selection and preview so a later
// reopen starts from a clean slate.
const closeImportModal = useCallback(() => {
  setSelectedImports(new Set());
  setImportPreview([]);
  setShowImportModal(false);
}, []);
// Toggle a single equipment id in the import-selection set (immutably,
// via a copied Set, so React sees a new reference).
const toggleImportSelection = useCallback((id: string) => {
  setSelectedImports(prev => {
    const updated = new Set(prev);
    // Set.delete returns false when the id was absent — add it in that case.
    if (!updated.delete(id)) {
      updated.add(id);
    }
    return updated;
  });
}, []);
// Select every not-yet-imported item, or clear the selection when all of
// them are already selected.
const toggleAllImports = useCallback(() => {
  setSelectedImports(prev => {
    const selectable = importPreview.filter(({ already_imported }) => !already_imported);
    return prev.size === selectable.length
      ? new Set<string>()
      : new Set(selectable.map(({ equipmentId }) => equipmentId));
  });
}, [importPreview]);
// Import the currently selected digital-twin equipment into this tenant,
// then refresh the machine list and close the modal on success.
const handleImport = useCallback(async () => {
  if (selectedImports.size === 0) return;
  setImporting(true);
  try {
    const result = await api.post<ImportResult>(`/api/${tenantId}/machines/import`, {
      external_ids: Array.from(selectedImports)
    });
    addToast(`${result.imported_count}개 설비를 가져왔습니다.`, 'success');
    mutate();
    closeImportModal();
  } catch (err) {
    addToast('설비 가져오기에 실패했습니다.', 'error');
  } finally {
    setImporting(false);
  }
}, [tenantId, selectedImports, mutate, addToast, closeImportModal]);
// Trigger a bidirectional sync (pull remote updates + push local changes)
// and report the pull/push counts via toast; refreshes the machine list.
const handleSync = useCallback(async () => {
  setSyncing(true);
  try {
    const result = await api.post<SyncResult>(`/api/${tenantId}/machines/sync`, {});
    addToast(`동기화 완료: ${result.pull.synced_count}개 업데이트, ${result.push_count}개 전송`, 'success');
    mutate();
  } catch (err) {
    addToast('동기화에 실패했습니다.', 'error');
  } finally {
    setSyncing(false);
  }
}, [tenantId, mutate, addToast]);
const handleDelete = useCallback(async (machine: Machine) => {
if (!confirm(`"${machine.name}" 설비를 삭제하시겠습니까?`)) return;
const message = machine.source === 'digital-twin'
? `"${machine.name}"은(는) 디지털 트윈에서 가져온 설비입니다. 삭제하시겠습니까? (다음 동기화 시 다시 가져올 수 있습니다.)`
: `"${machine.name}" 설비를 삭제하시겠습니까?`;
if (!confirm(message)) return;
try {
await api.delete(`/api/${tenantId}/machines/${machine.id}`);
addToast('설비가 삭제되었습니다.', 'success');
@@ -125,10 +209,20 @@ export default function TenantDashboard() {
<span className="material-symbols-outlined">precision_manufacturing</span>
</h2>
<button className="btn-primary" onClick={openCreate}>
<span className="material-symbols-outlined">add</span>
</button>
<div className="page-header-actions">
<button className={`btn-sync ${syncing ? 'sync-spinning' : ''}`} onClick={handleSync} disabled={syncing}>
<span className="material-symbols-outlined">sync</span>
</button>
<button className="btn-outline" onClick={openImportModal}>
<span className="material-symbols-outlined">cloud_download</span>
</button>
<button className="btn-primary" onClick={openCreate}>
<span className="material-symbols-outlined">add</span>
</button>
</div>
</div>
{isLoading ? (
@@ -234,6 +328,68 @@ export default function TenantDashboard() {
</div>
</div>
)}
{showImportModal && (
<div className="modal-overlay" onClick={closeImportModal}>
<div className="modal-content import-modal" onClick={e => e.stopPropagation()}>
<div className="modal-header">
<h3> </h3>
<button className="modal-close" onClick={closeImportModal}>
<span className="material-symbols-outlined">close</span>
</button>
</div>
<div className="modal-body-form">
{importLoading ? (
<div className="loading">
<span className="material-symbols-outlined spinning">progress_activity</span>
</div>
) : (
<>
<div className="import-select-all" onClick={toggleAllImports}>
<span className="material-symbols-outlined">
{selectedImports.size > 0 && selectedImports.size === importPreview.filter(i => !i.already_imported).length
? 'check_box'
: 'check_box_outline_blank'}
</span>
</div>
<div className="import-list">
{importPreview.map(item => (
<div
key={item.equipmentId}
className={`import-item ${item.already_imported ? 'import-item-imported' : ''}`}
onClick={() => !item.already_imported && toggleImportSelection(item.equipmentId)}
>
<input
type="checkbox"
className="import-item-checkbox"
checked={selectedImports.has(item.equipmentId) || item.already_imported}
disabled={item.already_imported}
readOnly
/>
<div className="import-item-info">
<div className="import-item-name">{item.equipmentName}</div>
<div className="import-item-code">{item.equipmentId} {item.model && `${item.model}`}</div>
</div>
{item.already_imported && (
<span className="imported-badge"> </span>
)}
</div>
))}
</div>
</>
)}
</div>
<div className="modal-actions">
<button className="btn-outline" onClick={closeImportModal} disabled={importing}></button>
<button className="btn-primary" onClick={handleImport} disabled={importing || selectedImports.size === 0}>
{importing ? <span className="material-symbols-outlined spinning">progress_activity</span> : <span className="material-symbols-outlined">download</span>}
({selectedImports.size})
</button>
</div>
</div>
</div>
)}
</div>
);
}

View File

@@ -241,6 +241,7 @@ a {
.btn-sm {
padding: 6px 14px;
min-height: 36px;
font-size: 12px;
}
@@ -259,6 +260,7 @@ a {
font-weight: 500;
cursor: pointer;
padding: 8px 12px;
min-height: 44px;
border-radius: var(--md-radius-xl);
transition: background var(--md-motion-quick);
position: relative;
@@ -610,6 +612,7 @@ a {
border: none;
cursor: pointer;
padding: 6px 8px;
min-height: 44px;
border-radius: var(--md-radius-sm);
color: var(--md-primary);
transition: background var(--md-motion-quick);
@@ -653,6 +656,7 @@ a {
align-items: center;
gap: 6px;
padding: 8px 16px;
min-height: 44px;
background: none;
border: none;
border-radius: var(--md-radius-xl);
@@ -726,9 +730,17 @@ a {
display: flex;
align-items: center;
justify-content: space-between;
gap: 12px;
flex-wrap: wrap;
margin-bottom: 24px;
}
.page-header-actions {
display: flex;
gap: 8px;
flex-wrap: wrap;
}
.page-title {
display: flex;
align-items: center;
@@ -968,6 +980,7 @@ a {
.detail-icon {
font-size: 36px;
line-height: 1;
color: var(--md-primary);
}
@@ -991,6 +1004,10 @@ a {
box-shadow: var(--md-elevation-1);
}
.section + .section {
margin-top: 20px;
}
.section-header {
display: flex;
align-items: center;
@@ -1010,6 +1027,7 @@ a {
.section-title .material-symbols-outlined {
font-size: 20px;
line-height: 1;
color: var(--md-primary);
}
@@ -2392,12 +2410,30 @@ a {
display: none;
}
.page-header {
gap: 8px;
}
.page-header-actions {
width: 100%;
justify-content: flex-end;
}
.page-title {
font-size: 18px;
}
.btn-primary,
.btn-outline {
font-size: 13px;
padding: 8px 14px;
}
.btn-sync {
font-size: 13px;
padding: 8px 12px;
}
.modal-content {
margin: 8px;
max-height: 95vh;
@@ -2508,3 +2544,401 @@ a {
color: var(--md-on-surface-variant);
border: 1px solid var(--md-outline-variant);
}
/* ===== Equipment Sync ===== */
/* Sync badge on machine cards */
.sync-badge {
display: inline-flex;
align-items: center;
gap: 4px;
padding: 2px 8px;
border-radius: var(--md-radius-sm);
font-size: 11px;
font-weight: 600;
letter-spacing: 0.02em;
background: rgba(26, 115, 232, 0.08);
color: var(--md-primary);
border: 1px solid rgba(26, 115, 232, 0.15);
}
.sync-badge .material-symbols-outlined {
font-size: 14px;
}
/* Sync status section on machine detail page */
.sync-status-section {
display: flex;
align-items: center;
gap: 12px;
padding: 12px 16px;
background: var(--md-surface-container-low);
border-radius: var(--md-radius);
margin-bottom: 16px;
font-size: 13px;
color: var(--md-on-surface-variant);
border: 1px solid var(--md-outline-variant);
}
.sync-status-section .material-symbols-outlined {
font-size: 20px;
color: var(--md-primary);
}
.sync-status-label {
font-weight: 500;
color: var(--md-on-surface);
}
.sync-status-value {
color: var(--md-on-surface-variant);
}
.sync-status-divider {
width: 1px;
height: 20px;
background: var(--md-outline-variant);
}
/* Info tooltip for upstream-managed fields */
.field-info-tooltip {
position: relative;
display: inline-flex;
align-items: center;
margin-left: 4px;
cursor: help;
}
.field-info-tooltip .material-symbols-outlined {
font-size: 16px;
color: var(--md-on-surface-variant);
opacity: 0.6;
}
.field-info-tooltip:hover .material-symbols-outlined {
opacity: 1;
color: var(--md-primary);
}
.field-info-tooltip-text {
display: none;
position: absolute;
bottom: calc(100% + 8px);
left: 50%;
transform: translateX(-50%);
padding: 8px 12px;
background: var(--md-on-surface);
color: var(--md-surface);
font-size: 12px;
border-radius: var(--md-radius-xs);
white-space: nowrap;
z-index: 100;
box-shadow: var(--md-elevation-2);
}
.field-info-tooltip:hover .field-info-tooltip-text {
display: block;
}
/* Import modal - extends .modal-content */
.import-modal {
max-width: 640px;
}
.import-list {
display: flex;
flex-direction: column;
gap: 2px;
max-height: 400px;
overflow-y: auto;
padding: 4px 0;
}
.import-item {
display: flex;
align-items: center;
gap: 12px;
padding: 10px 16px;
border-radius: var(--md-radius-sm);
cursor: pointer;
transition: background var(--md-motion-standard);
}
.import-item:hover {
background: var(--md-surface-container);
}
.import-item-checkbox {
width: 18px;
height: 18px;
accent-color: var(--md-primary);
}
.import-item-info {
flex: 1;
min-width: 0;
}
.import-item-name {
font-size: 14px;
font-weight: 500;
color: var(--md-on-surface);
}
.import-item-code {
font-size: 12px;
color: var(--md-on-surface-variant);
}
.import-item-imported {
opacity: 0.5;
cursor: default;
}
.import-item-imported .import-item-checkbox {
pointer-events: none;
}
.imported-badge {
display: inline-flex;
align-items: center;
padding: 2px 6px;
border-radius: var(--md-radius-xs);
font-size: 10px;
font-weight: 600;
background: var(--md-surface-container-highest);
color: var(--md-on-surface-variant);
}
/* History table */
.history-section {
margin-top: 24px;
}
.history-section-title {
font-size: 16px;
font-weight: 600;
color: var(--md-on-surface);
margin-bottom: 12px;
display: flex;
align-items: center;
gap: 8px;
}
.history-table {
width: 100%;
border-collapse: collapse;
font-size: 13px;
}
.history-table th {
text-align: left;
padding: 8px 12px;
font-weight: 600;
color: var(--md-on-surface-variant);
border-bottom: 2px solid var(--md-outline-variant);
font-size: 12px;
text-transform: uppercase;
letter-spacing: 0.05em;
}
.history-table td {
padding: 8px 12px;
border-bottom: 1px solid var(--md-surface-container-high);
color: var(--md-on-surface);
vertical-align: top;
}
.history-table tr:hover td {
background: var(--md-surface-container-low);
}
.history-source-badge {
display: inline-flex;
align-items: center;
padding: 2px 6px;
border-radius: var(--md-radius-xs);
font-size: 10px;
font-weight: 600;
}
.history-source-local {
background: rgba(26, 115, 232, 0.1);
color: var(--md-primary);
}
.history-source-sync {
background: rgba(30, 142, 62, 0.1);
color: var(--md-success);
}
.history-old-value {
text-decoration: line-through;
color: var(--md-on-surface-variant);
opacity: 0.7;
}
.history-new-value {
font-weight: 500;
}
/* Sync result notification banner */
.sync-result-banner {
display: flex;
align-items: center;
gap: 12px;
padding: 12px 16px;
border-radius: var(--md-radius);
margin-bottom: 16px;
animation: modal-fade-in 200ms ease-out;
}
.sync-result-banner-success {
background: rgba(30, 142, 62, 0.08);
border: 1px solid rgba(30, 142, 62, 0.2);
color: var(--md-success);
}
.sync-result-banner-error {
background: rgba(217, 48, 37, 0.08);
border: 1px solid rgba(217, 48, 37, 0.2);
color: var(--md-error);
}
.sync-result-banner .material-symbols-outlined {
font-size: 20px;
}
.sync-result-banner-text {
flex: 1;
font-size: 13px;
font-weight: 500;
}
.sync-result-banner-close {
background: none;
border: none;
cursor: pointer;
color: inherit;
opacity: 0.6;
padding: 4px;
}
.sync-result-banner-close:hover {
opacity: 1;
}
/* Sync action buttons group */
.sync-btn-group {
display: flex;
align-items: center;
gap: 8px;
}
.btn-sync {
display: inline-flex;
align-items: center;
gap: 6px;
padding: 8px 16px;
border-radius: var(--md-radius-full);
font-size: 13px;
font-weight: 500;
border: 1px solid var(--md-outline-variant);
background: var(--md-surface);
color: var(--md-on-surface);
cursor: pointer;
transition: all var(--md-motion-standard);
}
.btn-sync:hover {
background: var(--md-surface-container);
border-color: var(--md-outline);
}
.btn-sync:disabled {
opacity: 0.5;
cursor: not-allowed;
}
.btn-sync .material-symbols-outlined {
font-size: 18px;
}
.btn-sync-primary {
background: var(--md-primary);
color: var(--md-on-primary);
border-color: var(--md-primary);
}
.btn-sync-primary:hover {
background: var(--md-primary-dark);
border-color: var(--md-primary-dark);
}
/* Sync spinner */
@keyframes sync-spin {
from { transform: rotate(0deg); }
to { transform: rotate(360deg); }
}
.sync-spinning .material-symbols-outlined {
animation: sync-spin 1s linear infinite;
}
/* Select all / deselect all in import modal */
.import-select-all {
display: flex;
align-items: center;
gap: 8px;
padding: 8px 16px;
font-size: 13px;
color: var(--md-primary);
font-weight: 500;
cursor: pointer;
border-bottom: 1px solid var(--md-outline-variant);
}
.import-summary {
padding: 8px 16px;
font-size: 12px;
color: var(--md-on-surface-variant);
border-top: 1px solid var(--md-outline-variant);
}
/* Load more button for history */
.history-load-more {
display: flex;
justify-content: center;
padding: 12px;
}
.history-load-more button {
padding: 8px 24px;
border-radius: var(--md-radius-full);
border: 1px solid var(--md-outline-variant);
background: var(--md-surface);
color: var(--md-primary);
font-size: 13px;
font-weight: 500;
cursor: pointer;
transition: background var(--md-motion-standard);
}
.history-load-more button:hover {
background: var(--md-surface-container);
}
/* Empty state for history */
.history-empty {
text-align: center;
padding: 32px;
color: var(--md-on-surface-variant);
font-size: 14px;
}
.history-empty .material-symbols-outlined {
font-size: 40px;
opacity: 0.4;
margin-bottom: 8px;
display: block;
}

View File

@@ -113,16 +113,24 @@ export function MachineList({ machines, tenantId, onEdit, onDelete, onBatchInspe
className="machine-card-body"
onClick={() => router.push(`/${tenantId}/machines/${machine.id}`)}
>
<div className="machine-card-top">
<div className="machine-card-icon">
<span className="material-symbols-outlined">precision_manufacturing</span>
<div className="machine-card-top">
<div className="machine-card-icon">
<span className="material-symbols-outlined">precision_manufacturing</span>
</div>
<div style={{ display: 'flex', gap: '4px' }}>
{machine.source === 'digital-twin' && (
<span className="sync-badge">
<span className="material-symbols-outlined">cloud</span>
</span>
)}
{machine.criticality && (
<span className={`machine-badge ${getBadgeClass(machine.criticality)}`}>
{getBadgeText(machine.criticality)}
</span>
)}
</div>
</div>
{machine.criticality && (
<span className={`machine-badge ${getBadgeClass(machine.criticality)}`}>
{getBadgeText(machine.criticality)}
</span>
)}
</div>
<div className="machine-card-info">
<h3 className="machine-card-name">{machine.name}</h3>

View File

@@ -1,6 +1,6 @@
import useSWR from 'swr';
import { fetcher, getTenantUrl } from './api';
import type { Tenant, Machine, MachineDetail, EquipmentPart, InspectionTemplate, InspectionSession, PartReplacementLog, Alarm, AlarmSummary } from './types';
import type { Tenant, Machine, MachineDetail, EquipmentPart, InspectionTemplate, InspectionSession, PartReplacementLog, Alarm, AlarmSummary, ChangeHistoryItem } from './types';
export function useTenants() {
const { data, error, isLoading, mutate } = useSWR<{ tenants: Tenant[] }>(
@@ -180,6 +180,23 @@ export function useInspections(tenantId?: string, status?: string, templateId?:
};
}
// SWR hook for a machine's field-change history (sync + local edits).
// Returns empty history/zero total while loading or when ids are missing.
export function useMachineHistory(tenantId?: string, machineId?: string) {
  // A null key disables fetching until both ids are available.
  const key =
    tenantId && machineId
      ? `/api/${tenantId}/machines/${machineId}/history`
      : null;

  const { data, error, isLoading, mutate } = useSWR<{ history: ChangeHistoryItem[]; total: number }>(
    key,
    fetcher,
    { dedupingInterval: 2000 },
  );

  const history = data?.history || [];
  const total = data?.total || 0;
  return { history, total, error, isLoading, mutate };
}
export function useInspection(tenantId?: string, inspectionId?: string) {
const url = tenantId && inspectionId ? `/api/${tenantId}/inspections/${inspectionId}` : null;
const { data, error, isLoading, mutate } = useSWR<InspectionSession>(

View File

@@ -37,6 +37,10 @@ export interface Machine {
rated_capacity: string | null;
power_rating: string | null;
description: string | null;
source: string | null;
external_id: string | null;
sync_version: number;
last_synced_at: string | null;
parts_count: number;
created_at: string | null;
updated_at: string | null;
@@ -72,6 +76,42 @@ export interface MachineDetail extends Machine {
parts: EquipmentPart[];
}
/** One row of a machine's field-change audit trail. */
export interface ChangeHistoryItem {
  id: string;
  field_name: string;
  old_value: string | null;
  new_value: string | null;
  /** 'sync' when applied by the digital-twin sync; otherwise a local edit. */
  change_source: string;
  /** User id for local edits; null for sync-originated changes. */
  changed_by: string | null;
  changed_at: string;
}
/** Result of POST /machines/sync: pull (remote→local) stats plus push counts. */
export interface SyncResult {
  pull: {
    synced_count: number;
    fields_updated: number;
    errors: string[];
  };
  push_count: number;
  push_errors: string[];
}
/** Result of POST /machines/import. */
export interface ImportResult {
  imported_count: number;
  skipped_count: number;
  errors: string[];
}
/** One remote digital-twin equipment entry in the import-preview modal. */
export interface ImportPreviewItem {
  /** Remote digital-twin equipment id (used as external_id on import). */
  id: string;
  equipmentName: string;
  equipmentId: string;
  model: string | null;
  manufacturer: string | null;
  location: string | null;
  /** True when a machine with this external_id already exists locally. */
  already_imported: boolean;
}
export interface InspectionTemplateItem {
id: string;
template_id: string;

59
docker-compose.dev.yml Normal file
View File

@@ -0,0 +1,59 @@
# Development compose file: backend API + Next.js dashboard with hot reload,
# attached to the digital-twin compose network so the API can reach the
# BaSyx backend container-to-container.
#
# NOTE(review): DB credentials, a public IP, and the JWT secret are committed
# in plain text below — tolerable for a dev-only file, but consider moving
# them to an untracked .env file.
version: '3.8'  # informational only; ignored by Docker Compose v2

services:
  api:
    build:
      context: .
      dockerfile: Containerfile.backend
    container_name: factoryops-dev-api
    restart: unless-stopped
    networks:
      - default
      - digital-twin
    ports:
      - "8000:8000"
    environment:
      - PYTHONPATH=/app
      - PYTHONUNBUFFERED=1
      # Remote DB (IDC deployment server)
      - DATABASE_URL=postgresql+asyncpg://factoryops:factoryops@211.115.91.141:8200/factoryops_v2
      - JWT_SECRET_KEY=factoryops-v2-dev-secret-key-2026
      - CORS_ORIGINS=http://localhost:3100,http://127.0.0.1:3100
      # Container-to-container URL on the shared digital-twin network
      - DIGITAL_TWIN_API_URL=http://digital-twin-web-backend:4001
    volumes:
      # Source mounts (read-only) -> hot reload
      - ./main.py:/app/main.py:ro
      - ./src:/app/src:ro
      - ./alembic:/app/alembic:ro
      - ./alembic.ini:/app/alembic.ini:ro
      - ./scripts:/app/scripts:ro
    # Run with uvicorn --reload (development only)
    command: ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8000", "--reload"]

  dashboard:
    image: node:20-alpine
    container_name: factoryops-dev-dashboard
    restart: unless-stopped
    working_dir: /app
    ports:
      - "3100:3100"
    environment:
      - NEXT_PUBLIC_API_URL=http://localhost:8000
      - NEXT_TELEMETRY_DISABLED=1
    volumes:
      - ./dashboard:/app
      # Named volume keeps container node_modules off the host bind mount
      - dashboard_node_modules:/app/node_modules
    # npm install, then start the dev server
    command: ["sh", "-c", "npm install && npm run dev"]
    depends_on:
      - api

volumes:
  dashboard_node_modules:

networks:
  default:
    name: factoryops-dev-network
  # Pre-existing network created by the digital-twin compose project
  digital-twin:
    external: true
    name: digital-twin-web_default

View File

@@ -20,6 +20,7 @@ from src.auth.password import hash_password
from src.tenant import manager as tenant_manager
from src.tenant.manager import TenantNotFoundError, InvalidTenantIdError
from src.api.machines import router as machines_router
from src.api.equipment_sync import router as equipment_sync_router
from src.api.equipment_parts import router as equipment_parts_router
from src.api.templates import router as templates_router
from src.api.inspections import router as inspections_router
@@ -53,6 +54,7 @@ app.add_middleware(
app.include_router(auth_router)
app.include_router(auth_admin_router)
app.include_router(machines_router)
app.include_router(equipment_sync_router)
app.include_router(equipment_parts_router)
app.include_router(templates_router)
app.include_router(inspections_router)

108
src/api/equipment_sync.py Normal file
View File

@@ -0,0 +1,108 @@
import os
from typing import Optional, List
from fastapi import APIRouter, HTTPException, Depends, Path
from pydantic import BaseModel
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
from src.database.config import get_db
from src.database.models import Machine
from src.auth.models import TokenData
from src.auth.dependencies import require_auth, verify_tenant_access
from src.services.equipment_sync import EquipmentSyncService
router = APIRouter(prefix="/api/{tenant_id}/machines", tags=["equipment-sync"])
class ImportRequest(BaseModel):
    """Body for POST /machines/import.

    ``external_ids``: remote digital-twin equipment ids to import. Handling
    of ``None`` is delegated to ``EquipmentSyncService.import_equipment``
    (presumably "import all" — confirm against the service).
    """
    external_ids: Optional[List[str]] = None
class ImportPreviewItem(BaseModel):
    """One remote equipment entry for the dashboard's import-preview modal.

    Field names are intentionally camelCase to mirror the frontend
    ``ImportPreviewItem`` TypeScript type.
    """
    id: str  # remote digital-twin equipment id (becomes external_id on import)
    equipmentName: str
    equipmentId: str  # equipment code from the remote payload
    model: Optional[str] = None
    manufacturer: Optional[str] = None
    location: Optional[str] = None
    already_imported: bool = False  # True when this id already exists locally
def _check_sync_configured() -> None:
    """Fail fast with HTTP 503 when digital-twin integration is disabled.

    The integration is considered enabled iff the DIGITAL_TWIN_API_URL
    environment variable is set (read on every call, not cached).
    """
    if os.getenv("DIGITAL_TWIN_API_URL"):
        return
    raise HTTPException(
        status_code=503,
        detail="DIGITAL_TWIN_API_URL이 설정되지 않았습니다. 디지털 트윈 연동이 비활성 상태입니다.",
    )
@router.post("/sync")
async def sync_machines(
tenant_id: str = Path(...),
current_user: TokenData = Depends(require_auth),
db: AsyncSession = Depends(get_db),
):
verify_tenant_access(tenant_id, current_user)
_check_sync_configured()
svc = EquipmentSyncService(db, tenant_id)
result = await svc.sync()
return result.model_dump()
@router.post("/import")
async def import_machines(
body: ImportRequest,
tenant_id: str = Path(...),
current_user: TokenData = Depends(require_auth),
db: AsyncSession = Depends(get_db),
):
verify_tenant_access(tenant_id, current_user)
_check_sync_configured()
svc = EquipmentSyncService(db, tenant_id)
result = await svc.import_equipment(body.external_ids)
return result.model_dump()
@router.get("/import/preview")
async def import_preview(
tenant_id: str = Path(...),
current_user: TokenData = Depends(require_auth),
db: AsyncSession = Depends(get_db),
):
verify_tenant_access(tenant_id, current_user)
_check_sync_configured()
svc = EquipmentSyncService(db, tenant_id)
remote_list = await svc.fetch_remote_equipment()
existing_stmt = select(Machine.external_id).where(
Machine.tenant_id == tenant_id,
Machine.source == "digital-twin",
Machine.external_id.isnot(None),
)
existing_ids = set()
result = await db.execute(existing_stmt)
for row in result.scalars().all():
existing_ids.add(str(row))
preview = []
for eq in remote_list:
remote_id = str(eq.get("id", ""))
if not remote_id:
continue
preview.append(
ImportPreviewItem(
id=remote_id,
equipmentName=eq.get("name", eq.get("equipmentName", "")),
equipmentId=eq.get("equipmentId", eq.get("equipment_code", "")),
model=eq.get("model"),
manufacturer=eq.get("manufacturer"),
location=eq.get("location"),
already_imported=remote_id in existing_ids,
)
)
return {"equipment": preview, "total": len(preview)}

View File

@@ -1,7 +1,7 @@
from typing import List, Optional
from uuid import UUID
from fastapi import APIRouter, HTTPException, Depends, Path
from fastapi import APIRouter, HTTPException, Depends, Path, Query
from pydantic import BaseModel
from sqlalchemy import select, func, update
from sqlalchemy.ext.asyncio import AsyncSession
@@ -10,6 +10,7 @@ from sqlalchemy.orm import selectinload
from src.database.config import get_db
from src.database.models import (
Machine,
MachineChangeHistory,
EquipmentPart,
InspectionTemplate,
InspectionTemplateItem,
@@ -66,6 +67,10 @@ class MachineResponse(BaseModel):
rated_capacity: Optional[str] = None
power_rating: Optional[str] = None
description: Optional[str] = None
source: Optional[str] = None
external_id: Optional[str] = None
sync_version: int = 0
last_synced_at: Optional[str] = None
parts_count: int = 0
created_at: Optional[str] = None
updated_at: Optional[str] = None
@@ -98,6 +103,10 @@ def _machine_to_response(m: Machine, parts_count: int = 0) -> MachineResponse:
rated_capacity=str(m.rated_capacity) if m.rated_capacity else None,
power_rating=str(m.power_rating) if m.power_rating else None,
description=str(m.description) if m.description else None,
source=str(m.source) if m.source else "local",
external_id=str(m.external_id) if m.external_id else None,
sync_version=int(m.sync_version) if m.sync_version else 0,
last_synced_at=_format_ts(m.last_synced_at),
parts_count=parts_count,
created_at=_format_ts(m.created_at),
updated_at=_format_ts(m.updated_at),
@@ -226,6 +235,10 @@ async def get_machine(
rated_capacity=str(machine.rated_capacity) if machine.rated_capacity else None,
power_rating=str(machine.power_rating) if machine.power_rating else None,
description=str(machine.description) if machine.description else None,
source=str(machine.source) if machine.source else "local",
external_id=str(machine.external_id) if machine.external_id else None,
sync_version=int(machine.sync_version) if machine.sync_version else 0,
last_synced_at=_format_ts(machine.last_synced_at),
parts_count=len(active_parts),
parts=parts_data,
created_at=_format_ts(machine.created_at),
@@ -253,6 +266,27 @@ async def update_machine(
if not machine:
raise HTTPException(status_code=404, detail="설비를 찾을 수 없습니다.")
changes: list[tuple[str, str | None, str | None]] = []
update_fields = body.model_dump(exclude_unset=True)
for field, new_val in update_fields.items():
if field == "criticality" and new_val not in VALID_CRITICALITIES:
raise HTTPException(
status_code=400,
detail=f"criticality는 {', '.join(VALID_CRITICALITIES)} 중 하나여야 합니다.",
)
old_raw = getattr(machine, field, None)
old_val = (
old_raw.isoformat()
if hasattr(old_raw, "isoformat")
else (str(old_raw) if old_raw is not None else None)
)
new_val_str = str(new_val) if new_val is not None else None
if old_val != new_val_str:
changes.append((field, old_val, new_val_str))
if body.name is not None:
machine.name = body.name
if body.equipment_code is not None:
@@ -275,11 +309,6 @@ async def update_machine(
if body.area is not None:
machine.area = body.area
if body.criticality is not None:
if body.criticality not in VALID_CRITICALITIES:
raise HTTPException(
status_code=400,
detail=f"criticality는 {', '.join(VALID_CRITICALITIES)} 중 하나여야 합니다.",
)
machine.criticality = body.criticality
if body.rated_capacity is not None:
machine.rated_capacity = body.rated_capacity
@@ -288,9 +317,39 @@ async def update_machine(
if body.description is not None:
machine.description = body.description
from datetime import datetime as dt_import, timezone as tz
now = dt_import.now(tz.utc)
for field_name, old_v, new_v in changes:
history = MachineChangeHistory(
tenant_id=tenant_id,
machine_id=machine.id,
field_name=field_name,
old_value=old_v,
new_value=new_v,
change_source="local",
changed_by=current_user.user_id,
changed_at=now,
)
db.add(history)
await db.commit()
await db.refresh(machine)
if machine.source == "digital-twin" and machine.external_id and changes:
try:
from src.services.equipment_sync import EquipmentSyncService
sync_svc = EquipmentSyncService(db, tenant_id)
await sync_svc.push_to_remote(machine.id)
except Exception:
import logging
logging.getLogger(__name__).warning(
f"Failed to push changes to digital-twin for machine {machine.id}",
exc_info=True,
)
return _machine_to_response(machine)
@@ -350,4 +409,64 @@ async def delete_machine(
await db.delete(machine)
await db.commit()
return {"status": "success", "message": "설비가 삭제되었습니다."}
response = {"status": "success", "message": "설비가 삭제되었습니다."}
if machine.source == "digital-twin":
response["warning"] = (
"디지털 트윈에서 가져온 설비가 삭제되었습니다. 다음 동기화 시 다시 가져올 수 있습니다."
)
return response
class ChangeHistoryItem(BaseModel):
    """Serialized machine change-history entry returned by the history endpoint."""

    id: str
    field_name: str
    old_value: Optional[str]
    new_value: Optional[str]
    change_source: str  # "local" (user edit) or "sync" (pulled from digital-twin)
    changed_by: Optional[str]
    changed_at: str  # ISO-formatted timestamp ("" if unset)
@router.get("/{machine_id}/history")
async def get_machine_history(
    tenant_id: str = Path(...),
    machine_id: UUID = Path(...),
    limit: int = Query(50, ge=1, le=200),
    offset: int = Query(0, ge=0),
    current_user: TokenData = Depends(require_auth),
    db: AsyncSession = Depends(get_db),
):
    """Return the paginated change history for one machine, newest first."""
    verify_tenant_access(tenant_id, current_user)

    # Total row count for pagination metadata.
    total_stmt = select(func.count(MachineChangeHistory.id)).where(
        MachineChangeHistory.tenant_id == tenant_id,
        MachineChangeHistory.machine_id == machine_id,
    )
    total = (await db.execute(total_stmt)).scalar() or 0

    # Requested page, most recent changes first.
    page_stmt = (
        select(MachineChangeHistory)
        .where(
            MachineChangeHistory.tenant_id == tenant_id,
            MachineChangeHistory.machine_id == machine_id,
        )
        .order_by(MachineChangeHistory.changed_at.desc())
        .offset(offset)
        .limit(limit)
    )
    entries = (await db.execute(page_stmt)).scalars().all()

    history = []
    for entry in entries:
        history.append(
            ChangeHistoryItem(
                id=str(entry.id),
                field_name=str(entry.field_name),
                old_value=str(entry.old_value) if entry.old_value else None,
                new_value=str(entry.new_value) if entry.new_value else None,
                change_source=str(entry.change_source),
                changed_by=str(entry.changed_by) if entry.changed_by else None,
                changed_at=_format_ts(entry.changed_at) or "",
            )
        )
    return {"history": history, "total": total}

View File

@@ -70,6 +70,10 @@ class Machine(Base):
rated_capacity = Column(String(100), nullable=True)
power_rating = Column(String(100), nullable=True)
description = Column(Text, nullable=True)
external_id = Column(String(100), nullable=True, index=True)
source = Column(String(20), default="local") # local | digital-twin
sync_version = Column(Integer, default=0)
last_synced_at = Column(TIMESTAMP(timezone=True), nullable=True)
created_at = Column(TIMESTAMP(timezone=True), default=utcnow)
updated_at = Column(TIMESTAMP(timezone=True), default=utcnow, onupdate=utcnow)
@@ -77,11 +81,46 @@ class Machine(Base):
parts = relationship(
"EquipmentPart", back_populates="machine", cascade="all, delete-orphan"
)
change_history = relationship(
"MachineChangeHistory",
back_populates="machine",
cascade="all, delete-orphan",
)
__table_args__ = (
Index("ix_machines_tenant_id", "tenant_id"),
Index("ix_machines_tenant_area", "tenant_id", "area"),
Index("ix_machines_tenant_criticality", "tenant_id", "criticality"),
UniqueConstraint(
"tenant_id", "external_id", name="uq_machines_tenant_external_id"
),
)
class MachineChangeHistory(Base):
    """Per-field audit log of machine changes (local edits and sync pulls)."""

    __tablename__ = "machine_change_history"
    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    tenant_id = Column(String(50), ForeignKey("tenants.id"), nullable=False)
    # Deleting a machine cascades its history rows away at the DB level.
    machine_id = Column(
        UUID(as_uuid=True),
        ForeignKey("machines.id", ondelete="CASCADE"),
        nullable=False,
    )
    field_name = Column(String(50), nullable=False)
    old_value = Column(Text, nullable=True)
    new_value = Column(Text, nullable=True)
    change_source = Column(String(20), nullable=False)  # local | sync
    # Nullable: sync-originated changes are recorded without a human actor.
    changed_by = Column(UUID(as_uuid=True), ForeignKey("users.id"), nullable=True)
    changed_at = Column(TIMESTAMP(timezone=True), nullable=False, default=utcnow)
    tenant = relationship("Tenant")
    machine = relationship("Machine", back_populates="change_history")
    changed_by_user = relationship("User")
    __table_args__ = (
        # Lookups are by (tenant, machine) for the detail page and by
        # (tenant, date) for chronological reporting.
        Index("ix_machine_change_history_tenant_machine", "tenant_id", "machine_id"),
        Index("ix_machine_change_history_tenant_date", "tenant_id", "changed_at"),
    )

View File

@@ -0,0 +1,370 @@
import os
import logging
from datetime import datetime, timezone
from typing import Optional
from uuid import UUID
import httpx
from pydantic import BaseModel
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
from src.database.models import Machine, MachineChangeHistory
logger = logging.getLogger(__name__)
# Machine columns owned by the digital-twin backend: a pull may overwrite
# these (last-writer-wins against the remote updatedAt timestamp).
UPSTREAM_FIELDS = {
    "name",
    "equipment_code",
    "model",
    "manufacturer",
    "installation_date",
    "location",
    "rated_capacity",
    "power_rating",
}
# Columns owned by factoryOps: never overwritten by a pull.
LOCAL_FIELDS = {"criticality", "area", "description"}
# Remote (camelCase) attribute name -> local Machine column name.
# NOTE(review): there is no "equipmentName" key here, but the import preview
# endpoint falls back to "equipmentName" when "name" is absent — remote
# payloads carrying only "equipmentName" would not map a name on import;
# confirm against the digital-twin response schema.
FIELD_MAPPING = {
    "name": "name",
    "equipmentId": "equipment_code",
    "model": "model",
    "manufacturer": "manufacturer",
    "installationDate": "installation_date",
    "location": "location",
    "capacity": "rated_capacity",
    "powerConsumption": "power_rating",
    "description": "description",
}
class ImportResult(BaseModel):
    """Outcome of an import run: counts plus per-item error messages."""

    imported_count: int = 0
    skipped_count: int = 0  # records skipped because they were already imported
    errors: list[str] = []


class PullResult(BaseModel):
    """Outcome of pulling remote updates into local machines."""

    synced_count: int = 0  # machines that had at least one field updated
    fields_updated: int = 0  # total fields changed across all machines
    errors: list[str] = []


class PushResult(BaseModel):
    """Outcome of pushing one machine's fields to the digital-twin."""

    success: bool = True
    fields_pushed: list[str] = []
    error: Optional[str] = None


class SyncResult(BaseModel):
    """Combined result of a bidirectional sync (pull then push)."""

    pull: PullResult
    push_count: int = 0  # machines pushed successfully
    push_errors: list[str] = []
class EquipmentSyncService:
    """Bidirectional equipment sync between factoryOps and the digital-twin API.

    Pull: remote values win for UPSTREAM_FIELDS when the remote record's
    timestamp is strictly newer than the local machine's updated_at.
    Push: mapped local field values are PUT back to the remote record.
    Every field change applied by a pull is recorded in
    machine_change_history with change_source="sync".
    """

    def __init__(self, db: AsyncSession, tenant_id: str):
        self.db = db
        self.tenant_id = tenant_id
        # Remote endpoint configuration comes from the environment; an empty
        # URL means sync is unconfigured and remote calls become no-ops.
        self.api_url = os.getenv("DIGITAL_TWIN_API_URL", "")
        self.api_key = os.getenv("DIGITAL_TWIN_API_KEY", "")

    def _get_headers(self) -> dict:
        """Build request headers; bearer auth is added only when a key is set."""
        headers = {"Content-Type": "application/json"}
        if self.api_key:
            headers["Authorization"] = f"Bearer {self.api_key}"
        return headers

    def _http_client(self) -> httpx.AsyncClient:
        """Create a fresh async HTTP client bound to the digital-twin base URL."""
        return httpx.AsyncClient(
            base_url=self.api_url,
            headers=self._get_headers(),
            timeout=30.0,
        )

    async def fetch_remote_equipment(self) -> list[dict]:
        """Fetch the remote equipment list; returns [] on any failure.

        Tolerates several response envelopes: a bare list, or nested
        {success, data: {equipment: [...]}} style wrappers.
        """
        if not self.api_url:
            return []
        try:
            async with self._http_client() as client:
                resp = await client.get("/api/v1/aas/equipment")
                resp.raise_for_status()
                data = resp.json()
                if isinstance(data, list):
                    return data
                if isinstance(data, dict):
                    # digital-twin response: { success, data: { equipment: [...] } }
                    inner = data.get("data", data)
                    if isinstance(inner, dict):
                        return inner.get("equipment", inner.get("data", []))
                    if isinstance(inner, list):
                        return inner
                return []
        except Exception as e:
            # Network/parse errors degrade to "no remote data" rather than raising.
            logger.error(f"Failed to fetch remote equipment: {e}")
            return []

    def _map_remote_to_local(self, remote: dict) -> dict:
        """Translate a remote record into local Machine column values.

        Values are stringified; present-but-falsy values (e.g. "") map to
        None. The first whitespace-separated token of `location` doubles as
        the local `area`.
        """
        mapped = {}
        for remote_key, local_key in FIELD_MAPPING.items():
            val = remote.get(remote_key)
            if val is not None:
                mapped[local_key] = str(val) if val else None
        location = remote.get("location", "")
        if location and " " in str(location):
            parts = str(location).split(" ")
            mapped["area"] = parts[0] if parts else None
        return mapped

    async def import_equipment(
        self, external_ids: Optional[list[str]] = None
    ) -> ImportResult:
        """Create local machines from remote records.

        When `external_ids` is given, only those remote ids are imported.
        Records whose external id already exists locally are counted as
        skipped. Commits once at the end if anything was imported.
        """
        result = ImportResult()
        remote_list = await self.fetch_remote_equipment()
        if not remote_list:
            result.errors.append("디지털 트윈에서 설비 데이터를 가져올 수 없습니다.")
            return result
        for eq in remote_list:
            remote_id = str(eq.get("id", ""))
            if not remote_id:
                continue
            if external_ids and remote_id not in external_ids:
                continue
            # Skip anything already imported for this tenant.
            existing = await self.db.execute(
                select(Machine).where(
                    Machine.tenant_id == self.tenant_id,
                    Machine.external_id == remote_id,
                )
            )
            if existing.scalar_one_or_none():
                result.skipped_count += 1
                continue
            try:
                mapped = self._map_remote_to_local(eq)
                # Parse the ISO installation date; unparseable values import as None.
                install_dt = None
                raw_date = mapped.get("installation_date")
                if raw_date:
                    try:
                        install_dt = datetime.fromisoformat(
                            raw_date.replace("Z", "+00:00")
                        )
                    except (ValueError, AttributeError):
                        pass
                machine = Machine(
                    tenant_id=self.tenant_id,
                    # NOTE(review): FIELD_MAPPING only maps a remote "name" key;
                    # records exposing only "equipmentName" fall back to this
                    # generated placeholder — confirm against the remote schema.
                    name=mapped.get("name", f"Equipment-{remote_id[:8]}"),
                    equipment_code=mapped.get("equipment_code", ""),
                    model=mapped.get("model"),
                    manufacturer=mapped.get("manufacturer"),
                    installation_date=install_dt,
                    location=mapped.get("location"),
                    area=mapped.get("area"),
                    criticality="major",  # default severity for imported equipment
                    rated_capacity=mapped.get("rated_capacity"),
                    power_rating=mapped.get("power_rating"),
                    source="digital-twin",
                    external_id=remote_id,
                    sync_version=1,
                    last_synced_at=datetime.now(timezone.utc),
                )
                self.db.add(machine)
                result.imported_count += 1
            except Exception as e:
                result.errors.append(f"설비 {remote_id}: {str(e)}")
        if result.imported_count > 0:
            await self.db.commit()
        return result

    async def pull_from_remote(self) -> PullResult:
        """Apply newer remote values to locally imported machines.

        Only UPSTREAM_FIELDS are considered. A differing field is applied
        unless the remote timestamp exists and is not newer than the local
        updated_at (last-writer-wins). Each applied field change is logged
        to machine_change_history with change_source="sync".
        """
        result = PullResult()
        remote_list = await self.fetch_remote_equipment()
        if not remote_list:
            return result
        remote_by_id = {str(eq.get("id", "")): eq for eq in remote_list}
        stmt = select(Machine).where(
            Machine.tenant_id == self.tenant_id,
            Machine.source == "digital-twin",
            Machine.external_id.isnot(None),
        )
        local_machines = (await self.db.execute(stmt)).scalars().all()
        for machine in local_machines:
            ext_id = str(machine.external_id)
            remote = remote_by_id.get(ext_id)
            if not remote:
                # Remote record disappeared; leave the local machine untouched.
                continue
            remote_updated = remote.get("updatedAt") or remote.get("updated_at")
            remote_dt = None
            if remote_updated:
                try:
                    remote_dt = datetime.fromisoformat(
                        str(remote_updated).replace("Z", "+00:00")
                    )
                except (ValueError, AttributeError):
                    pass
            mapped = self._map_remote_to_local(remote)
            fields_changed = 0
            for field in UPSTREAM_FIELDS:
                remote_val = mapped.get(field)
                if remote_val is None:
                    continue
                # NOTE(review): installation_date compares the local
                # isoformat() string to the raw remote value — differing
                # textual formats (e.g. trailing "Z") can flag spurious
                # diffs; confirm the remote date format.
                if field == "installation_date":
                    local_val = (
                        machine.installation_date.isoformat()
                        if machine.installation_date
                        else None
                    )
                    compare_remote = remote_val
                else:
                    local_val = str(getattr(machine, field, "") or "")
                    compare_remote = str(remote_val or "")
                if local_val == compare_remote:
                    continue
                # LWW guard: skip when the remote is not strictly newer.
                local_updated = machine.updated_at
                if remote_dt and local_updated and remote_dt <= local_updated:
                    continue
                history = MachineChangeHistory(
                    tenant_id=self.tenant_id,
                    machine_id=machine.id,
                    field_name=field,
                    old_value=str(local_val) if local_val else None,
                    new_value=str(remote_val) if remote_val else None,
                    change_source="sync",
                    changed_at=datetime.now(timezone.utc),
                )
                self.db.add(history)
                if field == "installation_date":
                    try:
                        setattr(
                            machine,
                            field,
                            datetime.fromisoformat(remote_val.replace("Z", "+00:00")),
                        )
                    except (ValueError, AttributeError):
                        pass
                else:
                    setattr(machine, field, remote_val)
                fields_changed += 1
            if fields_changed > 0:
                machine.sync_version = (machine.sync_version or 0) + 1
                machine.last_synced_at = datetime.now(timezone.utc)
                result.synced_count += 1
                result.fields_updated += fields_changed
        if result.synced_count > 0:
            await self.db.commit()
        return result

    async def push_to_remote(self, machine_id: UUID) -> PushResult:
        """PUT one machine's mapped field values to its remote record.

        Only machines with source == "digital-twin" and a non-empty
        external_id are pushable; criticality/area have no remote key and
        are never sent.
        """
        if not self.api_url:
            return PushResult(
                success=False, error="DIGITAL_TWIN_API_URL이 설정되지 않았습니다."
            )
        stmt = select(Machine).where(
            Machine.id == machine_id,
            Machine.tenant_id == self.tenant_id,
            Machine.source == "digital-twin",
        )
        machine = (await self.db.execute(stmt)).scalar_one_or_none()
        if not machine or not machine.external_id:
            return PushResult(success=False, error="동기화 대상 설비가 아닙니다.")
        payload = {}
        # Invert local-column -> remote-attribute for the outgoing payload.
        reverse_mapping = {v: k for k, v in FIELD_MAPPING.items()}
        pushed_fields = []
        for local_field in UPSTREAM_FIELDS | LOCAL_FIELDS:
            remote_key = reverse_mapping.get(local_field)
            if not remote_key:
                continue
            val = getattr(machine, local_field, None)
            if val is not None:
                # Dates serialize as ISO strings; everything else stringifies.
                if hasattr(val, "isoformat"):
                    payload[remote_key] = val.isoformat()
                else:
                    payload[remote_key] = str(val)
                pushed_fields.append(local_field)
        try:
            async with self._http_client() as client:
                resp = await client.put(
                    f"/api/v1/aas/equipment/{machine.external_id}",
                    json=payload,
                )
                resp.raise_for_status()
                return PushResult(success=True, fields_pushed=pushed_fields)
        except Exception as e:
            logger.error(f"Failed to push to digital-twin: {e}")
            return PushResult(success=False, error=str(e))

    async def sync(self) -> SyncResult:
        """Run a full bidirectional sync: pull first, then push every
        imported machine back to the remote (regardless of local changes)."""
        if not self.api_url:
            return SyncResult(
                pull=PullResult(errors=["DIGITAL_TWIN_API_URL이 설정되지 않았습니다."]),
            )
        pull_result = await self.pull_from_remote()
        push_count = 0
        push_errors: list[str] = []
        stmt = select(Machine).where(
            Machine.tenant_id == self.tenant_id,
            Machine.source == "digital-twin",
            Machine.external_id.isnot(None),
        )
        synced_machines = (await self.db.execute(stmt)).scalars().all()
        for machine in synced_machines:
            push_result = await self.push_to_remote(machine.id)
            if push_result.success:
                push_count += 1
            elif push_result.error:
                push_errors.append(f"{machine.name}: {push_result.error}")
        return SyncResult(
            pull=pull_result,
            push_count=push_count,
            push_errors=push_errors,
        )

    async def record_change(
        self,
        machine_id: UUID,
        field_name: str,
        old_value: Optional[str],
        new_value: Optional[str],
        change_source: str,
        changed_by: Optional[UUID] = None,
    ):
        """Add a change-history row to the session (caller commits)."""
        history = MachineChangeHistory(
            tenant_id=self.tenant_id,
            machine_id=machine_id,
            field_name=field_name,
            old_value=old_value,
            new_value=new_value,
            change_source=change_source,
            changed_by=changed_by,
            changed_at=datetime.now(timezone.utc),
        )
        self.db.add(history)

334
start-dev.sh Executable file
View File

@@ -0,0 +1,334 @@
#!/bin/bash
# FactoryOps v2 development environment start script (Podman)

# Terminal color definitions
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
CYAN='\033[0;36m'
NC='\033[0m'

# Move to the project root (the directory containing this script)
cd "$(dirname "$0")" || exit 1
COMPOSE_FILE="docker-compose.dev.yml"
PROJECT_NAME="factoryops-dev"
# Print usage help
show_help() {
    echo "사용법: $0 [OPTIONS]"
    echo ""
    echo "옵션:"
    echo " -l, --logs 로그 표시 모드 (기본: 백그라운드)"
    echo " -c, --clean 완전 초기화 (볼륨 포함 모든 리소스 삭제)"
    echo " -r, --rebuild 이미지 강제 재빌드"
    echo " -h, --help 도움말 표시"
    echo ""
    echo "실행 예시:"
    echo " $0 # 빠른 시작 (기존 이미지 재사용)"
    echo " $0 -l # 로그 표시 모드"
    echo " $0 -r # 이미지 재빌드"
    echo " $0 -c # 완전 초기화 후 시작"
    echo " $0 -c -r # 완전 초기화 + 이미지 재빌드"
    echo ""
    echo "서비스:"
    echo " PostgreSQL → localhost:5432"
    echo " FastAPI → localhost:8000 (uvicorn --reload)"
    echo " Next.js → localhost:3100 (npm run dev)"
}
# Verify that Podman is installed; exit with install hint otherwise
check_podman() {
    if ! command -v podman &> /dev/null; then
        echo -e "${RED}Podman이 설치되어 있지 않습니다.${NC}"
        echo "설치: brew install podman"
        exit 1
    fi
}
# Check Podman Machine state and attempt recovery (macOS only)
check_and_fix_podman() {
    echo -e "${BLUE}Podman 상태 확인 중...${NC}"
    # On Linux no Podman Machine is needed — a plain connectivity check suffices
    if [[ "$(uname -s)" != "Darwin" ]]; then
        if podman ps >/dev/null 2>&1; then
            echo -e "${GREEN} Podman 정상 작동${NC}"
            return 0
        else
            echo -e "${RED} Podman 연결 실패${NC}"
            exit 1
        fi
    fi
    # macOS: make sure a Podman Machine is running (init one if necessary)
    if ! podman machine list 2>/dev/null | grep -q "Running"; then
        echo -e "${YELLOW} Podman Machine이 실행되지 않았습니다. 시작 중...${NC}"
        podman machine start 2>/dev/null || {
            echo -e "${YELLOW} Podman Machine 초기화 중...${NC}"
            podman machine init --cpus 4 --memory 8192
            podman machine start
        }
        sleep 3
    fi
    # Connectivity test with up to 3 recovery attempts
    local max_attempts=3
    local attempt=1
    while [ $attempt -le $max_attempts ]; do
        if podman ps >/dev/null 2>&1; then
            echo -e "${GREEN} Podman 정상 작동${NC}"
            return 0
        fi
        echo -e "${YELLOW} 연결 실패. 복구 시도 ($attempt/$max_attempts)...${NC}"
        # Kill a potentially stuck gvproxy network helper before restarting
        pkill -9 -f "gvproxy" 2>/dev/null || true
        sleep 2
        podman machine stop 2>/dev/null || true
        sleep 2
        podman machine start 2>/dev/null || {
            # Last resort: recreate the default machine from scratch
            echo -e "${RED} Podman Machine 재시작 실패${NC}"
            podman machine rm -f podman-machine-default 2>/dev/null || true
            podman machine init --cpus 4 --memory 8192
            podman machine start
        }
        sleep 3
        attempt=$((attempt + 1))
    done
    echo -e "${RED}Podman 복구 실패. 수동으로 확인해주세요:${NC}"
    echo " podman machine stop && podman machine start"
    exit 1
}
# Remove only factoryops-dev resources (other projects are preserved)
cleanup_project_resources() {
    echo -e "${BLUE}FactoryOps 개발 리소스 정리 중...${NC}"
    echo ""
    # 1. Stop and remove running containers
    echo -e "${BLUE} [1/5] 컨테이너 중지 및 제거...${NC}"
    local containers=$(podman ps -aq --filter "name=factoryops-dev" 2>/dev/null || true)
    if [ -n "$containers" ]; then
        echo "$containers" | xargs -r podman stop -t 5 2>/dev/null || true
        echo "$containers" | xargs -r podman rm -f 2>/dev/null || true
        echo -e "${GREEN} 컨테이너 제거 완료${NC}"
    else
        echo -e "${GREEN} 제거할 컨테이너 없음${NC}"
    fi
    # 2. Remove pods
    echo -e "${BLUE} [2/5] Pod 정리...${NC}"
    local pods=$(podman pod ls --filter "name=factoryops" --format "{{.ID}}" 2>/dev/null || true)
    if [ -n "$pods" ]; then
        echo "$pods" | xargs -r podman pod rm -f 2>/dev/null || true
        echo -e "${GREEN} Pod 제거 완료${NC}"
    else
        echo -e "${GREEN} 제거할 Pod 없음${NC}"
    fi
    # 3. Remove networks
    echo -e "${BLUE} [3/5] 네트워크 정리...${NC}"
    local networks=$(podman network ls --format "{{.Name}}" 2>/dev/null | grep "factoryops-dev" || true)
    if [ -n "$networks" ]; then
        echo "$networks" | xargs -r podman network rm -f 2>/dev/null || true
        echo -e "${GREEN} 네트워크 제거 완료${NC}"
    else
        echo -e "${GREEN} 제거할 네트워크 없음${NC}"
    fi
    # 4. Remove volumes (this deletes DB data)
    echo -e "${BLUE} [4/5] 볼륨 정리...${NC}"
    local volumes=$(podman volume ls --format "{{.Name}}" 2>/dev/null | grep "factoryops" || true)
    if [ -n "$volumes" ]; then
        echo "$volumes" | xargs -r podman volume rm -f 2>/dev/null || true
        echo -e "${GREEN} 볼륨 제거 완료 (DB 데이터 포함)${NC}"
    else
        echo -e "${GREEN} 제거할 볼륨 없음${NC}"
    fi
    # 5. Remove images
    echo -e "${BLUE} [5/5] 이미지 정리...${NC}"
    local images=$(podman images --format "{{.ID}} {{.Repository}}" 2>/dev/null | grep "factoryops" | awk '{print $1}' || true)
    if [ -n "$images" ]; then
        echo "$images" | xargs -r podman rmi -f 2>/dev/null || true
        echo -e "${GREEN} 이미지 제거 완료${NC}"
    else
        echo -e "${GREEN} 제거할 이미지 없음${NC}"
    fi
    echo ""
    echo -e "${GREEN} 초기화 완료 (다른 프로젝트 리소스 보존됨)${NC}"
    echo ""
}
# Detect port conflicts and kill the processes holding the dev ports
check_and_free_ports() {
    echo -e "${BLUE}포트 사용 확인 중...${NC}"
    local ports=(8000 3100)
    local port_names=("FastAPI" "Next.js")
    local ports_freed=false
    for i in "${!ports[@]}"; do
        local port=${ports[$i]}
        local name=${port_names[$i]}
        if lsof -ti:$port >/dev/null 2>&1; then
            # Force-kill whatever owns the port so compose can bind it
            echo -e "${YELLOW} 포트 $port ($name) 사용 중 → 프로세스 종료${NC}"
            lsof -ti:$port | xargs kill -9 2>/dev/null || true
            ports_freed=true
            sleep 1
        fi
    done
    if [ "$ports_freed" = true ]; then
        echo -e "${GREEN} 포트 정리 완료${NC}"
        sleep 2
    else
        echo -e "${GREEN} 모든 포트 사용 가능${NC}"
    fi
}
# Wait until the dev services answer their health checks
wait_for_services() {
    echo ""
    echo -e "${BLUE}서비스 준비 대기 중...${NC}"
    # Wait for the FastAPI health endpoint.
    # Fix: max_wait was referenced before being assigned here, so the first
    # `[ $waited -lt $max_wait ]` test errored and the API wait was skipped.
    waited=0
    max_wait=60
    echo -ne "${BLUE} FastAPI "
    while [ $waited -lt $max_wait ]; do
        if curl -sf http://localhost:8000/api/health >/dev/null 2>&1; then
            echo -e " ${GREEN}ready${NC}"
            break
        fi
        echo -n "."
        sleep 2
        waited=$((waited + 2))
    done
    if [ $waited -ge $max_wait ]; then
        echo -e " ${YELLOW}timeout (로그를 확인하세요)${NC}"
    fi
    # Wait for Next.js (npm install can take a while on first boot)
    waited=0
    max_wait=120
    echo -ne "${BLUE} Next.js "
    while [ $waited -lt $max_wait ]; do
        if curl -sf http://localhost:3100 >/dev/null 2>&1; then
            echo -e " ${GREEN}ready${NC}"
            break
        fi
        echo -n "."
        sleep 3
        waited=$((waited + 3))
    done
    if [ $waited -ge $max_wait ]; then
        echo -e " ${YELLOW}아직 준비 중 (npm install 진행 중일 수 있음)${NC}"
    fi
}
# ─── Argument parsing ───
SHOW_LOGS=false
FULL_CLEAN=false
FORCE_REBUILD=false
while [[ $# -gt 0 ]]; do
    case $1 in
        -l|--logs) SHOW_LOGS=true; shift ;;
        -c|--clean) FULL_CLEAN=true; shift ;;
        -r|--rebuild) FORCE_REBUILD=true; shift ;;
        -h|--help) show_help; exit 0 ;;
        *) echo "알 수 없는 옵션: $1"; show_help; exit 1 ;;
    esac
done
# ─── Execution start ───
echo -e "${CYAN}"
echo "================================================="
echo " FactoryOps v2 개발 환경 시작"
echo "================================================="
echo -e "${NC}"
check_podman
check_and_fix_podman
# Full reset (-c) or quick start reusing existing resources
if [ "$FULL_CLEAN" = true ]; then
    echo -e "${YELLOW}완전 초기화 모드 (-c)${NC}"
    echo -e "${YELLOW} factoryops 관련 리소스만 삭제됩니다 (볼륨/DB 포함)${NC}"
    echo ""
    cleanup_project_resources
else
    echo -e "${GREEN}빠른 시작 모드 (기존 리소스 재사용)${NC}"
    echo -e "${BLUE} 완전 초기화가 필요하면: $0 -c${NC}"
    echo ""
    # Only stop containers that are currently running
    running=$(podman ps -q --filter "name=factoryops-dev" 2>/dev/null || true)
    if [ -n "$running" ]; then
        echo -e "${YELLOW} 기존 컨테이너 중지 중...${NC}"
        echo "$running" | xargs -r podman stop -t 5 2>/dev/null || true
        echo -e "${GREEN} 기존 컨테이너 중지 완료${NC}"
    fi
    echo ""
fi
check_and_free_ports
# Build & run options
BUILD_OPTIONS=""
RUN_OPTIONS=""
if [ "$FORCE_REBUILD" = true ]; then
    BUILD_OPTIONS="--build --no-cache"
    echo -e "${BLUE}이미지 재빌드 모드 활성화 (-r)${NC}"
else
    echo -e "${GREEN}기존 이미지 재사용 (재빌드: $0 -r)${NC}"
fi
if [ "$SHOW_LOGS" = false ]; then
    RUN_OPTIONS="-d"
fi
echo ""
echo -e "${BLUE}서비스 시작 중...${NC}"
# Run compose, filtering noisy WARNING lines from the output
podman compose -f "$COMPOSE_FILE" up $RUN_OPTIONS $BUILD_OPTIONS 2>&1 | \
    grep -v "WARNING" || true
if [ "$SHOW_LOGS" = false ]; then
    wait_for_services
fi
echo ""
echo -e "${CYAN}"
echo "================================================="
echo " FactoryOps v2 개발 환경 시작 완료"
echo "================================================="
echo -e "${NC}"
echo " 서비스 접속:"
echo " Frontend http://localhost:3100"
echo " Backend http://localhost:8000"
echo " API Docs http://localhost:8000/docs"
echo " Database 211.115.91.141:8200 (원격 DB)"
echo ""
if [ "$SHOW_LOGS" = false ]; then
    echo " 로그 확인:"
    echo " podman compose -f $COMPOSE_FILE logs -f"
    echo " podman compose -f $COMPOSE_FILE logs -f api"
    echo " podman compose -f $COMPOSE_FILE logs -f dashboard"
    echo ""
fi
echo " 중지:"
echo " ./stop-dev.sh"
echo ""
71
stop-dev.sh Executable file
View File

@@ -0,0 +1,71 @@
#!/bin/bash
# FactoryOps v2 development environment stop script (Podman)

# Terminal color definitions
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
CYAN='\033[0;36m'
NC='\033[0m'

# Move to the project root (the directory containing this script)
cd "$(dirname "$0")" || exit 1
COMPOSE_FILE="docker-compose.dev.yml"

echo -e "${CYAN}"
echo "================================================="
echo " FactoryOps v2 개발 환경 중지"
echo "================================================="
echo -e "${NC}"

# Verify that Podman is installed
if ! command -v podman &> /dev/null; then
    echo -e "${RED}Podman이 설치되어 있지 않습니다.${NC}"
    exit 1
fi

# List the running factoryops-dev containers (exit early if none)
running=$(podman ps --format "{{.Names}}" 2>/dev/null | grep "factoryops-dev" || true)
if [ -z "$running" ]; then
    echo -e "${YELLOW}실행 중인 FactoryOps 컨테이너가 없습니다.${NC}"
    exit 0
fi

echo -e "${YELLOW}중지할 컨테이너:${NC}"
echo "$running" | while read -r name; do
    echo " - $name"
done
echo ""

# Bring the compose stack down (volumes are kept)
echo -e "${BLUE}서비스 중지 중...${NC}"
podman compose -f "$COMPOSE_FILE" down 2>/dev/null || true

echo ""
echo -e "${CYAN}"
echo "================================================="
echo " FactoryOps v2 개발 환경 중지 완료"
echo "================================================="
echo -e "${NC}"

# Report any containers that survived the shutdown
remaining=$(podman ps -a --format "{{.Names}}" 2>/dev/null | grep "factoryops-dev" || true)
if [ -n "$remaining" ]; then
    echo -e "${YELLOW}아직 남아있는 컨테이너:${NC}"
    echo "$remaining"
    echo ""
    echo -e "${YELLOW}완전히 제거하려면:${NC}"
    echo " podman compose -f $COMPOSE_FILE down --volumes"
else
    echo -e "${GREEN}모든 컨테이너가 정상 중지되었습니다.${NC}"
fi
echo ""
echo " 다시 시작하려면: ./start-dev.sh"
echo " 완전 초기화: ./start-dev.sh -c"
echo ""

View File

@@ -0,0 +1,706 @@
import os
import json
from datetime import datetime, timezone
from unittest.mock import AsyncMock, patch, MagicMock
from uuid import uuid4
import pytest
from httpx import AsyncClient
from tests.conftest import get_auth_headers
# Two canned records in the digital-twin response shape (camelCase keys)
# used to stub EquipmentSyncService.fetch_remote_equipment in these tests.
MOCK_REMOTE_EQUIPMENT = [
    {
        "id": "dt-001",
        "equipmentName": "Press Machine 1",
        "equipmentId": "PM-001",
        "model": "XM-200",
        "manufacturer": "TestCorp",
        "installationDate": "2024-01-15T00:00:00Z",
        "location": "Bay 1",
        "capacity": "200t",
        "powerConsumption": "50kW",
        "updatedAt": "2025-01-01T00:00:00Z",
    },
    {
        "id": "dt-002",
        "equipmentName": "Press Machine 2",
        "equipmentId": "PM-002",
        "model": "XM-300",
        "manufacturer": "TestCorp",
        "installationDate": "2024-06-01T00:00:00Z",
        "location": "Bay 2",
        "capacity": "300t",
        "powerConsumption": "75kW",
        "updatedAt": "2025-01-01T00:00:00Z",
    },
]
@pytest.mark.asyncio
async def test_import_preview(client: AsyncClient, seeded_db):
    """Preview lists all remote records with already_imported=False when none exist locally."""
    headers = await get_auth_headers(client)
    with patch(
        "src.services.equipment_sync.EquipmentSyncService.fetch_remote_equipment",
        new_callable=AsyncMock,
    ) as mock_fetch:
        mock_fetch.return_value = MOCK_REMOTE_EQUIPMENT
        with patch.dict(os.environ, {"DIGITAL_TWIN_API_URL": "http://mock-api"}):
            resp = await client.get(
                "/api/test-co/machines/import/preview", headers=headers
            )
    assert resp.status_code == 200
    data = resp.json()
    assert "equipment" in data
    assert data["total"] == 2
    assert len(data["equipment"]) == 2
    assert data["equipment"][0]["id"] == "dt-001"
    # NOTE(review): these keys assume ImportPreviewItem serializes as
    # "name"/"equipment_code"; the preview endpoint populates fields named
    # equipmentName/equipmentId — confirm the model defines matching aliases.
    assert data["equipment"][0]["name"] == "Press Machine 1"
    assert data["equipment"][0]["equipment_code"] == "PM-001"
    assert data["equipment"][0]["already_imported"] is False
@pytest.mark.asyncio
async def test_import_preview_marks_existing_as_imported(
    client: AsyncClient, seeded_db
):
    """A machine already linked to a remote id is flagged already_imported=True."""
    headers = await get_auth_headers(client)
    create_resp = await client.post(
        "/api/test-co/machines",
        json={"name": "Press Machine 1", "equipment_code": "PM-001"},
        headers=headers,
    )
    machine_id = create_resp.json()["id"]
    # Manually link the machine to remote id dt-001, as an import would have.
    from sqlalchemy import select
    from src.database.models import Machine
    stmt = select(Machine).where(Machine.id == machine_id)
    machine = (await seeded_db.execute(stmt)).scalar_one()
    machine.external_id = "dt-001"
    machine.source = "digital-twin"
    await seeded_db.commit()
    with patch(
        "src.services.equipment_sync.EquipmentSyncService.fetch_remote_equipment",
        new_callable=AsyncMock,
    ) as mock_fetch:
        mock_fetch.return_value = MOCK_REMOTE_EQUIPMENT
        with patch.dict(os.environ, {"DIGITAL_TWIN_API_URL": "http://mock-api"}):
            resp = await client.get(
                "/api/test-co/machines/import/preview", headers=headers
            )
    assert resp.status_code == 200
    data = resp.json()
    # dt-001 is linked, dt-002 is not.
    assert data["equipment"][0]["already_imported"] is True
    assert data["equipment"][1]["already_imported"] is False
@pytest.mark.asyncio
async def test_import_equipment(client: AsyncClient, seeded_db):
    """Importing everything creates local machines tagged with their remote ids."""
    headers = await get_auth_headers(client)
    with patch(
        "src.services.equipment_sync.EquipmentSyncService.fetch_remote_equipment",
        new_callable=AsyncMock,
        return_value=MOCK_REMOTE_EQUIPMENT,
    ), patch.dict(os.environ, {"DIGITAL_TWIN_API_URL": "http://mock-api"}):
        resp = await client.post(
            "/api/test-co/machines/import",
            json={},
            headers=headers,
        )
    assert resp.status_code == 200
    payload = resp.json()
    assert payload["imported_count"] == 2
    assert payload["skipped_count"] == 0
    assert len(payload["errors"]) == 0
    # Both imported machines are visible and carry digital-twin provenance.
    list_resp = await client.get("/api/test-co/machines", headers=headers)
    machines = list_resp.json()["machines"]
    assert len(machines) == 2
    assert machines[0]["source"] == "digital-twin"
    assert machines[0]["external_id"] == "dt-001"
@pytest.mark.asyncio
async def test_import_equipment_with_specific_ids(client: AsyncClient, seeded_db):
    """Passing external_ids restricts the import to the selected records only."""
    headers = await get_auth_headers(client)
    with patch(
        "src.services.equipment_sync.EquipmentSyncService.fetch_remote_equipment",
        new_callable=AsyncMock,
        return_value=MOCK_REMOTE_EQUIPMENT,
    ), patch.dict(os.environ, {"DIGITAL_TWIN_API_URL": "http://mock-api"}):
        resp = await client.post(
            "/api/test-co/machines/import",
            json={"external_ids": ["dt-001"]},
            headers=headers,
        )
    assert resp.status_code == 200
    payload = resp.json()
    assert payload["imported_count"] == 1
    assert payload["skipped_count"] == 0
    # Only the selected remote record was materialized locally.
    list_resp = await client.get("/api/test-co/machines", headers=headers)
    machines = list_resp.json()["machines"]
    assert len(machines) == 1
    assert machines[0]["name"] == "Press Machine 1"
@pytest.mark.asyncio
async def test_import_skips_duplicates(client: AsyncClient, seeded_db):
    """Re-importing the same remote records counts them as skipped, not imported."""
    headers = await get_auth_headers(client)
    # First import: everything lands.
    with patch(
        "src.services.equipment_sync.EquipmentSyncService.fetch_remote_equipment",
        new_callable=AsyncMock,
    ) as mock_fetch:
        mock_fetch.return_value = MOCK_REMOTE_EQUIPMENT
        with patch.dict(os.environ, {"DIGITAL_TWIN_API_URL": "http://mock-api"}):
            resp1 = await client.post(
                "/api/test-co/machines/import",
                json={},
                headers=headers,
            )
    assert resp1.status_code == 200
    assert resp1.json()["imported_count"] == 2
    # Second import of the same data: all records skipped as duplicates.
    with patch(
        "src.services.equipment_sync.EquipmentSyncService.fetch_remote_equipment",
        new_callable=AsyncMock,
    ) as mock_fetch:
        mock_fetch.return_value = MOCK_REMOTE_EQUIPMENT
        with patch.dict(os.environ, {"DIGITAL_TWIN_API_URL": "http://mock-api"}):
            resp2 = await client.post(
                "/api/test-co/machines/import",
                json={},
                headers=headers,
            )
    assert resp2.status_code == 200
    assert resp2.json()["imported_count"] == 0
    assert resp2.json()["skipped_count"] == 2
@pytest.mark.asyncio
async def test_sync_endpoint(client: AsyncClient, seeded_db):
    """Sync pulls a newer remote field change and reports pull/push counts."""
    headers = await get_auth_headers(client)
    # Seed local machines by importing the baseline remote data.
    with patch(
        "src.services.equipment_sync.EquipmentSyncService.fetch_remote_equipment",
        new_callable=AsyncMock,
    ) as mock_fetch:
        mock_fetch.return_value = MOCK_REMOTE_EQUIPMENT
        with patch.dict(os.environ, {"DIGITAL_TWIN_API_URL": "http://mock-api"}):
            await client.post(
                "/api/test-co/machines/import",
                json={},
                headers=headers,
            )
    # Remote now reports a newer name + model for dt-001 (later updatedAt).
    updated_equipment = [
        {
            "id": "dt-001",
            "equipmentName": "Press Machine 1 Updated",
            "equipmentId": "PM-001",
            "model": "XM-200-V2",
            "manufacturer": "TestCorp",
            "installationDate": "2024-01-15T00:00:00Z",
            "location": "Bay 1",
            "capacity": "200t",
            "powerConsumption": "50kW",
            "updatedAt": "2025-02-01T00:00:00Z",
        },
    ]
    with patch(
        "src.services.equipment_sync.EquipmentSyncService.fetch_remote_equipment",
        new_callable=AsyncMock,
    ) as mock_fetch:
        mock_fetch.return_value = updated_equipment
        # Stub the push leg so no real HTTP PUT is attempted.
        with patch(
            "src.services.equipment_sync.EquipmentSyncService.push_to_remote",
            new_callable=AsyncMock,
        ) as mock_push:
            from src.services.equipment_sync import PushResult
            mock_push.return_value = PushResult(success=True, fields_pushed=["name"])
            with patch.dict(os.environ, {"DIGITAL_TWIN_API_URL": "http://mock-api"}):
                resp = await client.post(
                    "/api/test-co/machines/sync",
                    headers=headers,
                )
    assert resp.status_code == 200
    data = resp.json()
    assert "pull" in data
    assert "push_count" in data
    # Exactly one machine changed, with one field updated (model; the remote
    # "equipmentName" key is not in FIELD_MAPPING, so name is untouched).
    assert data["pull"]["synced_count"] == 1
    assert data["pull"]["fields_updated"] == 1
@pytest.mark.asyncio
async def test_machine_history_endpoint(client: AsyncClient, seeded_db):
    """Updating a machine yields retrievable field-level history entries."""
    headers = await get_auth_headers(client)
    created = await client.post(
        "/api/test-co/machines",
        json={"name": "Test Machine", "equipment_code": "TM-001"},
        headers=headers,
    )
    mid = created.json()["id"]
    await client.put(
        f"/api/test-co/machines/{mid}",
        json={"name": "Test Machine Updated"},
        headers=headers,
    )
    resp = await client.get(f"/api/test-co/machines/{mid}/history", headers=headers)
    assert resp.status_code == 200
    body = resp.json()
    assert "history" in body
    assert "total" in body
    assert body["total"] >= 1
    assert any(entry["field_name"] == "name" for entry in body["history"])
@pytest.mark.asyncio
async def test_machine_history_pagination(client: AsyncClient, seeded_db):
    """History endpoint honours limit/offset while still reporting the full total."""
    headers = await get_auth_headers(client)
    created = await client.post(
        "/api/test-co/machines",
        json={"name": "Test Machine", "equipment_code": "TM-001"},
        headers=headers,
    )
    mid = created.json()["id"]
    # Generate five separate history entries on the same field.
    for n in range(5):
        await client.put(
            f"/api/test-co/machines/{mid}",
            json={"description": f"Update {n}"},
            headers=headers,
        )
    resp = await client.get(
        f"/api/test-co/machines/{mid}/history?limit=2&offset=0",
        headers=headers,
    )
    assert resp.status_code == 200
    page = resp.json()
    assert len(page["history"]) == 2
    assert page["total"] >= 5
@pytest.mark.asyncio
async def test_update_machine_records_history(client: AsyncClient, seeded_db):
    """Each changed field of a PUT is recorded with old/new values and a source."""
    headers = await get_auth_headers(client)
    created = await client.post(
        "/api/test-co/machines",
        json={"name": "Original Name", "equipment_code": "OM-001"},
        headers=headers,
    )
    mid = created.json()["id"]
    updated = await client.put(
        f"/api/test-co/machines/{mid}",
        json={
            "name": "Updated Name",
            "equipment_code": "UM-001",
            "model": "Model-X",
        },
        headers=headers,
    )
    assert updated.status_code == 200

    history_resp = await client.get(
        f"/api/test-co/machines/{mid}/history", headers=headers
    )
    entries = history_resp.json()["history"]
    assert len(entries) >= 3

    name_entry = next((e for e in entries if e["field_name"] == "name"), None)
    assert name_entry is not None
    assert name_entry["old_value"] == "Original Name"
    assert name_entry["new_value"] == "Updated Name"
    assert name_entry["change_source"] == "local"

    code_entry = next(
        (e for e in entries if e["field_name"] == "equipment_code"), None
    )
    assert code_entry is not None
    assert code_entry["old_value"] == "OM-001"
    assert code_entry["new_value"] == "UM-001"
@pytest.mark.asyncio
async def test_sync_without_config_returns_503(client: AsyncClient, seeded_db):
    """Sync is unavailable (503) when DIGITAL_TWIN_API_URL is not configured."""
    headers = await get_auth_headers(client)
    # patch.dict snapshots os.environ, so the pop below is undone on exit.
    with patch.dict(os.environ, {}, clear=False):
        os.environ.pop("DIGITAL_TWIN_API_URL", None)
        resp = await client.post("/api/test-co/machines/sync", headers=headers)
    assert resp.status_code == 503
    assert "DIGITAL_TWIN_API_URL" in resp.json()["detail"]
@pytest.mark.asyncio
async def test_import_without_config_returns_503(client: AsyncClient, seeded_db):
    """Import is unavailable (503) when DIGITAL_TWIN_API_URL is not configured."""
    headers = await get_auth_headers(client)
    # patch.dict snapshots os.environ, so the pop below is undone on exit.
    with patch.dict(os.environ, {}, clear=False):
        os.environ.pop("DIGITAL_TWIN_API_URL", None)
        resp = await client.post(
            "/api/test-co/machines/import", json={}, headers=headers
        )
    assert resp.status_code == 503
    assert "DIGITAL_TWIN_API_URL" in resp.json()["detail"]
@pytest.mark.asyncio
async def test_import_preview_without_config_returns_503(
    client: AsyncClient, seeded_db
):
    """Import preview is unavailable (503) without DIGITAL_TWIN_API_URL."""
    headers = await get_auth_headers(client)
    # patch.dict snapshots os.environ, so the pop below is undone on exit.
    with patch.dict(os.environ, {}, clear=False):
        os.environ.pop("DIGITAL_TWIN_API_URL", None)
        resp = await client.get(
            "/api/test-co/machines/import/preview", headers=headers
        )
    assert resp.status_code == 503
    assert "DIGITAL_TWIN_API_URL" in resp.json()["detail"]
@pytest.mark.asyncio
async def test_tenant_isolation_sync(client: AsyncClient, seeded_db):
    """Equipment imported into one tenant stays invisible to another tenant."""
    headers = await get_auth_headers(client)
    fetch_patch = patch(
        "src.services.equipment_sync.EquipmentSyncService.fetch_remote_equipment",
        new_callable=AsyncMock,
    )
    with fetch_patch as fake_fetch, patch.dict(
        os.environ, {"DIGITAL_TWIN_API_URL": "http://mock-api"}
    ):
        fake_fetch.return_value = MOCK_REMOTE_EQUIPMENT
        imported = await client.post(
            "/api/test-co/machines/import", json={}, headers=headers
        )
    assert imported.status_code == 200
    assert imported.json()["imported_count"] == 2

    test_co = await client.get("/api/test-co/machines", headers=headers)
    other_co = await client.get("/api/other-co/machines", headers=headers)
    assert len(test_co.json()["machines"]) == 2
    assert len(other_co.json()["machines"]) == 0
@pytest.mark.asyncio
async def test_sync_lww_logic_remote_wins(client: AsyncClient, seeded_db):
    """When the remote edit is newer than the local one, sync applies it (LWW)."""
    headers = await get_auth_headers(client)
    env = {"DIGITAL_TWIN_API_URL": "http://mock-api"}
    fetch_target = (
        "src.services.equipment_sync.EquipmentSyncService.fetch_remote_equipment"
    )

    with patch(fetch_target, new_callable=AsyncMock) as seed_fetch, patch.dict(
        os.environ, env
    ):
        seed_fetch.return_value = MOCK_REMOTE_EQUIPMENT
        await client.post(
            "/api/test-co/machines/import",
            json={"external_ids": ["dt-001"]},
            headers=headers,
        )

    listing = await client.get("/api/test-co/machines", headers=headers)
    mid = listing.json()["machines"][0]["id"]
    # Local edit first; the remote payload below carries a later updatedAt.
    await client.put(
        f"/api/test-co/machines/{mid}",
        json={"name": "Local Update"},
        headers=headers,
    )

    newer_remote = [
        {
            "id": "dt-001",
            "equipmentName": "Remote Update",
            "equipmentId": "PM-001",
            "model": "XM-200",
            "manufacturer": "TestCorp",
            "installationDate": "2024-01-15T00:00:00Z",
            "location": "Bay 1",
            "capacity": "200t",
            "powerConsumption": "50kW",
            "updatedAt": "2025-02-11T12:00:00Z",
        },
    ]
    with patch(fetch_target, new_callable=AsyncMock) as sync_fetch, patch.dict(
        os.environ, env
    ):
        sync_fetch.return_value = newer_remote
        synced = await client.post("/api/test-co/machines/sync", headers=headers)
    assert synced.status_code == 200
    assert synced.json()["pull"]["synced_count"] == 1

    refreshed = await client.get(f"/api/test-co/machines/{mid}", headers=headers)
    assert refreshed.json()["name"] == "Remote Update"
@pytest.mark.asyncio
async def test_sync_lww_logic_local_wins(client: AsyncClient, seeded_db):
    """When the local edit is newer than the remote one, sync keeps it (LWW)."""
    headers = await get_auth_headers(client)
    env = {"DIGITAL_TWIN_API_URL": "http://mock-api"}
    fetch_target = (
        "src.services.equipment_sync.EquipmentSyncService.fetch_remote_equipment"
    )

    with patch(fetch_target, new_callable=AsyncMock) as seed_fetch, patch.dict(
        os.environ, env
    ):
        seed_fetch.return_value = MOCK_REMOTE_EQUIPMENT
        await client.post(
            "/api/test-co/machines/import",
            json={"external_ids": ["dt-001"]},
            headers=headers,
        )

    listing = await client.get("/api/test-co/machines", headers=headers)
    mid = listing.json()["machines"][0]["id"]
    # Local edit happens after the remote payload's updatedAt below.
    await client.put(
        f"/api/test-co/machines/{mid}",
        json={"name": "Local Update"},
        headers=headers,
    )

    stale_remote = [
        {
            "id": "dt-001",
            "equipmentName": "Remote Old",
            "equipmentId": "PM-001",
            "model": "XM-200",
            "manufacturer": "TestCorp",
            "installationDate": "2024-01-15T00:00:00Z",
            "location": "Bay 1",
            "capacity": "200t",
            "powerConsumption": "50kW",
            "updatedAt": "2025-01-01T00:00:00Z",
        },
    ]
    with patch(fetch_target, new_callable=AsyncMock) as sync_fetch, patch.dict(
        os.environ, env
    ):
        sync_fetch.return_value = stale_remote
        synced = await client.post("/api/test-co/machines/sync", headers=headers)
    assert synced.status_code == 200
    assert synced.json()["pull"]["synced_count"] == 0

    refreshed = await client.get(f"/api/test-co/machines/{mid}", headers=headers)
    assert refreshed.json()["name"] == "Local Update"
@pytest.mark.asyncio
async def test_unauthenticated_import_preview(client: AsyncClient, seeded_db):
    """Import preview rejects requests without an auth header."""
    response = await client.get("/api/test-co/machines/import/preview")
    assert response.status_code == 401
@pytest.mark.asyncio
async def test_unauthenticated_import(client: AsyncClient, seeded_db):
    """Import rejects requests without an auth header."""
    response = await client.post("/api/test-co/machines/import", json={})
    assert response.status_code == 401
@pytest.mark.asyncio
async def test_unauthenticated_sync(client: AsyncClient, seeded_db):
    """Sync rejects requests without an auth header."""
    response = await client.post("/api/test-co/machines/sync")
    assert response.status_code == 401
@pytest.mark.asyncio
async def test_tenant_access_denied_import_preview(client: AsyncClient, seeded_db):
    """A test-co user cannot preview imports for another tenant (403)."""
    headers = await get_auth_headers(client, email="admin@test-co.com")
    response = await client.get(
        "/api/other-co/machines/import/preview", headers=headers
    )
    assert response.status_code == 403
@pytest.mark.asyncio
async def test_tenant_access_denied_import(client: AsyncClient, seeded_db):
    """A test-co user cannot import into another tenant (403)."""
    headers = await get_auth_headers(client, email="admin@test-co.com")
    response = await client.post(
        "/api/other-co/machines/import", json={}, headers=headers
    )
    assert response.status_code == 403
@pytest.mark.asyncio
async def test_tenant_access_denied_sync(client: AsyncClient, seeded_db):
    """A test-co user cannot trigger sync for another tenant (403)."""
    headers = await get_auth_headers(client, email="admin@test-co.com")
    response = await client.post("/api/other-co/machines/sync", headers=headers)
    assert response.status_code == 403
@pytest.mark.asyncio
async def test_history_endpoint_empty_machine(client: AsyncClient, seeded_db):
    """A freshly created, never-updated machine has an empty change history."""
    headers = await get_auth_headers(client)
    created = await client.post(
        "/api/test-co/machines",
        json={"name": "No Changes Machine"},
        headers=headers,
    )
    mid = created.json()["id"]
    resp = await client.get(f"/api/test-co/machines/{mid}/history", headers=headers)
    assert resp.status_code == 200
    body = resp.json()
    assert body["total"] == 0
    assert len(body["history"]) == 0
@pytest.mark.asyncio
async def test_history_endpoint_nonexistent_machine(client: AsyncClient, seeded_db):
    """History for an unknown machine id returns an empty page, not an error."""
    headers = await get_auth_headers(client)
    missing_id = str(uuid4())
    resp = await client.get(
        f"/api/test-co/machines/{missing_id}/history", headers=headers
    )
    assert resp.status_code == 200
    body = resp.json()
    assert body["total"] == 0
    assert len(body["history"]) == 0
@pytest.mark.asyncio
async def test_sync_version_increments(client: AsyncClient, seeded_db):
    """sync_version is 1 after import and increments when a sync pulls changes."""
    headers = await get_auth_headers(client)
    env = {"DIGITAL_TWIN_API_URL": "http://mock-api"}
    fetch_target = (
        "src.services.equipment_sync.EquipmentSyncService.fetch_remote_equipment"
    )

    with patch(fetch_target, new_callable=AsyncMock) as seed_fetch, patch.dict(
        os.environ, env
    ):
        seed_fetch.return_value = MOCK_REMOTE_EQUIPMENT
        await client.post(
            "/api/test-co/machines/import",
            json={"external_ids": ["dt-001"]},
            headers=headers,
        )

    after_import = await client.get("/api/test-co/machines", headers=headers)
    assert after_import.json()["machines"][0]["sync_version"] == 1

    # Newer remote payload so the next sync actually pulls an update.
    newer_remote = [
        {
            "id": "dt-001",
            "equipmentName": "Press Machine 1 Updated",
            "equipmentId": "PM-001",
            "model": "XM-200-V2",
            "manufacturer": "TestCorp",
            "installationDate": "2024-01-15T00:00:00Z",
            "location": "Bay 1",
            "capacity": "200t",
            "powerConsumption": "50kW",
            "updatedAt": "2025-02-11T12:00:00Z",
        },
    ]
    with patch(fetch_target, new_callable=AsyncMock) as sync_fetch, patch.dict(
        os.environ, env
    ):
        sync_fetch.return_value = newer_remote
        await client.post("/api/test-co/machines/sync", headers=headers)

    after_sync = await client.get("/api/test-co/machines", headers=headers)
    assert after_sync.json()["machines"][0]["sync_version"] == 2