202 changes: 92 additions & 110 deletions src/sac-dm/util.py
@@ -88,135 +88,117 @@ def instantCompare( instant, average, deviation, file_tags):

return conclusion

def instantsClassification(instant, file_tags):
"""
This function receives 2 parameters with the objective of unifying the classifications of the axis X, Y or Z,
into just one classification.

:param instant: List that contains 3 integer values, which correspond to the return of the <function instantCompare> for an instant, that is, a point in X, Y and Z at the same moment.
:param file_tags: List that has the size of the labels for classification, not necessarily being filled with the labels themselves.

:param return: Returns an integer value that corresponds to the weighting of the classification of the points contained at an instant.
"""
#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!

instant = np.array(instant)
#check if the axes are in the same condition
for i in range(len(file_tags)):
auxConclusion = np.where(instant == i)[0]
if(len(auxConclusion) == len(instant)):
# print(f"Instant: {instant} classified: F{i} 3-Equals")
return i

if(len(auxConclusion) == 2):
# print(f"Instant: {instant} classified: F{i} 2-Equals")
#If 2 axes are healthy and the other is not
if(instant[auxConclusion[0]] == 0):
for j in range(len(instant)):
if(instant[j] > 0):
#2 healthy axes and another inconclusive
if(instant[j] == len(file_tags)):
return 0

return instant[j]

return i
if(len(instant) == 2):
healthy = np.where(instant == 0)[0]
failure = np.where(instant > 0)[0]
if(len(failure) == 2 ):

# Different failures
if(instant[failure[0]] >= 1 and instant[failure[0]] < len(file_tags) and instant[failure[1]] >= 1 and instant[failure[1]] < len(file_tags) ):
return (len(file_tags))

# 1 axis with failure and another inconclusive: classified as failure
if(instant[failure[0]] < len(file_tags)):
return (instant[failure[0]])
else:
return (instant[failure[1]])

    # 1 axis inconclusive and the other healthy
if(len(failure) == 1 and instant[failure[0]] == len(file_tags)):
return (instant[healthy[0]])

# 1 axis with failure
if(len(failure) == 1):
return (instant[failure[0]])

# print(f"Instant: {instant} classified: Inconclusivo")
return len(file_tags)
def instantsClassification(instant, file_tags):
instant = np.array(instant)

    # Binary case ("NF" only): NF only if ALL axes are NF
if len(file_tags) == 1:
return 0 if np.all(instant == 0) else len(file_tags)

    # --- below keeps the original logic for multiple labels (NF, FC1, FC2, ...) ---

for i in range(len(file_tags)):
auxConclusion = np.where(instant == i)[0]
if len(auxConclusion) == len(instant):
return i
if len(auxConclusion) == 2:
if instant[auxConclusion[0]] == 0:
for j in range(len(instant)):
if instant[j] > 0:
if instant[j] == len(file_tags):
return 0
return instant[j]
return i

if len(instant) == 2:
healthy = np.where(instant == 0)[0]
failure = np.where(instant > 0)[0]
if len(failure) == 2:
if (instant[failure[0]] >= 1 and instant[failure[0]] < len(file_tags) and
instant[failure[1]] >= 1 and instant[failure[1]] < len(file_tags)):
return len(file_tags)
if instant[failure[0]] < len(file_tags):
return instant[failure[0]]
else:
return instant[failure[1]]
if len(failure) == 1 and instant[failure[0]] == len(file_tags):
return instant[healthy[0]]
if len(failure) == 1:
return instant[failure[0]]

return len(file_tags)
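
A quick sanity check of the new combination rules (a sketch; with file_tags = ["NF", "FC1"], the integer codes are 0 = NF, 1 = FC1, 2 = inconclusive):

# Sketch: exercising the per-instant combination rules of instantsClassification.
file_tags = ["NF", "FC1"]
print(instantsClassification([0, 0, 0], file_tags))  # 0 -> all three axes NF
print(instantsClassification([0, 0, 1], file_tags))  # 1 -> two healthy axes, one FC1 failure
print(instantsClassification([0, 0, 2], file_tags))  # 0 -> two healthy axes, one inconclusive
print(instantsClassification([0, 1, 1], file_tags))  # 1 -> majority failure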

def windowingClassification(axes_classification, window_size, file_tags):

    """
    This function receives 3 parameters and partitions (separates into windows) the list
    axes_classification, building a new classification list from it through simple voting.

    :param axes_classification: List containing the axes classification, corresponding to the return of <function instantsClassification>.
    :param window_size: Value corresponding to the interval over which the windowing is performed.
    :param file_tags: List of the labels for classification.

    :return: List containing the classification of the data.
    """

    window_classification = []
    count_window = 0

import numpy as np

INCONCLUSIVE = "inconclusivo"

def windowing_classification(axes_classification, window_size, hop=1):
    """
    axes_classification: list of per-instant labels (strings: "NF" or "inconclusivo")
    window_size: window length
    hop: sliding step (1 = maximum overlap)
    """
    out = []
    N = len(axes_classification)
    if N == 0 or window_size <= 0 or hop <= 0:
        return out

    for j in range(0, (len(axes_classification)), window_size):
        window = np.zeros(window_size)
        count_window += 1
        if (j + window_size <= len(axes_classification)):
            window = axes_classification[j:j+window_size]
        else:
            window = axes_classification[j:]

        values, counts = np.unique(window, return_counts=True)

        # checks whether more than one value ties for the highest count
        if(np.count_nonzero(counts == counts[np.argmax(counts)]) > 1):
            window_classification.append(len(file_tags))
            # print(f"window: {(window)} classification: {len(file_tags)}")
        else:
            window_classification.append(values[np.argmax(counts)])
            # print(f"window: {(window)} classification: {values[np.argmax(counts)]}")

    return window_classification

    # guarantees at least 1 iteration when N < window_size
    last_start = 0 if N < window_size else (N - window_size)
    for start in range(0, last_start + 1, hop):
        end = min(start + window_size, N)
        window = axes_classification[start:end]

        # ignore inconclusive votes in the count
        valid = [w for w in window if w != INCONCLUSIVE]
        if not valid:
            out.append(INCONCLUSIVE)
            continue

        values, counts = np.unique(valid, return_counts=True)
        top = counts.max()
        winners = values[counts == top]

        out.append(INCONCLUSIVE if len(winners) > 1 else str(winners[0]))
    return out
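
A small example of the sliding vote (a sketch using the string labels this function expects):

# Sketch: majority vote per sliding window, ignoring inconclusive entries.
labels = ["NF", "NF", "inconclusivo", "NF", "inconclusivo"]
print(windowing_classification(labels, window_size=3, hop=1))
# -> ['NF', 'NF', 'NF'] (each window has a clear "NF" majority among the valid votes)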

"""
This function receives 5 parameters with the objective of classifying the data.

:param instant: List that contains instant, which each position are composed of 3 floating values coming from <function sac_am>
:param average: List containing the averages that will be used in the test
:param deviation: List containing the standard deviations that will be used in the test
:param window_size: Value that corresponds to the interval in which the windowing will be performed
:param file_tags: List of the labels for classification

:return: Returns the label of the classification data.
"""

sac_classification = testingInstants(sac_instants, average, deviation, file_tags)
def classification(sac_instants, means, deviations, window_size=5, hop=1, file_tags=("NF",)):
"""
Retorna "NF" ou "inconclusivo".
- testingInstants(...) deve classificar por eixo/instante usando média±desvio.
- instantsClassification(...) deve combinar X/Y/Z por instante aplicando
as regras do artigo; como você usa só "NF" como rótulo válido, qualquer
coisa que não seja unanimidade/maioria para "NF" vira "inconclusivo".
"""
sac_classification = testingInstants(sac_instants, means, deviations, file_tags)

axes_classification = []
# rótulo COMBINADO por instante (string): "NF" ou "inconclusivo"
axes_classification = [instantsClassification(sac_classification[i], file_tags)
for i in range(len(sac_classification))]

for i in range(len(sac_classification)):
axes_classification.append(instantsClassification(sac_classification[i], file_tags))
# janelamento deslizante
window_cls = windowing_classification(axes_classification, window_size, hop)
if not window_cls:
return INCONCLUSIVE

window_classification = windowingClassification(axes_classification, window_size, file_tags)
# decisão final por maioria sobre as janelas (ignora inconclusivo)
valid = [c for c in window_cls if c != INCONCLUSIVE]
if not valid:
return INCONCLUSIVE

values, counts = np.unique(window_classification, return_counts=True)
values, counts = np.unique(valid, return_counts=True)
top = counts.max()
winners = values[counts == top]
return INCONCLUSIVE if len(winners) > 1 else str(winners[0])

# checks if there is more than one value with the same and greater repetition
# simple voting to classify the data
if(np.count_nonzero(counts == counts[np.argmax(counts)]) > 1 or values[np.argmax(counts)] == len(file_tags)):
# return len(file_tags)
return "inconclusivo"
else:
return file_tags[values[np.argmax(counts)]]
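
End to end, the call now reads as below (a sketch; sac_instants, means and deviations come from the upstream SAC-AM/SAC-DM processing, and testingInstants is defined earlier in this module):

# Sketch: full pipeline call with the new keyword arguments.
result = classification(sac_instants, means, deviations,
                        window_size=5, hop=1, file_tags=["NF"])
if result == "NF":
    print("Device behaving normally")
else:
    print("Inconclusive; flag the device for inspection")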



#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!

def plotEditingHalfTraining(dataset, title, fig, ax, file_tag):

122 changes: 122 additions & 0 deletions src/server/controllers/accelerometer.py
@@ -0,0 +1,122 @@
import datetime
from models.models import AccelerometerAcquisition, Device
from schemas.accelerometer import AccelerometerSchema
from sqlalchemy.orm import Session
from typing import List
from fastapi import status
from fastapi.responses import JSONResponse


def create_accelerometer_record(accelerometer_schema: List[AccelerometerSchema], db: Session):
try:
records = [AccelerometerAcquisition(**accelerometer_record.dict()) for accelerometer_record in accelerometer_schema]
db.add_all(records)
db.commit()
    except Exception:
        db.rollback()
        return JSONResponse(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            content={"error": "Failed to insert data to the database."}
        )
return JSONResponse(
status_code=status.HTTP_200_OK,
content="Successfully entered data!")

def get_all_accelerometer_records(db: Session):
return db.query(AccelerometerAcquisition).all()


# def get_accelerometer_record_by_label(data: Filter, db: Session):
# print("Data: ", data)
# if data.label:
# return db.query(AccelerometerAcquisition).filter(AccelerometerAcquisition.label == data.label).all()


def get_accelerometer_by_filter(device_id: int, datetime_initial: str, datetime_final: str, db: Session):
if device_id and not datetime_initial and not datetime_final:
return db.query(AccelerometerAcquisition).filter(AccelerometerAcquisition.device_id == device_id).all()
elif device_id and datetime_initial and not datetime_final:
return db.query(AccelerometerAcquisition).filter(AccelerometerAcquisition.device_id == device_id, AccelerometerAcquisition.timestamp >= datetime_initial).all()
elif device_id and datetime_final and not datetime_initial:
return db.query(AccelerometerAcquisition).filter(AccelerometerAcquisition.device_id == device_id, AccelerometerAcquisition.timestamp <= datetime_final).all()
elif device_id and datetime_initial and datetime_final:
return db.query(AccelerometerAcquisition).filter(AccelerometerAcquisition.device_id == device_id, AccelerometerAcquisition.timestamp >= datetime_initial, AccelerometerAcquisition.timestamp <= datetime_final ).all()
elif datetime_initial and not datetime_final and not device_id:
return db.query(AccelerometerAcquisition).filter(AccelerometerAcquisition.timestamp >= datetime_initial).all()
elif datetime_final and not datetime_initial and not device_id:
return db.query(AccelerometerAcquisition).filter(AccelerometerAcquisition.timestamp <= datetime_final).all()
elif datetime_initial and datetime_final and not device_id:
return db.query(AccelerometerAcquisition).filter(AccelerometerAcquisition.timestamp >= datetime_initial, AccelerometerAcquisition.timestamp <= datetime_final ).all()
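
Review note: the branches above silently return None when all three parameters are empty. A compact equivalent (a sketch, not part of this PR; the _compact name is hypothetical) builds the filter list dynamically so every combination is handled by one query:

# Sketch: dynamic filter composition; filter(*filters) with an empty list
# returns all records, covering the case no branch above handles.
def get_accelerometer_by_filter_compact(device_id: int, datetime_initial: str,
                                        datetime_final: str, db: Session):
    filters = []
    if device_id:
        filters.append(AccelerometerAcquisition.device_id == device_id)
    if datetime_initial:
        filters.append(AccelerometerAcquisition.timestamp >= datetime_initial)
    if datetime_final:
        filters.append(AccelerometerAcquisition.timestamp <= datetime_final)
    return db.query(AccelerometerAcquisition).filter(*filters).all()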


def delete_accelerometer_records_by_device_code(device_code: int, db: Session):
    device_id = db.query(Device.id).filter(Device.device_code == device_code).first()
    if device_id is None:
        return JSONResponse(
            status_code=status.HTTP_404_NOT_FOUND,
            content="Device not found!")
    try:
        records = db.query(AccelerometerAcquisition).filter(AccelerometerAcquisition.device_id == device_id[0]).all()
if(not records):
return JSONResponse(
status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
content="Device don't have logs!")
for record in records:
db.delete(record)
db.commit()
return JSONResponse(
status_code=status.HTTP_200_OK,
content="Successfully deleted data!")
except Exception:
return JSONResponse(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
content="Delete failed!")


def delete_accelerometer_records_by_datetime(datetime_initial: str, datetime_final: str, db: Session):
if datetime_initial and not datetime_final:
try:
records = db.query(AccelerometerAcquisition).filter(AccelerometerAcquisition.timestamp >= datetime_initial).all()
if(not records):
return JSONResponse(
status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
content="Datetime don't have logs!")
for record in records:
db.delete(record)
db.commit()
return JSONResponse(
status_code=status.HTTP_200_OK,
content="Successfully deleted data!")
except Exception:
return JSONResponse(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
content="Delete failed!")
elif datetime_final and not datetime_initial:
try:
records = db.query(AccelerometerAcquisition).filter(AccelerometerAcquisition.timestamp <= datetime_final).all()
if(not records):
return JSONResponse(
status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
content="Datetime don't have logs!")
for record in records:
db.delete(record)
db.commit()
return JSONResponse(
status_code=status.HTTP_200_OK,
content="Successfully deleted data!")
except Exception:
return JSONResponse(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
content="Delete failed!")
elif datetime_initial and datetime_final:
try:
records = db.query(AccelerometerAcquisition).filter(AccelerometerAcquisition.timestamp >= datetime_initial, AccelerometerAcquisition.timestamp <= datetime_final).all()
if(not records):
return JSONResponse(
status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
content="Datetime don't have logs!")
for record in records:
db.delete(record)
db.commit()
return JSONResponse(
status_code=status.HTTP_200_OK,
content="Successfully deleted data!")
except Exception:
return JSONResponse(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
content="Delete failed!")
2 changes: 1 addition & 1 deletion src/server/controllers/condition.py
@@ -19,5 +19,5 @@ def create_condition(condition_schema: ConditionSchema, db: Session):
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
content={"message": "Failed to create condition!"})

async def get_all_condition(db: Session):
def get_all_condition(db: Session):
return db.query(Condition).all()
3 changes: 2 additions & 1 deletion src/server/controllers/fault.py
@@ -48,7 +48,8 @@ def log_verifier(sac_dm_data: List[SACDMSchema], db: Session):
    # Current device state (assuming 1 = normal, anything else = fault)
    current_condition = vehicle.condition_id if vehicle.condition_id else 1  # if there are no logs, assume normal

is_faulty = classification(*formated_data, 5, ["NF"])

is_faulty = classification(*formated_data, window_size=5, hop=1, file_tags=["NF"])

    if is_faulty == "inconclusivo" and current_condition == 1:  # if it was normal and is now in fault
new_log = Log(
11 changes: 11 additions & 0 deletions src/server/models/models.py
@@ -119,4 +119,15 @@ class FaultCounter(Base):
limit = Column(Integer, nullable=False)


class AccelerometerAcquisition(Base):
__tablename__ = "accelerometer_acquisition"

id = Column(Integer, primary_key=True, autoincrement=True)
device_id = Column(Integer, ForeignKey('device.id'), nullable=False)
ACx = Column(Float, nullable=False)
ACy = Column(Float, nullable=False)
ACz = Column(Float, nullable=False)
timestamp = Column(String, nullable=True)
label = Column(String, nullable=True)

Base.metadata.create_all(bind=engine)
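
For context, the AccelerometerSchema imported by the controller presumably mirrors these columns; a minimal Pydantic sketch (an assumption, the schema file is not shown in this diff) that supports the .dict() call used in create_accelerometer_record:

# Assumed shape of schemas/accelerometer.py (hypothetical), mirroring the
# AccelerometerAcquisition model above.
from typing import Optional
from pydantic import BaseModel

class AccelerometerSchema(BaseModel):
    device_id: int
    ACx: float
    ACy: float
    ACz: float
    timestamp: Optional[str] = None
    label: Optional[str] = None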
2 changes: 1 addition & 1 deletion src/server/routers/condition_router.py
@@ -6,7 +6,7 @@
from schemas.condition import ConditionSchema
from database import get_db

router = APIRouter(prefix="/condition", tags=["Condition"])
router = APIRouter(tags=["Condition"])

# Route to insert a new data into the condition table
@router.post("/condition")