feat: implement statistics
@@ -21,6 +21,8 @@ def read_ids(df):
    ids = _get_uniques(df)
    _show_events(ids)


def get_unique_events_table(df):
    return df.drop_duplicates(subset="ID", keep="first")


def read_header(df, event_id):
    # Event header information
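For reference, a minimal sketch of what the new get_unique_events_table helper does, assuming a DataFrame that has an "ID" column; the sample frame below is illustrative and not taken from the repository:

import pandas as pd

# Hypothetical frame with a duplicated event ID; only the column name "ID" comes from the diff above.
events = pd.DataFrame({
    "ID": ["19960607132529", "19960607132529", "19960603195540"],
    "Agency": ["TES", "NAO", "BER"],
})

# keep="first" retains the first row seen for each ID and drops later duplicates.
unique_events = events.drop_duplicates(subset="ID", keep="first")
print(unique_events)  # two rows, one per distinct ID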
@@ -1,6 +1,5 @@
# pyright: basic
import io
import warnings

from collections import defaultdict
from datetime import datetime
utils/stats.py (new file, +96 lines)
@@ -0,0 +1,96 @@
# pyright: basic

import os

import pandas as pd
import numpy as np

STAT_MENU = """=== Earthquakes ===
== Estatísticas ==
[1] Média
[2] Variância
[3] Desvio padrão
[4] Máximo
[5] Mínimo

[Q] Voltar ao menu principal
"""


def stat_menu(df: pd.DataFrame):
    inStats = True
    while inStats:
        os.system("cls")  # clear the console (Windows)
        print(STAT_MENU)
        usrIn = input("Opção: ").lower()

        match usrIn:
            # Options 1-5 are placeholders; the statistics are not wired up yet.
            case "1":
                pass
            case "2":
                pass
            case "3":
                pass
            case "4":
                pass
            case "5":
                pass
            case "q":
                inStats = False
            case _:
                pass


def average(df: pd.DataFrame, filter_by):
    values = df[filter_by].to_numpy()

    if filter_by == "Magnitudes":
        values = _unpack_mags(values)

    return np.average(values)


def variance(df, filter_by):
    values = df[filter_by].to_numpy()

    if filter_by == "Magnitudes":
        values = _unpack_mags(values)

    return np.var(values)


def std_dev(df, filter_by):
    values = df[filter_by].to_numpy()

    if filter_by == "Magnitudes":
        values = _unpack_mags(values)

    return np.std(values)


def max(df, filter_by):
    values = df[filter_by].to_numpy()

    if filter_by == "Magnitudes":
        values = _unpack_mags(values)

    return np.max(values)


def min(df, filter_by):
    values = df[filter_by].to_numpy()

    if filter_by == "Magnitudes":
        values = _unpack_mags(values)

    return np.min(values)


def _unpack_mags(arr: np.ndarray):
    # Flatten the per-event magnitude lists into a 1-D float array.
    newVals = np.empty(0)
    for v in arr:
        for m in v:
            newVals = np.append(newVals, np.float32(m["Magnitude"]))
    return newVals
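A rough usage sketch for the new statistics helpers. The column names "Depth" and "Magnitudes" and the package-style import are assumptions, not confirmed by this diff; the "Magnitudes" layout of per-event lists of dicts with a "Magnitude" key is inferred from _unpack_mags:

import pandas as pd

from utils import stats  # assumes utils/ is importable as a package

# Hypothetical frame mirroring the layout _unpack_mags expects.
df = pd.DataFrame({
    "Depth": [12.0, 29.2],
    "Magnitudes": [
        [{"Magnitude": "1.9"}, {"Magnitude": "2.2"}],
        [{"Magnitude": "2.0"}],
    ],
})

print(stats.average(df, "Depth"))       # plain numeric column -> 20.6
print(stats.average(df, "Magnitudes"))  # magnitude dicts are flattened first -> ~2.03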
@@ -1,31 +0,0 @@
1996 6 7 1325 29.2 L 59.846 5.130 12.0F TES 12 .60 1.9LTES 2.2CTES 2.0LNAO1
GAP=177 2.78 4.5 12.80.000 0.2239E+02 0.6258E+03 -0.2817E+03E
1996 6 7 1325 30.5 L 59.763 5.396 29.2 NAO 2 1.0 2.0LNAO1
8.3 41.0 74.7 1 F
1996-06-07-1324-51S.TEST__009 6
ACTION:SPL 14-12-11 12:04 OP:jh STATUS: ID:19960607132529 L I
STAT COM NTLO IPHASE W HHMM SS.SSS PAR1 PAR2 AGA OPE AIN RES W DIS CAZ7
EGD HHZ NS IP 4 1325 35.950 C BER jh 120.0-1.131047.70 6
EGD HHZ NS END 1325 35.950 111.0 BER jh 0.0 47.70 6
EGD HHZ NS AMP 1325 35.950 11.1 33.3 BER jh 47.70 6
EGD HHN NS ES 1325 42.030 BER jh 70.0-.8901047.70 6
BER BHZ NS00 IP 1325 38.120 C kkk AUT -.9801061.00 11
BER BHZ NS00 END 1325 38.120 55.0 BER jh 4.8 61.00 11
BER BHN NS00 ES 1325 45.440 BER jh 70.0-.9901061.00 11
BER BHZ NS00 IAML A1325 46.710 31.7 0.20 BER jh 0.4 61.00 11
KMY BHZ NS10 IP 1325 40.260 C PPP Ajh 70.0 .3301070.90 175
KMY BHZ NS10 END 1325 40.260 62.0 BER jh 70.90 175
KMY BHN NS10 ES 1325 48.740 BER jh 70.0.3001070.90 175
KMY BHZ NS10 IAML 1325 48.920 83.6 0.20 BER jh 70.90 175
ASK SHZ NS EP 2 1325 39.590 D -1.031071.10 3
ASK SHZ NS END 1325 39.590 68.0 71.10 3
ASK SHZ NS ES 1325 48.070 -1.021071.10 3
ASK SHZ NS AMP 1325 48.070 333.3 2.20 71.10 3
ASK SHZ NS IAML 1325 50.900 111.0 0.30 71.10 3
NRA0 S Z Pn A1326 19.090 50.0-.05010368.0 72
NRA0 S Z END 1326 19.090 333.0 368.0 72
NRA0 S Z BAZ-P 1326 19.090 256.9 6.9 0. 368.0 72
NRA0 S Z Pg 1326 27.940 -.64010368.0 72
NRA0 S Z BAZ 1326 27.940 253.0 7.3 -3. 368.0 72
NRA0 S Z Lg 1327 10.540 -.89010368.0 72
NRA0 S Z BAZ 1327 10.540 266.6 4.1 9. 368.0 72
@@ -1,50 +0,0 @@
import pytest
import parser


def test_type_1():
    test_data = [" 1996 6 7 1325 29.2 L 59.846 5.130 12.0F TES 12 .60 1.9LTES 2.2CTES 2.0LNAO1",
                 " 1996 6 7 1325 30.5 L 59.763 5.396 29.2 NAO 2 1.0 2.0LNAO1"]
    expected = {"DateTime": "1996-06-07T13:25:29.200000", "Distance Indicator": "L", "Event ID": " ", 'Lat': 59.846, 'Long': 5.13, 'Depth': 12.0, 'Agency': 'TES', 'Magnitudes': [{'M': ' 1.9', 'T': 'L'}, {'M': ' 2.2', 'T': 'C'}, {'M': ' 2.0', 'T': 'L'}, {'M': ' 2.0', 'T': 'L'}]}

    _ret = parser.parse_type_1(test_data)
    for (k, v) in _ret.items():
        assert _ret[k] == expected[k]


def test_type_3():
    test_data = [" OP: CVUA-RM/RC 3",
                 " STATUS: OK SENTIDO 3",
                 " SENTIDO: II/III -Pico: S. Caetano 3",
                 " PUB: NAO 3",
                 " WEB: SIM 3",
                 " OBS: Por ordem do CT nao foi emitido novo comunicado 3",
                 " OBS: Sismo sobreposto 3",
                 " REGIAO: Pico,VZ14,SZ06,FE95 405 3"]

    _ret = parser.parse_type_3(test_data)
    assert len(_ret["Comments"]) == 8


def test_type_6():
    test_data = [" 1996-06-03-2002-18S.TEST__012 6",
                 " 1996-06-03-1917-52S.TEST__002 6"]
    expected = {"Wave": ["1996-06-03-2002-18S.TEST__012", "1996-06-03-1917-52S.TEST__002"]}

    _ret = parser.parse_type_6(test_data)
    for (k, v) in _ret.items():
        assert _ret[k] == expected[k]


def test_type_i():
    test_data = [" ACTION:SPL 08-10-02 10:19 OP:jh STATUS: ID:19960603195540 I"]
    expected = {"Action": "SPL", "Action Extra": {"Date": '2008-10-02T10:19:00', "OP": "jh", "Status": "", "ID": 19960603195540}}

    _ret = parser.parse_type_i(test_data)
    for (k, v) in _ret.items():
        assert _ret[k] == expected[k]


def test_type_e():
    test_data = [" GAP=348 2.88 999.9 999.9999.9 -0.1404E+08 -0.3810E+08 0.1205E+09E"]
    expected = {"Gap": 348, "Origin": 2.88, "Error_lat": 999.9, "Error_long": 999.9, "Error_depth": 999.9, "Cov_xy": -14040000.0, "Cov_xz": -38100000.0, "Cov_yz": 120500000.0}

    _ret = parser.parse_type_e(test_data)
    for (k, v) in _ret.items():
        assert _ret[k] == expected[k]