Merge pull request #312 from bandprotocol/extra-rest-25-add-stats
[Extra/rest 2.5] Add stats for oracle scripts (os) and data sources (ds); fix jail status
colmazia committed Jul 6, 2023
2 parents 2cac67a + 061dbf8 commit 46fea12
Showing 6 changed files with 123 additions and 2 deletions.
21 changes: 19 additions & 2 deletions flusher/flusher/db.py
@@ -59,8 +59,7 @@ class CustomDateTime(sa.types.TypeDecorator):
    impl = sa.DateTime

    def process_bind_param(self, value, dialect):
-       return datetime.fromtimestamp(value / 1e9)
-
+       return datetime.fromtimestamp(value / 1e9) if value != None else None

class CustomBase64(sa.types.TypeDecorator):
"""Custom LargeBinary type that accepts base64-encoded string."""
@@ -161,6 +160,7 @@ def Column(*args, **kwargs):
Column("fee", sa.String),
Column("transaction_id", sa.Integer, sa.ForeignKey("transactions.id"), nullable=True),
Column("accumulated_revenue", sa.BigInteger),
Column("last_request", CustomDateTime, nullable=True),
)

oracle_scripts = sa.Table(
@@ -175,6 +175,7 @@ def Column(*args, **kwargs):
Column("source_code_url", sa.String),
Column("transaction_id", sa.Integer, sa.ForeignKey("transactions.id"), nullable=True),
Column("version", sa.Integer, nullable=True),
Column("last_request", CustomDateTime, nullable=True),
)

requests = sa.Table(
@@ -397,13 +398,29 @@ def Column(*args, **kwargs):
Column("count", sa.Integer),
)

data_source_requests_per_days = sa.Table(
"data_source_requests_per_days",
metadata,
Column("date", CustomDate, primary_key=True),
Column("data_source_id", sa.Integer, sa.ForeignKey("data_sources.id"), primary_key=True),
Column("count", sa.Integer),
)

oracle_script_requests = sa.Table(
"oracle_script_requests",
metadata,
Column("oracle_script_id", sa.Integer, sa.ForeignKey("oracle_scripts.id"), primary_key=True),
Column("count", sa.Integer),
)

oracle_script_requests_per_days = sa.Table(
"oracle_script_requests_per_days",
metadata,
Column("date", CustomDate, primary_key=True),
Column("oracle_script_id", sa.Integer, sa.ForeignKey("oracle_scripts.id"), primary_key=True),
Column("count", sa.Integer),
)

request_count_per_days = sa.Table(
"request_count_per_days",
metadata,
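For db.py, the commit adds a nullable last_request timestamp to both data_sources and oracle_scripts, plus two per-day counter tables keyed by (date, id). The sketch below shows how one of the new counters could be read back; the connection string and the import path are assumptions for illustration and are not part of the commit:

    # Illustrative query of the new per-day counter table; not part of the commit.
    # Assumes the table objects are importable from the flusher package and that
    # DB_URL points at the same Postgres database the flusher writes to.
    from datetime import date
    from sqlalchemy import create_engine, select
    from flusher.db import data_source_requests_per_days

    DB_URL = "postgresql://localhost/flusher"  # placeholder connection string
    engine = create_engine(DB_URL)

    with engine.connect() as conn:
        count = conn.execute(
            select([data_source_requests_per_days.c.count]).where(
                (data_source_requests_per_days.c.date == date(2023, 7, 6))
                & (data_source_requests_per_days.c.data_source_id == 1)
            )
        ).scalar()
        print(count or 0)  # None means no requests were recorded for that day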
56 changes: 56 additions & 0 deletions flusher/flusher/handler.py
@@ -29,7 +29,9 @@
related_data_source_oracle_scripts,
historical_oracle_statuses,
data_source_requests,
data_source_requests_per_days,
oracle_script_requests,
oracle_script_requests_per_days,
request_count_per_days,
incoming_packets,
outgoing_packets,
@@ -63,6 +65,16 @@ def get_request_count(self, date):
select([request_count_per_days.c.count]).where(request_count_per_days.c.date == date)
).scalar()

def get_oracle_script_requests_count_per_day(self, date, oracle_script_id):
return self.conn.execute(
select([oracle_script_requests_per_days.c.count]).where((oracle_script_requests_per_days.c.date == date) & (oracle_script_requests_per_days.c.oracle_script_id == oracle_script_id))
).scalar()

def get_data_source_requests_count_per_day(self, date, data_source_id):
return self.conn.execute(
select([data_source_requests_per_days.c.count]).where((data_source_requests_per_days.c.date == date) & (data_source_requests_per_days.c.data_source_id == data_source_id))
).scalar()

def get_data_source_id(self, id):
return self.conn.execute(select([data_sources.c.id]).where(data_sources.c.id == id)).scalar()

@@ -144,6 +156,8 @@ def handle_new_request(self, msg):
del msg["tx_hash"]
if "timestamp" in msg:
self.handle_set_request_count_per_day({"date": msg["timestamp"]})
self.handle_update_oracle_script_requests_count_per_day({"date": msg["timestamp"], "oracle_script_id": msg["oracle_script_id"]})
self.update_oracle_script_last_request(msg["oracle_script_id"], msg["timestamp"])
del msg["timestamp"]
self.conn.execute(requests.insert(), msg)
self.increase_oracle_script_count(msg["oracle_script_id"])
@@ -166,6 +180,10 @@ def handle_update_related_ds_os(self, msg):

def handle_new_raw_request(self, msg):
self.increase_data_source_count(msg["data_source_id"])
if "timestamp" in msg:
self.handle_update_data_source_requests_count_per_day({"date": msg["timestamp"], "data_source_id": msg["data_source_id"]})
self.update_data_source_last_request(msg["data_source_id"], msg["timestamp"])
del msg["timestamp"]
self.handle_update_related_ds_os(
{
"oracle_script_id": self.conn.execute(
@@ -391,6 +409,30 @@ def handle_set_request_count_per_day(self, msg):
request_count_per_days.update(condition).values(count=request_count_per_days.c.count + 1)
)

def handle_update_oracle_script_requests_count_per_day(self, msg):
if self.get_oracle_script_requests_count_per_day(msg["date"], msg["oracle_script_id"]) is None:
msg["count"] = 1
self.conn.execute(oracle_script_requests_per_days.insert(), msg)
else:
condition = True
for col in oracle_script_requests_per_days.primary_key.columns.values():
condition = (col == msg[col.name]) & condition
self.conn.execute(
oracle_script_requests_per_days.update(condition).values(count=oracle_script_requests_per_days.c.count + 1)
)

def handle_update_data_source_requests_count_per_day(self, msg):
if self.get_data_source_requests_count_per_day(msg["date"], msg["data_source_id"]) is None:
msg["count"] = 1
self.conn.execute(data_source_requests_per_days.insert(), msg)
else:
condition = True
for col in data_source_requests_per_days.primary_key.columns.values():
condition = (col == msg[col.name]) & condition
self.conn.execute(
data_source_requests_per_days.update(condition).values(count=data_source_requests_per_days.c.count + 1)
)

def handle_new_incoming_packet(self, msg):
self.update_last_update_channel(msg['dst_port'], msg['dst_channel'], msg['block_time'])
del msg["block_time"]
@@ -428,6 +470,20 @@ def increase_oracle_script_count(self, id):
)
)

def update_oracle_script_last_request(self, id, timestamp):
self.conn.execute(
oracle_scripts.update(oracle_scripts.c.id == id).values(
last_request=timestamp
)
)

def update_data_source_last_request(self, id, timestamp):
self.conn.execute(
data_sources.update(data_sources.c.id == id).values(
last_request=timestamp
)
)

def handle_new_historical_bonded_token_on_validator(self, msg):
self.conn.execute(
insert(historical_bonded_token_on_validators)
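Both per-day handlers above follow the same insert-or-increment pattern: the first time a (date, id) key is seen, insert a row with count = 1; otherwise build an equality condition over the table's primary-key columns and bump the stored count. A standalone sketch of that pattern, using sqlalchemy.and_ in place of the manual condition chaining (the helper name and signature are illustrative, not part of the commit):

    # Generic insert-or-increment helper mirroring handle_update_*_requests_count_per_day.
    # `conn` is an open SQLAlchemy connection; `table` is any table whose primary-key
    # columns appear in `msg` and which has an integer `count` column. Illustrative only.
    from sqlalchemy import and_, select

    def insert_or_increment(conn, table, msg):
        key = and_(*(col == msg[col.name] for col in table.primary_key.columns.values()))
        existing = conn.execute(select([table.c.count]).where(key)).scalar()
        if existing is None:
            conn.execute(table.insert(), {**msg, "count": 1})
        else:
            conn.execute(table.update(key).values(count=table.c.count + 1))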
15 changes: 15 additions & 0 deletions flusher/flusher/init.py
@@ -168,6 +168,21 @@ def init(chain_id, topic, replay_topic, db):
requests.resolve_status;
"""
)
engine.execute(
"""
CREATE VIEW data_source_statistic_last_1_day
AS
SELECT data_sources.id,
count(*) AS count
FROM data_sources
join raw_requests
ON data_sources.id = raw_requests.data_source_id
join requests
ON raw_requests.request_id = requests.id
WHERE requests.request_time >= CAST(EXTRACT(epoch FROM NOW()) AS INT) - 86400
GROUP BY data_sources.id;
"""
)
# TODO: replace select&group_by d.validator_id with d.delegator_id
engine.execute(
"""
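The new data_source_statistic_last_1_day view reports, per data source, how many raw requests belong to requests made within the last 24 hours. One way to read it back, as a rough sketch (placeholder connection string; the query itself is not part of the commit):

    # Illustrative read of the new view; assumes the view was created by init() above.
    from sqlalchemy import create_engine, text

    engine = create_engine("postgresql://localhost/flusher")  # placeholder DSN
    with engine.connect() as conn:
        rows = conn.execute(text(
            "SELECT id, count FROM data_source_statistic_last_1_day ORDER BY count DESC LIMIT 10"
        ))
        for ds_id, req_count in rows:
            print(ds_id, req_count)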
31 changes: 31 additions & 0 deletions hasura/hasura-metadata/tables.yaml
@@ -208,6 +208,16 @@
- name: data_source
using:
foreign_key_constraint_on: data_source_id
- table:
name: data_source_requests_per_days
schema: public
object_relationships:
- name: data_source
using:
foreign_key_constraint_on: data_source_id
- table:
name: data_source_statistic_last_1_day
schema: public
- table:
name: data_sources
schema: public
@@ -223,6 +233,13 @@
using:
foreign_key_constraint_on: transaction_id
array_relationships:
- name: data_source_requests_per_days
using:
foreign_key_constraint_on:
column: data_source_id
table:
name: data_source_requests_per_days
schema: public
- name: raw_requests
using:
foreign_key_constraint_on:
@@ -313,6 +330,13 @@
- name: oracle_script
using:
foreign_key_constraint_on: oracle_script_id
- table:
name: oracle_script_requests_per_days
schema: public
object_relationships:
- name: oracle_script
using:
foreign_key_constraint_on: oracle_script_id
- table:
name: oracle_script_statistic_last_1_day
schema: public
@@ -337,6 +361,13 @@
using:
foreign_key_constraint_on: transaction_id
array_relationships:
- name: oracle_script_requests_per_days
using:
foreign_key_constraint_on:
column: oracle_script_id
table:
name: oracle_script_requests_per_days
schema: public
- name: related_data_source_oracle_scripts
using:
foreign_key_constraint_on:
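The Hasura metadata changes track the two new per-day tables and the new view, and add object/array relationships so the counters can be reached from data_sources and oracle_scripts. Assuming a running Hasura instance with this metadata applied, a request like the following could fetch the last week of oracle-script counts; the endpoint URL, headers, and result shape depend on the deployment and are assumptions here:

    # Hypothetical GraphQL request against a Hasura endpoint serving this metadata.
    import json
    import urllib.request

    QUERY = """
    query {
      oracle_script_requests_per_days(limit: 7, order_by: {date: desc}) {
        date
        oracle_script_id
        count
      }
    }
    """

    req = urllib.request.Request(
        "http://localhost:8080/v1/graphql",  # placeholder Hasura URL
        data=json.dumps({"query": QUERY}).encode(),
        headers={"Content-Type": "application/json"},
    )
    with urllib.request.urlopen(req) as resp:
        print(json.load(resp))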
1 change: 1 addition & 0 deletions hooks/emitter/oracle.go
@@ -69,6 +69,7 @@ func (h *Hook) emitRawRequestAndValRequest(
"data_source_id": raw.DataSourceID,
"fee": fee.Amount,
"calldata": parseBytes(raw.Calldata),
"timestamp": ctx.BlockTime().UnixNano(),
})
ds := h.oracleKeeper.MustGetDataSource(ctx, raw.DataSourceID)
h.AddAccountsInTx(ds.Treasury)
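The oracle hook now attaches the block time in nanoseconds to every raw-request message, which is what lets the flusher fill data_sources.last_request and the per-day counters. CustomDateTime in db.py converts that value back by dividing by 1e9; a small sanity check of the round trip (the sample value is arbitrary):

    # ctx.BlockTime().UnixNano() on the Go side -> datetime on the flusher side.
    from datetime import datetime, timezone

    ns = 1_688_601_600_000_000_000  # example: 2023-07-06 00:00:00 UTC, in nanoseconds
    # db.py's process_bind_param calls datetime.fromtimestamp(ns / 1e9) without a tz
    # (local time); pinning UTC here only makes the printed value deterministic.
    print(datetime.fromtimestamp(ns / 1e9, tz=timezone.utc))  # 2023-07-06 00:00:00+00:00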
1 change: 1 addition & 0 deletions hooks/emitter/staking.go
@@ -65,6 +65,7 @@ func (h *Hook) emitUpdateValidator(ctx sdk.Context, addr sdk.ValAddress) (types.
"delegator_shares": val.DelegatorShares.String(),
"current_reward": currentReward,
"current_ratio": currentRatio,
"jailed": val.Jailed,
"last_update": ctx.BlockTime().UnixNano(),
})
return val, true
