Skip to content

Commit

Permalink
Add wallets in sheets stat
Browse files Browse the repository at this point in the history
  • Loading branch information
ClementWalter committed Sep 12, 2023
1 parent f91a964 commit 8af17dd
Show file tree
Hide file tree
Showing 2 changed files with 213 additions and 82 deletions.
3 changes: 3 additions & 0 deletions packages/starksheet-cairo/.gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -40,3 +40,6 @@ daily_sheets.png
hourly_sheets.png
cumsum_sheets.png
monthly_sheets.png
cursors.json
class_hashes.json
wallets.png
292 changes: 210 additions & 82 deletions packages/starksheet-cairo/notebooks/sheets.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,9 @@
# %% Imports and query
import itertools
import json
import logging
import time
from pathlib import Path

import matplotlib.pyplot as plt
import pandas as pd
Expand All @@ -23,139 +27,247 @@
"sec-fetch-site": "cross-site",
"user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36",
}
data = {
"query": """query AccountCallsTablePaginationFragment(
$after: String,
$first: Int!,
$input: CallsInput!
) {
...AccountCallsTablePaginationFragment_calls_2DAjA4
}
fragment AccountCallsTablePaginationFragment_calls_2DAjA4 on Query {
calls(first: $first, after: $after, input: $input) {
edges {
node {
...AccountCallsTableRowFragment_call
}


def get_contracts_calls(contract_addresses):
cursors = json.load(open("cursors.json")) if Path("cursors.json").is_file() else {}
data = {
"query": """query AccountCallsTablePaginationFragment(
$after: String,
$first: Int!,
$input: CallsInput!
) {
...AccountCallsTablePaginationFragment_calls_2DAjA4
}
fragment AccountCallsTablePaginationFragment_calls_2DAjA4 on Query {
calls(first: $first, after: $after, input: $input) {
edges {
node {
...AccountCallsTableRowFragment_call
}
cursor
}
pageInfo {
endCursor
hasNextPage
}
}
pageInfo {
endCursor
hasNextPage
}
}
}
fragment AccountCallsTableRowFragment_call on Call {
contract_identifier
timestamp
selector_name
}
""",
"variables": {
"after": None,
"first": 1000,
"input": {
"contract_address": "0x071d48483dcfa86718a717f57cf99a72ff8198b4538a6edccd955312fe624747",
"is_account_call": True,
"max_block_number": None,
"max_timestamp": None,
"min_block_number": None,
"min_timestamp": None,
"order_by": "desc",
"sort_by": "timestamp",
fragment AccountCallsTableRowFragment_call on Call {
contract_identifier
timestamp
selector_name
caller_address
}
""",
"variables": {
"after": None,
"first": 1000,
"input": {
"contract_address": "0x071d48483dcfa86718a717f57cf99a72ff8198b4538a6edccd955312fe624747",
"is_account_call": True,
"max_block_number": None,
"max_timestamp": None,
"min_block_number": None,
"min_timestamp": None,
"order_by": "desc",
"sort_by": "timestamp",
},
},
},
}
}
header = [
"contract_identifier",
"timestamp",
"selector_name",
"caller_address",
"class_hash",
]

# %% Fetch data
_calls = []
for contract_address in [
"0x028850a764600d53b2009b17428ae9eb980a4c4ea930a69ed8668048ef082a04",
"0x076a028b19d27310f5e9f941041ae4a3a52c0e0024d593ddcb0d34e1dcd24af1",
"0x071d48483dcfa86718a717f57cf99a72ff8198b4538a6edccd955312fe624747",
]:
page = 0
data["variables"]["after"] = None
data["variables"]["input"]["contract_address"] = contract_address
response = requests.post(url, headers=headers, json=data)
_calls += response.json()["data"]["calls"]["edges"]
while response.json()["data"]["calls"]["pageInfo"]["hasNextPage"]:
page += 1
logger.info(f"⏳ contract {contract_address}: fetching page {page}")
data["variables"]["after"] = response.json()["data"]["calls"]["pageInfo"][
"endCursor"
]
def get_contract_calls(contract_address):
data["variables"]["after"] = cursors.get(contract_address)
data["variables"]["input"]["contract_address"] = contract_address
response = requests.post(url, headers=headers, json=data)
_calls += response.json()["data"]["calls"]["edges"]
_calls = response.json()["data"]["calls"]["edges"]
page_info = response.json()["data"]["calls"]["pageInfo"]
if not _calls:
return []

cursors[contract_address] = _calls[-1]["cursor"]
page = 0
while page_info["hasNextPage"]:
page += 1
logger.info(f"⏳ contract {contract_address}: fetching page {page}")
data["variables"]["after"] = page_info["endCursor"]
response = requests.post(url, headers=headers, json=data)
_calls += response.json()["data"]["calls"]["edges"]
page_info = response.json()["data"]["calls"]["pageInfo"]
cursors[contract_address] = _calls[-1]["cursor"]

return [call["node"] for call in _calls]

calls = (
pd.DataFrame([call["node"] for call in _calls])
.loc[lambda df: df.selector_name == "addSheet"]
.astype({"timestamp": "datetime64[s]"})
.assign(
contract_identifier=lambda df: df.contract_identifier.str.extract(r"(v\d+)")
calls = (
pd.concat(
[
pd.DataFrame(
itertools.chain.from_iterable(
[get_contract_calls(address) for address in contract_addresses]
)
),
(
pd.read_csv("calls.csv")
if Path("calls.csv").is_file()
else pd.DataFrame(columns=header)
),
],
ignore_index=True,
)
.reindex(header, axis=1)
.loc[lambda df: df.selector_name == "addSheet"]
.astype({"timestamp": int})
.astype({"timestamp": "datetime64[s]", "contract_identifier": str})
.assign(
contract_identifier=lambda df: df.contract_identifier.str.extract(
r"(v\d+)"
),
class_hash=lambda df: get_class_hashes(df.caller_address.tolist()),
)
.sort_values("timestamp", ascending=False)
)
)
json.dump(cursors, open("cursors.json", "w"), indent=4)
calls.to_csv("calls.csv", index=False)
return calls

calls.to_csv("calls.csv", index=False)

def get_class_hashes(contract_addresses):
    """Map contract addresses to wallet labels (or raw class hashes).

    Each address is resolved to its declared class hash via the explorer's
    GraphQL API.  Resolved hashes are cached on disk in ``class_hashes.json``
    so re-runs never re-query already-known addresses.  Hashes of well-known
    wallet classes are replaced by a human-readable label.

    Args:
        contract_addresses: list of hex contract addresses (the callers).

    Returns:
        A list aligned with ``contract_addresses`` containing either a wallet
        label ("Argent"/"Braavos") or the raw class hash when unknown.
    """
    # Class hashes of well-known account (wallet) contracts.
    labels = {
        "0x025ec026985a3bf9d0cc1fe17326b245dfdc3ff89b8fde106542a3ea56c5a918": "Argent",
        "0x03131fa018d520a037686ce3efddeab8f28895662f019ca3ca18a626650f7d1e": "Braavos",
    }
    cache_path = Path("class_hashes.json")
    # On-disk cache of address -> class_hash, shared across runs.
    known_classes = (
        json.loads(cache_path.read_text()) if cache_path.is_file() else {}
    )

    def _get_class_hash(contract_address):
        # Resolve a single address, serving from the cache first.
        cached = known_classes.get(contract_address)
        if cached:
            return cached

        data = {
            "query": """query ContractPageQuery(
    $input: ContractInput!
) {
    contract(input: $input) {
        contract_address
        class_hash
        id
    }
}""",
            "variables": {"input": {"contract_address": contract_address}},
        }
        response = requests.post(url, headers=headers, json=data)
        class_hash = response.json()["data"]["contract"]["class_hash"]
        known_classes[contract_address] = class_hash
        return class_hash

    class_hashes = []
    i = 0
    while i < len(contract_addresses):
        address = contract_addresses[i]
        try:
            class_hashes.append(_get_class_hash(address))
            i += 1
        except Exception:
            # Narrowed from a bare `except:` so Ctrl-C still interrupts; any
            # API/network failure backs off one minute and retries the same
            # address (presumably rate limiting — the original did the same).
            logger.warning(f"class hash fetch failed for {address}, retrying in 60s")
            time.sleep(60)
    # Persist the cache; write_text closes the handle, unlike the previous
    # inline open() which leaked it.
    cache_path.write_text(json.dumps(known_classes, indent=4))
    return [labels.get(class_hash, class_hash) for class_hash in class_hashes]


# %% Fetch data
# The three Starksheet deployments whose account calls we aggregate.
SHEET_CONTRACT_ADDRESSES = [
    "0x028850a764600d53b2009b17428ae9eb980a4c4ea930a69ed8668048ef082a04",
    "0x076a028b19d27310f5e9f941041ae4a3a52c0e0024d593ddcb0d34e1dcd24af1",
    "0x071d48483dcfa86718a717f57cf99a72ff8198b4538a6edccd955312fe624747",
]
calls = get_contracts_calls(SHEET_CONTRACT_ADDRESSES)
logger.info(f"📈 sheets: {len(calls)}")

# %% Plot daily sheet creation
# define the date range for the plot
plt.clf()
start_date = pd.Timestamp("2023-03-01")
end_date = pd.Timestamp.today()
date_range = pd.date_range(start_date, end_date, freq="D")

counts = (
daily = (
calls.groupby(["contract_identifier", pd.Grouper(key="timestamp", freq="D")])
.size()
.unstack("contract_identifier", fill_value=0)
.reindex(date_range)
.fillna(0)
.astype(int)
)

ax = counts.plot(kind="bar", stacked=True)
ax = daily.plot(kind="bar", stacked=True, figsize=(20, 7))

x_labels = [d.date().strftime("%m-%d") for d in counts.index]
category_sums = counts.sum()
category_sums = daily.sum()
ax.legend(
labels=[f"{category}: {category_sums[category]}" for category in counts.columns],
labels=[f"{category}: {category_sums[category]}" for category in daily.columns],
loc="upper left",
bbox_to_anchor=(0, 1),
)
ax.set_xticks(range(len(counts)))
x_labels = [d.date().strftime("%m-%d") for d in daily.index[::10]]
ax.set_xticks(range(0, len(daily), 10))
ax.set_xticklabels(x_labels, size=6)
ax.set_axisbelow(True)
ax.grid(axis="y", linestyle="--", color="grey")
ax.set_xlabel("Date")
ax.set_ylabel("New sheets")
ax.set_title(f"Total: {counts.sum().sum()}")
logger.info(f"📈 sheets: {counts.sum().sum()}")
ax.set_title(f"Total: {daily.sum().sum()}")
plt.tight_layout()
plt.savefig("daily_sheets.png")


# %% Plot cumsum
plt.clf()
ax = counts.cumsum().plot.area(grid=True)
ax = daily.sort_index().cumsum().plot.area(grid=True, alpha=0.85)
for line in ax.lines:
line.set_linewidth(0)
handles, _ = ax.get_legend_handles_labels()
ax.legend(
handles,
[f"{category}: {category_sums[category]}" for category in daily.columns],
loc="upper left",
bbox_to_anchor=(0, 1),
)
ax.set_title(f"Total: {daily.sum().sum()}")
plt.tight_layout()
plt.savefig("cumsum_sheets.png")
plt.savefig("cumsum_sheets.png", dpi=300)


# %% Plot monthly
counts = (
plt.clf()
monthly = (
calls.groupby(["contract_identifier", pd.Grouper(key="timestamp", freq="M")])
.size()
.unstack("contract_identifier", fill_value=0)
.fillna(0)
.astype(int)
)
ax = counts.plot(kind="bar", stacked=True)
x_labels = [d.date().strftime("%Y-%m") for d in counts.index]
ax = monthly.plot(kind="bar", stacked=True)
x_labels = [d.date().strftime("%Y-%m") for d in monthly.index]
ax.set_xticklabels(x_labels)
ax.grid(axis="y", linestyle="--", color="grey")
ax.legend(
labels=[
f"{category}: {category_sum}"
for category, category_sum in category_sums.items()
],
loc="upper left",
bbox_to_anchor=(0, 1),
)
ax.set_title(f"Total: {monthly.sum().sum()}")
plt.tight_layout()
plt.savefig("monthly_sheets.png")
plt.savefig("monthly_sheets.png", dpi=300)

# %% Plot hourly sheet creation
plt.clf()
Expand All @@ -167,3 +279,19 @@
)
plt.tight_layout()
plt.savefig("hourly_sheets.png")

# %% Users wallets
# Daily cumulative share of sheet creations per wallet class, as a
# normalized stacked area chart.
plt.clf()
wallet_daily = (
    calls.groupby(["class_hash", pd.Grouper(key="timestamp", freq="D")])
    .size()
    .unstack("class_hash", fill_value=0)
    .fillna(0)
    .astype(int)
)
# Running totals per wallet, then normalize each day so rows sum to 1.
wallet_share = (
    wallet_daily.sort_index()
    .cumsum()
    .transform(lambda row: row / row.sum(), axis=1)
)
ax = wallet_share.plot.area()
plt.tight_layout()
plt.savefig("wallets.png", dpi=300)

0 comments on commit 8af17dd

Please sign in to comment.