Skip to content

Commit

Permalink
Merge pull request #23 from automata-network/dev-v0.5
Browse files Browse the repository at this point in the history
release v0.5
  • Loading branch information
chzyer authored Oct 23, 2024
2 parents 9048afc + f6fad83 commit 5a9e15c
Show file tree
Hide file tree
Showing 10 changed files with 121 additions and 29 deletions.
10 changes: 8 additions & 2 deletions aggregator/aggregator.go
Original file line number Diff line number Diff line change
Expand Up @@ -80,6 +80,8 @@ type Config struct {
GenTaskSampling uint64
ExecTaskSampling uint64

LineaMaxBlock int64

OpenTelemetry *xmetric.OpenTelemetryConfig

TaskFetcher []*xtask.TaskManagerConfig
Expand All @@ -91,6 +93,9 @@ func (cfg *Config) Init() error {
if cfg.Sampling == 0 {
cfg.Sampling = 2000
}
if cfg.LineaMaxBlock == 0 {
cfg.LineaMaxBlock = 100
}
if cfg.ExecTaskSampling == 0 {
cfg.ExecTaskSampling = cfg.Sampling
}
Expand Down Expand Up @@ -179,7 +184,7 @@ func NewAggregator(ctx context.Context, cfg *Config) (*Aggregator, error) {

collector := xmetric.NewAggregatorCollector("avs")

taskManager, err := xtask.NewTaskManager(collector, int64(cfg.GenTaskSampling), eigenClients.EthHttpClient, cfg.TaskFetcher)
taskManager, err := xtask.NewTaskManager(collector, int64(cfg.GenTaskSampling), cfg.LineaMaxBlock, eigenClients.EthHttpClient, cfg.TaskFetcher)
if err != nil {
return nil, logex.Trace(err)
}
Expand Down Expand Up @@ -264,12 +269,13 @@ func (agg *Aggregator) startUpdateOperators(ctx context.Context) (func() error,
}

func (agg *Aggregator) verifyKey(x [32]byte, y [32]byte) (bool, error) {
for _, layer := range agg.attestationLayer {
for idx, layer := range agg.attestationLayer {
pass, err := layer.caller.VerifyLivenessProof(nil, x, y)
if err != nil {
return false, logex.Trace(err, "v1")
}
if pass {
logex.Info("pass attestation check in", idx)
return true, nil
}
}
Expand Down
7 changes: 4 additions & 3 deletions aggregator/client.go
Original file line number Diff line number Diff line change
Expand Up @@ -34,9 +34,10 @@ type Metadata struct {
}

type TaskRequest struct {
Task *StateHeader
Signature *bls.Signature
OperatorId types.OperatorId
Task *StateHeader
Signature *bls.Signature
OperatorId types.OperatorId
ProverSignature hexutil.Bytes
}

type StateHeader struct {
Expand Down
64 changes: 63 additions & 1 deletion contracts/bindings/TEELivenessVerifier/TEELivenessVerifier.go

Large diffs are not rendered by default.

2 changes: 2 additions & 0 deletions contracts/script/DeployTEELivenessService.s.sol
Original file line number Diff line number Diff line change
Expand Up @@ -200,6 +200,7 @@ contract DeployTEELivenessVerifier is Script, DcapTestUtils, CRLParser {
);
console.log("reuse proxy");
console.logAddress(verifierProxyAddr);
console.logAddress(address(proxyAdmin));
} else {
console.log("Deploy new proxy");
EmptyContract emptyContract = new EmptyContract();
Expand Down Expand Up @@ -229,6 +230,7 @@ contract DeployTEELivenessVerifier is Script, DcapTestUtils, CRLParser {
attestValiditySecs
);
}

proxyAdmin.upgradeAndCall(
ITransparentUpgradeableProxy(verifierProxyAddr),
address(verifierImpl),
Expand Down
22 changes: 18 additions & 4 deletions contracts/src/core/TEELivenessVerifier.sol
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,7 @@ contract TEELivenessVerifier is OwnableUpgradeable {

// added at v2
uint256 public maxBlockNumberDiff;
mapping(bytes32 proverKey => address proverAddr) public attestedProverAddr;

constructor() {
_disableInitializers();
Expand Down Expand Up @@ -97,13 +98,13 @@ contract TEELivenessVerifier is OwnableUpgradeable {
bytes32 reportHash = keccak256(_report);
require(!attestedReports[reportHash], "report is already used");

(, bytes32 reportDataHash) = splitBytes64(reportData);
(bytes32 proverBytes, bytes32 reportDataHash) = splitBytes64(reportData);
require(dataHash == reportDataHash, "report data hash mismatch");

Prover memory prover = Prover(_data.pubkey, block.timestamp);
attestedProvers[
keccak256(abi.encode(_data.pubkey.x, _data.pubkey.y))
] = prover;
bytes32 proverKey = keccak256(abi.encode(_data.pubkey.x, _data.pubkey.y));
attestedProvers[proverKey] = prover;
attestedProverAddr[proverKey] = address(uint160(uint256(proverBytes)));
attestedReports[reportHash] = true;
}

Expand All @@ -117,6 +118,19 @@ contract TEELivenessVerifier is OwnableUpgradeable {
block.timestamp;
}

/// @notice Returns whether the prover identified by (pubkeyX, pubkeyY) holds a
///         still-valid liveness attestation AND is bound to the given address.
/// @param pubkeyX X coordinate of the prover's BLS public key.
/// @param pubkeyY Y coordinate of the prover's BLS public key.
/// @param proverKey Address the attested prover record must match.
/// @return True only when the attestation is unexpired and the bound address
///         equals `proverKey`.
function verifyLivenessProofV2(
    bytes32 pubkeyX,
    bytes32 pubkeyY,
    address proverKey
) public view returns (bool) {
    bytes32 signer = keccak256(abi.encode(pubkeyX, pubkeyY));
    // Guard: the attestation window must still be open at the current block time.
    if (attestedProvers[signer].time + attestValiditySeconds <= block.timestamp) {
        return false;
    }
    // The attested key must also be bound to the expected prover address.
    return attestedProverAddr[signer] == proverKey;
}

function verifyAttestationV2(
bytes32 pubkeyX,
bytes32 pubkeyY,
Expand Down
14 changes: 8 additions & 6 deletions operator/operator.go
Original file line number Diff line number Diff line change
Expand Up @@ -324,9 +324,10 @@ func (o *Operator) processLineaTask(ctx context.Context, resp *aggregator.FetchT
submitTaskTime := time.Now()
// submit to aggregator
if err := o.aggregator.SubmitTask(ctx, &aggregator.TaskRequest{
Task: stateHeader,
Signature: sig,
OperatorId: o.operatorId,
Task: stateHeader,
Signature: sig,
OperatorId: o.operatorId,
ProverSignature: poe.PoeSignature,
}); err != nil {
return logex.Trace(err)
}
Expand Down Expand Up @@ -399,9 +400,10 @@ func (o *Operator) processScrollTask(ctx context.Context, resp *aggregator.Fetch
submitTaskTime := time.Now()
// submit to aggregator
if err := o.aggregator.SubmitTask(ctx, &aggregator.TaskRequest{
Task: stateHeader,
Signature: sig,
OperatorId: o.operatorId,
Task: stateHeader,
Signature: sig,
OperatorId: o.operatorId,
ProverSignature: poe.PoeSignature,
}); err != nil {
return logex.Trace(err)
}
Expand Down
4 changes: 1 addition & 3 deletions scripts/attestation.sh
Original file line number Diff line number Diff line change
Expand Up @@ -22,9 +22,7 @@ function deploy() {
ENV=$ENV \
_script script/DeployTEELivenessService.s.sol --sig $1'()'
teeVerifierAddr=$(_get_key $TEE_DEPLOY .TEELivenessVerifierProxy)
_set_key config/aggregator.json TEELivenessVerifierContractAddress $teeVerifierAddr
_set_key config/operator.json TEELivenessVerifierAddress $teeVerifierAddr
cat $TEE_DEPLOY
}
function set_validity_secs() {
Expand Down
2 changes: 1 addition & 1 deletion sgx-prover
Submodule sgx-prover updated 135 files
12 changes: 7 additions & 5 deletions xtask/prover_client.go
Original file line number Diff line number Diff line change
Expand Up @@ -53,11 +53,12 @@ func NewProverClient(url string) (*ProverClient, error) {
}

type PoeResponse struct {
NotReady bool `json:"not_ready"`
BatchId uint64 `json:"batch_id"`
StartBlock uint64 `json:"start_block"`
EndBlock uint64 `json:"end_block"`
Poe *Poe `json:"poe"`
NotReady bool `json:"not_ready"`
BatchId uint64 `json:"batch_id"`
StartBlock uint64 `json:"start_block"`
EndBlock uint64 `json:"end_block"`
Poe *Poe `json:"poe"`
PoeSignature hexutil.Bytes `json:"poe_signature"`
}

type Poe struct {
Expand Down Expand Up @@ -93,6 +94,7 @@ func (p *ProverClient) DaTryLock(ctx context.Context, hash common.Hash) (string,
if err := p.client.CallContext(ctx, &result, "da_tryLock", hash); err != nil {
return "", logex.Trace(err)
}
logex.Infof("DA TryLock[%v]: %v", hash, result)
return result, nil
}

Expand Down
13 changes: 9 additions & 4 deletions xtask/task_manager.go
Original file line number Diff line number Diff line change
Expand Up @@ -113,7 +113,8 @@ type TaskManager struct {
tasksMutex sync.Mutex
tasks map[TaskType]*TaskTuple

lineaPrevLog *types.Log
lineaMaxBlock int64
lineaPrevLog *types.Log
}

type TaskTuple struct {
Expand All @@ -128,7 +129,7 @@ type TaskInfo struct {
Ext json.RawMessage
}

func NewTaskManager(collector *xmetric.AggregatorCollector, sampling int64, referenceClient eth.Client, tasks []*TaskManagerConfig) (*TaskManager, error) {
func NewTaskManager(collector *xmetric.AggregatorCollector, sampling int64, lineaMaxBlock int64, referenceClient eth.Client, tasks []*TaskManagerConfig) (*TaskManager, error) {
sources := make(map[string]*ethclient.Client)
tracers := make(map[TaskType]*utils.LogTracer)
contexts := make(map[TaskType]*TaskContext)
Expand All @@ -139,6 +140,7 @@ func NewTaskManager(collector *xmetric.AggregatorCollector, sampling int64, refe
contexts: contexts,
collector: collector,
referenceClient: referenceClient,
lineaMaxBlock: lineaMaxBlock,
tasks: make(map[TaskType]*TaskTuple, MaxTaskType),
}

Expand Down Expand Up @@ -296,6 +298,9 @@ func (t *TaskManager) onLineaTask(ctx context.Context, _ *ethclient.Client, log

startBlock := new(big.Int).SetBytes(prevLog.Topics[1][:]).Int64() + 1
endBlock := new(big.Int).SetBytes(log.Topics[1][:]).Int64()
if endBlock-startBlock > t.lineaMaxBlock {
startBlock = startBlock - endBlock
}
batchId := endBlock // can't determine the batch, so we use the end block number

logex.Infof("generating task[linea] for #%v, refblk: %v", batchId, referenceBlockNumber)
Expand Down Expand Up @@ -323,7 +328,7 @@ func (t *TaskManager) onLineaTask(ctx context.Context, _ *ethclient.Client, log
return nil
}
if err != nil {
return logex.Trace(err, fmt.Sprintf("fetching context for scroll batchId#%v", batchId))
return logex.Trace(err, fmt.Sprintf("fetching context for linea batchId#%v", batchId))
}
generateContextCost := time.Since(startGenerateContext).Truncate(time.Millisecond)

Expand Down Expand Up @@ -395,7 +400,7 @@ func (t *TaskManager) onScrollTask(ctx context.Context, source *ethclient.Client
return logex.Trace(err)
}

t.updateTask(*taskInfo)
// t.updateTask(*taskInfo)

startGenerateContext := time.Now()
taskCtx, ignore, err := prover.GenerateScrollContext(ctx, startBlock, endBlock, taskInfo.Type)
Expand Down

0 comments on commit 5a9e15c

Please sign in to comment.