
Commit 99f0ef0

Merge branch 'main' into rate-limiter

hackerwins committed Feb 28, 2025
2 parents 1cd6410 + c7797cf
Showing 14 changed files with 131 additions and 274 deletions.
69 changes: 45 additions & 24 deletions .github/workflows/ci.yml
@@ -109,24 +109,17 @@ jobs:
with:
go-version: ${{ env.GO_VERSION }}

- name: Check out code
- name: Checkout current repository
uses: actions/checkout@v4
with:
path: repo

- name: Stack
run: docker compose -f build/docker/docker-compose.yml up --build -d

- name: Bench
id: curr-bench
run: |
make bench
content=$(cat output.txt | jq -R -s .)
echo "BENCH_RESULT=$content" >> $GITHUB_OUTPUT
- name: Set up cache
uses: actions/cache@v3
- name: Checkout benchmark branch
uses: actions/checkout@v4
with:
path: ./cache
key: ${{ runner.os }}-benchmark
ref: yorkie-ci-benchmark
path: benchmark-repo
continue-on-error: true

- name: Read previous benchmark result
if: github.event_name == 'pull_request'
@@ -135,16 +128,26 @@
echo "PREV_BENCH_RESULT=null" >> $GITHUB_OUTPUT
echo "PREV_COMMIT=null" >> $GITHUB_OUTPUT
if [ -f "./cache/bench_result.txt" ]; then
content=$(cat ./cache/bench_result.txt | jq -R -s .)
if [ -d "benchmark-repo" ] && [ -f "benchmark-repo/bench_result.txt" ]; then
content=$(cat benchmark-repo/bench_result.txt | jq -R -s .)
echo "PREV_BENCH_RESULT=$content" >> $GITHUB_OUTPUT
if [ -f "./cache/commit_hash.txt" ]; then
prev_commit=$(cat ./cache/commit_hash.txt)
if [ -f "benchmark-repo/commit_hash.txt" ]; then
prev_commit=$(cat benchmark-repo/commit_hash.txt)
echo "PREV_COMMIT=$prev_commit" >> $GITHUB_OUTPUT
fi
fi
- name: Stack
run: docker compose -f repo/build/docker/docker-compose.yml up --build -d

- name: Bench
id: curr-bench
run: |
cd repo
make bench
content=$(cat output.txt | jq -R -s .)
echo "BENCH_RESULT=$content" >> $GITHUB_OUTPUT
- name: Trigger n8n webhook
if: github.event_name == 'pull_request'
run: |
@@ -166,12 +169,30 @@
exit 1
fi
- name: Store benchmark result to cache
- name: Store benchmark result
if: github.ref == 'refs/heads/main'
run: |
mkdir -p ./cache
cp output.txt ./cache/bench_result.txt
echo "${{ github.sha }}" > ./cache/commit_hash.txt
mkdir -p benchmark-repo
cp repo/output.txt benchmark-repo/bench_result.txt
echo "${{ github.sha }}" > benchmark-repo/commit_hash.txt
cd benchmark-repo
git config user.name "GitHub Actions"
git config user.email "[email protected]"
if [ ! -d ".git" ]; then
git init
git remote add origin https://x-access-token:${{ secrets.GITHUB_TOKEN }}@github.com/${{ github.repository }}
fi
git add bench_result.txt
git add commit_hash.txt
TIMESTAMP=$(date -u +"%Y-%m-%d %H:%M:%S UTC")
git diff --staged --quiet || git commit -m "Update benchmark results at $TIMESTAMP"
git checkout -B yorkie-ci-benchmark
git push -f origin yorkie-ci-benchmark
echo "Benchmark results have been pushed to yorkie-ci-benchmark branch"
complex-test:
name: complex-test
2 changes: 0 additions & 2 deletions .golangci.yml
@@ -31,8 +31,6 @@ linters-settings:
wrapcheck:
ignoreSigRegexps:
- github.com/yorkie-team/yorkie # Ignore all methods in internal package
- google.golang.org/grpc/status # Ignore all methods in grpc/status package
- google.golang.org/grpc/internal/status # Ignore all methods in grpc/internal/status package
- context.Context # Ignore all methods in context package

issues:
4 changes: 2 additions & 2 deletions api/converter/from_pb.go
@@ -156,12 +156,12 @@ func FromChanges(pbChanges []*api.Change) ([]*change.Change, error) {
func fromChangeID(id *api.ChangeID) (change.ID, error) {
actorID, err := time.ActorIDFromBytes(id.ActorId)
if err != nil {
return change.InitialID, err
return change.InitialID(), err
}

vector, err := FromVersionVector(id.VersionVector)
if err != nil {
return change.InitialID, err
return change.InitialID(), err
}

return change.NewID(
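The call sites above move from the package-level change.InitialID variable to the change.InitialID() constructor added later in this commit (pkg/document/change/id.go). A minimal sketch of why that matters, using simplified stand-in types rather than the real yorkie ones: a shared variable whose value embeds a map-backed version vector leaks in-place mutations to every caller, while a constructor hands each caller a fresh vector.

// Illustrative only: simplified stand-ins, not the yorkie types.
package main

import "fmt"

// vector is a simplified stand-in for time.VersionVector (a map under the hood).
type vector map[string]int64

// id is a simplified stand-in for change.ID.
type id struct {
	lamport int64
	vv      vector
}

// sharedInitialID mimics the old package-level InitialID: every copy of the
// struct still points at the same underlying map.
var sharedInitialID = id{vv: vector{}}

// initialID mimics the new InitialID() constructor: each call allocates a
// fresh map, so in-place vector updates cannot leak between documents.
func initialID() id {
	return id{vv: vector{}}
}

func main() {
	a, b := sharedInitialID, sharedInitialID
	a.vv["actor-1"] = 7
	fmt.Println(b.vv["actor-1"]) // 7: the mutation leaked through the shared variable

	c, d := initialID(), initialID()
	c.vv["actor-1"] = 7
	fmt.Println(d.vv["actor-1"]) // 0: each caller owns its own vector
}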
24 changes: 16 additions & 8 deletions cmd/yorkie/project/create.go
@@ -22,10 +22,10 @@ import (
"errors"
"fmt"

"connectrpc.com/connect"
"github.com/spf13/cobra"
"github.com/spf13/viper"
"google.golang.org/genproto/googleapis/rpc/errdetails"
"google.golang.org/grpc/status"
"gopkg.in/yaml.v3"

"github.com/yorkie-team/yorkie/admin"
@@ -61,13 +61,21 @@ func newCreateCommand() *cobra.Command {
ctx := context.Background()
project, err := cli.CreateProject(ctx, name)
if err != nil {
// TODO(chacha912): consider creating the error details type to remove the dependency on gRPC.
st := status.Convert(err)
for _, detail := range st.Details() {
switch t := detail.(type) {
case *errdetails.BadRequest:
for _, violation := range t.GetFieldViolations() {
cmd.Printf("Invalid Fields: The %q field was wrong: %s\n", violation.GetField(), violation.GetDescription())
var connErr *connect.Error
if errors.As(err, &connErr) {
for _, detail := range connErr.Details() {
value, err := detail.Value()
if err != nil {
continue
}

badReq, ok := value.(*errdetails.BadRequest)
if !ok {
continue
}

for _, violation := range badReq.GetFieldViolations() {
cmd.Printf("Invalid Field: %q - %s\n", violation.GetField(), violation.GetDescription())
}
}
}
24 changes: 16 additions & 8 deletions cmd/yorkie/project/update.go
@@ -22,10 +22,10 @@ import (
"errors"
"fmt"

"connectrpc.com/connect"
"github.com/spf13/cobra"
"github.com/spf13/viper"
"google.golang.org/genproto/googleapis/rpc/errdetails"
"google.golang.org/grpc/status"
"gopkg.in/yaml.v3"

"github.com/yorkie-team/yorkie/admin"
@@ -137,13 +137,21 @@ func newUpdateCommand() *cobra.Command {

updated, err := cli.UpdateProject(ctx, id, updatableProjectFields)
if err != nil {
// TODO(chacha912): consider creating the error details type to remove the dependency on gRPC.
st := status.Convert(err)
for _, detail := range st.Details() {
switch t := detail.(type) {
case *errdetails.BadRequest:
for _, violation := range t.GetFieldViolations() {
cmd.Printf("Invalid Fields: The %q field was wrong: %s\n", violation.GetField(), violation.GetDescription())
var connErr *connect.Error
if errors.As(err, &connErr) {
for _, detail := range connErr.Details() {
value, err := detail.Value()
if err != nil {
continue
}

badReq, ok := value.(*errdetails.BadRequest)
if !ok {
continue
}

for _, violation := range badReq.GetFieldViolations() {
cmd.Printf("Invalid Field: %q - %s\n", violation.GetField(), violation.GetDescription())
}
}
}
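Both project create and project update replace the grpc/status unwrapping with connect's native error type. A minimal, self-contained sketch of the same pattern factored into one helper: the helper name fieldViolations and the sample violation message are hypothetical, while the connect.Error.Details, ErrorDetail.Value, and errdetails.BadRequest calls are the ones used in the diffs above.

package main

import (
	"errors"
	"fmt"

	"connectrpc.com/connect"
	"google.golang.org/genproto/googleapis/rpc/errdetails"
)

// fieldViolations collects "field -> description" pairs from any BadRequest
// details attached to a *connect.Error; it returns nil for other errors.
func fieldViolations(err error) map[string]string {
	var connErr *connect.Error
	if !errors.As(err, &connErr) {
		return nil
	}

	violations := map[string]string{}
	for _, detail := range connErr.Details() {
		value, err := detail.Value() // decode the wrapped Any payload
		if err != nil {
			continue
		}
		badReq, ok := value.(*errdetails.BadRequest)
		if !ok {
			continue
		}
		for _, v := range badReq.GetFieldViolations() {
			violations[v.GetField()] = v.GetDescription()
		}
	}
	return violations
}

func main() {
	// Build a connect error carrying a BadRequest detail, the shape the CLI
	// expects from the admin server for invalid project fields.
	badReq := &errdetails.BadRequest{
		FieldViolations: []*errdetails.BadRequest_FieldViolation{
			{Field: "name", Description: "invalid project name"},
		},
	}
	connErr := connect.NewError(connect.CodeInvalidArgument, errors.New("invalid fields"))
	if detail, err := connect.NewErrorDetail(badReq); err == nil {
		connErr.AddDetail(detail)
	}

	for field, desc := range fieldViolations(connErr) {
		fmt.Printf("Invalid Field: %q - %s\n", field, desc)
	}
}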
18 changes: 10 additions & 8 deletions pkg/document/change/id.go
@@ -25,12 +25,6 @@ const (
InitialLamport = 0
)

var (
// InitialID represents the initial state ID. Usually this is used to
// represent a state where nothing has been edited.
InitialID = NewID(InitialClientSeq, InitialServerSeq, InitialLamport, time.InitialActorID, time.InitialVersionVector)
)

// ID represents the identifier of the change. It is used to identify the
// change and to order the changes. It is also used to detect the relationship
// between changes whether they are causally related or concurrent.
@@ -75,6 +69,12 @@ func NewID(
}
}

// InitialID creates an initial state ID. Usually this is used to
// represent a state where nothing has been edited.
func InitialID() ID {
return NewID(InitialClientSeq, InitialServerSeq, InitialLamport, time.InitialActorID, time.NewVersionVector())
}

// Next creates a next ID of this ID.
func (id ID) Next() ID {
versionVector := id.versionVector.DeepCopy()
@@ -112,8 +112,9 @@ func (id ID) SyncClocks(other ID) ID {
otherVV = otherVV.DeepCopy()
otherVV.Set(other.actorID, other.lamport)
}
id.versionVector.Max(&otherVV)

newID := NewID(id.clientSeq, InitialServerSeq, lamport, id.actorID, id.versionVector.Max(otherVV))
newID := NewID(id.clientSeq, InitialServerSeq, lamport, id.actorID, id.versionVector)
newID.versionVector.Set(id.actorID, lamport)
return newID
}
@@ -134,8 +135,9 @@ func (id ID) SetClocks(otherLamport int64, vector time.VersionVector) ID {
// Semantically, including a non-client actor in version vector is
// problematic. To address this, we remove the InitialActorID from snapshots.
vector.Unset(time.InitialActorID)
id.versionVector.Max(&vector)

newID := NewID(id.clientSeq, id.serverSeq, lamport, id.actorID, id.versionVector.Max(vector))
newID := NewID(id.clientSeq, id.serverSeq, lamport, id.actorID, id.versionVector)
newID.versionVector.Set(id.actorID, lamport)

return newID
7 changes: 5 additions & 2 deletions pkg/document/internal_document.go
@@ -97,7 +97,7 @@ func NewInternalDocument(k key.Key) *InternalDocument {
status: StatusDetached,
root: crdt.NewRoot(root),
checkpoint: change.InitialCheckpoint,
changeID: change.InitialID,
changeID: change.InitialID(),
presences: innerpresence.NewMap(),
onlineClients: &gosync.Map{},
}
@@ -116,14 +116,17 @@ func NewInternalDocumentFromSnapshot(
return nil, err
}

changeID := change.InitialID()
changeID.SetClocks(lamport, vector)

return &InternalDocument{
key: k,
status: StatusDetached,
root: crdt.NewRoot(obj),
presences: presences,
onlineClients: &gosync.Map{},
checkpoint: change.InitialCheckpoint.NextServerSeq(serverSeq),
changeID: change.InitialID.SetClocks(lamport, vector),
changeID: changeID,
}, nil
}

48 changes: 19 additions & 29 deletions pkg/document/time/version_vector.go
@@ -130,54 +130,44 @@ func (v VersionVector) EqualToOrAfter(other *Ticket) bool {
return clientLamport >= other.lamport
}

// Min returns new vv consists of every min value in each column.
func (v VersionVector) Min(other VersionVector) VersionVector {
minVV := NewVersionVector()

// Min modifies the receiver in-place to contain the minimum values between itself
// and the given version vector.
// Note: This method modifies the receiver for memory efficiency.
func (v VersionVector) Min(other *VersionVector) {
for key, value := range v {
if otherValue, exists := other[key]; exists {
if value < otherValue {
minVV[key] = value
} else {
minVV[key] = otherValue
if otherValue, exists := (*other)[key]; exists {
if value > otherValue {
v[key] = otherValue
}
} else {
minVV[key] = 0
v[key] = 0
}
}

for key := range other {
for key := range *other {
if _, exists := v[key]; !exists {
minVV[key] = 0
v[key] = 0
}
}

return minVV
}

// Max returns new vv consists of every max value in each column.
func (v VersionVector) Max(other VersionVector) VersionVector {
maxVV := NewVersionVector()

// Max modifies the receiver in-place to contain the maximum values between itself
// and the given version vector.
// Note: This method modifies the receiver for memory efficiency.
func (v VersionVector) Max(other *VersionVector) {
for key, value := range v {
if otherValue, exists := other[key]; exists {
if value > otherValue {
maxVV[key] = value
} else {
maxVV[key] = otherValue
if otherValue, exists := (*other)[key]; exists {
if value < otherValue {
v[key] = otherValue
}
} else {
maxVV[key] = value
}
}

for key, value := range other {
for key, value := range *other {
if _, exists := v[key]; !exists {
maxVV[key] = value
v[key] = value
}
}

return maxVV
}

// MaxLamport returns max lamport value in version vector.
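Min and Max no longer allocate and return a new vector; they update the receiver in place and take the other vector by pointer. A minimal sketch of the new Max contract, using a plain map[string]int64 stand-in instead of the real time.VersionVector: the receiver becomes the result, so callers that still need the pre-merge values have to copy the vector first.

// Illustrative only: a map[string]int64 stand-in for time.VersionVector.
package main

import "fmt"

type versionVector map[string]int64

// maxInPlace mirrors the reworked VersionVector.Max: each actor's entry in v
// is raised to the larger of the two values, actors known only to other are
// copied in, and nothing is returned because v itself is modified.
func (v versionVector) maxInPlace(other *versionVector) {
	for key, value := range v {
		if otherValue, exists := (*other)[key]; exists && value < otherValue {
			v[key] = otherValue
		}
	}
	for key, value := range *other {
		if _, exists := v[key]; !exists {
			v[key] = value
		}
	}
}

func main() {
	local := versionVector{"a": 3, "b": 5}
	remote := versionVector{"a": 4, "c": 2}

	// Before this commit, local.Max(remote) returned a fresh vector and left
	// local untouched; now the receiver is updated, saving one allocation per merge.
	local.maxInPlace(&remote)
	fmt.Println(local) // map[a:4 b:5 c:2]
}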
6 changes: 3 additions & 3 deletions pkg/document/time/version_vector_test.go
@@ -99,10 +99,10 @@ func TestVersionVector(t *testing.T) {
},
}

for _, tc := range tests {
for i, tc := range tests {
t.Run(tc.name, func(t *testing.T) {
result := tc.v1.Min(tc.v2)
assert.Equal(t, tc.expect, result)
tc.v1.Min(&tests[i].v2)
assert.Equal(t, tc.expect, tc.v1)
})
}
}